#include <../src/mat/impls/nest/matnestimpl.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <petscsf.h>

/* Forward declarations for routines referenced before their definitions */
static PetscErrorCode MatSetUp_NestIS_Private(Mat, PetscInt, const IS[], PetscInt, const IS[]);
static PetscErrorCode MatCreateVecs_Nest(Mat, Vec *, Vec *);
static PetscErrorCode MatReset_Nest(Mat);

PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat, MatType, MatReuse, Mat *);

/* private functions */

/* Compute the local (m,n) and global (M,N) sizes of the nest matrix by summing
   the sizes of its global row and column index sets */
static PetscErrorCode MatNestGetSizes_Private(Mat A, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  *m = *n = *M = *N = 0;
  for (i = 0; i < bA->nr; i++) { /* rows */
    PetscInt sm, sM;
    PetscCall(ISGetLocalSize(bA->isglobal.row[i], &sm));
    PetscCall(ISGetSize(bA->isglobal.row[i], &sM));
    *m += sm;
    *M += sM;
  }
  for (j = 0; j < bA->nc; j++) { /* cols */
    PetscInt sn, sN;
    PetscCall(ISGetLocalSize(bA->isglobal.col[j], &sn));
    PetscCall(ISGetSize(bA->isglobal.col[j], &sN));
    *n += sn;
    *N += sN;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* operations */

/* y = A*x for a nest matrix: each block row i of y accumulates A[i][j]*x[j]
   over the nonempty blocks j; empty (NULL) blocks are treated as zero */
static PetscErrorCode MatMult_Nest(Mat A, Vec x, Vec y)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->right, *by = bA->left;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  /* view x and y through the nest's global index sets as per-block sub-vectors */
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i]));
  for (i = 0; i < nr; i++) {
    PetscCall(VecZeroEntries(by[i]));
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      /* y[i] <- y[i] + A[i][j] * x[j] */
      PetscCall(MatMultAdd(bA->m[i][j], bx[j], by[i], by[i]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A*x: like MatMult_Nest but seeds each block row of z from y first
   (copy skipped when y and z alias, so the existing z content is reused) */
static PetscErrorCode MatMultAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->right, *bz = bA->left;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(z, bA->isglobal.row[i], &bz[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i]));
  for (i = 0; i < nr; i++) {
    if (y != z) {
      Vec by;
      PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by));
      PetscCall(VecCopy(by, bz[i]));
      PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by));
    }
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      /* y[i] <- y[i] + A[i][j] * x[j] */
      PetscCall(MatMultAdd(bA->m[i][j], bx[j], bz[i], bz[i]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.row[i], &bz[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Product data attached to C = A*B when A is MATNEST and B is dense */
typedef struct {
  Mat         *workC;    /* array of Mat with specific containers depending on the underlying MatMatMult implementation */
  PetscScalar *tarray;   /* buffer for storing all temporary products A[i][j] B[j] */
  PetscInt    *dm, *dn, k; /* displacements and number of submatrices */
} Nest_Dense;

/* Numeric phase of C = A*B (A nest, B dense): for each block A[i][j], multiply it
   against a dense view of the matching rows of B and accumulate into a dense view
   of the matching rows of C, reusing the work matrices built in the symbolic phase */
PETSC_INTERN PetscErrorCode MatProductNumeric_Nest_Dense(Mat C)
{
  Mat_Nest          *bA;
  Nest_Dense        *contents;
  Mat                viewB, viewC, productB, workC;
  const PetscScalar *barray;
  PetscScalar       *carray;
  PetscInt           i, j, M, N, nr, nc, ldb, ldc;
  Mat                A, B;

  PetscFunctionBegin;
  MatCheckProduct(C, 3);
  A = C->product->A;
  B = C->product->B;
  PetscCall(MatGetSize(B, NULL, &N));
  if (!N) { /* empty B: nothing to compute, just finalize C */
    PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  contents = (Nest_Dense *)C->product->data;
  PetscCheck(contents, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data empty");
  bA = (Mat_Nest *)A->data;
  nr = bA->nr;
  nc = bA->nc;
  PetscCall(MatDenseGetLDA(B, &ldb));
  PetscCall(MatDenseGetLDA(C, &ldc));
  PetscCall(MatZeroEntries(C));
  PetscCall(MatDenseGetArrayRead(B, &barray));
  PetscCall(MatDenseGetArray(C, &carray));
  for (i = 0; i < nr; i++) {
    /* viewC wraps the rows of C owned by block row i, sharing C's storage */
    PetscCall(ISGetSize(bA->isglobal.row[i], &M));
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dm[i + 1] - contents->dm[i], PETSC_DECIDE, M, N, carray + contents->dm[i], &viewC));
    PetscCall(MatDenseSetLDA(viewC, ldc));
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      /* viewB wraps the rows of B owned by block column j */
      PetscCall(ISGetSize(bA->isglobal.col[j], &M));
      PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, (PetscScalar *)(barray + contents->dn[j]), &viewB));
      PetscCall(MatDenseSetLDA(viewB, ldb));

      /* MatMatMultNumeric(bA->m[i][j],viewB,contents->workC[i*nc + j]); */
      workC             = contents->workC[i * nc + j];
      productB          = workC->product->B;
      workC->product->B = viewB; /* use newly created dense matrix viewB */
      PetscCall(MatProductNumeric(workC));
      PetscCall(MatDestroy(&viewB));
      workC->product->B = productB; /* resume original B */

      /* C[i] <- workC + C[i] */
      PetscCall(MatAXPY(viewC, 1.0, contents->workC[i * nc + j], SAME_NONZERO_PATTERN));
    }
    PetscCall(MatDestroy(&viewC));
  }
  PetscCall(MatDenseRestoreArray(C, &carray));
  PetscCall(MatDenseRestoreArrayRead(B, &barray));

  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destroy callback for the Nest_Dense product data: frees the shared work buffer,
   the work matrices, and the displacement arrays */
PetscErrorCode MatNest_DenseDestroy(void *ctx)
{
  Nest_Dense *contents = (Nest_Dense *)ctx;
  PetscInt    i;

  PetscFunctionBegin;
  PetscCall(PetscFree(contents->tarray));
  for (i = 0; i < contents->k; i++) PetscCall(MatDestroy(contents->workC + i));
  PetscCall(PetscFree3(contents->dm, contents->dn, contents->workC));
  PetscCall(PetscFree(contents));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic phase of C = A*B (A nest, B dense): sizes and types C, builds the
   row/column displacement arrays, and creates one symbolic work product per
   nonempty block A[i][j], all sharing a single temporary dense buffer */
PETSC_INTERN PetscErrorCode MatProductSymbolic_Nest_Dense(Mat C)
{
  Mat_Nest          *bA;
  Mat                viewB, workC;
  const PetscScalar *barray;
  PetscInt           i, j, M, N, m, n, nr, nc, maxm = 0, ldb;
  Nest_Dense        *contents = NULL;
  PetscBool          cisdense;
  Mat                A, B;
  PetscReal          fill;

  PetscFunctionBegin;
  MatCheckProduct(C, 4);
  PetscCheck(!C->product->data, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data not empty");
  A    = C->product->A;
  B    = C->product->B;
  fill = C->product->fill;
  bA   = (Mat_Nest *)A->data;
  nr   = bA->nr;
  nc   = bA->nc;
  PetscCall(MatGetLocalSize(C, &m, &n));
  PetscCall(MatGetSize(C, &M, &N));
  if (m == PETSC_DECIDE || n == PETSC_DECIDE || M == PETSC_DECIDE || N == PETSC_DECIDE) {
    /* C not yet sized: rows come from A, columns from B */
    PetscCall(MatGetLocalSize(B, NULL, &n));
    PetscCall(MatGetSize(B, NULL, &N));
    PetscCall(MatGetLocalSize(A, &m, NULL));
    PetscCall(MatGetSize(A, &M, NULL));
    PetscCall(MatSetSizes(C, m, n, M, N));
  }
  PetscCall(PetscObjectTypeCompareAny((PetscObject)C, &cisdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, MATMPIDENSECUDA, ""));
  if (!cisdense) PetscCall(MatSetType(C, ((PetscObject)B)->type_name));
  PetscCall(MatSetUp(C));
  if (!N) { /* empty B: numeric phase will just assemble C */
    C->ops->productnumeric = MatProductNumeric_Nest_Dense;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscNew(&contents));
  C->product->data    = contents;
  C->product->destroy = MatNest_DenseDestroy;
  PetscCall(PetscCalloc3(nr + 1, &contents->dm, nc + 1, &contents->dn, nr * nc, &contents->workC));
  contents->k = nr * nc;
  /* dm/dn become prefix sums of block-row/block-column local sizes; maxm tracks
     the largest block row so one tarray buffer can serve every work product */
  for (i = 0; i < nr; i++) {
    PetscCall(ISGetLocalSize(bA->isglobal.row[i], contents->dm + i + 1));
    maxm = PetscMax(maxm, contents->dm[i + 1]);
    contents->dm[i + 1] += contents->dm[i];
  }
  for (i = 0; i < nc; i++) {
    PetscCall(ISGetLocalSize(bA->isglobal.col[i], contents->dn + i + 1));
    contents->dn[i + 1] += contents->dn[i];
  }
  PetscCall(PetscMalloc1(maxm * N, &contents->tarray));
  PetscCall(MatDenseGetLDA(B, &ldb));
  PetscCall(MatGetSize(B, NULL, &N));
  PetscCall(MatDenseGetArrayRead(B, &barray));
  /* loops are permuted compared to MatMatMultNumeric so that viewB is created only once per column of A */
  for (j = 0; j < nc; j++) {
    PetscCall(ISGetSize(bA->isglobal.col[j], &M));
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, (PetscScalar *)(barray + contents->dn[j]), &viewB));
    PetscCall(MatDenseSetLDA(viewB, ldb));
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      /* MatMatMultSymbolic may attach a specific container (depending on MatType of bA->m[i][j]) to workC[i][j] */

      PetscCall(MatProductCreate(bA->m[i][j], viewB, NULL, &contents->workC[i * nc + j]));
      workC = contents->workC[i * nc + j];
      PetscCall(MatProductSetType(workC, MATPRODUCT_AB));
      PetscCall(MatProductSetAlgorithm(workC, "default"));
      PetscCall(MatProductSetFill(workC, fill));
      PetscCall(MatProductSetFromOptions(workC));
      PetscCall(MatProductSymbolic(workC));

      /* since tarray will be shared by all Mat */
      PetscCall(MatSeqDenseSetPreallocation(workC, contents->tarray));
      PetscCall(MatMPIDenseSetPreallocation(workC, contents->tarray));
    }
    PetscCall(MatDestroy(&viewB));
  }
  PetscCall(MatDenseRestoreArrayRead(B, &barray));

  C->ops->productnumeric = MatProductNumeric_Nest_Dense;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------- */
/* Install the symbolic routine for the AB product of nest x dense */
static PetscErrorCode MatProductSetFromOptions_Nest_Dense_AB(Mat C)
{
  PetscFunctionBegin;
  C->ops->productsymbolic = MatProductSymbolic_Nest_Dense;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Dispatch MatProduct configuration for nest x dense; only MATPRODUCT_AB is handled */
PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Nest_Dense(Mat C)
{
  Mat_Product *product = C->product;

  PetscFunctionBegin;
  if (product->type == MATPRODUCT_AB) PetscCall(MatProductSetFromOptions_Nest_Dense_AB(C));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------- */

/* y = A^T * x: block column j of y accumulates (A[i][j])^T * x[i] over nonempty blocks i */
static PetscErrorCode MatMultTranspose_Nest(Mat A, Vec x, Vec y)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->left, *by = bA->right;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(y, bA->isglobal.col[i], &by[i]));
  for (j = 0; j < nc; j++) {
    PetscCall(VecZeroEntries(by[j]));
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      /* y[j] <- y[j] + (A[i][j])^T * x[i] */
      PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], by[j], by[j]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.col[i], &by[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A^T * x: transpose analogue of MatMultAdd_Nest (copy from y skipped when y aliases z) */
static PetscErrorCode MatMultTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->left, *bz = bA->right;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(z, bA->isglobal.col[i], &bz[i]));
  for (j = 0; j < nc; j++) {
    if (y != z) {
      Vec by;
      PetscCall(VecGetSubVector(y, bA->isglobal.col[j], &by));
      PetscCall(VecCopy(by, bz[j]));
      PetscCall(VecRestoreSubVector(y, bA->isglobal.col[j], &by));
    }
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      /* z[j] <- y[j] + (A[i][j])^T * x[i] */
      PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], bz[j], bz[j]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.col[i], &bz[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* B = A^T for a nest matrix: build (or reuse) a nest with rows/columns swapped and
   transpose each block into the mirrored position; in-place requires a square block layout */
static PetscErrorCode MatTranspose_Nest(Mat A, MatReuse reuse, Mat *B)
{
  Mat_Nest *bA = (Mat_Nest *)A->data, *bC;
  Mat       C;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
  PetscCheck(reuse != MAT_INPLACE_MATRIX || nr == nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Square nested matrix only for in-place");

  if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
    Mat *subs;
    IS  *is_row, *is_col;

    PetscCall(PetscCalloc1(nr * nc, &subs));
    PetscCall(PetscMalloc2(nr, &is_row, nc, &is_col));
    PetscCall(MatNestGetISs(A, is_row, is_col));
    if (reuse == MAT_INPLACE_MATRIX) {
      /* seed the transposed nest with the existing blocks laid out column-major */
      for (i = 0; i < nr; i++) {
        for (j = 0; j < nc; j++) subs[i + nr * j] = bA->m[i][j];
      }
    }

    PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nc, is_col, nr, is_row, subs, &C));
    PetscCall(PetscFree(subs));
    PetscCall(PetscFree2(is_row, is_col));
  } else {
    C = *B;
  }

  bC = (Mat_Nest *)C->data;
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (bA->m[i][j]) {
        PetscCall(MatTranspose(bA->m[i][j], reuse, &(bC->m[j][i])));
      } else {
        bC->m[j][i] = NULL;
      }
    }
  }

  if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) {
    *B = C;
  } else {
    /* in-place: replace A's header with C's, destroying the temporary */
    PetscCall(MatHeaderMerge(A, &C));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destroy each IS in a list, free the list itself, and NULL the caller's pointer */
static PetscErrorCode MatNestDestroyISList(PetscInt n, IS **list)
{
  IS      *lst = *list;
  PetscInt i;

  PetscFunctionBegin;
  if (!lst) PetscFunctionReturn(PETSC_SUCCESS);
  for (i = 0; i < n; i++)
    if (lst[i]) PetscCall(ISDestroy(&lst[i]));
  PetscCall(PetscFree(lst));
  *list = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release all storage held by the nest (index sets, size caches, work vectors,
   submatrices) and restore the structure to its empty default state */
static PetscErrorCode MatReset_Nest(Mat A)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  /* release the matrices and the place holders */
  PetscCall(MatNestDestroyISList(vs->nr, &vs->isglobal.row));
  PetscCall(MatNestDestroyISList(vs->nc, &vs->isglobal.col));
  PetscCall(MatNestDestroyISList(vs->nr, &vs->islocal.row));
  PetscCall(MatNestDestroyISList(vs->nc, &vs->islocal.col));

  PetscCall(PetscFree(vs->row_len));
  PetscCall(PetscFree(vs->col_len));
  PetscCall(PetscFree(vs->nnzstate));

  PetscCall(PetscFree2(vs->left, vs->right));

  /* release the matrices and the place holders */
  if (vs->m) {
    for (i = 0; i < vs->nr; i++) {
      for (j = 0; j < vs->nc; j++) PetscCall(MatDestroy(&vs->m[i][j]));
      PetscCall(PetscFree(vs->m[i]));
    }
    PetscCall(PetscFree(vs->m));
  }

  /* restore defaults */
  vs->nr            = 0;
  vs->nc            = 0;
  vs->splitassembly = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Full destructor: reset storage, free the data struct, and detach every composed method */
static PetscErrorCode MatDestroy_Nest(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatReset_Nest(A));
  PetscCall(PetscFree(A->data));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Report whether any diagonal block is missing a diagonal entry; the index of the
   first missing entry (dd) is not supported for nest matrices */
static PetscErrorCode MatMissingDiagonal_Nest(Mat mat, PetscBool *missing, PetscInt *dd)
{
  Mat_Nest *vs = (Mat_Nest *)mat->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (dd) *dd = 0;
  if (!vs->nr) { /* empty nest: conventionally reported as missing */
    *missing = PETSC_TRUE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  *missing = PETSC_FALSE;
  for (i = 0; i < vs->nr && !(*missing); i++) {
    *missing = PETSC_TRUE; /* a NULL diagonal block means the diagonal is missing */
    if (vs->m[i][i]) {
      PetscCall(MatMissingDiagonal(vs->m[i][i], missing, NULL));
      PetscCheck(!*missing || !dd, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "First missing entry not yet implemented");
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Begin assembly on every block; unless split assembly is requested the end is done
   here too, and the per-block nonzero states are folded into A's nonzero state */
static PetscErrorCode MatAssemblyBegin_Nest(Mat A, MatAssemblyType type)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < vs->nr; i++) {
    for (j = 0; j < vs->nc; j++) {
      PetscObjectState subnnzstate = 0;
      if (vs->m[i][j]) {
        PetscCall(MatAssemblyBegin(vs->m[i][j], type));
        if (!vs->splitassembly) {
          /* Note: split assembly will fail if the same block appears more than once (even indirectly through a nested
           * sub-block). This could be fixed by adding a flag to Mat so that there was a way to check if a Mat was
           * already performing an assembly, but the result would by more complicated and appears to offer less
           * potential for diagnostics and correctness checking. Split assembly should be fixed once there is an
           * interface for libraries to make asynchronous progress in "user-defined non-blocking collectives".
           */
          PetscCall(MatAssemblyEnd(vs->m[i][j], type));
          PetscCall(MatGetNonzeroState(vs->m[i][j], &subnnzstate));
        }
      }
      nnzstate                     = (PetscBool)(nnzstate || vs->nnzstate[i * vs->nc + j] != subnnzstate);
      vs->nnzstate[i * vs->nc + j] = subnnzstate;
    }
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* End assembly on every block, but only when split assembly deferred it from MatAssemblyBegin_Nest */
static PetscErrorCode MatAssemblyEnd_Nest(Mat A, MatAssemblyType type)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < vs->nr; i++) {
    for (j = 0; j < vs->nc; j++) {
      if (vs->m[i][j]) {
        if (vs->splitassembly) PetscCall(MatAssemblyEnd(vs->m[i][j], type));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return some nonempty submatrix in the given block row (diagonal preferred),
   or NULL if the whole row is empty; used to recover row sizes/layouts */
static PetscErrorCode MatNestFindNonzeroSubMatRow(Mat A, PetscInt row, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  j;
  Mat       sub;

  PetscFunctionBegin;
  sub = (row < vs->nc) ? vs->m[row][row] : (Mat)NULL; /* Prefer to find on the diagonal */
  for (j = 0; !sub && j < vs->nc; j++) sub = vs->m[row][j];
  if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Column analogue of MatNestFindNonzeroSubMatRow */
static PetscErrorCode MatNestFindNonzeroSubMatCol(Mat A, PetscInt col, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;
  Mat       sub;

  PetscFunctionBegin;
  sub = (col < vs->nr) ? vs->m[col][col] : (Mat)NULL; /* Prefer to find on the diagonal */
  for (i = 0; !sub && i < vs->nr; i++) sub = vs->m[i][col];
  if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Locate which contiguous range [begin,end) of the nest's index sets matches `is`,
   either a single IS or the concatenation of several consecutive ones;
   begin/end are set to -1 when no match is found */
static PetscErrorCode MatNestFindISRange(Mat A, PetscInt n, const IS list[], IS is, PetscInt *begin, PetscInt *end)
{
  PetscInt  i, j, size, m;
  PetscBool flg;
  IS        out, concatenate[2];

  PetscFunctionBegin;
  PetscValidPointer(list, 3);
  PetscValidHeaderSpecific(is, IS_CLASSID, 4);
  if (begin) {
    PetscValidIntPointer(begin, 5);
    *begin = -1;
  }
  if (end) {
    PetscValidIntPointer(end, 6);
    *end = -1;
  }
  /* first try an exact single-IS match */
  for (i = 0; i < n; i++) {
    if (!list[i]) continue;
    PetscCall(ISEqualUnsorted(list[i], is, &flg));
    if (flg) {
      if (begin) *begin = i;
      if (end) *end = i + 1;
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  /* otherwise try concatenations of consecutive index sets, growing until the sizes agree */
  PetscCall(ISGetSize(is, &size));
  for (i = 0; i < n - 1; i++) {
    if (!list[i]) continue;
    m = 0;
    PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, list + i, &out));
    PetscCall(ISGetSize(out, &m));
    for (j = i + 2; j < n && m < size; j++) {
      if (list[j]) {
        concatenate[0] = out;
        concatenate[1] = list[j];
        PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, concatenate, &out));
        PetscCall(ISDestroy(concatenate)); /* destroys the previous `out` (concatenate[0]) */
        PetscCall(ISGetSize(out, &m));
      }
    }
    if (m == size) {
      PetscCall(ISEqualUnsorted(out, is, &flg));
      if (flg) {
        if (begin) *begin = i;
        if (end) *end = j;
        PetscCall(ISDestroy(&out));
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }
    PetscCall(ISDestroy(&out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Create an assembled, zero-preallocated AIJ matrix with the local sizes of block (i,j),
   used as a stand-in for an empty (NULL) block */
static PetscErrorCode MatNestFillEmptyMat_Private(Mat A, PetscInt i, PetscInt j, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  lr, lc;

  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B));
  PetscCall(ISGetLocalSize(vs->isglobal.row[i], &lr));
  PetscCall(ISGetLocalSize(vs->isglobal.col[j], &lc));
  PetscCall(MatSetSizes(*B, lr, lc, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(MatSetType(*B, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(*B, 0, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*B, 0, NULL, 0, NULL));
  PetscCall(MatSetUp(*B));
  PetscCall(MatSetOption(*B, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Build (or fetch from the object's composition cache) a sub-nest spanning block rows
   [rbegin,rend) and block columns [cbegin,cend); blocks whose row and column are otherwise
   entirely empty get temporary empty AIJ placeholders so MatCreateNest can size them */
static PetscErrorCode MatNestGetBlock_Private(Mat A, PetscInt rbegin, PetscInt rend, PetscInt cbegin, PetscInt cend, Mat *B)
{
  Mat_Nest  *vs = (Mat_Nest *)A->data;
  Mat       *a;
  PetscInt   i, j, k, l, nr = rend - rbegin, nc = cend - cbegin;
  char       keyname[256];
  PetscBool *b;
  PetscBool  flg;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(PetscSNPrintf(keyname, sizeof(keyname), "NestBlock_%" PetscInt_FMT "-%" PetscInt_FMT "x%" PetscInt_FMT "-%" PetscInt_FMT, rbegin, rend, cbegin, cend));
  PetscCall(PetscObjectQuery((PetscObject)A, keyname, (PetscObject *)B));
  if (*B) PetscFunctionReturn(PETSC_SUCCESS); /* cached from an earlier call */

  PetscCall(PetscMalloc2(nr * nc, &a, nr * nc, &b));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      a[i * nc + j] = vs->m[rbegin + i][cbegin + j];
      b[i * nc + j] = PETSC_FALSE; /* marks placeholders we allocate below */
    }
  }
  if (nc != vs->nc && nr != vs->nr) {
    for (i = 0; i < nr; i++) {
      for (j = 0; j < nc; j++) {
        flg = PETSC_FALSE;
        for (k = 0; (k < nr && !flg); k++) {
          if (a[j + k * nc]) flg = PETSC_TRUE;
        }
        if (flg) {
          flg = PETSC_FALSE;
          for (l = 0; (l < nc && !flg); l++) {
            if (a[i * nc + l]) flg = PETSC_TRUE;
          }
        }
        if (!flg) { /* neither this block's column nor its row has any submatrix: fill it in */
          b[i * nc + j] = PETSC_TRUE;
          PetscCall(MatNestFillEmptyMat_Private(A, rbegin + i, cbegin + j, a + i * nc + j));
        }
      }
    }
  }
  PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, nr != vs->nr ? NULL : vs->isglobal.row, nc, nc != vs->nc ? NULL : vs->isglobal.col, a, B));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (b[i * nc + j]) PetscCall(MatDestroy(a + i * nc + j)); /* nest holds its own reference */
    }
  }
  PetscCall(PetscFree2(a, b));
  (*B)->assembled = A->assembled;
  PetscCall(PetscObjectCompose((PetscObject)A, keyname, (PetscObject)*B));
  PetscCall(PetscObjectDereference((PetscObject)*B)); /* Leave the only remaining reference in the composition */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Map (isrow,iscol) onto the nest's index sets and return the matching submatrix:
   a single block when the ranges are 1x1, otherwise a composed sub-nest */
static PetscErrorCode MatNestFindSubMat(Mat A, struct MatNestISPair *is, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  rbegin, rend, cbegin, cend;

  PetscFunctionBegin;
  PetscCall(MatNestFindISRange(A, vs->nr, is->row, isrow, &rbegin, &rend));
  PetscCall(MatNestFindISRange(A, vs->nc, is->col, iscol, &cbegin, &cend));
  if (rend == rbegin + 1 && cend == cbegin + 1) {
    if (!vs->m[rbegin][cbegin]) PetscCall(MatNestFillEmptyMat_Private(A, rbegin, cbegin, vs->m[rbegin] + cbegin));
    *B = vs->m[rbegin][cbegin];
  } else if (rbegin != -1 && cbegin != -1) {
    PetscCall(MatNestGetBlock_Private(A, rbegin, rend, cbegin, cend, B));
  } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Could not find index set");
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   TODO: This does not actually returns a submatrix we can modify
*/
static PetscErrorCode MatCreateSubMatrix_Nest(Mat A, IS isrow, IS iscol, MatReuse reuse, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->isglobal, isrow, iscol, &sub));
  switch (reuse) {
  case MAT_INITIAL_MATRIX:
    if (sub) PetscCall(PetscObjectReference((PetscObject)sub));
    *B = sub;
    break;
  case MAT_REUSE_MATRIX:
    PetscCheck(sub == *B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Submatrix was not used before in this call");
    break;
  case MAT_IGNORE_MATRIX: /* Nothing to do */
    break;
  case MAT_INPLACE_MATRIX: /* Nothing to do */
    SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX is not supported yet");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return the submatrix for local index sets, giving the caller a new reference */
PetscErrorCode MatGetLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub));
  /* We allow the submatrix to be NULL, perhaps it would be better for the user to return an empty matrix instead */
  if (sub) PetscCall(PetscObjectReference((PetscObject)sub));
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release the reference handed out by MatGetLocalSubMatrix_Nest, verifying the
   caller is returning the matching submatrix */
static PetscErrorCode MatRestoreLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub));
  PetscCheck(*B == sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has not been gotten");
  if (sub) {
    PetscCheck(((PetscObject)sub)->refct > 1, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has had reference count decremented too many times");
    PetscCall(MatDestroy(B));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* v = diag(A): each block row's slice of v comes from the diagonal block, or zeros
   when that block is empty */
static PetscErrorCode MatGetDiagonal_Nest(Mat A, Vec v)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    Vec bv;
    PetscCall(VecGetSubVector(v, bA->isglobal.row[i], &bv));
    if (bA->m[i][i]) {
      PetscCall(MatGetDiagonal(bA->m[i][i], bv));
    } else {
      PetscCall(VecSet(bv, 0.0));
    }
    PetscCall(VecRestoreSubVector(v, bA->isglobal.row[i], &bv));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A = diag(l)*A*diag(r): scale every block with the matching slices of l and r;
   either vector may be NULL to skip that side */
static PetscErrorCode MatDiagonalScale_Nest(Mat A, Vec l, Vec r)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec       bl, *br;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscCalloc1(bA->nc, &br));
  if (r) {
    for (j = 0; j < bA->nc; j++) PetscCall(VecGetSubVector(r, bA->isglobal.col[j], &br[j]));
  }
  bl = NULL;
  for (i = 0; i < bA->nr; i++) {
    if (l) PetscCall(VecGetSubVector(l, bA->isglobal.row[i], &bl));
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatDiagonalScale(bA->m[i][j], bl, br[j]));
    }
    if (l) PetscCall(VecRestoreSubVector(l, bA->isglobal.row[i], &bl));
  }
  if (r) {
    for (j = 0; j < bA->nc; j++) PetscCall(VecRestoreSubVector(r, bA->isglobal.col[j], &br[j]));
  }
  PetscCall(PetscFree(br));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A = a*A: scale every nonempty block */
static PetscErrorCode MatScale_Nest(Mat A, PetscScalar a)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatScale(bA->m[i][j], a));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A = A + a*I: shift each diagonal block (which must exist) and track nonzero-state changes */
static PetscErrorCode MatShift_Nest(Mat A, PetscScalar a)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    PetscObjectState subnnzstate = 0;
    PetscCheck(bA->m[i][i], PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for shifting an empty diagonal block, insert a matrix in block (%" PetscInt_FMT ",%" PetscInt_FMT ")", i, i);
    PetscCall(MatShift(bA->m[i][i], a));
    PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate));
    nnzstate                     = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate);
    bA->nnzstate[i * bA->nc + i] = subnnzstate;
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Insert/add D into the diagonal of A block-by-block; empty diagonal blocks are
   silently skipped, and nonzero-state changes are tracked */
static PetscErrorCode MatDiagonalSet_Nest(Mat A, Vec D, InsertMode is)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    PetscObjectState subnnzstate = 0;
    Vec              bv;
    PetscCall(VecGetSubVector(D, bA->isglobal.row[i], &bv));
    if (bA->m[i][i]) {
      PetscCall(MatDiagonalSet(bA->m[i][i], bv, is));
      PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate));
    }
    PetscCall(VecRestoreSubVector(D, bA->isglobal.row[i], &bv));
    nnzstate                     = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate);
    bA->nnzstate[i * bA->nc + i] = subnnzstate;
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Fill every nonempty block with random values */
static PetscErrorCode MatSetRandom_Nest(Mat A, PetscRandom rctx)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatSetRandom(bA->m[i][j], rctx));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Create compatible right/left work vectors as VECNEST, taking each component's layout
   from the first nonempty block in the corresponding column/row */
static PetscErrorCode MatCreateVecs_Nest(Mat A, Vec *right, Vec *left)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *L, *R;
  MPI_Comm  comm;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  if (right) {
    /* allocate R */
    PetscCall(PetscMalloc1(bA->nc, &R));
    /* Create the right vectors */
    for (j = 0; j < bA->nc; j++) {
      for (i = 0; i < bA->nr; i++) {
        if (bA->m[i][j]) {
          PetscCall(MatCreateVecs(bA->m[i][j], &R[j], NULL));
          break;
        }
      }
      /* relies on i retaining its value after the break */
      PetscCheck(i != bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null column.");
    }
    PetscCall(VecCreateNest(comm, bA->nc, bA->isglobal.col, R, right));
    /* hand back control to the nest vector */
    for (j = 0; j < bA->nc; j++) PetscCall(VecDestroy(&R[j]));
    PetscCall(PetscFree(R));
  }

  if (left) {
    /* allocate L */
    PetscCall(PetscMalloc1(bA->nr, &L));
    /* Create the left vectors */
    for (i = 0; i < bA->nr; i++) {
      for (j = 0; j < bA->nc; j++) {
        if (bA->m[i][j]) {
          PetscCall(MatCreateVecs(bA->m[i][j], NULL, &L[i]));
          break;
        }
      }
      PetscCheck(j != bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null row.");
    }

    PetscCall(VecCreateNest(comm, bA->nr, bA->isglobal.row, L, left));
    for (i = 0; i < bA->nr; i++) PetscCall(VecDestroy(&L[i]));

    PetscCall(PetscFree(L));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ASCII viewer: print the nest structure block-by-block; -mat_view_nest_sub also
   recurses into non-nest blocks (nest blocks are always recursed into) */
static PetscErrorCode MatView_Nest(Mat A, PetscViewer viewer)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscBool isascii, viewSub = PETSC_FALSE;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    PetscCall(PetscOptionsGetBool(((PetscObject)A)->options, ((PetscObject)A)->prefix, "-mat_view_nest_sub", &viewSub, NULL));
    PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix object: \n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, "type=nest, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT " \n", bA->nr, bA->nc));

    PetscCall(PetscViewerASCIIPrintf(viewer, "MatNest structure: \n"));
    for (i = 0; i < bA->nr; i++) {
      for (j = 0; j < bA->nc; j++) {
        MatType   type;
        char      name[256] = "", prefix[256] = "";
        PetscInt  NR, NC;
        PetscBool isNest = PETSC_FALSE;

        if (!bA->m[i][j]) {
          PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : NULL \n", i, j));
          continue;
        }
        PetscCall(MatGetSize(bA->m[i][j], &NR, &NC));
        PetscCall(MatGetType(bA->m[i][j], &type));
        if (((PetscObject)bA->m[i][j])->name) PetscCall(PetscSNPrintf(name, sizeof(name), "name=\"%s\", ", ((PetscObject)bA->m[i][j])->name));
        if (((PetscObject)bA->m[i][j])->prefix) PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "prefix=\"%s\", ", ((PetscObject)bA->m[i][j])->prefix));
        PetscCall(PetscObjectTypeCompare((PetscObject)bA->m[i][j], MATNEST, &isNest));

        PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : %s%stype=%s, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT " \n", i, j, name, prefix, type, NR, NC));

        if (isNest || viewSub) {
          PetscCall(PetscViewerASCIIPushTab(viewer)); /* push1 */
          PetscCall(MatView(bA->m[i][j], viewer));
          PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop1 */
        }
      }
    }
    PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop0 */
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Zero every nonempty block */
static PetscErrorCode MatZeroEntries_Nest(Mat A)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    for (j = 0; j < bA->nc; j++) {
      if (!bA->m[i][j]) continue;
      PetscCall(MatZeroEntries(bA->m[i][j]));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* NOTE(review): MatCopy_Nest continues beyond this excerpt; the definition below is
   truncated at the end of the visible source and is reproduced verbatim */
static PetscErrorCode MatCopy_Nest(Mat A, Mat B, MatStructure str)
{
  Mat_Nest *bA = (Mat_Nest *)A->data, *bB = (Mat_Nest *)B->data;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCheck(nr == bB->nr && nc == bB->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Cannot copy a Mat_Nest of block
size (%" PetscInt_FMT ",%" PetscInt_FMT ") to a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bB->nr, bB->nc, nr, nc); 971 for (i = 0; i < nr; i++) { 972 for (j = 0; j < nc; j++) { 973 PetscObjectState subnnzstate = 0; 974 if (bA->m[i][j] && bB->m[i][j]) { 975 PetscCall(MatCopy(bA->m[i][j], bB->m[i][j], str)); 976 } else PetscCheck(!bA->m[i][j] && !bB->m[i][j], PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Matrix block does not exist at %" PetscInt_FMT ",%" PetscInt_FMT, i, j); 977 PetscCall(MatGetNonzeroState(bB->m[i][j], &subnnzstate)); 978 nnzstate = (PetscBool)(nnzstate || bB->nnzstate[i * nc + j] != subnnzstate); 979 bB->nnzstate[i * nc + j] = subnnzstate; 980 } 981 } 982 if (nnzstate) B->nonzerostate++; 983 PetscFunctionReturn(PETSC_SUCCESS); 984 } 985 986 static PetscErrorCode MatAXPY_Nest(Mat Y, PetscScalar a, Mat X, MatStructure str) 987 { 988 Mat_Nest *bY = (Mat_Nest *)Y->data, *bX = (Mat_Nest *)X->data; 989 PetscInt i, j, nr = bY->nr, nc = bY->nc; 990 PetscBool nnzstate = PETSC_FALSE; 991 992 PetscFunctionBegin; 993 PetscCheck(nr == bX->nr && nc == bX->nc, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Cannot AXPY a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") with a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bX->nr, bX->nc, nr, nc); 994 for (i = 0; i < nr; i++) { 995 for (j = 0; j < nc; j++) { 996 PetscObjectState subnnzstate = 0; 997 if (bY->m[i][j] && bX->m[i][j]) { 998 PetscCall(MatAXPY(bY->m[i][j], a, bX->m[i][j], str)); 999 } else if (bX->m[i][j]) { 1000 Mat M; 1001 1002 PetscCheck(str == DIFFERENT_NONZERO_PATTERN, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Matrix block does not exist at %" PetscInt_FMT ",%" PetscInt_FMT ". 
Use DIFFERENT_NONZERO_PATTERN", i, j); 1003 PetscCall(MatDuplicate(bX->m[i][j], MAT_COPY_VALUES, &M)); 1004 PetscCall(MatNestSetSubMat(Y, i, j, M)); 1005 PetscCall(MatDestroy(&M)); 1006 } 1007 if (bY->m[i][j]) PetscCall(MatGetNonzeroState(bY->m[i][j], &subnnzstate)); 1008 nnzstate = (PetscBool)(nnzstate || bY->nnzstate[i * nc + j] != subnnzstate); 1009 bY->nnzstate[i * nc + j] = subnnzstate; 1010 } 1011 } 1012 if (nnzstate) Y->nonzerostate++; 1013 PetscFunctionReturn(PETSC_SUCCESS); 1014 } 1015 1016 static PetscErrorCode MatDuplicate_Nest(Mat A, MatDuplicateOption op, Mat *B) 1017 { 1018 Mat_Nest *bA = (Mat_Nest *)A->data; 1019 Mat *b; 1020 PetscInt i, j, nr = bA->nr, nc = bA->nc; 1021 1022 PetscFunctionBegin; 1023 PetscCall(PetscMalloc1(nr * nc, &b)); 1024 for (i = 0; i < nr; i++) { 1025 for (j = 0; j < nc; j++) { 1026 if (bA->m[i][j]) { 1027 PetscCall(MatDuplicate(bA->m[i][j], op, &b[i * nc + j])); 1028 } else { 1029 b[i * nc + j] = NULL; 1030 } 1031 } 1032 } 1033 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, bA->isglobal.row, nc, bA->isglobal.col, b, B)); 1034 /* Give the new MatNest exclusive ownership */ 1035 for (i = 0; i < nr * nc; i++) PetscCall(MatDestroy(&b[i])); 1036 PetscCall(PetscFree(b)); 1037 1038 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 1039 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 1040 PetscFunctionReturn(PETSC_SUCCESS); 1041 } 1042 1043 /* nest api */ 1044 PetscErrorCode MatNestGetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat *mat) 1045 { 1046 Mat_Nest *bA = (Mat_Nest *)A->data; 1047 1048 PetscFunctionBegin; 1049 PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1); 1050 PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1); 1051 *mat = bA->m[idxm][jdxm]; 1052 
PetscFunctionReturn(PETSC_SUCCESS); 1053 } 1054 1055 /*@ 1056 MatNestGetSubMat - Returns a single, sub-matrix from a `MATNEST` 1057 1058 Not collective 1059 1060 Input Parameters: 1061 + A - `MATNEST` matrix 1062 . idxm - index of the matrix within the nest matrix 1063 - jdxm - index of the matrix within the nest matrix 1064 1065 Output Parameter: 1066 . sub - matrix at index idxm,jdxm within the nest matrix 1067 1068 Level: developer 1069 1070 .seealso: `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MATNEST`, `MatNestSetSubMat()`, 1071 `MatNestGetLocalISs()`, `MatNestGetISs()` 1072 @*/ 1073 PetscErrorCode MatNestGetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat *sub) 1074 { 1075 PetscFunctionBegin; 1076 PetscUseMethod(A, "MatNestGetSubMat_C", (Mat, PetscInt, PetscInt, Mat *), (A, idxm, jdxm, sub)); 1077 PetscFunctionReturn(PETSC_SUCCESS); 1078 } 1079 1080 PetscErrorCode MatNestSetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat mat) 1081 { 1082 Mat_Nest *bA = (Mat_Nest *)A->data; 1083 PetscInt m, n, M, N, mi, ni, Mi, Ni; 1084 1085 PetscFunctionBegin; 1086 PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1); 1087 PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1); 1088 PetscCall(MatGetLocalSize(mat, &m, &n)); 1089 PetscCall(MatGetSize(mat, &M, &N)); 1090 PetscCall(ISGetLocalSize(bA->isglobal.row[idxm], &mi)); 1091 PetscCall(ISGetSize(bA->isglobal.row[idxm], &Mi)); 1092 PetscCall(ISGetLocalSize(bA->isglobal.col[jdxm], &ni)); 1093 PetscCall(ISGetSize(bA->isglobal.col[jdxm], &Ni)); 1094 PetscCheck(M == Mi && N == Ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" PetscInt_FMT ")", M, N, Mi, Ni); 1095 
PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix local dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, mi, ni); 1096 1097 /* do not increase object state */ 1098 if (mat == bA->m[idxm][jdxm]) PetscFunctionReturn(PETSC_SUCCESS); 1099 1100 PetscCall(PetscObjectReference((PetscObject)mat)); 1101 PetscCall(MatDestroy(&bA->m[idxm][jdxm])); 1102 bA->m[idxm][jdxm] = mat; 1103 PetscCall(PetscObjectStateIncrease((PetscObject)A)); 1104 PetscCall(MatGetNonzeroState(mat, &bA->nnzstate[idxm * bA->nc + jdxm])); 1105 A->nonzerostate++; 1106 PetscFunctionReturn(PETSC_SUCCESS); 1107 } 1108 1109 /*@ 1110 MatNestSetSubMat - Set a single submatrix in the `MATNEST` 1111 1112 Logically collective on the submatrix communicator 1113 1114 Input Parameters: 1115 + A - `MATNEST` matrix 1116 . idxm - index of the matrix within the nest matrix 1117 . jdxm - index of the matrix within the nest matrix 1118 - sub - matrix at index idxm,jdxm within the nest matrix 1119 1120 Notes: 1121 The new submatrix must have the same size and communicator as that block of the nest. 1122 1123 This increments the reference count of the submatrix. 
1124 1125 Level: developer 1126 1127 .seealso: `MATNEST`, `MatNestSetSubMats()`, `MatNestGetSubMats()`, `MatNestGetLocalISs()`, `MATNEST`, `MatCreateNest()`, 1128 `MatNestGetSubMat()`, `MatNestGetISs()`, `MatNestGetSize()` 1129 @*/ 1130 PetscErrorCode MatNestSetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat sub) 1131 { 1132 PetscFunctionBegin; 1133 PetscUseMethod(A, "MatNestSetSubMat_C", (Mat, PetscInt, PetscInt, Mat), (A, idxm, jdxm, sub)); 1134 PetscFunctionReturn(PETSC_SUCCESS); 1135 } 1136 1137 PetscErrorCode MatNestGetSubMats_Nest(Mat A, PetscInt *M, PetscInt *N, Mat ***mat) 1138 { 1139 Mat_Nest *bA = (Mat_Nest *)A->data; 1140 1141 PetscFunctionBegin; 1142 if (M) *M = bA->nr; 1143 if (N) *N = bA->nc; 1144 if (mat) *mat = bA->m; 1145 PetscFunctionReturn(PETSC_SUCCESS); 1146 } 1147 1148 /*@C 1149 MatNestGetSubMats - Returns the entire two dimensional array of matrices defining a `MATNEST` matrix. 1150 1151 Not collective 1152 1153 Input Parameter: 1154 . A - nest matrix 1155 1156 Output Parameters: 1157 + M - number of rows in the nest matrix 1158 . N - number of cols in the nest matrix 1159 - mat - 2d array of matrices 1160 1161 Note: 1162 The user should not free the array mat. 1163 1164 Fortran Note: 1165 This routine has a calling sequence 1166 $ call MatNestGetSubMats(A, M, N, mat, ierr) 1167 where the space allocated for the optional argument mat is assumed large enough (if provided). 
1168 1169 Level: developer 1170 1171 .seealso: `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MATNEST`, `MatCreateNest()`, 1172 `MatNestSetSubMats()`, `MatNestGetISs()`, `MatNestSetSubMat()` 1173 @*/ 1174 PetscErrorCode MatNestGetSubMats(Mat A, PetscInt *M, PetscInt *N, Mat ***mat) 1175 { 1176 PetscFunctionBegin; 1177 PetscUseMethod(A, "MatNestGetSubMats_C", (Mat, PetscInt *, PetscInt *, Mat ***), (A, M, N, mat)); 1178 PetscFunctionReturn(PETSC_SUCCESS); 1179 } 1180 1181 PetscErrorCode MatNestGetSize_Nest(Mat A, PetscInt *M, PetscInt *N) 1182 { 1183 Mat_Nest *bA = (Mat_Nest *)A->data; 1184 1185 PetscFunctionBegin; 1186 if (M) *M = bA->nr; 1187 if (N) *N = bA->nc; 1188 PetscFunctionReturn(PETSC_SUCCESS); 1189 } 1190 1191 /*@ 1192 MatNestGetSize - Returns the size of the `MATNEST` matrix. 1193 1194 Not collective 1195 1196 Input Parameter: 1197 . A - `MATNEST` matrix 1198 1199 Output Parameters: 1200 + M - number of rows in the nested mat 1201 - N - number of cols in the nested mat 1202 1203 Level: developer 1204 1205 .seealso: `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MATNEST`, `MatCreateNest()`, `MatNestGetLocalISs()`, 1206 `MatNestGetISs()` 1207 @*/ 1208 PetscErrorCode MatNestGetSize(Mat A, PetscInt *M, PetscInt *N) 1209 { 1210 PetscFunctionBegin; 1211 PetscUseMethod(A, "MatNestGetSize_C", (Mat, PetscInt *, PetscInt *), (A, M, N)); 1212 PetscFunctionReturn(PETSC_SUCCESS); 1213 } 1214 1215 static PetscErrorCode MatNestGetISs_Nest(Mat A, IS rows[], IS cols[]) 1216 { 1217 Mat_Nest *vs = (Mat_Nest *)A->data; 1218 PetscInt i; 1219 1220 PetscFunctionBegin; 1221 if (rows) 1222 for (i = 0; i < vs->nr; i++) rows[i] = vs->isglobal.row[i]; 1223 if (cols) 1224 for (i = 0; i < vs->nc; i++) cols[i] = vs->isglobal.col[i]; 1225 PetscFunctionReturn(PETSC_SUCCESS); 1226 } 1227 1228 /*@C 1229 MatNestGetISs - Returns the index sets partitioning the row and column spaces of a `MATNEST` 1230 1231 Not collective 1232 1233 Input Parameter: 
1234 . A - `MATNEST` matrix 1235 1236 Output Parameters: 1237 + rows - array of row index sets 1238 - cols - array of column index sets 1239 1240 Level: advanced 1241 1242 Note: 1243 The user must have allocated arrays of the correct size. The reference count is not increased on the returned ISs. 1244 1245 .seealso: `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetLocalISs()`, `MATNEST`, 1246 `MatCreateNest()`, `MatNestGetSubMats()`, `MatNestSetSubMats()` 1247 @*/ 1248 PetscErrorCode MatNestGetISs(Mat A, IS rows[], IS cols[]) 1249 { 1250 PetscFunctionBegin; 1251 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1252 PetscUseMethod(A, "MatNestGetISs_C", (Mat, IS[], IS[]), (A, rows, cols)); 1253 PetscFunctionReturn(PETSC_SUCCESS); 1254 } 1255 1256 static PetscErrorCode MatNestGetLocalISs_Nest(Mat A, IS rows[], IS cols[]) 1257 { 1258 Mat_Nest *vs = (Mat_Nest *)A->data; 1259 PetscInt i; 1260 1261 PetscFunctionBegin; 1262 if (rows) 1263 for (i = 0; i < vs->nr; i++) rows[i] = vs->islocal.row[i]; 1264 if (cols) 1265 for (i = 0; i < vs->nc; i++) cols[i] = vs->islocal.col[i]; 1266 PetscFunctionReturn(PETSC_SUCCESS); 1267 } 1268 1269 /*@C 1270 MatNestGetLocalISs - Returns the index sets partitioning the row and column spaces of a `MATNEST` 1271 1272 Not collective 1273 1274 Input Parameter: 1275 . A - `MATNEST` matrix 1276 1277 Output Parameters: 1278 + rows - array of row index sets (or NULL to ignore) 1279 - cols - array of column index sets (or NULL to ignore) 1280 1281 Level: advanced 1282 1283 Note: 1284 The user must have allocated arrays of the correct size. The reference count is not increased on the returned ISs. 
1285 1286 .seealso: `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetISs()`, `MatCreateNest()`, 1287 `MATNEST`, `MatNestSetSubMats()`, `MatNestSetSubMat()` 1288 @*/ 1289 PetscErrorCode MatNestGetLocalISs(Mat A, IS rows[], IS cols[]) 1290 { 1291 PetscFunctionBegin; 1292 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1293 PetscUseMethod(A, "MatNestGetLocalISs_C", (Mat, IS[], IS[]), (A, rows, cols)); 1294 PetscFunctionReturn(PETSC_SUCCESS); 1295 } 1296 1297 PetscErrorCode MatNestSetVecType_Nest(Mat A, VecType vtype) 1298 { 1299 PetscBool flg; 1300 1301 PetscFunctionBegin; 1302 PetscCall(PetscStrcmp(vtype, VECNEST, &flg)); 1303 /* In reality, this only distinguishes VECNEST and "other" */ 1304 if (flg) A->ops->getvecs = MatCreateVecs_Nest; 1305 else A->ops->getvecs = (PetscErrorCode(*)(Mat, Vec *, Vec *))0; 1306 PetscFunctionReturn(PETSC_SUCCESS); 1307 } 1308 1309 /*@C 1310 MatNestSetVecType - Sets the type of `Vec` returned by `MatCreateVecs()` 1311 1312 Not collective 1313 1314 Input Parameters: 1315 + A - `MATNEST` matrix 1316 - vtype - `VecType` to use for creating vectors 1317 1318 Level: developer 1319 1320 .seealso: `MATNEST`, `MatCreateVecs()`, `MATNEST`, `MatCreateNest()`, `VecType` 1321 @*/ 1322 PetscErrorCode MatNestSetVecType(Mat A, VecType vtype) 1323 { 1324 PetscFunctionBegin; 1325 PetscTryMethod(A, "MatNestSetVecType_C", (Mat, VecType), (A, vtype)); 1326 PetscFunctionReturn(PETSC_SUCCESS); 1327 } 1328 1329 PetscErrorCode MatNestSetSubMats_Nest(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[]) 1330 { 1331 Mat_Nest *s = (Mat_Nest *)A->data; 1332 PetscInt i, j, m, n, M, N; 1333 PetscBool cong, isstd, sametype = PETSC_FALSE; 1334 VecType vtype, type; 1335 1336 PetscFunctionBegin; 1337 PetscCall(MatReset_Nest(A)); 1338 1339 s->nr = nr; 1340 s->nc = nc; 1341 1342 /* Create space for submatrices */ 1343 PetscCall(PetscMalloc1(nr, &s->m)); 1344 for (i = 0; i < nr; i++) 
PetscCall(PetscMalloc1(nc, &s->m[i])); 1345 for (i = 0; i < nr; i++) { 1346 for (j = 0; j < nc; j++) { 1347 s->m[i][j] = a[i * nc + j]; 1348 if (a[i * nc + j]) PetscCall(PetscObjectReference((PetscObject)a[i * nc + j])); 1349 } 1350 } 1351 PetscCall(MatGetVecType(A, &vtype)); 1352 PetscCall(PetscStrcmp(vtype, VECSTANDARD, &isstd)); 1353 if (isstd) { 1354 /* check if all blocks have the same vectype */ 1355 vtype = NULL; 1356 for (i = 0; i < nr; i++) { 1357 for (j = 0; j < nc; j++) { 1358 if (a[i * nc + j]) { 1359 if (!vtype) { /* first visited block */ 1360 PetscCall(MatGetVecType(a[i * nc + j], &vtype)); 1361 sametype = PETSC_TRUE; 1362 } else if (sametype) { 1363 PetscCall(MatGetVecType(a[i * nc + j], &type)); 1364 PetscCall(PetscStrcmp(vtype, type, &sametype)); 1365 } 1366 } 1367 } 1368 } 1369 if (sametype) { /* propagate vectype */ 1370 PetscCall(MatSetVecType(A, vtype)); 1371 } 1372 } 1373 1374 PetscCall(MatSetUp_NestIS_Private(A, nr, is_row, nc, is_col)); 1375 1376 PetscCall(PetscMalloc1(nr, &s->row_len)); 1377 PetscCall(PetscMalloc1(nc, &s->col_len)); 1378 for (i = 0; i < nr; i++) s->row_len[i] = -1; 1379 for (j = 0; j < nc; j++) s->col_len[j] = -1; 1380 1381 PetscCall(PetscCalloc1(nr * nc, &s->nnzstate)); 1382 for (i = 0; i < nr; i++) { 1383 for (j = 0; j < nc; j++) { 1384 if (s->m[i][j]) PetscCall(MatGetNonzeroState(s->m[i][j], &s->nnzstate[i * nc + j])); 1385 } 1386 } 1387 1388 PetscCall(MatNestGetSizes_Private(A, &m, &n, &M, &N)); 1389 1390 PetscCall(PetscLayoutSetSize(A->rmap, M)); 1391 PetscCall(PetscLayoutSetLocalSize(A->rmap, m)); 1392 PetscCall(PetscLayoutSetSize(A->cmap, N)); 1393 PetscCall(PetscLayoutSetLocalSize(A->cmap, n)); 1394 1395 PetscCall(PetscLayoutSetUp(A->rmap)); 1396 PetscCall(PetscLayoutSetUp(A->cmap)); 1397 1398 /* disable operations that are not supported for non-square matrices, 1399 or matrices for which is_row != is_col */ 1400 PetscCall(MatHasCongruentLayouts(A, &cong)); 1401 if (cong && nr != nc) cong = PETSC_FALSE; 1402 if 
(cong) { 1403 for (i = 0; cong && i < nr; i++) PetscCall(ISEqualUnsorted(s->isglobal.row[i], s->isglobal.col[i], &cong)); 1404 } 1405 if (!cong) { 1406 A->ops->missingdiagonal = NULL; 1407 A->ops->getdiagonal = NULL; 1408 A->ops->shift = NULL; 1409 A->ops->diagonalset = NULL; 1410 } 1411 1412 PetscCall(PetscCalloc2(nr, &s->left, nc, &s->right)); 1413 PetscCall(PetscObjectStateIncrease((PetscObject)A)); 1414 A->nonzerostate++; 1415 PetscFunctionReturn(PETSC_SUCCESS); 1416 } 1417 1418 /*@ 1419 MatNestSetSubMats - Sets the nested submatrices in a `MATNEST` 1420 1421 Collective 1422 1423 Input Parameters: 1424 + A - `MATNEST` matrix 1425 . nr - number of nested row blocks 1426 . is_row - index sets for each nested row block, or NULL to make contiguous 1427 . nc - number of nested column blocks 1428 . is_col - index sets for each nested column block, or NULL to make contiguous 1429 - a - row-aligned array of nr*nc submatrices, empty submatrices can be passed using NULL 1430 1431 Note: 1432 This always resets any submatrix information previously set 1433 1434 Level: advanced 1435 1436 .seealso: `MATNEST`, `MatCreateNest()`, `MATNEST`, `MatNestSetSubMat()`, `MatNestGetSubMat()`, `MatNestGetSubMats()` 1437 @*/ 1438 PetscErrorCode MatNestSetSubMats(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[]) 1439 { 1440 PetscInt i; 1441 1442 PetscFunctionBegin; 1443 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1444 PetscCheck(nr >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of rows cannot be negative"); 1445 if (nr && is_row) { 1446 PetscValidPointer(is_row, 3); 1447 for (i = 0; i < nr; i++) PetscValidHeaderSpecific(is_row[i], IS_CLASSID, 3); 1448 } 1449 PetscCheck(nc >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of columns cannot be negative"); 1450 if (nc && is_col) { 1451 PetscValidPointer(is_col, 5); 1452 for (i = 0; i < nc; i++) PetscValidHeaderSpecific(is_col[i], IS_CLASSID, 5); 1453 } 1454 
if (nr * nc > 0) PetscValidPointer(a, 6); 1455 PetscUseMethod(A, "MatNestSetSubMats_C", (Mat, PetscInt, const IS[], PetscInt, const IS[], const Mat[]), (A, nr, is_row, nc, is_col, a)); 1456 PetscFunctionReturn(PETSC_SUCCESS); 1457 } 1458 1459 static PetscErrorCode MatNestCreateAggregateL2G_Private(Mat A, PetscInt n, const IS islocal[], const IS isglobal[], PetscBool colflg, ISLocalToGlobalMapping *ltog) 1460 { 1461 PetscBool flg; 1462 PetscInt i, j, m, mi, *ix; 1463 1464 PetscFunctionBegin; 1465 *ltog = NULL; 1466 for (i = 0, m = 0, flg = PETSC_FALSE; i < n; i++) { 1467 if (islocal[i]) { 1468 PetscCall(ISGetLocalSize(islocal[i], &mi)); 1469 flg = PETSC_TRUE; /* We found a non-trivial entry */ 1470 } else { 1471 PetscCall(ISGetLocalSize(isglobal[i], &mi)); 1472 } 1473 m += mi; 1474 } 1475 if (!flg) PetscFunctionReturn(PETSC_SUCCESS); 1476 1477 PetscCall(PetscMalloc1(m, &ix)); 1478 for (i = 0, m = 0; i < n; i++) { 1479 ISLocalToGlobalMapping smap = NULL; 1480 Mat sub = NULL; 1481 PetscSF sf; 1482 PetscLayout map; 1483 const PetscInt *ix2; 1484 1485 if (!colflg) { 1486 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1487 } else { 1488 PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub)); 1489 } 1490 if (sub) { 1491 if (!colflg) { 1492 PetscCall(MatGetLocalToGlobalMapping(sub, &smap, NULL)); 1493 } else { 1494 PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &smap)); 1495 } 1496 } 1497 /* 1498 Now we need to extract the monolithic global indices that correspond to the given split global indices. 1499 In many/most cases, we only want MatGetLocalSubMatrix() to work, in which case we only need to know the size of the local spaces. 
1500 */ 1501 PetscCall(ISGetIndices(isglobal[i], &ix2)); 1502 if (islocal[i]) { 1503 PetscInt *ilocal, *iremote; 1504 PetscInt mil, nleaves; 1505 1506 PetscCall(ISGetLocalSize(islocal[i], &mi)); 1507 PetscCheck(smap, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing local to global map"); 1508 for (j = 0; j < mi; j++) ix[m + j] = j; 1509 PetscCall(ISLocalToGlobalMappingApply(smap, mi, ix + m, ix + m)); 1510 1511 /* PetscSFSetGraphLayout does not like negative indices */ 1512 PetscCall(PetscMalloc2(mi, &ilocal, mi, &iremote)); 1513 for (j = 0, nleaves = 0; j < mi; j++) { 1514 if (ix[m + j] < 0) continue; 1515 ilocal[nleaves] = j; 1516 iremote[nleaves] = ix[m + j]; 1517 nleaves++; 1518 } 1519 PetscCall(ISGetLocalSize(isglobal[i], &mil)); 1520 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &sf)); 1521 PetscCall(PetscLayoutCreate(PetscObjectComm((PetscObject)A), &map)); 1522 PetscCall(PetscLayoutSetLocalSize(map, mil)); 1523 PetscCall(PetscLayoutSetUp(map)); 1524 PetscCall(PetscSFSetGraphLayout(sf, map, nleaves, ilocal, PETSC_USE_POINTER, iremote)); 1525 PetscCall(PetscLayoutDestroy(&map)); 1526 PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1527 PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1528 PetscCall(PetscSFDestroy(&sf)); 1529 PetscCall(PetscFree2(ilocal, iremote)); 1530 } else { 1531 PetscCall(ISGetLocalSize(isglobal[i], &mi)); 1532 for (j = 0; j < mi; j++) ix[m + j] = ix2[i]; 1533 } 1534 PetscCall(ISRestoreIndices(isglobal[i], &ix2)); 1535 m += mi; 1536 } 1537 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, m, ix, PETSC_OWN_POINTER, ltog)); 1538 PetscFunctionReturn(PETSC_SUCCESS); 1539 } 1540 1541 /* If an IS was provided, there is nothing Nest needs to do, otherwise Nest will build a strided IS */ 1542 /* 1543 nprocessors = NP 1544 Nest x^T = ((g_0,g_1,...g_nprocs-1), (h_0,h_1,...h_NP-1)) 1545 proc 0: => (g_0,h_0,) 1546 proc 1: => (g_1,h_1,) 1547 ... 
1548 proc nprocs-1: => (g_NP-1,h_NP-1,) 1549 1550 proc 0: proc 1: proc nprocs-1: 1551 is[0] = (0,1,2,...,nlocal(g_0)-1) (0,1,...,nlocal(g_1)-1) (0,1,...,nlocal(g_NP-1)) 1552 1553 proc 0: 1554 is[1] = (nlocal(g_0),nlocal(g_0)+1,...,nlocal(g_0)+nlocal(h_0)-1) 1555 proc 1: 1556 is[1] = (nlocal(g_1),nlocal(g_1)+1,...,nlocal(g_1)+nlocal(h_1)-1) 1557 1558 proc NP-1: 1559 is[1] = (nlocal(g_NP-1),nlocal(g_NP-1)+1,...,nlocal(g_NP-1)+nlocal(h_NP-1)-1) 1560 */ 1561 static PetscErrorCode MatSetUp_NestIS_Private(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[]) 1562 { 1563 Mat_Nest *vs = (Mat_Nest *)A->data; 1564 PetscInt i, j, offset, n, nsum, bs; 1565 Mat sub = NULL; 1566 1567 PetscFunctionBegin; 1568 PetscCall(PetscMalloc1(nr, &vs->isglobal.row)); 1569 PetscCall(PetscMalloc1(nc, &vs->isglobal.col)); 1570 if (is_row) { /* valid IS is passed in */ 1571 /* refs on is[] are incremented */ 1572 for (i = 0; i < vs->nr; i++) { 1573 PetscCall(PetscObjectReference((PetscObject)is_row[i])); 1574 1575 vs->isglobal.row[i] = is_row[i]; 1576 } 1577 } else { /* Create the ISs by inspecting sizes of a submatrix in each row */ 1578 nsum = 0; 1579 for (i = 0; i < vs->nr; i++) { /* Add up the local sizes to compute the aggregate offset */ 1580 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1581 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in row %" PetscInt_FMT, i); 1582 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1583 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1584 nsum += n; 1585 } 1586 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1587 offset -= nsum; 1588 for (i = 0; i < vs->nr; i++) { 1589 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1590 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1591 PetscCall(MatGetBlockSizes(sub, &bs, NULL)); 1592 
PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.row[i])); 1593 PetscCall(ISSetBlockSize(vs->isglobal.row[i], bs)); 1594 offset += n; 1595 } 1596 } 1597 1598 if (is_col) { /* valid IS is passed in */ 1599 /* refs on is[] are incremented */ 1600 for (j = 0; j < vs->nc; j++) { 1601 PetscCall(PetscObjectReference((PetscObject)is_col[j])); 1602 1603 vs->isglobal.col[j] = is_col[j]; 1604 } 1605 } else { /* Create the ISs by inspecting sizes of a submatrix in each column */ 1606 offset = A->cmap->rstart; 1607 nsum = 0; 1608 for (j = 0; j < vs->nc; j++) { 1609 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1610 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in column %" PetscInt_FMT, i); 1611 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1612 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1613 nsum += n; 1614 } 1615 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1616 offset -= nsum; 1617 for (j = 0; j < vs->nc; j++) { 1618 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1619 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1620 PetscCall(MatGetBlockSizes(sub, NULL, &bs)); 1621 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.col[j])); 1622 PetscCall(ISSetBlockSize(vs->isglobal.col[j], bs)); 1623 offset += n; 1624 } 1625 } 1626 1627 /* Set up the local ISs */ 1628 PetscCall(PetscMalloc1(vs->nr, &vs->islocal.row)); 1629 PetscCall(PetscMalloc1(vs->nc, &vs->islocal.col)); 1630 for (i = 0, offset = 0; i < vs->nr; i++) { 1631 IS isloc; 1632 ISLocalToGlobalMapping rmap = NULL; 1633 PetscInt nlocal, bs; 1634 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1635 if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, &rmap, NULL)); 1636 if (rmap) { 1637 PetscCall(MatGetBlockSizes(sub, &bs, NULL)); 1638 
PetscCall(ISLocalToGlobalMappingGetSize(rmap, &nlocal)); 1639 PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc)); 1640 PetscCall(ISSetBlockSize(isloc, bs)); 1641 } else { 1642 nlocal = 0; 1643 isloc = NULL; 1644 } 1645 vs->islocal.row[i] = isloc; 1646 offset += nlocal; 1647 } 1648 for (i = 0, offset = 0; i < vs->nc; i++) { 1649 IS isloc; 1650 ISLocalToGlobalMapping cmap = NULL; 1651 PetscInt nlocal, bs; 1652 PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub)); 1653 if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &cmap)); 1654 if (cmap) { 1655 PetscCall(MatGetBlockSizes(sub, NULL, &bs)); 1656 PetscCall(ISLocalToGlobalMappingGetSize(cmap, &nlocal)); 1657 PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc)); 1658 PetscCall(ISSetBlockSize(isloc, bs)); 1659 } else { 1660 nlocal = 0; 1661 isloc = NULL; 1662 } 1663 vs->islocal.col[i] = isloc; 1664 offset += nlocal; 1665 } 1666 1667 /* Set up the aggregate ISLocalToGlobalMapping */ 1668 { 1669 ISLocalToGlobalMapping rmap, cmap; 1670 PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nr, vs->islocal.row, vs->isglobal.row, PETSC_FALSE, &rmap)); 1671 PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nc, vs->islocal.col, vs->isglobal.col, PETSC_TRUE, &cmap)); 1672 if (rmap && cmap) PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap)); 1673 PetscCall(ISLocalToGlobalMappingDestroy(&rmap)); 1674 PetscCall(ISLocalToGlobalMappingDestroy(&cmap)); 1675 } 1676 1677 if (PetscDefined(USE_DEBUG)) { 1678 for (i = 0; i < vs->nr; i++) { 1679 for (j = 0; j < vs->nc; j++) { 1680 PetscInt m, n, M, N, mi, ni, Mi, Ni; 1681 Mat B = vs->m[i][j]; 1682 if (!B) continue; 1683 PetscCall(MatGetSize(B, &M, &N)); 1684 PetscCall(MatGetLocalSize(B, &m, &n)); 1685 PetscCall(ISGetSize(vs->isglobal.row[i], &Mi)); 1686 PetscCall(ISGetSize(vs->isglobal.col[j], &Ni)); 1687 PetscCall(ISGetLocalSize(vs->isglobal.row[i], &mi)); 1688 PetscCall(ISGetLocalSize(vs->isglobal.col[j], &ni)); 1689 PetscCheck(M == Mi && N == Ni, 
PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Global sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", M, N, i, j, Mi, Ni); 1690 PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Local sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, i, j, mi, ni); 1691 } 1692 } 1693 } 1694 1695 /* Set A->assembled if all non-null blocks are currently assembled */ 1696 for (i = 0; i < vs->nr; i++) { 1697 for (j = 0; j < vs->nc; j++) { 1698 if (vs->m[i][j] && !vs->m[i][j]->assembled) PetscFunctionReturn(PETSC_SUCCESS); 1699 } 1700 } 1701 A->assembled = PETSC_TRUE; 1702 PetscFunctionReturn(PETSC_SUCCESS); 1703 } 1704 1705 /*@C 1706 MatCreateNest - Creates a new `MATNEST` matrix containing several nested submatrices, each stored separately 1707 1708 Collective 1709 1710 Input Parameters: 1711 + comm - Communicator for the new `MATNEST` 1712 . nr - number of nested row blocks 1713 . is_row - index sets for each nested row block, or NULL to make contiguous 1714 . nc - number of nested column blocks 1715 . is_col - index sets for each nested column block, or NULL to make contiguous 1716 - a - row-aligned array of nr*nc submatrices, empty submatrices can be passed using NULL 1717 1718 Output Parameter: 1719 . 
B - new matrix 1720 1721 Level: advanced 1722 1723 .seealso: `MATNEST`, `MatCreate()`, `VecCreateNest()`, `DMCreateMatrix()`, `MATNEST`, `MatNestSetSubMat()`, 1724 `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatNestGetSize()`, 1725 `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()` 1726 @*/ 1727 PetscErrorCode MatCreateNest(MPI_Comm comm, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[], Mat *B) 1728 { 1729 Mat A; 1730 1731 PetscFunctionBegin; 1732 *B = NULL; 1733 PetscCall(MatCreate(comm, &A)); 1734 PetscCall(MatSetType(A, MATNEST)); 1735 A->preallocated = PETSC_TRUE; 1736 PetscCall(MatNestSetSubMats(A, nr, is_row, nc, is_col, a)); 1737 *B = A; 1738 PetscFunctionReturn(PETSC_SUCCESS); 1739 } 1740 1741 PetscErrorCode MatConvert_Nest_SeqAIJ_fast(Mat A, MatType newtype, MatReuse reuse, Mat *newmat) 1742 { 1743 Mat_Nest *nest = (Mat_Nest *)A->data; 1744 Mat *trans; 1745 PetscScalar **avv; 1746 PetscScalar *vv; 1747 PetscInt **aii, **ajj; 1748 PetscInt *ii, *jj, *ci; 1749 PetscInt nr, nc, nnz, i, j; 1750 PetscBool done; 1751 1752 PetscFunctionBegin; 1753 PetscCall(MatGetSize(A, &nr, &nc)); 1754 if (reuse == MAT_REUSE_MATRIX) { 1755 PetscInt rnr; 1756 1757 PetscCall(MatGetRowIJ(*newmat, 0, PETSC_FALSE, PETSC_FALSE, &rnr, (const PetscInt **)&ii, (const PetscInt **)&jj, &done)); 1758 PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "MatGetRowIJ"); 1759 PetscCheck(rnr == nr, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of rows"); 1760 PetscCall(MatSeqAIJGetArray(*newmat, &vv)); 1761 } 1762 /* extract CSR for nested SeqAIJ matrices */ 1763 nnz = 0; 1764 PetscCall(PetscCalloc4(nest->nr * nest->nc, &aii, nest->nr * nest->nc, &ajj, nest->nr * nest->nc, &avv, nest->nr * nest->nc, &trans)); 1765 for (i = 0; i < nest->nr; ++i) { 1766 for (j = 0; j < nest->nc; ++j) { 1767 Mat B = nest->m[i][j]; 1768 if (B) { 1769 PetscScalar *naa; 1770 PetscInt *nii, *njj, nnr; 1771 PetscBool 
istrans;

        /* Virtual (Hermitian-)transpose blocks have no CSR of their own: materialize an
           explicit transpose into trans[] and extract the CSR from that instead */
        PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans));
        if (istrans) {
          Mat Bt;

          PetscCall(MatTransposeGetMat(B, &Bt));
          PetscCall(MatTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j]));
          B = trans[i * nest->nc + j];
        } else {
          PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans));
          if (istrans) {
            Mat Bt;

            PetscCall(MatHermitianTransposeGetMat(B, &Bt));
            PetscCall(MatHermitianTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j]));
            B = trans[i * nest->nc + j];
          }
        }
        PetscCall(MatGetRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&nii, (const PetscInt **)&njj, &done));
        PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatGetRowIJ");
        PetscCall(MatSeqAIJGetArray(B, &naa));
        nnz += nii[nnr]; /* nii[nnr] is this block's total nonzero count */

        aii[i * nest->nc + j] = nii;
        ajj[i * nest->nc + j] = njj;
        avv[i * nest->nc + j] = naa;
      }
    }
  }
  if (reuse != MAT_REUSE_MATRIX) {
    PetscCall(PetscMalloc1(nr + 1, &ii));
    PetscCall(PetscMalloc1(nnz, &jj));
    PetscCall(PetscMalloc1(nnz, &vv));
  } else {
    PetscCheck(nnz == ii[nr], PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of nonzeros");
  }

  /* new row pointer: accumulate each block's per-row counts into ii[], shifted by the
     block's row offset (rst), then prefix-sum to obtain CSR row starts */
  PetscCall(PetscArrayzero(ii, nr + 1));
  for (i = 0; i < nest->nr; ++i) {
    PetscInt ncr, rst;

    PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL));
    PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr));
    for (j = 0; j < nest->nc; ++j) {
      if (aii[i * nest->nc + j]) {
        PetscInt *nii = aii[i * nest->nc + j];
        PetscInt  ir;

        for (ir = rst; ir < ncr + rst; ++ir) {
          ii[ir + 1] += nii[1] - nii[0]; /* row length of this block's row */
          nii++;
        }
      }
    }
  }
  for (i = 0; i < nr; i++) ii[i + 1] += ii[i];

  /* construct CSR for the new matrix */
  PetscCall(PetscCalloc1(nr, &ci)); /* ci[r] = how many entries of row r are already filled */
  for (i = 0; i < nest->nr; ++i) {
    PetscInt ncr, rst;

    PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL));
    PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr));
    for (j = 0; j < nest->nc; ++j) {
      if (aii[i * nest->nc + j]) {
        PetscScalar *nvv = avv[i * nest->nc + j];
        PetscInt    *nii = aii[i * nest->nc + j];
        PetscInt    *njj = ajj[i * nest->nc + j];
        PetscInt     ir, cst;

        /* cst is the column offset of block column j in the merged matrix */
        PetscCall(ISStrideGetInfo(nest->isglobal.col[j], &cst, NULL));
        for (ir = rst; ir < ncr + rst; ++ir) {
          PetscInt ij, rsize = nii[1] - nii[0], ist = ii[ir] + ci[ir];

          for (ij = 0; ij < rsize; ij++) {
            jj[ist + ij] = *njj + cst;
            vv[ist + ij] = *nvv;
            njj++;
            nvv++;
          }
          ci[ir] += rsize;
          nii++;
        }
      }
    }
  }
  PetscCall(PetscFree(ci));

  /* restore info: release borrowed CSR arrays and destroy any explicit transposes */
  for (i = 0; i < nest->nr; ++i) {
    for (j = 0; j < nest->nc; ++j) {
      Mat B = nest->m[i][j];
      if (B) {
        PetscInt nnr = 0, k = i * nest->nc + j;

        B = (trans[k] ?
trans[k] : B);
        PetscCall(MatRestoreRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&aii[k], (const PetscInt **)&ajj[k], &done));
        PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatRestoreRowIJ");
        PetscCall(MatSeqAIJRestoreArray(B, &avv[k]));
        PetscCall(MatDestroy(&trans[k]));
      }
    }
  }
  PetscCall(PetscFree4(aii, ajj, avv, trans));

  /* finalize newmat: hand the merged CSR arrays to a new SeqAIJ (ownership transferred below) */
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, newmat));
  } else if (reuse == MAT_INPLACE_MATRIX) {
    Mat B;

    PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, &B));
    PetscCall(MatHeaderReplace(A, &B));
  }
  PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY));
  {
    /* MatCreateSeqAIJWithArrays does not take ownership by default; mark the arrays
       as owned so they are freed with the matrix */
    Mat_SeqAIJ *a = (Mat_SeqAIJ *)((*newmat)->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Y += a*X where X is MATNEST and Y is a matrix supporting MatSetValues() (dense/AIJ):
   scatter every block of X into Y using the nest's global index sets */
PETSC_INTERN PetscErrorCode MatAXPY_Dense_Nest(Mat Y, PetscScalar a, Mat X)
{
  Mat_Nest *nest = (Mat_Nest *)X->data;
  PetscInt  i, j, k, rstart;
  PetscBool flg;

  PetscFunctionBegin;
  /* Fill by row */
  for (j = 0; j < nest->nc; ++j) {
    /* Using global column indices and ISAllGather() is not scalable.
*/
    IS              bNis;
    PetscInt        bN;
    const PetscInt *bNindices; /* global column indices of block column j, gathered on every rank */
    PetscCall(ISAllGather(nest->isglobal.col[j], &bNis));
    PetscCall(ISGetSize(bNis, &bN));
    PetscCall(ISGetIndices(bNis, &bNindices));
    for (i = 0; i < nest->nr; ++i) {
      Mat             B = nest->m[i][j], D = NULL;
      PetscInt        bm, br;
      const PetscInt *bmindices;
      if (!B) continue;
      /* Replace virtual (Hermitian-)transpose wrappers by an explicit matrix so MatGetRow() works */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, ""));
      if (flg) {
        /* exactly one of the two try-methods matches B's type and sets D */
        PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /* SBAIJ stores only one triangle; convert to BAIJ so MatGetRow() returns full rows */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
      if (flg) {
        if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D));
        else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm));
      PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices));
      PetscCall(MatGetOwnershipRange(B, &rstart, NULL));
      for (br = 0; br < bm; ++br) {
        PetscInt           row = bmindices[br], brncols, *cols;
        const PetscInt    *brcols;
        const PetscScalar *brcoldata;
        PetscScalar       *vals = NULL;
        PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, &brcoldata));
        PetscCall(PetscMalloc1(brncols, &cols));
        /* translate block-local column indices to global columns of Y */
        for (k = 0; k < brncols; k++) cols[k] = bNindices[brcols[k]];
        /*
          Nest blocks are required to be nonoverlapping -- otherwise nest and monolithic index layouts wouldn't match.
          Thus, we could use INSERT_VALUES, but I prefer ADD_VALUES.
        */
        if (a != 1.0) {
          PetscCall(PetscMalloc1(brncols, &vals));
          for (k = 0; k < brncols; k++) vals[k] = a * brcoldata[k];
          PetscCall(MatSetValues(Y, 1, &row, brncols, cols, vals, ADD_VALUES));
          PetscCall(PetscFree(vals));
        } else {
          PetscCall(MatSetValues(Y, 1, &row, brncols, cols, brcoldata, ADD_VALUES));
        }
        PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, &brcoldata));
        PetscCall(PetscFree(cols));
      }
      if (D) PetscCall(MatDestroy(&D));
      PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices));
    }
    PetscCall(ISRestoreIndices(bNis, &bNindices));
    PetscCall(ISDestroy(&bNis));
  }
  PetscCall(MatAssemblyBegin(Y, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Y, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a MATNEST to (Seq/MPI)AIJ: try the sequential fast path first, otherwise
   compute a preallocation via PetscSF reductions and fill through MatAXPY_Dense_Nest() */
PetscErrorCode MatConvert_Nest_AIJ(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat_Nest   *nest = (Mat_Nest *)A->data;
  PetscInt    m, n, M, N, i, j, k, *dnnz, *onnz, rstart, cstart, cend;
  PetscMPIInt size;
  Mat         C;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  if (size == 1) { /* look for a special case with SeqAIJ matrices and strided-1, contiguous, blocks */
    PetscInt  nf;
    PetscBool fast;

    PetscCall(PetscStrcmp(newtype, MATAIJ, &fast));
    if (!fast) PetscCall(PetscStrcmp(newtype, MATSEQAIJ, &fast));
    /* every nonempty block must be SeqAIJ, possibly behind a (Hermitian-)transpose wrapper */
    for (i = 0; i < nest->nr && fast; ++i) {
      for (j = 0; j < nest->nc && fast; ++j) {
        Mat B = nest->m[i][j];
        if (B) {
          PetscCall(PetscObjectTypeCompare((PetscObject)B, MATSEQAIJ, &fast));
          if (!fast) {
            PetscBool istrans;

            PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans));
            if (istrans) {
              Mat Bt;

              PetscCall(MatTransposeGetMat(B, &Bt));
              PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast));
            } else {
PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans)); 2000 if (istrans) { 2001 Mat Bt; 2002 2003 PetscCall(MatHermitianTransposeGetMat(B, &Bt)); 2004 PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast)); 2005 } 2006 } 2007 } 2008 } 2009 } 2010 } 2011 for (i = 0, nf = 0; i < nest->nr && fast; ++i) { 2012 PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.row[i], ISSTRIDE, &fast)); 2013 if (fast) { 2014 PetscInt f, s; 2015 2016 PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &f, &s)); 2017 if (f != nf || s != 1) { 2018 fast = PETSC_FALSE; 2019 } else { 2020 PetscCall(ISGetSize(nest->isglobal.row[i], &f)); 2021 nf += f; 2022 } 2023 } 2024 } 2025 for (i = 0, nf = 0; i < nest->nc && fast; ++i) { 2026 PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.col[i], ISSTRIDE, &fast)); 2027 if (fast) { 2028 PetscInt f, s; 2029 2030 PetscCall(ISStrideGetInfo(nest->isglobal.col[i], &f, &s)); 2031 if (f != nf || s != 1) { 2032 fast = PETSC_FALSE; 2033 } else { 2034 PetscCall(ISGetSize(nest->isglobal.col[i], &f)); 2035 nf += f; 2036 } 2037 } 2038 } 2039 if (fast) { 2040 PetscCall(MatConvert_Nest_SeqAIJ_fast(A, newtype, reuse, newmat)); 2041 PetscFunctionReturn(PETSC_SUCCESS); 2042 } 2043 } 2044 PetscCall(MatGetSize(A, &M, &N)); 2045 PetscCall(MatGetLocalSize(A, &m, &n)); 2046 PetscCall(MatGetOwnershipRangeColumn(A, &cstart, &cend)); 2047 if (reuse == MAT_REUSE_MATRIX) C = *newmat; 2048 else { 2049 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C)); 2050 PetscCall(MatSetType(C, newtype)); 2051 PetscCall(MatSetSizes(C, m, n, M, N)); 2052 } 2053 PetscCall(PetscMalloc1(2 * m, &dnnz)); 2054 onnz = dnnz + m; 2055 for (k = 0; k < m; k++) { 2056 dnnz[k] = 0; 2057 onnz[k] = 0; 2058 } 2059 for (j = 0; j < nest->nc; ++j) { 2060 IS bNis; 2061 PetscInt bN; 2062 const PetscInt *bNindices; 2063 PetscBool flg; 2064 /* Using global column indices and ISAllGather() is not scalable. 
*/ 2065 PetscCall(ISAllGather(nest->isglobal.col[j], &bNis)); 2066 PetscCall(ISGetSize(bNis, &bN)); 2067 PetscCall(ISGetIndices(bNis, &bNindices)); 2068 for (i = 0; i < nest->nr; ++i) { 2069 PetscSF bmsf; 2070 PetscSFNode *iremote; 2071 Mat B = nest->m[i][j], D = NULL; 2072 PetscInt bm, *sub_dnnz, *sub_onnz, br; 2073 const PetscInt *bmindices; 2074 if (!B) continue; 2075 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm)); 2076 PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices)); 2077 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &bmsf)); 2078 PetscCall(PetscMalloc1(bm, &iremote)); 2079 PetscCall(PetscMalloc1(bm, &sub_dnnz)); 2080 PetscCall(PetscMalloc1(bm, &sub_onnz)); 2081 for (k = 0; k < bm; ++k) { 2082 sub_dnnz[k] = 0; 2083 sub_onnz[k] = 0; 2084 } 2085 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &flg)); 2086 if (flg) { 2087 PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D)); 2088 PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D)); 2089 PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D)); 2090 B = D; 2091 } 2092 PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, "")); 2093 if (flg) { 2094 if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D)); 2095 else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D)); 2096 B = D; 2097 } 2098 /* 2099 Locate the owners for all of the locally-owned global row indices for this row block. 2100 These determine the roots of PetscSF used to communicate preallocation data to row owners. 2101 The roots correspond to the dnnz and onnz entries; thus, there are two roots per row. 
2102 */ 2103 PetscCall(MatGetOwnershipRange(B, &rstart, NULL)); 2104 for (br = 0; br < bm; ++br) { 2105 PetscInt row = bmindices[br], brncols, col; 2106 const PetscInt *brcols; 2107 PetscInt rowrel = 0; /* row's relative index on its owner rank */ 2108 PetscMPIInt rowowner = 0; 2109 PetscCall(PetscLayoutFindOwnerIndex(A->rmap, row, &rowowner, &rowrel)); 2110 /* how many roots */ 2111 iremote[br].rank = rowowner; 2112 iremote[br].index = rowrel; /* edge from bmdnnz to dnnz */ 2113 /* get nonzero pattern */ 2114 PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, NULL)); 2115 for (k = 0; k < brncols; k++) { 2116 col = bNindices[brcols[k]]; 2117 if (col >= A->cmap->range[rowowner] && col < A->cmap->range[rowowner + 1]) { 2118 sub_dnnz[br]++; 2119 } else { 2120 sub_onnz[br]++; 2121 } 2122 } 2123 PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, NULL)); 2124 } 2125 if (D) PetscCall(MatDestroy(&D)); 2126 PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices)); 2127 /* bsf will have to take care of disposing of bedges. 
*/ 2128 PetscCall(PetscSFSetGraph(bmsf, m, bm, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER)); 2129 PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM)); 2130 PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM)); 2131 PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM)); 2132 PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM)); 2133 PetscCall(PetscFree(sub_dnnz)); 2134 PetscCall(PetscFree(sub_onnz)); 2135 PetscCall(PetscSFDestroy(&bmsf)); 2136 } 2137 PetscCall(ISRestoreIndices(bNis, &bNindices)); 2138 PetscCall(ISDestroy(&bNis)); 2139 } 2140 /* Resize preallocation if overestimated */ 2141 for (i = 0; i < m; i++) { 2142 dnnz[i] = PetscMin(dnnz[i], A->cmap->n); 2143 onnz[i] = PetscMin(onnz[i], A->cmap->N - A->cmap->n); 2144 } 2145 PetscCall(MatSeqAIJSetPreallocation(C, 0, dnnz)); 2146 PetscCall(MatMPIAIJSetPreallocation(C, 0, dnnz, 0, onnz)); 2147 PetscCall(PetscFree(dnnz)); 2148 PetscCall(MatAXPY_Dense_Nest(C, 1.0, A)); 2149 if (reuse == MAT_INPLACE_MATRIX) { 2150 PetscCall(MatHeaderReplace(A, &C)); 2151 } else *newmat = C; 2152 PetscFunctionReturn(PETSC_SUCCESS); 2153 } 2154 2155 PetscErrorCode MatConvert_Nest_Dense(Mat A, MatType newtype, MatReuse reuse, Mat *newmat) 2156 { 2157 Mat B; 2158 PetscInt m, n, M, N; 2159 2160 PetscFunctionBegin; 2161 PetscCall(MatGetSize(A, &M, &N)); 2162 PetscCall(MatGetLocalSize(A, &m, &n)); 2163 if (reuse == MAT_REUSE_MATRIX) { 2164 B = *newmat; 2165 PetscCall(MatZeroEntries(B)); 2166 } else { 2167 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), m, PETSC_DECIDE, M, N, NULL, &B)); 2168 } 2169 PetscCall(MatAXPY_Dense_Nest(B, 1.0, A)); 2170 if (reuse == MAT_INPLACE_MATRIX) { 2171 PetscCall(MatHeaderReplace(A, &B)); 2172 } else if (reuse == MAT_INITIAL_MATRIX) *newmat = B; 2173 PetscFunctionReturn(PETSC_SUCCESS); 2174 } 2175 2176 PetscErrorCode MatHasOperation_Nest(Mat mat, MatOperation op, PetscBool *has) 2177 { 2178 Mat_Nest *bA = (Mat_Nest 
*)mat->data;
  MatOperation opAdd;
  PetscInt     i, j, nr = bA->nr, nc = bA->nc;
  PetscBool    flg;

  PetscFunctionBegin;

  *has = PETSC_FALSE;
  if (op == MATOP_MULT || op == MATOP_MULT_ADD || op == MATOP_MULT_TRANSPOSE || op == MATOP_MULT_TRANSPOSE_ADD) {
    /* the nest mult kernels call MatMultAdd/MatMultTransposeAdd on each block, so
       every nonempty block must support the *Add form */
    opAdd = (op == MATOP_MULT || op == MATOP_MULT_ADD ? MATOP_MULT_ADD : MATOP_MULT_TRANSPOSE_ADD);
    for (j = 0; j < nc; j++) {
      for (i = 0; i < nr; i++) {
        if (!bA->m[i][j]) continue;
        PetscCall(MatHasOperation(bA->m[i][j], opAdd, &flg));
        if (!flg) PetscFunctionReturn(PETSC_SUCCESS); /* one unsupported block vetoes the op */
      }
    }
  }
  /* finally, the operation must actually be wired into the nest's function table */
  if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATNEST - MATNEST = "nest" - Matrix type consisting of nested submatrices, each stored separately.

  Level: intermediate

  Notes:
  This matrix type permits scalable use of `PCFIELDSPLIT` and avoids the large memory costs of extracting submatrices.
  It allows the use of symmetric and block formats for parts of multi-physics simulations.
  It is usually used with `DMCOMPOSITE` and `DMCreateMatrix()`

  Each of the submatrices lives on the same MPI communicator as the original nest matrix (though they can have zero
  rows/columns on some processes.) Thus this is not meant for cases where the submatrices live on far fewer processes
  than the nest matrix.

.seealso: `MATNEST`, `MatCreate()`, `MatType`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`,
          `VecCreateNest()`, `DMCreateMatrix()`, `DMCOMPOSITE`, `MatNestSetVecType()`, `MatNestGetLocalISs()`,
          `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()`
M*/
/* Type constructor for MATNEST: allocates the Mat_Nest context, installs the operation
   table, and composes the nest-specific and conversion methods on the object. */
PETSC_EXTERN PetscErrorCode MatCreate_Nest(Mat A)
{
  Mat_Nest *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  A->data = (void *)s;

  /* nr/nc = -1 marks the nest structure as not yet set (see MatNestSetSubMats) */
  s->nr            = -1;
  s->nc            = -1;
  s->m             = NULL;
  s->splitassembly = PETSC_FALSE;

  PetscCall(PetscMemzero(A->ops, sizeof(*A->ops)));

  A->ops->mult                  = MatMult_Nest;
  A->ops->multadd               = MatMultAdd_Nest;
  A->ops->multtranspose         = MatMultTranspose_Nest;
  A->ops->multtransposeadd      = MatMultTransposeAdd_Nest;
  A->ops->transpose             = MatTranspose_Nest;
  A->ops->assemblybegin         = MatAssemblyBegin_Nest;
  A->ops->assemblyend           = MatAssemblyEnd_Nest;
  A->ops->zeroentries           = MatZeroEntries_Nest;
  A->ops->copy                  = MatCopy_Nest;
  A->ops->axpy                  = MatAXPY_Nest;
  A->ops->duplicate             = MatDuplicate_Nest;
  A->ops->createsubmatrix       = MatCreateSubMatrix_Nest;
  A->ops->destroy               = MatDestroy_Nest;
  A->ops->view                  = MatView_Nest;
  A->ops->getvecs               = NULL; /* Use VECNEST by calling MatNestSetVecType(A,VECNEST) */
  A->ops->getlocalsubmatrix     = MatGetLocalSubMatrix_Nest;
  A->ops->restorelocalsubmatrix = MatRestoreLocalSubMatrix_Nest;
  A->ops->getdiagonal           = MatGetDiagonal_Nest;
  A->ops->diagonalscale         = MatDiagonalScale_Nest;
  A->ops->scale                 = MatScale_Nest;
  A->ops->shift                 = MatShift_Nest;
  A->ops->diagonalset           = MatDiagonalSet_Nest;
  A->ops->setrandom             = MatSetRandom_Nest;
  A->ops->hasoperation          = MatHasOperation_Nest;
  A->ops->missingdiagonal       = MatMissingDiagonal_Nest;

  A->spptr     = NULL;
  A->assembled = PETSC_FALSE;

  /* expose Nest api's */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", MatNestGetSubMat_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", MatNestSetSubMat_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", MatNestGetSubMats_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", MatNestGetSize_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", MatNestGetISs_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", MatNestGetLocalISs_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", MatNestSetVecType_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", MatNestSetSubMats_Nest));
  /* conversion hooks picked up by MatConvert() via type-pair name lookup */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", MatConvert_Nest_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", MatConvert_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", MatConvert_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", MatProductSetFromOptions_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", MatProductSetFromOptions_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", MatProductSetFromOptions_Nest_Dense));

  PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATNEST));
  PetscFunctionReturn(PETSC_SUCCESS);
}