#include <../src/mat/impls/nest/matnestimpl.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <petscsf.h>

/* Forward declarations: these routines are referenced before they are defined below */
static PetscErrorCode MatSetUp_NestIS_Private(Mat, PetscInt, const IS[], PetscInt, const IS[]);
static PetscErrorCode MatCreateVecs_Nest(Mat, Vec *, Vec *);
static PetscErrorCode MatReset_Nest(Mat);

PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat, MatType, MatReuse, Mat *);

/* private functions */

/* Computes the overall local (m,n) and global (M,N) sizes of the nest matrix by
   summing the sizes of its global row and column index sets. All four output
   pointers must be non-NULL; they are reset to zero before accumulation. */
static PetscErrorCode MatNestGetSizes_Private(Mat A, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  *m = *n = *M = *N = 0;
  for (i = 0; i < bA->nr; i++) { /* rows */
    PetscInt sm, sM;
    PetscCall(ISGetLocalSize(bA->isglobal.row[i], &sm));
    PetscCall(ISGetSize(bA->isglobal.row[i], &sM));
    *m += sm;
    *M += sM;
  }
  for (j = 0; j < bA->nc; j++) { /* cols */
    PetscInt sn, sN;
    PetscCall(ISGetLocalSize(bA->isglobal.col[j], &sn));
    PetscCall(ISGetSize(bA->isglobal.col[j], &sN));
    *n += sn;
    *N += sN;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* operations */

/* y = A x computed block-wise: y[i] = sum_j A[i][j] x[j].
   NULL blocks are treated as zero. Sub-vectors of x and y are accessed through
   the cached bA->right/bA->left work arrays via VecGetSubVector. */
static PetscErrorCode MatMult_Nest(Mat A, Vec x, Vec y)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->right, *by = bA->left;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i]));
  for (i = 0; i < nr; i++) {
    PetscCall(VecZeroEntries(by[i]));
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      /* y[i] <- y[i] + A[i][j] * x[j] */
      PetscCall(MatMultAdd(bA->m[i][j], bx[j], by[i], by[i]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A x computed block-wise. When y != z, each y sub-vector is first
   copied into the corresponding z sub-vector, then the products are accumulated
   into z; when y == z the accumulation happens in place. */
static PetscErrorCode MatMultAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->right, *bz = bA->left;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(z, bA->isglobal.row[i], &bz[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i]));
  for (i = 0; i < nr; i++) {
    if (y != z) {
      Vec by;
      PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by));
      PetscCall(VecCopy(by, bz[i]));
      PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by));
    }
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      /* y[i] <- y[i] + A[i][j] * x[j] */
      PetscCall(MatMultAdd(bA->m[i][j], bx[j], bz[i], bz[i]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.row[i], &bz[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Product data attached to C = A*B when A is MATNEST and B is dense */
typedef struct {
  Mat         *workC;       /* array of Mat with specific containers depending on the underlying MatMatMult implementation */
  PetscScalar *tarray;      /* buffer for storing all temporary products A[i][j] B[j] */
  PetscInt    *dm, *dn, k;  /* displacements and number of submatrices */
} Nest_Dense;

/* Numeric phase of C = A*B (A nest, B dense): reuses the symbolic data stashed
   in C->product->data and accumulates each block product into a dense view of
   the corresponding row slab of C. */
static PetscErrorCode MatProductNumeric_Nest_Dense(Mat C)
{
  Mat_Nest          *bA;
  Nest_Dense        *contents;
  Mat                viewB, viewC, productB, workC;
  const PetscScalar *barray;
  PetscScalar       *carray;
  PetscInt           i, j, M, N, nr, nc, ldb, ldc;
  Mat                A, B;

  PetscFunctionBegin;
  MatCheckProduct(C, 1);
  A = C->product->A;
  B = C->product->B;
  PetscCall(MatGetSize(B, NULL, &N));
  if (!N) { /* empty B: nothing to compute, just assemble C */
    PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  contents = (Nest_Dense *)C->product->data;
  PetscCheck(contents, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data empty");
  bA = (Mat_Nest *)A->data;
  nr = bA->nr;
  nc = bA->nc;
  PetscCall(MatDenseGetLDA(B, &ldb));
  PetscCall(MatDenseGetLDA(C, &ldc));
  PetscCall(MatZeroEntries(C));
  PetscCall(MatDenseGetArrayRead(B, &barray));
  PetscCall(MatDenseGetArray(C, &carray));
  for (i = 0; i < nr; i++) {
    PetscCall(ISGetSize(bA->isglobal.row[i], &M));
    /* viewC is a dense window onto the row slab of C owned by block row i;
       contents->dm holds the row displacements computed in the symbolic phase */
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dm[i + 1] - contents->dm[i], PETSC_DECIDE, M, N, carray + contents->dm[i], &viewC));
    PetscCall(MatDenseSetLDA(viewC, ldc));
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      PetscCall(ISGetSize(bA->isglobal.col[j], &M));
      /* viewB is a dense window onto the row slab of B matching block column j */
      PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, (PetscScalar *)(barray + contents->dn[j]), &viewB));
      PetscCall(MatDenseSetLDA(viewB, ldb));

      /* MatMatMultNumeric(bA->m[i][j],viewB,contents->workC[i*nc + j]); */
      workC             = contents->workC[i * nc + j];
      productB          = workC->product->B;
      workC->product->B = viewB; /* use newly created dense matrix viewB */
      PetscCall(MatProductNumeric(workC));
      PetscCall(MatDestroy(&viewB));
      workC->product->B = productB; /* resume original B */

      /* C[i] <- workC + C[i] */
      PetscCall(MatAXPY(viewC, 1.0, contents->workC[i * nc + j], SAME_NONZERO_PATTERN));
    }
    PetscCall(MatDestroy(&viewC));
  }
  PetscCall(MatDenseRestoreArray(C, &carray));
  PetscCall(MatDenseRestoreArrayRead(B, &barray));

  PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destructor for the Nest_Dense product data: frees the shared temporary
   buffer, the work matrices, and the displacement arrays. */
static PetscErrorCode MatNest_DenseDestroy(void *ctx)
{
  Nest_Dense *contents = (Nest_Dense *)ctx;
  PetscInt    i;

  PetscFunctionBegin;
  PetscCall(PetscFree(contents->tarray));
  for (i = 0; i < contents->k; i++) PetscCall(MatDestroy(contents->workC + i));
  PetscCall(PetscFree3(contents->dm, contents->dn, contents->workC));
  PetscCall(PetscFree(contents));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic phase of C = A*B (A nest, B dense): sizes C, allocates the
   Nest_Dense product data (row/column displacements, shared temporary buffer,
   and one symbolic work product per nonzero block), and installs the numeric
   kernel. */
static PetscErrorCode MatProductSymbolic_Nest_Dense(Mat C)
{
  Mat_Nest          *bA;
  Mat                viewB, workC;
  const PetscScalar *barray;
  PetscInt           i, j, M, N, m, n, nr, nc, maxm = 0, ldb;
  Nest_Dense        *contents = NULL;
  PetscBool          cisdense;
  Mat                A, B;
  PetscReal          fill;

  PetscFunctionBegin;
  MatCheckProduct(C, 1);
  PetscCheck(!C->product->data, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data not empty");
  A    = C->product->A;
  B    = C->product->B;
  fill = C->product->fill;
  bA   = (Mat_Nest *)A->data;
  nr   = bA->nr;
  nc   = bA->nc;
  PetscCall(MatGetLocalSize(C, &m, &n));
  PetscCall(MatGetSize(C, &M, &N));
  if (m == PETSC_DECIDE || n == PETSC_DECIDE || M == PETSC_DECIDE || N == PETSC_DECIDE) {
    /* C not sized yet: rows come from A, columns from B */
    PetscCall(MatGetLocalSize(B, NULL, &n));
    PetscCall(MatGetSize(B, NULL, &N));
    PetscCall(MatGetLocalSize(A, &m, NULL));
    PetscCall(MatGetSize(A, &M, NULL));
    PetscCall(MatSetSizes(C, m, n, M, N));
  }
  PetscCall(PetscObjectTypeCompareAny((PetscObject)C, &cisdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, MATMPIDENSECUDA, ""));
  if (!cisdense) PetscCall(MatSetType(C, ((PetscObject)B)->type_name));
  PetscCall(MatSetUp(C));
  if (!N) { /* empty B: the numeric phase will only assemble C */
    C->ops->productnumeric = MatProductNumeric_Nest_Dense;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscNew(&contents));
  C->product->data    = contents;
  C->product->destroy = MatNest_DenseDestroy;
  PetscCall(PetscCalloc3(nr + 1, &contents->dm, nc + 1, &contents->dn, nr * nc, &contents->workC));
  contents->k = nr * nc;
  /* dm[i+1] accumulates local row displacements; maxm tracks the largest block
     row so one temporary buffer can be shared by all work products */
  for (i = 0; i < nr; i++) {
    PetscCall(ISGetLocalSize(bA->isglobal.row[i], contents->dm + i + 1));
    maxm = PetscMax(maxm,
      contents->dm[i + 1]);
    contents->dm[i + 1] += contents->dm[i];
  }
  for (i = 0; i < nc; i++) {
    PetscCall(ISGetLocalSize(bA->isglobal.col[i], contents->dn + i + 1));
    contents->dn[i + 1] += contents->dn[i];
  }
  PetscCall(PetscMalloc1(maxm * N, &contents->tarray));
  PetscCall(MatDenseGetLDA(B, &ldb));
  PetscCall(MatGetSize(B, NULL, &N));
  PetscCall(MatDenseGetArrayRead(B, &barray));
  /* loops are permuted compared to MatMatMultNumeric so that viewB is created only once per column of A */
  for (j = 0; j < nc; j++) {
    PetscCall(ISGetSize(bA->isglobal.col[j], &M));
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, (PetscScalar *)(barray + contents->dn[j]), &viewB));
    PetscCall(MatDenseSetLDA(viewB, ldb));
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      /* MatMatMultSymbolic may attach a specific container (depending on MatType of bA->m[i][j]) to workC[i][j] */

      PetscCall(MatProductCreate(bA->m[i][j], viewB, NULL, &contents->workC[i * nc + j]));
      workC = contents->workC[i * nc + j];
      PetscCall(MatProductSetType(workC, MATPRODUCT_AB));
      PetscCall(MatProductSetAlgorithm(workC, "default"));
      PetscCall(MatProductSetFill(workC, fill));
      PetscCall(MatProductSetFromOptions(workC));
      PetscCall(MatProductSymbolic(workC));

      /* since tarray will be shared by all Mat */
      PetscCall(MatSeqDenseSetPreallocation(workC, contents->tarray));
      PetscCall(MatMPIDenseSetPreallocation(workC, contents->tarray));
    }
    PetscCall(MatDestroy(&viewB));
  }
  PetscCall(MatDenseRestoreArrayRead(B, &barray));

  C->ops->productnumeric = MatProductNumeric_Nest_Dense;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Installs the nest*dense symbolic kernel for the AB product type */
static PetscErrorCode MatProductSetFromOptions_Nest_Dense_AB(Mat C)
{
  PetscFunctionBegin;
  C->ops->productsymbolic = MatProductSymbolic_Nest_Dense;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Dispatch for nest*dense products: only MATPRODUCT_AB is supported here;
   other product types leave C->ops->productsymbolic unset. */
static PetscErrorCode MatProductSetFromOptions_Nest_Dense(Mat C)
{
  Mat_Product *product = C->product;

  PetscFunctionBegin;
  if (product->type == MATPRODUCT_AB) PetscCall(MatProductSetFromOptions_Nest_Dense_AB(C));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* y = A^T x computed block-wise: y[j] = sum_i (A[i][j])^T x[i].
   Note the role reversal: x is split by row index sets (bA->left work array),
   y by column index sets (bA->right). */
static PetscErrorCode MatMultTranspose_Nest(Mat A, Vec x, Vec y)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->left, *by = bA->right;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(y, bA->isglobal.col[i], &by[i]));
  for (j = 0; j < nc; j++) {
    PetscCall(VecZeroEntries(by[j]));
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      /* y[j] <- y[j] + (A[i][j])^T * x[i] */
      PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], by[j], by[j]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.col[i], &by[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A^T x computed block-wise; mirrors MatMultAdd_Nest with rows and
   columns exchanged. */
static PetscErrorCode MatMultTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->left, *bz = bA->right;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(z, bA->isglobal.col[i], &bz[i]));
  for (j = 0; j < nc; j++) {
    if (y != z) {
      Vec by;
      PetscCall(VecGetSubVector(y, bA->isglobal.col[j], &by));
      PetscCall(VecCopy(by, bz[j]));
      PetscCall(VecRestoreSubVector(y, bA->isglobal.col[j], &by));
    }
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      /* z[j] <- y[j] + (A[i][j])^T * x[i] */
      PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i],
bz[j], bz[j])); 309 } 310 } 311 for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i])); 312 for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.col[i], &bz[i])); 313 PetscFunctionReturn(PETSC_SUCCESS); 314 } 315 316 static PetscErrorCode MatTranspose_Nest(Mat A, MatReuse reuse, Mat *B) 317 { 318 Mat_Nest *bA = (Mat_Nest *)A->data, *bC; 319 Mat C; 320 PetscInt i, j, nr = bA->nr, nc = bA->nc; 321 322 PetscFunctionBegin; 323 if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B)); 324 PetscCheck(reuse != MAT_INPLACE_MATRIX || nr == nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Square nested matrix only for in-place"); 325 326 if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) { 327 Mat *subs; 328 IS *is_row, *is_col; 329 330 PetscCall(PetscCalloc1(nr * nc, &subs)); 331 PetscCall(PetscMalloc2(nr, &is_row, nc, &is_col)); 332 PetscCall(MatNestGetISs(A, is_row, is_col)); 333 if (reuse == MAT_INPLACE_MATRIX) { 334 for (i = 0; i < nr; i++) { 335 for (j = 0; j < nc; j++) subs[i + nr * j] = bA->m[i][j]; 336 } 337 } 338 339 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nc, is_col, nr, is_row, subs, &C)); 340 PetscCall(PetscFree(subs)); 341 PetscCall(PetscFree2(is_row, is_col)); 342 } else { 343 C = *B; 344 } 345 346 bC = (Mat_Nest *)C->data; 347 for (i = 0; i < nr; i++) { 348 for (j = 0; j < nc; j++) { 349 if (bA->m[i][j]) { 350 PetscCall(MatTranspose(bA->m[i][j], reuse, &(bC->m[j][i]))); 351 } else { 352 bC->m[j][i] = NULL; 353 } 354 } 355 } 356 357 if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) { 358 *B = C; 359 } else { 360 PetscCall(MatHeaderMerge(A, &C)); 361 } 362 PetscFunctionReturn(PETSC_SUCCESS); 363 } 364 365 static PetscErrorCode MatNestDestroyISList(PetscInt n, IS **list) 366 { 367 IS *lst = *list; 368 PetscInt i; 369 370 PetscFunctionBegin; 371 if (!lst) PetscFunctionReturn(PETSC_SUCCESS); 372 for (i = 0; i < n; i++) 373 if (lst[i]) 
      PetscCall(ISDestroy(&lst[i]));
  PetscCall(PetscFree(lst));
  *list = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Releases all state owned by the nest matrix (index sets, work vectors,
   sub-matrices, bookkeeping arrays) and restores the struct to its empty
   defaults; shared between destroy and re-setup paths. */
static PetscErrorCode MatReset_Nest(Mat A)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  /* release the matrices and the place holders */
  PetscCall(MatNestDestroyISList(vs->nr, &vs->isglobal.row));
  PetscCall(MatNestDestroyISList(vs->nc, &vs->isglobal.col));
  PetscCall(MatNestDestroyISList(vs->nr, &vs->islocal.row));
  PetscCall(MatNestDestroyISList(vs->nc, &vs->islocal.col));

  PetscCall(PetscFree(vs->row_len));
  PetscCall(PetscFree(vs->col_len));
  PetscCall(PetscFree(vs->nnzstate));

  PetscCall(PetscFree2(vs->left, vs->right));

  /* release the matrices and the place holders */
  if (vs->m) {
    for (i = 0; i < vs->nr; i++) {
      for (j = 0; j < vs->nc; j++) PetscCall(MatDestroy(&vs->m[i][j]));
      PetscCall(PetscFree(vs->m[i]));
    }
    PetscCall(PetscFree(vs->m));
  }

  /* restore defaults */
  vs->nr            = 0;
  vs->nc            = 0;
  vs->splitassembly = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destructor: resets all nest state, frees the data struct, then detaches
   every composed method so no dangling function pointers remain. */
static PetscErrorCode MatDestroy_Nest(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatReset_Nest(A));
  PetscCall(PetscFree(A->data));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C",
NULL)); 426 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", NULL)); 427 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", NULL)); 428 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", NULL)); 429 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", NULL)); 430 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", NULL)); 431 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", NULL)); 432 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", NULL)); 433 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", NULL)); 434 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", NULL)); 435 PetscFunctionReturn(PETSC_SUCCESS); 436 } 437 438 static PetscErrorCode MatMissingDiagonal_Nest(Mat mat, PetscBool *missing, PetscInt *dd) 439 { 440 Mat_Nest *vs = (Mat_Nest *)mat->data; 441 PetscInt i; 442 443 PetscFunctionBegin; 444 if (dd) *dd = 0; 445 if (!vs->nr) { 446 *missing = PETSC_TRUE; 447 PetscFunctionReturn(PETSC_SUCCESS); 448 } 449 *missing = PETSC_FALSE; 450 for (i = 0; i < vs->nr && !(*missing); i++) { 451 *missing = PETSC_TRUE; 452 if (vs->m[i][i]) { 453 PetscCall(MatMissingDiagonal(vs->m[i][i], missing, NULL)); 454 PetscCheck(!*missing || !dd, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "First missing entry not yet implemented"); 455 } 456 } 457 PetscFunctionReturn(PETSC_SUCCESS); 458 } 459 460 static PetscErrorCode MatAssemblyBegin_Nest(Mat A, MatAssemblyType type) 461 { 462 Mat_Nest *vs = (Mat_Nest *)A->data; 463 PetscInt i, j; 464 PetscBool nnzstate = PETSC_FALSE; 465 466 PetscFunctionBegin; 467 for (i = 0; i < vs->nr; i++) { 468 for (j = 0; j < vs->nc; j++) { 469 PetscObjectState subnnzstate = 0; 470 if (vs->m[i][j]) { 471 PetscCall(MatAssemblyBegin(vs->m[i][j], 
type)); 472 if (!vs->splitassembly) { 473 /* Note: split assembly will fail if the same block appears more than once (even indirectly through a nested 474 * sub-block). This could be fixed by adding a flag to Mat so that there was a way to check if a Mat was 475 * already performing an assembly, but the result would by more complicated and appears to offer less 476 * potential for diagnostics and correctness checking. Split assembly should be fixed once there is an 477 * interface for libraries to make asynchronous progress in "user-defined non-blocking collectives". 478 */ 479 PetscCall(MatAssemblyEnd(vs->m[i][j], type)); 480 PetscCall(MatGetNonzeroState(vs->m[i][j], &subnnzstate)); 481 } 482 } 483 nnzstate = (PetscBool)(nnzstate || vs->nnzstate[i * vs->nc + j] != subnnzstate); 484 vs->nnzstate[i * vs->nc + j] = subnnzstate; 485 } 486 } 487 if (nnzstate) A->nonzerostate++; 488 PetscFunctionReturn(PETSC_SUCCESS); 489 } 490 491 static PetscErrorCode MatAssemblyEnd_Nest(Mat A, MatAssemblyType type) 492 { 493 Mat_Nest *vs = (Mat_Nest *)A->data; 494 PetscInt i, j; 495 496 PetscFunctionBegin; 497 for (i = 0; i < vs->nr; i++) { 498 for (j = 0; j < vs->nc; j++) { 499 if (vs->m[i][j]) { 500 if (vs->splitassembly) PetscCall(MatAssemblyEnd(vs->m[i][j], type)); 501 } 502 } 503 } 504 PetscFunctionReturn(PETSC_SUCCESS); 505 } 506 507 static PetscErrorCode MatNestFindNonzeroSubMatRow(Mat A, PetscInt row, Mat *B) 508 { 509 Mat_Nest *vs = (Mat_Nest *)A->data; 510 PetscInt j; 511 Mat sub; 512 513 PetscFunctionBegin; 514 sub = (row < vs->nc) ? 
    vs->m[row][row] : (Mat)NULL; /* Prefer to find on the diagonal */
  for (j = 0; !sub && j < vs->nc; j++) sub = vs->m[row][j];
  if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Column-wise counterpart of MatNestFindNonzeroSubMatRow: finds any non-NULL
   block in column `col`, preferring the diagonal entry. */
static PetscErrorCode MatNestFindNonzeroSubMatCol(Mat A, PetscInt col, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;
  Mat       sub;

  PetscFunctionBegin;
  sub = (col < vs->nr) ? vs->m[col][col] : (Mat)NULL; /* Prefer to find on the diagonal */
  for (i = 0; !sub && i < vs->nr; i++) sub = vs->m[i][col];
  if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Determines which contiguous range [begin,end) of the index sets in `list`
   matches `is`: first a single-IS match is tried, then concatenations of
   consecutive entries. On no match, *begin and *end are left at -1. */
static PetscErrorCode MatNestFindISRange(Mat A, PetscInt n, const IS list[], IS is, PetscInt *begin, PetscInt *end)
{
  PetscInt  i, j, size, m;
  PetscBool flg;
  IS        out, concatenate[2];

  PetscFunctionBegin;
  PetscAssertPointer(list, 3);
  PetscValidHeaderSpecific(is, IS_CLASSID, 4);
  if (begin) {
    PetscAssertPointer(begin, 5);
    *begin = -1;
  }
  if (end) {
    PetscAssertPointer(end, 6);
    *end = -1;
  }
  /* exact single-IS match */
  for (i = 0; i < n; i++) {
    if (!list[i]) continue;
    PetscCall(ISEqualUnsorted(list[i], is, &flg));
    if (flg) {
      if (begin) *begin = i;
      if (end) *end = i + 1;
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  /* otherwise grow concatenations starting at each i until sizes match */
  PetscCall(ISGetSize(is, &size));
  for (i = 0; i < n - 1; i++) {
    if (!list[i]) continue;
    m = 0;
    PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, list + i, &out));
    PetscCall(ISGetSize(out, &m));
    for (j = i + 2; j < n && m < size; j++) {
      if (list[j]) {
        concatenate[0] = out;
        concatenate[1] = list[j];
        PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, concatenate, &out));
        PetscCall(ISDestroy(concatenate));
        PetscCall(ISGetSize(out, &m));
      }
    }
    if (m == size) {
      PetscCall(ISEqualUnsorted(out, is, &flg));
      if (flg) {
        if (begin) *begin = i;
        if (end) *end = j;
        PetscCall(ISDestroy(&out));
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }
    PetscCall(ISDestroy(&out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Creates a fully assembled empty (zero-preallocated) AIJ matrix with the
   local sizes of block (i,j), used as a stand-in for a NULL block. */
static PetscErrorCode MatNestFillEmptyMat_Private(Mat A, PetscInt i, PetscInt j, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  lr, lc;

  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B));
  PetscCall(ISGetLocalSize(vs->isglobal.row[i], &lr));
  PetscCall(ISGetLocalSize(vs->isglobal.col[j], &lc));
  PetscCall(MatSetSizes(*B, lr, lc, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(MatSetType(*B, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(*B, 0, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*B, 0, NULL, 0, NULL));
  PetscCall(MatSetUp(*B));
  PetscCall(MatSetOption(*B, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Returns (creating and caching on first use) the nest sub-matrix covering
   block rows [rbegin,rend) and columns [cbegin,cend). The result is composed
   on A under a key derived from the range, so repeated requests reuse it. */
static PetscErrorCode MatNestGetBlock_Private(Mat A, PetscInt rbegin, PetscInt rend, PetscInt cbegin, PetscInt cend, Mat *B)
{
  Mat_Nest  *vs = (Mat_Nest *)A->data;
  Mat       *a;
  PetscInt   i, j, k, l, nr = rend - rbegin, nc = cend - cbegin;
  char       keyname[256];
  PetscBool *b;
  PetscBool  flg;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(PetscSNPrintf(keyname, sizeof(keyname), "NestBlock_%" PetscInt_FMT "-%" PetscInt_FMT "x%" PetscInt_FMT "-%" PetscInt_FMT, rbegin, rend, cbegin, cend));
  PetscCall(PetscObjectQuery((PetscObject)A, keyname, (PetscObject *)B));
  if (*B) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscMalloc2(nr * nc, &a, nr * nc, &b));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      a[i * nc + j] = vs->m[rbegin + i][cbegin + j];
      b[i * nc + j] = PETSC_FALSE; /* b marks the placeholders we create below */
    }
  }
  if
    (nc != vs->nc && nr != vs->nr) {
    /* when both dimensions are proper sub-ranges, fill in empty placeholder
       blocks for any position whose entire row and column are NULL, so the
       new nest can infer its layout */
    for (i = 0; i < nr; i++) {
      for (j = 0; j < nc; j++) {
        flg = PETSC_FALSE;
        for (k = 0; (k < nr && !flg); k++) {
          if (a[j + k * nc]) flg = PETSC_TRUE;
        }
        if (flg) {
          flg = PETSC_FALSE;
          for (l = 0; (l < nc && !flg); l++) {
            if (a[i * nc + l]) flg = PETSC_TRUE;
          }
        }
        if (!flg) {
          b[i * nc + j] = PETSC_TRUE;
          PetscCall(MatNestFillEmptyMat_Private(A, rbegin + i, cbegin + j, a + i * nc + j));
        }
      }
    }
  }
  PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, nr != vs->nr ? NULL : vs->isglobal.row, nc, nc != vs->nc ? NULL : vs->isglobal.col, a, B));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (b[i * nc + j]) PetscCall(MatDestroy(a + i * nc + j)); /* drop our reference to placeholders */
    }
  }
  PetscCall(PetscFree2(a, b));
  (*B)->assembled = A->assembled;
  PetscCall(PetscObjectCompose((PetscObject)A, keyname, (PetscObject)*B));
  PetscCall(PetscObjectDereference((PetscObject)*B)); /* Leave the only remaining reference in the composition */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Maps (isrow, iscol) onto block ranges of the nest (via MatNestFindISRange)
   and returns either the single matching block (creating an empty placeholder
   if it is NULL) or a cached multi-block sub-nest; errors if no match. */
static PetscErrorCode MatNestFindSubMat(Mat A, struct MatNestISPair *is, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  rbegin, rend, cbegin, cend;

  PetscFunctionBegin;
  PetscCall(MatNestFindISRange(A, vs->nr, is->row, isrow, &rbegin, &rend));
  PetscCall(MatNestFindISRange(A, vs->nc, is->col, iscol, &cbegin, &cend));
  if (rend == rbegin + 1 && cend == cbegin + 1) {
    if (!vs->m[rbegin][cbegin]) PetscCall(MatNestFillEmptyMat_Private(A, rbegin, cbegin, vs->m[rbegin] + cbegin));
    *B = vs->m[rbegin][cbegin];
  } else if (rbegin != -1 && cbegin != -1) {
    PetscCall(MatNestGetBlock_Private(A, rbegin, rend, cbegin, cend, B));
  } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Could not find index set");
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   TODO: This does not actually return
   a submatrix we can modify
*/
/* Returns the existing block (or cached sub-nest) matching (isrow, iscol);
   a reference is taken for MAT_INITIAL_MATRIX, and MAT_REUSE_MATRIX requires
   the same matrix that was handed out before. */
static PetscErrorCode MatCreateSubMatrix_Nest(Mat A, IS isrow, IS iscol, MatReuse reuse, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->isglobal, isrow, iscol, &sub));
  switch (reuse) {
  case MAT_INITIAL_MATRIX:
    if (sub) PetscCall(PetscObjectReference((PetscObject)sub));
    *B = sub;
    break;
  case MAT_REUSE_MATRIX:
    PetscCheck(sub == *B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Submatrix was not used before in this call");
    break;
  case MAT_IGNORE_MATRIX: /* Nothing to do */
    break;
  case MAT_INPLACE_MATRIX: /* Nothing to do */
    SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX is not supported yet");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Local-IS variant of sub-matrix access: looks up the block by the nest's
   local index sets and returns it with an added reference. */
static PetscErrorCode MatGetLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub));
  /* We allow the submatrix to be NULL, perhaps it would be better for the user to return an empty matrix instead */
  if (sub) PetscCall(PetscObjectReference((PetscObject)sub));
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Releases a sub-matrix obtained from MatGetLocalSubMatrix_Nest, verifying it
   is the same block and that the reference count is still sane. */
static PetscErrorCode MatRestoreLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub));
  PetscCheck(*B == sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has not been gotten");
  if (sub) {
    PetscCheck(((PetscObject)sub)->refct > 1, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has had reference count decremented too many times");
    PetscCall(MatDestroy(B));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Extracts the diagonal block-by-block; NULL diagonal blocks contribute
   zeros to the corresponding sub-vector of v. */
static PetscErrorCode MatGetDiagonal_Nest(Mat A, Vec v)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    Vec bv;
    PetscCall(VecGetSubVector(v, bA->isglobal.row[i], &bv));
    if (bA->m[i][i]) {
      PetscCall(MatGetDiagonal(bA->m[i][i], bv));
    } else {
      PetscCall(VecSet(bv, 0.0));
    }
    PetscCall(VecRestoreSubVector(v, bA->isglobal.row[i], &bv));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A <- diag(l) A diag(r) applied per block; either l or r may be NULL to skip
   that side. Column sub-vectors of r are fetched once up front since each is
   reused across a whole block column. */
static PetscErrorCode MatDiagonalScale_Nest(Mat A, Vec l, Vec r)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec       bl, *br;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscCalloc1(bA->nc, &br));
  if (r) {
    for (j = 0; j < bA->nc; j++) PetscCall(VecGetSubVector(r, bA->isglobal.col[j], &br[j]));
  }
  bl = NULL;
  for (i = 0; i < bA->nr; i++) {
    if (l) PetscCall(VecGetSubVector(l, bA->isglobal.row[i], &bl));
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatDiagonalScale(bA->m[i][j], bl, br[j]));
    }
    if (l) PetscCall(VecRestoreSubVector(l, bA->isglobal.row[i], &bl));
  }
  if (r) {
    for (j = 0; j < bA->nc; j++) PetscCall(VecRestoreSubVector(r, bA->isglobal.col[j], &br[j]));
  }
  PetscCall(PetscFree(br));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A <- a*A: scales every non-NULL block */
static PetscErrorCode MatScale_Nest(Mat A, PetscScalar a)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatScale(bA->m[i][j], a));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A <- A + a*I applied to each diagonal block; every diagonal block must be
   present. Tracks per-block nonzero-state changes to bump A->nonzerostate. */
static PetscErrorCode MatShift_Nest(Mat A, PetscScalar a)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    PetscObjectState subnnzstate = 0;
    PetscCheck(bA->m[i][i], PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for shifting an empty diagonal block, insert a matrix in block (%" PetscInt_FMT ",%" PetscInt_FMT ")", i, i);
    PetscCall(MatShift(bA->m[i][i], a));
    PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate));
    nnzstate                     = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate);
    bA->nnzstate[i * bA->nc + i] = subnnzstate;
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Sets the diagonal from D block-by-block; NULL diagonal blocks are skipped.
   Per-block nonzero states are refreshed to keep A->nonzerostate accurate. */
static PetscErrorCode MatDiagonalSet_Nest(Mat A, Vec D, InsertMode is)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    PetscObjectState subnnzstate = 0;
    Vec              bv;
    PetscCall(VecGetSubVector(D, bA->isglobal.row[i], &bv));
    if (bA->m[i][i]) {
      PetscCall(MatDiagonalSet(bA->m[i][i], bv, is));
      PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate));
    }
    PetscCall(VecRestoreSubVector(D, bA->isglobal.row[i], &bv));
    nnzstate                     = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate);
    bA->nnzstate[i * bA->nc + i] = subnnzstate;
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Fills every non-NULL block with random values from rctx */
static PetscErrorCode MatSetRandom_Nest(Mat A, PetscRandom rctx)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatSetRandom(bA->m[i][j], rctx));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Creates compatible right/left work vectors as VECNEST, built from vectors of
   the first non-NULL block found in each column (right) / row (left). Errors
   if some column or row contains no blocks at all. */
static PetscErrorCode MatCreateVecs_Nest(Mat A, Vec *right, Vec *left)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *L, *R;
  MPI_Comm  comm;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  if (right) {
    /* allocate R */
    PetscCall(PetscMalloc1(bA->nc,
&R)); 863 /* Create the right vectors */ 864 for (j = 0; j < bA->nc; j++) { 865 for (i = 0; i < bA->nr; i++) { 866 if (bA->m[i][j]) { 867 PetscCall(MatCreateVecs(bA->m[i][j], &R[j], NULL)); 868 break; 869 } 870 } 871 PetscCheck(i != bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null column."); 872 } 873 PetscCall(VecCreateNest(comm, bA->nc, bA->isglobal.col, R, right)); 874 /* hand back control to the nest vector */ 875 for (j = 0; j < bA->nc; j++) PetscCall(VecDestroy(&R[j])); 876 PetscCall(PetscFree(R)); 877 } 878 879 if (left) { 880 /* allocate L */ 881 PetscCall(PetscMalloc1(bA->nr, &L)); 882 /* Create the left vectors */ 883 for (i = 0; i < bA->nr; i++) { 884 for (j = 0; j < bA->nc; j++) { 885 if (bA->m[i][j]) { 886 PetscCall(MatCreateVecs(bA->m[i][j], NULL, &L[i])); 887 break; 888 } 889 } 890 PetscCheck(j != bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null row."); 891 } 892 893 PetscCall(VecCreateNest(comm, bA->nr, bA->isglobal.row, L, left)); 894 for (i = 0; i < bA->nr; i++) PetscCall(VecDestroy(&L[i])); 895 896 PetscCall(PetscFree(L)); 897 } 898 PetscFunctionReturn(PETSC_SUCCESS); 899 } 900 901 static PetscErrorCode MatView_Nest(Mat A, PetscViewer viewer) 902 { 903 Mat_Nest *bA = (Mat_Nest *)A->data; 904 PetscBool isascii, viewSub = PETSC_FALSE; 905 PetscInt i, j; 906 907 PetscFunctionBegin; 908 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii)); 909 if (isascii) { 910 PetscCall(PetscOptionsGetBool(((PetscObject)A)->options, ((PetscObject)A)->prefix, "-mat_view_nest_sub", &viewSub, NULL)); 911 PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix object:\n")); 912 PetscCall(PetscViewerASCIIPushTab(viewer)); 913 PetscCall(PetscViewerASCIIPrintf(viewer, "type=nest, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", bA->nr, bA->nc)); 914 915 PetscCall(PetscViewerASCIIPrintf(viewer, "MatNest structure:\n")); 916 for (i = 0; i < bA->nr; i++) { 917 for (j = 0; 
j < bA->nc; j++) { 918 MatType type; 919 char name[256] = "", prefix[256] = ""; 920 PetscInt NR, NC; 921 PetscBool isNest = PETSC_FALSE; 922 923 if (!bA->m[i][j]) { 924 PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : NULL\n", i, j)); 925 continue; 926 } 927 PetscCall(MatGetSize(bA->m[i][j], &NR, &NC)); 928 PetscCall(MatGetType(bA->m[i][j], &type)); 929 if (((PetscObject)bA->m[i][j])->name) PetscCall(PetscSNPrintf(name, sizeof(name), "name=\"%s\", ", ((PetscObject)bA->m[i][j])->name)); 930 if (((PetscObject)bA->m[i][j])->prefix) PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "prefix=\"%s\", ", ((PetscObject)bA->m[i][j])->prefix)); 931 PetscCall(PetscObjectTypeCompare((PetscObject)bA->m[i][j], MATNEST, &isNest)); 932 933 PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : %s%stype=%s, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", i, j, name, prefix, type, NR, NC)); 934 935 if (isNest || viewSub) { 936 PetscCall(PetscViewerASCIIPushTab(viewer)); /* push1 */ 937 PetscCall(MatView(bA->m[i][j], viewer)); 938 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop1 */ 939 } 940 } 941 } 942 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop0 */ 943 } 944 PetscFunctionReturn(PETSC_SUCCESS); 945 } 946 947 static PetscErrorCode MatZeroEntries_Nest(Mat A) 948 { 949 Mat_Nest *bA = (Mat_Nest *)A->data; 950 PetscInt i, j; 951 952 PetscFunctionBegin; 953 for (i = 0; i < bA->nr; i++) { 954 for (j = 0; j < bA->nc; j++) { 955 if (!bA->m[i][j]) continue; 956 PetscCall(MatZeroEntries(bA->m[i][j])); 957 } 958 } 959 PetscFunctionReturn(PETSC_SUCCESS); 960 } 961 962 static PetscErrorCode MatCopy_Nest(Mat A, Mat B, MatStructure str) 963 { 964 Mat_Nest *bA = (Mat_Nest *)A->data, *bB = (Mat_Nest *)B->data; 965 PetscInt i, j, nr = bA->nr, nc = bA->nc; 966 PetscBool nnzstate = PETSC_FALSE; 967 968 PetscFunctionBegin; 969 PetscCheck(nr == bB->nr && nc == bB->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Cannot copy a 
Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") to a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bB->nr, bB->nc, nr, nc); 970 for (i = 0; i < nr; i++) { 971 for (j = 0; j < nc; j++) { 972 PetscObjectState subnnzstate = 0; 973 if (bA->m[i][j] && bB->m[i][j]) { 974 PetscCall(MatCopy(bA->m[i][j], bB->m[i][j], str)); 975 } else PetscCheck(!bA->m[i][j] && !bB->m[i][j], PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Matrix block does not exist at %" PetscInt_FMT ",%" PetscInt_FMT, i, j); 976 PetscCall(MatGetNonzeroState(bB->m[i][j], &subnnzstate)); 977 nnzstate = (PetscBool)(nnzstate || bB->nnzstate[i * nc + j] != subnnzstate); 978 bB->nnzstate[i * nc + j] = subnnzstate; 979 } 980 } 981 if (nnzstate) B->nonzerostate++; 982 PetscFunctionReturn(PETSC_SUCCESS); 983 } 984 985 static PetscErrorCode MatAXPY_Nest(Mat Y, PetscScalar a, Mat X, MatStructure str) 986 { 987 Mat_Nest *bY = (Mat_Nest *)Y->data, *bX = (Mat_Nest *)X->data; 988 PetscInt i, j, nr = bY->nr, nc = bY->nc; 989 PetscBool nnzstate = PETSC_FALSE; 990 991 PetscFunctionBegin; 992 PetscCheck(nr == bX->nr && nc == bX->nc, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Cannot AXPY a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") with a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bX->nr, bX->nc, nr, nc); 993 for (i = 0; i < nr; i++) { 994 for (j = 0; j < nc; j++) { 995 PetscObjectState subnnzstate = 0; 996 if (bY->m[i][j] && bX->m[i][j]) { 997 PetscCall(MatAXPY(bY->m[i][j], a, bX->m[i][j], str)); 998 } else if (bX->m[i][j]) { 999 Mat M; 1000 1001 PetscCheck(str == DIFFERENT_NONZERO_PATTERN || str == UNKNOWN_NONZERO_PATTERN, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Matrix block does not exist at %" PetscInt_FMT ",%" PetscInt_FMT ". 
Use DIFFERENT_NONZERO_PATTERN or UNKNOWN_NONZERO_PATTERN", i, j); 1002 PetscCall(MatDuplicate(bX->m[i][j], MAT_COPY_VALUES, &M)); 1003 PetscCall(MatNestSetSubMat(Y, i, j, M)); 1004 PetscCall(MatDestroy(&M)); 1005 } 1006 if (bY->m[i][j]) PetscCall(MatGetNonzeroState(bY->m[i][j], &subnnzstate)); 1007 nnzstate = (PetscBool)(nnzstate || bY->nnzstate[i * nc + j] != subnnzstate); 1008 bY->nnzstate[i * nc + j] = subnnzstate; 1009 } 1010 } 1011 if (nnzstate) Y->nonzerostate++; 1012 PetscFunctionReturn(PETSC_SUCCESS); 1013 } 1014 1015 static PetscErrorCode MatDuplicate_Nest(Mat A, MatDuplicateOption op, Mat *B) 1016 { 1017 Mat_Nest *bA = (Mat_Nest *)A->data; 1018 Mat *b; 1019 PetscInt i, j, nr = bA->nr, nc = bA->nc; 1020 1021 PetscFunctionBegin; 1022 PetscCall(PetscMalloc1(nr * nc, &b)); 1023 for (i = 0; i < nr; i++) { 1024 for (j = 0; j < nc; j++) { 1025 if (bA->m[i][j]) { 1026 PetscCall(MatDuplicate(bA->m[i][j], op, &b[i * nc + j])); 1027 } else { 1028 b[i * nc + j] = NULL; 1029 } 1030 } 1031 } 1032 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, bA->isglobal.row, nc, bA->isglobal.col, b, B)); 1033 /* Give the new MatNest exclusive ownership */ 1034 for (i = 0; i < nr * nc; i++) PetscCall(MatDestroy(&b[i])); 1035 PetscCall(PetscFree(b)); 1036 1037 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 1038 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 1039 PetscFunctionReturn(PETSC_SUCCESS); 1040 } 1041 1042 /* nest api */ 1043 static PetscErrorCode MatNestGetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat *mat) 1044 { 1045 Mat_Nest *bA = (Mat_Nest *)A->data; 1046 1047 PetscFunctionBegin; 1048 PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1); 1049 PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1); 1050 *mat = bA->m[idxm][jdxm]; 
1051 PetscFunctionReturn(PETSC_SUCCESS); 1052 } 1053 1054 /*@ 1055 MatNestGetSubMat - Returns a single, sub-matrix from a `MATNEST` 1056 1057 Not Collective 1058 1059 Input Parameters: 1060 + A - `MATNEST` matrix 1061 . idxm - index of the matrix within the nest matrix 1062 - jdxm - index of the matrix within the nest matrix 1063 1064 Output Parameter: 1065 . sub - matrix at index `idxm`, `jdxm` within the nest matrix 1066 1067 Level: developer 1068 1069 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestSetSubMat()`, 1070 `MatNestGetLocalISs()`, `MatNestGetISs()` 1071 @*/ 1072 PetscErrorCode MatNestGetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat *sub) 1073 { 1074 PetscFunctionBegin; 1075 PetscUseMethod(A, "MatNestGetSubMat_C", (Mat, PetscInt, PetscInt, Mat *), (A, idxm, jdxm, sub)); 1076 PetscFunctionReturn(PETSC_SUCCESS); 1077 } 1078 1079 static PetscErrorCode MatNestSetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat mat) 1080 { 1081 Mat_Nest *bA = (Mat_Nest *)A->data; 1082 PetscInt m, n, M, N, mi, ni, Mi, Ni; 1083 1084 PetscFunctionBegin; 1085 PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1); 1086 PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1); 1087 PetscCall(MatGetLocalSize(mat, &m, &n)); 1088 PetscCall(MatGetSize(mat, &M, &N)); 1089 PetscCall(ISGetLocalSize(bA->isglobal.row[idxm], &mi)); 1090 PetscCall(ISGetSize(bA->isglobal.row[idxm], &Mi)); 1091 PetscCall(ISGetLocalSize(bA->isglobal.col[jdxm], &ni)); 1092 PetscCall(ISGetSize(bA->isglobal.col[jdxm], &Ni)); 1093 PetscCheck(M == Mi && N == Ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" 
PetscInt_FMT ")", M, N, Mi, Ni); 1094 PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix local dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, mi, ni); 1095 1096 /* do not increase object state */ 1097 if (mat == bA->m[idxm][jdxm]) PetscFunctionReturn(PETSC_SUCCESS); 1098 1099 PetscCall(PetscObjectReference((PetscObject)mat)); 1100 PetscCall(MatDestroy(&bA->m[idxm][jdxm])); 1101 bA->m[idxm][jdxm] = mat; 1102 PetscCall(PetscObjectStateIncrease((PetscObject)A)); 1103 PetscCall(MatGetNonzeroState(mat, &bA->nnzstate[idxm * bA->nc + jdxm])); 1104 A->nonzerostate++; 1105 PetscFunctionReturn(PETSC_SUCCESS); 1106 } 1107 1108 /*@ 1109 MatNestSetSubMat - Set a single submatrix in the `MATNEST` 1110 1111 Logically Collective 1112 1113 Input Parameters: 1114 + A - `MATNEST` matrix 1115 . idxm - index of the matrix within the nest matrix 1116 . jdxm - index of the matrix within the nest matrix 1117 - sub - matrix at index `idxm`, `jdxm` within the nest matrix 1118 1119 Level: developer 1120 1121 Notes: 1122 The new submatrix must have the same size and communicator as that block of the nest. 1123 1124 This increments the reference count of the submatrix. 
1125 1126 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestSetSubMats()`, `MatNestGetSubMats()`, `MatNestGetLocalISs()`, `MatCreateNest()`, 1127 `MatNestGetSubMat()`, `MatNestGetISs()`, `MatNestGetSize()` 1128 @*/ 1129 PetscErrorCode MatNestSetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat sub) 1130 { 1131 PetscFunctionBegin; 1132 PetscUseMethod(A, "MatNestSetSubMat_C", (Mat, PetscInt, PetscInt, Mat), (A, idxm, jdxm, sub)); 1133 PetscFunctionReturn(PETSC_SUCCESS); 1134 } 1135 1136 static PetscErrorCode MatNestGetSubMats_Nest(Mat A, PetscInt *M, PetscInt *N, Mat ***mat) 1137 { 1138 Mat_Nest *bA = (Mat_Nest *)A->data; 1139 1140 PetscFunctionBegin; 1141 if (M) *M = bA->nr; 1142 if (N) *N = bA->nc; 1143 if (mat) *mat = bA->m; 1144 PetscFunctionReturn(PETSC_SUCCESS); 1145 } 1146 1147 /*@C 1148 MatNestGetSubMats - Returns the entire two dimensional array of matrices defining a `MATNEST` matrix. 1149 1150 Not Collective 1151 1152 Input Parameter: 1153 . A - nest matrix 1154 1155 Output Parameters: 1156 + M - number of rows in the nest matrix 1157 . N - number of cols in the nest matrix 1158 - mat - 2d array of matrices 1159 1160 Level: developer 1161 1162 Note: 1163 The user should not free the array `mat`. 1164 1165 Fortran Notes: 1166 This routine has a calling sequence 1167 $ call MatNestGetSubMats(A, M, N, mat, ierr) 1168 where the space allocated for the optional argument `mat` is assumed large enough (if provided). 
1169 1170 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatCreateNest()`, 1171 `MatNestSetSubMats()`, `MatNestGetISs()`, `MatNestSetSubMat()` 1172 @*/ 1173 PetscErrorCode MatNestGetSubMats(Mat A, PetscInt *M, PetscInt *N, Mat ***mat) 1174 { 1175 PetscFunctionBegin; 1176 PetscUseMethod(A, "MatNestGetSubMats_C", (Mat, PetscInt *, PetscInt *, Mat ***), (A, M, N, mat)); 1177 PetscFunctionReturn(PETSC_SUCCESS); 1178 } 1179 1180 static PetscErrorCode MatNestGetSize_Nest(Mat A, PetscInt *M, PetscInt *N) 1181 { 1182 Mat_Nest *bA = (Mat_Nest *)A->data; 1183 1184 PetscFunctionBegin; 1185 if (M) *M = bA->nr; 1186 if (N) *N = bA->nc; 1187 PetscFunctionReturn(PETSC_SUCCESS); 1188 } 1189 1190 /*@ 1191 MatNestGetSize - Returns the size of the `MATNEST` matrix. 1192 1193 Not Collective 1194 1195 Input Parameter: 1196 . A - `MATNEST` matrix 1197 1198 Output Parameters: 1199 + M - number of rows in the nested mat 1200 - N - number of cols in the nested mat 1201 1202 Level: developer 1203 1204 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestGetLocalISs()`, 1205 `MatNestGetISs()` 1206 @*/ 1207 PetscErrorCode MatNestGetSize(Mat A, PetscInt *M, PetscInt *N) 1208 { 1209 PetscFunctionBegin; 1210 PetscUseMethod(A, "MatNestGetSize_C", (Mat, PetscInt *, PetscInt *), (A, M, N)); 1211 PetscFunctionReturn(PETSC_SUCCESS); 1212 } 1213 1214 static PetscErrorCode MatNestGetISs_Nest(Mat A, IS rows[], IS cols[]) 1215 { 1216 Mat_Nest *vs = (Mat_Nest *)A->data; 1217 PetscInt i; 1218 1219 PetscFunctionBegin; 1220 if (rows) 1221 for (i = 0; i < vs->nr; i++) rows[i] = vs->isglobal.row[i]; 1222 if (cols) 1223 for (i = 0; i < vs->nc; i++) cols[i] = vs->isglobal.col[i]; 1224 PetscFunctionReturn(PETSC_SUCCESS); 1225 } 1226 1227 /*@C 1228 MatNestGetISs - Returns the index sets partitioning the row and column spaces of a `MATNEST` 1229 1230 Not Collective 1231 1232 Input 
Parameter: 1233 . A - `MATNEST` matrix 1234 1235 Output Parameters: 1236 + rows - array of row index sets 1237 - cols - array of column index sets 1238 1239 Level: advanced 1240 1241 Note: 1242 The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s. 1243 1244 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetLocalISs()`, 1245 `MatCreateNest()`, `MatNestSetSubMats()` 1246 @*/ 1247 PetscErrorCode MatNestGetISs(Mat A, IS rows[], IS cols[]) 1248 { 1249 PetscFunctionBegin; 1250 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1251 PetscUseMethod(A, "MatNestGetISs_C", (Mat, IS[], IS[]), (A, rows, cols)); 1252 PetscFunctionReturn(PETSC_SUCCESS); 1253 } 1254 1255 static PetscErrorCode MatNestGetLocalISs_Nest(Mat A, IS rows[], IS cols[]) 1256 { 1257 Mat_Nest *vs = (Mat_Nest *)A->data; 1258 PetscInt i; 1259 1260 PetscFunctionBegin; 1261 if (rows) 1262 for (i = 0; i < vs->nr; i++) rows[i] = vs->islocal.row[i]; 1263 if (cols) 1264 for (i = 0; i < vs->nc; i++) cols[i] = vs->islocal.col[i]; 1265 PetscFunctionReturn(PETSC_SUCCESS); 1266 } 1267 1268 /*@C 1269 MatNestGetLocalISs - Returns the index sets partitioning the row and column spaces of a `MATNEST` 1270 1271 Not Collective 1272 1273 Input Parameter: 1274 . A - `MATNEST` matrix 1275 1276 Output Parameters: 1277 + rows - array of row index sets (or `NULL` to ignore) 1278 - cols - array of column index sets (or `NULL` to ignore) 1279 1280 Level: advanced 1281 1282 Note: 1283 The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s. 
.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetISs()`, `MatCreateNest()`,
          `MatNestSetSubMats()`, `MatNestSetSubMat()`
@*/
PetscErrorCode MatNestGetLocalISs(Mat A, IS rows[], IS cols[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatNestGetLocalISs_C", (Mat, IS[], IS[]), (A, rows, cols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Choose which MatCreateVecs implementation the nest uses: the nest-specific one for
   VECNEST, otherwise fall back to the default (NULL entry in the ops table) */
static PetscErrorCode MatNestSetVecType_Nest(Mat A, VecType vtype)
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscStrcmp(vtype, VECNEST, &flg));
  /* In reality, this only distinguishes VECNEST and "other" */
  if (flg) A->ops->getvecs = MatCreateVecs_Nest;
  else A->ops->getvecs = (PetscErrorCode(*)(Mat, Vec *, Vec *))0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestSetVecType - Sets the type of `Vec` returned by `MatCreateVecs()`

  Not Collective

  Input Parameters:
+ A     - `MATNEST` matrix
- vtype - `VecType` to use for creating vectors

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateVecs()`, `MatCreateNest()`, `VecType`
@*/
PetscErrorCode MatNestSetVecType(Mat A, VecType vtype)
{
  PetscFunctionBegin;
  PetscTryMethod(A, "MatNestSetVecType_C", (Mat, VecType), (A, vtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Core setup: resets any previous nest contents, references the supplied blocks,
   builds the row/column index sets, computes layouts, and initializes per-block
   nonzero-state tracking. The order of operations matters: the submatrix array must
   be populated before MatSetUp_NestIS_Private inspects it for sizes. */
static PetscErrorCode MatNestSetSubMats_Nest(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[])
{
  Mat_Nest *s = (Mat_Nest *)A->data;
  PetscInt  i, j, m, n, M, N;
  PetscBool cong, isstd, sametype = PETSC_FALSE;
  VecType   vtype, type;

  PetscFunctionBegin;
  /* discard any previously-set submatrices and index sets */
  PetscCall(MatReset_Nest(A));

  s->nr = nr;
  s->nc = nc;

  /* Create space for submatrices */
  PetscCall(PetscMalloc1(nr, &s->m));
  for (i = 0; i < nr; i++) PetscCall(PetscMalloc1(nc, &s->m[i]));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      s->m[i][j] = a[i * nc + j];
      /* NULL entries denote empty blocks; only real blocks are referenced */
      if (a[i * nc + j]) PetscCall(PetscObjectReference((PetscObject)a[i * nc + j]));
    }
  }
  PetscCall(MatGetVecType(A, &vtype));
  PetscCall(PetscStrcmp(vtype, VECSTANDARD, &isstd));
  if (isstd) {
    /* check if all blocks have the same vectype */
    vtype = NULL;
    for (i = 0; i < nr; i++) {
      for (j = 0; j < nc; j++) {
        if (a[i * nc + j]) {
          if (!vtype) { /* first visited block */
            PetscCall(MatGetVecType(a[i * nc + j], &vtype));
            sametype = PETSC_TRUE;
          } else if (sametype) {
            PetscCall(MatGetVecType(a[i * nc + j], &type));
            PetscCall(PetscStrcmp(vtype, type, &sametype));
          }
        }
      }
    }
    if (sametype) { /* propagate vectype */
      PetscCall(MatSetVecType(A, vtype));
    }
  }

  /* build (or adopt) the global and local index sets describing the splitting */
  PetscCall(MatSetUp_NestIS_Private(A, nr, is_row, nc, is_col));

  /* row_len/col_len of -1 mean "not yet computed" */
  PetscCall(PetscMalloc1(nr, &s->row_len));
  PetscCall(PetscMalloc1(nc, &s->col_len));
  for (i = 0; i < nr; i++) s->row_len[i] = -1;
  for (j = 0; j < nc; j++) s->col_len[j] = -1;

  /* snapshot each block's nonzero state so later changes can be detected */
  PetscCall(PetscCalloc1(nr * nc, &s->nnzstate));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (s->m[i][j]) PetscCall(MatGetNonzeroState(s->m[i][j], &s->nnzstate[i * nc + j]));
    }
  }

  /* aggregate local/global sizes over the index sets */
  PetscCall(MatNestGetSizes_Private(A, &m, &n, &M, &N));

  PetscCall(PetscLayoutSetSize(A->rmap, M));
  PetscCall(PetscLayoutSetLocalSize(A->rmap, m));
  PetscCall(PetscLayoutSetSize(A->cmap, N));
  PetscCall(PetscLayoutSetLocalSize(A->cmap, n));

  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));

  /* disable operations that are not supported for non-square matrices,
     or matrices for which is_row != is_col */
  PetscCall(MatHasCongruentLayouts(A, &cong));
  if (cong && nr != nc) cong = PETSC_FALSE;
  if (cong) {
    for (i = 0; cong && i < nr; i++) PetscCall(ISEqualUnsorted(s->isglobal.row[i], s->isglobal.col[i], &cong));
  }
  if (!cong) {
    A->ops->missingdiagonal = NULL;
    A->ops->getdiagonal     = NULL;
    A->ops->shift           = NULL;
    A->ops->diagonalset     = NULL;
  }

  /* scratch subvectors used by MatMult et al., created lazily */
  PetscCall(PetscCalloc2(nr, &s->left, nc, &s->right));
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestSetSubMats - Sets the nested submatrices in a `MATNEST`

  Collective

  Input Parameters:
+ A      - `MATNEST` matrix
. nr     - number of nested row blocks
. is_row - index sets for each nested row block, or `NULL` to make contiguous
. nc     - number of nested column blocks
. is_col - index sets for each nested column block, or `NULL` to make contiguous
- a      - row-aligned array of nr*nc submatrices, empty submatrices can be passed using `NULL`

  Level: advanced

  Note:
  This always resets any submatrix information previously set

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`, `MatNestGetSubMats()`
@*/
PetscErrorCode MatNestSetSubMats(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscCheck(nr >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of rows cannot be negative");
  if (nr && is_row) {
    PetscAssertPointer(is_row, 3);
    for (i = 0; i < nr; i++) PetscValidHeaderSpecific(is_row[i], IS_CLASSID, 3);
  }
  PetscCheck(nc >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of columns cannot be negative");
  if (nc && is_col) {
    PetscAssertPointer(is_col, 5);
    for (i = 0; i < nc; i++) PetscValidHeaderSpecific(is_col[i], IS_CLASSID, 5);
  }
  if (nr * nc > 0) PetscAssertPointer(a, 6);
  PetscUseMethod(A, "MatNestSetSubMats_C", (Mat, PetscInt, const IS[], PetscInt, const IS[], const Mat[]), (A, nr, is_row, nc, is_col, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Build a single local-to-global mapping that aggregates the per-block local ISs
   (rows when colflg is PETSC_FALSE, columns otherwise). Returns *ltog = NULL when no
   block supplies a local IS, in which case no aggregate map is needed. */
static PetscErrorCode MatNestCreateAggregateL2G_Private(Mat A, PetscInt n, const IS islocal[], const IS isglobal[], PetscBool colflg, ISLocalToGlobalMapping *ltog)
{
  PetscBool flg;
  PetscInt  i, j, m, mi, *ix;

  PetscFunctionBegin;
  *ltog = NULL;
  /* first pass: total local size m, and whether any block has a local IS at all */
  for (i = 0, m = 0, flg = PETSC_FALSE; i < n; i++) {
    if (islocal[i]) {
      PetscCall(ISGetLocalSize(islocal[i], &mi));
      flg = PETSC_TRUE; /* We found a non-trivial entry */
    } else {
      PetscCall(ISGetLocalSize(isglobal[i], &mi));
    }
    m += mi;
  }
  if (!flg) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscMalloc1(m, &ix));
  for (i = 0, m = 0; i < n; i++) {
    ISLocalToGlobalMapping smap = NULL;
    Mat                    sub  = NULL;
    PetscSF                sf;
    PetscLayout            map;
    const PetscInt        *ix2;

    if (!colflg) {
      PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub));
    } else {
      PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub));
    }
    if (sub) {
      if (!colflg) {
        PetscCall(MatGetLocalToGlobalMapping(sub, &smap, NULL));
      } else {
        PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &smap));
      }
    }
    /*
       Now we need to extract the monolithic global indices that correspond to the given split global indices.
       In many/most cases, we only want MatGetLocalSubMatrix() to work, in which case we only need to know the size of the local spaces.
1499 */ 1500 PetscCall(ISGetIndices(isglobal[i], &ix2)); 1501 if (islocal[i]) { 1502 PetscInt *ilocal, *iremote; 1503 PetscInt mil, nleaves; 1504 1505 PetscCall(ISGetLocalSize(islocal[i], &mi)); 1506 PetscCheck(smap, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing local to global map"); 1507 for (j = 0; j < mi; j++) ix[m + j] = j; 1508 PetscCall(ISLocalToGlobalMappingApply(smap, mi, ix + m, ix + m)); 1509 1510 /* PetscSFSetGraphLayout does not like negative indices */ 1511 PetscCall(PetscMalloc2(mi, &ilocal, mi, &iremote)); 1512 for (j = 0, nleaves = 0; j < mi; j++) { 1513 if (ix[m + j] < 0) continue; 1514 ilocal[nleaves] = j; 1515 iremote[nleaves] = ix[m + j]; 1516 nleaves++; 1517 } 1518 PetscCall(ISGetLocalSize(isglobal[i], &mil)); 1519 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &sf)); 1520 PetscCall(PetscLayoutCreate(PetscObjectComm((PetscObject)A), &map)); 1521 PetscCall(PetscLayoutSetLocalSize(map, mil)); 1522 PetscCall(PetscLayoutSetUp(map)); 1523 PetscCall(PetscSFSetGraphLayout(sf, map, nleaves, ilocal, PETSC_USE_POINTER, iremote)); 1524 PetscCall(PetscLayoutDestroy(&map)); 1525 PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1526 PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1527 PetscCall(PetscSFDestroy(&sf)); 1528 PetscCall(PetscFree2(ilocal, iremote)); 1529 } else { 1530 PetscCall(ISGetLocalSize(isglobal[i], &mi)); 1531 for (j = 0; j < mi; j++) ix[m + j] = ix2[i]; 1532 } 1533 PetscCall(ISRestoreIndices(isglobal[i], &ix2)); 1534 m += mi; 1535 } 1536 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, m, ix, PETSC_OWN_POINTER, ltog)); 1537 PetscFunctionReturn(PETSC_SUCCESS); 1538 } 1539 1540 /* If an IS was provided, there is nothing Nest needs to do, otherwise Nest will build a strided IS */ 1541 /* 1542 nprocessors = NP 1543 Nest x^T = ((g_0,g_1,...g_nprocs-1), (h_0,h_1,...h_NP-1)) 1544 proc 0: => (g_0,h_0,) 1545 proc 1: => (g_1,h_1,) 1546 ... 
1547 proc nprocs-1: => (g_NP-1,h_NP-1,) 1548 1549 proc 0: proc 1: proc nprocs-1: 1550 is[0] = (0,1,2,...,nlocal(g_0)-1) (0,1,...,nlocal(g_1)-1) (0,1,...,nlocal(g_NP-1)) 1551 1552 proc 0: 1553 is[1] = (nlocal(g_0),nlocal(g_0)+1,...,nlocal(g_0)+nlocal(h_0)-1) 1554 proc 1: 1555 is[1] = (nlocal(g_1),nlocal(g_1)+1,...,nlocal(g_1)+nlocal(h_1)-1) 1556 1557 proc NP-1: 1558 is[1] = (nlocal(g_NP-1),nlocal(g_NP-1)+1,...,nlocal(g_NP-1)+nlocal(h_NP-1)-1) 1559 */ 1560 static PetscErrorCode MatSetUp_NestIS_Private(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[]) 1561 { 1562 Mat_Nest *vs = (Mat_Nest *)A->data; 1563 PetscInt i, j, offset, n, nsum, bs; 1564 Mat sub = NULL; 1565 1566 PetscFunctionBegin; 1567 PetscCall(PetscMalloc1(nr, &vs->isglobal.row)); 1568 PetscCall(PetscMalloc1(nc, &vs->isglobal.col)); 1569 if (is_row) { /* valid IS is passed in */ 1570 /* refs on is[] are incremented */ 1571 for (i = 0; i < vs->nr; i++) { 1572 PetscCall(PetscObjectReference((PetscObject)is_row[i])); 1573 1574 vs->isglobal.row[i] = is_row[i]; 1575 } 1576 } else { /* Create the ISs by inspecting sizes of a submatrix in each row */ 1577 nsum = 0; 1578 for (i = 0; i < vs->nr; i++) { /* Add up the local sizes to compute the aggregate offset */ 1579 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1580 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in row %" PetscInt_FMT, i); 1581 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1582 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1583 nsum += n; 1584 } 1585 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1586 offset -= nsum; 1587 for (i = 0; i < vs->nr; i++) { 1588 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1589 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1590 PetscCall(MatGetBlockSizes(sub, &bs, NULL)); 1591 
PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.row[i])); 1592 PetscCall(ISSetBlockSize(vs->isglobal.row[i], bs)); 1593 offset += n; 1594 } 1595 } 1596 1597 if (is_col) { /* valid IS is passed in */ 1598 /* refs on is[] are incremented */ 1599 for (j = 0; j < vs->nc; j++) { 1600 PetscCall(PetscObjectReference((PetscObject)is_col[j])); 1601 1602 vs->isglobal.col[j] = is_col[j]; 1603 } 1604 } else { /* Create the ISs by inspecting sizes of a submatrix in each column */ 1605 offset = A->cmap->rstart; 1606 nsum = 0; 1607 for (j = 0; j < vs->nc; j++) { 1608 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1609 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in column %" PetscInt_FMT, i); 1610 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1611 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1612 nsum += n; 1613 } 1614 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1615 offset -= nsum; 1616 for (j = 0; j < vs->nc; j++) { 1617 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1618 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1619 PetscCall(MatGetBlockSizes(sub, NULL, &bs)); 1620 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.col[j])); 1621 PetscCall(ISSetBlockSize(vs->isglobal.col[j], bs)); 1622 offset += n; 1623 } 1624 } 1625 1626 /* Set up the local ISs */ 1627 PetscCall(PetscMalloc1(vs->nr, &vs->islocal.row)); 1628 PetscCall(PetscMalloc1(vs->nc, &vs->islocal.col)); 1629 for (i = 0, offset = 0; i < vs->nr; i++) { 1630 IS isloc; 1631 ISLocalToGlobalMapping rmap = NULL; 1632 PetscInt nlocal, bs; 1633 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1634 if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, &rmap, NULL)); 1635 if (rmap) { 1636 PetscCall(MatGetBlockSizes(sub, &bs, NULL)); 1637 
PetscCall(ISLocalToGlobalMappingGetSize(rmap, &nlocal)); 1638 PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc)); 1639 PetscCall(ISSetBlockSize(isloc, bs)); 1640 } else { 1641 nlocal = 0; 1642 isloc = NULL; 1643 } 1644 vs->islocal.row[i] = isloc; 1645 offset += nlocal; 1646 } 1647 for (i = 0, offset = 0; i < vs->nc; i++) { 1648 IS isloc; 1649 ISLocalToGlobalMapping cmap = NULL; 1650 PetscInt nlocal, bs; 1651 PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub)); 1652 if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &cmap)); 1653 if (cmap) { 1654 PetscCall(MatGetBlockSizes(sub, NULL, &bs)); 1655 PetscCall(ISLocalToGlobalMappingGetSize(cmap, &nlocal)); 1656 PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc)); 1657 PetscCall(ISSetBlockSize(isloc, bs)); 1658 } else { 1659 nlocal = 0; 1660 isloc = NULL; 1661 } 1662 vs->islocal.col[i] = isloc; 1663 offset += nlocal; 1664 } 1665 1666 /* Set up the aggregate ISLocalToGlobalMapping */ 1667 { 1668 ISLocalToGlobalMapping rmap, cmap; 1669 PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nr, vs->islocal.row, vs->isglobal.row, PETSC_FALSE, &rmap)); 1670 PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nc, vs->islocal.col, vs->isglobal.col, PETSC_TRUE, &cmap)); 1671 if (rmap && cmap) PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap)); 1672 PetscCall(ISLocalToGlobalMappingDestroy(&rmap)); 1673 PetscCall(ISLocalToGlobalMappingDestroy(&cmap)); 1674 } 1675 1676 if (PetscDefined(USE_DEBUG)) { 1677 for (i = 0; i < vs->nr; i++) { 1678 for (j = 0; j < vs->nc; j++) { 1679 PetscInt m, n, M, N, mi, ni, Mi, Ni; 1680 Mat B = vs->m[i][j]; 1681 if (!B) continue; 1682 PetscCall(MatGetSize(B, &M, &N)); 1683 PetscCall(MatGetLocalSize(B, &m, &n)); 1684 PetscCall(ISGetSize(vs->isglobal.row[i], &Mi)); 1685 PetscCall(ISGetSize(vs->isglobal.col[j], &Ni)); 1686 PetscCall(ISGetLocalSize(vs->isglobal.row[i], &mi)); 1687 PetscCall(ISGetLocalSize(vs->isglobal.col[j], &ni)); 1688 PetscCheck(M == Mi && N == Ni, 
PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Global sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", M, N, i, j, Mi, Ni); 1689 PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Local sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, i, j, mi, ni); 1690 } 1691 } 1692 } 1693 1694 /* Set A->assembled if all non-null blocks are currently assembled */ 1695 for (i = 0; i < vs->nr; i++) { 1696 for (j = 0; j < vs->nc; j++) { 1697 if (vs->m[i][j] && !vs->m[i][j]->assembled) PetscFunctionReturn(PETSC_SUCCESS); 1698 } 1699 } 1700 A->assembled = PETSC_TRUE; 1701 PetscFunctionReturn(PETSC_SUCCESS); 1702 } 1703 1704 /*@C 1705 MatCreateNest - Creates a new `MATNEST` matrix containing several nested submatrices, each stored separately 1706 1707 Collective 1708 1709 Input Parameters: 1710 + comm - Communicator for the new `MATNEST` 1711 . nr - number of nested row blocks 1712 . is_row - index sets for each nested row block, or `NULL` to make contiguous 1713 . nc - number of nested column blocks 1714 . is_col - index sets for each nested column block, or `NULL` to make contiguous 1715 - a - row-aligned array of nr*nc submatrices, empty submatrices can be passed using `NULL` 1716 1717 Output Parameter: 1718 . 
B - new matrix

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `VecCreateNest()`, `DMCreateMatrix()`, `MatNestSetSubMat()`,
          `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatNestGetSize()`,
          `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()`
@*/
PetscErrorCode MatCreateNest(MPI_Comm comm, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[], Mat *B)
{
  Mat A;

  PetscFunctionBegin;
  /* clear the output early so callers never see a stale pointer on error */
  *B = NULL;
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetType(A, MATNEST));
  /* the nest stores no values itself; mark as preallocated so assembly is permitted */
  A->preallocated = PETSC_TRUE;
  PetscCall(MatNestSetSubMats(A, nr, is_row, nc, is_col, a));
  *B = A;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Fast conversion of a nest of SeqAIJ blocks to a single SeqAIJ matrix by merging the
   per-block CSR structures directly (continues beyond this point) */
static PetscErrorCode MatConvert_Nest_SeqAIJ_fast(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat_Nest     *nest = (Mat_Nest *)A->data;
  Mat          *trans;
  PetscScalar **avv;
  PetscScalar  *vv;
  PetscInt    **aii, **ajj;
  PetscInt     *ii, *jj, *ci;
  PetscInt      nr, nc, nnz, i, j;
  PetscBool     done;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &nr, &nc));
  if (reuse == MAT_REUSE_MATRIX) {
    PetscInt rnr;

    /* reuse path: borrow the existing CSR arrays of *newmat and overwrite them in place */
    PetscCall(MatGetRowIJ(*newmat, 0, PETSC_FALSE, PETSC_FALSE, &rnr, (const PetscInt **)&ii, (const PetscInt **)&jj, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "MatGetRowIJ");
    PetscCheck(rnr == nr, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of rows");
    PetscCall(MatSeqAIJGetArray(*newmat, &vv));
  }
  /* extract CSR for nested SeqAIJ matrices */
  nnz = 0;
  PetscCall(PetscCalloc4(nest->nr * nest->nc, &aii, nest->nr * nest->nc, &ajj, nest->nr * nest->nc, &avv, nest->nr * nest->nc, &trans));
  for (i = 0; i < nest->nr; ++i) {
    for (j = 0; j < nest->nc; ++j) {
      Mat B = nest->m[i][j];
      if (B) {
        PetscScalar *naa;
        PetscInt    *nii, *njj, nnr;
1770 PetscBool istrans; 1771 1772 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans)); 1773 if (istrans) { 1774 Mat Bt; 1775 1776 PetscCall(MatTransposeGetMat(B, &Bt)); 1777 PetscCall(MatTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j])); 1778 B = trans[i * nest->nc + j]; 1779 } else { 1780 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans)); 1781 if (istrans) { 1782 Mat Bt; 1783 1784 PetscCall(MatHermitianTransposeGetMat(B, &Bt)); 1785 PetscCall(MatHermitianTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j])); 1786 B = trans[i * nest->nc + j]; 1787 } 1788 } 1789 PetscCall(MatGetRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&nii, (const PetscInt **)&njj, &done)); 1790 PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatGetRowIJ"); 1791 PetscCall(MatSeqAIJGetArray(B, &naa)); 1792 nnz += nii[nnr]; 1793 1794 aii[i * nest->nc + j] = nii; 1795 ajj[i * nest->nc + j] = njj; 1796 avv[i * nest->nc + j] = naa; 1797 } 1798 } 1799 } 1800 if (reuse != MAT_REUSE_MATRIX) { 1801 PetscCall(PetscMalloc1(nr + 1, &ii)); 1802 PetscCall(PetscMalloc1(nnz, &jj)); 1803 PetscCall(PetscMalloc1(nnz, &vv)); 1804 } else { 1805 PetscCheck(nnz == ii[nr], PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of nonzeros"); 1806 } 1807 1808 /* new row pointer */ 1809 PetscCall(PetscArrayzero(ii, nr + 1)); 1810 for (i = 0; i < nest->nr; ++i) { 1811 PetscInt ncr, rst; 1812 1813 PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL)); 1814 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr)); 1815 for (j = 0; j < nest->nc; ++j) { 1816 if (aii[i * nest->nc + j]) { 1817 PetscInt *nii = aii[i * nest->nc + j]; 1818 PetscInt ir; 1819 1820 for (ir = rst; ir < ncr + rst; ++ir) { 1821 ii[ir + 1] += nii[1] - nii[0]; 1822 nii++; 1823 } 1824 } 1825 } 1826 } 1827 for (i = 0; i < nr; i++) ii[i + 1] += ii[i]; 1828 1829 /* construct CSR for the new matrix */ 
1830 PetscCall(PetscCalloc1(nr, &ci)); 1831 for (i = 0; i < nest->nr; ++i) { 1832 PetscInt ncr, rst; 1833 1834 PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL)); 1835 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr)); 1836 for (j = 0; j < nest->nc; ++j) { 1837 if (aii[i * nest->nc + j]) { 1838 PetscScalar *nvv = avv[i * nest->nc + j]; 1839 PetscInt *nii = aii[i * nest->nc + j]; 1840 PetscInt *njj = ajj[i * nest->nc + j]; 1841 PetscInt ir, cst; 1842 1843 PetscCall(ISStrideGetInfo(nest->isglobal.col[j], &cst, NULL)); 1844 for (ir = rst; ir < ncr + rst; ++ir) { 1845 PetscInt ij, rsize = nii[1] - nii[0], ist = ii[ir] + ci[ir]; 1846 1847 for (ij = 0; ij < rsize; ij++) { 1848 jj[ist + ij] = *njj + cst; 1849 vv[ist + ij] = *nvv; 1850 njj++; 1851 nvv++; 1852 } 1853 ci[ir] += rsize; 1854 nii++; 1855 } 1856 } 1857 } 1858 } 1859 PetscCall(PetscFree(ci)); 1860 1861 /* restore info */ 1862 for (i = 0; i < nest->nr; ++i) { 1863 for (j = 0; j < nest->nc; ++j) { 1864 Mat B = nest->m[i][j]; 1865 if (B) { 1866 PetscInt nnr = 0, k = i * nest->nc + j; 1867 1868 B = (trans[k] ? 
trans[k] : B); 1869 PetscCall(MatRestoreRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&aii[k], (const PetscInt **)&ajj[k], &done)); 1870 PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatRestoreRowIJ"); 1871 PetscCall(MatSeqAIJRestoreArray(B, &avv[k])); 1872 PetscCall(MatDestroy(&trans[k])); 1873 } 1874 } 1875 } 1876 PetscCall(PetscFree4(aii, ajj, avv, trans)); 1877 1878 /* finalize newmat */ 1879 if (reuse == MAT_INITIAL_MATRIX) { 1880 PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, newmat)); 1881 } else if (reuse == MAT_INPLACE_MATRIX) { 1882 Mat B; 1883 1884 PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, &B)); 1885 PetscCall(MatHeaderReplace(A, &B)); 1886 } 1887 PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY)); 1888 PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY)); 1889 { 1890 Mat_SeqAIJ *a = (Mat_SeqAIJ *)((*newmat)->data); 1891 a->free_a = PETSC_TRUE; 1892 a->free_ij = PETSC_TRUE; 1893 } 1894 PetscFunctionReturn(PETSC_SUCCESS); 1895 } 1896 1897 PETSC_INTERN PetscErrorCode MatAXPY_Dense_Nest(Mat Y, PetscScalar a, Mat X) 1898 { 1899 Mat_Nest *nest = (Mat_Nest *)X->data; 1900 PetscInt i, j, k, rstart; 1901 PetscBool flg; 1902 1903 PetscFunctionBegin; 1904 /* Fill by row */ 1905 for (j = 0; j < nest->nc; ++j) { 1906 /* Using global column indices and ISAllGather() is not scalable. 
     */
    IS              bNis;
    PetscInt        bN;
    const PetscInt *bNindices;
    /* gather the global column indices of block column j onto every process */
    PetscCall(ISAllGather(nest->isglobal.col[j], &bNis));
    PetscCall(ISGetSize(bNis, &bN));
    PetscCall(ISGetIndices(bNis, &bNindices));
    for (i = 0; i < nest->nr; ++i) {
      Mat             B = nest->m[i][j], D = NULL;
      PetscInt        bm, br;
      const PetscInt *bmindices;
      if (!B) continue; /* empty block contributes nothing */
      /* replace a virtual (Hermitian) transpose wrapper by an explicit matrix of the wrapped type */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, ""));
      if (flg) {
        /* exactly one of the two methods exists on B and sets D to the wrapped matrix */
        PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /* convert SBAIJ blocks to BAIJ, presumably so MatGetRow() returns complete rows
         rather than only the stored triangular part -- NOTE(review): confirm rationale */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
      if (flg) {
        if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D));
        else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm));
      PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices));
      PetscCall(MatGetOwnershipRange(B, &rstart, NULL));
      for (br = 0; br < bm; ++br) {
        PetscInt           row = bmindices[br], brncols, *cols;
        const PetscInt    *brcols;
        const PetscScalar *brcoldata;
        PetscScalar       *vals = NULL;
        PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, &brcoldata));
        /* translate block-local column indices to global ones via the gathered IS */
        PetscCall(PetscMalloc1(brncols, &cols));
        for (k = 0; k < brncols; k++) cols[k] = bNindices[brcols[k]];
        /*
          Nest blocks are required to be nonoverlapping -- otherwise nest and monolithic index layouts wouldn't match.
          Thus, we could use INSERT_VALUES, but I prefer ADD_VALUES.
         */
        if (a != 1.0) {
          /* scale a copy of the row; brcoldata points into B's (read-only) storage */
          PetscCall(PetscMalloc1(brncols, &vals));
          for (k = 0; k < brncols; k++) vals[k] = a * brcoldata[k];
          PetscCall(MatSetValues(Y, 1, &row, brncols, cols, vals, ADD_VALUES));
          PetscCall(PetscFree(vals));
        } else {
          PetscCall(MatSetValues(Y, 1, &row, brncols, cols, brcoldata, ADD_VALUES));
        }
        PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, &brcoldata));
        PetscCall(PetscFree(cols));
      }
      if (D) PetscCall(MatDestroy(&D)); /* drop any temporary explicit copy */
      PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices));
    }
    PetscCall(ISRestoreIndices(bNis, &bNindices));
    PetscCall(ISDestroy(&bNis));
  }
  PetscCall(MatAssemblyBegin(Y, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Y, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  General MATNEST -> AIJ conversion: determine the diagonal/off-diagonal preallocation from
  the blocks' nonzero patterns (communicated to the owning processes with a PetscSF), then
  fill the result with MatAXPY_Dense_Nest(). A uniprocess fast path delegates to
  MatConvert_Nest_SeqAIJ_fast() when all blocks and index sets qualify.
*/
static PetscErrorCode MatConvert_Nest_AIJ(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat_Nest   *nest = (Mat_Nest *)A->data;
  PetscInt    m, n, M, N, i, j, k, *dnnz, *onnz = NULL, rstart, cstart, cend;
  PetscMPIInt size;
  Mat         C;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  if (size == 1) { /* look for a special case with SeqAIJ matrices and strided-1, contiguous, blocks */
    PetscInt  nf;
    PetscBool fast;

    PetscCall(PetscStrcmp(newtype, MATAIJ, &fast));
    if (!fast) PetscCall(PetscStrcmp(newtype, MATSEQAIJ, &fast));
    /* every nonzero block must be SEQAIJ, directly or under a virtual (Hermitian) transpose */
    for (i = 0; i < nest->nr && fast; ++i) {
      for (j = 0; j < nest->nc && fast; ++j) {
        Mat B = nest->m[i][j];
        if (B) {
          PetscCall(PetscObjectTypeCompare((PetscObject)B, MATSEQAIJ, &fast));
          if (!fast) {
            PetscBool istrans;

            PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans));
            if (istrans) {
              Mat Bt;

              PetscCall(MatTransposeGetMat(B, &Bt));
              PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast));
            } else {
PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans));
              if (istrans) {
                Mat Bt;

                PetscCall(MatHermitianTransposeGetMat(B, &Bt));
                PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast));
              }
            }
          }
        }
      }
    }
    /* the row ISs must be contiguous stride-1 blocks that tile [0, M) in order */
    for (i = 0, nf = 0; i < nest->nr && fast; ++i) {
      PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.row[i], ISSTRIDE, &fast));
      if (fast) {
        PetscInt f, s;

        PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &f, &s));
        if (f != nf || s != 1) {
          fast = PETSC_FALSE;
        } else {
          PetscCall(ISGetSize(nest->isglobal.row[i], &f));
          nf += f;
        }
      }
    }
    /* and likewise the column ISs must tile [0, N) */
    for (i = 0, nf = 0; i < nest->nc && fast; ++i) {
      PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.col[i], ISSTRIDE, &fast));
      if (fast) {
        PetscInt f, s;

        PetscCall(ISStrideGetInfo(nest->isglobal.col[i], &f, &s));
        if (f != nf || s != 1) {
          fast = PETSC_FALSE;
        } else {
          PetscCall(ISGetSize(nest->isglobal.col[i], &f));
          nf += f;
        }
      }
    }
    if (fast) {
      PetscCall(MatConvert_Nest_SeqAIJ_fast(A, newtype, reuse, newmat));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, &n));
  /* NOTE(review): cstart/cend appear unused below; ownership tests use A->cmap->range directly */
  PetscCall(MatGetOwnershipRangeColumn(A, &cstart, &cend));
  if (reuse == MAT_REUSE_MATRIX) C = *newmat;
  else {
    PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
    PetscCall(MatSetType(C, newtype));
    PetscCall(MatSetSizes(C, m, n, M, N));
  }
  /* dnnz and onnz share a single allocation of length 2*m */
  PetscCall(PetscMalloc1(2 * m, &dnnz));
  if (m) {
    onnz = dnnz + m;
    for (k = 0; k < m; k++) {
      dnnz[k] = 0;
      onnz[k] = 0;
    }
  }
  for (j = 0; j < nest->nc; ++j) {
    IS              bNis;
    PetscInt        bN;
    const PetscInt *bNindices;
    PetscBool       flg;
    /* Using global column indices and ISAllGather() is not scalable. */
    PetscCall(ISAllGather(nest->isglobal.col[j], &bNis));
    PetscCall(ISGetSize(bNis, &bN));
    PetscCall(ISGetIndices(bNis, &bNindices));
    for (i = 0; i < nest->nr; ++i) {
      PetscSF         bmsf;
      PetscSFNode    *iremote;
      Mat             B = nest->m[i][j], D = NULL;
      PetscInt        bm, *sub_dnnz, *sub_onnz, br;
      const PetscInt *bmindices;
      if (!B) continue;
      PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm));
      PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices));
      PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &bmsf));
      PetscCall(PetscMalloc1(bm, &iremote));
      PetscCall(PetscMalloc1(bm, &sub_dnnz));
      PetscCall(PetscMalloc1(bm, &sub_onnz));
      for (k = 0; k < bm; ++k) {
        sub_dnnz[k] = 0;
        sub_onnz[k] = 0;
      }
      /* replace a virtual (Hermitian) transpose wrapper by an explicit matrix of the wrapped type */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, ""));
      if (flg) {
        PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /* convert SBAIJ blocks so MatGetRow() below sees full rows -- NOTE(review): confirm rationale */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
      if (flg) {
        if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D));
        else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /*
        Locate the owners for all of the locally-owned global row indices for this row block.
        These determine the roots of PetscSF used to communicate preallocation data to row owners.
        The roots correspond to the dnnz and onnz entries; thus, there are two roots per row.
       */
      PetscCall(MatGetOwnershipRange(B, &rstart, NULL));
      for (br = 0; br < bm; ++br) {
        PetscInt        row = bmindices[br], brncols, col;
        const PetscInt *brcols;
        PetscInt        rowrel   = 0; /* row's relative index on its owner rank */
        PetscMPIInt     rowowner = 0;
        PetscCall(PetscLayoutFindOwnerIndex(A->rmap, row, &rowowner, &rowrel));
        /* how many roots */
        iremote[br].rank  = rowowner;
        iremote[br].index = rowrel; /* edge from bmdnnz to dnnz */
        /* get nonzero pattern: classify each entry as diagonal (owner's column block) or off-diagonal */
        PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, NULL));
        for (k = 0; k < brncols; k++) {
          col = bNindices[brcols[k]];
          if (col >= A->cmap->range[rowowner] && col < A->cmap->range[rowowner + 1]) {
            sub_dnnz[br]++;
          } else {
            sub_onnz[br]++;
          }
        }
        PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, NULL));
      }
      if (D) PetscCall(MatDestroy(&D));
      PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices));
      /* bsf will have to take care of disposing of bedges. */
      PetscCall(PetscSFSetGraph(bmsf, m, bm, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
      /* sum this block's per-row counts onto the owning process's dnnz/onnz */
      PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM));
      PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM));
      PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM));
      PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM));
      PetscCall(PetscFree(sub_dnnz));
      PetscCall(PetscFree(sub_onnz));
      PetscCall(PetscSFDestroy(&bmsf));
    }
    PetscCall(ISRestoreIndices(bNis, &bNindices));
    PetscCall(ISDestroy(&bNis));
  }
  /* Resize preallocation if overestimated */
  for (i = 0; i < m; i++) {
    dnnz[i] = PetscMin(dnnz[i], A->cmap->n);
    onnz[i] = PetscMin(onnz[i], A->cmap->N - A->cmap->n);
  }
  /* only the call matching C's actual type takes effect; the other is ignored */
  PetscCall(MatSeqAIJSetPreallocation(C, 0, dnnz));
  PetscCall(MatMPIAIJSetPreallocation(C, 0, dnnz, 0, onnz));
  PetscCall(PetscFree(dnnz));
  /* fill C with the values of the nest blocks */
  PetscCall(MatAXPY_Dense_Nest(C, 1.0, A));
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &C));
  } else *newmat = C;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  MATNEST -> dense conversion: create (or zero, for MAT_REUSE_MATRIX) the target and
  accumulate the nest blocks into it with MatAXPY_Dense_Nest().
*/
static PetscErrorCode MatConvert_Nest_Dense(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat      B;
  PetscInt m, n, M, N;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, &n));
  if (reuse == MAT_REUSE_MATRIX) {
    B = *newmat;
    PetscCall(MatZeroEntries(B)); /* ADD_VALUES fill below requires a clean slate */
  } else {
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), m, PETSC_DECIDE, M, N, NULL, &B));
  }
  PetscCall(MatAXPY_Dense_Nest(B, 1.0, A));
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else if (reuse == MAT_INITIAL_MATRIX) *newmat = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  The (transpose) mult operations are supported only if every nonzero block supports the
  corresponding mult-add operation, since MatMult_Nest() and friends are implemented with
  MatMultAdd()/MatMultTransposeAdd() on the blocks.
*/
static PetscErrorCode MatHasOperation_Nest(Mat mat, MatOperation op, PetscBool *has)
{
  Mat_Nest *bA =
(Mat_Nest *)mat->data;
  MatOperation opAdd;
  PetscInt     i, j, nr = bA->nr, nc = bA->nc;
  PetscBool    flg;
  PetscFunctionBegin;

  *has = PETSC_FALSE;
  if (op == MATOP_MULT || op == MATOP_MULT_ADD || op == MATOP_MULT_TRANSPOSE || op == MATOP_MULT_TRANSPOSE_ADD) {
    /* the nest mult kernels apply MatMultAdd (resp. MatMultTransposeAdd) to each block */
    opAdd = (op == MATOP_MULT || op == MATOP_MULT_ADD ? MATOP_MULT_ADD : MATOP_MULT_TRANSPOSE_ADD);
    for (j = 0; j < nc; j++) {
      for (i = 0; i < nr; i++) {
        if (!bA->m[i][j]) continue;
        PetscCall(MatHasOperation(bA->m[i][j], opAdd, &flg));
        if (!flg) PetscFunctionReturn(PETSC_SUCCESS); /* one unsupported block => *has stays PETSC_FALSE */
      }
    }
  }
  /* otherwise, supported exactly when the function table has an entry for op */
  if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATNEST - "nest" - Matrix type consisting of nested submatrices, each stored separately.

  Level: intermediate

  Notes:
  This matrix type permits scalable use of `PCFIELDSPLIT` and avoids the large memory costs of extracting submatrices.
  It allows the use of symmetric and block formats for parts of multi-physics simulations.
  It is usually used with `DMCOMPOSITE` and `DMCreateMatrix()`

  Each of the submatrices lives on the same MPI communicator as the original nest matrix (though they can have zero
  rows/columns on some processes.) Thus this is not meant for cases where the submatrices live on far fewer processes
  than the nest matrix.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `MatType`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`,
          `VecCreateNest()`, `DMCreateMatrix()`, `DMCOMPOSITE`, `MatNestSetVecType()`, `MatNestGetLocalISs()`,
          `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()`
M*/
PETSC_EXTERN PetscErrorCode MatCreate_Nest(Mat A)
{
  Mat_Nest *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  A->data = (void *)s;

  /* nr/nc of -1 mark the nest as not yet populated; real sizes come from MatNestSetSubMats() */
  s->nr            = -1;
  s->nc            = -1;
  s->m             = NULL;
  s->splitassembly = PETSC_FALSE;

  PetscCall(PetscMemzero(A->ops, sizeof(*A->ops)));

  A->ops->mult                  = MatMult_Nest;
  A->ops->multadd               = MatMultAdd_Nest;
  A->ops->multtranspose         = MatMultTranspose_Nest;
  A->ops->multtransposeadd      = MatMultTransposeAdd_Nest;
  A->ops->transpose             = MatTranspose_Nest;
  A->ops->assemblybegin         = MatAssemblyBegin_Nest;
  A->ops->assemblyend           = MatAssemblyEnd_Nest;
  A->ops->zeroentries           = MatZeroEntries_Nest;
  A->ops->copy                  = MatCopy_Nest;
  A->ops->axpy                  = MatAXPY_Nest;
  A->ops->duplicate             = MatDuplicate_Nest;
  A->ops->createsubmatrix       = MatCreateSubMatrix_Nest;
  A->ops->destroy               = MatDestroy_Nest;
  A->ops->view                  = MatView_Nest;
  A->ops->getvecs               = NULL; /* Use VECNEST by calling MatNestSetVecType(A,VECNEST) */
  A->ops->getlocalsubmatrix     = MatGetLocalSubMatrix_Nest;
  A->ops->restorelocalsubmatrix = MatRestoreLocalSubMatrix_Nest;
  A->ops->getdiagonal           = MatGetDiagonal_Nest;
  A->ops->diagonalscale         = MatDiagonalScale_Nest;
  A->ops->scale                 = MatScale_Nest;
  A->ops->shift                 = MatShift_Nest;
  A->ops->diagonalset           = MatDiagonalSet_Nest;
  A->ops->setrandom             = MatSetRandom_Nest;
  A->ops->hasoperation          = MatHasOperation_Nest;
  A->ops->missingdiagonal       = MatMissingDiagonal_Nest;

  A->spptr     = NULL;
  A->assembled = PETSC_FALSE;

  /* expose Nest api's; conversions and matrix products are dispatched through these composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", MatNestGetSubMat_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", MatNestSetSubMat_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", MatNestGetSubMats_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", MatNestGetSize_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", MatNestGetISs_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", MatNestGetLocalISs_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", MatNestSetVecType_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", MatNestSetSubMats_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", MatConvert_Nest_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", MatConvert_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", MatConvert_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", MatProductSetFromOptions_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", MatProductSetFromOptions_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", MatProductSetFromOptions_Nest_Dense));

  PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATNEST));
  PetscFunctionReturn(PETSC_SUCCESS);
}