#include <../src/mat/impls/nest/matnestimpl.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <petscsf.h>

static PetscErrorCode MatSetUp_NestIS_Private(Mat, PetscInt, const IS[], PetscInt, const IS[]);
static PetscErrorCode MatCreateVecs_Nest(Mat, Vec *, Vec *);
static PetscErrorCode MatReset_Nest(Mat);

PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat, MatType, MatReuse, Mat *);

/* private functions */

/* Compute the local sizes (m,n) and global sizes (M,N) of the nest matrix by
   summing the local/global sizes of its global row and column index sets. */
static PetscErrorCode MatNestGetSizes_Private(Mat A, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  *m = *n = *M = *N = 0;
  for (i = 0; i < bA->nr; i++) { /* rows */
    PetscInt sm, sM;
    PetscCall(ISGetLocalSize(bA->isglobal.row[i], &sm));
    PetscCall(ISGetSize(bA->isglobal.row[i], &sM));
    *m += sm;
    *M += sM;
  }
  for (j = 0; j < bA->nc; j++) { /* cols */
    PetscInt sn, sN;
    PetscCall(ISGetLocalSize(bA->isglobal.col[j], &sn));
    PetscCall(ISGetSize(bA->isglobal.col[j], &sN));
    *n += sn;
    *N += sN;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* operations */

/* y = A*x computed blockwise: y[i] = sum_j A[i][j]*x[j].
   NULL blocks are treated as zero; bA->right/bA->left serve as scratch arrays
   holding the subvector views of x and y respectively. */
static PetscErrorCode MatMult_Nest(Mat A, Vec x, Vec y)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->right, *by = bA->left;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i]));
  for (i = 0; i < nr; i++) {
    PetscCall(VecZeroEntries(by[i])); /* each row block is accumulated from zero */
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      /* y[i] <- y[i] + A[i][j] * x[j] */
      PetscCall(MatMultAdd(bA->m[i][j], bx[j], by[i], by[i]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A*x computed blockwise; when y != z the y blocks are first copied into
   the z subvectors so the MatMultAdd accumulation starts from y. */
static PetscErrorCode MatMultAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->right, *bz = bA->left;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(z, bA->isglobal.row[i], &bz[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i]));
  for (i = 0; i < nr; i++) {
    if (y != z) {
      Vec by;
      PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by));
      PetscCall(VecCopy(by, bz[i]));
      PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by));
    }
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      /* y[i] <- y[i] + A[i][j] * x[j] */
      PetscCall(MatMultAdd(bA->m[i][j], bx[j], bz[i], bz[i]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.row[i], &bz[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Product data attached to C = A(nest) * B(dense); built by MatProductSymbolic_Nest_Dense. */
typedef struct {
  Mat         *workC;  /* array of Mat with specific containers depending on the underlying MatMatMult implementation */
  PetscScalar *tarray; /* buffer for storing all temporary products A[i][j] B[j] */
  PetscInt    *dm, *dn, k; /* displacements and number of submatrices */
} Nest_Dense;

/* Numeric phase of C = A(nest) * B(dense): for each row block i, wraps the
   corresponding slices of B and C in temporary dense views and accumulates
   A[i][j]*B[j] into C[i] via the precomputed workC products. */
static PetscErrorCode MatProductNumeric_Nest_Dense(Mat C)
{
  Mat_Nest          *bA;
  Nest_Dense        *contents;
  Mat                viewB, viewC, productB, workC;
  const PetscScalar *barray;
  PetscScalar       *carray;
  PetscInt           i, j, M, N, nr, nc, ldb, ldc;
  Mat                A, B;

  PetscFunctionBegin;
  MatCheckProduct(C, 1);
  A = C->product->A;
  B = C->product->B;
  PetscCall(MatGetSize(B, NULL, &N));
  if (!N) { /* empty B: nothing to compute, just finalize C */
    PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  contents = (Nest_Dense *)C->product->data;
  PetscCheck(contents, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data empty");
  bA = (Mat_Nest *)A->data;
  nr = bA->nr;
  nc = bA->nc;
  PetscCall(MatDenseGetLDA(B, &ldb));
  PetscCall(MatDenseGetLDA(C, &ldc));
  PetscCall(MatZeroEntries(C));
  PetscCall(MatDenseGetArrayRead(B, &barray));
  PetscCall(MatDenseGetArray(C, &carray));
  for (i = 0; i < nr; i++) {
    PetscCall(ISGetSize(bA->isglobal.row[i], &M));
    /* viewC aliases the rows of C belonging to row block i (offset dm[i]) */
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dm[i + 1] - contents->dm[i], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset(carray, contents->dm[i]), &viewC));
    PetscCall(MatDenseSetLDA(viewC, ldc));
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      PetscCall(ISGetSize(bA->isglobal.col[j], &M));
      /* viewB aliases the rows of B belonging to column block j (offset dn[j]) */
      PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset((PetscScalar *)barray, contents->dn[j]), &viewB));
      PetscCall(MatDenseSetLDA(viewB, ldb));

      /* MatMatMultNumeric(bA->m[i][j],viewB,contents->workC[i*nc + j]); */
      workC             = contents->workC[i * nc + j];
      productB          = workC->product->B;
      workC->product->B = viewB; /* use newly created dense matrix viewB */
      PetscCall(MatProductNumeric(workC));
      PetscCall(MatDestroy(&viewB));
      workC->product->B = productB; /* resume original B */

      /* C[i] <- workC + C[i] */
      PetscCall(MatAXPY(viewC, 1.0, contents->workC[i * nc + j], SAME_NONZERO_PATTERN));
    }
    PetscCall(MatDestroy(&viewC));
  }
  PetscCall(MatDenseRestoreArray(C, &carray));
  PetscCall(MatDenseRestoreArrayRead(B, &barray));

  PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destroy callback for the Nest_Dense product data: frees the shared temporary
   buffer, the work product matrices, and the displacement arrays. */
static PetscErrorCode MatNest_DenseDestroy(void *ctx)
{
  Nest_Dense *contents = (Nest_Dense *)ctx;
  PetscInt    i;

  PetscFunctionBegin;
  PetscCall(PetscFree(contents->tarray));
  for (i = 0; i < contents->k; i++) PetscCall(MatDestroy(contents->workC + i));
  PetscCall(PetscFree3(contents->dm, contents->dn, contents->workC));
  PetscCall(PetscFree(contents));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic phase of C = A(nest) * B(dense): sizes C if needed, allocates the
   Nest_Dense product data (row/column displacements, shared temporary buffer),
   and runs the symbolic phase of each per-block product A[i][j]*viewB. */
static PetscErrorCode MatProductSymbolic_Nest_Dense(Mat C)
{
  Mat_Nest          *bA;
  Mat                viewB, workC;
  const PetscScalar *barray;
  PetscInt           i, j, M, N, m, n, nr, nc, maxm = 0, ldb;
  Nest_Dense        *contents = NULL;
  PetscBool          cisdense;
  Mat                A, B;
  PetscReal          fill;

  PetscFunctionBegin;
  MatCheckProduct(C, 1);
  PetscCheck(!C->product->data, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data not empty");
  A    = C->product->A;
  B    = C->product->B;
  fill = C->product->fill;
  bA   = (Mat_Nest *)A->data;
  nr   = bA->nr;
  nc   = bA->nc;
  PetscCall(MatGetLocalSize(C, &m, &n));
  PetscCall(MatGetSize(C, &M, &N));
  if (m == PETSC_DECIDE || n == PETSC_DECIDE || M == PETSC_DECIDE || N == PETSC_DECIDE) {
    /* C not yet sized: take row sizes from A and column sizes from B */
    PetscCall(MatGetLocalSize(B, NULL, &n));
    PetscCall(MatGetSize(B, NULL, &N));
    PetscCall(MatGetLocalSize(A, &m, NULL));
    PetscCall(MatGetSize(A, &M, NULL));
    PetscCall(MatSetSizes(C, m, n, M, N));
  }
  PetscCall(PetscObjectTypeCompareAny((PetscObject)C, &cisdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, MATMPIDENSECUDA, ""));
  if (!cisdense) PetscCall(MatSetType(C, ((PetscObject)B)->type_name));
  PetscCall(MatSetUp(C));
  if (!N) { /* empty product: no work data needed, numeric phase finalizes C */
    C->ops->productnumeric = MatProductNumeric_Nest_Dense;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscNew(&contents));
  C->product->data    = contents;
  C->product->destroy = MatNest_DenseDestroy;
  PetscCall(PetscCalloc3(nr + 1, &contents->dm, nc + 1, &contents->dn, nr * nc, &contents->workC));
  contents->k = nr * nc;
  /* dm/dn become prefix sums of block-row/block-column local sizes */
  for (i = 0; i < nr; i++) {
    PetscCall(ISGetLocalSize(bA->isglobal.row[i], contents->dm + i + 1));
    maxm = PetscMax(maxm, contents->dm[i + 1]);
    contents->dm[i + 1] += contents->dm[i];
  }
  for (i = 0; i < nc; i++) {
    PetscCall(ISGetLocalSize(bA->isglobal.col[i], contents->dn + i + 1));
    contents->dn[i + 1] += contents->dn[i];
  }
  /* one buffer, sized by the largest row block, shared by all work products */
  PetscCall(PetscMalloc1(maxm * N, &contents->tarray));
  PetscCall(MatDenseGetLDA(B, &ldb));
  PetscCall(MatGetSize(B, NULL, &N));
  PetscCall(MatDenseGetArrayRead(B, &barray));
  /* loops are permuted compared to MatMatMultNumeric so that viewB is created only once per column of A */
  for (j = 0; j < nc; j++) {
    PetscCall(ISGetSize(bA->isglobal.col[j], &M));
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset((PetscScalar *)barray, contents->dn[j]), &viewB));
    PetscCall(MatDenseSetLDA(viewB, ldb));
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      /* MatMatMultSymbolic may attach a specific container (depending on MatType of bA->m[i][j]) to workC[i][j] */

      PetscCall(MatProductCreate(bA->m[i][j], viewB, NULL, &contents->workC[i * nc + j]));
      workC = contents->workC[i * nc + j];
      PetscCall(MatProductSetType(workC, MATPRODUCT_AB));
      PetscCall(MatProductSetAlgorithm(workC, "default"));
      PetscCall(MatProductSetFill(workC, fill));
      PetscCall(MatProductSetFromOptions(workC));
      PetscCall(MatProductSymbolic(workC));

      /* since tarray will be shared by all Mat */
      PetscCall(MatSeqDenseSetPreallocation(workC, contents->tarray));
      PetscCall(MatMPIDenseSetPreallocation(workC, contents->tarray));
    }
    PetscCall(MatDestroy(&viewB));
  }
  PetscCall(MatDenseRestoreArrayRead(B, &barray));

  C->ops->productnumeric = MatProductNumeric_Nest_Dense;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hook the symbolic phase for the MATPRODUCT_AB case of nest * dense. */
static PetscErrorCode MatProductSetFromOptions_Nest_Dense_AB(Mat C)
{
  PetscFunctionBegin;
  C->ops->productsymbolic = MatProductSymbolic_Nest_Dense;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Dispatch: only the AB product type is supported for nest * dense. */
static PetscErrorCode MatProductSetFromOptions_Nest_Dense(Mat C)
{
  Mat_Product *product = C->product;

  PetscFunctionBegin;
  if (product->type == MATPRODUCT_AB) PetscCall(MatProductSetFromOptions_Nest_Dense_AB(C));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Shared kernel for y = A^T*x (herm == PETSC_FALSE) and y = A^H*x (herm == PETSC_TRUE).
   Note the scratch arrays swap roles relative to MatMult_Nest: bA->left holds the
   x (row-space) views and bA->right the y (column-space) views. */
static PetscErrorCode MatMultTransposeKernel_Nest(Mat A, Vec x, Vec y, PetscBool herm)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->left, *by = bA->right;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(y, bA->isglobal.col[i], &by[i]));
  for (j = 0; j < nc; j++) {
    PetscCall(VecZeroEntries(by[j]));
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      if (herm) PetscCall(MatMultHermitianTransposeAdd(bA->m[i][j], bx[i], by[j], by[j])); /* y[j] <- y[j] + (A[i][j])^H * x[i] */
      else PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], by[j], by[j]));               /* y[j] <- y[j] + (A[i][j])^T * x[i] */
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.col[i], &by[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* y = A^T * x */
static PetscErrorCode MatMultTranspose_Nest(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatMultTransposeKernel_Nest(A, x, y, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* y = A^H * x */
static PetscErrorCode MatMultHermitianTranspose_Nest(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatMultTransposeKernel_Nest(A, x, y, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Shared kernel for z = y + A^T*x / z = y + A^H*x; when y != z the y blocks are
   copied into the z subvectors before accumulation. */
static PetscErrorCode MatMultTransposeAddKernel_Nest(Mat A, Vec x, Vec y, Vec z, PetscBool herm)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->left, *bz = bA->right;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(z, bA->isglobal.col[i], &bz[i]));
  for (j = 0; j < nc; j++) {
    if (y != z) {
      Vec by;
      PetscCall(VecGetSubVector(y, bA->isglobal.col[j], &by));
      PetscCall(VecCopy(by, bz[j]));
      PetscCall(VecRestoreSubVector(y, bA->isglobal.col[j], &by));
    }
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      if (herm) PetscCall(MatMultHermitianTransposeAdd(bA->m[i][j], bx[i], bz[j], bz[j])); /* z[j] <- y[j] + (A[i][j])^H * x[i] */
      else PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], bz[j], bz[j]));               /* z[j] <- y[j] + (A[i][j])^T * x[i] */
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.col[i], &bz[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A^T * x */
static PetscErrorCode MatMultTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  PetscCall(MatMultTransposeAddKernel_Nest(A, x, y, z, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A^H * x */
static PetscErrorCode MatMultHermitianTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  PetscCall(MatMultTransposeAddKernel_Nest(A, x, y, z, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* B = A^T, transposing the block layout (nr x nc -> nc x nr) and each submatrix.
   MAT_INPLACE_MATRIX requires a square block layout (nr == nc). */
static PetscErrorCode MatTranspose_Nest(Mat A, MatReuse reuse, Mat *B)
{
  Mat_Nest *bA = (Mat_Nest *)A->data, *bC;
  Mat       C;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
  PetscCheck(reuse != MAT_INPLACE_MATRIX || nr == nc, PetscObjectComm((PetscObject)A),
             PETSC_ERR_ARG_SIZ, "Square nested matrix only for in-place");

  if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
    Mat *subs;
    IS  *is_row, *is_col;

    PetscCall(PetscCalloc1(nr * nc, &subs));
    PetscCall(PetscMalloc2(nr, &is_row, nc, &is_col));
    PetscCall(MatNestGetISs(A, is_row, is_col));
    if (reuse == MAT_INPLACE_MATRIX) {
      /* pre-populate the transposed layout with the existing submatrices */
      for (i = 0; i < nr; i++) {
        for (j = 0; j < nc; j++) subs[i + nr * j] = bA->m[i][j];
      }
    }

    /* note swapped row/column index sets: C has nc block rows and nr block columns */
    PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nc, is_col, nr, is_row, subs, &C));
    PetscCall(PetscFree(subs));
    PetscCall(PetscFree2(is_row, is_col));
  } else {
    C = *B;
  }

  bC = (Mat_Nest *)C->data;
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (bA->m[i][j]) {
        PetscCall(MatTranspose(bA->m[i][j], reuse, &(bC->m[j][i])));
      } else {
        bC->m[j][i] = NULL;
      }
    }
  }

  if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) {
    *B = C;
  } else {
    /* in-place: C replaces A's header */
    PetscCall(MatHeaderMerge(A, &C));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destroy every IS in *list, free the array, and NULL the caller's pointer. */
static PetscErrorCode MatNestDestroyISList(PetscInt n, IS **list)
{
  IS      *lst = *list;
  PetscInt i;

  PetscFunctionBegin;
  if (!lst) PetscFunctionReturn(PETSC_SUCCESS);
  for (i = 0; i < n; i++)
    if (lst[i]) PetscCall(ISDestroy(&lst[i]));
  PetscCall(PetscFree(lst));
  *list = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release all data owned by the nest matrix (index sets, work vectors,
   submatrices, bookkeeping arrays) and restore default state. */
static PetscErrorCode MatReset_Nest(Mat A)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  /* release the matrices and the place holders */
  PetscCall(MatNestDestroyISList(vs->nr, &vs->isglobal.row));
  PetscCall(MatNestDestroyISList(vs->nc, &vs->isglobal.col));
  PetscCall(MatNestDestroyISList(vs->nr, &vs->islocal.row));
  PetscCall(MatNestDestroyISList(vs->nc, &vs->islocal.col));

  PetscCall(PetscFree(vs->row_len));
  PetscCall(PetscFree(vs->col_len));
  PetscCall(PetscFree(vs->nnzstate));

  PetscCall(PetscFree2(vs->left, vs->right));

  /* release the matrices and the place holders */
  if (vs->m) {
    for (i = 0; i < vs->nr; i++) {
      for (j = 0; j < vs->nc; j++) PetscCall(MatDestroy(&vs->m[i][j]));
    }
    PetscCall(PetscFree(vs->m[0])); /* submatrix pointers stored in one contiguous allocation */
    PetscCall(PetscFree(vs->m));
  }

  /* restore defaults */
  vs->nr            = 0;
  vs->nc            = 0;
  vs->splitassembly = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destructor: reset the data, then remove every composed method/conversion hook. */
static PetscErrorCode MatDestroy_Nest(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatReset_Nest(A));
  PetscCall(PetscFree(A->data));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Reports a missing diagonal if any diagonal block is NULL or itself reports one.
   Locating the first missing entry (dd) is only supported as 0. */
static PetscErrorCode MatMissingDiagonal_Nest(Mat mat, PetscBool *missing, PetscInt *dd)
{
  Mat_Nest *vs = (Mat_Nest *)mat->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (dd) *dd = 0;
  if (!vs->nr) { /* empty nest: treat as missing */
    *missing = PETSC_TRUE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  *missing = PETSC_FALSE;
  for (i = 0; i < vs->nr && !(*missing); i++) {
    *missing = PETSC_TRUE; /* a NULL diagonal block counts as missing */
    if (vs->m[i][i]) {
      PetscCall(MatMissingDiagonal(vs->m[i][i], missing, NULL));
      PetscCheck(!*missing || !dd, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "First missing entry not yet implemented");
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Begin (and, unless split assembly is enabled, also end) assembly of every
   submatrix, tracking nonzero-state changes to bump A->nonzerostate. */
static PetscErrorCode MatAssemblyBegin_Nest(Mat A, MatAssemblyType type)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < vs->nr; i++) {
    for (j = 0; j < vs->nc; j++) {
      PetscObjectState subnnzstate = 0;
      if (vs->m[i][j]) {
        PetscCall(MatAssemblyBegin(vs->m[i][j], type));
        if (!vs->splitassembly) {
          /* Note: split assembly will fail if the same block appears more than once (even indirectly through a nested
           * sub-block). This could be fixed by adding a flag to Mat so that there was a way to check if a Mat was
           * already performing an assembly, but the result would by more complicated and appears to offer less
           * potential for diagnostics and correctness checking. Split assembly should be fixed once there is an
           * interface for libraries to make asynchronous progress in "user-defined non-blocking collectives".
           */
          PetscCall(MatAssemblyEnd(vs->m[i][j], type));
          PetscCall(MatGetNonzeroState(vs->m[i][j], &subnnzstate));
        }
      }
      nnzstate                         = (PetscBool)(nnzstate || vs->nnzstate[i * vs->nc + j] != subnnzstate);
      vs->nnzstate[i * vs->nc + j] = subnnzstate;
    }
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* End assembly of the submatrices; a no-op unless split assembly is enabled,
   since MatAssemblyBegin_Nest already finished the non-split case. */
static PetscErrorCode MatAssemblyEnd_Nest(Mat A, MatAssemblyType type)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < vs->nr; i++) {
    for (j = 0; j < vs->nc; j++) {
      if (vs->m[i][j]) {
        if (vs->splitassembly) PetscCall(MatAssemblyEnd(vs->m[i][j], type));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return (in *B) some non-NULL submatrix in block row `row`, preferring the
   diagonal block; *B is NULL if the whole row is empty. */
static PetscErrorCode MatNestFindNonzeroSubMatRow(Mat A, PetscInt row, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  j;
  Mat       sub;

  PetscFunctionBegin;
  sub = (row < vs->nc) ? vs->m[row][row] : (Mat)NULL; /* Prefer to find on the diagonal */
  for (j = 0; !sub && j < vs->nc; j++) sub = vs->m[row][j];
  if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return (in *B) some non-NULL submatrix in block column `col`, preferring the
   diagonal block; *B is NULL if the whole column is empty. */
static PetscErrorCode MatNestFindNonzeroSubMatCol(Mat A, PetscInt col, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;
  Mat       sub;

  PetscFunctionBegin;
  sub = (col < vs->nr) ? vs->m[col][col] : (Mat)NULL; /* Prefer to find on the diagonal */
  for (i = 0; !sub && i < vs->nr; i++) sub = vs->m[i][col];
  if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Locate `is` inside `list`: first as a single entry, then as a concatenation of
   consecutive entries list[begin..end-1]. On success sets *begin/*end (when
   requested); otherwise both remain -1. */
static PetscErrorCode MatNestFindISRange(Mat A, PetscInt n, const IS list[], IS is, PetscInt *begin, PetscInt *end)
{
  PetscInt  i, j, size, m;
  PetscBool flg;
  IS        out, concatenate[2];

  PetscFunctionBegin;
  PetscAssertPointer(list, 3);
  PetscValidHeaderSpecific(is, IS_CLASSID, 4);
  if (begin) {
    PetscAssertPointer(begin, 5);
    *begin = -1;
  }
  if (end) {
    PetscAssertPointer(end, 6);
    *end = -1;
  }
  /* fast path: is matches a single list entry */
  for (i = 0; i < n; i++) {
    if (!list[i]) continue;
    PetscCall(ISEqualUnsorted(list[i], is, &flg));
    if (flg) {
      if (begin) *begin = i;
      if (end) *end = i + 1;
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  PetscCall(ISGetSize(is, &size));
  /* try concatenations of consecutive entries starting at i, growing until the
     sizes match, then compare */
  for (i = 0; i < n - 1; i++) {
    if (!list[i]) continue;
    m = 0;
    PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, list + i, &out));
    PetscCall(ISGetSize(out, &m));
    for (j = i + 2; j < n && m < size; j++) {
      if (list[j]) {
        concatenate[0] = out;
        concatenate[1] = list[j];
        PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, concatenate, &out));
        PetscCall(ISDestroy(concatenate)); /* destroys the previous `out` held in concatenate[0] */
        PetscCall(ISGetSize(out, &m));
      }
    }
    if (m == size) {
      PetscCall(ISEqualUnsorted(out, is, &flg));
      if (flg) {
        if (begin) *begin = i;
        if (end) *end = j;
        PetscCall(ISDestroy(&out));
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }
    PetscCall(ISDestroy(&out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Create a fully-assembled empty AIJ matrix sized to block (i,j), used as a
   stand-in for a NULL submatrix. */
static PetscErrorCode MatNestFillEmptyMat_Private(Mat A, PetscInt i, PetscInt j, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  lr, lc;

  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B));
  PetscCall(ISGetLocalSize(vs->isglobal.row[i], &lr));
  PetscCall(ISGetLocalSize(vs->isglobal.col[j], &lc));
  PetscCall(MatSetSizes(*B, lr, lc, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(MatSetType(*B, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(*B, 0, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*B, 0, NULL, 0, NULL));
  PetscCall(MatSetUp(*B));
  PetscCall(MatSetOption(*B, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Build (or fetch from the object's composition cache) the nest submatrix for
   block rows [rbegin,rend) and block columns [cbegin,cend). */
static PetscErrorCode MatNestGetBlock_Private(Mat A, PetscInt rbegin, PetscInt rend, PetscInt cbegin, PetscInt cend, Mat *B)
{
  Mat_Nest  *vs = (Mat_Nest *)A->data;
  Mat       *a;
  PetscInt   i, j, k, l, nr = rend - rbegin, nc = cend - cbegin;
  char       keyname[256];
  PetscBool *b;
  PetscBool  flg;

  PetscFunctionBegin;
  *B = NULL;
  /* cache key encodes the requested block range */
  PetscCall(PetscSNPrintf(keyname, sizeof(keyname), "NestBlock_%" PetscInt_FMT "-%" PetscInt_FMT "x%" PetscInt_FMT "-%" PetscInt_FMT, rbegin, rend, cbegin, cend));
  PetscCall(PetscObjectQuery((PetscObject)A, keyname, (PetscObject *)B));
  if (*B) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscMalloc2(nr * nc, &a, nr * nc, &b));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      a[i * nc + j] = vs->m[rbegin + i][cbegin + j];
      b[i * nc + j] = PETSC_FALSE; /* marks blocks we allocate below and must destroy */
    }
  }
  if (nc != vs->nc && nr != vs->nr) {
    /* Without the parent's ISs, MatCreateNest must infer sizes; fill in blocks
       whose entire row and column are otherwise empty so sizes are determined. */
    for (i = 0; i < nr; i++) {
      for (j = 0; j < nc; j++) {
        flg = PETSC_FALSE;
        for (k = 0; (k < nr && !flg); k++) {
          if (a[j + k * nc]) flg = PETSC_TRUE;
        }
        if (flg) {
          flg = PETSC_FALSE;
          for (l = 0; (l < nc && !flg); l++) {
            if (a[i * nc + l]) flg = PETSC_TRUE;
          }
        }
        if (!flg) {
          b[i * nc + j] = PETSC_TRUE;
          PetscCall(MatNestFillEmptyMat_Private(A, rbegin + i, cbegin + j, a + i * nc +
                                                j));
        }
      }
    }
  }
  /* reuse the parent's ISs only when the full block dimension is requested */
  PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, nr != vs->nr ? NULL : vs->isglobal.row, nc, nc != vs->nc ? NULL : vs->isglobal.col, a, B));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (b[i * nc + j]) PetscCall(MatDestroy(a + i * nc + j)); /* drop our reference to the filler blocks */
    }
  }
  PetscCall(PetscFree2(a, b));
  (*B)->assembled = A->assembled;
  PetscCall(PetscObjectCompose((PetscObject)A, keyname, (PetscObject)*B));
  PetscCall(PetscObjectDereference((PetscObject)*B)); /* Leave the only remaining reference in the composition */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Map (isrow,iscol) onto block ranges of the given IS pair and return the
   corresponding submatrix: a single block (filled in if NULL) or a cached
   nest block; errors if the index sets do not match any block range. */
static PetscErrorCode MatNestFindSubMat(Mat A, struct MatNestISPair *is, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  rbegin, rend, cbegin, cend;

  PetscFunctionBegin;
  PetscCall(MatNestFindISRange(A, vs->nr, is->row, isrow, &rbegin, &rend));
  PetscCall(MatNestFindISRange(A, vs->nc, is->col, iscol, &cbegin, &cend));
  if (rend == rbegin + 1 && cend == cbegin + 1) {
    if (!vs->m[rbegin][cbegin]) PetscCall(MatNestFillEmptyMat_Private(A, rbegin, cbegin, vs->m[rbegin] + cbegin));
    *B = vs->m[rbegin][cbegin];
  } else if (rbegin != -1 && cbegin != -1) {
    PetscCall(MatNestGetBlock_Private(A, rbegin, rend, cbegin, cend, B));
  } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Could not find index set");
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   TODO: This does not actually returns a submatrix we can modify
*/
static PetscErrorCode MatCreateSubMatrix_Nest(Mat A, IS isrow, IS iscol, MatReuse reuse, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->isglobal, isrow, iscol, &sub));
  switch (reuse) {
  case MAT_INITIAL_MATRIX:
    if (sub) PetscCall(PetscObjectReference((PetscObject)sub));
    *B = sub;
    break;
  case MAT_REUSE_MATRIX:
    PetscCheck(sub == *B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Submatrix was not used before in this call");
    break;
  case MAT_IGNORE_MATRIX: /* Nothing to do */
    break;
  case MAT_INPLACE_MATRIX: /* Nothing to do */
    SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX is not supported yet");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return the local submatrix addressed by local index sets; caller receives a
   new reference (restored via MatRestoreLocalSubMatrix_Nest). */
static PetscErrorCode MatGetLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub));
  /* We allow the submatrix to be NULL, perhaps it would be better for the user to return an empty matrix instead */
  if (sub) PetscCall(PetscObjectReference((PetscObject)sub));
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release the reference taken by MatGetLocalSubMatrix_Nest, validating that the
   caller returns the same submatrix it was given. */
static PetscErrorCode MatRestoreLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub));
  PetscCheck(*B == sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has not been gotten");
  if (sub) {
    PetscCheck(((PetscObject)sub)->refct > 1, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has had reference count decremented too many times");
    PetscCall(MatDestroy(B));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* v = diag(A), block by block; a NULL diagonal block contributes zeros. */
static PetscErrorCode MatGetDiagonal_Nest(Mat A, Vec v)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    Vec bv;
    PetscCall(VecGetSubVector(v, bA->isglobal.row[i], &bv));
    if (bA->m[i][i]) {
      PetscCall(MatGetDiagonal(bA->m[i][i], bv));
    } else {
      PetscCall(VecSet(bv, 0.0));
    }
    PetscCall(VecRestoreSubVector(v, bA->isglobal.row[i], &bv));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A <- diag(l) A diag(r), applied blockwise; either l or r may be NULL. */
static PetscErrorCode MatDiagonalScale_Nest(Mat A, Vec l, Vec r)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec       bl, *br;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscCalloc1(bA->nc, &br));
  if (r) {
    for (j = 0; j < bA->nc; j++) PetscCall(VecGetSubVector(r, bA->isglobal.col[j], &br[j]));
  }
  bl = NULL;
  for (i = 0; i < bA->nr; i++) {
    if (l) PetscCall(VecGetSubVector(l, bA->isglobal.row[i], &bl));
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatDiagonalScale(bA->m[i][j], bl, br[j]));
    }
    if (l) PetscCall(VecRestoreSubVector(l, bA->isglobal.row[i], &bl));
  }
  if (r) {
    for (j = 0; j < bA->nc; j++) PetscCall(VecRestoreSubVector(r, bA->isglobal.col[j], &br[j]));
  }
  PetscCall(PetscFree(br));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A <- a*A, applied to every non-NULL submatrix. */
static PetscErrorCode MatScale_Nest(Mat A, PetscScalar a)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatScale(bA->m[i][j], a));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A <- A + a*I, shifting every diagonal block; errors if a diagonal block is
   NULL, and bumps the nonzero state when any block's state changed. */
static PetscErrorCode MatShift_Nest(Mat A, PetscScalar a)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    PetscObjectState subnnzstate = 0;
    PetscCheck(bA->m[i][i], PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for shifting an empty diagonal block, insert a matrix in block (%" PetscInt_FMT ",%" PetscInt_FMT ")", i, i);
    PetscCall(MatShift(bA->m[i][i], a));
    PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate));
    nnzstate                         = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate);
    bA->nnzstate[i * bA->nc + i] = subnnzstate;
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Set the diagonal from vector D (insert or add per `is`), block by block;
   NULL diagonal blocks are skipped. Bumps the nonzero state when changed. */
static PetscErrorCode MatDiagonalSet_Nest(Mat A, Vec D, InsertMode is)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    PetscObjectState subnnzstate = 0;
    Vec              bv;
    PetscCall(VecGetSubVector(D, bA->isglobal.row[i], &bv));
    if (bA->m[i][i]) {
      PetscCall(MatDiagonalSet(bA->m[i][i], bv, is));
      PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate));
    }
    PetscCall(VecRestoreSubVector(D, bA->isglobal.row[i], &bv));
    nnzstate                         = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate);
    bA->nnzstate[i * bA->nc + i] = subnnzstate;
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Fill every non-NULL submatrix with random values from rctx. */
static PetscErrorCode MatSetRandom_Nest(Mat A, PetscRandom rctx)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatSetRandom(bA->m[i][j], rctx));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Create compatible right/left work vectors as VECNEST, taking each component
   from the first non-NULL submatrix in the corresponding column/row; errors on
   an entirely NULL column or row. */
static PetscErrorCode MatCreateVecs_Nest(Mat A, Vec *right, Vec *left)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *L, *R;
  MPI_Comm  comm;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  if (right) {
    /* allocate R */
    PetscCall(PetscMalloc1(bA->nc, &R));
    /* Create the right vectors */
    for (j = 0; j < bA->nc; j++) {
      for (i = 0; i < bA->nr; i++) {
        if (bA->m[i][j]) {
          PetscCall(MatCreateVecs(bA->m[i][j], &R[j], NULL));
          break;
        }
      }
      PetscCheck(i != bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null column.");
    }
    PetscCall(VecCreateNest(comm, bA->nc, bA->isglobal.col, R, right));
    /* hand back control to the nest vector */
    for (j = 0; j < bA->nc; j++) PetscCall(VecDestroy(&R[j]));
    PetscCall(PetscFree(R));
  }

  if (left) {
    /* allocate L */
    PetscCall(PetscMalloc1(bA->nr, &L));
    /* Create the left vectors */
    for (i = 0; i < bA->nr; i++) {
      for (j = 0; j < bA->nc; j++) {
        if (bA->m[i][j]) {
          PetscCall(MatCreateVecs(bA->m[i][j], NULL, &L[i]));
          break;
        }
      }
      PetscCheck(j != bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null row.");
    }

    PetscCall(VecCreateNest(comm, bA->nr, bA->isglobal.row, L, left));
    for (i = 0; i < bA->nr; i++) PetscCall(VecDestroy(&L[i]));

    PetscCall(PetscFree(L));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ASCII viewer: print the block layout and a summary line per submatrix;
   recurses into nested blocks, or into all blocks with -mat_view_nest_sub. */
static PetscErrorCode MatView_Nest(Mat A, PetscViewer viewer)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscBool isascii, viewSub = PETSC_FALSE;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    PetscCall(PetscOptionsGetBool(((PetscObject)A)->options, ((PetscObject)A)->prefix, "-mat_view_nest_sub", &viewSub, NULL));
    PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix object:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, "type=nest, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", bA->nr, bA->nc));

    PetscCall(PetscViewerASCIIPrintf(viewer, "MatNest structure:\n"));
    for (i = 0; i < bA->nr; i++) {
      for (j = 0; j < bA->nc; j++) {
        MatType   type;
        char      name[256] = "", prefix[256] = "";
        PetscInt  NR, NC;
        PetscBool isNest = PETSC_FALSE;

        if (!bA->m[i][j]) {
          PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : NULL\n", i, j));
          continue;
        }
        PetscCall(MatGetSize(bA->m[i][j], &NR, &NC));
        PetscCall(MatGetType(bA->m[i][j], &type));
        if (((PetscObject)bA->m[i][j])->name) PetscCall(PetscSNPrintf(name, sizeof(name),
"name=\"%s\", ", ((PetscObject)bA->m[i][j])->name)); 958 if (((PetscObject)bA->m[i][j])->prefix) PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "prefix=\"%s\", ", ((PetscObject)bA->m[i][j])->prefix)); 959 PetscCall(PetscObjectTypeCompare((PetscObject)bA->m[i][j], MATNEST, &isNest)); 960 961 PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : %s%stype=%s, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", i, j, name, prefix, type, NR, NC)); 962 963 if (isNest || viewSub) { 964 PetscCall(PetscViewerASCIIPushTab(viewer)); /* push1 */ 965 PetscCall(MatView(bA->m[i][j], viewer)); 966 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop1 */ 967 } 968 } 969 } 970 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop0 */ 971 } 972 PetscFunctionReturn(PETSC_SUCCESS); 973 } 974 975 static PetscErrorCode MatZeroEntries_Nest(Mat A) 976 { 977 Mat_Nest *bA = (Mat_Nest *)A->data; 978 PetscInt i, j; 979 980 PetscFunctionBegin; 981 for (i = 0; i < bA->nr; i++) { 982 for (j = 0; j < bA->nc; j++) { 983 if (!bA->m[i][j]) continue; 984 PetscCall(MatZeroEntries(bA->m[i][j])); 985 } 986 } 987 PetscFunctionReturn(PETSC_SUCCESS); 988 } 989 990 static PetscErrorCode MatCopy_Nest(Mat A, Mat B, MatStructure str) 991 { 992 Mat_Nest *bA = (Mat_Nest *)A->data, *bB = (Mat_Nest *)B->data; 993 PetscInt i, j, nr = bA->nr, nc = bA->nc; 994 PetscBool nnzstate = PETSC_FALSE; 995 996 PetscFunctionBegin; 997 PetscCheck(nr == bB->nr && nc == bB->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Cannot copy a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") to a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bB->nr, bB->nc, nr, nc); 998 for (i = 0; i < nr; i++) { 999 for (j = 0; j < nc; j++) { 1000 PetscObjectState subnnzstate = 0; 1001 if (bA->m[i][j] && bB->m[i][j]) { 1002 PetscCall(MatCopy(bA->m[i][j], bB->m[i][j], str)); 1003 } else PetscCheck(!bA->m[i][j] && !bB->m[i][j], PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Matrix block 
does not exist at %" PetscInt_FMT ",%" PetscInt_FMT, i, j); 1004 PetscCall(MatGetNonzeroState(bB->m[i][j], &subnnzstate)); 1005 nnzstate = (PetscBool)(nnzstate || bB->nnzstate[i * nc + j] != subnnzstate); 1006 bB->nnzstate[i * nc + j] = subnnzstate; 1007 } 1008 } 1009 if (nnzstate) B->nonzerostate++; 1010 PetscFunctionReturn(PETSC_SUCCESS); 1011 } 1012 1013 static PetscErrorCode MatAXPY_Nest(Mat Y, PetscScalar a, Mat X, MatStructure str) 1014 { 1015 Mat_Nest *bY = (Mat_Nest *)Y->data, *bX = (Mat_Nest *)X->data; 1016 PetscInt i, j, nr = bY->nr, nc = bY->nc; 1017 PetscBool nnzstate = PETSC_FALSE; 1018 1019 PetscFunctionBegin; 1020 PetscCheck(nr == bX->nr && nc == bX->nc, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Cannot AXPY a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") with a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bX->nr, bX->nc, nr, nc); 1021 for (i = 0; i < nr; i++) { 1022 for (j = 0; j < nc; j++) { 1023 PetscObjectState subnnzstate = 0; 1024 if (bY->m[i][j] && bX->m[i][j]) { 1025 PetscCall(MatAXPY(bY->m[i][j], a, bX->m[i][j], str)); 1026 } else if (bX->m[i][j]) { 1027 Mat M; 1028 1029 PetscCheck(str == DIFFERENT_NONZERO_PATTERN || str == UNKNOWN_NONZERO_PATTERN, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Matrix block does not exist at %" PetscInt_FMT ",%" PetscInt_FMT ". 
Use DIFFERENT_NONZERO_PATTERN or UNKNOWN_NONZERO_PATTERN", i, j); 1030 PetscCall(MatDuplicate(bX->m[i][j], MAT_COPY_VALUES, &M)); 1031 PetscCall(MatNestSetSubMat(Y, i, j, M)); 1032 PetscCall(MatDestroy(&M)); 1033 } 1034 if (bY->m[i][j]) PetscCall(MatGetNonzeroState(bY->m[i][j], &subnnzstate)); 1035 nnzstate = (PetscBool)(nnzstate || bY->nnzstate[i * nc + j] != subnnzstate); 1036 bY->nnzstate[i * nc + j] = subnnzstate; 1037 } 1038 } 1039 if (nnzstate) Y->nonzerostate++; 1040 PetscFunctionReturn(PETSC_SUCCESS); 1041 } 1042 1043 static PetscErrorCode MatDuplicate_Nest(Mat A, MatDuplicateOption op, Mat *B) 1044 { 1045 Mat_Nest *bA = (Mat_Nest *)A->data; 1046 Mat *b; 1047 PetscInt i, j, nr = bA->nr, nc = bA->nc; 1048 1049 PetscFunctionBegin; 1050 PetscCall(PetscMalloc1(nr * nc, &b)); 1051 for (i = 0; i < nr; i++) { 1052 for (j = 0; j < nc; j++) { 1053 if (bA->m[i][j]) { 1054 PetscCall(MatDuplicate(bA->m[i][j], op, &b[i * nc + j])); 1055 } else { 1056 b[i * nc + j] = NULL; 1057 } 1058 } 1059 } 1060 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, bA->isglobal.row, nc, bA->isglobal.col, b, B)); 1061 /* Give the new MatNest exclusive ownership */ 1062 for (i = 0; i < nr * nc; i++) PetscCall(MatDestroy(&b[i])); 1063 PetscCall(PetscFree(b)); 1064 1065 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 1066 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 1067 PetscFunctionReturn(PETSC_SUCCESS); 1068 } 1069 1070 /* nest api */ 1071 static PetscErrorCode MatNestGetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat *mat) 1072 { 1073 Mat_Nest *bA = (Mat_Nest *)A->data; 1074 1075 PetscFunctionBegin; 1076 PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1); 1077 PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1); 1078 *mat = bA->m[idxm][jdxm]; 
1079 PetscFunctionReturn(PETSC_SUCCESS); 1080 } 1081 1082 /*@ 1083 MatNestGetSubMat - Returns a single, sub-matrix from a `MATNEST` 1084 1085 Not Collective 1086 1087 Input Parameters: 1088 + A - `MATNEST` matrix 1089 . idxm - index of the matrix within the nest matrix 1090 - jdxm - index of the matrix within the nest matrix 1091 1092 Output Parameter: 1093 . sub - matrix at index `idxm`, `jdxm` within the nest matrix 1094 1095 Level: developer 1096 1097 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestSetSubMat()`, 1098 `MatNestGetLocalISs()`, `MatNestGetISs()` 1099 @*/ 1100 PetscErrorCode MatNestGetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat *sub) 1101 { 1102 PetscFunctionBegin; 1103 PetscUseMethod(A, "MatNestGetSubMat_C", (Mat, PetscInt, PetscInt, Mat *), (A, idxm, jdxm, sub)); 1104 PetscFunctionReturn(PETSC_SUCCESS); 1105 } 1106 1107 static PetscErrorCode MatNestSetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat mat) 1108 { 1109 Mat_Nest *bA = (Mat_Nest *)A->data; 1110 PetscInt m, n, M, N, mi, ni, Mi, Ni; 1111 1112 PetscFunctionBegin; 1113 PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1); 1114 PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1); 1115 PetscCall(MatGetLocalSize(mat, &m, &n)); 1116 PetscCall(MatGetSize(mat, &M, &N)); 1117 PetscCall(ISGetLocalSize(bA->isglobal.row[idxm], &mi)); 1118 PetscCall(ISGetSize(bA->isglobal.row[idxm], &Mi)); 1119 PetscCall(ISGetLocalSize(bA->isglobal.col[jdxm], &ni)); 1120 PetscCall(ISGetSize(bA->isglobal.col[jdxm], &Ni)); 1121 PetscCheck(M == Mi && N == Ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" 
PetscInt_FMT ")", M, N, Mi, Ni); 1122 PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix local dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, mi, ni); 1123 1124 /* do not increase object state */ 1125 if (mat == bA->m[idxm][jdxm]) PetscFunctionReturn(PETSC_SUCCESS); 1126 1127 PetscCall(PetscObjectReference((PetscObject)mat)); 1128 PetscCall(MatDestroy(&bA->m[idxm][jdxm])); 1129 bA->m[idxm][jdxm] = mat; 1130 PetscCall(PetscObjectStateIncrease((PetscObject)A)); 1131 PetscCall(MatGetNonzeroState(mat, &bA->nnzstate[idxm * bA->nc + jdxm])); 1132 A->nonzerostate++; 1133 PetscFunctionReturn(PETSC_SUCCESS); 1134 } 1135 1136 /*@ 1137 MatNestSetSubMat - Set a single submatrix in the `MATNEST` 1138 1139 Logically Collective 1140 1141 Input Parameters: 1142 + A - `MATNEST` matrix 1143 . idxm - index of the matrix within the nest matrix 1144 . jdxm - index of the matrix within the nest matrix 1145 - sub - matrix at index `idxm`, `jdxm` within the nest matrix 1146 1147 Level: developer 1148 1149 Notes: 1150 The new submatrix must have the same size and communicator as that block of the nest. 1151 1152 This increments the reference count of the submatrix. 

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestSetSubMats()`, `MatNestGetSubMats()`, `MatNestGetLocalISs()`, `MatCreateNest()`,
          `MatNestGetSubMat()`, `MatNestGetISs()`, `MatNestGetSize()`
@*/
PetscErrorCode MatNestSetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat sub)
{
  PetscFunctionBegin;
  /* dispatch to the type-specific implementation registered under "MatNestSetSubMat_C" */
  PetscUseMethod(A, "MatNestSetSubMat_C", (Mat, PetscInt, PetscInt, Mat), (A, idxm, jdxm, sub));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Accessor for the raw nr x nc block array; any output pointer may be NULL to skip it.
   The returned array is the internal storage and must not be freed by the caller. */
static PetscErrorCode MatNestGetSubMats_Nest(Mat A, PetscInt *M, PetscInt *N, Mat ***mat)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;

  PetscFunctionBegin;
  if (M) *M = bA->nr;
  if (N) *N = bA->nc;
  if (mat) *mat = bA->m;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestGetSubMats - Returns the entire two dimensional array of matrices defining a `MATNEST` matrix.

  Not Collective

  Input Parameter:
. A - nest matrix

  Output Parameters:
+ M   - number of rows in the nest matrix
. N   - number of cols in the nest matrix
- mat - array of matrices

  Level: developer

  Note:
  The user should not free the array `mat`.

  Fortran Notes:
  This routine has a calling sequence
$ call MatNestGetSubMats(A, M, N, mat, ierr)
  where the space allocated for the optional argument `mat` is assumed large enough (if provided).
  Matrices in `mat` are returned in row-major order, see `MatCreateNest()` for an example.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatCreateNest()`,
          `MatNestSetSubMats()`, `MatNestGetISs()`, `MatNestSetSubMat()`
@*/
PetscErrorCode MatNestGetSubMats(Mat A, PetscInt *M, PetscInt *N, Mat ***mat)
{
  PetscFunctionBegin;
  /* dispatch to the type-specific implementation registered under "MatNestGetSubMats_C" */
  PetscUseMethod(A, "MatNestGetSubMats_C", (Mat, PetscInt *, PetscInt *, Mat ***), (A, M, N, mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Report the number of block rows/columns; either output may be NULL to skip it */
static PetscErrorCode MatNestGetSize_Nest(Mat A, PetscInt *M, PetscInt *N)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;

  PetscFunctionBegin;
  if (M) *M = bA->nr;
  if (N) *N = bA->nc;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestGetSize - Returns the size of the `MATNEST` matrix.

  Not Collective

  Input Parameter:
. A - `MATNEST` matrix

  Output Parameters:
+ M - number of rows in the nested mat
- N - number of cols in the nested mat

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestGetLocalISs()`,
          `MatNestGetISs()`
@*/
PetscErrorCode MatNestGetSize(Mat A, PetscInt *M, PetscInt *N)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatNestGetSize_C", (Mat, PetscInt *, PetscInt *), (A, M, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Copy the global row/column index sets into caller-provided arrays;
   no reference is taken on the returned ISs */
static PetscErrorCode MatNestGetISs_Nest(Mat A, IS rows[], IS cols[])
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (rows)
    for (i = 0; i < vs->nr; i++) rows[i] = vs->isglobal.row[i];
  if (cols)
    for (i = 0; i < vs->nc; i++) cols[i] = vs->isglobal.col[i];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestGetISs - Returns the index sets partitioning the row and column spaces of a `MATNEST`

  Not Collective

  Input
Parameter:
. A - `MATNEST` matrix

  Output Parameters:
+ rows - array of row index sets
- cols - array of column index sets

  Level: advanced

  Note:
  The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetLocalISs()`,
          `MatCreateNest()`, `MatNestSetSubMats()`
@*/
PetscErrorCode MatNestGetISs(Mat A, IS rows[], IS cols[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatNestGetISs_C", (Mat, IS[], IS[]), (A, rows, cols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Copy the local row/column index sets into caller-provided arrays; individual entries may
   be NULL when a block row/column has no local-to-global map. No reference is taken. */
static PetscErrorCode MatNestGetLocalISs_Nest(Mat A, IS rows[], IS cols[])
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (rows)
    for (i = 0; i < vs->nr; i++) rows[i] = vs->islocal.row[i];
  if (cols)
    for (i = 0; i < vs->nc; i++) cols[i] = vs->islocal.col[i];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestGetLocalISs - Returns the index sets partitioning the row and column spaces of a `MATNEST`

  Not Collective

  Input Parameter:
. A - `MATNEST` matrix

  Output Parameters:
+ rows - array of row index sets (or `NULL` to ignore)
- cols - array of column index sets (or `NULL` to ignore)

  Level: advanced

  Note:
  The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetISs()`, `MatCreateNest()`,
          `MatNestSetSubMats()`, `MatNestSetSubMat()`
@*/
PetscErrorCode MatNestGetLocalISs(Mat A, IS rows[], IS cols[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatNestGetLocalISs_C", (Mat, IS[], IS[]), (A, rows, cols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Select the vector-creation routine: MatCreateVecs_Nest only for VECNEST, otherwise fall
   back to the generic implementation by clearing the function pointer */
static PetscErrorCode MatNestSetVecType_Nest(Mat A, VecType vtype)
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscStrcmp(vtype, VECNEST, &flg));
  /* In reality, this only distinguishes VECNEST and "other" */
  if (flg) A->ops->getvecs = MatCreateVecs_Nest;
  else A->ops->getvecs = (PetscErrorCode(*)(Mat, Vec *, Vec *))0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestSetVecType - Sets the type of `Vec` returned by `MatCreateVecs()`

  Not Collective

  Input Parameters:
+ A     - `MATNEST` matrix
- vtype - `VecType` to use for creating vectors

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateVecs()`, `MatCreateNest()`, `VecType`
@*/
PetscErrorCode MatNestSetVecType(Mat A, VecType vtype)
{
  PetscFunctionBegin;
  PetscTryMethod(A, "MatNestSetVecType_C", (Mat, VecType), (A, vtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Core implementation of MatNestSetSubMats(): resets the nest, takes references on the
   submatrices, propagates a common VecType, builds the row/column index sets, sets up the
   layouts, and disables operations that require congruent row/column spaces. */
static PetscErrorCode MatNestSetSubMats_Nest(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[])
{
  Mat_Nest *s = (Mat_Nest *)A->data;
  PetscInt  i, j, m, n, M, N;
  PetscBool cong, isstd, sametype = PETSC_FALSE;
  VecType   vtype, type;

  PetscFunctionBegin;
  PetscCall(MatReset_Nest(A));

  s->nr = nr;
  s->nc = nc;

  /* Create space for submatrices: one contiguous nr*nc slab with row pointers into it */
  PetscCall(PetscMalloc1(nr, &s->m));
  PetscCall(PetscMalloc1(nr * nc, &s->m[0]));
  for (i = 0; i < nr; i++) {
    s->m[i] = s->m[0] + i * nc;
    for (j = 0; j < nc; j++) {
      s->m[i][j] = a[i * nc + j];
      if (a[i * nc + j]) PetscCall(PetscObjectReference((PetscObject)a[i * nc + j]));
    }
  }
  PetscCall(MatGetVecType(A, &vtype));
  PetscCall(PetscStrcmp(vtype, VECSTANDARD, &isstd));
  if (isstd) {
    /* check if all blocks have the same vectype */
    vtype = NULL;
    for (i = 0; i < nr; i++) {
      for (j = 0; j < nc; j++) {
        if (a[i * nc + j]) {
          if (!vtype) { /* first visited block */
            PetscCall(MatGetVecType(a[i * nc + j], &vtype));
            sametype = PETSC_TRUE;
          } else if (sametype) {
            PetscCall(MatGetVecType(a[i * nc + j], &type));
            PetscCall(PetscStrcmp(vtype, type, &sametype));
          }
        }
      }
    }
    if (sametype) { /* propagate vectype */
      PetscCall(MatSetVecType(A, vtype));
    }
  }

  PetscCall(MatSetUp_NestIS_Private(A, nr, is_row, nc, is_col));

  /* row_len/col_len are lazily computed elsewhere; -1 marks "unknown" */
  PetscCall(PetscMalloc1(nr, &s->row_len));
  PetscCall(PetscMalloc1(nc, &s->col_len));
  for (i = 0; i < nr; i++) s->row_len[i] = -1;
  for (j = 0; j < nc; j++) s->col_len[j] = -1;

  /* record per-block nonzero states so later edits can detect pattern changes */
  PetscCall(PetscCalloc1(nr * nc, &s->nnzstate));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (s->m[i][j]) PetscCall(MatGetNonzeroState(s->m[i][j], &s->nnzstate[i * nc + j]));
    }
  }

  PetscCall(MatNestGetSizes_Private(A, &m, &n, &M, &N));

  PetscCall(PetscLayoutSetSize(A->rmap, M));
  PetscCall(PetscLayoutSetLocalSize(A->rmap, m));
  PetscCall(PetscLayoutSetSize(A->cmap, N));
  PetscCall(PetscLayoutSetLocalSize(A->cmap, n));

  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));

  /* disable operations that are not supported for non-square matrices,
     or matrices for which is_row != is_col */
  PetscCall(MatHasCongruentLayouts(A, &cong));
  if (cong && nr != nc) cong = PETSC_FALSE;
  if (cong) {
    for (i = 0; cong && i < nr; i++) PetscCall(ISEqualUnsorted(s->isglobal.row[i], s->isglobal.col[i], &cong));
  }
  if (!cong) {
    A->ops->missingdiagonal = NULL;
    A->ops->getdiagonal     = NULL;
    A->ops->shift           = NULL;
    A->ops->diagonalset     = NULL;
  }

  PetscCall(PetscCalloc2(nr, &s->left, nc, &s->right));
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestSetSubMats - Sets the nested submatrices in a `MATNEST`

  Collective

  Input Parameters:
+ A      - `MATNEST` matrix
. nr     - number of nested row blocks
. is_row - index sets for each nested row block, or `NULL` to make contiguous
. nc     - number of nested column blocks
. is_col - index sets for each nested column block, or `NULL` to make contiguous
- a      - array of nr*nc submatrices, empty submatrices can be passed using `NULL`

  Level: advanced

  Notes:
  This always resets any submatrix information previously set

  In both C and Fortran, `a` must be a row-major order array containing the matrices. See
  `MatCreateNest()` for an example.
1467 1468 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`, `MatNestGetSubMats()` 1469 @*/ 1470 PetscErrorCode MatNestSetSubMats(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[]) 1471 { 1472 PetscInt i; 1473 1474 PetscFunctionBegin; 1475 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1476 PetscCheck(nr >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of rows cannot be negative"); 1477 if (nr && is_row) { 1478 PetscAssertPointer(is_row, 3); 1479 for (i = 0; i < nr; i++) PetscValidHeaderSpecific(is_row[i], IS_CLASSID, 3); 1480 } 1481 PetscCheck(nc >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of columns cannot be negative"); 1482 if (nc && is_col) { 1483 PetscAssertPointer(is_col, 5); 1484 for (i = 0; i < nc; i++) PetscValidHeaderSpecific(is_col[i], IS_CLASSID, 5); 1485 } 1486 if (nr * nc > 0) PetscAssertPointer(a, 6); 1487 PetscUseMethod(A, "MatNestSetSubMats_C", (Mat, PetscInt, const IS[], PetscInt, const IS[], const Mat[]), (A, nr, is_row, nc, is_col, a)); 1488 PetscFunctionReturn(PETSC_SUCCESS); 1489 } 1490 1491 static PetscErrorCode MatNestCreateAggregateL2G_Private(Mat A, PetscInt n, const IS islocal[], const IS isglobal[], PetscBool colflg, ISLocalToGlobalMapping *ltog) 1492 { 1493 PetscBool flg; 1494 PetscInt i, j, m, mi, *ix; 1495 1496 PetscFunctionBegin; 1497 *ltog = NULL; 1498 for (i = 0, m = 0, flg = PETSC_FALSE; i < n; i++) { 1499 if (islocal[i]) { 1500 PetscCall(ISGetLocalSize(islocal[i], &mi)); 1501 flg = PETSC_TRUE; /* We found a non-trivial entry */ 1502 } else { 1503 PetscCall(ISGetLocalSize(isglobal[i], &mi)); 1504 } 1505 m += mi; 1506 } 1507 if (!flg) PetscFunctionReturn(PETSC_SUCCESS); 1508 1509 PetscCall(PetscMalloc1(m, &ix)); 1510 for (i = 0, m = 0; i < n; i++) { 1511 ISLocalToGlobalMapping smap = NULL; 1512 Mat sub = NULL; 1513 PetscSF sf; 1514 PetscLayout map; 1515 const PetscInt *ix2; 1516 1517 if 
(!colflg) { 1518 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1519 } else { 1520 PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub)); 1521 } 1522 if (sub) { 1523 if (!colflg) { 1524 PetscCall(MatGetLocalToGlobalMapping(sub, &smap, NULL)); 1525 } else { 1526 PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &smap)); 1527 } 1528 } 1529 /* 1530 Now we need to extract the monolithic global indices that correspond to the given split global indices. 1531 In many/most cases, we only want MatGetLocalSubMatrix() to work, in which case we only need to know the size of the local spaces. 1532 */ 1533 PetscCall(ISGetIndices(isglobal[i], &ix2)); 1534 if (islocal[i]) { 1535 PetscInt *ilocal, *iremote; 1536 PetscInt mil, nleaves; 1537 1538 PetscCall(ISGetLocalSize(islocal[i], &mi)); 1539 PetscCheck(smap, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing local to global map"); 1540 for (j = 0; j < mi; j++) ix[m + j] = j; 1541 PetscCall(ISLocalToGlobalMappingApply(smap, mi, ix + m, ix + m)); 1542 1543 /* PetscSFSetGraphLayout does not like negative indices */ 1544 PetscCall(PetscMalloc2(mi, &ilocal, mi, &iremote)); 1545 for (j = 0, nleaves = 0; j < mi; j++) { 1546 if (ix[m + j] < 0) continue; 1547 ilocal[nleaves] = j; 1548 iremote[nleaves] = ix[m + j]; 1549 nleaves++; 1550 } 1551 PetscCall(ISGetLocalSize(isglobal[i], &mil)); 1552 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &sf)); 1553 PetscCall(PetscLayoutCreate(PetscObjectComm((PetscObject)A), &map)); 1554 PetscCall(PetscLayoutSetLocalSize(map, mil)); 1555 PetscCall(PetscLayoutSetUp(map)); 1556 PetscCall(PetscSFSetGraphLayout(sf, map, nleaves, ilocal, PETSC_USE_POINTER, iremote)); 1557 PetscCall(PetscLayoutDestroy(&map)); 1558 PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1559 PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1560 PetscCall(PetscSFDestroy(&sf)); 1561 PetscCall(PetscFree2(ilocal, iremote)); 1562 } else { 1563 PetscCall(ISGetLocalSize(isglobal[i], 
&mi)); 1564 for (j = 0; j < mi; j++) ix[m + j] = ix2[i]; 1565 } 1566 PetscCall(ISRestoreIndices(isglobal[i], &ix2)); 1567 m += mi; 1568 } 1569 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, m, ix, PETSC_OWN_POINTER, ltog)); 1570 PetscFunctionReturn(PETSC_SUCCESS); 1571 } 1572 1573 /* If an IS was provided, there is nothing Nest needs to do, otherwise Nest will build a strided IS */ 1574 /* 1575 nprocessors = NP 1576 Nest x^T = ((g_0,g_1,...g_nprocs-1), (h_0,h_1,...h_NP-1)) 1577 proc 0: => (g_0,h_0,) 1578 proc 1: => (g_1,h_1,) 1579 ... 1580 proc nprocs-1: => (g_NP-1,h_NP-1,) 1581 1582 proc 0: proc 1: proc nprocs-1: 1583 is[0] = (0,1,2,...,nlocal(g_0)-1) (0,1,...,nlocal(g_1)-1) (0,1,...,nlocal(g_NP-1)) 1584 1585 proc 0: 1586 is[1] = (nlocal(g_0),nlocal(g_0)+1,...,nlocal(g_0)+nlocal(h_0)-1) 1587 proc 1: 1588 is[1] = (nlocal(g_1),nlocal(g_1)+1,...,nlocal(g_1)+nlocal(h_1)-1) 1589 1590 proc NP-1: 1591 is[1] = (nlocal(g_NP-1),nlocal(g_NP-1)+1,...,nlocal(g_NP-1)+nlocal(h_NP-1)-1) 1592 */ 1593 static PetscErrorCode MatSetUp_NestIS_Private(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[]) 1594 { 1595 Mat_Nest *vs = (Mat_Nest *)A->data; 1596 PetscInt i, j, offset, n, nsum, bs; 1597 Mat sub = NULL; 1598 1599 PetscFunctionBegin; 1600 PetscCall(PetscMalloc1(nr, &vs->isglobal.row)); 1601 PetscCall(PetscMalloc1(nc, &vs->isglobal.col)); 1602 if (is_row) { /* valid IS is passed in */ 1603 /* refs on is[] are incremented */ 1604 for (i = 0; i < vs->nr; i++) { 1605 PetscCall(PetscObjectReference((PetscObject)is_row[i])); 1606 1607 vs->isglobal.row[i] = is_row[i]; 1608 } 1609 } else { /* Create the ISs by inspecting sizes of a submatrix in each row */ 1610 nsum = 0; 1611 for (i = 0; i < vs->nr; i++) { /* Add up the local sizes to compute the aggregate offset */ 1612 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1613 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in row %" PetscInt_FMT, 
i); 1614 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1615 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1616 nsum += n; 1617 } 1618 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1619 offset -= nsum; 1620 for (i = 0; i < vs->nr; i++) { 1621 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1622 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1623 PetscCall(MatGetBlockSizes(sub, &bs, NULL)); 1624 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.row[i])); 1625 PetscCall(ISSetBlockSize(vs->isglobal.row[i], bs)); 1626 offset += n; 1627 } 1628 } 1629 1630 if (is_col) { /* valid IS is passed in */ 1631 /* refs on is[] are incremented */ 1632 for (j = 0; j < vs->nc; j++) { 1633 PetscCall(PetscObjectReference((PetscObject)is_col[j])); 1634 1635 vs->isglobal.col[j] = is_col[j]; 1636 } 1637 } else { /* Create the ISs by inspecting sizes of a submatrix in each column */ 1638 offset = A->cmap->rstart; 1639 nsum = 0; 1640 for (j = 0; j < vs->nc; j++) { 1641 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1642 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in column %" PetscInt_FMT, i); 1643 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1644 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1645 nsum += n; 1646 } 1647 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1648 offset -= nsum; 1649 for (j = 0; j < vs->nc; j++) { 1650 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1651 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1652 PetscCall(MatGetBlockSizes(sub, NULL, &bs)); 1653 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.col[j])); 1654 PetscCall(ISSetBlockSize(vs->isglobal.col[j], bs)); 1655 offset += n; 1656 } 
  }

  /* Set up the local ISs: each local IS is a contiguous stride-1 block in the
     concatenated local space; `offset` accumulates the local sizes so far.
     A row/col with no nonzero submatrix gets a NULL local IS and contributes 0. */
  PetscCall(PetscMalloc1(vs->nr, &vs->islocal.row));
  PetscCall(PetscMalloc1(vs->nc, &vs->islocal.col));
  for (i = 0, offset = 0; i < vs->nr; i++) {
    IS                     isloc;
    ISLocalToGlobalMapping rmap = NULL;
    PetscInt               nlocal, bs;
    PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub));
    if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, &rmap, NULL));
    if (rmap) {
      /* local size and block size are taken from the first nonzero submatrix in this block row */
      PetscCall(MatGetBlockSizes(sub, &bs, NULL));
      PetscCall(ISLocalToGlobalMappingGetSize(rmap, &nlocal));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc));
      PetscCall(ISSetBlockSize(isloc, bs));
    } else {
      nlocal = 0;
      isloc  = NULL;
    }
    vs->islocal.row[i] = isloc;
    offset += nlocal;
  }
  for (i = 0, offset = 0; i < vs->nc; i++) {
    IS                     isloc;
    ISLocalToGlobalMapping cmap = NULL;
    PetscInt               nlocal, bs;
    PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub));
    if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &cmap));
    if (cmap) {
      PetscCall(MatGetBlockSizes(sub, NULL, &bs));
      PetscCall(ISLocalToGlobalMappingGetSize(cmap, &nlocal));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc));
      PetscCall(ISSetBlockSize(isloc, bs));
    } else {
      nlocal = 0;
      isloc  = NULL;
    }
    vs->islocal.col[i] = isloc;
    offset += nlocal;
  }

  /* Set up the aggregate ISLocalToGlobalMapping for the whole nest matrix,
     built from the per-block local and global index sets */
  {
    ISLocalToGlobalMapping rmap, cmap;
    PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nr, vs->islocal.row, vs->isglobal.row, PETSC_FALSE, &rmap));
    PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nc, vs->islocal.col, vs->isglobal.col, PETSC_TRUE, &cmap));
    /* only install the mapping if both row and column aggregates could be built */
    if (rmap && cmap) PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap));
    PetscCall(ISLocalToGlobalMappingDestroy(&rmap));
    PetscCall(ISLocalToGlobalMappingDestroy(&cmap));
  }

  /* Debug-only consistency check: every submatrix's global/local sizes must
     match the sizes of the index sets defining its block row and column */
  if (PetscDefined(USE_DEBUG)) {
    for (i = 0; i < vs->nr; i++) {
      for (j = 0; j < vs->nc; j++) {
        PetscInt m, n, M, N, mi, ni, Mi, Ni;
        Mat      B = vs->m[i][j];
        if (!B) continue;
        PetscCall(MatGetSize(B, &M, &N));
        PetscCall(MatGetLocalSize(B, &m, &n));
        PetscCall(ISGetSize(vs->isglobal.row[i], &Mi));
        PetscCall(ISGetSize(vs->isglobal.col[j], &Ni));
        PetscCall(ISGetLocalSize(vs->isglobal.row[i], &mi));
        PetscCall(ISGetLocalSize(vs->isglobal.col[j], &ni));
        PetscCheck(M == Mi && N == Ni, PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Global sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", M, N, i, j, Mi, Ni);
        PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Local sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, i, j, mi, ni);
      }
    }
  }

  /* Set A->assembled if all non-null blocks are currently assembled */
  for (i = 0; i < vs->nr; i++) {
    for (j = 0; j < vs->nc; j++) {
      if (vs->m[i][j] && !vs->m[i][j]->assembled) PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  A->assembled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateNest - Creates a new `MATNEST` matrix containing several nested submatrices, each stored separately

  Collective

  Input Parameters:
+ comm   - Communicator for the new `MATNEST`
. nr     - number of nested row blocks
. is_row - index sets for each nested row block, or `NULL` to make contiguous
. nc     - number of nested column blocks
. is_col - index sets for each nested column block, or `NULL` to make contiguous
- a      - array of nr*nc submatrices, empty submatrices can be passed using `NULL`

  Output Parameter:
. B - new matrix

  Note:
  In both C and Fortran, `a` must be a row-major order array holding references to the matrices.
  For instance, to represent the matrix
  $\begin{bmatrix} A_{11} & A_{12} \\ A_{21} & A_{22}\end{bmatrix}$
  one should use `Mat a[4]={A11,A12,A21,A22}`.

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `VecCreateNest()`, `DMCreateMatrix()`, `MatNestSetSubMat()`,
          `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatNestGetSize()`,
          `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()`
@*/
PetscErrorCode MatCreateNest(MPI_Comm comm, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[], Mat *B)
{
  Mat A;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetType(A, MATNEST));
  /* MATNEST needs no preallocation phase; mark preallocated so setup proceeds */
  A->preallocated = PETSC_TRUE;
  PetscCall(MatNestSetSubMats(A, nr, is_row, nc, is_col, a));
  *B = A;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Fast sequential conversion Nest -> SeqAIJ: valid only when every nonzero block is
   SeqAIJ (possibly wrapped in a (Hermitian-)transpose virtual matrix) and the row/col
   index sets are contiguous stride-1 blocks; the caller (MatConvert_Nest_AIJ) verifies
   these preconditions before dispatching here. The merged CSR is assembled directly
   from the blocks' CSR data without MatSetValues. */
static PetscErrorCode MatConvert_Nest_SeqAIJ_fast(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat_Nest     *nest = (Mat_Nest *)A->data;
  Mat          *trans;       /* explicit transposes materialized from virtual-transpose blocks */
  PetscScalar **avv;         /* per-block value arrays */
  PetscScalar  *vv;          /* merged value array of the result */
  PetscInt    **aii, **ajj;  /* per-block CSR row pointers / column indices */
  PetscInt     *ii, *jj, *ci;
  PetscInt      nr, nc, nnz, i, j;
  PetscBool     done;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &nr, &nc));
  if (reuse == MAT_REUSE_MATRIX) {
    PetscInt rnr;

    /* reuse the existing matrix's CSR arrays in place; NOTE(review): this path takes the
       arrays via MatGetRowIJ without a matching MatRestoreRowIJ later — appears intentional
       since the arrays are refilled in place, but confirm against upstream */
    PetscCall(MatGetRowIJ(*newmat, 0, PETSC_FALSE, PETSC_FALSE, &rnr, (const PetscInt **)&ii, (const PetscInt **)&jj, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "MatGetRowIJ");
    PetscCheck(rnr == nr, PetscObjectComm((PetscObject)A), PETSC_ERR_USER,
"Cannot reuse matrix, wrong number of rows"); 1798 PetscCall(MatSeqAIJGetArray(*newmat, &vv)); 1799 } 1800 /* extract CSR for nested SeqAIJ matrices */ 1801 nnz = 0; 1802 PetscCall(PetscCalloc4(nest->nr * nest->nc, &aii, nest->nr * nest->nc, &ajj, nest->nr * nest->nc, &avv, nest->nr * nest->nc, &trans)); 1803 for (i = 0; i < nest->nr; ++i) { 1804 for (j = 0; j < nest->nc; ++j) { 1805 Mat B = nest->m[i][j]; 1806 if (B) { 1807 PetscScalar *naa; 1808 PetscInt *nii, *njj, nnr; 1809 PetscBool istrans; 1810 1811 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans)); 1812 if (istrans) { 1813 Mat Bt; 1814 1815 PetscCall(MatTransposeGetMat(B, &Bt)); 1816 PetscCall(MatTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j])); 1817 B = trans[i * nest->nc + j]; 1818 } else { 1819 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans)); 1820 if (istrans) { 1821 Mat Bt; 1822 1823 PetscCall(MatHermitianTransposeGetMat(B, &Bt)); 1824 PetscCall(MatHermitianTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j])); 1825 B = trans[i * nest->nc + j]; 1826 } 1827 } 1828 PetscCall(MatGetRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&nii, (const PetscInt **)&njj, &done)); 1829 PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatGetRowIJ"); 1830 PetscCall(MatSeqAIJGetArray(B, &naa)); 1831 nnz += nii[nnr]; 1832 1833 aii[i * nest->nc + j] = nii; 1834 ajj[i * nest->nc + j] = njj; 1835 avv[i * nest->nc + j] = naa; 1836 } 1837 } 1838 } 1839 if (reuse != MAT_REUSE_MATRIX) { 1840 PetscCall(PetscMalloc1(nr + 1, &ii)); 1841 PetscCall(PetscMalloc1(nnz, &jj)); 1842 PetscCall(PetscMalloc1(nnz, &vv)); 1843 } else { 1844 PetscCheck(nnz == ii[nr], PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of nonzeros"); 1845 } 1846 1847 /* new row pointer */ 1848 PetscCall(PetscArrayzero(ii, nr + 1)); 1849 for (i = 0; i < nest->nr; ++i) { 1850 PetscInt ncr, rst; 1851 1852 
PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL)); 1853 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr)); 1854 for (j = 0; j < nest->nc; ++j) { 1855 if (aii[i * nest->nc + j]) { 1856 PetscInt *nii = aii[i * nest->nc + j]; 1857 PetscInt ir; 1858 1859 for (ir = rst; ir < ncr + rst; ++ir) { 1860 ii[ir + 1] += nii[1] - nii[0]; 1861 nii++; 1862 } 1863 } 1864 } 1865 } 1866 for (i = 0; i < nr; i++) ii[i + 1] += ii[i]; 1867 1868 /* construct CSR for the new matrix */ 1869 PetscCall(PetscCalloc1(nr, &ci)); 1870 for (i = 0; i < nest->nr; ++i) { 1871 PetscInt ncr, rst; 1872 1873 PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL)); 1874 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr)); 1875 for (j = 0; j < nest->nc; ++j) { 1876 if (aii[i * nest->nc + j]) { 1877 PetscScalar *nvv = avv[i * nest->nc + j]; 1878 PetscInt *nii = aii[i * nest->nc + j]; 1879 PetscInt *njj = ajj[i * nest->nc + j]; 1880 PetscInt ir, cst; 1881 1882 PetscCall(ISStrideGetInfo(nest->isglobal.col[j], &cst, NULL)); 1883 for (ir = rst; ir < ncr + rst; ++ir) { 1884 PetscInt ij, rsize = nii[1] - nii[0], ist = ii[ir] + ci[ir]; 1885 1886 for (ij = 0; ij < rsize; ij++) { 1887 jj[ist + ij] = *njj + cst; 1888 vv[ist + ij] = *nvv; 1889 njj++; 1890 nvv++; 1891 } 1892 ci[ir] += rsize; 1893 nii++; 1894 } 1895 } 1896 } 1897 } 1898 PetscCall(PetscFree(ci)); 1899 1900 /* restore info */ 1901 for (i = 0; i < nest->nr; ++i) { 1902 for (j = 0; j < nest->nc; ++j) { 1903 Mat B = nest->m[i][j]; 1904 if (B) { 1905 PetscInt nnr = 0, k = i * nest->nc + j; 1906 1907 B = (trans[k] ? 
trans[k] : B); 1908 PetscCall(MatRestoreRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&aii[k], (const PetscInt **)&ajj[k], &done)); 1909 PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatRestoreRowIJ"); 1910 PetscCall(MatSeqAIJRestoreArray(B, &avv[k])); 1911 PetscCall(MatDestroy(&trans[k])); 1912 } 1913 } 1914 } 1915 PetscCall(PetscFree4(aii, ajj, avv, trans)); 1916 1917 /* finalize newmat */ 1918 if (reuse == MAT_INITIAL_MATRIX) { 1919 PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, newmat)); 1920 } else if (reuse == MAT_INPLACE_MATRIX) { 1921 Mat B; 1922 1923 PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, &B)); 1924 PetscCall(MatHeaderReplace(A, &B)); 1925 } 1926 PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY)); 1927 PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY)); 1928 { 1929 Mat_SeqAIJ *a = (Mat_SeqAIJ *)((*newmat)->data); 1930 a->free_a = PETSC_TRUE; 1931 a->free_ij = PETSC_TRUE; 1932 } 1933 PetscFunctionReturn(PETSC_SUCCESS); 1934 } 1935 1936 PETSC_INTERN PetscErrorCode MatAXPY_Dense_Nest(Mat Y, PetscScalar a, Mat X) 1937 { 1938 Mat_Nest *nest = (Mat_Nest *)X->data; 1939 PetscInt i, j, k, rstart; 1940 PetscBool flg; 1941 1942 PetscFunctionBegin; 1943 /* Fill by row */ 1944 for (j = 0; j < nest->nc; ++j) { 1945 /* Using global column indices and ISAllGather() is not scalable. 
     */
    IS              bNis;
    PetscInt        bN;
    const PetscInt *bNindices; /* gathered global column indices of block column j */
    PetscCall(ISAllGather(nest->isglobal.col[j], &bNis));
    PetscCall(ISGetSize(bNis, &bN));
    PetscCall(ISGetIndices(bNis, &bNindices));
    for (i = 0; i < nest->nr; ++i) {
      Mat             B = nest->m[i][j], D = NULL;
      PetscInt        bm, br;
      const PetscInt *bmindices;
      if (!B) continue;
      /* virtual (Hermitian-)transpose blocks cannot be read row-by-row: convert to an
         explicit matrix D of the wrapped matrix's type first */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, ""));
      if (flg) {
        PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /* SBAIJ stores only the upper triangle; convert to BAIJ so MatGetRow sees full rows */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
      if (flg) {
        if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D));
        else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm));
      PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices));
      PetscCall(MatGetOwnershipRange(B, &rstart, NULL));
      for (br = 0; br < bm; ++br) {
        PetscInt           row = bmindices[br], brncols, *cols;
        const PetscInt    *brcols;
        const PetscScalar *brcoldata;
        PetscScalar       *vals = NULL;
        PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, &brcoldata));
        PetscCall(PetscMalloc1(brncols, &cols));
        /* map block-local column indices to Y's global columns */
        for (k = 0; k < brncols; k++) cols[k] = bNindices[brcols[k]];
        /*
          Nest blocks are required to be nonoverlapping -- otherwise nest and monolithic index layouts wouldn't match.
          Thus, we could use INSERT_VALUES, but I prefer ADD_VALUES.
        */
        if (a != 1.0) {
          PetscCall(PetscMalloc1(brncols, &vals));
          for (k = 0; k < brncols; k++) vals[k] = a * brcoldata[k];
          PetscCall(MatSetValues(Y, 1, &row, brncols, cols, vals, ADD_VALUES));
          PetscCall(PetscFree(vals));
        } else {
          /* a == 1: insert the row values directly, no scaling buffer needed */
          PetscCall(MatSetValues(Y, 1, &row, brncols, cols, brcoldata, ADD_VALUES));
        }
        PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, &brcoldata));
        PetscCall(PetscFree(cols));
      }
      if (D) PetscCall(MatDestroy(&D));
      PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices));
    }
    PetscCall(ISRestoreIndices(bNis, &bNindices));
    PetscCall(ISDestroy(&bNis));
  }
  PetscCall(MatAssemblyBegin(Y, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Y, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a MATNEST to (Seq/MPI)AIJ. On one process with all-SeqAIJ blocks and
   contiguous stride-1 index sets it dispatches to the fast CSR-merge path;
   otherwise it preallocates via PetscSF reductions and fills with MatAXPY_Dense_Nest. */
static PetscErrorCode MatConvert_Nest_AIJ(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat_Nest   *nest = (Mat_Nest *)A->data;
  PetscInt    m, n, M, N, i, j, k, *dnnz, *onnz = NULL, rstart, cstart, cend;
  PetscMPIInt size;
  Mat         C;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  if (size == 1) { /* look for a special case with SeqAIJ matrices and strided-1, contiguous, blocks */
    PetscInt  nf;
    PetscBool fast;

    PetscCall(PetscStrcmp(newtype, MATAIJ, &fast));
    if (!fast) PetscCall(PetscStrcmp(newtype, MATSEQAIJ, &fast));
    /* every nonzero block must be SeqAIJ, possibly behind a (Hermitian-)transpose virtual wrapper */
    for (i = 0; i < nest->nr && fast; ++i) {
      for (j = 0; j < nest->nc && fast; ++j) {
        Mat B = nest->m[i][j];
        if (B) {
          PetscCall(PetscObjectTypeCompare((PetscObject)B, MATSEQAIJ, &fast));
          if (!fast) {
            PetscBool istrans;

            PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans));
            if (istrans) {
              Mat Bt;

              PetscCall(MatTransposeGetMat(B, &Bt));
              PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast));
            } else {
              PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans));
              if (istrans) {
                Mat Bt;

                PetscCall(MatHermitianTransposeGetMat(B, &Bt));
                PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast));
              }
            }
          }
        }
      }
    }
    /* row index sets must be contiguous stride-1 blocks starting where the previous one ended */
    for (i = 0, nf = 0; i < nest->nr && fast; ++i) {
      PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.row[i], ISSTRIDE, &fast));
      if (fast) {
        PetscInt f, s;

        PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &f, &s));
        if (f != nf || s != 1) {
          fast = PETSC_FALSE;
        } else {
          PetscCall(ISGetSize(nest->isglobal.row[i], &f));
          nf += f;
        }
      }
    }
    /* same contiguity requirement for the column index sets */
    for (i = 0, nf = 0; i < nest->nc && fast; ++i) {
      PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.col[i], ISSTRIDE, &fast));
      if (fast) {
        PetscInt f, s;

        PetscCall(ISStrideGetInfo(nest->isglobal.col[i], &f, &s));
        if (f != nf || s != 1) {
          fast = PETSC_FALSE;
        } else {
          PetscCall(ISGetSize(nest->isglobal.col[i], &f));
          nf += f;
        }
      }
    }
    if (fast) {
      PetscCall(MatConvert_Nest_SeqAIJ_fast(A, newtype, reuse, newmat));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, &n));
  PetscCall(MatGetOwnershipRangeColumn(A, &cstart, &cend));
  if (reuse == MAT_REUSE_MATRIX) C = *newmat;
  else {
    PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
    PetscCall(MatSetType(C, newtype));
    PetscCall(MatSetSizes(C, m, n, M, N));
  }
  /* dnnz and onnz share one allocation: diagonal-block and off-diagonal-block
     nonzero counts per local row, for AIJ preallocation */
  PetscCall(PetscMalloc1(2 * m, &dnnz));
  if (m) {
    onnz = dnnz + m;
    for (k = 0; k < m; k++) {
      dnnz[k] = 0;
      onnz[k] = 0;
    }
  }
  for (j = 0; j < nest->nc; ++j) {
    IS              bNis;
    PetscInt        bN;
    const PetscInt *bNindices;
    PetscBool       flg;
    /* Using global column indices and ISAllGather() is not scalable. */
    PetscCall(ISAllGather(nest->isglobal.col[j], &bNis));
    PetscCall(ISGetSize(bNis, &bN));
    PetscCall(ISGetIndices(bNis, &bNindices));
    for (i = 0; i < nest->nr; ++i) {
      PetscSF         bmsf;
      PetscSFNode    *iremote;
      Mat             B = nest->m[i][j], D = NULL;
      PetscInt        bm, *sub_dnnz, *sub_onnz, br;
      const PetscInt *bmindices;
      if (!B) continue;
      PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm));
      PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices));
      PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &bmsf));
      PetscCall(PetscMalloc1(bm, &iremote));
      PetscCall(PetscMalloc1(bm, &sub_dnnz));
      PetscCall(PetscMalloc1(bm, &sub_onnz));
      for (k = 0; k < bm; ++k) {
        sub_dnnz[k] = 0;
        sub_onnz[k] = 0;
      }
      /* materialize virtual (Hermitian-)transpose blocks so rows can be read */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, ""));
      if (flg) {
        PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /* SBAIJ rows only expose the upper triangle; convert to BAIJ for full rows */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
      if (flg) {
        if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D));
        else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /*
        Locate the owners for all of the locally-owned global row indices for this row block.
        These determine the roots of PetscSF used to communicate preallocation data to row owners.
        The roots correspond to the dnnz and onnz entries; thus, there are two roots per row.
      */
      PetscCall(MatGetOwnershipRange(B, &rstart, NULL));
      for (br = 0; br < bm; ++br) {
        PetscInt        row = bmindices[br], brncols, col;
        const PetscInt *brcols;
        PetscInt        rowrel = 0; /* row's relative index on its owner rank */
        PetscMPIInt     rowowner = 0;
        PetscCall(PetscLayoutFindOwnerIndex(A->rmap, row, &rowowner, &rowrel));
        /* how many roots */
        iremote[br].rank  = rowowner;
        iremote[br].index = rowrel; /* edge from bmdnnz to dnnz */
        /* get nonzero pattern */
        PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, NULL));
        for (k = 0; k < brncols; k++) {
          col = bNindices[brcols[k]];
          /* diagonal block iff the column lies in the owner rank's column range */
          if (col >= A->cmap->range[rowowner] && col < A->cmap->range[rowowner + 1]) {
            sub_dnnz[br]++;
          } else {
            sub_onnz[br]++;
          }
        }
        PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, NULL));
      }
      if (D) PetscCall(MatDestroy(&D));
      PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices));
      /* bsf will have to take care of disposing of bedges. */
      PetscCall(PetscSFSetGraph(bmsf, m, bm, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
      /* sum this block's per-row counts into the owner ranks' dnnz/onnz arrays */
      PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM));
      PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM));
      PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM));
      PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM));
      PetscCall(PetscFree(sub_dnnz));
      PetscCall(PetscFree(sub_onnz));
      PetscCall(PetscSFDestroy(&bmsf));
    }
    PetscCall(ISRestoreIndices(bNis, &bNindices));
    PetscCall(ISDestroy(&bNis));
  }
  /* Resize preallocation if overestimated */
  for (i = 0; i < m; i++) {
    dnnz[i] = PetscMin(dnnz[i], A->cmap->n);
    onnz[i] = PetscMin(onnz[i], A->cmap->N - A->cmap->n);
  }
  /* only one of these takes effect depending on whether C is Seq or MPI AIJ */
  PetscCall(MatSeqAIJSetPreallocation(C, 0, dnnz));
  PetscCall(MatMPIAIJSetPreallocation(C, 0, dnnz, 0, onnz));
  PetscCall(PetscFree(dnnz));
  PetscCall(MatAXPY_Dense_Nest(C, 1.0, A));
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &C));
  } else *newmat = C;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a MATNEST to a dense matrix by zeroing (or creating) the target and
   accumulating all blocks with MatAXPY_Dense_Nest. */
static PetscErrorCode MatConvert_Nest_Dense(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat      B;
  PetscInt m, n, M, N;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, &n));
  if (reuse == MAT_REUSE_MATRIX) {
    B = *newmat;
    PetscCall(MatZeroEntries(B));
  } else {
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), m, PETSC_DECIDE, M, N, NULL, &B));
  }
  PetscCall(MatAXPY_Dense_Nest(B, 1.0, A));
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else if (reuse == MAT_INITIAL_MATRIX) *newmat = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A nest matrix supports a mult-type operation only if every nonzero block supports
   the corresponding "add" variant (the nest implementations are built on MatMultAdd etc.) */
static PetscErrorCode MatHasOperation_Nest(Mat mat, MatOperation op, PetscBool *has)
{
  Mat_Nest    *bA =
    (Mat_Nest *)mat->data;
  MatOperation opAdd;
  PetscInt     i, j, nr = bA->nr, nc = bA->nc;
  PetscBool    flg;
  PetscFunctionBegin;

  *has = PETSC_FALSE;
  if (op == MATOP_MULT || op == MATOP_MULT_ADD || op == MATOP_MULT_TRANSPOSE || op == MATOP_MULT_TRANSPOSE_ADD) {
    /* the nest mult kernels call MatMultAdd/MatMultTransposeAdd on each block,
       so all nonzero blocks must support the add variant */
    opAdd = (op == MATOP_MULT || op == MATOP_MULT_ADD ? MATOP_MULT_ADD : MATOP_MULT_TRANSPOSE_ADD);
    for (j = 0; j < nc; j++) {
      for (i = 0; i < nr; i++) {
        if (!bA->m[i][j]) continue;
        PetscCall(MatHasOperation(bA->m[i][j], opAdd, &flg));
        if (!flg) PetscFunctionReturn(PETSC_SUCCESS);
      }
    }
  }
  if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATNEST - "nest" - Matrix type consisting of nested submatrices, each stored separately.

  Level: intermediate

  Notes:
  This matrix type permits scalable use of `PCFIELDSPLIT` and avoids the large memory costs of extracting submatrices.
  It allows the use of symmetric and block formats for parts of multi-physics simulations.
  It is usually used with `DMCOMPOSITE` and `DMCreateMatrix()`

  Each of the submatrices lives on the same MPI communicator as the original nest matrix (though they can have zero
  rows/columns on some processes.) Thus this is not meant for cases where the submatrices live on far fewer processes
  than the nest matrix.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `MatType`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`,
          `VecCreateNest()`, `DMCreateMatrix()`, `DMCOMPOSITE`, `MatNestSetVecType()`, `MatNestGetLocalISs()`,
          `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()`
M*/
/* Type constructor for MATNEST: allocates the Mat_Nest data, installs the function
   table, and composes the Nest-specific and conversion methods on the object. */
PETSC_EXTERN PetscErrorCode MatCreate_Nest(Mat A)
{
  Mat_Nest *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  A->data = (void *)s;

  /* nr/nc == -1 marks the submatrix layout as not yet set (see MatNestSetSubMats) */
  s->nr            = -1;
  s->nc            = -1;
  s->m             = NULL;
  s->splitassembly = PETSC_FALSE;

  PetscCall(PetscMemzero(A->ops, sizeof(*A->ops)));

  A->ops->mult                      = MatMult_Nest;
  A->ops->multadd                   = MatMultAdd_Nest;
  A->ops->multtranspose             = MatMultTranspose_Nest;
  A->ops->multtransposeadd          = MatMultTransposeAdd_Nest;
  A->ops->transpose                 = MatTranspose_Nest;
  A->ops->multhermitiantranspose    = MatMultHermitianTranspose_Nest;
  A->ops->multhermitiantransposeadd = MatMultHermitianTransposeAdd_Nest;
  A->ops->assemblybegin             = MatAssemblyBegin_Nest;
  A->ops->assemblyend               = MatAssemblyEnd_Nest;
  A->ops->zeroentries               = MatZeroEntries_Nest;
  A->ops->copy                      = MatCopy_Nest;
  A->ops->axpy                      = MatAXPY_Nest;
  A->ops->duplicate                 = MatDuplicate_Nest;
  A->ops->createsubmatrix           = MatCreateSubMatrix_Nest;
  A->ops->destroy                   = MatDestroy_Nest;
  A->ops->view                      = MatView_Nest;
  A->ops->getvecs                   = NULL; /* Use VECNEST by calling MatNestSetVecType(A,VECNEST) */
  A->ops->getlocalsubmatrix         = MatGetLocalSubMatrix_Nest;
  A->ops->restorelocalsubmatrix     = MatRestoreLocalSubMatrix_Nest;
  A->ops->getdiagonal               = MatGetDiagonal_Nest;
  A->ops->diagonalscale             = MatDiagonalScale_Nest;
  A->ops->scale                     = MatScale_Nest;
  A->ops->shift                     = MatShift_Nest;
  A->ops->diagonalset               = MatDiagonalSet_Nest;
  A->ops->setrandom                 = MatSetRandom_Nest;
  A->ops->hasoperation              = MatHasOperation_Nest;
  A->ops->missingdiagonal           = MatMissingDiagonal_Nest;

  A->spptr     = NULL;
  A->assembled = PETSC_FALSE;

  /* expose Nest api's */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", MatNestGetSubMat_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", MatNestSetSubMat_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", MatNestGetSubMats_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", MatNestGetSize_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", MatNestGetISs_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", MatNestGetLocalISs_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", MatNestSetVecType_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", MatNestSetSubMats_Nest));
  /* conversion hooks looked up by MatConvert() based on target type name */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", MatConvert_Nest_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", MatConvert_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", MatConvert_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", MatProductSetFromOptions_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", MatProductSetFromOptions_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", MatProductSetFromOptions_Nest_Dense));

  PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATNEST));
  PetscFunctionReturn(PETSC_SUCCESS);
}