1 #include <../src/mat/impls/nest/matnestimpl.h> /*I "petscmat.h" I*/ 2 #include <../src/mat/impls/aij/seq/aij.h> 3 #include <petscsf.h> 4 5 static PetscErrorCode MatSetUp_NestIS_Private(Mat, PetscInt, const IS[], PetscInt, const IS[]); 6 static PetscErrorCode MatCreateVecs_Nest(Mat, Vec *, Vec *); 7 static PetscErrorCode MatReset_Nest(Mat); 8 9 PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat, MatType, MatReuse, Mat *); 10 11 /* private functions */ 12 static PetscErrorCode MatNestGetSizes_Private(Mat A, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N) 13 { 14 Mat_Nest *bA = (Mat_Nest *)A->data; 15 PetscInt i, j; 16 17 PetscFunctionBegin; 18 *m = *n = *M = *N = 0; 19 for (i = 0; i < bA->nr; i++) { /* rows */ 20 PetscInt sm, sM; 21 PetscCall(ISGetLocalSize(bA->isglobal.row[i], &sm)); 22 PetscCall(ISGetSize(bA->isglobal.row[i], &sM)); 23 *m += sm; 24 *M += sM; 25 } 26 for (j = 0; j < bA->nc; j++) { /* cols */ 27 PetscInt sn, sN; 28 PetscCall(ISGetLocalSize(bA->isglobal.col[j], &sn)); 29 PetscCall(ISGetSize(bA->isglobal.col[j], &sN)); 30 *n += sn; 31 *N += sN; 32 } 33 PetscFunctionReturn(PETSC_SUCCESS); 34 } 35 36 /* operations */ 37 static PetscErrorCode MatMult_Nest(Mat A, Vec x, Vec y) 38 { 39 Mat_Nest *bA = (Mat_Nest *)A->data; 40 Vec *bx = bA->right, *by = bA->left; 41 PetscInt i, j, nr = bA->nr, nc = bA->nc; 42 43 PetscFunctionBegin; 44 for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by[i])); 45 for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i])); 46 for (i = 0; i < nr; i++) { 47 PetscCall(VecZeroEntries(by[i])); 48 for (j = 0; j < nc; j++) { 49 if (!bA->m[i][j]) continue; 50 /* y[i] <- y[i] + A[i][j] * x[j] */ 51 PetscCall(MatMultAdd(bA->m[i][j], bx[j], by[i], by[i])); 52 } 53 } 54 for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by[i])); 55 for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i])); 56 
PetscFunctionReturn(PETSC_SUCCESS); 57 } 58 59 static PetscErrorCode MatMultAdd_Nest(Mat A, Vec x, Vec y, Vec z) 60 { 61 Mat_Nest *bA = (Mat_Nest *)A->data; 62 Vec *bx = bA->right, *bz = bA->left; 63 PetscInt i, j, nr = bA->nr, nc = bA->nc; 64 65 PetscFunctionBegin; 66 for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(z, bA->isglobal.row[i], &bz[i])); 67 for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i])); 68 for (i = 0; i < nr; i++) { 69 if (y != z) { 70 Vec by; 71 PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by)); 72 PetscCall(VecCopy(by, bz[i])); 73 PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by)); 74 } 75 for (j = 0; j < nc; j++) { 76 if (!bA->m[i][j]) continue; 77 /* y[i] <- y[i] + A[i][j] * x[j] */ 78 PetscCall(MatMultAdd(bA->m[i][j], bx[j], bz[i], bz[i])); 79 } 80 } 81 for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.row[i], &bz[i])); 82 for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i])); 83 PetscFunctionReturn(PETSC_SUCCESS); 84 } 85 86 typedef struct { 87 Mat *workC; /* array of Mat with specific containers depending on the underlying MatMatMult implementation */ 88 PetscScalar *tarray; /* buffer for storing all temporary products A[i][j] B[j] */ 89 PetscInt *dm, *dn, k; /* displacements and number of submatrices */ 90 } Nest_Dense; 91 92 static PetscErrorCode MatProductNumeric_Nest_Dense(Mat C) 93 { 94 Mat_Nest *bA; 95 Nest_Dense *contents; 96 Mat viewB, viewC, productB, workC; 97 const PetscScalar *barray; 98 PetscScalar *carray; 99 PetscInt i, j, M, N, nr, nc, ldb, ldc; 100 Mat A, B; 101 102 PetscFunctionBegin; 103 MatCheckProduct(C, 1); 104 A = C->product->A; 105 B = C->product->B; 106 PetscCall(MatGetSize(B, NULL, &N)); 107 if (!N) { 108 PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY)); 109 PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY)); 110 PetscFunctionReturn(PETSC_SUCCESS); 111 } 112 contents = (Nest_Dense *)C->product->data; 113 
PetscCheck(contents, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data empty"); 114 bA = (Mat_Nest *)A->data; 115 nr = bA->nr; 116 nc = bA->nc; 117 PetscCall(MatDenseGetLDA(B, &ldb)); 118 PetscCall(MatDenseGetLDA(C, &ldc)); 119 PetscCall(MatZeroEntries(C)); 120 PetscCall(MatDenseGetArrayRead(B, &barray)); 121 PetscCall(MatDenseGetArray(C, &carray)); 122 for (i = 0; i < nr; i++) { 123 PetscCall(ISGetSize(bA->isglobal.row[i], &M)); 124 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dm[i + 1] - contents->dm[i], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset(carray, contents->dm[i]), &viewC)); 125 PetscCall(MatDenseSetLDA(viewC, ldc)); 126 for (j = 0; j < nc; j++) { 127 if (!bA->m[i][j]) continue; 128 PetscCall(ISGetSize(bA->isglobal.col[j], &M)); 129 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset((PetscScalar *)barray, contents->dn[j]), &viewB)); 130 PetscCall(MatDenseSetLDA(viewB, ldb)); 131 132 /* MatMatMultNumeric(bA->m[i][j],viewB,contents->workC[i*nc + j]); */ 133 workC = contents->workC[i * nc + j]; 134 productB = workC->product->B; 135 workC->product->B = viewB; /* use newly created dense matrix viewB */ 136 PetscCall(MatProductNumeric(workC)); 137 PetscCall(MatDestroy(&viewB)); 138 workC->product->B = productB; /* resume original B */ 139 140 /* C[i] <- workC + C[i] */ 141 PetscCall(MatAXPY(viewC, 1.0, contents->workC[i * nc + j], SAME_NONZERO_PATTERN)); 142 } 143 PetscCall(MatDestroy(&viewC)); 144 } 145 PetscCall(MatDenseRestoreArray(C, &carray)); 146 PetscCall(MatDenseRestoreArrayRead(B, &barray)); 147 148 PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE)); 149 PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY)); 150 PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY)); 151 PetscFunctionReturn(PETSC_SUCCESS); 152 } 153 154 static PetscErrorCode MatNest_DenseDestroy(void *ctx) 155 { 156 Nest_Dense *contents = (Nest_Dense 
*)ctx; 157 PetscInt i; 158 159 PetscFunctionBegin; 160 PetscCall(PetscFree(contents->tarray)); 161 for (i = 0; i < contents->k; i++) PetscCall(MatDestroy(contents->workC + i)); 162 PetscCall(PetscFree3(contents->dm, contents->dn, contents->workC)); 163 PetscCall(PetscFree(contents)); 164 PetscFunctionReturn(PETSC_SUCCESS); 165 } 166 167 static PetscErrorCode MatProductSymbolic_Nest_Dense(Mat C) 168 { 169 Mat_Nest *bA; 170 Mat viewB, workC; 171 const PetscScalar *barray; 172 PetscInt i, j, M, N, m, n, nr, nc, maxm = 0, ldb; 173 Nest_Dense *contents = NULL; 174 PetscBool cisdense; 175 Mat A, B; 176 PetscReal fill; 177 178 PetscFunctionBegin; 179 MatCheckProduct(C, 1); 180 PetscCheck(!C->product->data, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data not empty"); 181 A = C->product->A; 182 B = C->product->B; 183 fill = C->product->fill; 184 bA = (Mat_Nest *)A->data; 185 nr = bA->nr; 186 nc = bA->nc; 187 PetscCall(MatGetLocalSize(C, &m, &n)); 188 PetscCall(MatGetSize(C, &M, &N)); 189 if (m == PETSC_DECIDE || n == PETSC_DECIDE || M == PETSC_DECIDE || N == PETSC_DECIDE) { 190 PetscCall(MatGetLocalSize(B, NULL, &n)); 191 PetscCall(MatGetSize(B, NULL, &N)); 192 PetscCall(MatGetLocalSize(A, &m, NULL)); 193 PetscCall(MatGetSize(A, &M, NULL)); 194 PetscCall(MatSetSizes(C, m, n, M, N)); 195 } 196 PetscCall(PetscObjectTypeCompareAny((PetscObject)C, &cisdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, MATMPIDENSECUDA, "")); 197 if (!cisdense) PetscCall(MatSetType(C, ((PetscObject)B)->type_name)); 198 PetscCall(MatSetUp(C)); 199 if (!N) { 200 C->ops->productnumeric = MatProductNumeric_Nest_Dense; 201 PetscFunctionReturn(PETSC_SUCCESS); 202 } 203 204 PetscCall(PetscNew(&contents)); 205 C->product->data = contents; 206 C->product->destroy = MatNest_DenseDestroy; 207 PetscCall(PetscCalloc3(nr + 1, &contents->dm, nc + 1, &contents->dn, nr * nc, &contents->workC)); 208 contents->k = nr * nc; 209 for (i = 0; i < nr; i++) { 210 
PetscCall(ISGetLocalSize(bA->isglobal.row[i], contents->dm + i + 1)); 211 maxm = PetscMax(maxm, contents->dm[i + 1]); 212 contents->dm[i + 1] += contents->dm[i]; 213 } 214 for (i = 0; i < nc; i++) { 215 PetscCall(ISGetLocalSize(bA->isglobal.col[i], contents->dn + i + 1)); 216 contents->dn[i + 1] += contents->dn[i]; 217 } 218 PetscCall(PetscMalloc1(maxm * N, &contents->tarray)); 219 PetscCall(MatDenseGetLDA(B, &ldb)); 220 PetscCall(MatGetSize(B, NULL, &N)); 221 PetscCall(MatDenseGetArrayRead(B, &barray)); 222 /* loops are permuted compared to MatMatMultNumeric so that viewB is created only once per column of A */ 223 for (j = 0; j < nc; j++) { 224 PetscCall(ISGetSize(bA->isglobal.col[j], &M)); 225 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset((PetscScalar *)barray, contents->dn[j]), &viewB)); 226 PetscCall(MatDenseSetLDA(viewB, ldb)); 227 for (i = 0; i < nr; i++) { 228 if (!bA->m[i][j]) continue; 229 /* MatMatMultSymbolic may attach a specific container (depending on MatType of bA->m[i][j]) to workC[i][j] */ 230 231 PetscCall(MatProductCreate(bA->m[i][j], viewB, NULL, &contents->workC[i * nc + j])); 232 workC = contents->workC[i * nc + j]; 233 PetscCall(MatProductSetType(workC, MATPRODUCT_AB)); 234 PetscCall(MatProductSetAlgorithm(workC, "default")); 235 PetscCall(MatProductSetFill(workC, fill)); 236 PetscCall(MatProductSetFromOptions(workC)); 237 PetscCall(MatProductSymbolic(workC)); 238 239 /* since tarray will be shared by all Mat */ 240 PetscCall(MatSeqDenseSetPreallocation(workC, contents->tarray)); 241 PetscCall(MatMPIDenseSetPreallocation(workC, contents->tarray)); 242 } 243 PetscCall(MatDestroy(&viewB)); 244 } 245 PetscCall(MatDenseRestoreArrayRead(B, &barray)); 246 247 C->ops->productnumeric = MatProductNumeric_Nest_Dense; 248 PetscFunctionReturn(PETSC_SUCCESS); 249 } 250 251 static PetscErrorCode MatProductSetFromOptions_Nest_Dense_AB(Mat C) 252 { 253 
PetscFunctionBegin; 254 C->ops->productsymbolic = MatProductSymbolic_Nest_Dense; 255 PetscFunctionReturn(PETSC_SUCCESS); 256 } 257 258 static PetscErrorCode MatProductSetFromOptions_Nest_Dense(Mat C) 259 { 260 Mat_Product *product = C->product; 261 262 PetscFunctionBegin; 263 if (product->type == MATPRODUCT_AB) PetscCall(MatProductSetFromOptions_Nest_Dense_AB(C)); 264 PetscFunctionReturn(PETSC_SUCCESS); 265 } 266 267 static PetscErrorCode MatMultTransposeKernel_Nest(Mat A, Vec x, Vec y, PetscBool herm) 268 { 269 Mat_Nest *bA = (Mat_Nest *)A->data; 270 Vec *bx = bA->left, *by = bA->right; 271 PetscInt i, j, nr = bA->nr, nc = bA->nc; 272 273 PetscFunctionBegin; 274 for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i])); 275 for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(y, bA->isglobal.col[i], &by[i])); 276 for (j = 0; j < nc; j++) { 277 PetscCall(VecZeroEntries(by[j])); 278 for (i = 0; i < nr; i++) { 279 if (!bA->m[i][j]) continue; 280 if (herm) PetscCall(MatMultHermitianTransposeAdd(bA->m[i][j], bx[i], by[j], by[j])); /* y[j] <- y[j] + (A[i][j])^H * x[i] */ 281 else PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], by[j], by[j])); /* y[j] <- y[j] + (A[i][j])^T * x[i] */ 282 } 283 } 284 for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i])); 285 for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.col[i], &by[i])); 286 PetscFunctionReturn(PETSC_SUCCESS); 287 } 288 289 static PetscErrorCode MatMultTranspose_Nest(Mat A, Vec x, Vec y) 290 { 291 PetscFunctionBegin; 292 PetscCall(MatMultTransposeKernel_Nest(A, x, y, PETSC_FALSE)); 293 PetscFunctionReturn(PETSC_SUCCESS); 294 } 295 296 static PetscErrorCode MatMultHermitianTranspose_Nest(Mat A, Vec x, Vec y) 297 { 298 PetscFunctionBegin; 299 PetscCall(MatMultTransposeKernel_Nest(A, x, y, PETSC_TRUE)); 300 PetscFunctionReturn(PETSC_SUCCESS); 301 } 302 303 static PetscErrorCode MatMultTransposeAddKernel_Nest(Mat A, Vec x, Vec y, Vec z, 
PetscBool herm) 304 { 305 Mat_Nest *bA = (Mat_Nest *)A->data; 306 Vec *bx = bA->left, *bz = bA->right; 307 PetscInt i, j, nr = bA->nr, nc = bA->nc; 308 309 PetscFunctionBegin; 310 for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i])); 311 for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(z, bA->isglobal.col[i], &bz[i])); 312 for (j = 0; j < nc; j++) { 313 if (y != z) { 314 Vec by; 315 PetscCall(VecGetSubVector(y, bA->isglobal.col[j], &by)); 316 PetscCall(VecCopy(by, bz[j])); 317 PetscCall(VecRestoreSubVector(y, bA->isglobal.col[j], &by)); 318 } 319 for (i = 0; i < nr; i++) { 320 if (!bA->m[i][j]) continue; 321 if (herm) PetscCall(MatMultHermitianTransposeAdd(bA->m[i][j], bx[i], bz[j], bz[j])); /* z[j] <- y[j] + (A[i][j])^H * x[i] */ 322 else PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], bz[j], bz[j])); /* z[j] <- y[j] + (A[i][j])^T * x[i] */ 323 } 324 } 325 for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i])); 326 for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.col[i], &bz[i])); 327 PetscFunctionReturn(PETSC_SUCCESS); 328 } 329 330 static PetscErrorCode MatMultTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z) 331 { 332 PetscFunctionBegin; 333 PetscCall(MatMultTransposeAddKernel_Nest(A, x, y, z, PETSC_FALSE)); 334 PetscFunctionReturn(PETSC_SUCCESS); 335 } 336 337 static PetscErrorCode MatMultHermitianTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z) 338 { 339 PetscFunctionBegin; 340 PetscCall(MatMultTransposeAddKernel_Nest(A, x, y, z, PETSC_TRUE)); 341 PetscFunctionReturn(PETSC_SUCCESS); 342 } 343 344 static PetscErrorCode MatTranspose_Nest(Mat A, MatReuse reuse, Mat *B) 345 { 346 Mat_Nest *bA = (Mat_Nest *)A->data, *bC; 347 Mat C; 348 PetscInt i, j, nr = bA->nr, nc = bA->nc; 349 350 PetscFunctionBegin; 351 if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B)); 352 PetscCheck(reuse != MAT_INPLACE_MATRIX || nr == nc, PetscObjectComm((PetscObject)A), 
PETSC_ERR_ARG_SIZ, "Square nested matrix only for in-place"); 353 354 if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) { 355 Mat *subs; 356 IS *is_row, *is_col; 357 358 PetscCall(PetscCalloc1(nr * nc, &subs)); 359 PetscCall(PetscMalloc2(nr, &is_row, nc, &is_col)); 360 PetscCall(MatNestGetISs(A, is_row, is_col)); 361 if (reuse == MAT_INPLACE_MATRIX) { 362 for (i = 0; i < nr; i++) { 363 for (j = 0; j < nc; j++) subs[i + nr * j] = bA->m[i][j]; 364 } 365 } 366 367 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nc, is_col, nr, is_row, subs, &C)); 368 PetscCall(PetscFree(subs)); 369 PetscCall(PetscFree2(is_row, is_col)); 370 } else { 371 C = *B; 372 } 373 374 bC = (Mat_Nest *)C->data; 375 for (i = 0; i < nr; i++) { 376 for (j = 0; j < nc; j++) { 377 if (bA->m[i][j]) { 378 PetscCall(MatTranspose(bA->m[i][j], reuse, &(bC->m[j][i]))); 379 } else { 380 bC->m[j][i] = NULL; 381 } 382 } 383 } 384 385 if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) { 386 *B = C; 387 } else { 388 PetscCall(MatHeaderMerge(A, &C)); 389 } 390 PetscFunctionReturn(PETSC_SUCCESS); 391 } 392 393 static PetscErrorCode MatNestDestroyISList(PetscInt n, IS **list) 394 { 395 IS *lst = *list; 396 PetscInt i; 397 398 PetscFunctionBegin; 399 if (!lst) PetscFunctionReturn(PETSC_SUCCESS); 400 for (i = 0; i < n; i++) 401 if (lst[i]) PetscCall(ISDestroy(&lst[i])); 402 PetscCall(PetscFree(lst)); 403 *list = NULL; 404 PetscFunctionReturn(PETSC_SUCCESS); 405 } 406 407 static PetscErrorCode MatReset_Nest(Mat A) 408 { 409 Mat_Nest *vs = (Mat_Nest *)A->data; 410 PetscInt i, j; 411 412 PetscFunctionBegin; 413 /* release the matrices and the place holders */ 414 PetscCall(MatNestDestroyISList(vs->nr, &vs->isglobal.row)); 415 PetscCall(MatNestDestroyISList(vs->nc, &vs->isglobal.col)); 416 PetscCall(MatNestDestroyISList(vs->nr, &vs->islocal.row)); 417 PetscCall(MatNestDestroyISList(vs->nc, &vs->islocal.col)); 418 419 PetscCall(PetscFree(vs->row_len)); 420 
PetscCall(PetscFree(vs->col_len)); 421 PetscCall(PetscFree(vs->nnzstate)); 422 423 PetscCall(PetscFree2(vs->left, vs->right)); 424 425 /* release the matrices and the place holders */ 426 if (vs->m) { 427 for (i = 0; i < vs->nr; i++) { 428 for (j = 0; j < vs->nc; j++) PetscCall(MatDestroy(&vs->m[i][j])); 429 PetscCall(PetscFree(vs->m[i])); 430 } 431 PetscCall(PetscFree(vs->m)); 432 } 433 434 /* restore defaults */ 435 vs->nr = 0; 436 vs->nc = 0; 437 vs->splitassembly = PETSC_FALSE; 438 PetscFunctionReturn(PETSC_SUCCESS); 439 } 440 441 static PetscErrorCode MatDestroy_Nest(Mat A) 442 { 443 PetscFunctionBegin; 444 PetscCall(MatReset_Nest(A)); 445 PetscCall(PetscFree(A->data)); 446 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", NULL)); 447 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", NULL)); 448 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", NULL)); 449 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", NULL)); 450 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", NULL)); 451 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", NULL)); 452 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", NULL)); 453 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", NULL)); 454 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", NULL)); 455 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", NULL)); 456 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", NULL)); 457 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", NULL)); 458 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", NULL)); 459 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", NULL)); 460 PetscCall(PetscObjectComposeFunction((PetscObject)A, 
"MatProductSetFromOptions_nest_seqdense_C", NULL)); 461 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", NULL)); 462 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", NULL)); 463 PetscFunctionReturn(PETSC_SUCCESS); 464 } 465 466 static PetscErrorCode MatMissingDiagonal_Nest(Mat mat, PetscBool *missing, PetscInt *dd) 467 { 468 Mat_Nest *vs = (Mat_Nest *)mat->data; 469 PetscInt i; 470 471 PetscFunctionBegin; 472 if (dd) *dd = 0; 473 if (!vs->nr) { 474 *missing = PETSC_TRUE; 475 PetscFunctionReturn(PETSC_SUCCESS); 476 } 477 *missing = PETSC_FALSE; 478 for (i = 0; i < vs->nr && !(*missing); i++) { 479 *missing = PETSC_TRUE; 480 if (vs->m[i][i]) { 481 PetscCall(MatMissingDiagonal(vs->m[i][i], missing, NULL)); 482 PetscCheck(!*missing || !dd, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "First missing entry not yet implemented"); 483 } 484 } 485 PetscFunctionReturn(PETSC_SUCCESS); 486 } 487 488 static PetscErrorCode MatAssemblyBegin_Nest(Mat A, MatAssemblyType type) 489 { 490 Mat_Nest *vs = (Mat_Nest *)A->data; 491 PetscInt i, j; 492 PetscBool nnzstate = PETSC_FALSE; 493 494 PetscFunctionBegin; 495 for (i = 0; i < vs->nr; i++) { 496 for (j = 0; j < vs->nc; j++) { 497 PetscObjectState subnnzstate = 0; 498 if (vs->m[i][j]) { 499 PetscCall(MatAssemblyBegin(vs->m[i][j], type)); 500 if (!vs->splitassembly) { 501 /* Note: split assembly will fail if the same block appears more than once (even indirectly through a nested 502 * sub-block). This could be fixed by adding a flag to Mat so that there was a way to check if a Mat was 503 * already performing an assembly, but the result would by more complicated and appears to offer less 504 * potential for diagnostics and correctness checking. Split assembly should be fixed once there is an 505 * interface for libraries to make asynchronous progress in "user-defined non-blocking collectives". 
506 */ 507 PetscCall(MatAssemblyEnd(vs->m[i][j], type)); 508 PetscCall(MatGetNonzeroState(vs->m[i][j], &subnnzstate)); 509 } 510 } 511 nnzstate = (PetscBool)(nnzstate || vs->nnzstate[i * vs->nc + j] != subnnzstate); 512 vs->nnzstate[i * vs->nc + j] = subnnzstate; 513 } 514 } 515 if (nnzstate) A->nonzerostate++; 516 PetscFunctionReturn(PETSC_SUCCESS); 517 } 518 519 static PetscErrorCode MatAssemblyEnd_Nest(Mat A, MatAssemblyType type) 520 { 521 Mat_Nest *vs = (Mat_Nest *)A->data; 522 PetscInt i, j; 523 524 PetscFunctionBegin; 525 for (i = 0; i < vs->nr; i++) { 526 for (j = 0; j < vs->nc; j++) { 527 if (vs->m[i][j]) { 528 if (vs->splitassembly) PetscCall(MatAssemblyEnd(vs->m[i][j], type)); 529 } 530 } 531 } 532 PetscFunctionReturn(PETSC_SUCCESS); 533 } 534 535 static PetscErrorCode MatNestFindNonzeroSubMatRow(Mat A, PetscInt row, Mat *B) 536 { 537 Mat_Nest *vs = (Mat_Nest *)A->data; 538 PetscInt j; 539 Mat sub; 540 541 PetscFunctionBegin; 542 sub = (row < vs->nc) ? vs->m[row][row] : (Mat)NULL; /* Prefer to find on the diagonal */ 543 for (j = 0; !sub && j < vs->nc; j++) sub = vs->m[row][j]; 544 if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */ 545 *B = sub; 546 PetscFunctionReturn(PETSC_SUCCESS); 547 } 548 549 static PetscErrorCode MatNestFindNonzeroSubMatCol(Mat A, PetscInt col, Mat *B) 550 { 551 Mat_Nest *vs = (Mat_Nest *)A->data; 552 PetscInt i; 553 Mat sub; 554 555 PetscFunctionBegin; 556 sub = (col < vs->nr) ? 
vs->m[col][col] : (Mat)NULL; /* Prefer to find on the diagonal */ 557 for (i = 0; !sub && i < vs->nr; i++) sub = vs->m[i][col]; 558 if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */ 559 *B = sub; 560 PetscFunctionReturn(PETSC_SUCCESS); 561 } 562 563 static PetscErrorCode MatNestFindISRange(Mat A, PetscInt n, const IS list[], IS is, PetscInt *begin, PetscInt *end) 564 { 565 PetscInt i, j, size, m; 566 PetscBool flg; 567 IS out, concatenate[2]; 568 569 PetscFunctionBegin; 570 PetscAssertPointer(list, 3); 571 PetscValidHeaderSpecific(is, IS_CLASSID, 4); 572 if (begin) { 573 PetscAssertPointer(begin, 5); 574 *begin = -1; 575 } 576 if (end) { 577 PetscAssertPointer(end, 6); 578 *end = -1; 579 } 580 for (i = 0; i < n; i++) { 581 if (!list[i]) continue; 582 PetscCall(ISEqualUnsorted(list[i], is, &flg)); 583 if (flg) { 584 if (begin) *begin = i; 585 if (end) *end = i + 1; 586 PetscFunctionReturn(PETSC_SUCCESS); 587 } 588 } 589 PetscCall(ISGetSize(is, &size)); 590 for (i = 0; i < n - 1; i++) { 591 if (!list[i]) continue; 592 m = 0; 593 PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, list + i, &out)); 594 PetscCall(ISGetSize(out, &m)); 595 for (j = i + 2; j < n && m < size; j++) { 596 if (list[j]) { 597 concatenate[0] = out; 598 concatenate[1] = list[j]; 599 PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, concatenate, &out)); 600 PetscCall(ISDestroy(concatenate)); 601 PetscCall(ISGetSize(out, &m)); 602 } 603 } 604 if (m == size) { 605 PetscCall(ISEqualUnsorted(out, is, &flg)); 606 if (flg) { 607 if (begin) *begin = i; 608 if (end) *end = j; 609 PetscCall(ISDestroy(&out)); 610 PetscFunctionReturn(PETSC_SUCCESS); 611 } 612 } 613 PetscCall(ISDestroy(&out)); 614 } 615 PetscFunctionReturn(PETSC_SUCCESS); 616 } 617 618 static PetscErrorCode MatNestFillEmptyMat_Private(Mat A, PetscInt i, PetscInt j, Mat *B) 619 { 620 Mat_Nest *vs = (Mat_Nest *)A->data; 621 PetscInt lr, lc; 622 623 PetscFunctionBegin; 624 
PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B)); 625 PetscCall(ISGetLocalSize(vs->isglobal.row[i], &lr)); 626 PetscCall(ISGetLocalSize(vs->isglobal.col[j], &lc)); 627 PetscCall(MatSetSizes(*B, lr, lc, PETSC_DECIDE, PETSC_DECIDE)); 628 PetscCall(MatSetType(*B, MATAIJ)); 629 PetscCall(MatSeqAIJSetPreallocation(*B, 0, NULL)); 630 PetscCall(MatMPIAIJSetPreallocation(*B, 0, NULL, 0, NULL)); 631 PetscCall(MatSetUp(*B)); 632 PetscCall(MatSetOption(*B, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE)); 633 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 634 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 635 PetscFunctionReturn(PETSC_SUCCESS); 636 } 637 638 static PetscErrorCode MatNestGetBlock_Private(Mat A, PetscInt rbegin, PetscInt rend, PetscInt cbegin, PetscInt cend, Mat *B) 639 { 640 Mat_Nest *vs = (Mat_Nest *)A->data; 641 Mat *a; 642 PetscInt i, j, k, l, nr = rend - rbegin, nc = cend - cbegin; 643 char keyname[256]; 644 PetscBool *b; 645 PetscBool flg; 646 647 PetscFunctionBegin; 648 *B = NULL; 649 PetscCall(PetscSNPrintf(keyname, sizeof(keyname), "NestBlock_%" PetscInt_FMT "-%" PetscInt_FMT "x%" PetscInt_FMT "-%" PetscInt_FMT, rbegin, rend, cbegin, cend)); 650 PetscCall(PetscObjectQuery((PetscObject)A, keyname, (PetscObject *)B)); 651 if (*B) PetscFunctionReturn(PETSC_SUCCESS); 652 653 PetscCall(PetscMalloc2(nr * nc, &a, nr * nc, &b)); 654 for (i = 0; i < nr; i++) { 655 for (j = 0; j < nc; j++) { 656 a[i * nc + j] = vs->m[rbegin + i][cbegin + j]; 657 b[i * nc + j] = PETSC_FALSE; 658 } 659 } 660 if (nc != vs->nc && nr != vs->nr) { 661 for (i = 0; i < nr; i++) { 662 for (j = 0; j < nc; j++) { 663 flg = PETSC_FALSE; 664 for (k = 0; (k < nr && !flg); k++) { 665 if (a[j + k * nc]) flg = PETSC_TRUE; 666 } 667 if (flg) { 668 flg = PETSC_FALSE; 669 for (l = 0; (l < nc && !flg); l++) { 670 if (a[i * nc + l]) flg = PETSC_TRUE; 671 } 672 } 673 if (!flg) { 674 b[i * nc + j] = PETSC_TRUE; 675 PetscCall(MatNestFillEmptyMat_Private(A, rbegin + i, cbegin + j, a + i * nc + 
j)); 676 } 677 } 678 } 679 } 680 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, nr != vs->nr ? NULL : vs->isglobal.row, nc, nc != vs->nc ? NULL : vs->isglobal.col, a, B)); 681 for (i = 0; i < nr; i++) { 682 for (j = 0; j < nc; j++) { 683 if (b[i * nc + j]) PetscCall(MatDestroy(a + i * nc + j)); 684 } 685 } 686 PetscCall(PetscFree2(a, b)); 687 (*B)->assembled = A->assembled; 688 PetscCall(PetscObjectCompose((PetscObject)A, keyname, (PetscObject)*B)); 689 PetscCall(PetscObjectDereference((PetscObject)*B)); /* Leave the only remaining reference in the composition */ 690 PetscFunctionReturn(PETSC_SUCCESS); 691 } 692 693 static PetscErrorCode MatNestFindSubMat(Mat A, struct MatNestISPair *is, IS isrow, IS iscol, Mat *B) 694 { 695 Mat_Nest *vs = (Mat_Nest *)A->data; 696 PetscInt rbegin, rend, cbegin, cend; 697 698 PetscFunctionBegin; 699 PetscCall(MatNestFindISRange(A, vs->nr, is->row, isrow, &rbegin, &rend)); 700 PetscCall(MatNestFindISRange(A, vs->nc, is->col, iscol, &cbegin, &cend)); 701 if (rend == rbegin + 1 && cend == cbegin + 1) { 702 if (!vs->m[rbegin][cbegin]) PetscCall(MatNestFillEmptyMat_Private(A, rbegin, cbegin, vs->m[rbegin] + cbegin)); 703 *B = vs->m[rbegin][cbegin]; 704 } else if (rbegin != -1 && cbegin != -1) { 705 PetscCall(MatNestGetBlock_Private(A, rbegin, rend, cbegin, cend, B)); 706 } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Could not find index set"); 707 PetscFunctionReturn(PETSC_SUCCESS); 708 } 709 710 /* 711 TODO: This does not actually returns a submatrix we can modify 712 */ 713 static PetscErrorCode MatCreateSubMatrix_Nest(Mat A, IS isrow, IS iscol, MatReuse reuse, Mat *B) 714 { 715 Mat_Nest *vs = (Mat_Nest *)A->data; 716 Mat sub; 717 718 PetscFunctionBegin; 719 PetscCall(MatNestFindSubMat(A, &vs->isglobal, isrow, iscol, &sub)); 720 switch (reuse) { 721 case MAT_INITIAL_MATRIX: 722 if (sub) PetscCall(PetscObjectReference((PetscObject)sub)); 723 *B = sub; 724 break; 725 case MAT_REUSE_MATRIX: 726 
PetscCheck(sub == *B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Submatrix was not used before in this call"); 727 break; 728 case MAT_IGNORE_MATRIX: /* Nothing to do */ 729 break; 730 case MAT_INPLACE_MATRIX: /* Nothing to do */ 731 SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX is not supported yet"); 732 } 733 PetscFunctionReturn(PETSC_SUCCESS); 734 } 735 736 static PetscErrorCode MatGetLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B) 737 { 738 Mat_Nest *vs = (Mat_Nest *)A->data; 739 Mat sub; 740 741 PetscFunctionBegin; 742 PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub)); 743 /* We allow the submatrix to be NULL, perhaps it would be better for the user to return an empty matrix instead */ 744 if (sub) PetscCall(PetscObjectReference((PetscObject)sub)); 745 *B = sub; 746 PetscFunctionReturn(PETSC_SUCCESS); 747 } 748 749 static PetscErrorCode MatRestoreLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B) 750 { 751 Mat_Nest *vs = (Mat_Nest *)A->data; 752 Mat sub; 753 754 PetscFunctionBegin; 755 PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub)); 756 PetscCheck(*B == sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has not been gotten"); 757 if (sub) { 758 PetscCheck(((PetscObject)sub)->refct > 1, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has had reference count decremented too many times"); 759 PetscCall(MatDestroy(B)); 760 } 761 PetscFunctionReturn(PETSC_SUCCESS); 762 } 763 764 static PetscErrorCode MatGetDiagonal_Nest(Mat A, Vec v) 765 { 766 Mat_Nest *bA = (Mat_Nest *)A->data; 767 PetscInt i; 768 769 PetscFunctionBegin; 770 for (i = 0; i < bA->nr; i++) { 771 Vec bv; 772 PetscCall(VecGetSubVector(v, bA->isglobal.row[i], &bv)); 773 if (bA->m[i][i]) { 774 PetscCall(MatGetDiagonal(bA->m[i][i], bv)); 775 } else { 776 PetscCall(VecSet(bv, 0.0)); 777 } 778 PetscCall(VecRestoreSubVector(v, bA->isglobal.row[i], &bv)); 779 
} 780 PetscFunctionReturn(PETSC_SUCCESS); 781 } 782 783 static PetscErrorCode MatDiagonalScale_Nest(Mat A, Vec l, Vec r) 784 { 785 Mat_Nest *bA = (Mat_Nest *)A->data; 786 Vec bl, *br; 787 PetscInt i, j; 788 789 PetscFunctionBegin; 790 PetscCall(PetscCalloc1(bA->nc, &br)); 791 if (r) { 792 for (j = 0; j < bA->nc; j++) PetscCall(VecGetSubVector(r, bA->isglobal.col[j], &br[j])); 793 } 794 bl = NULL; 795 for (i = 0; i < bA->nr; i++) { 796 if (l) PetscCall(VecGetSubVector(l, bA->isglobal.row[i], &bl)); 797 for (j = 0; j < bA->nc; j++) { 798 if (bA->m[i][j]) PetscCall(MatDiagonalScale(bA->m[i][j], bl, br[j])); 799 } 800 if (l) PetscCall(VecRestoreSubVector(l, bA->isglobal.row[i], &bl)); 801 } 802 if (r) { 803 for (j = 0; j < bA->nc; j++) PetscCall(VecRestoreSubVector(r, bA->isglobal.col[j], &br[j])); 804 } 805 PetscCall(PetscFree(br)); 806 PetscFunctionReturn(PETSC_SUCCESS); 807 } 808 809 static PetscErrorCode MatScale_Nest(Mat A, PetscScalar a) 810 { 811 Mat_Nest *bA = (Mat_Nest *)A->data; 812 PetscInt i, j; 813 814 PetscFunctionBegin; 815 for (i = 0; i < bA->nr; i++) { 816 for (j = 0; j < bA->nc; j++) { 817 if (bA->m[i][j]) PetscCall(MatScale(bA->m[i][j], a)); 818 } 819 } 820 PetscFunctionReturn(PETSC_SUCCESS); 821 } 822 823 static PetscErrorCode MatShift_Nest(Mat A, PetscScalar a) 824 { 825 Mat_Nest *bA = (Mat_Nest *)A->data; 826 PetscInt i; 827 PetscBool nnzstate = PETSC_FALSE; 828 829 PetscFunctionBegin; 830 for (i = 0; i < bA->nr; i++) { 831 PetscObjectState subnnzstate = 0; 832 PetscCheck(bA->m[i][i], PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for shifting an empty diagonal block, insert a matrix in block (%" PetscInt_FMT ",%" PetscInt_FMT ")", i, i); 833 PetscCall(MatShift(bA->m[i][i], a)); 834 PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate)); 835 nnzstate = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate); 836 bA->nnzstate[i * bA->nc + i] = subnnzstate; 837 } 838 if (nnzstate) A->nonzerostate++; 839 
PetscFunctionReturn(PETSC_SUCCESS); 840 } 841 842 static PetscErrorCode MatDiagonalSet_Nest(Mat A, Vec D, InsertMode is) 843 { 844 Mat_Nest *bA = (Mat_Nest *)A->data; 845 PetscInt i; 846 PetscBool nnzstate = PETSC_FALSE; 847 848 PetscFunctionBegin; 849 for (i = 0; i < bA->nr; i++) { 850 PetscObjectState subnnzstate = 0; 851 Vec bv; 852 PetscCall(VecGetSubVector(D, bA->isglobal.row[i], &bv)); 853 if (bA->m[i][i]) { 854 PetscCall(MatDiagonalSet(bA->m[i][i], bv, is)); 855 PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate)); 856 } 857 PetscCall(VecRestoreSubVector(D, bA->isglobal.row[i], &bv)); 858 nnzstate = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate); 859 bA->nnzstate[i * bA->nc + i] = subnnzstate; 860 } 861 if (nnzstate) A->nonzerostate++; 862 PetscFunctionReturn(PETSC_SUCCESS); 863 } 864 865 static PetscErrorCode MatSetRandom_Nest(Mat A, PetscRandom rctx) 866 { 867 Mat_Nest *bA = (Mat_Nest *)A->data; 868 PetscInt i, j; 869 870 PetscFunctionBegin; 871 for (i = 0; i < bA->nr; i++) { 872 for (j = 0; j < bA->nc; j++) { 873 if (bA->m[i][j]) PetscCall(MatSetRandom(bA->m[i][j], rctx)); 874 } 875 } 876 PetscFunctionReturn(PETSC_SUCCESS); 877 } 878 879 static PetscErrorCode MatCreateVecs_Nest(Mat A, Vec *right, Vec *left) 880 { 881 Mat_Nest *bA = (Mat_Nest *)A->data; 882 Vec *L, *R; 883 MPI_Comm comm; 884 PetscInt i, j; 885 886 PetscFunctionBegin; 887 PetscCall(PetscObjectGetComm((PetscObject)A, &comm)); 888 if (right) { 889 /* allocate R */ 890 PetscCall(PetscMalloc1(bA->nc, &R)); 891 /* Create the right vectors */ 892 for (j = 0; j < bA->nc; j++) { 893 for (i = 0; i < bA->nr; i++) { 894 if (bA->m[i][j]) { 895 PetscCall(MatCreateVecs(bA->m[i][j], &R[j], NULL)); 896 break; 897 } 898 } 899 PetscCheck(i != bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null column."); 900 } 901 PetscCall(VecCreateNest(comm, bA->nc, bA->isglobal.col, R, right)); 902 /* hand back control to the nest vector */ 903 for (j = 0; j < 
bA->nc; j++) PetscCall(VecDestroy(&R[j])); 904 PetscCall(PetscFree(R)); 905 } 906 907 if (left) { 908 /* allocate L */ 909 PetscCall(PetscMalloc1(bA->nr, &L)); 910 /* Create the left vectors */ 911 for (i = 0; i < bA->nr; i++) { 912 for (j = 0; j < bA->nc; j++) { 913 if (bA->m[i][j]) { 914 PetscCall(MatCreateVecs(bA->m[i][j], NULL, &L[i])); 915 break; 916 } 917 } 918 PetscCheck(j != bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null row."); 919 } 920 921 PetscCall(VecCreateNest(comm, bA->nr, bA->isglobal.row, L, left)); 922 for (i = 0; i < bA->nr; i++) PetscCall(VecDestroy(&L[i])); 923 924 PetscCall(PetscFree(L)); 925 } 926 PetscFunctionReturn(PETSC_SUCCESS); 927 } 928 929 static PetscErrorCode MatView_Nest(Mat A, PetscViewer viewer) 930 { 931 Mat_Nest *bA = (Mat_Nest *)A->data; 932 PetscBool isascii, viewSub = PETSC_FALSE; 933 PetscInt i, j; 934 935 PetscFunctionBegin; 936 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii)); 937 if (isascii) { 938 PetscCall(PetscOptionsGetBool(((PetscObject)A)->options, ((PetscObject)A)->prefix, "-mat_view_nest_sub", &viewSub, NULL)); 939 PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix object:\n")); 940 PetscCall(PetscViewerASCIIPushTab(viewer)); 941 PetscCall(PetscViewerASCIIPrintf(viewer, "type=nest, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", bA->nr, bA->nc)); 942 943 PetscCall(PetscViewerASCIIPrintf(viewer, "MatNest structure:\n")); 944 for (i = 0; i < bA->nr; i++) { 945 for (j = 0; j < bA->nc; j++) { 946 MatType type; 947 char name[256] = "", prefix[256] = ""; 948 PetscInt NR, NC; 949 PetscBool isNest = PETSC_FALSE; 950 951 if (!bA->m[i][j]) { 952 PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : NULL\n", i, j)); 953 continue; 954 } 955 PetscCall(MatGetSize(bA->m[i][j], &NR, &NC)); 956 PetscCall(MatGetType(bA->m[i][j], &type)); 957 if (((PetscObject)bA->m[i][j])->name) PetscCall(PetscSNPrintf(name, sizeof(name), 
"name=\"%s\", ", ((PetscObject)bA->m[i][j])->name)); 958 if (((PetscObject)bA->m[i][j])->prefix) PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "prefix=\"%s\", ", ((PetscObject)bA->m[i][j])->prefix)); 959 PetscCall(PetscObjectTypeCompare((PetscObject)bA->m[i][j], MATNEST, &isNest)); 960 961 PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : %s%stype=%s, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", i, j, name, prefix, type, NR, NC)); 962 963 if (isNest || viewSub) { 964 PetscCall(PetscViewerASCIIPushTab(viewer)); /* push1 */ 965 PetscCall(MatView(bA->m[i][j], viewer)); 966 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop1 */ 967 } 968 } 969 } 970 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop0 */ 971 } 972 PetscFunctionReturn(PETSC_SUCCESS); 973 } 974 975 static PetscErrorCode MatZeroEntries_Nest(Mat A) 976 { 977 Mat_Nest *bA = (Mat_Nest *)A->data; 978 PetscInt i, j; 979 980 PetscFunctionBegin; 981 for (i = 0; i < bA->nr; i++) { 982 for (j = 0; j < bA->nc; j++) { 983 if (!bA->m[i][j]) continue; 984 PetscCall(MatZeroEntries(bA->m[i][j])); 985 } 986 } 987 PetscFunctionReturn(PETSC_SUCCESS); 988 } 989 990 static PetscErrorCode MatCopy_Nest(Mat A, Mat B, MatStructure str) 991 { 992 Mat_Nest *bA = (Mat_Nest *)A->data, *bB = (Mat_Nest *)B->data; 993 PetscInt i, j, nr = bA->nr, nc = bA->nc; 994 PetscBool nnzstate = PETSC_FALSE; 995 996 PetscFunctionBegin; 997 PetscCheck(nr == bB->nr && nc == bB->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Cannot copy a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") to a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bB->nr, bB->nc, nr, nc); 998 for (i = 0; i < nr; i++) { 999 for (j = 0; j < nc; j++) { 1000 PetscObjectState subnnzstate = 0; 1001 if (bA->m[i][j] && bB->m[i][j]) { 1002 PetscCall(MatCopy(bA->m[i][j], bB->m[i][j], str)); 1003 } else PetscCheck(!bA->m[i][j] && !bB->m[i][j], PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Matrix block 
does not exist at %" PetscInt_FMT ",%" PetscInt_FMT, i, j); 1004 PetscCall(MatGetNonzeroState(bB->m[i][j], &subnnzstate)); 1005 nnzstate = (PetscBool)(nnzstate || bB->nnzstate[i * nc + j] != subnnzstate); 1006 bB->nnzstate[i * nc + j] = subnnzstate; 1007 } 1008 } 1009 if (nnzstate) B->nonzerostate++; 1010 PetscFunctionReturn(PETSC_SUCCESS); 1011 } 1012 1013 static PetscErrorCode MatAXPY_Nest(Mat Y, PetscScalar a, Mat X, MatStructure str) 1014 { 1015 Mat_Nest *bY = (Mat_Nest *)Y->data, *bX = (Mat_Nest *)X->data; 1016 PetscInt i, j, nr = bY->nr, nc = bY->nc; 1017 PetscBool nnzstate = PETSC_FALSE; 1018 1019 PetscFunctionBegin; 1020 PetscCheck(nr == bX->nr && nc == bX->nc, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Cannot AXPY a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") with a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bX->nr, bX->nc, nr, nc); 1021 for (i = 0; i < nr; i++) { 1022 for (j = 0; j < nc; j++) { 1023 PetscObjectState subnnzstate = 0; 1024 if (bY->m[i][j] && bX->m[i][j]) { 1025 PetscCall(MatAXPY(bY->m[i][j], a, bX->m[i][j], str)); 1026 } else if (bX->m[i][j]) { 1027 Mat M; 1028 1029 PetscCheck(str == DIFFERENT_NONZERO_PATTERN || str == UNKNOWN_NONZERO_PATTERN, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Matrix block does not exist at %" PetscInt_FMT ",%" PetscInt_FMT ". 
Use DIFFERENT_NONZERO_PATTERN or UNKNOWN_NONZERO_PATTERN", i, j);
        /* Y has no block here: clone X's block and hand it to Y (MatNestSetSubMat takes its own reference) */
        PetscCall(MatDuplicate(bX->m[i][j], MAT_COPY_VALUES, &M));
        PetscCall(MatNestSetSubMat(Y, i, j, M));
        PetscCall(MatDestroy(&M));
      }
      if (bY->m[i][j]) PetscCall(MatGetNonzeroState(bY->m[i][j], &subnnzstate));
      nnzstate                 = (PetscBool)(nnzstate || bY->nnzstate[i * nc + j] != subnnzstate);
      bY->nnzstate[i * nc + j] = subnnzstate;
    }
  }
  if (nnzstate) Y->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Duplicate A block-by-block into a new MATNEST with the same row/column index sets;
   NULL blocks stay NULL. The new matrix is assembled before returning. */
static PetscErrorCode MatDuplicate_Nest(Mat A, MatDuplicateOption op, Mat *B)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Mat      *b;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(nr * nc, &b));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (bA->m[i][j]) {
        PetscCall(MatDuplicate(bA->m[i][j], op, &b[i * nc + j]));
      } else {
        b[i * nc + j] = NULL;
      }
    }
  }
  PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, bA->isglobal.row, nc, bA->isglobal.col, b, B));
  /* Give the new MatNest exclusive ownership */
  for (i = 0; i < nr * nc; i++) PetscCall(MatDestroy(&b[i]));
  PetscCall(PetscFree(b));

  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* nest api */
static PetscErrorCode MatNestGetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat *mat)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;

  PetscFunctionBegin;
  PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1);
  PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1);
  /* borrowed reference: may be NULL for an empty block; caller must not destroy it */
  *mat = bA->m[idxm][jdxm];
1079 PetscFunctionReturn(PETSC_SUCCESS); 1080 } 1081 1082 /*@ 1083 MatNestGetSubMat - Returns a single, sub-matrix from a `MATNEST` 1084 1085 Not Collective 1086 1087 Input Parameters: 1088 + A - `MATNEST` matrix 1089 . idxm - index of the matrix within the nest matrix 1090 - jdxm - index of the matrix within the nest matrix 1091 1092 Output Parameter: 1093 . sub - matrix at index `idxm`, `jdxm` within the nest matrix 1094 1095 Level: developer 1096 1097 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestSetSubMat()`, 1098 `MatNestGetLocalISs()`, `MatNestGetISs()` 1099 @*/ 1100 PetscErrorCode MatNestGetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat *sub) 1101 { 1102 PetscFunctionBegin; 1103 PetscUseMethod(A, "MatNestGetSubMat_C", (Mat, PetscInt, PetscInt, Mat *), (A, idxm, jdxm, sub)); 1104 PetscFunctionReturn(PETSC_SUCCESS); 1105 } 1106 1107 static PetscErrorCode MatNestSetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat mat) 1108 { 1109 Mat_Nest *bA = (Mat_Nest *)A->data; 1110 PetscInt m, n, M, N, mi, ni, Mi, Ni; 1111 1112 PetscFunctionBegin; 1113 PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1); 1114 PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1); 1115 PetscCall(MatGetLocalSize(mat, &m, &n)); 1116 PetscCall(MatGetSize(mat, &M, &N)); 1117 PetscCall(ISGetLocalSize(bA->isglobal.row[idxm], &mi)); 1118 PetscCall(ISGetSize(bA->isglobal.row[idxm], &Mi)); 1119 PetscCall(ISGetLocalSize(bA->isglobal.col[jdxm], &ni)); 1120 PetscCall(ISGetSize(bA->isglobal.col[jdxm], &Ni)); 1121 PetscCheck(M == Mi && N == Ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" 
PetscInt_FMT ")", M, N, Mi, Ni); 1122 PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix local dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, mi, ni); 1123 1124 /* do not increase object state */ 1125 if (mat == bA->m[idxm][jdxm]) PetscFunctionReturn(PETSC_SUCCESS); 1126 1127 PetscCall(PetscObjectReference((PetscObject)mat)); 1128 PetscCall(MatDestroy(&bA->m[idxm][jdxm])); 1129 bA->m[idxm][jdxm] = mat; 1130 PetscCall(PetscObjectStateIncrease((PetscObject)A)); 1131 PetscCall(MatGetNonzeroState(mat, &bA->nnzstate[idxm * bA->nc + jdxm])); 1132 A->nonzerostate++; 1133 PetscFunctionReturn(PETSC_SUCCESS); 1134 } 1135 1136 /*@ 1137 MatNestSetSubMat - Set a single submatrix in the `MATNEST` 1138 1139 Logically Collective 1140 1141 Input Parameters: 1142 + A - `MATNEST` matrix 1143 . idxm - index of the matrix within the nest matrix 1144 . jdxm - index of the matrix within the nest matrix 1145 - sub - matrix at index `idxm`, `jdxm` within the nest matrix 1146 1147 Level: developer 1148 1149 Notes: 1150 The new submatrix must have the same size and communicator as that block of the nest. 1151 1152 This increments the reference count of the submatrix. 
1153 1154 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestSetSubMats()`, `MatNestGetSubMats()`, `MatNestGetLocalISs()`, `MatCreateNest()`, 1155 `MatNestGetSubMat()`, `MatNestGetISs()`, `MatNestGetSize()` 1156 @*/ 1157 PetscErrorCode MatNestSetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat sub) 1158 { 1159 PetscFunctionBegin; 1160 PetscUseMethod(A, "MatNestSetSubMat_C", (Mat, PetscInt, PetscInt, Mat), (A, idxm, jdxm, sub)); 1161 PetscFunctionReturn(PETSC_SUCCESS); 1162 } 1163 1164 static PetscErrorCode MatNestGetSubMats_Nest(Mat A, PetscInt *M, PetscInt *N, Mat ***mat) 1165 { 1166 Mat_Nest *bA = (Mat_Nest *)A->data; 1167 1168 PetscFunctionBegin; 1169 if (M) *M = bA->nr; 1170 if (N) *N = bA->nc; 1171 if (mat) *mat = bA->m; 1172 PetscFunctionReturn(PETSC_SUCCESS); 1173 } 1174 1175 /*@C 1176 MatNestGetSubMats - Returns the entire two dimensional array of matrices defining a `MATNEST` matrix. 1177 1178 Not Collective 1179 1180 Input Parameter: 1181 . A - nest matrix 1182 1183 Output Parameters: 1184 + M - number of rows in the nest matrix 1185 . N - number of cols in the nest matrix 1186 - mat - 2d array of matrices 1187 1188 Level: developer 1189 1190 Note: 1191 The user should not free the array `mat`. 1192 1193 Fortran Notes: 1194 This routine has a calling sequence 1195 $ call MatNestGetSubMats(A, M, N, mat, ierr) 1196 where the space allocated for the optional argument `mat` is assumed large enough (if provided). 
1197 1198 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatCreateNest()`, 1199 `MatNestSetSubMats()`, `MatNestGetISs()`, `MatNestSetSubMat()` 1200 @*/ 1201 PetscErrorCode MatNestGetSubMats(Mat A, PetscInt *M, PetscInt *N, Mat ***mat) 1202 { 1203 PetscFunctionBegin; 1204 PetscUseMethod(A, "MatNestGetSubMats_C", (Mat, PetscInt *, PetscInt *, Mat ***), (A, M, N, mat)); 1205 PetscFunctionReturn(PETSC_SUCCESS); 1206 } 1207 1208 static PetscErrorCode MatNestGetSize_Nest(Mat A, PetscInt *M, PetscInt *N) 1209 { 1210 Mat_Nest *bA = (Mat_Nest *)A->data; 1211 1212 PetscFunctionBegin; 1213 if (M) *M = bA->nr; 1214 if (N) *N = bA->nc; 1215 PetscFunctionReturn(PETSC_SUCCESS); 1216 } 1217 1218 /*@ 1219 MatNestGetSize - Returns the size of the `MATNEST` matrix. 1220 1221 Not Collective 1222 1223 Input Parameter: 1224 . A - `MATNEST` matrix 1225 1226 Output Parameters: 1227 + M - number of rows in the nested mat 1228 - N - number of cols in the nested mat 1229 1230 Level: developer 1231 1232 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestGetLocalISs()`, 1233 `MatNestGetISs()` 1234 @*/ 1235 PetscErrorCode MatNestGetSize(Mat A, PetscInt *M, PetscInt *N) 1236 { 1237 PetscFunctionBegin; 1238 PetscUseMethod(A, "MatNestGetSize_C", (Mat, PetscInt *, PetscInt *), (A, M, N)); 1239 PetscFunctionReturn(PETSC_SUCCESS); 1240 } 1241 1242 static PetscErrorCode MatNestGetISs_Nest(Mat A, IS rows[], IS cols[]) 1243 { 1244 Mat_Nest *vs = (Mat_Nest *)A->data; 1245 PetscInt i; 1246 1247 PetscFunctionBegin; 1248 if (rows) 1249 for (i = 0; i < vs->nr; i++) rows[i] = vs->isglobal.row[i]; 1250 if (cols) 1251 for (i = 0; i < vs->nc; i++) cols[i] = vs->isglobal.col[i]; 1252 PetscFunctionReturn(PETSC_SUCCESS); 1253 } 1254 1255 /*@C 1256 MatNestGetISs - Returns the index sets partitioning the row and column spaces of a `MATNEST` 1257 1258 Not Collective 1259 1260 Input 
Parameter: 1261 . A - `MATNEST` matrix 1262 1263 Output Parameters: 1264 + rows - array of row index sets 1265 - cols - array of column index sets 1266 1267 Level: advanced 1268 1269 Note: 1270 The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s. 1271 1272 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetLocalISs()`, 1273 `MatCreateNest()`, `MatNestSetSubMats()` 1274 @*/ 1275 PetscErrorCode MatNestGetISs(Mat A, IS rows[], IS cols[]) 1276 { 1277 PetscFunctionBegin; 1278 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1279 PetscUseMethod(A, "MatNestGetISs_C", (Mat, IS[], IS[]), (A, rows, cols)); 1280 PetscFunctionReturn(PETSC_SUCCESS); 1281 } 1282 1283 static PetscErrorCode MatNestGetLocalISs_Nest(Mat A, IS rows[], IS cols[]) 1284 { 1285 Mat_Nest *vs = (Mat_Nest *)A->data; 1286 PetscInt i; 1287 1288 PetscFunctionBegin; 1289 if (rows) 1290 for (i = 0; i < vs->nr; i++) rows[i] = vs->islocal.row[i]; 1291 if (cols) 1292 for (i = 0; i < vs->nc; i++) cols[i] = vs->islocal.col[i]; 1293 PetscFunctionReturn(PETSC_SUCCESS); 1294 } 1295 1296 /*@C 1297 MatNestGetLocalISs - Returns the index sets partitioning the row and column spaces of a `MATNEST` 1298 1299 Not Collective 1300 1301 Input Parameter: 1302 . A - `MATNEST` matrix 1303 1304 Output Parameters: 1305 + rows - array of row index sets (or `NULL` to ignore) 1306 - cols - array of column index sets (or `NULL` to ignore) 1307 1308 Level: advanced 1309 1310 Note: 1311 The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s. 
1312 1313 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetISs()`, `MatCreateNest()`, 1314 `MatNestSetSubMats()`, `MatNestSetSubMat()` 1315 @*/ 1316 PetscErrorCode MatNestGetLocalISs(Mat A, IS rows[], IS cols[]) 1317 { 1318 PetscFunctionBegin; 1319 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1320 PetscUseMethod(A, "MatNestGetLocalISs_C", (Mat, IS[], IS[]), (A, rows, cols)); 1321 PetscFunctionReturn(PETSC_SUCCESS); 1322 } 1323 1324 static PetscErrorCode MatNestSetVecType_Nest(Mat A, VecType vtype) 1325 { 1326 PetscBool flg; 1327 1328 PetscFunctionBegin; 1329 PetscCall(PetscStrcmp(vtype, VECNEST, &flg)); 1330 /* In reality, this only distinguishes VECNEST and "other" */ 1331 if (flg) A->ops->getvecs = MatCreateVecs_Nest; 1332 else A->ops->getvecs = (PetscErrorCode(*)(Mat, Vec *, Vec *))0; 1333 PetscFunctionReturn(PETSC_SUCCESS); 1334 } 1335 1336 /*@C 1337 MatNestSetVecType - Sets the type of `Vec` returned by `MatCreateVecs()` 1338 1339 Not Collective 1340 1341 Input Parameters: 1342 + A - `MATNEST` matrix 1343 - vtype - `VecType` to use for creating vectors 1344 1345 Level: developer 1346 1347 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateVecs()`, `MatCreateNest()`, `VecType` 1348 @*/ 1349 PetscErrorCode MatNestSetVecType(Mat A, VecType vtype) 1350 { 1351 PetscFunctionBegin; 1352 PetscTryMethod(A, "MatNestSetVecType_C", (Mat, VecType), (A, vtype)); 1353 PetscFunctionReturn(PETSC_SUCCESS); 1354 } 1355 1356 static PetscErrorCode MatNestSetSubMats_Nest(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[]) 1357 { 1358 Mat_Nest *s = (Mat_Nest *)A->data; 1359 PetscInt i, j, m, n, M, N; 1360 PetscBool cong, isstd, sametype = PETSC_FALSE; 1361 VecType vtype, type; 1362 1363 PetscFunctionBegin; 1364 PetscCall(MatReset_Nest(A)); 1365 1366 s->nr = nr; 1367 s->nc = nc; 1368 1369 /* Create space for submatrices */ 1370 PetscCall(PetscMalloc1(nr, &s->m)); 1371 for (i 
= 0; i < nr; i++) PetscCall(PetscMalloc1(nc, &s->m[i])); 1372 for (i = 0; i < nr; i++) { 1373 for (j = 0; j < nc; j++) { 1374 s->m[i][j] = a[i * nc + j]; 1375 if (a[i * nc + j]) PetscCall(PetscObjectReference((PetscObject)a[i * nc + j])); 1376 } 1377 } 1378 PetscCall(MatGetVecType(A, &vtype)); 1379 PetscCall(PetscStrcmp(vtype, VECSTANDARD, &isstd)); 1380 if (isstd) { 1381 /* check if all blocks have the same vectype */ 1382 vtype = NULL; 1383 for (i = 0; i < nr; i++) { 1384 for (j = 0; j < nc; j++) { 1385 if (a[i * nc + j]) { 1386 if (!vtype) { /* first visited block */ 1387 PetscCall(MatGetVecType(a[i * nc + j], &vtype)); 1388 sametype = PETSC_TRUE; 1389 } else if (sametype) { 1390 PetscCall(MatGetVecType(a[i * nc + j], &type)); 1391 PetscCall(PetscStrcmp(vtype, type, &sametype)); 1392 } 1393 } 1394 } 1395 } 1396 if (sametype) { /* propagate vectype */ 1397 PetscCall(MatSetVecType(A, vtype)); 1398 } 1399 } 1400 1401 PetscCall(MatSetUp_NestIS_Private(A, nr, is_row, nc, is_col)); 1402 1403 PetscCall(PetscMalloc1(nr, &s->row_len)); 1404 PetscCall(PetscMalloc1(nc, &s->col_len)); 1405 for (i = 0; i < nr; i++) s->row_len[i] = -1; 1406 for (j = 0; j < nc; j++) s->col_len[j] = -1; 1407 1408 PetscCall(PetscCalloc1(nr * nc, &s->nnzstate)); 1409 for (i = 0; i < nr; i++) { 1410 for (j = 0; j < nc; j++) { 1411 if (s->m[i][j]) PetscCall(MatGetNonzeroState(s->m[i][j], &s->nnzstate[i * nc + j])); 1412 } 1413 } 1414 1415 PetscCall(MatNestGetSizes_Private(A, &m, &n, &M, &N)); 1416 1417 PetscCall(PetscLayoutSetSize(A->rmap, M)); 1418 PetscCall(PetscLayoutSetLocalSize(A->rmap, m)); 1419 PetscCall(PetscLayoutSetSize(A->cmap, N)); 1420 PetscCall(PetscLayoutSetLocalSize(A->cmap, n)); 1421 1422 PetscCall(PetscLayoutSetUp(A->rmap)); 1423 PetscCall(PetscLayoutSetUp(A->cmap)); 1424 1425 /* disable operations that are not supported for non-square matrices, 1426 or matrices for which is_row != is_col */ 1427 PetscCall(MatHasCongruentLayouts(A, &cong)); 1428 if (cong && nr != nc) cong = 
PETSC_FALSE; 1429 if (cong) { 1430 for (i = 0; cong && i < nr; i++) PetscCall(ISEqualUnsorted(s->isglobal.row[i], s->isglobal.col[i], &cong)); 1431 } 1432 if (!cong) { 1433 A->ops->missingdiagonal = NULL; 1434 A->ops->getdiagonal = NULL; 1435 A->ops->shift = NULL; 1436 A->ops->diagonalset = NULL; 1437 } 1438 1439 PetscCall(PetscCalloc2(nr, &s->left, nc, &s->right)); 1440 PetscCall(PetscObjectStateIncrease((PetscObject)A)); 1441 A->nonzerostate++; 1442 PetscFunctionReturn(PETSC_SUCCESS); 1443 } 1444 1445 /*@ 1446 MatNestSetSubMats - Sets the nested submatrices in a `MATNEST` 1447 1448 Collective 1449 1450 Input Parameters: 1451 + A - `MATNEST` matrix 1452 . nr - number of nested row blocks 1453 . is_row - index sets for each nested row block, or `NULL` to make contiguous 1454 . nc - number of nested column blocks 1455 . is_col - index sets for each nested column block, or `NULL` to make contiguous 1456 - a - row-aligned array of nr*nc submatrices, empty submatrices can be passed using `NULL` 1457 1458 Level: advanced 1459 1460 Note: 1461 This always resets any submatrix information previously set 1462 1463 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`, `MatNestGetSubMats()` 1464 @*/ 1465 PetscErrorCode MatNestSetSubMats(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[]) 1466 { 1467 PetscInt i; 1468 1469 PetscFunctionBegin; 1470 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1471 PetscCheck(nr >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of rows cannot be negative"); 1472 if (nr && is_row) { 1473 PetscAssertPointer(is_row, 3); 1474 for (i = 0; i < nr; i++) PetscValidHeaderSpecific(is_row[i], IS_CLASSID, 3); 1475 } 1476 PetscCheck(nc >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of columns cannot be negative"); 1477 if (nc && is_col) { 1478 PetscAssertPointer(is_col, 5); 1479 for (i = 0; i < nc; i++) 
PetscValidHeaderSpecific(is_col[i], IS_CLASSID, 5); 1480 } 1481 if (nr * nc > 0) PetscAssertPointer(a, 6); 1482 PetscUseMethod(A, "MatNestSetSubMats_C", (Mat, PetscInt, const IS[], PetscInt, const IS[], const Mat[]), (A, nr, is_row, nc, is_col, a)); 1483 PetscFunctionReturn(PETSC_SUCCESS); 1484 } 1485 1486 static PetscErrorCode MatNestCreateAggregateL2G_Private(Mat A, PetscInt n, const IS islocal[], const IS isglobal[], PetscBool colflg, ISLocalToGlobalMapping *ltog) 1487 { 1488 PetscBool flg; 1489 PetscInt i, j, m, mi, *ix; 1490 1491 PetscFunctionBegin; 1492 *ltog = NULL; 1493 for (i = 0, m = 0, flg = PETSC_FALSE; i < n; i++) { 1494 if (islocal[i]) { 1495 PetscCall(ISGetLocalSize(islocal[i], &mi)); 1496 flg = PETSC_TRUE; /* We found a non-trivial entry */ 1497 } else { 1498 PetscCall(ISGetLocalSize(isglobal[i], &mi)); 1499 } 1500 m += mi; 1501 } 1502 if (!flg) PetscFunctionReturn(PETSC_SUCCESS); 1503 1504 PetscCall(PetscMalloc1(m, &ix)); 1505 for (i = 0, m = 0; i < n; i++) { 1506 ISLocalToGlobalMapping smap = NULL; 1507 Mat sub = NULL; 1508 PetscSF sf; 1509 PetscLayout map; 1510 const PetscInt *ix2; 1511 1512 if (!colflg) { 1513 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1514 } else { 1515 PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub)); 1516 } 1517 if (sub) { 1518 if (!colflg) { 1519 PetscCall(MatGetLocalToGlobalMapping(sub, &smap, NULL)); 1520 } else { 1521 PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &smap)); 1522 } 1523 } 1524 /* 1525 Now we need to extract the monolithic global indices that correspond to the given split global indices. 1526 In many/most cases, we only want MatGetLocalSubMatrix() to work, in which case we only need to know the size of the local spaces. 
1527 */ 1528 PetscCall(ISGetIndices(isglobal[i], &ix2)); 1529 if (islocal[i]) { 1530 PetscInt *ilocal, *iremote; 1531 PetscInt mil, nleaves; 1532 1533 PetscCall(ISGetLocalSize(islocal[i], &mi)); 1534 PetscCheck(smap, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing local to global map"); 1535 for (j = 0; j < mi; j++) ix[m + j] = j; 1536 PetscCall(ISLocalToGlobalMappingApply(smap, mi, ix + m, ix + m)); 1537 1538 /* PetscSFSetGraphLayout does not like negative indices */ 1539 PetscCall(PetscMalloc2(mi, &ilocal, mi, &iremote)); 1540 for (j = 0, nleaves = 0; j < mi; j++) { 1541 if (ix[m + j] < 0) continue; 1542 ilocal[nleaves] = j; 1543 iremote[nleaves] = ix[m + j]; 1544 nleaves++; 1545 } 1546 PetscCall(ISGetLocalSize(isglobal[i], &mil)); 1547 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &sf)); 1548 PetscCall(PetscLayoutCreate(PetscObjectComm((PetscObject)A), &map)); 1549 PetscCall(PetscLayoutSetLocalSize(map, mil)); 1550 PetscCall(PetscLayoutSetUp(map)); 1551 PetscCall(PetscSFSetGraphLayout(sf, map, nleaves, ilocal, PETSC_USE_POINTER, iremote)); 1552 PetscCall(PetscLayoutDestroy(&map)); 1553 PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1554 PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1555 PetscCall(PetscSFDestroy(&sf)); 1556 PetscCall(PetscFree2(ilocal, iremote)); 1557 } else { 1558 PetscCall(ISGetLocalSize(isglobal[i], &mi)); 1559 for (j = 0; j < mi; j++) ix[m + j] = ix2[i]; 1560 } 1561 PetscCall(ISRestoreIndices(isglobal[i], &ix2)); 1562 m += mi; 1563 } 1564 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, m, ix, PETSC_OWN_POINTER, ltog)); 1565 PetscFunctionReturn(PETSC_SUCCESS); 1566 } 1567 1568 /* If an IS was provided, there is nothing Nest needs to do, otherwise Nest will build a strided IS */ 1569 /* 1570 nprocessors = NP 1571 Nest x^T = ((g_0,g_1,...g_nprocs-1), (h_0,h_1,...h_NP-1)) 1572 proc 0: => (g_0,h_0,) 1573 proc 1: => (g_1,h_1,) 1574 ... 
1575 proc nprocs-1: => (g_NP-1,h_NP-1,) 1576 1577 proc 0: proc 1: proc nprocs-1: 1578 is[0] = (0,1,2,...,nlocal(g_0)-1) (0,1,...,nlocal(g_1)-1) (0,1,...,nlocal(g_NP-1)) 1579 1580 proc 0: 1581 is[1] = (nlocal(g_0),nlocal(g_0)+1,...,nlocal(g_0)+nlocal(h_0)-1) 1582 proc 1: 1583 is[1] = (nlocal(g_1),nlocal(g_1)+1,...,nlocal(g_1)+nlocal(h_1)-1) 1584 1585 proc NP-1: 1586 is[1] = (nlocal(g_NP-1),nlocal(g_NP-1)+1,...,nlocal(g_NP-1)+nlocal(h_NP-1)-1) 1587 */ 1588 static PetscErrorCode MatSetUp_NestIS_Private(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[]) 1589 { 1590 Mat_Nest *vs = (Mat_Nest *)A->data; 1591 PetscInt i, j, offset, n, nsum, bs; 1592 Mat sub = NULL; 1593 1594 PetscFunctionBegin; 1595 PetscCall(PetscMalloc1(nr, &vs->isglobal.row)); 1596 PetscCall(PetscMalloc1(nc, &vs->isglobal.col)); 1597 if (is_row) { /* valid IS is passed in */ 1598 /* refs on is[] are incremented */ 1599 for (i = 0; i < vs->nr; i++) { 1600 PetscCall(PetscObjectReference((PetscObject)is_row[i])); 1601 1602 vs->isglobal.row[i] = is_row[i]; 1603 } 1604 } else { /* Create the ISs by inspecting sizes of a submatrix in each row */ 1605 nsum = 0; 1606 for (i = 0; i < vs->nr; i++) { /* Add up the local sizes to compute the aggregate offset */ 1607 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1608 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in row %" PetscInt_FMT, i); 1609 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1610 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1611 nsum += n; 1612 } 1613 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1614 offset -= nsum; 1615 for (i = 0; i < vs->nr; i++) { 1616 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1617 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1618 PetscCall(MatGetBlockSizes(sub, &bs, NULL)); 1619 
PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.row[i])); 1620 PetscCall(ISSetBlockSize(vs->isglobal.row[i], bs)); 1621 offset += n; 1622 } 1623 } 1624 1625 if (is_col) { /* valid IS is passed in */ 1626 /* refs on is[] are incremented */ 1627 for (j = 0; j < vs->nc; j++) { 1628 PetscCall(PetscObjectReference((PetscObject)is_col[j])); 1629 1630 vs->isglobal.col[j] = is_col[j]; 1631 } 1632 } else { /* Create the ISs by inspecting sizes of a submatrix in each column */ 1633 offset = A->cmap->rstart; 1634 nsum = 0; 1635 for (j = 0; j < vs->nc; j++) { 1636 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1637 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in column %" PetscInt_FMT, i); 1638 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1639 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1640 nsum += n; 1641 } 1642 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1643 offset -= nsum; 1644 for (j = 0; j < vs->nc; j++) { 1645 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1646 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1647 PetscCall(MatGetBlockSizes(sub, NULL, &bs)); 1648 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.col[j])); 1649 PetscCall(ISSetBlockSize(vs->isglobal.col[j], bs)); 1650 offset += n; 1651 } 1652 } 1653 1654 /* Set up the local ISs */ 1655 PetscCall(PetscMalloc1(vs->nr, &vs->islocal.row)); 1656 PetscCall(PetscMalloc1(vs->nc, &vs->islocal.col)); 1657 for (i = 0, offset = 0; i < vs->nr; i++) { 1658 IS isloc; 1659 ISLocalToGlobalMapping rmap = NULL; 1660 PetscInt nlocal, bs; 1661 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1662 if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, &rmap, NULL)); 1663 if (rmap) { 1664 PetscCall(MatGetBlockSizes(sub, &bs, NULL)); 1665 
PetscCall(ISLocalToGlobalMappingGetSize(rmap, &nlocal)); 1666 PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc)); 1667 PetscCall(ISSetBlockSize(isloc, bs)); 1668 } else { 1669 nlocal = 0; 1670 isloc = NULL; 1671 } 1672 vs->islocal.row[i] = isloc; 1673 offset += nlocal; 1674 } 1675 for (i = 0, offset = 0; i < vs->nc; i++) { 1676 IS isloc; 1677 ISLocalToGlobalMapping cmap = NULL; 1678 PetscInt nlocal, bs; 1679 PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub)); 1680 if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &cmap)); 1681 if (cmap) { 1682 PetscCall(MatGetBlockSizes(sub, NULL, &bs)); 1683 PetscCall(ISLocalToGlobalMappingGetSize(cmap, &nlocal)); 1684 PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc)); 1685 PetscCall(ISSetBlockSize(isloc, bs)); 1686 } else { 1687 nlocal = 0; 1688 isloc = NULL; 1689 } 1690 vs->islocal.col[i] = isloc; 1691 offset += nlocal; 1692 } 1693 1694 /* Set up the aggregate ISLocalToGlobalMapping */ 1695 { 1696 ISLocalToGlobalMapping rmap, cmap; 1697 PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nr, vs->islocal.row, vs->isglobal.row, PETSC_FALSE, &rmap)); 1698 PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nc, vs->islocal.col, vs->isglobal.col, PETSC_TRUE, &cmap)); 1699 if (rmap && cmap) PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap)); 1700 PetscCall(ISLocalToGlobalMappingDestroy(&rmap)); 1701 PetscCall(ISLocalToGlobalMappingDestroy(&cmap)); 1702 } 1703 1704 if (PetscDefined(USE_DEBUG)) { 1705 for (i = 0; i < vs->nr; i++) { 1706 for (j = 0; j < vs->nc; j++) { 1707 PetscInt m, n, M, N, mi, ni, Mi, Ni; 1708 Mat B = vs->m[i][j]; 1709 if (!B) continue; 1710 PetscCall(MatGetSize(B, &M, &N)); 1711 PetscCall(MatGetLocalSize(B, &m, &n)); 1712 PetscCall(ISGetSize(vs->isglobal.row[i], &Mi)); 1713 PetscCall(ISGetSize(vs->isglobal.col[j], &Ni)); 1714 PetscCall(ISGetLocalSize(vs->isglobal.row[i], &mi)); 1715 PetscCall(ISGetLocalSize(vs->isglobal.col[j], &ni)); 1716 PetscCheck(M == Mi && N == Ni, 
PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Global sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", M, N, i, j, Mi, Ni); 1717 PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Local sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, i, j, mi, ni); 1718 } 1719 } 1720 } 1721 1722 /* Set A->assembled if all non-null blocks are currently assembled */ 1723 for (i = 0; i < vs->nr; i++) { 1724 for (j = 0; j < vs->nc; j++) { 1725 if (vs->m[i][j] && !vs->m[i][j]->assembled) PetscFunctionReturn(PETSC_SUCCESS); 1726 } 1727 } 1728 A->assembled = PETSC_TRUE; 1729 PetscFunctionReturn(PETSC_SUCCESS); 1730 } 1731 1732 /*@C 1733 MatCreateNest - Creates a new `MATNEST` matrix containing several nested submatrices, each stored separately 1734 1735 Collective 1736 1737 Input Parameters: 1738 + comm - Communicator for the new `MATNEST` 1739 . nr - number of nested row blocks 1740 . is_row - index sets for each nested row block, or `NULL` to make contiguous 1741 . nc - number of nested column blocks 1742 . is_col - index sets for each nested column block, or `NULL` to make contiguous 1743 - a - row-aligned array of nr*nc submatrices, empty submatrices can be passed using `NULL` 1744 1745 Output Parameter: 1746 . 
B - new matrix 1747 1748 Level: advanced 1749 1750 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `VecCreateNest()`, `DMCreateMatrix()`, `MatNestSetSubMat()`, 1751 `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatNestGetSize()`, 1752 `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()` 1753 @*/ 1754 PetscErrorCode MatCreateNest(MPI_Comm comm, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[], Mat *B) 1755 { 1756 Mat A; 1757 1758 PetscFunctionBegin; 1759 *B = NULL; 1760 PetscCall(MatCreate(comm, &A)); 1761 PetscCall(MatSetType(A, MATNEST)); 1762 A->preallocated = PETSC_TRUE; 1763 PetscCall(MatNestSetSubMats(A, nr, is_row, nc, is_col, a)); 1764 *B = A; 1765 PetscFunctionReturn(PETSC_SUCCESS); 1766 } 1767 1768 static PetscErrorCode MatConvert_Nest_SeqAIJ_fast(Mat A, MatType newtype, MatReuse reuse, Mat *newmat) 1769 { 1770 Mat_Nest *nest = (Mat_Nest *)A->data; 1771 Mat *trans; 1772 PetscScalar **avv; 1773 PetscScalar *vv; 1774 PetscInt **aii, **ajj; 1775 PetscInt *ii, *jj, *ci; 1776 PetscInt nr, nc, nnz, i, j; 1777 PetscBool done; 1778 1779 PetscFunctionBegin; 1780 PetscCall(MatGetSize(A, &nr, &nc)); 1781 if (reuse == MAT_REUSE_MATRIX) { 1782 PetscInt rnr; 1783 1784 PetscCall(MatGetRowIJ(*newmat, 0, PETSC_FALSE, PETSC_FALSE, &rnr, (const PetscInt **)&ii, (const PetscInt **)&jj, &done)); 1785 PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "MatGetRowIJ"); 1786 PetscCheck(rnr == nr, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of rows"); 1787 PetscCall(MatSeqAIJGetArray(*newmat, &vv)); 1788 } 1789 /* extract CSR for nested SeqAIJ matrices */ 1790 nnz = 0; 1791 PetscCall(PetscCalloc4(nest->nr * nest->nc, &aii, nest->nr * nest->nc, &ajj, nest->nr * nest->nc, &avv, nest->nr * nest->nc, &trans)); 1792 for (i = 0; i < nest->nr; ++i) { 1793 for (j = 0; j < nest->nc; ++j) { 1794 Mat B = nest->m[i][j]; 1795 if (B) { 1796 PetscScalar *naa; 1797 PetscInt *nii, *njj, nnr; 
1798 PetscBool istrans; 1799 1800 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans)); 1801 if (istrans) { 1802 Mat Bt; 1803 1804 PetscCall(MatTransposeGetMat(B, &Bt)); 1805 PetscCall(MatTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j])); 1806 B = trans[i * nest->nc + j]; 1807 } else { 1808 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans)); 1809 if (istrans) { 1810 Mat Bt; 1811 1812 PetscCall(MatHermitianTransposeGetMat(B, &Bt)); 1813 PetscCall(MatHermitianTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j])); 1814 B = trans[i * nest->nc + j]; 1815 } 1816 } 1817 PetscCall(MatGetRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&nii, (const PetscInt **)&njj, &done)); 1818 PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatGetRowIJ"); 1819 PetscCall(MatSeqAIJGetArray(B, &naa)); 1820 nnz += nii[nnr]; 1821 1822 aii[i * nest->nc + j] = nii; 1823 ajj[i * nest->nc + j] = njj; 1824 avv[i * nest->nc + j] = naa; 1825 } 1826 } 1827 } 1828 if (reuse != MAT_REUSE_MATRIX) { 1829 PetscCall(PetscMalloc1(nr + 1, &ii)); 1830 PetscCall(PetscMalloc1(nnz, &jj)); 1831 PetscCall(PetscMalloc1(nnz, &vv)); 1832 } else { 1833 PetscCheck(nnz == ii[nr], PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of nonzeros"); 1834 } 1835 1836 /* new row pointer */ 1837 PetscCall(PetscArrayzero(ii, nr + 1)); 1838 for (i = 0; i < nest->nr; ++i) { 1839 PetscInt ncr, rst; 1840 1841 PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL)); 1842 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr)); 1843 for (j = 0; j < nest->nc; ++j) { 1844 if (aii[i * nest->nc + j]) { 1845 PetscInt *nii = aii[i * nest->nc + j]; 1846 PetscInt ir; 1847 1848 for (ir = rst; ir < ncr + rst; ++ir) { 1849 ii[ir + 1] += nii[1] - nii[0]; 1850 nii++; 1851 } 1852 } 1853 } 1854 } 1855 for (i = 0; i < nr; i++) ii[i + 1] += ii[i]; 1856 1857 /* construct CSR for the new matrix */ 
1858 PetscCall(PetscCalloc1(nr, &ci)); 1859 for (i = 0; i < nest->nr; ++i) { 1860 PetscInt ncr, rst; 1861 1862 PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL)); 1863 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr)); 1864 for (j = 0; j < nest->nc; ++j) { 1865 if (aii[i * nest->nc + j]) { 1866 PetscScalar *nvv = avv[i * nest->nc + j]; 1867 PetscInt *nii = aii[i * nest->nc + j]; 1868 PetscInt *njj = ajj[i * nest->nc + j]; 1869 PetscInt ir, cst; 1870 1871 PetscCall(ISStrideGetInfo(nest->isglobal.col[j], &cst, NULL)); 1872 for (ir = rst; ir < ncr + rst; ++ir) { 1873 PetscInt ij, rsize = nii[1] - nii[0], ist = ii[ir] + ci[ir]; 1874 1875 for (ij = 0; ij < rsize; ij++) { 1876 jj[ist + ij] = *njj + cst; 1877 vv[ist + ij] = *nvv; 1878 njj++; 1879 nvv++; 1880 } 1881 ci[ir] += rsize; 1882 nii++; 1883 } 1884 } 1885 } 1886 } 1887 PetscCall(PetscFree(ci)); 1888 1889 /* restore info */ 1890 for (i = 0; i < nest->nr; ++i) { 1891 for (j = 0; j < nest->nc; ++j) { 1892 Mat B = nest->m[i][j]; 1893 if (B) { 1894 PetscInt nnr = 0, k = i * nest->nc + j; 1895 1896 B = (trans[k] ? 
trans[k] : B); 1897 PetscCall(MatRestoreRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&aii[k], (const PetscInt **)&ajj[k], &done)); 1898 PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatRestoreRowIJ"); 1899 PetscCall(MatSeqAIJRestoreArray(B, &avv[k])); 1900 PetscCall(MatDestroy(&trans[k])); 1901 } 1902 } 1903 } 1904 PetscCall(PetscFree4(aii, ajj, avv, trans)); 1905 1906 /* finalize newmat */ 1907 if (reuse == MAT_INITIAL_MATRIX) { 1908 PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, newmat)); 1909 } else if (reuse == MAT_INPLACE_MATRIX) { 1910 Mat B; 1911 1912 PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, &B)); 1913 PetscCall(MatHeaderReplace(A, &B)); 1914 } 1915 PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY)); 1916 PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY)); 1917 { 1918 Mat_SeqAIJ *a = (Mat_SeqAIJ *)((*newmat)->data); 1919 a->free_a = PETSC_TRUE; 1920 a->free_ij = PETSC_TRUE; 1921 } 1922 PetscFunctionReturn(PETSC_SUCCESS); 1923 } 1924 1925 PETSC_INTERN PetscErrorCode MatAXPY_Dense_Nest(Mat Y, PetscScalar a, Mat X) 1926 { 1927 Mat_Nest *nest = (Mat_Nest *)X->data; 1928 PetscInt i, j, k, rstart; 1929 PetscBool flg; 1930 1931 PetscFunctionBegin; 1932 /* Fill by row */ 1933 for (j = 0; j < nest->nc; ++j) { 1934 /* Using global column indices and ISAllGather() is not scalable. 
*/ 1935 IS bNis; 1936 PetscInt bN; 1937 const PetscInt *bNindices; 1938 PetscCall(ISAllGather(nest->isglobal.col[j], &bNis)); 1939 PetscCall(ISGetSize(bNis, &bN)); 1940 PetscCall(ISGetIndices(bNis, &bNindices)); 1941 for (i = 0; i < nest->nr; ++i) { 1942 Mat B = nest->m[i][j], D = NULL; 1943 PetscInt bm, br; 1944 const PetscInt *bmindices; 1945 if (!B) continue; 1946 PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, "")); 1947 if (flg) { 1948 PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D)); 1949 PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D)); 1950 PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D)); 1951 B = D; 1952 } 1953 PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, "")); 1954 if (flg) { 1955 if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D)); 1956 else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D)); 1957 B = D; 1958 } 1959 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm)); 1960 PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices)); 1961 PetscCall(MatGetOwnershipRange(B, &rstart, NULL)); 1962 for (br = 0; br < bm; ++br) { 1963 PetscInt row = bmindices[br], brncols, *cols; 1964 const PetscInt *brcols; 1965 const PetscScalar *brcoldata; 1966 PetscScalar *vals = NULL; 1967 PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, &brcoldata)); 1968 PetscCall(PetscMalloc1(brncols, &cols)); 1969 for (k = 0; k < brncols; k++) cols[k] = bNindices[brcols[k]]; 1970 /* 1971 Nest blocks are required to be nonoverlapping -- otherwise nest and monolithic index layouts wouldn't match. 1972 Thus, we could use INSERT_VALUES, but I prefer ADD_VALUES. 
1973 */ 1974 if (a != 1.0) { 1975 PetscCall(PetscMalloc1(brncols, &vals)); 1976 for (k = 0; k < brncols; k++) vals[k] = a * brcoldata[k]; 1977 PetscCall(MatSetValues(Y, 1, &row, brncols, cols, vals, ADD_VALUES)); 1978 PetscCall(PetscFree(vals)); 1979 } else { 1980 PetscCall(MatSetValues(Y, 1, &row, brncols, cols, brcoldata, ADD_VALUES)); 1981 } 1982 PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, &brcoldata)); 1983 PetscCall(PetscFree(cols)); 1984 } 1985 if (D) PetscCall(MatDestroy(&D)); 1986 PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices)); 1987 } 1988 PetscCall(ISRestoreIndices(bNis, &bNindices)); 1989 PetscCall(ISDestroy(&bNis)); 1990 } 1991 PetscCall(MatAssemblyBegin(Y, MAT_FINAL_ASSEMBLY)); 1992 PetscCall(MatAssemblyEnd(Y, MAT_FINAL_ASSEMBLY)); 1993 PetscFunctionReturn(PETSC_SUCCESS); 1994 } 1995 1996 static PetscErrorCode MatConvert_Nest_AIJ(Mat A, MatType newtype, MatReuse reuse, Mat *newmat) 1997 { 1998 Mat_Nest *nest = (Mat_Nest *)A->data; 1999 PetscInt m, n, M, N, i, j, k, *dnnz, *onnz = NULL, rstart, cstart, cend; 2000 PetscMPIInt size; 2001 Mat C; 2002 2003 PetscFunctionBegin; 2004 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size)); 2005 if (size == 1) { /* look for a special case with SeqAIJ matrices and strided-1, contiguous, blocks */ 2006 PetscInt nf; 2007 PetscBool fast; 2008 2009 PetscCall(PetscStrcmp(newtype, MATAIJ, &fast)); 2010 if (!fast) PetscCall(PetscStrcmp(newtype, MATSEQAIJ, &fast)); 2011 for (i = 0; i < nest->nr && fast; ++i) { 2012 for (j = 0; j < nest->nc && fast; ++j) { 2013 Mat B = nest->m[i][j]; 2014 if (B) { 2015 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATSEQAIJ, &fast)); 2016 if (!fast) { 2017 PetscBool istrans; 2018 2019 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans)); 2020 if (istrans) { 2021 Mat Bt; 2022 2023 PetscCall(MatTransposeGetMat(B, &Bt)); 2024 PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast)); 2025 } else { 2026 
PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans)); 2027 if (istrans) { 2028 Mat Bt; 2029 2030 PetscCall(MatHermitianTransposeGetMat(B, &Bt)); 2031 PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast)); 2032 } 2033 } 2034 } 2035 } 2036 } 2037 } 2038 for (i = 0, nf = 0; i < nest->nr && fast; ++i) { 2039 PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.row[i], ISSTRIDE, &fast)); 2040 if (fast) { 2041 PetscInt f, s; 2042 2043 PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &f, &s)); 2044 if (f != nf || s != 1) { 2045 fast = PETSC_FALSE; 2046 } else { 2047 PetscCall(ISGetSize(nest->isglobal.row[i], &f)); 2048 nf += f; 2049 } 2050 } 2051 } 2052 for (i = 0, nf = 0; i < nest->nc && fast; ++i) { 2053 PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.col[i], ISSTRIDE, &fast)); 2054 if (fast) { 2055 PetscInt f, s; 2056 2057 PetscCall(ISStrideGetInfo(nest->isglobal.col[i], &f, &s)); 2058 if (f != nf || s != 1) { 2059 fast = PETSC_FALSE; 2060 } else { 2061 PetscCall(ISGetSize(nest->isglobal.col[i], &f)); 2062 nf += f; 2063 } 2064 } 2065 } 2066 if (fast) { 2067 PetscCall(MatConvert_Nest_SeqAIJ_fast(A, newtype, reuse, newmat)); 2068 PetscFunctionReturn(PETSC_SUCCESS); 2069 } 2070 } 2071 PetscCall(MatGetSize(A, &M, &N)); 2072 PetscCall(MatGetLocalSize(A, &m, &n)); 2073 PetscCall(MatGetOwnershipRangeColumn(A, &cstart, &cend)); 2074 if (reuse == MAT_REUSE_MATRIX) C = *newmat; 2075 else { 2076 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C)); 2077 PetscCall(MatSetType(C, newtype)); 2078 PetscCall(MatSetSizes(C, m, n, M, N)); 2079 } 2080 PetscCall(PetscMalloc1(2 * m, &dnnz)); 2081 if (m) { 2082 onnz = dnnz + m; 2083 for (k = 0; k < m; k++) { 2084 dnnz[k] = 0; 2085 onnz[k] = 0; 2086 } 2087 } 2088 for (j = 0; j < nest->nc; ++j) { 2089 IS bNis; 2090 PetscInt bN; 2091 const PetscInt *bNindices; 2092 PetscBool flg; 2093 /* Using global column indices and ISAllGather() is not scalable. 
*/ 2094 PetscCall(ISAllGather(nest->isglobal.col[j], &bNis)); 2095 PetscCall(ISGetSize(bNis, &bN)); 2096 PetscCall(ISGetIndices(bNis, &bNindices)); 2097 for (i = 0; i < nest->nr; ++i) { 2098 PetscSF bmsf; 2099 PetscSFNode *iremote; 2100 Mat B = nest->m[i][j], D = NULL; 2101 PetscInt bm, *sub_dnnz, *sub_onnz, br; 2102 const PetscInt *bmindices; 2103 if (!B) continue; 2104 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm)); 2105 PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices)); 2106 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &bmsf)); 2107 PetscCall(PetscMalloc1(bm, &iremote)); 2108 PetscCall(PetscMalloc1(bm, &sub_dnnz)); 2109 PetscCall(PetscMalloc1(bm, &sub_onnz)); 2110 for (k = 0; k < bm; ++k) { 2111 sub_dnnz[k] = 0; 2112 sub_onnz[k] = 0; 2113 } 2114 PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, "")); 2115 if (flg) { 2116 PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D)); 2117 PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D)); 2118 PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D)); 2119 B = D; 2120 } 2121 PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, "")); 2122 if (flg) { 2123 if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D)); 2124 else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D)); 2125 B = D; 2126 } 2127 /* 2128 Locate the owners for all of the locally-owned global row indices for this row block. 2129 These determine the roots of PetscSF used to communicate preallocation data to row owners. 2130 The roots correspond to the dnnz and onnz entries; thus, there are two roots per row. 
2131 */ 2132 PetscCall(MatGetOwnershipRange(B, &rstart, NULL)); 2133 for (br = 0; br < bm; ++br) { 2134 PetscInt row = bmindices[br], brncols, col; 2135 const PetscInt *brcols; 2136 PetscInt rowrel = 0; /* row's relative index on its owner rank */ 2137 PetscMPIInt rowowner = 0; 2138 PetscCall(PetscLayoutFindOwnerIndex(A->rmap, row, &rowowner, &rowrel)); 2139 /* how many roots */ 2140 iremote[br].rank = rowowner; 2141 iremote[br].index = rowrel; /* edge from bmdnnz to dnnz */ 2142 /* get nonzero pattern */ 2143 PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, NULL)); 2144 for (k = 0; k < brncols; k++) { 2145 col = bNindices[brcols[k]]; 2146 if (col >= A->cmap->range[rowowner] && col < A->cmap->range[rowowner + 1]) { 2147 sub_dnnz[br]++; 2148 } else { 2149 sub_onnz[br]++; 2150 } 2151 } 2152 PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, NULL)); 2153 } 2154 if (D) PetscCall(MatDestroy(&D)); 2155 PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices)); 2156 /* bsf will have to take care of disposing of bedges. 
*/ 2157 PetscCall(PetscSFSetGraph(bmsf, m, bm, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER)); 2158 PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM)); 2159 PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM)); 2160 PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM)); 2161 PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM)); 2162 PetscCall(PetscFree(sub_dnnz)); 2163 PetscCall(PetscFree(sub_onnz)); 2164 PetscCall(PetscSFDestroy(&bmsf)); 2165 } 2166 PetscCall(ISRestoreIndices(bNis, &bNindices)); 2167 PetscCall(ISDestroy(&bNis)); 2168 } 2169 /* Resize preallocation if overestimated */ 2170 for (i = 0; i < m; i++) { 2171 dnnz[i] = PetscMin(dnnz[i], A->cmap->n); 2172 onnz[i] = PetscMin(onnz[i], A->cmap->N - A->cmap->n); 2173 } 2174 PetscCall(MatSeqAIJSetPreallocation(C, 0, dnnz)); 2175 PetscCall(MatMPIAIJSetPreallocation(C, 0, dnnz, 0, onnz)); 2176 PetscCall(PetscFree(dnnz)); 2177 PetscCall(MatAXPY_Dense_Nest(C, 1.0, A)); 2178 if (reuse == MAT_INPLACE_MATRIX) { 2179 PetscCall(MatHeaderReplace(A, &C)); 2180 } else *newmat = C; 2181 PetscFunctionReturn(PETSC_SUCCESS); 2182 } 2183 2184 static PetscErrorCode MatConvert_Nest_Dense(Mat A, MatType newtype, MatReuse reuse, Mat *newmat) 2185 { 2186 Mat B; 2187 PetscInt m, n, M, N; 2188 2189 PetscFunctionBegin; 2190 PetscCall(MatGetSize(A, &M, &N)); 2191 PetscCall(MatGetLocalSize(A, &m, &n)); 2192 if (reuse == MAT_REUSE_MATRIX) { 2193 B = *newmat; 2194 PetscCall(MatZeroEntries(B)); 2195 } else { 2196 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), m, PETSC_DECIDE, M, N, NULL, &B)); 2197 } 2198 PetscCall(MatAXPY_Dense_Nest(B, 1.0, A)); 2199 if (reuse == MAT_INPLACE_MATRIX) { 2200 PetscCall(MatHeaderReplace(A, &B)); 2201 } else if (reuse == MAT_INITIAL_MATRIX) *newmat = B; 2202 PetscFunctionReturn(PETSC_SUCCESS); 2203 } 2204 2205 static PetscErrorCode MatHasOperation_Nest(Mat mat, MatOperation op, PetscBool *has) 2206 { 2207 Mat_Nest *bA = 
(Mat_Nest *)mat->data; 2208 MatOperation opAdd; 2209 PetscInt i, j, nr = bA->nr, nc = bA->nc; 2210 PetscBool flg; 2211 PetscFunctionBegin; 2212 2213 *has = PETSC_FALSE; 2214 if (op == MATOP_MULT || op == MATOP_MULT_ADD || op == MATOP_MULT_TRANSPOSE || op == MATOP_MULT_TRANSPOSE_ADD) { 2215 opAdd = (op == MATOP_MULT || op == MATOP_MULT_ADD ? MATOP_MULT_ADD : MATOP_MULT_TRANSPOSE_ADD); 2216 for (j = 0; j < nc; j++) { 2217 for (i = 0; i < nr; i++) { 2218 if (!bA->m[i][j]) continue; 2219 PetscCall(MatHasOperation(bA->m[i][j], opAdd, &flg)); 2220 if (!flg) PetscFunctionReturn(PETSC_SUCCESS); 2221 } 2222 } 2223 } 2224 if (((void **)mat->ops)[op]) *has = PETSC_TRUE; 2225 PetscFunctionReturn(PETSC_SUCCESS); 2226 } 2227 2228 /*MC 2229 MATNEST - "nest" - Matrix type consisting of nested submatrices, each stored separately. 2230 2231 Level: intermediate 2232 2233 Notes: 2234 This matrix type permits scalable use of `PCFIELDSPLIT` and avoids the large memory costs of extracting submatrices. 2235 It allows the use of symmetric and block formats for parts of multi-physics simulations. 2236 It is usually used with `DMCOMPOSITE` and `DMCreateMatrix()` 2237 2238 Each of the submatrices lives on the same MPI communicator as the original nest matrix (though they can have zero 2239 rows/columns on some processes.) Thus this is not meant for cases where the submatrices live on far fewer processes 2240 than the nest matrix. 
2241 2242 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `MatType`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`, 2243 `VecCreateNest()`, `DMCreateMatrix()`, `DMCOMPOSITE`, `MatNestSetVecType()`, `MatNestGetLocalISs()`, 2244 `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()` 2245 M*/ 2246 PETSC_EXTERN PetscErrorCode MatCreate_Nest(Mat A) 2247 { 2248 Mat_Nest *s; 2249 2250 PetscFunctionBegin; 2251 PetscCall(PetscNew(&s)); 2252 A->data = (void *)s; 2253 2254 s->nr = -1; 2255 s->nc = -1; 2256 s->m = NULL; 2257 s->splitassembly = PETSC_FALSE; 2258 2259 PetscCall(PetscMemzero(A->ops, sizeof(*A->ops))); 2260 2261 A->ops->mult = MatMult_Nest; 2262 A->ops->multadd = MatMultAdd_Nest; 2263 A->ops->multtranspose = MatMultTranspose_Nest; 2264 A->ops->multtransposeadd = MatMultTransposeAdd_Nest; 2265 A->ops->transpose = MatTranspose_Nest; 2266 A->ops->multhermitiantranspose = MatMultHermitianTranspose_Nest; 2267 A->ops->multhermitiantransposeadd = MatMultHermitianTransposeAdd_Nest; 2268 A->ops->assemblybegin = MatAssemblyBegin_Nest; 2269 A->ops->assemblyend = MatAssemblyEnd_Nest; 2270 A->ops->zeroentries = MatZeroEntries_Nest; 2271 A->ops->copy = MatCopy_Nest; 2272 A->ops->axpy = MatAXPY_Nest; 2273 A->ops->duplicate = MatDuplicate_Nest; 2274 A->ops->createsubmatrix = MatCreateSubMatrix_Nest; 2275 A->ops->destroy = MatDestroy_Nest; 2276 A->ops->view = MatView_Nest; 2277 A->ops->getvecs = NULL; /* Use VECNEST by calling MatNestSetVecType(A,VECNEST) */ 2278 A->ops->getlocalsubmatrix = MatGetLocalSubMatrix_Nest; 2279 A->ops->restorelocalsubmatrix = MatRestoreLocalSubMatrix_Nest; 2280 A->ops->getdiagonal = MatGetDiagonal_Nest; 2281 A->ops->diagonalscale = MatDiagonalScale_Nest; 2282 A->ops->scale = MatScale_Nest; 2283 A->ops->shift = MatShift_Nest; 2284 A->ops->diagonalset = MatDiagonalSet_Nest; 2285 A->ops->setrandom = MatSetRandom_Nest; 2286 A->ops->hasoperation = MatHasOperation_Nest; 2287 A->ops->missingdiagonal = 
MatMissingDiagonal_Nest; 2288 2289 A->spptr = NULL; 2290 A->assembled = PETSC_FALSE; 2291 2292 /* expose Nest api's */ 2293 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", MatNestGetSubMat_Nest)); 2294 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", MatNestSetSubMat_Nest)); 2295 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", MatNestGetSubMats_Nest)); 2296 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", MatNestGetSize_Nest)); 2297 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", MatNestGetISs_Nest)); 2298 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", MatNestGetLocalISs_Nest)); 2299 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", MatNestSetVecType_Nest)); 2300 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", MatNestSetSubMats_Nest)); 2301 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", MatConvert_Nest_AIJ)); 2302 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", MatConvert_Nest_AIJ)); 2303 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", MatConvert_Nest_AIJ)); 2304 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", MatConvert_Nest_IS)); 2305 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", MatConvert_Nest_Dense)); 2306 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", MatConvert_Nest_Dense)); 2307 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", MatProductSetFromOptions_Nest_Dense)); 2308 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", MatProductSetFromOptions_Nest_Dense)); 2309 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", 
MatProductSetFromOptions_Nest_Dense)); 2310 2311 PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATNEST)); 2312 PetscFunctionReturn(PETSC_SUCCESS); 2313 } 2314