1 #include <../src/mat/impls/nest/matnestimpl.h> /*I "petscmat.h" I*/ 2 #include <../src/mat/impls/aij/seq/aij.h> 3 #include <petscsf.h> 4 5 static PetscErrorCode MatSetUp_NestIS_Private(Mat, PetscInt, const IS[], PetscInt, const IS[]); 6 static PetscErrorCode MatCreateVecs_Nest(Mat, Vec *, Vec *); 7 static PetscErrorCode MatReset_Nest(Mat); 8 9 PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat, MatType, MatReuse, Mat *); 10 11 /* private functions */ 12 static PetscErrorCode MatNestGetSizes_Private(Mat A, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N) 13 { 14 Mat_Nest *bA = (Mat_Nest *)A->data; 15 PetscInt i, j; 16 17 PetscFunctionBegin; 18 *m = *n = *M = *N = 0; 19 for (i = 0; i < bA->nr; i++) { /* rows */ 20 PetscInt sm, sM; 21 PetscCall(ISGetLocalSize(bA->isglobal.row[i], &sm)); 22 PetscCall(ISGetSize(bA->isglobal.row[i], &sM)); 23 *m += sm; 24 *M += sM; 25 } 26 for (j = 0; j < bA->nc; j++) { /* cols */ 27 PetscInt sn, sN; 28 PetscCall(ISGetLocalSize(bA->isglobal.col[j], &sn)); 29 PetscCall(ISGetSize(bA->isglobal.col[j], &sN)); 30 *n += sn; 31 *N += sN; 32 } 33 PetscFunctionReturn(PETSC_SUCCESS); 34 } 35 36 /* operations */ 37 static PetscErrorCode MatMult_Nest(Mat A, Vec x, Vec y) 38 { 39 Mat_Nest *bA = (Mat_Nest *)A->data; 40 Vec *bx = bA->right, *by = bA->left; 41 PetscInt i, j, nr = bA->nr, nc = bA->nc; 42 43 PetscFunctionBegin; 44 for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by[i])); 45 for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i])); 46 for (i = 0; i < nr; i++) { 47 PetscCall(VecZeroEntries(by[i])); 48 for (j = 0; j < nc; j++) { 49 if (!bA->m[i][j]) continue; 50 /* y[i] <- y[i] + A[i][j] * x[j] */ 51 PetscCall(MatMultAdd(bA->m[i][j], bx[j], by[i], by[i])); 52 } 53 } 54 for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by[i])); 55 for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i])); 56 
PetscFunctionReturn(PETSC_SUCCESS); 57 } 58 59 static PetscErrorCode MatMultAdd_Nest(Mat A, Vec x, Vec y, Vec z) 60 { 61 Mat_Nest *bA = (Mat_Nest *)A->data; 62 Vec *bx = bA->right, *bz = bA->left; 63 PetscInt i, j, nr = bA->nr, nc = bA->nc; 64 65 PetscFunctionBegin; 66 for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(z, bA->isglobal.row[i], &bz[i])); 67 for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i])); 68 for (i = 0; i < nr; i++) { 69 if (y != z) { 70 Vec by; 71 PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by)); 72 PetscCall(VecCopy(by, bz[i])); 73 PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by)); 74 } 75 for (j = 0; j < nc; j++) { 76 if (!bA->m[i][j]) continue; 77 /* y[i] <- y[i] + A[i][j] * x[j] */ 78 PetscCall(MatMultAdd(bA->m[i][j], bx[j], bz[i], bz[i])); 79 } 80 } 81 for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.row[i], &bz[i])); 82 for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i])); 83 PetscFunctionReturn(PETSC_SUCCESS); 84 } 85 86 typedef struct { 87 Mat *workC; /* array of Mat with specific containers depending on the underlying MatMatMult implementation */ 88 PetscScalar *tarray; /* buffer for storing all temporary products A[i][j] B[j] */ 89 PetscInt *dm, *dn, k; /* displacements and number of submatrices */ 90 } Nest_Dense; 91 92 static PetscErrorCode MatProductNumeric_Nest_Dense(Mat C) 93 { 94 Mat_Nest *bA; 95 Nest_Dense *contents; 96 Mat viewB, viewC, productB, workC; 97 const PetscScalar *barray; 98 PetscScalar *carray; 99 PetscInt i, j, M, N, nr, nc, ldb, ldc; 100 Mat A, B; 101 102 PetscFunctionBegin; 103 MatCheckProduct(C, 1); 104 A = C->product->A; 105 B = C->product->B; 106 PetscCall(MatGetSize(B, NULL, &N)); 107 if (!N) { 108 PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY)); 109 PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY)); 110 PetscFunctionReturn(PETSC_SUCCESS); 111 } 112 contents = (Nest_Dense *)C->product->data; 113 
PetscCheck(contents, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data empty"); 114 bA = (Mat_Nest *)A->data; 115 nr = bA->nr; 116 nc = bA->nc; 117 PetscCall(MatDenseGetLDA(B, &ldb)); 118 PetscCall(MatDenseGetLDA(C, &ldc)); 119 PetscCall(MatZeroEntries(C)); 120 PetscCall(MatDenseGetArrayRead(B, &barray)); 121 PetscCall(MatDenseGetArray(C, &carray)); 122 for (i = 0; i < nr; i++) { 123 PetscCall(ISGetSize(bA->isglobal.row[i], &M)); 124 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dm[i + 1] - contents->dm[i], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset(carray, contents->dm[i]), &viewC)); 125 PetscCall(MatDenseSetLDA(viewC, ldc)); 126 for (j = 0; j < nc; j++) { 127 if (!bA->m[i][j]) continue; 128 PetscCall(ISGetSize(bA->isglobal.col[j], &M)); 129 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset((PetscScalar *)barray, contents->dn[j]), &viewB)); 130 PetscCall(MatDenseSetLDA(viewB, ldb)); 131 132 /* MatMatMultNumeric(bA->m[i][j],viewB,contents->workC[i*nc + j]); */ 133 workC = contents->workC[i * nc + j]; 134 productB = workC->product->B; 135 workC->product->B = viewB; /* use newly created dense matrix viewB */ 136 PetscCall(MatProductNumeric(workC)); 137 PetscCall(MatDestroy(&viewB)); 138 workC->product->B = productB; /* resume original B */ 139 140 /* C[i] <- workC + C[i] */ 141 PetscCall(MatAXPY(viewC, 1.0, contents->workC[i * nc + j], SAME_NONZERO_PATTERN)); 142 } 143 PetscCall(MatDestroy(&viewC)); 144 } 145 PetscCall(MatDenseRestoreArray(C, &carray)); 146 PetscCall(MatDenseRestoreArrayRead(B, &barray)); 147 148 PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE)); 149 PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY)); 150 PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY)); 151 PetscFunctionReturn(PETSC_SUCCESS); 152 } 153 154 static PetscErrorCode MatNest_DenseDestroy(void *ctx) 155 { 156 Nest_Dense *contents = (Nest_Dense 
*)ctx; 157 PetscInt i; 158 159 PetscFunctionBegin; 160 PetscCall(PetscFree(contents->tarray)); 161 for (i = 0; i < contents->k; i++) PetscCall(MatDestroy(contents->workC + i)); 162 PetscCall(PetscFree3(contents->dm, contents->dn, contents->workC)); 163 PetscCall(PetscFree(contents)); 164 PetscFunctionReturn(PETSC_SUCCESS); 165 } 166 167 static PetscErrorCode MatProductSymbolic_Nest_Dense(Mat C) 168 { 169 Mat_Nest *bA; 170 Mat viewB, workC; 171 const PetscScalar *barray; 172 PetscInt i, j, M, N, m, n, nr, nc, maxm = 0, ldb; 173 Nest_Dense *contents = NULL; 174 PetscBool cisdense; 175 Mat A, B; 176 PetscReal fill; 177 178 PetscFunctionBegin; 179 MatCheckProduct(C, 1); 180 PetscCheck(!C->product->data, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data not empty"); 181 A = C->product->A; 182 B = C->product->B; 183 fill = C->product->fill; 184 bA = (Mat_Nest *)A->data; 185 nr = bA->nr; 186 nc = bA->nc; 187 PetscCall(MatGetLocalSize(C, &m, &n)); 188 PetscCall(MatGetSize(C, &M, &N)); 189 if (m == PETSC_DECIDE || n == PETSC_DECIDE || M == PETSC_DECIDE || N == PETSC_DECIDE) { 190 PetscCall(MatGetLocalSize(B, NULL, &n)); 191 PetscCall(MatGetSize(B, NULL, &N)); 192 PetscCall(MatGetLocalSize(A, &m, NULL)); 193 PetscCall(MatGetSize(A, &M, NULL)); 194 PetscCall(MatSetSizes(C, m, n, M, N)); 195 } 196 PetscCall(PetscObjectTypeCompareAny((PetscObject)C, &cisdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, MATMPIDENSECUDA, "")); 197 if (!cisdense) PetscCall(MatSetType(C, ((PetscObject)B)->type_name)); 198 PetscCall(MatSetUp(C)); 199 if (!N) { 200 C->ops->productnumeric = MatProductNumeric_Nest_Dense; 201 PetscFunctionReturn(PETSC_SUCCESS); 202 } 203 204 PetscCall(PetscNew(&contents)); 205 C->product->data = contents; 206 C->product->destroy = MatNest_DenseDestroy; 207 PetscCall(PetscCalloc3(nr + 1, &contents->dm, nc + 1, &contents->dn, nr * nc, &contents->workC)); 208 contents->k = nr * nc; 209 for (i = 0; i < nr; i++) { 210 
PetscCall(ISGetLocalSize(bA->isglobal.row[i], contents->dm + i + 1)); 211 maxm = PetscMax(maxm, contents->dm[i + 1]); 212 contents->dm[i + 1] += contents->dm[i]; 213 } 214 for (i = 0; i < nc; i++) { 215 PetscCall(ISGetLocalSize(bA->isglobal.col[i], contents->dn + i + 1)); 216 contents->dn[i + 1] += contents->dn[i]; 217 } 218 PetscCall(PetscMalloc1(maxm * N, &contents->tarray)); 219 PetscCall(MatDenseGetLDA(B, &ldb)); 220 PetscCall(MatGetSize(B, NULL, &N)); 221 PetscCall(MatDenseGetArrayRead(B, &barray)); 222 /* loops are permuted compared to MatMatMultNumeric so that viewB is created only once per column of A */ 223 for (j = 0; j < nc; j++) { 224 PetscCall(ISGetSize(bA->isglobal.col[j], &M)); 225 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset((PetscScalar *)barray, contents->dn[j]), &viewB)); 226 PetscCall(MatDenseSetLDA(viewB, ldb)); 227 for (i = 0; i < nr; i++) { 228 if (!bA->m[i][j]) continue; 229 /* MatMatMultSymbolic may attach a specific container (depending on MatType of bA->m[i][j]) to workC[i][j] */ 230 231 PetscCall(MatProductCreate(bA->m[i][j], viewB, NULL, &contents->workC[i * nc + j])); 232 workC = contents->workC[i * nc + j]; 233 PetscCall(MatProductSetType(workC, MATPRODUCT_AB)); 234 PetscCall(MatProductSetAlgorithm(workC, "default")); 235 PetscCall(MatProductSetFill(workC, fill)); 236 PetscCall(MatProductSetFromOptions(workC)); 237 PetscCall(MatProductSymbolic(workC)); 238 239 /* since tarray will be shared by all Mat */ 240 PetscCall(MatSeqDenseSetPreallocation(workC, contents->tarray)); 241 PetscCall(MatMPIDenseSetPreallocation(workC, contents->tarray)); 242 } 243 PetscCall(MatDestroy(&viewB)); 244 } 245 PetscCall(MatDenseRestoreArrayRead(B, &barray)); 246 247 C->ops->productnumeric = MatProductNumeric_Nest_Dense; 248 PetscFunctionReturn(PETSC_SUCCESS); 249 } 250 251 static PetscErrorCode MatProductSetFromOptions_Nest_Dense_AB(Mat C) 252 { 253 
PetscFunctionBegin; 254 C->ops->productsymbolic = MatProductSymbolic_Nest_Dense; 255 PetscFunctionReturn(PETSC_SUCCESS); 256 } 257 258 static PetscErrorCode MatProductSetFromOptions_Nest_Dense(Mat C) 259 { 260 Mat_Product *product = C->product; 261 262 PetscFunctionBegin; 263 if (product->type == MATPRODUCT_AB) PetscCall(MatProductSetFromOptions_Nest_Dense_AB(C)); 264 PetscFunctionReturn(PETSC_SUCCESS); 265 } 266 267 static PetscErrorCode MatMultTransposeKernel_Nest(Mat A, Vec x, Vec y, PetscBool herm) 268 { 269 Mat_Nest *bA = (Mat_Nest *)A->data; 270 Vec *bx = bA->left, *by = bA->right; 271 PetscInt i, j, nr = bA->nr, nc = bA->nc; 272 273 PetscFunctionBegin; 274 for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i])); 275 for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(y, bA->isglobal.col[i], &by[i])); 276 for (j = 0; j < nc; j++) { 277 PetscCall(VecZeroEntries(by[j])); 278 for (i = 0; i < nr; i++) { 279 if (!bA->m[i][j]) continue; 280 if (herm) PetscCall(MatMultHermitianTransposeAdd(bA->m[i][j], bx[i], by[j], by[j])); /* y[j] <- y[j] + (A[i][j])^H * x[i] */ 281 else PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], by[j], by[j])); /* y[j] <- y[j] + (A[i][j])^T * x[i] */ 282 } 283 } 284 for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i])); 285 for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.col[i], &by[i])); 286 PetscFunctionReturn(PETSC_SUCCESS); 287 } 288 289 static PetscErrorCode MatMultTranspose_Nest(Mat A, Vec x, Vec y) 290 { 291 PetscFunctionBegin; 292 PetscCall(MatMultTransposeKernel_Nest(A, x, y, PETSC_FALSE)); 293 PetscFunctionReturn(PETSC_SUCCESS); 294 } 295 296 static PetscErrorCode MatMultHermitianTranspose_Nest(Mat A, Vec x, Vec y) 297 { 298 PetscFunctionBegin; 299 PetscCall(MatMultTransposeKernel_Nest(A, x, y, PETSC_TRUE)); 300 PetscFunctionReturn(PETSC_SUCCESS); 301 } 302 303 static PetscErrorCode MatMultTransposeAddKernel_Nest(Mat A, Vec x, Vec y, Vec z, 
PetscBool herm) 304 { 305 Mat_Nest *bA = (Mat_Nest *)A->data; 306 Vec *bx = bA->left, *bz = bA->right; 307 PetscInt i, j, nr = bA->nr, nc = bA->nc; 308 309 PetscFunctionBegin; 310 for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i])); 311 for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(z, bA->isglobal.col[i], &bz[i])); 312 for (j = 0; j < nc; j++) { 313 if (y != z) { 314 Vec by; 315 PetscCall(VecGetSubVector(y, bA->isglobal.col[j], &by)); 316 PetscCall(VecCopy(by, bz[j])); 317 PetscCall(VecRestoreSubVector(y, bA->isglobal.col[j], &by)); 318 } 319 for (i = 0; i < nr; i++) { 320 if (!bA->m[i][j]) continue; 321 if (herm) PetscCall(MatMultHermitianTransposeAdd(bA->m[i][j], bx[i], bz[j], bz[j])); /* z[j] <- y[j] + (A[i][j])^H * x[i] */ 322 else PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], bz[j], bz[j])); /* z[j] <- y[j] + (A[i][j])^T * x[i] */ 323 } 324 } 325 for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i])); 326 for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.col[i], &bz[i])); 327 PetscFunctionReturn(PETSC_SUCCESS); 328 } 329 330 static PetscErrorCode MatMultTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z) 331 { 332 PetscFunctionBegin; 333 PetscCall(MatMultTransposeAddKernel_Nest(A, x, y, z, PETSC_FALSE)); 334 PetscFunctionReturn(PETSC_SUCCESS); 335 } 336 337 static PetscErrorCode MatMultHermitianTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z) 338 { 339 PetscFunctionBegin; 340 PetscCall(MatMultTransposeAddKernel_Nest(A, x, y, z, PETSC_TRUE)); 341 PetscFunctionReturn(PETSC_SUCCESS); 342 } 343 344 static PetscErrorCode MatTranspose_Nest(Mat A, MatReuse reuse, Mat *B) 345 { 346 Mat_Nest *bA = (Mat_Nest *)A->data, *bC; 347 Mat C; 348 PetscInt i, j, nr = bA->nr, nc = bA->nc; 349 350 PetscFunctionBegin; 351 if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B)); 352 PetscCheck(reuse != MAT_INPLACE_MATRIX || nr == nc, PetscObjectComm((PetscObject)A), 
PETSC_ERR_ARG_SIZ, "Square nested matrix only for in-place"); 353 354 if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) { 355 Mat *subs; 356 IS *is_row, *is_col; 357 358 PetscCall(PetscCalloc1(nr * nc, &subs)); 359 PetscCall(PetscMalloc2(nr, &is_row, nc, &is_col)); 360 PetscCall(MatNestGetISs(A, is_row, is_col)); 361 if (reuse == MAT_INPLACE_MATRIX) { 362 for (i = 0; i < nr; i++) { 363 for (j = 0; j < nc; j++) subs[i + nr * j] = bA->m[i][j]; 364 } 365 } 366 367 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nc, is_col, nr, is_row, subs, &C)); 368 PetscCall(PetscFree(subs)); 369 PetscCall(PetscFree2(is_row, is_col)); 370 } else { 371 C = *B; 372 } 373 374 bC = (Mat_Nest *)C->data; 375 for (i = 0; i < nr; i++) { 376 for (j = 0; j < nc; j++) { 377 if (bA->m[i][j]) { 378 PetscCall(MatTranspose(bA->m[i][j], reuse, &(bC->m[j][i]))); 379 } else { 380 bC->m[j][i] = NULL; 381 } 382 } 383 } 384 385 if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) { 386 *B = C; 387 } else { 388 PetscCall(MatHeaderMerge(A, &C)); 389 } 390 PetscFunctionReturn(PETSC_SUCCESS); 391 } 392 393 static PetscErrorCode MatNestDestroyISList(PetscInt n, IS **list) 394 { 395 IS *lst = *list; 396 PetscInt i; 397 398 PetscFunctionBegin; 399 if (!lst) PetscFunctionReturn(PETSC_SUCCESS); 400 for (i = 0; i < n; i++) 401 if (lst[i]) PetscCall(ISDestroy(&lst[i])); 402 PetscCall(PetscFree(lst)); 403 *list = NULL; 404 PetscFunctionReturn(PETSC_SUCCESS); 405 } 406 407 static PetscErrorCode MatReset_Nest(Mat A) 408 { 409 Mat_Nest *vs = (Mat_Nest *)A->data; 410 PetscInt i, j; 411 412 PetscFunctionBegin; 413 /* release the matrices and the place holders */ 414 PetscCall(MatNestDestroyISList(vs->nr, &vs->isglobal.row)); 415 PetscCall(MatNestDestroyISList(vs->nc, &vs->isglobal.col)); 416 PetscCall(MatNestDestroyISList(vs->nr, &vs->islocal.row)); 417 PetscCall(MatNestDestroyISList(vs->nc, &vs->islocal.col)); 418 419 PetscCall(PetscFree(vs->row_len)); 420 
PetscCall(PetscFree(vs->col_len)); 421 PetscCall(PetscFree(vs->nnzstate)); 422 423 PetscCall(PetscFree2(vs->left, vs->right)); 424 425 /* release the matrices and the place holders */ 426 if (vs->m) { 427 for (i = 0; i < vs->nr; i++) { 428 for (j = 0; j < vs->nc; j++) PetscCall(MatDestroy(&vs->m[i][j])); 429 PetscCall(PetscFree(vs->m[i])); 430 } 431 PetscCall(PetscFree(vs->m)); 432 } 433 434 /* restore defaults */ 435 vs->nr = 0; 436 vs->nc = 0; 437 vs->splitassembly = PETSC_FALSE; 438 PetscFunctionReturn(PETSC_SUCCESS); 439 } 440 441 static PetscErrorCode MatDestroy_Nest(Mat A) 442 { 443 PetscFunctionBegin; 444 PetscCall(MatReset_Nest(A)); 445 PetscCall(PetscFree(A->data)); 446 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", NULL)); 447 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", NULL)); 448 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", NULL)); 449 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", NULL)); 450 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", NULL)); 451 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", NULL)); 452 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", NULL)); 453 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", NULL)); 454 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", NULL)); 455 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", NULL)); 456 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", NULL)); 457 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", NULL)); 458 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", NULL)); 459 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", NULL)); 460 PetscCall(PetscObjectComposeFunction((PetscObject)A, 
"MatProductSetFromOptions_nest_seqdense_C", NULL)); 461 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", NULL)); 462 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", NULL)); 463 PetscFunctionReturn(PETSC_SUCCESS); 464 } 465 466 static PetscErrorCode MatMissingDiagonal_Nest(Mat mat, PetscBool *missing, PetscInt *dd) 467 { 468 Mat_Nest *vs = (Mat_Nest *)mat->data; 469 PetscInt i; 470 471 PetscFunctionBegin; 472 if (dd) *dd = 0; 473 if (!vs->nr) { 474 *missing = PETSC_TRUE; 475 PetscFunctionReturn(PETSC_SUCCESS); 476 } 477 *missing = PETSC_FALSE; 478 for (i = 0; i < vs->nr && !(*missing); i++) { 479 *missing = PETSC_TRUE; 480 if (vs->m[i][i]) { 481 PetscCall(MatMissingDiagonal(vs->m[i][i], missing, NULL)); 482 PetscCheck(!*missing || !dd, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "First missing entry not yet implemented"); 483 } 484 } 485 PetscFunctionReturn(PETSC_SUCCESS); 486 } 487 488 static PetscErrorCode MatAssemblyBegin_Nest(Mat A, MatAssemblyType type) 489 { 490 Mat_Nest *vs = (Mat_Nest *)A->data; 491 PetscInt i, j; 492 PetscBool nnzstate = PETSC_FALSE; 493 494 PetscFunctionBegin; 495 for (i = 0; i < vs->nr; i++) { 496 for (j = 0; j < vs->nc; j++) { 497 PetscObjectState subnnzstate = 0; 498 if (vs->m[i][j]) { 499 PetscCall(MatAssemblyBegin(vs->m[i][j], type)); 500 if (!vs->splitassembly) { 501 /* Note: split assembly will fail if the same block appears more than once (even indirectly through a nested 502 * sub-block). This could be fixed by adding a flag to Mat so that there was a way to check if a Mat was 503 * already performing an assembly, but the result would by more complicated and appears to offer less 504 * potential for diagnostics and correctness checking. Split assembly should be fixed once there is an 505 * interface for libraries to make asynchronous progress in "user-defined non-blocking collectives". 
506 */ 507 PetscCall(MatAssemblyEnd(vs->m[i][j], type)); 508 PetscCall(MatGetNonzeroState(vs->m[i][j], &subnnzstate)); 509 } 510 } 511 nnzstate = (PetscBool)(nnzstate || vs->nnzstate[i * vs->nc + j] != subnnzstate); 512 vs->nnzstate[i * vs->nc + j] = subnnzstate; 513 } 514 } 515 if (nnzstate) A->nonzerostate++; 516 PetscFunctionReturn(PETSC_SUCCESS); 517 } 518 519 static PetscErrorCode MatAssemblyEnd_Nest(Mat A, MatAssemblyType type) 520 { 521 Mat_Nest *vs = (Mat_Nest *)A->data; 522 PetscInt i, j; 523 524 PetscFunctionBegin; 525 for (i = 0; i < vs->nr; i++) { 526 for (j = 0; j < vs->nc; j++) { 527 if (vs->m[i][j]) { 528 if (vs->splitassembly) PetscCall(MatAssemblyEnd(vs->m[i][j], type)); 529 } 530 } 531 } 532 PetscFunctionReturn(PETSC_SUCCESS); 533 } 534 535 static PetscErrorCode MatNestFindNonzeroSubMatRow(Mat A, PetscInt row, Mat *B) 536 { 537 Mat_Nest *vs = (Mat_Nest *)A->data; 538 PetscInt j; 539 Mat sub; 540 541 PetscFunctionBegin; 542 sub = (row < vs->nc) ? vs->m[row][row] : (Mat)NULL; /* Prefer to find on the diagonal */ 543 for (j = 0; !sub && j < vs->nc; j++) sub = vs->m[row][j]; 544 if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */ 545 *B = sub; 546 PetscFunctionReturn(PETSC_SUCCESS); 547 } 548 549 static PetscErrorCode MatNestFindNonzeroSubMatCol(Mat A, PetscInt col, Mat *B) 550 { 551 Mat_Nest *vs = (Mat_Nest *)A->data; 552 PetscInt i; 553 Mat sub; 554 555 PetscFunctionBegin; 556 sub = (col < vs->nr) ? 
vs->m[col][col] : (Mat)NULL; /* Prefer to find on the diagonal */ 557 for (i = 0; !sub && i < vs->nr; i++) sub = vs->m[i][col]; 558 if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */ 559 *B = sub; 560 PetscFunctionReturn(PETSC_SUCCESS); 561 } 562 563 static PetscErrorCode MatNestFindISRange(Mat A, PetscInt n, const IS list[], IS is, PetscInt *begin, PetscInt *end) 564 { 565 PetscInt i, j, size, m; 566 PetscBool flg; 567 IS out, concatenate[2]; 568 569 PetscFunctionBegin; 570 PetscAssertPointer(list, 3); 571 PetscValidHeaderSpecific(is, IS_CLASSID, 4); 572 if (begin) { 573 PetscAssertPointer(begin, 5); 574 *begin = -1; 575 } 576 if (end) { 577 PetscAssertPointer(end, 6); 578 *end = -1; 579 } 580 for (i = 0; i < n; i++) { 581 if (!list[i]) continue; 582 PetscCall(ISEqualUnsorted(list[i], is, &flg)); 583 if (flg) { 584 if (begin) *begin = i; 585 if (end) *end = i + 1; 586 PetscFunctionReturn(PETSC_SUCCESS); 587 } 588 } 589 PetscCall(ISGetSize(is, &size)); 590 for (i = 0; i < n - 1; i++) { 591 if (!list[i]) continue; 592 m = 0; 593 PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, list + i, &out)); 594 PetscCall(ISGetSize(out, &m)); 595 for (j = i + 2; j < n && m < size; j++) { 596 if (list[j]) { 597 concatenate[0] = out; 598 concatenate[1] = list[j]; 599 PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, concatenate, &out)); 600 PetscCall(ISDestroy(concatenate)); 601 PetscCall(ISGetSize(out, &m)); 602 } 603 } 604 if (m == size) { 605 PetscCall(ISEqualUnsorted(out, is, &flg)); 606 if (flg) { 607 if (begin) *begin = i; 608 if (end) *end = j; 609 PetscCall(ISDestroy(&out)); 610 PetscFunctionReturn(PETSC_SUCCESS); 611 } 612 } 613 PetscCall(ISDestroy(&out)); 614 } 615 PetscFunctionReturn(PETSC_SUCCESS); 616 } 617 618 static PetscErrorCode MatNestFillEmptyMat_Private(Mat A, PetscInt i, PetscInt j, Mat *B) 619 { 620 Mat_Nest *vs = (Mat_Nest *)A->data; 621 PetscInt lr, lc; 622 623 PetscFunctionBegin; 624 
PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B)); 625 PetscCall(ISGetLocalSize(vs->isglobal.row[i], &lr)); 626 PetscCall(ISGetLocalSize(vs->isglobal.col[j], &lc)); 627 PetscCall(MatSetSizes(*B, lr, lc, PETSC_DECIDE, PETSC_DECIDE)); 628 PetscCall(MatSetType(*B, MATAIJ)); 629 PetscCall(MatSeqAIJSetPreallocation(*B, 0, NULL)); 630 PetscCall(MatMPIAIJSetPreallocation(*B, 0, NULL, 0, NULL)); 631 PetscCall(MatSetUp(*B)); 632 PetscCall(MatSetOption(*B, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE)); 633 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 634 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 635 PetscFunctionReturn(PETSC_SUCCESS); 636 } 637 638 static PetscErrorCode MatNestGetBlock_Private(Mat A, PetscInt rbegin, PetscInt rend, PetscInt cbegin, PetscInt cend, Mat *B) 639 { 640 Mat_Nest *vs = (Mat_Nest *)A->data; 641 Mat *a; 642 PetscInt i, j, k, l, nr = rend - rbegin, nc = cend - cbegin; 643 char keyname[256]; 644 PetscBool *b; 645 PetscBool flg; 646 647 PetscFunctionBegin; 648 *B = NULL; 649 PetscCall(PetscSNPrintf(keyname, sizeof(keyname), "NestBlock_%" PetscInt_FMT "-%" PetscInt_FMT "x%" PetscInt_FMT "-%" PetscInt_FMT, rbegin, rend, cbegin, cend)); 650 PetscCall(PetscObjectQuery((PetscObject)A, keyname, (PetscObject *)B)); 651 if (*B) PetscFunctionReturn(PETSC_SUCCESS); 652 653 PetscCall(PetscMalloc2(nr * nc, &a, nr * nc, &b)); 654 for (i = 0; i < nr; i++) { 655 for (j = 0; j < nc; j++) { 656 a[i * nc + j] = vs->m[rbegin + i][cbegin + j]; 657 b[i * nc + j] = PETSC_FALSE; 658 } 659 } 660 if (nc != vs->nc && nr != vs->nr) { 661 for (i = 0; i < nr; i++) { 662 for (j = 0; j < nc; j++) { 663 flg = PETSC_FALSE; 664 for (k = 0; (k < nr && !flg); k++) { 665 if (a[j + k * nc]) flg = PETSC_TRUE; 666 } 667 if (flg) { 668 flg = PETSC_FALSE; 669 for (l = 0; (l < nc && !flg); l++) { 670 if (a[i * nc + l]) flg = PETSC_TRUE; 671 } 672 } 673 if (!flg) { 674 b[i * nc + j] = PETSC_TRUE; 675 PetscCall(MatNestFillEmptyMat_Private(A, rbegin + i, cbegin + j, a + i * nc + 
j)); 676 } 677 } 678 } 679 } 680 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, nr != vs->nr ? NULL : vs->isglobal.row, nc, nc != vs->nc ? NULL : vs->isglobal.col, a, B)); 681 for (i = 0; i < nr; i++) { 682 for (j = 0; j < nc; j++) { 683 if (b[i * nc + j]) PetscCall(MatDestroy(a + i * nc + j)); 684 } 685 } 686 PetscCall(PetscFree2(a, b)); 687 (*B)->assembled = A->assembled; 688 PetscCall(PetscObjectCompose((PetscObject)A, keyname, (PetscObject)*B)); 689 PetscCall(PetscObjectDereference((PetscObject)*B)); /* Leave the only remaining reference in the composition */ 690 PetscFunctionReturn(PETSC_SUCCESS); 691 } 692 693 static PetscErrorCode MatNestFindSubMat(Mat A, struct MatNestISPair *is, IS isrow, IS iscol, Mat *B) 694 { 695 Mat_Nest *vs = (Mat_Nest *)A->data; 696 PetscInt rbegin, rend, cbegin, cend; 697 698 PetscFunctionBegin; 699 PetscCall(MatNestFindISRange(A, vs->nr, is->row, isrow, &rbegin, &rend)); 700 PetscCall(MatNestFindISRange(A, vs->nc, is->col, iscol, &cbegin, &cend)); 701 if (rend == rbegin + 1 && cend == cbegin + 1) { 702 if (!vs->m[rbegin][cbegin]) PetscCall(MatNestFillEmptyMat_Private(A, rbegin, cbegin, vs->m[rbegin] + cbegin)); 703 *B = vs->m[rbegin][cbegin]; 704 } else if (rbegin != -1 && cbegin != -1) { 705 PetscCall(MatNestGetBlock_Private(A, rbegin, rend, cbegin, cend, B)); 706 } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Could not find index set"); 707 PetscFunctionReturn(PETSC_SUCCESS); 708 } 709 710 /* 711 TODO: This does not actually returns a submatrix we can modify 712 */ 713 static PetscErrorCode MatCreateSubMatrix_Nest(Mat A, IS isrow, IS iscol, MatReuse reuse, Mat *B) 714 { 715 Mat_Nest *vs = (Mat_Nest *)A->data; 716 Mat sub; 717 718 PetscFunctionBegin; 719 PetscCall(MatNestFindSubMat(A, &vs->isglobal, isrow, iscol, &sub)); 720 switch (reuse) { 721 case MAT_INITIAL_MATRIX: 722 if (sub) PetscCall(PetscObjectReference((PetscObject)sub)); 723 *B = sub; 724 break; 725 case MAT_REUSE_MATRIX: 726 
PetscCheck(sub == *B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Submatrix was not used before in this call"); 727 break; 728 case MAT_IGNORE_MATRIX: /* Nothing to do */ 729 break; 730 case MAT_INPLACE_MATRIX: /* Nothing to do */ 731 SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX is not supported yet"); 732 } 733 PetscFunctionReturn(PETSC_SUCCESS); 734 } 735 736 static PetscErrorCode MatGetLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B) 737 { 738 Mat_Nest *vs = (Mat_Nest *)A->data; 739 Mat sub; 740 741 PetscFunctionBegin; 742 PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub)); 743 /* We allow the submatrix to be NULL, perhaps it would be better for the user to return an empty matrix instead */ 744 if (sub) PetscCall(PetscObjectReference((PetscObject)sub)); 745 *B = sub; 746 PetscFunctionReturn(PETSC_SUCCESS); 747 } 748 749 static PetscErrorCode MatRestoreLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B) 750 { 751 Mat_Nest *vs = (Mat_Nest *)A->data; 752 Mat sub; 753 754 PetscFunctionBegin; 755 PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub)); 756 PetscCheck(*B == sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has not been gotten"); 757 if (sub) { 758 PetscCheck(((PetscObject)sub)->refct > 1, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has had reference count decremented too many times"); 759 PetscCall(MatDestroy(B)); 760 } 761 PetscFunctionReturn(PETSC_SUCCESS); 762 } 763 764 static PetscErrorCode MatGetDiagonal_Nest(Mat A, Vec v) 765 { 766 Mat_Nest *bA = (Mat_Nest *)A->data; 767 PetscInt i; 768 769 PetscFunctionBegin; 770 for (i = 0; i < bA->nr; i++) { 771 Vec bv; 772 PetscCall(VecGetSubVector(v, bA->isglobal.row[i], &bv)); 773 if (bA->m[i][i]) { 774 PetscCall(MatGetDiagonal(bA->m[i][i], bv)); 775 } else { 776 PetscCall(VecSet(bv, 0.0)); 777 } 778 PetscCall(VecRestoreSubVector(v, bA->isglobal.row[i], &bv)); 779 
} 780 PetscFunctionReturn(PETSC_SUCCESS); 781 } 782 783 static PetscErrorCode MatDiagonalScale_Nest(Mat A, Vec l, Vec r) 784 { 785 Mat_Nest *bA = (Mat_Nest *)A->data; 786 Vec bl, *br; 787 PetscInt i, j; 788 789 PetscFunctionBegin; 790 PetscCall(PetscCalloc1(bA->nc, &br)); 791 if (r) { 792 for (j = 0; j < bA->nc; j++) PetscCall(VecGetSubVector(r, bA->isglobal.col[j], &br[j])); 793 } 794 bl = NULL; 795 for (i = 0; i < bA->nr; i++) { 796 if (l) PetscCall(VecGetSubVector(l, bA->isglobal.row[i], &bl)); 797 for (j = 0; j < bA->nc; j++) { 798 if (bA->m[i][j]) PetscCall(MatDiagonalScale(bA->m[i][j], bl, br[j])); 799 } 800 if (l) PetscCall(VecRestoreSubVector(l, bA->isglobal.row[i], &bl)); 801 } 802 if (r) { 803 for (j = 0; j < bA->nc; j++) PetscCall(VecRestoreSubVector(r, bA->isglobal.col[j], &br[j])); 804 } 805 PetscCall(PetscFree(br)); 806 PetscFunctionReturn(PETSC_SUCCESS); 807 } 808 809 static PetscErrorCode MatScale_Nest(Mat A, PetscScalar a) 810 { 811 Mat_Nest *bA = (Mat_Nest *)A->data; 812 PetscInt i, j; 813 814 PetscFunctionBegin; 815 for (i = 0; i < bA->nr; i++) { 816 for (j = 0; j < bA->nc; j++) { 817 if (bA->m[i][j]) PetscCall(MatScale(bA->m[i][j], a)); 818 } 819 } 820 PetscFunctionReturn(PETSC_SUCCESS); 821 } 822 823 static PetscErrorCode MatShift_Nest(Mat A, PetscScalar a) 824 { 825 Mat_Nest *bA = (Mat_Nest *)A->data; 826 PetscInt i; 827 PetscBool nnzstate = PETSC_FALSE; 828 829 PetscFunctionBegin; 830 for (i = 0; i < bA->nr; i++) { 831 PetscObjectState subnnzstate = 0; 832 PetscCheck(bA->m[i][i], PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for shifting an empty diagonal block, insert a matrix in block (%" PetscInt_FMT ",%" PetscInt_FMT ")", i, i); 833 PetscCall(MatShift(bA->m[i][i], a)); 834 PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate)); 835 nnzstate = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate); 836 bA->nnzstate[i * bA->nc + i] = subnnzstate; 837 } 838 if (nnzstate) A->nonzerostate++; 839 
PetscFunctionReturn(PETSC_SUCCESS); 840 } 841 842 static PetscErrorCode MatDiagonalSet_Nest(Mat A, Vec D, InsertMode is) 843 { 844 Mat_Nest *bA = (Mat_Nest *)A->data; 845 PetscInt i; 846 PetscBool nnzstate = PETSC_FALSE; 847 848 PetscFunctionBegin; 849 for (i = 0; i < bA->nr; i++) { 850 PetscObjectState subnnzstate = 0; 851 Vec bv; 852 PetscCall(VecGetSubVector(D, bA->isglobal.row[i], &bv)); 853 if (bA->m[i][i]) { 854 PetscCall(MatDiagonalSet(bA->m[i][i], bv, is)); 855 PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate)); 856 } 857 PetscCall(VecRestoreSubVector(D, bA->isglobal.row[i], &bv)); 858 nnzstate = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate); 859 bA->nnzstate[i * bA->nc + i] = subnnzstate; 860 } 861 if (nnzstate) A->nonzerostate++; 862 PetscFunctionReturn(PETSC_SUCCESS); 863 } 864 865 static PetscErrorCode MatSetRandom_Nest(Mat A, PetscRandom rctx) 866 { 867 Mat_Nest *bA = (Mat_Nest *)A->data; 868 PetscInt i, j; 869 870 PetscFunctionBegin; 871 for (i = 0; i < bA->nr; i++) { 872 for (j = 0; j < bA->nc; j++) { 873 if (bA->m[i][j]) PetscCall(MatSetRandom(bA->m[i][j], rctx)); 874 } 875 } 876 PetscFunctionReturn(PETSC_SUCCESS); 877 } 878 879 static PetscErrorCode MatCreateVecs_Nest(Mat A, Vec *right, Vec *left) 880 { 881 Mat_Nest *bA = (Mat_Nest *)A->data; 882 Vec *L, *R; 883 MPI_Comm comm; 884 PetscInt i, j; 885 886 PetscFunctionBegin; 887 PetscCall(PetscObjectGetComm((PetscObject)A, &comm)); 888 if (right) { 889 /* allocate R */ 890 PetscCall(PetscMalloc1(bA->nc, &R)); 891 /* Create the right vectors */ 892 for (j = 0; j < bA->nc; j++) { 893 for (i = 0; i < bA->nr; i++) { 894 if (bA->m[i][j]) { 895 PetscCall(MatCreateVecs(bA->m[i][j], &R[j], NULL)); 896 break; 897 } 898 } 899 PetscCheck(i != bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null column."); 900 } 901 PetscCall(VecCreateNest(comm, bA->nc, bA->isglobal.col, R, right)); 902 /* hand back control to the nest vector */ 903 for (j = 0; j < 
bA->nc; j++) PetscCall(VecDestroy(&R[j]));
    PetscCall(PetscFree(R));
  }

  if (left) {
    /* allocate L */
    PetscCall(PetscMalloc1(bA->nr, &L));
    /* Create the left vectors */
    for (i = 0; i < bA->nr; i++) {
      for (j = 0; j < bA->nc; j++) {
        if (bA->m[i][j]) {
          PetscCall(MatCreateVecs(bA->m[i][j], NULL, &L[i]));
          break;
        }
      }
      /* j == bA->nc means the inner loop found no non-NULL block in row i */
      PetscCheck(j != bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null row.");
    }

    PetscCall(VecCreateNest(comm, bA->nr, bA->isglobal.row, L, left));
    /* the nest vector holds its own references now */
    for (i = 0; i < bA->nr; i++) PetscCall(VecDestroy(&L[i]));

    PetscCall(PetscFree(L));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ASCII view of the nest structure: prints one line per block with its name,
   options prefix, type and sizes; recurses into blocks that are themselves
   MATNEST, or into all blocks when -mat_view_nest_sub is set */
static PetscErrorCode MatView_Nest(Mat A, PetscViewer viewer)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscBool isascii, viewSub = PETSC_FALSE;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    PetscCall(PetscOptionsGetBool(((PetscObject)A)->options, ((PetscObject)A)->prefix, "-mat_view_nest_sub", &viewSub, NULL));
    PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix object:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer)); /* push0 */
    PetscCall(PetscViewerASCIIPrintf(viewer, "type=nest, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", bA->nr, bA->nc));

    PetscCall(PetscViewerASCIIPrintf(viewer, "MatNest structure:\n"));
    for (i = 0; i < bA->nr; i++) {
      for (j = 0; j < bA->nc; j++) {
        MatType   type;
        char      name[256] = "", prefix[256] = "";
        PetscInt  NR, NC;
        PetscBool isNest = PETSC_FALSE;

        if (!bA->m[i][j]) {
          PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : NULL\n", i, j));
          continue;
        }
        PetscCall(MatGetSize(bA->m[i][j], &NR, &NC));
        PetscCall(MatGetType(bA->m[i][j], &type));
        if (((PetscObject)bA->m[i][j])->name) PetscCall(PetscSNPrintf(name, sizeof(name),
"name=\"%s\", ", ((PetscObject)bA->m[i][j])->name)); 958 if (((PetscObject)bA->m[i][j])->prefix) PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "prefix=\"%s\", ", ((PetscObject)bA->m[i][j])->prefix)); 959 PetscCall(PetscObjectTypeCompare((PetscObject)bA->m[i][j], MATNEST, &isNest)); 960 961 PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : %s%stype=%s, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", i, j, name, prefix, type, NR, NC)); 962 963 if (isNest || viewSub) { 964 PetscCall(PetscViewerASCIIPushTab(viewer)); /* push1 */ 965 PetscCall(MatView(bA->m[i][j], viewer)); 966 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop1 */ 967 } 968 } 969 } 970 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop0 */ 971 } 972 PetscFunctionReturn(PETSC_SUCCESS); 973 } 974 975 static PetscErrorCode MatZeroEntries_Nest(Mat A) 976 { 977 Mat_Nest *bA = (Mat_Nest *)A->data; 978 PetscInt i, j; 979 980 PetscFunctionBegin; 981 for (i = 0; i < bA->nr; i++) { 982 for (j = 0; j < bA->nc; j++) { 983 if (!bA->m[i][j]) continue; 984 PetscCall(MatZeroEntries(bA->m[i][j])); 985 } 986 } 987 PetscFunctionReturn(PETSC_SUCCESS); 988 } 989 990 static PetscErrorCode MatCopy_Nest(Mat A, Mat B, MatStructure str) 991 { 992 Mat_Nest *bA = (Mat_Nest *)A->data, *bB = (Mat_Nest *)B->data; 993 PetscInt i, j, nr = bA->nr, nc = bA->nc; 994 PetscBool nnzstate = PETSC_FALSE; 995 996 PetscFunctionBegin; 997 PetscCheck(nr == bB->nr && nc == bB->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Cannot copy a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") to a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bB->nr, bB->nc, nr, nc); 998 for (i = 0; i < nr; i++) { 999 for (j = 0; j < nc; j++) { 1000 PetscObjectState subnnzstate = 0; 1001 if (bA->m[i][j] && bB->m[i][j]) { 1002 PetscCall(MatCopy(bA->m[i][j], bB->m[i][j], str)); 1003 } else PetscCheck(!bA->m[i][j] && !bB->m[i][j], PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Matrix block 
does not exist at %" PetscInt_FMT ",%" PetscInt_FMT, i, j); 1004 PetscCall(MatGetNonzeroState(bB->m[i][j], &subnnzstate)); 1005 nnzstate = (PetscBool)(nnzstate || bB->nnzstate[i * nc + j] != subnnzstate); 1006 bB->nnzstate[i * nc + j] = subnnzstate; 1007 } 1008 } 1009 if (nnzstate) B->nonzerostate++; 1010 PetscFunctionReturn(PETSC_SUCCESS); 1011 } 1012 1013 static PetscErrorCode MatAXPY_Nest(Mat Y, PetscScalar a, Mat X, MatStructure str) 1014 { 1015 Mat_Nest *bY = (Mat_Nest *)Y->data, *bX = (Mat_Nest *)X->data; 1016 PetscInt i, j, nr = bY->nr, nc = bY->nc; 1017 PetscBool nnzstate = PETSC_FALSE; 1018 1019 PetscFunctionBegin; 1020 PetscCheck(nr == bX->nr && nc == bX->nc, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Cannot AXPY a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") with a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bX->nr, bX->nc, nr, nc); 1021 for (i = 0; i < nr; i++) { 1022 for (j = 0; j < nc; j++) { 1023 PetscObjectState subnnzstate = 0; 1024 if (bY->m[i][j] && bX->m[i][j]) { 1025 PetscCall(MatAXPY(bY->m[i][j], a, bX->m[i][j], str)); 1026 } else if (bX->m[i][j]) { 1027 Mat M; 1028 1029 PetscCheck(str == DIFFERENT_NONZERO_PATTERN || str == UNKNOWN_NONZERO_PATTERN, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Matrix block does not exist at %" PetscInt_FMT ",%" PetscInt_FMT ". 
Use DIFFERENT_NONZERO_PATTERN or UNKNOWN_NONZERO_PATTERN", i, j); 1030 PetscCall(MatDuplicate(bX->m[i][j], MAT_COPY_VALUES, &M)); 1031 PetscCall(MatNestSetSubMat(Y, i, j, M)); 1032 PetscCall(MatDestroy(&M)); 1033 } 1034 if (bY->m[i][j]) PetscCall(MatGetNonzeroState(bY->m[i][j], &subnnzstate)); 1035 nnzstate = (PetscBool)(nnzstate || bY->nnzstate[i * nc + j] != subnnzstate); 1036 bY->nnzstate[i * nc + j] = subnnzstate; 1037 } 1038 } 1039 if (nnzstate) Y->nonzerostate++; 1040 PetscFunctionReturn(PETSC_SUCCESS); 1041 } 1042 1043 static PetscErrorCode MatDuplicate_Nest(Mat A, MatDuplicateOption op, Mat *B) 1044 { 1045 Mat_Nest *bA = (Mat_Nest *)A->data; 1046 Mat *b; 1047 PetscInt i, j, nr = bA->nr, nc = bA->nc; 1048 1049 PetscFunctionBegin; 1050 PetscCall(PetscMalloc1(nr * nc, &b)); 1051 for (i = 0; i < nr; i++) { 1052 for (j = 0; j < nc; j++) { 1053 if (bA->m[i][j]) { 1054 PetscCall(MatDuplicate(bA->m[i][j], op, &b[i * nc + j])); 1055 } else { 1056 b[i * nc + j] = NULL; 1057 } 1058 } 1059 } 1060 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, bA->isglobal.row, nc, bA->isglobal.col, b, B)); 1061 /* Give the new MatNest exclusive ownership */ 1062 for (i = 0; i < nr * nc; i++) PetscCall(MatDestroy(&b[i])); 1063 PetscCall(PetscFree(b)); 1064 1065 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 1066 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 1067 PetscFunctionReturn(PETSC_SUCCESS); 1068 } 1069 1070 /* nest api */ 1071 static PetscErrorCode MatNestGetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat *mat) 1072 { 1073 Mat_Nest *bA = (Mat_Nest *)A->data; 1074 1075 PetscFunctionBegin; 1076 PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1); 1077 PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1); 1078 *mat = bA->m[idxm][jdxm]; 
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestGetSubMat - Returns a single, sub-matrix from a `MATNEST`

  Not Collective

  Input Parameters:
+ A    - `MATNEST` matrix
. idxm - index of the matrix within the nest matrix
- jdxm - index of the matrix within the nest matrix

  Output Parameter:
. sub - matrix at index `idxm`, `jdxm` within the nest matrix

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestSetSubMat()`,
          `MatNestGetLocalISs()`, `MatNestGetISs()`
@*/
PetscErrorCode MatNestGetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat *sub)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatNestGetSubMat_C", (Mat, PetscInt, PetscInt, Mat *), (A, idxm, jdxm, sub));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Replace block (idxm,jdxm) with mat after validating that its global and local
   sizes match the sizes implied by the nest's row/column index sets. Takes a
   reference on mat and destroys the previously stored block. */
static PetscErrorCode MatNestSetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat mat)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  m, n, M, N, mi, ni, Mi, Ni;

  PetscFunctionBegin;
  PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1);
  PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1);
  /* sizes of the incoming matrix */
  PetscCall(MatGetLocalSize(mat, &m, &n));
  PetscCall(MatGetSize(mat, &M, &N));
  /* sizes the nest expects for this block, from the global index sets */
  PetscCall(ISGetLocalSize(bA->isglobal.row[idxm], &mi));
  PetscCall(ISGetSize(bA->isglobal.row[idxm], &Mi));
  PetscCall(ISGetLocalSize(bA->isglobal.col[jdxm], &ni));
  PetscCall(ISGetSize(bA->isglobal.col[jdxm], &Ni));
  PetscCheck(M == Mi && N == Ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%"
PetscInt_FMT ")", M, N, Mi, Ni);
  PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix local dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, mi, ni);

  /* do not increase object state */
  if (mat == bA->m[idxm][jdxm]) PetscFunctionReturn(PETSC_SUCCESS);

  /* reference the new block before destroying the old one (safe even if related) */
  PetscCall(PetscObjectReference((PetscObject)mat));
  PetscCall(MatDestroy(&bA->m[idxm][jdxm]));
  bA->m[idxm][jdxm] = mat;
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  /* record the new block's nonzero state and invalidate the aggregate one */
  PetscCall(MatGetNonzeroState(mat, &bA->nnzstate[idxm * bA->nc + jdxm]));
  A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestSetSubMat - Set a single submatrix in the `MATNEST`

  Logically Collective

  Input Parameters:
+ A    - `MATNEST` matrix
. idxm - index of the matrix within the nest matrix
. jdxm - index of the matrix within the nest matrix
- sub  - matrix at index `idxm`, `jdxm` within the nest matrix

  Level: developer

  Notes:
  The new submatrix must have the same size and communicator as that block of the nest.

  This increments the reference count of the submatrix.
.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestSetSubMats()`, `MatNestGetSubMats()`, `MatNestGetLocalISs()`, `MatCreateNest()`,
          `MatNestGetSubMat()`, `MatNestGetISs()`, `MatNestGetSize()`
@*/
PetscErrorCode MatNestSetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat sub)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatNestSetSubMat_C", (Mat, PetscInt, PetscInt, Mat), (A, idxm, jdxm, sub));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hand out the internal block array (no copy; caller must not free or modify) */
static PetscErrorCode MatNestGetSubMats_Nest(Mat A, PetscInt *M, PetscInt *N, Mat ***mat)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;

  PetscFunctionBegin;
  if (M) *M = bA->nr;
  if (N) *N = bA->nc;
  if (mat) *mat = bA->m;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestGetSubMats - Returns the entire two dimensional array of matrices defining a `MATNEST` matrix.

  Not Collective

  Input Parameter:
. A - nest matrix

  Output Parameters:
+ M   - number of rows in the nest matrix
. N   - number of cols in the nest matrix
- mat - array of matrices

  Level: developer

  Note:
  The user should not free the array `mat`.

  Fortran Notes:
  This routine has a calling sequence
$ call MatNestGetSubMats(A, M, N, mat, ierr)
  where the space allocated for the optional argument `mat` is assumed large enough (if provided).
  Matrices in `mat` are returned in row-major order, see `MatCreateNest()` for an example.
.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatCreateNest()`,
          `MatNestSetSubMats()`, `MatNestGetISs()`, `MatNestSetSubMat()`
@*/
PetscErrorCode MatNestGetSubMats(Mat A, PetscInt *M, PetscInt *N, Mat ***mat)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatNestGetSubMats_C", (Mat, PetscInt *, PetscInt *, Mat ***), (A, M, N, mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return the block dimensions (nr x nc) of the nest */
static PetscErrorCode MatNestGetSize_Nest(Mat A, PetscInt *M, PetscInt *N)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;

  PetscFunctionBegin;
  if (M) *M = bA->nr;
  if (N) *N = bA->nc;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestGetSize - Returns the size of the `MATNEST` matrix.

  Not Collective

  Input Parameter:
. A - `MATNEST` matrix

  Output Parameters:
+ M - number of rows in the nested mat
- N - number of cols in the nested mat

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestGetLocalISs()`,
          `MatNestGetISs()`
@*/
PetscErrorCode MatNestGetSize(Mat A, PetscInt *M, PetscInt *N)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatNestGetSize_C", (Mat, PetscInt *, PetscInt *), (A, M, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Copy out the global row/column index sets; no reference counts are taken */
static PetscErrorCode MatNestGetISs_Nest(Mat A, IS rows[], IS cols[])
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (rows)
    for (i = 0; i < vs->nr; i++) rows[i] = vs->isglobal.row[i];
  if (cols)
    for (i = 0; i < vs->nc; i++) cols[i] = vs->isglobal.col[i];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestGetISs - Returns the index sets partitioning the row and column spaces of a `MATNEST`

  Not Collective

  Input Parameter:
. A - `MATNEST` matrix

  Output Parameters:
+ rows - array of row index sets
- cols - array of column index sets

  Level: advanced

  Note:
  The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetLocalISs()`,
          `MatCreateNest()`, `MatNestSetSubMats()`
@*/
PetscErrorCode MatNestGetISs(Mat A, IS rows[], IS cols[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatNestGetISs_C", (Mat, IS[], IS[]), (A, rows, cols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Copy out the local row/column index sets (entries may be NULL); no refs taken */
static PetscErrorCode MatNestGetLocalISs_Nest(Mat A, IS rows[], IS cols[])
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (rows)
    for (i = 0; i < vs->nr; i++) rows[i] = vs->islocal.row[i];
  if (cols)
    for (i = 0; i < vs->nc; i++) cols[i] = vs->islocal.col[i];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestGetLocalISs - Returns the index sets partitioning the row and column spaces of a `MATNEST`

  Not Collective

  Input Parameter:
. A - `MATNEST` matrix

  Output Parameters:
+ rows - array of row index sets (or `NULL` to ignore)
- cols - array of column index sets (or `NULL` to ignore)

  Level: advanced

  Note:
  The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s.
.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetISs()`, `MatCreateNest()`,
          `MatNestSetSubMats()`, `MatNestSetSubMat()`
@*/
PetscErrorCode MatNestGetLocalISs(Mat A, IS rows[], IS cols[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatNestGetLocalISs_C", (Mat, IS[], IS[]), (A, rows, cols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Choose the MatCreateVecs implementation: VECNEST gets the native nest
   vectors, anything else falls back to the default (NULL => generic path) */
static PetscErrorCode MatNestSetVecType_Nest(Mat A, VecType vtype)
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscStrcmp(vtype, VECNEST, &flg));
  /* In reality, this only distinguishes VECNEST and "other" */
  if (flg) A->ops->getvecs = MatCreateVecs_Nest;
  else A->ops->getvecs = (PetscErrorCode(*)(Mat, Vec *, Vec *))0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestSetVecType - Sets the type of `Vec` returned by `MatCreateVecs()`

  Not Collective

  Input Parameters:
+ A     - `MATNEST` matrix
- vtype - `VecType` to use for creating vectors

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateVecs()`, `MatCreateNest()`, `VecType`
@*/
PetscErrorCode MatNestSetVecType(Mat A, VecType vtype)
{
  PetscFunctionBegin;
  PetscTryMethod(A, "MatNestSetVecType_C", (Mat, VecType), (A, vtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Core setup: installs the block array, builds index sets and layouts, records
   per-block nonzero states, and disables ops that need congruent layouts when
   rows/columns are not congruent. Resets any previously set submatrices. */
static PetscErrorCode MatNestSetSubMats_Nest(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[])
{
  Mat_Nest *s = (Mat_Nest *)A->data;
  PetscInt  i, j, m, n, M, N;
  PetscBool cong, isstd, sametype = PETSC_FALSE;
  VecType   vtype, type;

  PetscFunctionBegin;
  PetscCall(MatReset_Nest(A));

  s->nr = nr;
  s->nc = nc;

  /* Create space for submatrices */
  PetscCall(PetscMalloc1(nr, &s->m));
  for (i
= 0; i < nr; i++) PetscCall(PetscMalloc1(nc, &s->m[i]));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      /* a[] is row-major; NULL entries denote empty blocks */
      s->m[i][j] = a[i * nc + j];
      if (a[i * nc + j]) PetscCall(PetscObjectReference((PetscObject)a[i * nc + j]));
    }
  }
  PetscCall(MatGetVecType(A, &vtype));
  PetscCall(PetscStrcmp(vtype, VECSTANDARD, &isstd));
  if (isstd) {
    /* check if all blocks have the same vectype */
    vtype = NULL;
    for (i = 0; i < nr; i++) {
      for (j = 0; j < nc; j++) {
        if (a[i * nc + j]) {
          if (!vtype) { /* first visited block */
            PetscCall(MatGetVecType(a[i * nc + j], &vtype));
            sametype = PETSC_TRUE;
          } else if (sametype) {
            PetscCall(MatGetVecType(a[i * nc + j], &type));
            PetscCall(PetscStrcmp(vtype, type, &sametype));
          }
        }
      }
    }
    if (sametype) { /* propagate vectype */
      PetscCall(MatSetVecType(A, vtype));
    }
  }

  PetscCall(MatSetUp_NestIS_Private(A, nr, is_row, nc, is_col));

  /* row_len/col_len are lazily computed elsewhere; -1 marks "not yet known" */
  PetscCall(PetscMalloc1(nr, &s->row_len));
  PetscCall(PetscMalloc1(nc, &s->col_len));
  for (i = 0; i < nr; i++) s->row_len[i] = -1;
  for (j = 0; j < nc; j++) s->col_len[j] = -1;

  /* snapshot each block's nonzero state for later change detection */
  PetscCall(PetscCalloc1(nr * nc, &s->nnzstate));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (s->m[i][j]) PetscCall(MatGetNonzeroState(s->m[i][j], &s->nnzstate[i * nc + j]));
    }
  }

  PetscCall(MatNestGetSizes_Private(A, &m, &n, &M, &N));

  PetscCall(PetscLayoutSetSize(A->rmap, M));
  PetscCall(PetscLayoutSetLocalSize(A->rmap, m));
  PetscCall(PetscLayoutSetSize(A->cmap, N));
  PetscCall(PetscLayoutSetLocalSize(A->cmap, n));

  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));

  /* disable operations that are not supported for non-square matrices,
     or matrices for which is_row != is_col */
  PetscCall(MatHasCongruentLayouts(A, &cong));
  if (cong && nr != nc) cong =
PETSC_FALSE;
  if (cong) {
    /* congruent only if every row IS equals the corresponding column IS */
    for (i = 0; cong && i < nr; i++) PetscCall(ISEqualUnsorted(s->isglobal.row[i], s->isglobal.col[i], &cong));
  }
  if (!cong) {
    A->ops->missingdiagonal = NULL;
    A->ops->getdiagonal     = NULL;
    A->ops->shift           = NULL;
    A->ops->diagonalset     = NULL;
  }

  /* work vectors for MatMult/MatMultAdd, sized per block row/column */
  PetscCall(PetscCalloc2(nr, &s->left, nc, &s->right));
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestSetSubMats - Sets the nested submatrices in a `MATNEST`

  Collective

  Input Parameters:
+ A      - `MATNEST` matrix
. nr     - number of nested row blocks
. is_row - index sets for each nested row block, or `NULL` to make contiguous
. nc     - number of nested column blocks
. is_col - index sets for each nested column block, or `NULL` to make contiguous
- a      - array of nr*nc submatrices, empty submatrices can be passed using `NULL`

  Level: advanced

  Notes:
  This always resets any submatrix information previously set

  In both C and Fortran, `a` must be a row-major order array containing the matrices. See
  `MatCreateNest()` for an example.
.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`, `MatNestGetSubMats()`
@*/
PetscErrorCode MatNestSetSubMats(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscCheck(nr >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of rows cannot be negative");
  if (nr && is_row) {
    PetscAssertPointer(is_row, 3);
    for (i = 0; i < nr; i++) PetscValidHeaderSpecific(is_row[i], IS_CLASSID, 3);
  }
  PetscCheck(nc >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of columns cannot be negative");
  if (nc && is_col) {
    PetscAssertPointer(is_col, 5);
    for (i = 0; i < nc; i++) PetscValidHeaderSpecific(is_col[i], IS_CLASSID, 5);
  }
  if (nr * nc > 0) PetscAssertPointer(a, 6);
  PetscUseMethod(A, "MatNestSetSubMats_C", (Mat, PetscInt, const IS[], PetscInt, const IS[], const Mat[]), (A, nr, is_row, nc, is_col, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Build one local-to-global mapping for the whole nest (rows when colflg is
   PETSC_FALSE, columns otherwise) by concatenating the per-block mappings.
   Returns *ltog = NULL when no block has a local IS (nothing to aggregate). */
static PetscErrorCode MatNestCreateAggregateL2G_Private(Mat A, PetscInt n, const IS islocal[], const IS isglobal[], PetscBool colflg, ISLocalToGlobalMapping *ltog)
{
  PetscBool flg;
  PetscInt  i, j, m, mi, *ix;

  PetscFunctionBegin;
  *ltog = NULL;
  /* first pass: total local size m, and whether any block has a local IS */
  for (i = 0, m = 0, flg = PETSC_FALSE; i < n; i++) {
    if (islocal[i]) {
      PetscCall(ISGetLocalSize(islocal[i], &mi));
      flg = PETSC_TRUE; /* We found a non-trivial entry */
    } else {
      PetscCall(ISGetLocalSize(isglobal[i], &mi));
    }
    m += mi;
  }
  if (!flg) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscMalloc1(m, &ix));
  for (i = 0, m = 0; i < n; i++) {
    ISLocalToGlobalMapping smap = NULL;
    Mat                    sub  = NULL;
    PetscSF                sf;
    PetscLayout            map;
    const PetscInt        *ix2;

    if
(!colflg) {
      PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub));
    } else {
      PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub));
    }
    if (sub) {
      if (!colflg) {
        PetscCall(MatGetLocalToGlobalMapping(sub, &smap, NULL));
      } else {
        PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &smap));
      }
    }
    /*
       Now we need to extract the monolithic global indices that correspond to the given split global indices.
       In many/most cases, we only want MatGetLocalSubMatrix() to work, in which case we only need to know the size of the local spaces.
    */
    PetscCall(ISGetIndices(isglobal[i], &ix2));
    if (islocal[i]) {
      PetscInt *ilocal, *iremote;
      PetscInt  mil, nleaves;

      PetscCall(ISGetLocalSize(islocal[i], &mi));
      PetscCheck(smap, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing local to global map");
      /* map local indices 0..mi-1 through the block's own l2g map */
      for (j = 0; j < mi; j++) ix[m + j] = j;
      PetscCall(ISLocalToGlobalMappingApply(smap, mi, ix + m, ix + m));

      /* PetscSFSetGraphLayout does not like negative indices */
      PetscCall(PetscMalloc2(mi, &ilocal, mi, &iremote));
      for (j = 0, nleaves = 0; j < mi; j++) {
        if (ix[m + j] < 0) continue;
        ilocal[nleaves]  = j;
        iremote[nleaves] = ix[m + j];
        nleaves++;
      }
      /* use an SF to gather the monolithic global indices owned by other ranks */
      PetscCall(ISGetLocalSize(isglobal[i], &mil));
      PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &sf));
      PetscCall(PetscLayoutCreate(PetscObjectComm((PetscObject)A), &map));
      PetscCall(PetscLayoutSetLocalSize(map, mil));
      PetscCall(PetscLayoutSetUp(map));
      PetscCall(PetscSFSetGraphLayout(sf, map, nleaves, ilocal, PETSC_USE_POINTER, iremote));
      PetscCall(PetscLayoutDestroy(&map));
      PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE));
      PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE));
      PetscCall(PetscSFDestroy(&sf));
      PetscCall(PetscFree2(ilocal, iremote));
    } else {
      PetscCall(ISGetLocalSize(isglobal[i],
&mi)); 1563 for (j = 0; j < mi; j++) ix[m + j] = ix2[i]; 1564 } 1565 PetscCall(ISRestoreIndices(isglobal[i], &ix2)); 1566 m += mi; 1567 } 1568 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, m, ix, PETSC_OWN_POINTER, ltog)); 1569 PetscFunctionReturn(PETSC_SUCCESS); 1570 } 1571 1572 /* If an IS was provided, there is nothing Nest needs to do, otherwise Nest will build a strided IS */ 1573 /* 1574 nprocessors = NP 1575 Nest x^T = ((g_0,g_1,...g_nprocs-1), (h_0,h_1,...h_NP-1)) 1576 proc 0: => (g_0,h_0,) 1577 proc 1: => (g_1,h_1,) 1578 ... 1579 proc nprocs-1: => (g_NP-1,h_NP-1,) 1580 1581 proc 0: proc 1: proc nprocs-1: 1582 is[0] = (0,1,2,...,nlocal(g_0)-1) (0,1,...,nlocal(g_1)-1) (0,1,...,nlocal(g_NP-1)) 1583 1584 proc 0: 1585 is[1] = (nlocal(g_0),nlocal(g_0)+1,...,nlocal(g_0)+nlocal(h_0)-1) 1586 proc 1: 1587 is[1] = (nlocal(g_1),nlocal(g_1)+1,...,nlocal(g_1)+nlocal(h_1)-1) 1588 1589 proc NP-1: 1590 is[1] = (nlocal(g_NP-1),nlocal(g_NP-1)+1,...,nlocal(g_NP-1)+nlocal(h_NP-1)-1) 1591 */ 1592 static PetscErrorCode MatSetUp_NestIS_Private(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[]) 1593 { 1594 Mat_Nest *vs = (Mat_Nest *)A->data; 1595 PetscInt i, j, offset, n, nsum, bs; 1596 Mat sub = NULL; 1597 1598 PetscFunctionBegin; 1599 PetscCall(PetscMalloc1(nr, &vs->isglobal.row)); 1600 PetscCall(PetscMalloc1(nc, &vs->isglobal.col)); 1601 if (is_row) { /* valid IS is passed in */ 1602 /* refs on is[] are incremented */ 1603 for (i = 0; i < vs->nr; i++) { 1604 PetscCall(PetscObjectReference((PetscObject)is_row[i])); 1605 1606 vs->isglobal.row[i] = is_row[i]; 1607 } 1608 } else { /* Create the ISs by inspecting sizes of a submatrix in each row */ 1609 nsum = 0; 1610 for (i = 0; i < vs->nr; i++) { /* Add up the local sizes to compute the aggregate offset */ 1611 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1612 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in row %" PetscInt_FMT, 
i); 1613 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1614 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1615 nsum += n; 1616 } 1617 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1618 offset -= nsum; 1619 for (i = 0; i < vs->nr; i++) { 1620 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1621 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1622 PetscCall(MatGetBlockSizes(sub, &bs, NULL)); 1623 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.row[i])); 1624 PetscCall(ISSetBlockSize(vs->isglobal.row[i], bs)); 1625 offset += n; 1626 } 1627 } 1628 1629 if (is_col) { /* valid IS is passed in */ 1630 /* refs on is[] are incremented */ 1631 for (j = 0; j < vs->nc; j++) { 1632 PetscCall(PetscObjectReference((PetscObject)is_col[j])); 1633 1634 vs->isglobal.col[j] = is_col[j]; 1635 } 1636 } else { /* Create the ISs by inspecting sizes of a submatrix in each column */ 1637 offset = A->cmap->rstart; 1638 nsum = 0; 1639 for (j = 0; j < vs->nc; j++) { 1640 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1641 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in column %" PetscInt_FMT, i); 1642 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1643 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1644 nsum += n; 1645 } 1646 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1647 offset -= nsum; 1648 for (j = 0; j < vs->nc; j++) { 1649 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1650 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1651 PetscCall(MatGetBlockSizes(sub, NULL, &bs)); 1652 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.col[j])); 1653 PetscCall(ISSetBlockSize(vs->isglobal.col[j], bs)); 1654 offset += n; 1655 } 
  }

  /* Set up the local ISs */
  PetscCall(PetscMalloc1(vs->nr, &vs->islocal.row));
  PetscCall(PetscMalloc1(vs->nc, &vs->islocal.col));
  /* local row ISs: contiguous strides sized by each block's l2g map (NULL when
     the block row has no l2g map) */
  for (i = 0, offset = 0; i < vs->nr; i++) {
    IS                     isloc;
    ISLocalToGlobalMapping rmap = NULL;
    PetscInt               nlocal, bs;
    PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub));
    if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, &rmap, NULL));
    if (rmap) {
      PetscCall(MatGetBlockSizes(sub, &bs, NULL));
      PetscCall(ISLocalToGlobalMappingGetSize(rmap, &nlocal));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc));
      PetscCall(ISSetBlockSize(isloc, bs));
    } else {
      nlocal = 0;
      isloc  = NULL;
    }
    vs->islocal.row[i] = isloc;
    offset += nlocal;
  }
  /* local column ISs, same construction as rows */
  for (i = 0, offset = 0; i < vs->nc; i++) {
    IS                     isloc;
    ISLocalToGlobalMapping cmap = NULL;
    PetscInt               nlocal, bs;
    PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub));
    if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &cmap));
    if (cmap) {
      PetscCall(MatGetBlockSizes(sub, NULL, &bs));
      PetscCall(ISLocalToGlobalMappingGetSize(cmap, &nlocal));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc));
      PetscCall(ISSetBlockSize(isloc, bs));
    } else {
      nlocal = 0;
      isloc  = NULL;
    }
    vs->islocal.col[i] = isloc;
    offset += nlocal;
  }

  /* Set up the aggregate ISLocalToGlobalMapping */
  {
    ISLocalToGlobalMapping rmap, cmap;
    PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nr, vs->islocal.row, vs->isglobal.row, PETSC_FALSE, &rmap));
    PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nc, vs->islocal.col, vs->isglobal.col, PETSC_TRUE, &cmap));
    if (rmap && cmap) PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap));
    PetscCall(ISLocalToGlobalMappingDestroy(&rmap));
    PetscCall(ISLocalToGlobalMappingDestroy(&cmap));
  }

  /* in debug builds, verify every block agrees with the sizes implied by the ISs */
  if (PetscDefined(USE_DEBUG)) {
    for (i = 0; i
< vs->nr; i++) {
      for (j = 0; j < vs->nc; j++) {
        PetscInt m, n, M, N, mi, ni, Mi, Ni;
        Mat      B = vs->m[i][j];
        if (!B) continue;
        PetscCall(MatGetSize(B, &M, &N));
        PetscCall(MatGetLocalSize(B, &m, &n));
        PetscCall(ISGetSize(vs->isglobal.row[i], &Mi));
        PetscCall(ISGetSize(vs->isglobal.col[j], &Ni));
        PetscCall(ISGetLocalSize(vs->isglobal.row[i], &mi));
        PetscCall(ISGetLocalSize(vs->isglobal.col[j], &ni));
        PetscCheck(M == Mi && N == Ni, PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Global sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", M, N, i, j, Mi, Ni);
        PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Local sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, i, j, mi, ni);
      }
    }
  }

  /* Set A->assembled if all non-null blocks are currently assembled */
  for (i = 0; i < vs->nr; i++) {
    for (j = 0; j < vs->nc; j++) {
      if (vs->m[i][j] && !vs->m[i][j]->assembled) PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  A->assembled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateNest - Creates a new `MATNEST` matrix containing several nested submatrices, each stored separately

  Collective

  Input Parameters:
+ comm   - Communicator for the new `MATNEST`
. nr     - number of nested row blocks
. is_row - index sets for each nested row block, or `NULL` to make contiguous
. nc     - number of nested column blocks
. is_col - index sets for each nested column block, or `NULL` to make contiguous
- a      - array of nr*nc submatrices, empty submatrices can be passed using `NULL`

  Output Parameter:
. B - new matrix

  Note:
  In both C and Fortran, `a` must be a row-major order array holding references to the matrices.
  For instance, to represent the matrix
  $\begin{bmatrix} A_{11} & A_{12} \\ A_{21} & A_{22}\end{bmatrix}$
  one should use `Mat a[4]={A11,A12,A21,A22}`.

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `VecCreateNest()`, `DMCreateMatrix()`, `MatNestSetSubMat()`,
          `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatNestGetSize()`,
          `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()`
@*/
PetscErrorCode MatCreateNest(MPI_Comm comm, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[], Mat *B)
{
  Mat A;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetType(A, MATNEST));
  /* blocks carry their own storage; nothing else to preallocate */
  A->preallocated = PETSC_TRUE;
  PetscCall(MatNestSetSubMats(A, nr, is_row, nc, is_col, a));
  *B = A;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatConvert_Nest_SeqAIJ_fast(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat_Nest     *nest = (Mat_Nest *)A->data;
  Mat          *trans;
  PetscScalar **avv;
  PetscScalar  *vv;
  PetscInt    **aii, **ajj;
  PetscInt     *ii, *jj, *ci;
  PetscInt      nr, nc, nnz, i, j;
  PetscBool     done;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &nr, &nc));
  if (reuse == MAT_REUSE_MATRIX) {
    PetscInt rnr;

    PetscCall(MatGetRowIJ(*newmat, 0, PETSC_FALSE, PETSC_FALSE, &rnr, (const PetscInt **)&ii, (const PetscInt **)&jj, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "MatGetRowIJ");
    PetscCheck(rnr == nr, PetscObjectComm((PetscObject)A), PETSC_ERR_USER,
"Cannot reuse matrix, wrong number of rows"); 1797 PetscCall(MatSeqAIJGetArray(*newmat, &vv)); 1798 } 1799 /* extract CSR for nested SeqAIJ matrices */ 1800 nnz = 0; 1801 PetscCall(PetscCalloc4(nest->nr * nest->nc, &aii, nest->nr * nest->nc, &ajj, nest->nr * nest->nc, &avv, nest->nr * nest->nc, &trans)); 1802 for (i = 0; i < nest->nr; ++i) { 1803 for (j = 0; j < nest->nc; ++j) { 1804 Mat B = nest->m[i][j]; 1805 if (B) { 1806 PetscScalar *naa; 1807 PetscInt *nii, *njj, nnr; 1808 PetscBool istrans; 1809 1810 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans)); 1811 if (istrans) { 1812 Mat Bt; 1813 1814 PetscCall(MatTransposeGetMat(B, &Bt)); 1815 PetscCall(MatTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j])); 1816 B = trans[i * nest->nc + j]; 1817 } else { 1818 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans)); 1819 if (istrans) { 1820 Mat Bt; 1821 1822 PetscCall(MatHermitianTransposeGetMat(B, &Bt)); 1823 PetscCall(MatHermitianTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j])); 1824 B = trans[i * nest->nc + j]; 1825 } 1826 } 1827 PetscCall(MatGetRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&nii, (const PetscInt **)&njj, &done)); 1828 PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatGetRowIJ"); 1829 PetscCall(MatSeqAIJGetArray(B, &naa)); 1830 nnz += nii[nnr]; 1831 1832 aii[i * nest->nc + j] = nii; 1833 ajj[i * nest->nc + j] = njj; 1834 avv[i * nest->nc + j] = naa; 1835 } 1836 } 1837 } 1838 if (reuse != MAT_REUSE_MATRIX) { 1839 PetscCall(PetscMalloc1(nr + 1, &ii)); 1840 PetscCall(PetscMalloc1(nnz, &jj)); 1841 PetscCall(PetscMalloc1(nnz, &vv)); 1842 } else { 1843 PetscCheck(nnz == ii[nr], PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of nonzeros"); 1844 } 1845 1846 /* new row pointer */ 1847 PetscCall(PetscArrayzero(ii, nr + 1)); 1848 for (i = 0; i < nest->nr; ++i) { 1849 PetscInt ncr, rst; 1850 1851 
PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL)); 1852 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr)); 1853 for (j = 0; j < nest->nc; ++j) { 1854 if (aii[i * nest->nc + j]) { 1855 PetscInt *nii = aii[i * nest->nc + j]; 1856 PetscInt ir; 1857 1858 for (ir = rst; ir < ncr + rst; ++ir) { 1859 ii[ir + 1] += nii[1] - nii[0]; 1860 nii++; 1861 } 1862 } 1863 } 1864 } 1865 for (i = 0; i < nr; i++) ii[i + 1] += ii[i]; 1866 1867 /* construct CSR for the new matrix */ 1868 PetscCall(PetscCalloc1(nr, &ci)); 1869 for (i = 0; i < nest->nr; ++i) { 1870 PetscInt ncr, rst; 1871 1872 PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL)); 1873 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr)); 1874 for (j = 0; j < nest->nc; ++j) { 1875 if (aii[i * nest->nc + j]) { 1876 PetscScalar *nvv = avv[i * nest->nc + j]; 1877 PetscInt *nii = aii[i * nest->nc + j]; 1878 PetscInt *njj = ajj[i * nest->nc + j]; 1879 PetscInt ir, cst; 1880 1881 PetscCall(ISStrideGetInfo(nest->isglobal.col[j], &cst, NULL)); 1882 for (ir = rst; ir < ncr + rst; ++ir) { 1883 PetscInt ij, rsize = nii[1] - nii[0], ist = ii[ir] + ci[ir]; 1884 1885 for (ij = 0; ij < rsize; ij++) { 1886 jj[ist + ij] = *njj + cst; 1887 vv[ist + ij] = *nvv; 1888 njj++; 1889 nvv++; 1890 } 1891 ci[ir] += rsize; 1892 nii++; 1893 } 1894 } 1895 } 1896 } 1897 PetscCall(PetscFree(ci)); 1898 1899 /* restore info */ 1900 for (i = 0; i < nest->nr; ++i) { 1901 for (j = 0; j < nest->nc; ++j) { 1902 Mat B = nest->m[i][j]; 1903 if (B) { 1904 PetscInt nnr = 0, k = i * nest->nc + j; 1905 1906 B = (trans[k] ? 
trans[k] : B); 1907 PetscCall(MatRestoreRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&aii[k], (const PetscInt **)&ajj[k], &done)); 1908 PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatRestoreRowIJ"); 1909 PetscCall(MatSeqAIJRestoreArray(B, &avv[k])); 1910 PetscCall(MatDestroy(&trans[k])); 1911 } 1912 } 1913 } 1914 PetscCall(PetscFree4(aii, ajj, avv, trans)); 1915 1916 /* finalize newmat */ 1917 if (reuse == MAT_INITIAL_MATRIX) { 1918 PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, newmat)); 1919 } else if (reuse == MAT_INPLACE_MATRIX) { 1920 Mat B; 1921 1922 PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, &B)); 1923 PetscCall(MatHeaderReplace(A, &B)); 1924 } 1925 PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY)); 1926 PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY)); 1927 { 1928 Mat_SeqAIJ *a = (Mat_SeqAIJ *)((*newmat)->data); 1929 a->free_a = PETSC_TRUE; 1930 a->free_ij = PETSC_TRUE; 1931 } 1932 PetscFunctionReturn(PETSC_SUCCESS); 1933 } 1934 1935 PETSC_INTERN PetscErrorCode MatAXPY_Dense_Nest(Mat Y, PetscScalar a, Mat X) 1936 { 1937 Mat_Nest *nest = (Mat_Nest *)X->data; 1938 PetscInt i, j, k, rstart; 1939 PetscBool flg; 1940 1941 PetscFunctionBegin; 1942 /* Fill by row */ 1943 for (j = 0; j < nest->nc; ++j) { 1944 /* Using global column indices and ISAllGather() is not scalable. 
*/ 1945 IS bNis; 1946 PetscInt bN; 1947 const PetscInt *bNindices; 1948 PetscCall(ISAllGather(nest->isglobal.col[j], &bNis)); 1949 PetscCall(ISGetSize(bNis, &bN)); 1950 PetscCall(ISGetIndices(bNis, &bNindices)); 1951 for (i = 0; i < nest->nr; ++i) { 1952 Mat B = nest->m[i][j], D = NULL; 1953 PetscInt bm, br; 1954 const PetscInt *bmindices; 1955 if (!B) continue; 1956 PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, "")); 1957 if (flg) { 1958 PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D)); 1959 PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D)); 1960 PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D)); 1961 B = D; 1962 } 1963 PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, "")); 1964 if (flg) { 1965 if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D)); 1966 else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D)); 1967 B = D; 1968 } 1969 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm)); 1970 PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices)); 1971 PetscCall(MatGetOwnershipRange(B, &rstart, NULL)); 1972 for (br = 0; br < bm; ++br) { 1973 PetscInt row = bmindices[br], brncols, *cols; 1974 const PetscInt *brcols; 1975 const PetscScalar *brcoldata; 1976 PetscScalar *vals = NULL; 1977 PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, &brcoldata)); 1978 PetscCall(PetscMalloc1(brncols, &cols)); 1979 for (k = 0; k < brncols; k++) cols[k] = bNindices[brcols[k]]; 1980 /* 1981 Nest blocks are required to be nonoverlapping -- otherwise nest and monolithic index layouts wouldn't match. 1982 Thus, we could use INSERT_VALUES, but I prefer ADD_VALUES. 
1983 */ 1984 if (a != 1.0) { 1985 PetscCall(PetscMalloc1(brncols, &vals)); 1986 for (k = 0; k < brncols; k++) vals[k] = a * brcoldata[k]; 1987 PetscCall(MatSetValues(Y, 1, &row, brncols, cols, vals, ADD_VALUES)); 1988 PetscCall(PetscFree(vals)); 1989 } else { 1990 PetscCall(MatSetValues(Y, 1, &row, brncols, cols, brcoldata, ADD_VALUES)); 1991 } 1992 PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, &brcoldata)); 1993 PetscCall(PetscFree(cols)); 1994 } 1995 if (D) PetscCall(MatDestroy(&D)); 1996 PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices)); 1997 } 1998 PetscCall(ISRestoreIndices(bNis, &bNindices)); 1999 PetscCall(ISDestroy(&bNis)); 2000 } 2001 PetscCall(MatAssemblyBegin(Y, MAT_FINAL_ASSEMBLY)); 2002 PetscCall(MatAssemblyEnd(Y, MAT_FINAL_ASSEMBLY)); 2003 PetscFunctionReturn(PETSC_SUCCESS); 2004 } 2005 2006 static PetscErrorCode MatConvert_Nest_AIJ(Mat A, MatType newtype, MatReuse reuse, Mat *newmat) 2007 { 2008 Mat_Nest *nest = (Mat_Nest *)A->data; 2009 PetscInt m, n, M, N, i, j, k, *dnnz, *onnz = NULL, rstart, cstart, cend; 2010 PetscMPIInt size; 2011 Mat C; 2012 2013 PetscFunctionBegin; 2014 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size)); 2015 if (size == 1) { /* look for a special case with SeqAIJ matrices and strided-1, contiguous, blocks */ 2016 PetscInt nf; 2017 PetscBool fast; 2018 2019 PetscCall(PetscStrcmp(newtype, MATAIJ, &fast)); 2020 if (!fast) PetscCall(PetscStrcmp(newtype, MATSEQAIJ, &fast)); 2021 for (i = 0; i < nest->nr && fast; ++i) { 2022 for (j = 0; j < nest->nc && fast; ++j) { 2023 Mat B = nest->m[i][j]; 2024 if (B) { 2025 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATSEQAIJ, &fast)); 2026 if (!fast) { 2027 PetscBool istrans; 2028 2029 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans)); 2030 if (istrans) { 2031 Mat Bt; 2032 2033 PetscCall(MatTransposeGetMat(B, &Bt)); 2034 PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast)); 2035 } else { 2036 
PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans)); 2037 if (istrans) { 2038 Mat Bt; 2039 2040 PetscCall(MatHermitianTransposeGetMat(B, &Bt)); 2041 PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast)); 2042 } 2043 } 2044 } 2045 } 2046 } 2047 } 2048 for (i = 0, nf = 0; i < nest->nr && fast; ++i) { 2049 PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.row[i], ISSTRIDE, &fast)); 2050 if (fast) { 2051 PetscInt f, s; 2052 2053 PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &f, &s)); 2054 if (f != nf || s != 1) { 2055 fast = PETSC_FALSE; 2056 } else { 2057 PetscCall(ISGetSize(nest->isglobal.row[i], &f)); 2058 nf += f; 2059 } 2060 } 2061 } 2062 for (i = 0, nf = 0; i < nest->nc && fast; ++i) { 2063 PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.col[i], ISSTRIDE, &fast)); 2064 if (fast) { 2065 PetscInt f, s; 2066 2067 PetscCall(ISStrideGetInfo(nest->isglobal.col[i], &f, &s)); 2068 if (f != nf || s != 1) { 2069 fast = PETSC_FALSE; 2070 } else { 2071 PetscCall(ISGetSize(nest->isglobal.col[i], &f)); 2072 nf += f; 2073 } 2074 } 2075 } 2076 if (fast) { 2077 PetscCall(MatConvert_Nest_SeqAIJ_fast(A, newtype, reuse, newmat)); 2078 PetscFunctionReturn(PETSC_SUCCESS); 2079 } 2080 } 2081 PetscCall(MatGetSize(A, &M, &N)); 2082 PetscCall(MatGetLocalSize(A, &m, &n)); 2083 PetscCall(MatGetOwnershipRangeColumn(A, &cstart, &cend)); 2084 if (reuse == MAT_REUSE_MATRIX) C = *newmat; 2085 else { 2086 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C)); 2087 PetscCall(MatSetType(C, newtype)); 2088 PetscCall(MatSetSizes(C, m, n, M, N)); 2089 } 2090 PetscCall(PetscMalloc1(2 * m, &dnnz)); 2091 if (m) { 2092 onnz = dnnz + m; 2093 for (k = 0; k < m; k++) { 2094 dnnz[k] = 0; 2095 onnz[k] = 0; 2096 } 2097 } 2098 for (j = 0; j < nest->nc; ++j) { 2099 IS bNis; 2100 PetscInt bN; 2101 const PetscInt *bNindices; 2102 PetscBool flg; 2103 /* Using global column indices and ISAllGather() is not scalable. 
*/ 2104 PetscCall(ISAllGather(nest->isglobal.col[j], &bNis)); 2105 PetscCall(ISGetSize(bNis, &bN)); 2106 PetscCall(ISGetIndices(bNis, &bNindices)); 2107 for (i = 0; i < nest->nr; ++i) { 2108 PetscSF bmsf; 2109 PetscSFNode *iremote; 2110 Mat B = nest->m[i][j], D = NULL; 2111 PetscInt bm, *sub_dnnz, *sub_onnz, br; 2112 const PetscInt *bmindices; 2113 if (!B) continue; 2114 PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm)); 2115 PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices)); 2116 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &bmsf)); 2117 PetscCall(PetscMalloc1(bm, &iremote)); 2118 PetscCall(PetscMalloc1(bm, &sub_dnnz)); 2119 PetscCall(PetscMalloc1(bm, &sub_onnz)); 2120 for (k = 0; k < bm; ++k) { 2121 sub_dnnz[k] = 0; 2122 sub_onnz[k] = 0; 2123 } 2124 PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, "")); 2125 if (flg) { 2126 PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D)); 2127 PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D)); 2128 PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D)); 2129 B = D; 2130 } 2131 PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, "")); 2132 if (flg) { 2133 if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D)); 2134 else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D)); 2135 B = D; 2136 } 2137 /* 2138 Locate the owners for all of the locally-owned global row indices for this row block. 2139 These determine the roots of PetscSF used to communicate preallocation data to row owners. 2140 The roots correspond to the dnnz and onnz entries; thus, there are two roots per row. 
2141 */ 2142 PetscCall(MatGetOwnershipRange(B, &rstart, NULL)); 2143 for (br = 0; br < bm; ++br) { 2144 PetscInt row = bmindices[br], brncols, col; 2145 const PetscInt *brcols; 2146 PetscInt rowrel = 0; /* row's relative index on its owner rank */ 2147 PetscMPIInt rowowner = 0; 2148 PetscCall(PetscLayoutFindOwnerIndex(A->rmap, row, &rowowner, &rowrel)); 2149 /* how many roots */ 2150 iremote[br].rank = rowowner; 2151 iremote[br].index = rowrel; /* edge from bmdnnz to dnnz */ 2152 /* get nonzero pattern */ 2153 PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, NULL)); 2154 for (k = 0; k < brncols; k++) { 2155 col = bNindices[brcols[k]]; 2156 if (col >= A->cmap->range[rowowner] && col < A->cmap->range[rowowner + 1]) { 2157 sub_dnnz[br]++; 2158 } else { 2159 sub_onnz[br]++; 2160 } 2161 } 2162 PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, NULL)); 2163 } 2164 if (D) PetscCall(MatDestroy(&D)); 2165 PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices)); 2166 /* bsf will have to take care of disposing of bedges. 
*/ 2167 PetscCall(PetscSFSetGraph(bmsf, m, bm, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER)); 2168 PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM)); 2169 PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM)); 2170 PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM)); 2171 PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM)); 2172 PetscCall(PetscFree(sub_dnnz)); 2173 PetscCall(PetscFree(sub_onnz)); 2174 PetscCall(PetscSFDestroy(&bmsf)); 2175 } 2176 PetscCall(ISRestoreIndices(bNis, &bNindices)); 2177 PetscCall(ISDestroy(&bNis)); 2178 } 2179 /* Resize preallocation if overestimated */ 2180 for (i = 0; i < m; i++) { 2181 dnnz[i] = PetscMin(dnnz[i], A->cmap->n); 2182 onnz[i] = PetscMin(onnz[i], A->cmap->N - A->cmap->n); 2183 } 2184 PetscCall(MatSeqAIJSetPreallocation(C, 0, dnnz)); 2185 PetscCall(MatMPIAIJSetPreallocation(C, 0, dnnz, 0, onnz)); 2186 PetscCall(PetscFree(dnnz)); 2187 PetscCall(MatAXPY_Dense_Nest(C, 1.0, A)); 2188 if (reuse == MAT_INPLACE_MATRIX) { 2189 PetscCall(MatHeaderReplace(A, &C)); 2190 } else *newmat = C; 2191 PetscFunctionReturn(PETSC_SUCCESS); 2192 } 2193 2194 static PetscErrorCode MatConvert_Nest_Dense(Mat A, MatType newtype, MatReuse reuse, Mat *newmat) 2195 { 2196 Mat B; 2197 PetscInt m, n, M, N; 2198 2199 PetscFunctionBegin; 2200 PetscCall(MatGetSize(A, &M, &N)); 2201 PetscCall(MatGetLocalSize(A, &m, &n)); 2202 if (reuse == MAT_REUSE_MATRIX) { 2203 B = *newmat; 2204 PetscCall(MatZeroEntries(B)); 2205 } else { 2206 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), m, PETSC_DECIDE, M, N, NULL, &B)); 2207 } 2208 PetscCall(MatAXPY_Dense_Nest(B, 1.0, A)); 2209 if (reuse == MAT_INPLACE_MATRIX) { 2210 PetscCall(MatHeaderReplace(A, &B)); 2211 } else if (reuse == MAT_INITIAL_MATRIX) *newmat = B; 2212 PetscFunctionReturn(PETSC_SUCCESS); 2213 } 2214 2215 static PetscErrorCode MatHasOperation_Nest(Mat mat, MatOperation op, PetscBool *has) 2216 { 2217 Mat_Nest *bA = 
(Mat_Nest *)mat->data; 2218 MatOperation opAdd; 2219 PetscInt i, j, nr = bA->nr, nc = bA->nc; 2220 PetscBool flg; 2221 PetscFunctionBegin; 2222 2223 *has = PETSC_FALSE; 2224 if (op == MATOP_MULT || op == MATOP_MULT_ADD || op == MATOP_MULT_TRANSPOSE || op == MATOP_MULT_TRANSPOSE_ADD) { 2225 opAdd = (op == MATOP_MULT || op == MATOP_MULT_ADD ? MATOP_MULT_ADD : MATOP_MULT_TRANSPOSE_ADD); 2226 for (j = 0; j < nc; j++) { 2227 for (i = 0; i < nr; i++) { 2228 if (!bA->m[i][j]) continue; 2229 PetscCall(MatHasOperation(bA->m[i][j], opAdd, &flg)); 2230 if (!flg) PetscFunctionReturn(PETSC_SUCCESS); 2231 } 2232 } 2233 } 2234 if (((void **)mat->ops)[op]) *has = PETSC_TRUE; 2235 PetscFunctionReturn(PETSC_SUCCESS); 2236 } 2237 2238 /*MC 2239 MATNEST - "nest" - Matrix type consisting of nested submatrices, each stored separately. 2240 2241 Level: intermediate 2242 2243 Notes: 2244 This matrix type permits scalable use of `PCFIELDSPLIT` and avoids the large memory costs of extracting submatrices. 2245 It allows the use of symmetric and block formats for parts of multi-physics simulations. 2246 It is usually used with `DMCOMPOSITE` and `DMCreateMatrix()` 2247 2248 Each of the submatrices lives on the same MPI communicator as the original nest matrix (though they can have zero 2249 rows/columns on some processes.) Thus this is not meant for cases where the submatrices live on far fewer processes 2250 than the nest matrix. 
2251 2252 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `MatType`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`, 2253 `VecCreateNest()`, `DMCreateMatrix()`, `DMCOMPOSITE`, `MatNestSetVecType()`, `MatNestGetLocalISs()`, 2254 `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()` 2255 M*/ 2256 PETSC_EXTERN PetscErrorCode MatCreate_Nest(Mat A) 2257 { 2258 Mat_Nest *s; 2259 2260 PetscFunctionBegin; 2261 PetscCall(PetscNew(&s)); 2262 A->data = (void *)s; 2263 2264 s->nr = -1; 2265 s->nc = -1; 2266 s->m = NULL; 2267 s->splitassembly = PETSC_FALSE; 2268 2269 PetscCall(PetscMemzero(A->ops, sizeof(*A->ops))); 2270 2271 A->ops->mult = MatMult_Nest; 2272 A->ops->multadd = MatMultAdd_Nest; 2273 A->ops->multtranspose = MatMultTranspose_Nest; 2274 A->ops->multtransposeadd = MatMultTransposeAdd_Nest; 2275 A->ops->transpose = MatTranspose_Nest; 2276 A->ops->multhermitiantranspose = MatMultHermitianTranspose_Nest; 2277 A->ops->multhermitiantransposeadd = MatMultHermitianTransposeAdd_Nest; 2278 A->ops->assemblybegin = MatAssemblyBegin_Nest; 2279 A->ops->assemblyend = MatAssemblyEnd_Nest; 2280 A->ops->zeroentries = MatZeroEntries_Nest; 2281 A->ops->copy = MatCopy_Nest; 2282 A->ops->axpy = MatAXPY_Nest; 2283 A->ops->duplicate = MatDuplicate_Nest; 2284 A->ops->createsubmatrix = MatCreateSubMatrix_Nest; 2285 A->ops->destroy = MatDestroy_Nest; 2286 A->ops->view = MatView_Nest; 2287 A->ops->getvecs = NULL; /* Use VECNEST by calling MatNestSetVecType(A,VECNEST) */ 2288 A->ops->getlocalsubmatrix = MatGetLocalSubMatrix_Nest; 2289 A->ops->restorelocalsubmatrix = MatRestoreLocalSubMatrix_Nest; 2290 A->ops->getdiagonal = MatGetDiagonal_Nest; 2291 A->ops->diagonalscale = MatDiagonalScale_Nest; 2292 A->ops->scale = MatScale_Nest; 2293 A->ops->shift = MatShift_Nest; 2294 A->ops->diagonalset = MatDiagonalSet_Nest; 2295 A->ops->setrandom = MatSetRandom_Nest; 2296 A->ops->hasoperation = MatHasOperation_Nest; 2297 A->ops->missingdiagonal = 
MatMissingDiagonal_Nest; 2298 2299 A->spptr = NULL; 2300 A->assembled = PETSC_FALSE; 2301 2302 /* expose Nest api's */ 2303 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", MatNestGetSubMat_Nest)); 2304 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", MatNestSetSubMat_Nest)); 2305 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", MatNestGetSubMats_Nest)); 2306 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", MatNestGetSize_Nest)); 2307 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", MatNestGetISs_Nest)); 2308 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", MatNestGetLocalISs_Nest)); 2309 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", MatNestSetVecType_Nest)); 2310 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", MatNestSetSubMats_Nest)); 2311 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", MatConvert_Nest_AIJ)); 2312 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", MatConvert_Nest_AIJ)); 2313 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", MatConvert_Nest_AIJ)); 2314 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", MatConvert_Nest_IS)); 2315 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", MatConvert_Nest_Dense)); 2316 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", MatConvert_Nest_Dense)); 2317 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", MatProductSetFromOptions_Nest_Dense)); 2318 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", MatProductSetFromOptions_Nest_Dense)); 2319 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_dense_C", 
MatProductSetFromOptions_Nest_Dense)); 2320 2321 PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATNEST)); 2322 PetscFunctionReturn(PETSC_SUCCESS); 2323 } 2324