#include <../src/mat/impls/nest/matnestimpl.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <petscsf.h>

static PetscErrorCode MatSetUp_NestIS_Private(Mat, PetscInt, const IS[], PetscInt, const IS[]);
static PetscErrorCode MatCreateVecs_Nest(Mat, Vec *, Vec *);
static PetscErrorCode MatReset_Nest(Mat);

PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat, MatType, MatReuse, Mat *);

/* private functions */

/* Compute the local/global row and column sizes of the whole nest matrix by
   summing the sizes of the global row/column index sets of each block row/column */
static PetscErrorCode MatNestGetSizes_Private(Mat A, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  *m = *n = *M = *N = 0;
  for (i = 0; i < bA->nr; i++) { /* rows */
    PetscInt sm, sM;
    PetscCall(ISGetLocalSize(bA->isglobal.row[i], &sm));
    PetscCall(ISGetSize(bA->isglobal.row[i], &sM));
    *m += sm;
    *M += sM;
  }
  for (j = 0; j < bA->nc; j++) { /* cols */
    PetscInt sn, sN;
    PetscCall(ISGetLocalSize(bA->isglobal.col[j], &sn));
    PetscCall(ISGetSize(bA->isglobal.col[j], &sN));
    *n += sn;
    *N += sN;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* operations */

/* y = A*x, computed blockwise: y[i] = sum_j A[i][j]*x[j], where the block
   subvectors are views into x and y obtained through the global index sets */
static PetscErrorCode MatMult_Nest(Mat A, Vec x, Vec y)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->right, *by = bA->left;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i]));
  for (i = 0; i < nr; i++) {
    PetscCall(VecZeroEntries(by[i]));
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue; /* NULL blocks act as zero blocks */
      /* y[i] <- y[i] + A[i][j] * x[j] */
      PetscCall(MatMultAdd(bA->m[i][j], bx[j], by[i], by[i]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A*x, computed blockwise; when y != z the y block is first copied into z */
static PetscErrorCode MatMultAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->right, *bz = bA->left;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(z, bA->isglobal.row[i], &bz[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(x, bA->isglobal.col[i], &bx[i]));
  for (i = 0; i < nr; i++) {
    if (y != z) {
      Vec by;
      PetscCall(VecGetSubVector(y, bA->isglobal.row[i], &by));
      PetscCall(VecCopy(by, bz[i]));
      PetscCall(VecRestoreSubVector(y, bA->isglobal.row[i], &by));
    }
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      /* z[i] <- z[i] + A[i][j] * x[j] */
      PetscCall(MatMultAdd(bA->m[i][j], bx[j], bz[i], bz[i]));
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.row[i], &bz[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.col[i], &bx[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Context stored in C->product->data for the Nest x Dense product */
typedef struct {
  Mat         *workC;      /* array of Mat with specific containers depending on the underlying MatMatMult implementation */
  PetscScalar *tarray;     /* buffer for storing all temporary products A[i][j] B[j] */
  PetscInt    *dm, *dn, k; /* displacements and number of submatrices */
} Nest_Dense;

/* Numeric phase of C = A*B where A is MATNEST and B is dense; blocks of B and C
   are wrapped (without copy) as dense matrices viewing the arrays of B and C */
static PetscErrorCode MatProductNumeric_Nest_Dense(Mat C)
{
  Mat_Nest          *bA;
  Nest_Dense        *contents;
  Mat                viewB, viewC, productB, workC;
  const PetscScalar *barray;
  PetscScalar       *carray;
  PetscInt           i, j, M, N, nr, nc, ldb, ldc;
  Mat                A, B;

  PetscFunctionBegin;
  MatCheckProduct(C, 1);
  A = C->product->A;
  B = C->product->B;
  PetscCall(MatGetSize(B, NULL, &N));
  if (!N) { /* B has no columns: nothing to compute, just assemble C */
    PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  contents = (Nest_Dense *)C->product->data;
  PetscCheck(contents, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data empty");
  bA = (Mat_Nest *)A->data;
  nr = bA->nr;
  nc = bA->nc;
  PetscCall(MatDenseGetLDA(B, &ldb));
  PetscCall(MatDenseGetLDA(C, &ldc));
  PetscCall(MatZeroEntries(C));
  PetscCall(MatDenseGetArrayRead(B, &barray));
  PetscCall(MatDenseGetArray(C, &carray));
  for (i = 0; i < nr; i++) {
    PetscCall(ISGetSize(bA->isglobal.row[i], &M));
    /* viewC wraps the rows of C belonging to block row i (no copy) */
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dm[i + 1] - contents->dm[i], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset(carray, contents->dm[i]), &viewC));
    PetscCall(MatDenseSetLDA(viewC, ldc));
    for (j = 0; j < nc; j++) {
      if (!bA->m[i][j]) continue;
      PetscCall(ISGetSize(bA->isglobal.col[j], &M));
      /* viewB wraps the rows of B belonging to block column j (no copy) */
      PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset((PetscScalar *)barray, contents->dn[j]), &viewB));
      PetscCall(MatDenseSetLDA(viewB, ldb));

      /* MatMatMultNumeric(bA->m[i][j],viewB,contents->workC[i*nc + j]); */
      workC             = contents->workC[i * nc + j];
      productB          = workC->product->B;
      workC->product->B = viewB; /* use newly created dense matrix viewB */
      PetscCall(MatProductNumeric(workC));
      PetscCall(MatDestroy(&viewB));
      workC->product->B = productB; /* resume original B */

      /* C[i] <- workC + C[i] */
      PetscCall(MatAXPY(viewC, 1.0, contents->workC[i * nc + j], SAME_NONZERO_PATTERN));
    }
    PetscCall(MatDestroy(&viewC));
  }
  PetscCall(MatDenseRestoreArray(C, &carray));
  PetscCall(MatDenseRestoreArrayRead(B, &barray));

  PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destroy callback for the Nest_Dense product context */
static PetscErrorCode MatNest_DenseDestroy(void *ctx)
{
  Nest_Dense *contents = (Nest_Dense *)ctx;
  PetscInt    i;

  PetscFunctionBegin;
  PetscCall(PetscFree(contents->tarray));
  for (i = 0; i < contents->k; i++) PetscCall(MatDestroy(contents->workC + i));
  PetscCall(PetscFree3(contents->dm, contents->dn, contents->workC));
  PetscCall(PetscFree(contents));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic phase of C = A*B (A nest, B dense): sets up sizes/type of C, the
   row/column displacements, and one symbolic work product per nonzero block */
static PetscErrorCode MatProductSymbolic_Nest_Dense(Mat C)
{
  Mat_Nest          *bA;
  Mat                viewB, workC;
  const PetscScalar *barray;
  PetscInt           i, j, M, N, m, n, nr, nc, maxm = 0, ldb;
  Nest_Dense        *contents = NULL;
  PetscBool          cisdense;
  Mat                A, B;
  PetscReal          fill;

  PetscFunctionBegin;
  MatCheckProduct(C, 1);
  PetscCheck(!C->product->data, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Product data not empty");
  A    = C->product->A;
  B    = C->product->B;
  fill = C->product->fill;
  bA   = (Mat_Nest *)A->data;
  nr   = bA->nr;
  nc   = bA->nc;
  PetscCall(MatGetLocalSize(C, &m, &n));
  PetscCall(MatGetSize(C, &M, &N));
  if (m == PETSC_DECIDE || n == PETSC_DECIDE || M == PETSC_DECIDE || N == PETSC_DECIDE) {
    /* derive the sizes of C from the row space of A and the column space of B */
    PetscCall(MatGetLocalSize(B, NULL, &n));
    PetscCall(MatGetSize(B, NULL, &N));
    PetscCall(MatGetLocalSize(A, &m, NULL));
    PetscCall(MatGetSize(A, &M, NULL));
    PetscCall(MatSetSizes(C, m, n, M, N));
  }
  PetscCall(PetscObjectTypeCompareAny((PetscObject)C, &cisdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, MATMPIDENSECUDA, ""));
  if (!cisdense) PetscCall(MatSetType(C, ((PetscObject)B)->type_name));
  PetscCall(MatSetUp(C));
  if (!N) { /* empty column space: nothing to precompute */
    C->ops->productnumeric = MatProductNumeric_Nest_Dense;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscNew(&contents));
  C->product->data    = contents;
  C->product->destroy = MatNest_DenseDestroy;
  PetscCall(PetscCalloc3(nr + 1, &contents->dm, nc + 1, &contents->dn, nr * nc, &contents->workC));
  contents->k = nr * nc;
  /* dm/dn hold prefix sums of the local row/column sizes of the blocks */
  for (i = 0; i < nr; i++) {
    PetscCall(ISGetLocalSize(bA->isglobal.row[i], contents->dm + i + 1));
    maxm = PetscMax(maxm, contents->dm[i + 1]);
    contents->dm[i + 1] += contents->dm[i];
  }
  for (i = 0; i < nc; i++) {
    PetscCall(ISGetLocalSize(bA->isglobal.col[i], contents->dn + i + 1));
    contents->dn[i + 1] += contents->dn[i];
  }
  /* one buffer, large enough for the biggest block row, shared by all work products */
  PetscCall(PetscMalloc1(maxm * N, &contents->tarray));
  PetscCall(MatDenseGetLDA(B, &ldb));
  PetscCall(MatGetSize(B, NULL, &N));
  PetscCall(MatDenseGetArrayRead(B, &barray));
  /* loops are permuted compared to MatMatMultNumeric so that viewB is created only once per column of A */
  for (j = 0; j < nc; j++) {
    PetscCall(ISGetSize(bA->isglobal.col[j], &M));
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), contents->dn[j + 1] - contents->dn[j], PETSC_DECIDE, M, N, PetscSafePointerPlusOffset((PetscScalar *)barray, contents->dn[j]), &viewB));
    PetscCall(MatDenseSetLDA(viewB, ldb));
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      /* MatMatMultSymbolic may attach a specific container (depending on MatType of bA->m[i][j]) to workC[i][j] */

      PetscCall(MatProductCreate(bA->m[i][j], viewB, NULL, &contents->workC[i * nc + j]));
      workC = contents->workC[i * nc + j];
      PetscCall(MatProductSetType(workC, MATPRODUCT_AB));
      PetscCall(MatProductSetAlgorithm(workC, "default"));
      PetscCall(MatProductSetFill(workC, fill));
      PetscCall(MatProductSetFromOptions(workC));
      PetscCall(MatProductSymbolic(workC));

      /* since tarray will be shared by all Mat */
      PetscCall(MatSeqDenseSetPreallocation(workC, contents->tarray));
      PetscCall(MatMPIDenseSetPreallocation(workC, contents->tarray));
    }
    PetscCall(MatDestroy(&viewB));
  }
  PetscCall(MatDenseRestoreArrayRead(B, &barray));

  C->ops->productnumeric = MatProductNumeric_Nest_Dense;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hook the symbolic phase into C for the supported product type (AB only) */
static PetscErrorCode MatProductSetFromOptions_Nest_Dense(Mat C)
{
  Mat_Product *product = C->product;

  PetscFunctionBegin;
  if (product->type == MATPRODUCT_AB) C->ops->productsymbolic = MatProductSymbolic_Nest_Dense;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Shared kernel for y = A^T x (herm == PETSC_FALSE) and y = A^H x (herm == PETSC_TRUE):
   y[j] = sum_i op(A[i][j]) x[i]; note row/column roles of left/right are swapped */
static PetscErrorCode MatMultTransposeKernel_Nest(Mat A, Vec x, Vec y, PetscBool herm)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->left, *by = bA->right;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(y, bA->isglobal.col[i], &by[i]));
  for (j = 0; j < nc; j++) {
    PetscCall(VecZeroEntries(by[j]));
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      if (herm) PetscCall(MatMultHermitianTransposeAdd(bA->m[i][j], bx[i], by[j], by[j])); /* y[j] <- y[j] + (A[i][j])^H * x[i] */
      else PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], by[j], by[j]));               /* y[j] <- y[j] + (A[i][j])^T * x[i] */
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(y, bA->isglobal.col[i], &by[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* y = A^T x */
static PetscErrorCode MatMultTranspose_Nest(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatMultTransposeKernel_Nest(A, x, y, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* y = A^H x */
static PetscErrorCode MatMultHermitianTranspose_Nest(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatMultTransposeKernel_Nest(A, x, y, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Shared kernel for z = y + A^T x / z = y + A^H x */
static PetscErrorCode MatMultTransposeAddKernel_Nest(Mat A, Vec x, Vec y, Vec z, PetscBool herm)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *bx = bA->left, *bz = bA->right;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  for (i = 0; i < nr; i++) PetscCall(VecGetSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecGetSubVector(z, bA->isglobal.col[i], &bz[i]));
  for (j = 0; j < nc; j++) {
    if (y != z) {
      Vec by;
      PetscCall(VecGetSubVector(y, bA->isglobal.col[j], &by));
      PetscCall(VecCopy(by, bz[j]));
      PetscCall(VecRestoreSubVector(y, bA->isglobal.col[j], &by));
    }
    for (i = 0; i < nr; i++) {
      if (!bA->m[i][j]) continue;
      if (herm) PetscCall(MatMultHermitianTransposeAdd(bA->m[i][j], bx[i], bz[j], bz[j])); /* z[j] <- y[j] + (A[i][j])^H * x[i] */
      else PetscCall(MatMultTransposeAdd(bA->m[i][j], bx[i], bz[j], bz[j]));               /* z[j] <- y[j] + (A[i][j])^T * x[i] */
    }
  }
  for (i = 0; i < nr; i++) PetscCall(VecRestoreSubVector(x, bA->isglobal.row[i], &bx[i]));
  for (i = 0; i < nc; i++) PetscCall(VecRestoreSubVector(z, bA->isglobal.col[i], &bz[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A^T x */
static PetscErrorCode MatMultTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  PetscCall(MatMultTransposeAddKernel_Nest(A, x, y, z, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A^H x */
static PetscErrorCode MatMultHermitianTransposeAdd_Nest(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  PetscCall(MatMultTransposeAddKernel_Nest(A, x, y, z, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* B = A^T: build (or reuse) a nest matrix with swapped row/column index sets and
   transposed blocks; MAT_INPLACE_MATRIX is only supported for square nests */
static PetscErrorCode MatTranspose_Nest(Mat A, MatReuse reuse, Mat *B)
{
  Mat_Nest *bA = (Mat_Nest *)A->data, *bC;
  Mat       C;
  PetscInt  i, j, nr = bA->nr, nc = bA->nc;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
  PetscCheck(reuse != MAT_INPLACE_MATRIX || nr == nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Square nested matrix only for in-place");

  if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
    Mat *subs;
    IS  *is_row, *is_col;

    PetscCall(PetscCalloc1(nr * nc, &subs));
    PetscCall(PetscMalloc2(nr, &is_row, nc, &is_col));
    PetscCall(MatNestGetISs(A, is_row, is_col));
    if (reuse == MAT_INPLACE_MATRIX) {
      /* pre-populate with the original blocks (transposed in place below) */
      for (i = 0; i < nr; i++) {
        for (j = 0; j < nc; j++) subs[i + nr * j] = bA->m[i][j];
      }
    }

    PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nc, is_col, nr, is_row, subs, &C));
    PetscCall(PetscFree(subs));
    PetscCall(PetscFree2(is_row, is_col));
  } else {
    C = *B;
  }

  bC = (Mat_Nest *)C->data;
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (bA->m[i][j]) {
        PetscCall(MatTranspose(bA->m[i][j], reuse, &(bC->m[j][i])));
      } else {
        bC->m[j][i] = NULL;
      }
    }
  }

  if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) {
    *B = C;
  } else {
    PetscCall(MatHeaderMerge(A, &C));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destroy every IS in *list, free the array, and NULL out the caller's pointer */
static PetscErrorCode MatNestDestroyISList(PetscInt n, IS **list)
{
  IS      *lst = *list;
  PetscInt i;

  PetscFunctionBegin;
  if (!lst) PetscFunctionReturn(PETSC_SUCCESS);
  for (i = 0; i < n; i++)
    if (lst[i]) PetscCall(ISDestroy(&lst[i]));
  PetscCall(PetscFree(lst));
  *list = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release all internal storage of the nest (index sets, work vectors, blocks)
   and restore the structure to its empty default state */
static PetscErrorCode MatReset_Nest(Mat A)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  /* release the matrices and the place holders */
  PetscCall(MatNestDestroyISList(vs->nr, &vs->isglobal.row));
  PetscCall(MatNestDestroyISList(vs->nc, &vs->isglobal.col));
  PetscCall(MatNestDestroyISList(vs->nr, &vs->islocal.row));
  PetscCall(MatNestDestroyISList(vs->nc, &vs->islocal.col));

  PetscCall(PetscFree(vs->row_len));
  PetscCall(PetscFree(vs->col_len));
  PetscCall(PetscFree(vs->nnzstate));

  PetscCall(PetscFree2(vs->left, vs->right));

  /* release the matrices and the place holders */
  if (vs->m) {
    for (i = 0; i < vs->nr; i++) {
      for (j = 0; j < vs->nc; j++) PetscCall(MatDestroy(&vs->m[i][j]));
    }
    PetscCall(PetscFree(vs->m[0])); /* row pointers share one contiguous allocation */
    PetscCall(PetscFree(vs->m));
  }

  /* restore defaults */
  vs->nr            = 0;
  vs->nc            = 0;
  vs->splitassembly = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Full destruction: reset, free the data structure, and detach all composed methods */
static PetscErrorCode MatDestroy_Nest(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatReset_Nest(A));
  PetscCall(PetscFree(A->data));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static
PetscErrorCode MatMissingDiagonal_Nest(Mat mat, PetscBool *missing, PetscInt *dd)
{
  Mat_Nest *vs = (Mat_Nest *)mat->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (dd) *dd = 0;
  if (!vs->nr) { /* empty nest trivially misses its diagonal */
    *missing = PETSC_TRUE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  *missing = PETSC_FALSE;
  for (i = 0; i < vs->nr && !(*missing); i++) {
    *missing = PETSC_TRUE; /* a NULL diagonal block means a missing diagonal */
    if (vs->m[i][i]) {
      PetscCall(MatMissingDiagonal(vs->m[i][i], missing, NULL));
      PetscCheck(!*missing || !dd, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "First missing entry not yet implemented");
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Begin (and, unless split assembly is requested, also end) assembly of every
   block, and update the aggregate nonzero state of the nest matrix */
static PetscErrorCode MatAssemblyBegin_Nest(Mat A, MatAssemblyType type)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < vs->nr; i++) {
    for (j = 0; j < vs->nc; j++) {
      PetscObjectState subnnzstate = 0;
      if (vs->m[i][j]) {
        PetscCall(MatAssemblyBegin(vs->m[i][j], type));
        if (!vs->splitassembly) {
          /* Note: split assembly will fail if the same block appears more than once (even indirectly through a nested
           * sub-block). This could be fixed by adding a flag to Mat so that there was a way to check if a Mat was
           * already performing an assembly, but the result would by more complicated and appears to offer less
           * potential for diagnostics and correctness checking. Split assembly should be fixed once there is an
           * interface for libraries to make asynchronous progress in "user-defined non-blocking collectives".
           */
          PetscCall(MatAssemblyEnd(vs->m[i][j], type));
          PetscCall(MatGetNonzeroState(vs->m[i][j], &subnnzstate));
        }
      }
      nnzstate                     = (PetscBool)(nnzstate || vs->nnzstate[i * vs->nc + j] != subnnzstate);
      vs->nnzstate[i * vs->nc + j] = subnnzstate;
    }
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Finish assembly of the blocks; a no-op unless split assembly was requested */
static PetscErrorCode MatAssemblyEnd_Nest(Mat A, MatAssemblyType type)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < vs->nr; i++) {
    for (j = 0; j < vs->nc; j++) {
      if (vs->m[i][j]) {
        if (vs->splitassembly) PetscCall(MatAssemblyEnd(vs->m[i][j], type));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return some non-NULL block in the given block row (diagonal preferred), or NULL */
static PetscErrorCode MatNestFindNonzeroSubMatRow(Mat A, PetscInt row, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  j;
  Mat       sub;

  PetscFunctionBegin;
  sub = (row < vs->nc) ? vs->m[row][row] : (Mat)NULL; /* Prefer to find on the diagonal */
  for (j = 0; !sub && j < vs->nc; j++) sub = vs->m[row][j];
  if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return some non-NULL block in the given block column (diagonal preferred), or NULL */
static PetscErrorCode MatNestFindNonzeroSubMatCol(Mat A, PetscInt col, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;
  Mat       sub;

  PetscFunctionBegin;
  sub = (col < vs->nr) ? vs->m[col][col] : (Mat)NULL; /* Prefer to find on the diagonal */
  for (i = 0; !sub && i < vs->nr; i++) sub = vs->m[i][col];
  if (sub) PetscCall(MatSetUp(sub)); /* Ensure that the sizes are available */
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Locate `is` within `list`: either as a single entry [begin, begin+1), or as the
   concatenation of consecutive entries [begin, end); -1/-1 is returned when not found */
static PetscErrorCode MatNestFindISRange(Mat A, PetscInt n, const IS list[], IS is, PetscInt *begin, PetscInt *end)
{
  PetscInt  i, j, size, m;
  PetscBool flg;
  IS        out, concatenate[2];

  PetscFunctionBegin;
  PetscAssertPointer(list, 3);
  PetscValidHeaderSpecific(is, IS_CLASSID, 4);
  if (begin) {
    PetscAssertPointer(begin, 5);
    *begin = -1;
  }
  if (end) {
    PetscAssertPointer(end, 6);
    *end = -1;
  }
  /* first try an exact match against a single list entry */
  for (i = 0; i < n; i++) {
    if (!list[i]) continue;
    PetscCall(ISEqualUnsorted(list[i], is, &flg));
    if (flg) {
      if (begin) *begin = i;
      if (end) *end = i + 1;
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  /* otherwise grow concatenations starting at each entry until the sizes match */
  PetscCall(ISGetSize(is, &size));
  for (i = 0; i < n - 1; i++) {
    if (!list[i]) continue;
    m = 0;
    PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, list + i, &out));
    PetscCall(ISGetSize(out, &m));
    for (j = i + 2; j < n && m < size; j++) {
      if (list[j]) {
        concatenate[0] = out;
        concatenate[1] = list[j];
        PetscCall(ISConcatenate(PetscObjectComm((PetscObject)A), 2, concatenate, &out));
        PetscCall(ISDestroy(concatenate)); /* destroy the previous partial concatenation */
        PetscCall(ISGetSize(out, &m));
      }
    }
    if (m == size) {
      PetscCall(ISEqualUnsorted(out, is, &flg));
      if (flg) {
        if (begin) *begin = i;
        if (end) *end = j;
        PetscCall(ISDestroy(&out));
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }
    PetscCall(ISDestroy(&out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Create an empty (zero-preallocated, assembled) AIJ matrix sized like block (i,j) */
static PetscErrorCode MatNestFillEmptyMat_Private(Mat A, PetscInt i, PetscInt j, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  lr, lc;

  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B));
  PetscCall(ISGetLocalSize(vs->isglobal.row[i], &lr));
  PetscCall(ISGetLocalSize(vs->isglobal.col[j], &lc));
  PetscCall(MatSetSizes(*B, lr, lc, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(MatSetType(*B, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(*B, 0, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*B, 0, NULL, 0, NULL));
  PetscCall(MatSetUp(*B));
  PetscCall(MatSetOption(*B, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Build (or fetch from the object's composed cache) the nest submatrix covering
   block rows [rbegin, rend) and block columns [cbegin, cend) */
static PetscErrorCode MatNestGetBlock_Private(Mat A, PetscInt rbegin, PetscInt rend, PetscInt cbegin, PetscInt cend, Mat *B)
{
  Mat_Nest  *vs = (Mat_Nest *)A->data;
  Mat       *a;
  PetscInt   i, j, k, l, nr = rend - rbegin, nc = cend - cbegin;
  char       keyname[256];
  PetscBool *b;
  PetscBool  flg;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(PetscSNPrintf(keyname, sizeof(keyname), "NestBlock_%" PetscInt_FMT "-%" PetscInt_FMT "x%" PetscInt_FMT "-%" PetscInt_FMT, rbegin, rend, cbegin, cend));
  PetscCall(PetscObjectQuery((PetscObject)A, keyname, (PetscObject *)B));
  if (*B) PetscFunctionReturn(PETSC_SUCCESS); /* cached from an earlier call */

  PetscCall(PetscMalloc2(nr * nc, &a, nr * nc, &b));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      a[i * nc + j] = vs->m[rbegin + i][cbegin + j];
      b[i * nc + j] = PETSC_FALSE; /* b marks temporary filler blocks we must destroy */
    }
  }
  if (nc != vs->nc && nr != vs->nr) {
    /* a proper sub-block cannot infer sizes from the parent ISs, so fill positions
       whose row and column are otherwise entirely NULL with empty placeholder mats
       NOTE(review): the k-loop scans a[j + k*nc] (column j) and the l-loop scans
       a[i*nc + l] (row i) -- verify the index roles against upstream PETSc */
    for (i = 0; i < nr; i++) {
      for (j = 0; j < nc; j++) {
        flg = PETSC_FALSE;
        for (k = 0; (k < nr && !flg); k++) {
          if (a[j + k * nc]) flg = PETSC_TRUE;
        }
        if (flg) {
          flg = PETSC_FALSE;
          for (l = 0; (l < nc && !flg); l++) {
            if (a[i * nc + l]) flg = PETSC_TRUE;
          }
        }
        if (!flg) {
          b[i * nc + j] = PETSC_TRUE;
          PetscCall(MatNestFillEmptyMat_Private(A, rbegin + i, cbegin + j, a + i * nc + j));
        }
      }
    }
  }
  PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, nr != vs->nr ? NULL : vs->isglobal.row, nc, nc != vs->nc ? NULL : vs->isglobal.col, a, B));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (b[i * nc + j]) PetscCall(MatDestroy(a + i * nc + j));
    }
  }
  PetscCall(PetscFree2(a, b));
  (*B)->assembled = A->assembled;
  PetscCall(PetscObjectCompose((PetscObject)A, keyname, (PetscObject)*B));
  PetscCall(PetscObjectDereference((PetscObject)*B)); /* Leave the only remaining reference in the composition */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Map (isrow, iscol) onto block ranges of the nest and return the corresponding
   submatrix: a single block, or a cached nest of blocks for multi-block ranges */
static PetscErrorCode MatNestFindSubMat(Mat A, struct MatNestISPair *is, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  rbegin, rend, cbegin, cend;

  PetscFunctionBegin;
  PetscCall(MatNestFindISRange(A, vs->nr, is->row, isrow, &rbegin, &rend));
  PetscCall(MatNestFindISRange(A, vs->nc, is->col, iscol, &cbegin, &cend));
  if (rend == rbegin + 1 && cend == cbegin + 1) {
    if (!vs->m[rbegin][cbegin]) PetscCall(MatNestFillEmptyMat_Private(A, rbegin, cbegin, vs->m[rbegin] + cbegin));
    *B = vs->m[rbegin][cbegin];
  } else if (rbegin != -1 && cbegin != -1) {
    PetscCall(MatNestGetBlock_Private(A, rbegin, rend, cbegin, cend, B));
  } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Could not find index set");
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   TODO: This does not actually returns a submatrix we can modify
*/
static PetscErrorCode MatCreateSubMatrix_Nest(Mat A, IS isrow, IS iscol, MatReuse reuse, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->isglobal, isrow, iscol, &sub));
  switch (reuse) {
  case MAT_INITIAL_MATRIX:
    if (sub) PetscCall(PetscObjectReference((PetscObject)sub));
    *B = sub;
    break;
  case MAT_REUSE_MATRIX:
    PetscCheck(sub == *B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Submatrix was not used before in this call");
    break;
  case MAT_IGNORE_MATRIX: /* Nothing to do */
    break;
  case MAT_INPLACE_MATRIX: /* Nothing to do */
    SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX is not supported yet");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Local-numbering variant of submatrix lookup, used by MatGetLocalSubMatrix() */
static PetscErrorCode MatGetLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub));
  /* We allow the submatrix to be NULL, perhaps it would be better for the user to return an empty matrix instead */
  if (sub) PetscCall(PetscObjectReference((PetscObject)sub));
  *B = sub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release the reference taken by MatGetLocalSubMatrix_Nest() */
static PetscErrorCode MatRestoreLocalSubMatrix_Nest(Mat A, IS isrow, IS iscol, Mat *B)
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  Mat       sub;

  PetscFunctionBegin;
  PetscCall(MatNestFindSubMat(A, &vs->islocal, isrow, iscol, &sub));
  PetscCheck(*B == sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has not been gotten");
  if (sub) {
    PetscCheck(((PetscObject)sub)->refct > 1, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Local submatrix has had reference count decremented too many times");
    PetscCall(MatDestroy(B));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* v = diag(A): gather the diagonal of each diagonal block; NULL blocks contribute zeros */
static PetscErrorCode MatGetDiagonal_Nest(Mat A, Vec v)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    Vec bv;
    PetscCall(VecGetSubVector(v, bA->isglobal.row[i], &bv));
    if (bA->m[i][i]) {
      PetscCall(MatGetDiagonal(bA->m[i][i], bv));
    } else {
      PetscCall(VecSet(bv, 0.0));
    }
    PetscCall(VecRestoreSubVector(v, bA->isglobal.row[i], &bv));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A = diag(l) A diag(r), applied blockwise; l and/or r may be NULL */
static PetscErrorCode MatDiagonalScale_Nest(Mat A, Vec l, Vec r)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec       bl, *br;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscCalloc1(bA->nc, &br));
  if (r) {
    for (j = 0; j < bA->nc; j++) PetscCall(VecGetSubVector(r, bA->isglobal.col[j], &br[j]));
  }
  bl = NULL;
  for (i = 0; i < bA->nr; i++) {
    if (l) PetscCall(VecGetSubVector(l, bA->isglobal.row[i], &bl));
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatDiagonalScale(bA->m[i][j], bl, br[j]));
    }
    if (l) PetscCall(VecRestoreSubVector(l, bA->isglobal.row[i], &bl));
  }
  if (r) {
    for (j = 0; j < bA->nc; j++) PetscCall(VecRestoreSubVector(r, bA->isglobal.col[j], &br[j]));
  }
  PetscCall(PetscFree(br));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A = a*A, applied to every non-NULL block */
static PetscErrorCode MatScale_Nest(Mat A, PetscScalar a)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatScale(bA->m[i][j], a));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A = A + a*I; requires every diagonal block to exist; tracks nonzero-state changes */
static PetscErrorCode MatShift_Nest(Mat A, PetscScalar a)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    PetscObjectState subnnzstate = 0;
    PetscCheck(bA->m[i][i], PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for shifting an empty diagonal block, insert a matrix in block (%" PetscInt_FMT ",%" PetscInt_FMT ")", i, i);
    PetscCall(MatShift(bA->m[i][i], a));
    PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate));
    nnzstate                     = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate);
    bA->nnzstate[i * bA->nc + i] = subnnzstate;
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Set the diagonal from D blockwise; NULL diagonal blocks are silently skipped */
static PetscErrorCode MatDiagonalSet_Nest(Mat A, Vec D, InsertMode is)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i;
  PetscBool nnzstate = PETSC_FALSE;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    PetscObjectState subnnzstate = 0;
    Vec              bv;
    PetscCall(VecGetSubVector(D, bA->isglobal.row[i], &bv));
    if (bA->m[i][i]) {
      PetscCall(MatDiagonalSet(bA->m[i][i], bv, is));
      PetscCall(MatGetNonzeroState(bA->m[i][i], &subnnzstate));
    }
    PetscCall(VecRestoreSubVector(D, bA->isglobal.row[i], &bv));
    nnzstate                     = (PetscBool)(nnzstate || bA->nnzstate[i * bA->nc + i] != subnnzstate);
    bA->nnzstate[i * bA->nc + i] = subnnzstate;
  }
  if (nnzstate) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Fill every non-NULL block with random values from rctx */
static PetscErrorCode MatSetRandom_Nest(Mat A, PetscRandom rctx)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  i, j;

  PetscFunctionBegin;
  for (i = 0; i < bA->nr; i++) {
    for (j = 0; j < bA->nc; j++) {
      if (bA->m[i][j]) PetscCall(MatSetRandom(bA->m[i][j], rctx));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Create compatible right/left work vectors as VECNEST, built from the first
   non-NULL block found in each block column/row respectively */
static PetscErrorCode MatCreateVecs_Nest(Mat A, Vec *right, Vec *left)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  Vec      *L, *R;
  MPI_Comm  comm;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  if (right) {
    /* allocate R */
    PetscCall(PetscMalloc1(bA->nc, &R));
    /* Create the right vectors */
    for (j = 0; j < bA->nc; j++) {
      for (i = 0; i < bA->nr; i++) {
        if (bA->m[i][j]) {
          PetscCall(MatCreateVecs(bA->m[i][j], &R[j], NULL));
          break;
        }
      }
      PetscCheck(i != bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null column.");
    }
    PetscCall(VecCreateNest(comm, bA->nc, bA->isglobal.col, R, right));
    /* hand back control to the nest vector */
    for (j = 0; j < bA->nc; j++) PetscCall(VecDestroy(&R[j]));
    PetscCall(PetscFree(R));
  }

  if (left) {
    /* allocate L */
    PetscCall(PetscMalloc1(bA->nr, &L));
    /* Create the left vectors */
    for (i = 0; i < bA->nr; i++) {
      for (j = 0; j < bA->nc; j++) {
        if (bA->m[i][j]) {
          PetscCall(MatCreateVecs(bA->m[i][j], NULL, &L[i]));
          break;
        }
      }
      PetscCheck(j != bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null row.");
    }

    PetscCall(VecCreateNest(comm, bA->nr, bA->isglobal.row, L, left));
    for (i = 0; i < bA->nr; i++) PetscCall(VecDestroy(&L[i]));

    PetscCall(PetscFree(L));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ASCII viewer: print the nest layout, and recurse into nested blocks (or all
   blocks when -mat_view_nest_sub is set) */
static PetscErrorCode MatView_Nest(Mat A, PetscViewer viewer)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscBool isascii, viewSub = PETSC_FALSE;
  PetscInt  i, j;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    PetscCall(PetscOptionsGetBool(((PetscObject)A)->options, ((PetscObject)A)->prefix, "-mat_view_nest_sub", &viewSub, NULL));
    PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix object:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, "type=nest, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", bA->nr, bA->nc));

    PetscCall(PetscViewerASCIIPrintf(viewer, "MatNest structure:\n"));
    for (i = 0; i < bA->nr; i++) {
      for (j = 0; j < bA->nc; j++) {
        MatType   type;
        char      name[256] = "", prefix[256] = "";
        PetscInt  NR, NC;
        PetscBool isNest = PETSC_FALSE;

        if (!bA->m[i][j]) {
          PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : NULL\n", i, j));
          continue;
        }
        PetscCall(MatGetSize(bA->m[i][j], &NR, &NC));
        PetscCall(MatGetType(bA->m[i][j], &type));
        if (((PetscObject)bA->m[i][j])->name) PetscCall(PetscSNPrintf(name, sizeof(name),
"name=\"%s\", ", ((PetscObject)bA->m[i][j])->name)); 950 if (((PetscObject)bA->m[i][j])->prefix) PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "prefix=\"%s\", ", ((PetscObject)bA->m[i][j])->prefix)); 951 PetscCall(PetscObjectTypeCompare((PetscObject)bA->m[i][j], MATNEST, &isNest)); 952 953 PetscCall(PetscViewerASCIIPrintf(viewer, "(%" PetscInt_FMT ",%" PetscInt_FMT ") : %s%stype=%s, rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", i, j, name, prefix, type, NR, NC)); 954 955 if (isNest || viewSub) { 956 PetscCall(PetscViewerASCIIPushTab(viewer)); /* push1 */ 957 PetscCall(MatView(bA->m[i][j], viewer)); 958 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop1 */ 959 } 960 } 961 } 962 PetscCall(PetscViewerASCIIPopTab(viewer)); /* pop0 */ 963 } 964 PetscFunctionReturn(PETSC_SUCCESS); 965 } 966 967 static PetscErrorCode MatZeroEntries_Nest(Mat A) 968 { 969 Mat_Nest *bA = (Mat_Nest *)A->data; 970 PetscInt i, j; 971 972 PetscFunctionBegin; 973 for (i = 0; i < bA->nr; i++) { 974 for (j = 0; j < bA->nc; j++) { 975 if (!bA->m[i][j]) continue; 976 PetscCall(MatZeroEntries(bA->m[i][j])); 977 } 978 } 979 PetscFunctionReturn(PETSC_SUCCESS); 980 } 981 982 static PetscErrorCode MatCopy_Nest(Mat A, Mat B, MatStructure str) 983 { 984 Mat_Nest *bA = (Mat_Nest *)A->data, *bB = (Mat_Nest *)B->data; 985 PetscInt i, j, nr = bA->nr, nc = bA->nc; 986 PetscBool nnzstate = PETSC_FALSE; 987 988 PetscFunctionBegin; 989 PetscCheck(nr == bB->nr && nc == bB->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Cannot copy a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") to a Mat_Nest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bB->nr, bB->nc, nr, nc); 990 for (i = 0; i < nr; i++) { 991 for (j = 0; j < nc; j++) { 992 PetscObjectState subnnzstate = 0; 993 if (bA->m[i][j] && bB->m[i][j]) { 994 PetscCall(MatCopy(bA->m[i][j], bB->m[i][j], str)); 995 } else PetscCheck(!bA->m[i][j] && !bB->m[i][j], PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Matrix block does 
not exist at %" PetscInt_FMT ",%" PetscInt_FMT, i, j); 996 PetscCall(MatGetNonzeroState(bB->m[i][j], &subnnzstate)); 997 nnzstate = (PetscBool)(nnzstate || bB->nnzstate[i * nc + j] != subnnzstate); 998 bB->nnzstate[i * nc + j] = subnnzstate; 999 } 1000 } 1001 if (nnzstate) B->nonzerostate++; 1002 PetscFunctionReturn(PETSC_SUCCESS); 1003 } 1004 1005 static PetscErrorCode MatAXPY_Nest(Mat Y, PetscScalar a, Mat X, MatStructure str) 1006 { 1007 Mat_Nest *bY = (Mat_Nest *)Y->data, *bX = (Mat_Nest *)X->data; 1008 PetscInt i, j, nr = bY->nr, nc = bY->nc; 1009 PetscBool nnzstate = PETSC_FALSE; 1010 1011 PetscFunctionBegin; 1012 PetscCheck(nr == bX->nr && nc == bX->nc, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Cannot AXPY a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ") with a MatNest of block size (%" PetscInt_FMT ",%" PetscInt_FMT ")", bX->nr, bX->nc, nr, nc); 1013 for (i = 0; i < nr; i++) { 1014 for (j = 0; j < nc; j++) { 1015 PetscObjectState subnnzstate = 0; 1016 if (bY->m[i][j] && bX->m[i][j]) { 1017 PetscCall(MatAXPY(bY->m[i][j], a, bX->m[i][j], str)); 1018 } else if (bX->m[i][j]) { 1019 Mat M; 1020 1021 PetscCheck(str == DIFFERENT_NONZERO_PATTERN || str == UNKNOWN_NONZERO_PATTERN, PetscObjectComm((PetscObject)Y), PETSC_ERR_ARG_INCOMP, "Matrix block does not exist at %" PetscInt_FMT ",%" PetscInt_FMT ". 
Use DIFFERENT_NONZERO_PATTERN or UNKNOWN_NONZERO_PATTERN", i, j); 1022 PetscCall(MatDuplicate(bX->m[i][j], MAT_COPY_VALUES, &M)); 1023 PetscCall(MatNestSetSubMat(Y, i, j, M)); 1024 PetscCall(MatDestroy(&M)); 1025 } 1026 if (bY->m[i][j]) PetscCall(MatGetNonzeroState(bY->m[i][j], &subnnzstate)); 1027 nnzstate = (PetscBool)(nnzstate || bY->nnzstate[i * nc + j] != subnnzstate); 1028 bY->nnzstate[i * nc + j] = subnnzstate; 1029 } 1030 } 1031 if (nnzstate) Y->nonzerostate++; 1032 PetscFunctionReturn(PETSC_SUCCESS); 1033 } 1034 1035 static PetscErrorCode MatDuplicate_Nest(Mat A, MatDuplicateOption op, Mat *B) 1036 { 1037 Mat_Nest *bA = (Mat_Nest *)A->data; 1038 Mat *b; 1039 PetscInt i, j, nr = bA->nr, nc = bA->nc; 1040 1041 PetscFunctionBegin; 1042 PetscCall(PetscMalloc1(nr * nc, &b)); 1043 for (i = 0; i < nr; i++) { 1044 for (j = 0; j < nc; j++) { 1045 if (bA->m[i][j]) { 1046 PetscCall(MatDuplicate(bA->m[i][j], op, &b[i * nc + j])); 1047 } else { 1048 b[i * nc + j] = NULL; 1049 } 1050 } 1051 } 1052 PetscCall(MatCreateNest(PetscObjectComm((PetscObject)A), nr, bA->isglobal.row, nc, bA->isglobal.col, b, B)); 1053 /* Give the new MatNest exclusive ownership */ 1054 for (i = 0; i < nr * nc; i++) PetscCall(MatDestroy(&b[i])); 1055 PetscCall(PetscFree(b)); 1056 1057 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 1058 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 1059 PetscFunctionReturn(PETSC_SUCCESS); 1060 } 1061 1062 /* nest api */ 1063 static PetscErrorCode MatNestGetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat *mat) 1064 { 1065 Mat_Nest *bA = (Mat_Nest *)A->data; 1066 1067 PetscFunctionBegin; 1068 PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1); 1069 PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1); 1070 *mat = bA->m[idxm][jdxm]; 
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestGetSubMat - Returns a single, sub-matrix from a `MATNEST`

  Not Collective

  Input Parameters:
+ A    - `MATNEST` matrix
. idxm - index of the matrix within the nest matrix
- jdxm - index of the matrix within the nest matrix

  Output Parameter:
. sub - matrix at index `idxm`, `jdxm` within the nest matrix

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestSetSubMat()`,
          `MatNestGetLocalISs()`, `MatNestGetISs()`
@*/
PetscErrorCode MatNestGetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat *sub)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatNestGetSubMat_C", (Mat, PetscInt, PetscInt, Mat *), (A, idxm, jdxm, sub));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Replace block (idxm,jdxm) after validating that the new matrix matches the
   global and local sizes implied by the nest's row/column index sets */
static PetscErrorCode MatNestSetSubMat_Nest(Mat A, PetscInt idxm, PetscInt jdxm, Mat mat)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;
  PetscInt  m, n, M, N, mi, ni, Mi, Ni;

  PetscFunctionBegin;
  PetscCheck(idxm < bA->nr, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm, bA->nr - 1);
  PetscCheck(jdxm < bA->nc, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Col too large: row %" PetscInt_FMT " max %" PetscInt_FMT, jdxm, bA->nc - 1);
  PetscCall(MatGetLocalSize(mat, &m, &n));
  PetscCall(MatGetSize(mat, &M, &N));
  PetscCall(ISGetLocalSize(bA->isglobal.row[idxm], &mi));
  PetscCall(ISGetSize(bA->isglobal.row[idxm], &Mi));
  PetscCall(ISGetLocalSize(bA->isglobal.col[jdxm], &ni));
  PetscCall(ISGetSize(bA->isglobal.col[jdxm], &Ni));
  PetscCheck(M == Mi && N == Ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" PetscInt_FMT ")", M, N, Mi, Ni);
  PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_INCOMP, "Submatrix local dimension (%" PetscInt_FMT ",%" PetscInt_FMT ") incompatible with nest block (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, mi, ni);

  /* do not increase object state */
  if (mat == bA->m[idxm][jdxm]) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscObjectReference((PetscObject)mat));
  PetscCall(MatDestroy(&bA->m[idxm][jdxm]));
  bA->m[idxm][jdxm] = mat;
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  PetscCall(MatGetNonzeroState(mat, &bA->nnzstate[idxm * bA->nc + jdxm]));
  A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestSetSubMat - Set a single submatrix in the `MATNEST`

  Logically Collective

  Input Parameters:
+ A    - `MATNEST` matrix
. idxm - index of the matrix within the nest matrix
. jdxm - index of the matrix within the nest matrix
- sub  - matrix at index `idxm`, `jdxm` within the nest matrix

  Level: developer

  Notes:
  The new submatrix must have the same size and communicator as that block of the nest.

  This increments the reference count of the submatrix.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestSetSubMats()`, `MatNestGetSubMats()`, `MatNestGetLocalISs()`, `MatCreateNest()`,
          `MatNestGetSubMat()`, `MatNestGetISs()`, `MatNestGetSize()`
@*/
PetscErrorCode MatNestSetSubMat(Mat A, PetscInt idxm, PetscInt jdxm, Mat sub)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatNestSetSubMat_C", (Mat, PetscInt, PetscInt, Mat), (A, idxm, jdxm, sub));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hand out the internal 2D block array (borrowed, row-major) and its extents */
static PetscErrorCode MatNestGetSubMats_Nest(Mat A, PetscInt *M, PetscInt *N, Mat ***mat)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;

  PetscFunctionBegin;
  if (M) *M = bA->nr;
  if (N) *N = bA->nc;
  if (mat) *mat = bA->m;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestGetSubMats - Returns the entire two dimensional array of matrices defining a `MATNEST` matrix.

  Not Collective

  Input Parameter:
. A - nest matrix

  Output Parameters:
+ M   - number of rows in the nest matrix
. N   - number of cols in the nest matrix
- mat - array of matrices

  Level: developer

  Note:
  The user should not free the array `mat`.

  Fortran Notes:
  This routine has a calling sequence
$   call MatNestGetSubMats(A, M, N, mat, ierr)
  where the space allocated for the optional argument `mat` is assumed large enough (if provided).
  Matrices in `mat` are returned in row-major order, see `MatCreateNest()` for an example.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSize()`, `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatCreateNest()`,
          `MatNestSetSubMats()`, `MatNestGetISs()`, `MatNestSetSubMat()`
@*/
PetscErrorCode MatNestGetSubMats(Mat A, PetscInt *M, PetscInt *N, Mat ***mat)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatNestGetSubMats_C", (Mat, PetscInt *, PetscInt *, Mat ***), (A, M, N, mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Report the block dimensions (nr x nc) of the nest */
static PetscErrorCode MatNestGetSize_Nest(Mat A, PetscInt *M, PetscInt *N)
{
  Mat_Nest *bA = (Mat_Nest *)A->data;

  PetscFunctionBegin;
  if (M) *M = bA->nr;
  if (N) *N = bA->nc;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestGetSize - Returns the size of the `MATNEST` matrix.

  Not Collective

  Input Parameter:
. A - `MATNEST` matrix

  Output Parameters:
+ M - number of rows in the nested mat
- N - number of cols in the nested mat

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestGetLocalISs()`,
          `MatNestGetISs()`
@*/
PetscErrorCode MatNestGetSize(Mat A, PetscInt *M, PetscInt *N)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatNestGetSize_C", (Mat, PetscInt *, PetscInt *), (A, M, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Copy the global row/column ISs into caller-provided arrays (borrowed refs) */
static PetscErrorCode MatNestGetISs_Nest(Mat A, IS rows[], IS cols[])
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (rows)
    for (i = 0; i < vs->nr; i++) rows[i] = vs->isglobal.row[i];
  if (cols)
    for (i = 0; i < vs->nc; i++) cols[i] = vs->isglobal.col[i];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestGetISs - Returns the index sets partitioning the row and column spaces of a `MATNEST`

  Not Collective

  Input Parameter:
. A - `MATNEST` matrix

  Output Parameters:
+ rows - array of row index sets
- cols - array of column index sets

  Level: advanced

  Note:
  The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetLocalISs()`,
          `MatCreateNest()`, `MatNestSetSubMats()`
@*/
PetscErrorCode MatNestGetISs(Mat A, IS rows[], IS cols[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatNestGetISs_C", (Mat, IS[], IS[]), (A, rows, cols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Copy the local row/column ISs into caller-provided arrays (borrowed refs) */
static PetscErrorCode MatNestGetLocalISs_Nest(Mat A, IS rows[], IS cols[])
{
  Mat_Nest *vs = (Mat_Nest *)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (rows)
    for (i = 0; i < vs->nr; i++) rows[i] = vs->islocal.row[i];
  if (cols)
    for (i = 0; i < vs->nc; i++) cols[i] = vs->islocal.col[i];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestGetLocalISs - Returns the index sets partitioning the row and column spaces of a `MATNEST`

  Not Collective

  Input Parameter:
. A - `MATNEST` matrix

  Output Parameters:
+ rows - array of row index sets (or `NULL` to ignore)
- cols - array of column index sets (or `NULL` to ignore)

  Level: advanced

  Note:
  The user must have allocated arrays of the correct size. The reference count is not increased on the returned `IS`s.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatNestGetSize()`, `MatNestGetISs()`, `MatCreateNest()`,
          `MatNestSetSubMats()`, `MatNestSetSubMat()`
@*/
PetscErrorCode MatNestGetLocalISs(Mat A, IS rows[], IS cols[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatNestGetLocalISs_C", (Mat, IS[], IS[]), (A, rows, cols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Select the getvecs implementation: VECNEST uses the nest-aware creator,
   anything else falls back to the default (NULL slot) */
static PetscErrorCode MatNestSetVecType_Nest(Mat A, VecType vtype)
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscStrcmp(vtype, VECNEST, &flg));
  /* In reality, this only distinguishes VECNEST and "other" */
  if (flg) A->ops->getvecs = MatCreateVecs_Nest;
  else A->ops->getvecs = (PetscErrorCode(*)(Mat, Vec *, Vec *))0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatNestSetVecType - Sets the type of `Vec` returned by `MatCreateVecs()`

  Not Collective

  Input Parameters:
+ A     - `MATNEST` matrix
- vtype - `VecType` to use for creating vectors

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateVecs()`, `MatCreateNest()`, `VecType`
@*/
PetscErrorCode MatNestSetVecType(Mat A, VecType vtype)
{
  PetscFunctionBegin;
  PetscTryMethod(A, "MatNestSetVecType_C", (Mat, VecType), (A, vtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Rebuild the nest from scratch: resets previous contents, references the new
   blocks, derives ISs/layouts, and initializes per-block nonzero states */
static PetscErrorCode MatNestSetSubMats_Nest(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[])
{
  Mat_Nest *s = (Mat_Nest *)A->data;
  PetscInt  i, j, m, n, M, N;
  PetscBool cong, isstd, sametype = PETSC_FALSE;
  VecType   vtype, type;

  PetscFunctionBegin;
  PetscCall(MatReset_Nest(A));

  s->nr = nr;
  s->nc = nc;

  /* Create space for submatrices */
  PetscCall(PetscMalloc1(nr, &s->m));
  /* single slab for all nr*nc blocks; s->m[i] are row pointers into it */
  PetscCall(PetscMalloc1(nr * nc, &s->m[0]));
  for (i = 0; i < nr; i++) {
    s->m[i] = s->m[0] + i * nc;
    for (j = 0; j < nc; j++) {
      s->m[i][j] = a[i * nc + j];
      if (a[i * nc + j]) PetscCall(PetscObjectReference((PetscObject)a[i * nc + j]));
    }
  }
  PetscCall(MatGetVecType(A, &vtype));
  PetscCall(PetscStrcmp(vtype, VECSTANDARD, &isstd));
  if (isstd) {
    /* check if all blocks have the same vectype */
    vtype = NULL;
    for (i = 0; i < nr; i++) {
      for (j = 0; j < nc; j++) {
        if (a[i * nc + j]) {
          if (!vtype) { /* first visited block */
            PetscCall(MatGetVecType(a[i * nc + j], &vtype));
            sametype = PETSC_TRUE;
          } else if (sametype) {
            PetscCall(MatGetVecType(a[i * nc + j], &type));
            PetscCall(PetscStrcmp(vtype, type, &sametype));
          }
        }
      }
    }
    if (sametype) { /* propagate vectype */
      PetscCall(MatSetVecType(A, vtype));
    }
  }

  PetscCall(MatSetUp_NestIS_Private(A, nr, is_row, nc, is_col));

  /* lazily-computed per-block row/column lengths; -1 means "not yet known" */
  PetscCall(PetscMalloc1(nr, &s->row_len));
  PetscCall(PetscMalloc1(nc, &s->col_len));
  for (i = 0; i < nr; i++) s->row_len[i] = -1;
  for (j = 0; j < nc; j++) s->col_len[j] = -1;

  PetscCall(PetscCalloc1(nr * nc, &s->nnzstate));
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      if (s->m[i][j]) PetscCall(MatGetNonzeroState(s->m[i][j], &s->nnzstate[i * nc + j]));
    }
  }

  PetscCall(MatNestGetSizes_Private(A, &m, &n, &M, &N));

  PetscCall(PetscLayoutSetSize(A->rmap, M));
  PetscCall(PetscLayoutSetLocalSize(A->rmap, m));
  PetscCall(PetscLayoutSetSize(A->cmap, N));
  PetscCall(PetscLayoutSetLocalSize(A->cmap, n));

  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));

  /* disable operations that are not supported for non-square matrices,
     or matrices for which is_row != is_col */
  PetscCall(MatHasCongruentLayouts(A, &cong));
  if (cong && nr != nc) cong = PETSC_FALSE;
  if (cong) {
    for (i = 0; cong && i < nr; i++) PetscCall(ISEqualUnsorted(s->isglobal.row[i], s->isglobal.col[i], &cong));
  }
  if (!cong) {
    A->ops->missingdiagonal = NULL;
    A->ops->getdiagonal     = NULL;
    A->ops->shift           = NULL;
    A->ops->diagonalset     = NULL;
  }

  PetscCall(PetscCalloc2(nr, &s->left, nc, &s->right));
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatNestSetSubMats - Sets the nested submatrices in a `MATNEST`

  Collective

  Input Parameters:
+ A      - `MATNEST` matrix
. nr     - number of nested row blocks
. is_row - index sets for each nested row block, or `NULL` to make contiguous
. nc     - number of nested column blocks
. is_col - index sets for each nested column block, or `NULL` to make contiguous
- a      - array of nr*nc submatrices, empty submatrices can be passed using `NULL`

  Level: advanced

  Notes:
  This always resets any submatrix information previously set

  In both C and Fortran, `a` must be a row-major order array containing the matrices. See
  `MatCreateNest()` for an example.
1459 1460 .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`, `MatNestGetSubMats()` 1461 @*/ 1462 PetscErrorCode MatNestSetSubMats(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[]) 1463 { 1464 PetscInt i; 1465 1466 PetscFunctionBegin; 1467 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1468 PetscCheck(nr >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of rows cannot be negative"); 1469 if (nr && is_row) { 1470 PetscAssertPointer(is_row, 3); 1471 for (i = 0; i < nr; i++) PetscValidHeaderSpecific(is_row[i], IS_CLASSID, 3); 1472 } 1473 PetscCheck(nc >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Number of columns cannot be negative"); 1474 if (nc && is_col) { 1475 PetscAssertPointer(is_col, 5); 1476 for (i = 0; i < nc; i++) PetscValidHeaderSpecific(is_col[i], IS_CLASSID, 5); 1477 } 1478 if (nr * nc > 0) PetscAssertPointer(a, 6); 1479 PetscUseMethod(A, "MatNestSetSubMats_C", (Mat, PetscInt, const IS[], PetscInt, const IS[], const Mat[]), (A, nr, is_row, nc, is_col, a)); 1480 PetscFunctionReturn(PETSC_SUCCESS); 1481 } 1482 1483 static PetscErrorCode MatNestCreateAggregateL2G_Private(Mat A, PetscInt n, const IS islocal[], const IS isglobal[], PetscBool colflg, ISLocalToGlobalMapping *ltog) 1484 { 1485 PetscBool flg; 1486 PetscInt i, j, m, mi, *ix; 1487 1488 PetscFunctionBegin; 1489 *ltog = NULL; 1490 for (i = 0, m = 0, flg = PETSC_FALSE; i < n; i++) { 1491 if (islocal[i]) { 1492 PetscCall(ISGetLocalSize(islocal[i], &mi)); 1493 flg = PETSC_TRUE; /* We found a non-trivial entry */ 1494 } else { 1495 PetscCall(ISGetLocalSize(isglobal[i], &mi)); 1496 } 1497 m += mi; 1498 } 1499 if (!flg) PetscFunctionReturn(PETSC_SUCCESS); 1500 1501 PetscCall(PetscMalloc1(m, &ix)); 1502 for (i = 0, m = 0; i < n; i++) { 1503 ISLocalToGlobalMapping smap = NULL; 1504 Mat sub = NULL; 1505 PetscSF sf; 1506 PetscLayout map; 1507 const PetscInt *ix2; 1508 1509 if 
(!colflg) { 1510 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1511 } else { 1512 PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub)); 1513 } 1514 if (sub) { 1515 if (!colflg) { 1516 PetscCall(MatGetLocalToGlobalMapping(sub, &smap, NULL)); 1517 } else { 1518 PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &smap)); 1519 } 1520 } 1521 /* 1522 Now we need to extract the monolithic global indices that correspond to the given split global indices. 1523 In many/most cases, we only want MatGetLocalSubMatrix() to work, in which case we only need to know the size of the local spaces. 1524 */ 1525 PetscCall(ISGetIndices(isglobal[i], &ix2)); 1526 if (islocal[i]) { 1527 PetscInt *ilocal, *iremote; 1528 PetscInt mil, nleaves; 1529 1530 PetscCall(ISGetLocalSize(islocal[i], &mi)); 1531 PetscCheck(smap, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing local to global map"); 1532 for (j = 0; j < mi; j++) ix[m + j] = j; 1533 PetscCall(ISLocalToGlobalMappingApply(smap, mi, ix + m, ix + m)); 1534 1535 /* PetscSFSetGraphLayout does not like negative indices */ 1536 PetscCall(PetscMalloc2(mi, &ilocal, mi, &iremote)); 1537 for (j = 0, nleaves = 0; j < mi; j++) { 1538 if (ix[m + j] < 0) continue; 1539 ilocal[nleaves] = j; 1540 iremote[nleaves] = ix[m + j]; 1541 nleaves++; 1542 } 1543 PetscCall(ISGetLocalSize(isglobal[i], &mil)); 1544 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &sf)); 1545 PetscCall(PetscLayoutCreate(PetscObjectComm((PetscObject)A), &map)); 1546 PetscCall(PetscLayoutSetLocalSize(map, mil)); 1547 PetscCall(PetscLayoutSetUp(map)); 1548 PetscCall(PetscSFSetGraphLayout(sf, map, nleaves, ilocal, PETSC_USE_POINTER, iremote)); 1549 PetscCall(PetscLayoutDestroy(&map)); 1550 PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1551 PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ix2, ix + m, MPI_REPLACE)); 1552 PetscCall(PetscSFDestroy(&sf)); 1553 PetscCall(PetscFree2(ilocal, iremote)); 1554 } else { 1555 PetscCall(ISGetLocalSize(isglobal[i], 
&mi)); 1556 for (j = 0; j < mi; j++) ix[m + j] = ix2[i]; 1557 } 1558 PetscCall(ISRestoreIndices(isglobal[i], &ix2)); 1559 m += mi; 1560 } 1561 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, m, ix, PETSC_OWN_POINTER, ltog)); 1562 PetscFunctionReturn(PETSC_SUCCESS); 1563 } 1564 1565 /* If an IS was provided, there is nothing Nest needs to do, otherwise Nest will build a strided IS */ 1566 /* 1567 nprocessors = NP 1568 Nest x^T = ((g_0,g_1,...g_nprocs-1), (h_0,h_1,...h_NP-1)) 1569 proc 0: => (g_0,h_0,) 1570 proc 1: => (g_1,h_1,) 1571 ... 1572 proc nprocs-1: => (g_NP-1,h_NP-1,) 1573 1574 proc 0: proc 1: proc nprocs-1: 1575 is[0] = (0,1,2,...,nlocal(g_0)-1) (0,1,...,nlocal(g_1)-1) (0,1,...,nlocal(g_NP-1)) 1576 1577 proc 0: 1578 is[1] = (nlocal(g_0),nlocal(g_0)+1,...,nlocal(g_0)+nlocal(h_0)-1) 1579 proc 1: 1580 is[1] = (nlocal(g_1),nlocal(g_1)+1,...,nlocal(g_1)+nlocal(h_1)-1) 1581 1582 proc NP-1: 1583 is[1] = (nlocal(g_NP-1),nlocal(g_NP-1)+1,...,nlocal(g_NP-1)+nlocal(h_NP-1)-1) 1584 */ 1585 static PetscErrorCode MatSetUp_NestIS_Private(Mat A, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[]) 1586 { 1587 Mat_Nest *vs = (Mat_Nest *)A->data; 1588 PetscInt i, j, offset, n, nsum, bs; 1589 Mat sub = NULL; 1590 1591 PetscFunctionBegin; 1592 PetscCall(PetscMalloc1(nr, &vs->isglobal.row)); 1593 PetscCall(PetscMalloc1(nc, &vs->isglobal.col)); 1594 if (is_row) { /* valid IS is passed in */ 1595 /* refs on is[] are incremented */ 1596 for (i = 0; i < vs->nr; i++) { 1597 PetscCall(PetscObjectReference((PetscObject)is_row[i])); 1598 1599 vs->isglobal.row[i] = is_row[i]; 1600 } 1601 } else { /* Create the ISs by inspecting sizes of a submatrix in each row */ 1602 nsum = 0; 1603 for (i = 0; i < vs->nr; i++) { /* Add up the local sizes to compute the aggregate offset */ 1604 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1605 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in row %" PetscInt_FMT, 
i); 1606 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1607 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1608 nsum += n; 1609 } 1610 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1611 offset -= nsum; 1612 for (i = 0; i < vs->nr; i++) { 1613 PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub)); 1614 PetscCall(MatGetLocalSize(sub, &n, NULL)); 1615 PetscCall(MatGetBlockSizes(sub, &bs, NULL)); 1616 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.row[i])); 1617 PetscCall(ISSetBlockSize(vs->isglobal.row[i], bs)); 1618 offset += n; 1619 } 1620 } 1621 1622 if (is_col) { /* valid IS is passed in */ 1623 /* refs on is[] are incremented */ 1624 for (j = 0; j < vs->nc; j++) { 1625 PetscCall(PetscObjectReference((PetscObject)is_col[j])); 1626 1627 vs->isglobal.col[j] = is_col[j]; 1628 } 1629 } else { /* Create the ISs by inspecting sizes of a submatrix in each column */ 1630 offset = A->cmap->rstart; 1631 nsum = 0; 1632 for (j = 0; j < vs->nc; j++) { 1633 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1634 PetscCheck(sub, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "No nonzero submatrix in column %" PetscInt_FMT, i); 1635 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1636 PetscCheck(n >= 0, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Sizes have not yet been set for submatrix"); 1637 nsum += n; 1638 } 1639 PetscCallMPI(MPI_Scan(&nsum, &offset, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 1640 offset -= nsum; 1641 for (j = 0; j < vs->nc; j++) { 1642 PetscCall(MatNestFindNonzeroSubMatCol(A, j, &sub)); 1643 PetscCall(MatGetLocalSize(sub, NULL, &n)); 1644 PetscCall(MatGetBlockSizes(sub, NULL, &bs)); 1645 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)sub), n, offset, 1, &vs->isglobal.col[j])); 1646 PetscCall(ISSetBlockSize(vs->isglobal.col[j], bs)); 1647 offset += n; 1648 } 
  }

  /* Set up the local ISs */
  PetscCall(PetscMalloc1(vs->nr, &vs->islocal.row));
  PetscCall(PetscMalloc1(vs->nc, &vs->islocal.col));
  /* local row ISs: contiguous strides, sized from each row's l2g map when a
     block supplies one; otherwise the slot is NULL and contributes no offset */
  for (i = 0, offset = 0; i < vs->nr; i++) {
    IS                     isloc;
    ISLocalToGlobalMapping rmap = NULL;
    PetscInt               nlocal, bs;
    PetscCall(MatNestFindNonzeroSubMatRow(A, i, &sub));
    if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, &rmap, NULL));
    if (rmap) {
      PetscCall(MatGetBlockSizes(sub, &bs, NULL));
      PetscCall(ISLocalToGlobalMappingGetSize(rmap, &nlocal));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc));
      PetscCall(ISSetBlockSize(isloc, bs));
    } else {
      nlocal = 0;
      isloc  = NULL;
    }
    vs->islocal.row[i] = isloc;
    offset += nlocal;
  }
  /* same construction for the local column ISs */
  for (i = 0, offset = 0; i < vs->nc; i++) {
    IS                     isloc;
    ISLocalToGlobalMapping cmap = NULL;
    PetscInt               nlocal, bs;
    PetscCall(MatNestFindNonzeroSubMatCol(A, i, &sub));
    if (sub) PetscCall(MatGetLocalToGlobalMapping(sub, NULL, &cmap));
    if (cmap) {
      PetscCall(MatGetBlockSizes(sub, NULL, &bs));
      PetscCall(ISLocalToGlobalMappingGetSize(cmap, &nlocal));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, offset, 1, &isloc));
      PetscCall(ISSetBlockSize(isloc, bs));
    } else {
      nlocal = 0;
      isloc  = NULL;
    }
    vs->islocal.col[i] = isloc;
    offset += nlocal;
  }

  /* Set up the aggregate ISLocalToGlobalMapping */
  {
    ISLocalToGlobalMapping rmap, cmap;
    PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nr, vs->islocal.row, vs->isglobal.row, PETSC_FALSE, &rmap));
    PetscCall(MatNestCreateAggregateL2G_Private(A, vs->nc, vs->islocal.col, vs->isglobal.col, PETSC_TRUE, &cmap));
    if (rmap && cmap) PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap));
    PetscCall(ISLocalToGlobalMappingDestroy(&rmap));
    PetscCall(ISLocalToGlobalMappingDestroy(&cmap));
  }

  /* debug-only consistency check: each block's sizes must match its ISs */
  if (PetscDefined(USE_DEBUG)) {
    for (i = 0; i < vs->nr; i++) {
      for (j = 0; j < vs->nc; j++) {
        PetscInt m, n, M, N, mi, ni, Mi, Ni;
        Mat      B = vs->m[i][j];
        if (!B) continue;
        PetscCall(MatGetSize(B, &M, &N));
        PetscCall(MatGetLocalSize(B, &m, &n));
        PetscCall(ISGetSize(vs->isglobal.row[i], &Mi));
        PetscCall(ISGetSize(vs->isglobal.col[j], &Ni));
        PetscCall(ISGetLocalSize(vs->isglobal.row[i], &mi));
        PetscCall(ISGetLocalSize(vs->isglobal.col[j], &ni));
        PetscCheck(M == Mi && N == Ni, PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Global sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", M, N, i, j, Mi, Ni);
        PetscCheck(m == mi && n == ni, PetscObjectComm((PetscObject)sub), PETSC_ERR_ARG_INCOMP, "Local sizes (%" PetscInt_FMT ",%" PetscInt_FMT ") of nested submatrix (%" PetscInt_FMT ",%" PetscInt_FMT ") do not agree with space defined by index sets (%" PetscInt_FMT ",%" PetscInt_FMT ")", m, n, i, j, mi, ni);
      }
    }
  }

  /* Set A->assembled if all non-null blocks are currently assembled */
  for (i = 0; i < vs->nr; i++) {
    for (j = 0; j < vs->nc; j++) {
      if (vs->m[i][j] && !vs->m[i][j]->assembled) PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  A->assembled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateNest - Creates a new `MATNEST` matrix containing several nested submatrices, each stored separately

  Collective

  Input Parameters:
+ comm   - Communicator for the new `MATNEST`
. nr     - number of nested row blocks
. is_row - index sets for each nested row block, or `NULL` to make contiguous
. nc     - number of nested column blocks
.
is_col - index sets for each nested column block, or `NULL` to make contiguous
- a      - array of nr*nc submatrices, empty submatrices can be passed using `NULL`

  Output Parameter:
. B - new matrix

  Note:
  In both C and Fortran, `a` must be a row-major order array holding references to the matrices.
  For instance, to represent the matrix
  $\begin{bmatrix} A_{11} & A_{12} \\ A_{21} & A_{22}\end{bmatrix}$
  one should use `Mat a[4]={A11,A12,A21,A22}`.

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `VecCreateNest()`, `DMCreateMatrix()`, `MatNestSetSubMat()`,
          `MatNestGetSubMat()`, `MatNestGetLocalISs()`, `MatNestGetSize()`,
          `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()`
@*/
PetscErrorCode MatCreateNest(MPI_Comm comm, PetscInt nr, const IS is_row[], PetscInt nc, const IS is_col[], const Mat a[], Mat *B)
{
  Mat A;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetType(A, MATNEST));
  /* submatrices carry their own storage, so the nest matrix itself needs no preallocation step */
  A->preallocated = PETSC_TRUE;
  PetscCall(MatNestSetSubMats(A, nr, is_row, nc, is_col, a));
  *B = A;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Fast sequential conversion of a MATNEST whose blocks are all MATSEQAIJ (or virtual
   (Hermitian-)transposes of MATSEQAIJ) into one MATSEQAIJ, by merging the blocks' CSR
   structures directly. NOTE(review): this path assumes the nest's row/column index sets
   are contiguous stride-1 ISs -- the caller (MatConvert_Nest_AIJ) verifies that before
   dispatching here. */
static PetscErrorCode MatConvert_Nest_SeqAIJ_fast(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat_Nest     *nest = (Mat_Nest *)A->data;
  Mat          *trans; /* materialized copies of virtual-transpose blocks; NULL for plain blocks */
  PetscScalar **avv;   /* per-block value arrays */
  PetscScalar  *vv;    /* merged value array of the result */
  PetscInt    **aii, **ajj; /* per-block CSR row pointers / column indices (NULL marks empty block) */
  PetscInt     *ii, *jj, *ci;
  PetscInt      nr, nc, nnz, i, j;
  PetscBool     done;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &nr, &nc));
  if (reuse == MAT_REUSE_MATRIX) {
    PetscInt rnr;

    /* reuse the existing CSR arrays of *newmat; sizes must match the nest exactly */
    PetscCall(MatGetRowIJ(*newmat, 0, PETSC_FALSE, PETSC_FALSE, &rnr, (const PetscInt **)&ii, (const PetscInt **)&jj, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "MatGetRowIJ");
    PetscCheck(rnr == nr, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of rows");
    PetscCall(MatSeqAIJGetArray(*newmat, &vv));
  }
  /* extract CSR for nested SeqAIJ matrices */
  nnz = 0;
  PetscCall(PetscCalloc4(nest->nr * nest->nc, &aii, nest->nr * nest->nc, &ajj, nest->nr * nest->nc, &avv, nest->nr * nest->nc, &trans));
  for (i = 0; i < nest->nr; ++i) {
    for (j = 0; j < nest->nc; ++j) {
      Mat B = nest->m[i][j];
      if (B) {
        PetscScalar *naa;
        PetscInt    *nii, *njj, nnr;
        PetscBool    istrans;

        /* virtual (Hermitian-)transpose blocks are materialized so their CSR can be read directly */
        PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans));
        if (istrans) {
          Mat Bt;

          PetscCall(MatTransposeGetMat(B, &Bt));
          PetscCall(MatTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j]));
          B = trans[i * nest->nc + j];
        } else {
          PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans));
          if (istrans) {
            Mat Bt;

            PetscCall(MatHermitianTransposeGetMat(B, &Bt));
            PetscCall(MatHermitianTranspose(Bt, MAT_INITIAL_MATRIX, &trans[i * nest->nc + j]));
            B = trans[i * nest->nc + j];
          }
        }
        PetscCall(MatGetRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&nii, (const PetscInt **)&njj, &done));
        PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatGetRowIJ");
        PetscCall(MatSeqAIJGetArray(B, &naa));
        nnz += nii[nnr]; /* total nonzeros accumulate over all blocks */

        aii[i * nest->nc + j] = nii;
        ajj[i * nest->nc + j] = njj;
        avv[i * nest->nc + j] = naa;
      }
    }
  }
  if (reuse != MAT_REUSE_MATRIX) {
    PetscCall(PetscMalloc1(nr + 1, &ii));
    PetscCall(PetscMalloc1(nnz, &jj));
    PetscCall(PetscMalloc1(nnz, &vv));
  } else {
    PetscCheck(nnz == ii[nr], PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "Cannot reuse matrix, wrong number of nonzeros");
  }

  /* new row pointer: accumulate each block's per-row lengths into the merged rows */
  PetscCall(PetscArrayzero(ii, nr + 1));
  for (i = 0; i < nest->nr; ++i) {
    PetscInt ncr, rst;

    PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL));
    PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr));
    for (j = 0; j < nest->nc; ++j) {
      if (aii[i * nest->nc + j]) {
        PetscInt *nii = aii[i * nest->nc + j];
        PetscInt  ir;

        for (ir = rst; ir < ncr + rst; ++ir) {
          ii[ir + 1] += nii[1] - nii[0]; /* row length of this block's row */
          nii++;
        }
      }
    }
  }
  for (i = 0; i < nr; i++) ii[i + 1] += ii[i]; /* prefix-sum row lengths into offsets */

  /* construct CSR for the new matrix; ci[] tracks how much of each merged row is filled so far */
  PetscCall(PetscCalloc1(nr, &ci));
  for (i = 0; i < nest->nr; ++i) {
    PetscInt ncr, rst;

    PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &rst, NULL));
    PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr));
    for (j = 0; j < nest->nc; ++j) {
      if (aii[i * nest->nc + j]) {
        PetscScalar *nvv = avv[i * nest->nc + j];
        PetscInt    *nii = aii[i * nest->nc + j];
        PetscInt    *njj = ajj[i * nest->nc + j];
        PetscInt     ir, cst;

        /* cst shifts the block's local column indices to global column indices */
        PetscCall(ISStrideGetInfo(nest->isglobal.col[j], &cst, NULL));
        for (ir = rst; ir < ncr + rst; ++ir) {
          PetscInt ij, rsize = nii[1] - nii[0], ist = ii[ir] + ci[ir];

          for (ij = 0; ij < rsize; ij++) {
            jj[ist + ij] = *njj + cst;
            vv[ist + ij] = *nvv;
            njj++;
            nvv++;
          }
          ci[ir] += rsize;
          nii++;
        }
      }
    }
  }
  PetscCall(PetscFree(ci));

  /* restore info */
  for (i = 0; i < nest->nr; ++i) {
    for (j = 0; j < nest->nc; ++j) {
      Mat B = nest->m[i][j];
      if (B) {
        PetscInt nnr = 0, k = i * nest->nc + j;

        /* restore on the materialized transpose (and destroy it) when one was created */
        B = (trans[k] ?
             trans[k] : B);
        PetscCall(MatRestoreRowIJ(B, 0, PETSC_FALSE, PETSC_FALSE, &nnr, (const PetscInt **)&aii[k], (const PetscInt **)&ajj[k], &done));
        PetscCheck(done, PetscObjectComm((PetscObject)B), PETSC_ERR_PLIB, "MatRestoreRowIJ");
        PetscCall(MatSeqAIJRestoreArray(B, &avv[k]));
        PetscCall(MatDestroy(&trans[k]));
      }
    }
  }
  PetscCall(PetscFree4(aii, ajj, avv, trans));

  /* finalize newmat */
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, newmat));
  } else if (reuse == MAT_INPLACE_MATRIX) {
    Mat B;

    /* replace A's header so A itself becomes the converted SeqAIJ matrix */
    PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), nr, nc, ii, jj, vv, &B));
    PetscCall(MatHeaderReplace(A, &B));
  }
  PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY));
  {
    /* hand ownership of ii/jj/vv to the new matrix so it frees them on destroy
       (MatCreateSeqAIJWithArrays alone would leave ownership with the caller) */
    Mat_SeqAIJ *a = (Mat_SeqAIJ *)((*newmat)->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Computes Y += a*X, where X is a MATNEST and Y is an already-preallocated matrix with the
   same global layout. Entries are inserted row by row with ADD_VALUES using global indices.
   Not scalable in the number of columns: each column IS is gathered onto every rank via
   ISAllGather() (see the comment inside the loop). */
PETSC_INTERN PetscErrorCode MatAXPY_Dense_Nest(Mat Y, PetscScalar a, Mat X)
{
  Mat_Nest *nest = (Mat_Nest *)X->data;
  PetscInt  i, j, k, rstart;
  PetscBool flg;

  PetscFunctionBegin;
  /* Fill by row */
  for (j = 0; j < nest->nc; ++j) {
    /* Using global column indices and ISAllGather() is not scalable. */
    IS              bNis;
    PetscInt        bN;
    const PetscInt *bNindices;
    PetscCall(ISAllGather(nest->isglobal.col[j], &bNis));
    PetscCall(ISGetSize(bNis, &bN));
    PetscCall(ISGetIndices(bNis, &bNindices));
    for (i = 0; i < nest->nr; ++i) {
      Mat             B = nest->m[i][j], D = NULL;
      PetscInt        bm, br;
      const PetscInt *bmindices;
      if (!B) continue;
      /* materialize virtual (Hermitian-)transpose blocks so MatGetRow() can be used */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, ""));
      if (flg) {
        PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /* SBAIJ blocks store only one triangle; convert to BAIJ so MatGetRow() returns full rows */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
      if (flg) {
        if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D));
        else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm));
      PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices));
      PetscCall(MatGetOwnershipRange(B, &rstart, NULL));
      for (br = 0; br < bm; ++br) {
        PetscInt           row = bmindices[br], brncols, *cols;
        const PetscInt    *brcols;
        const PetscScalar *brcoldata;
        PetscScalar       *vals = NULL;
        PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, &brcoldata));
        PetscCall(PetscMalloc1(brncols, &cols));
        /* translate the block's local column indices to global indices of Y */
        for (k = 0; k < brncols; k++) cols[k] = bNindices[brcols[k]];
        /*
          Nest blocks are required to be nonoverlapping -- otherwise nest and monolithic index layouts wouldn't match.
          Thus, we could use INSERT_VALUES, but I prefer ADD_VALUES.
         */
        if (a != 1.0) {
          PetscCall(PetscMalloc1(brncols, &vals));
          for (k = 0; k < brncols; k++) vals[k] = a * brcoldata[k];
          PetscCall(MatSetValues(Y, 1, &row, brncols, cols, vals, ADD_VALUES));
          PetscCall(PetscFree(vals));
        } else {
          PetscCall(MatSetValues(Y, 1, &row, brncols, cols, brcoldata, ADD_VALUES));
        }
        PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, &brcoldata));
        PetscCall(PetscFree(cols));
      }
      if (D) PetscCall(MatDestroy(&D));
      PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices));
    }
    PetscCall(ISRestoreIndices(bNis, &bNindices));
    PetscCall(ISDestroy(&bNis));
  }
  PetscCall(MatAssemblyBegin(Y, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Y, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a MATNEST to (Seq/MPI)AIJ: compute a preallocation, then add the entries via
   MatAXPY_Dense_Nest(). On one rank, a direct CSR-merge fast path is tried first. */
static PetscErrorCode MatConvert_Nest_AIJ(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat_Nest   *nest = (Mat_Nest *)A->data;
  PetscInt    m, n, M, N, i, j, k, *dnnz, *onnz = NULL, rstart, cstart, cend;
  PetscMPIInt size;
  Mat         C;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  if (size == 1) { /* look for a special case with SeqAIJ matrices and strided-1, contiguous, blocks */
    PetscInt  nf;
    PetscBool fast;

    PetscCall(PetscStrcmp(newtype, MATAIJ, &fast));
    if (!fast) PetscCall(PetscStrcmp(newtype, MATSEQAIJ, &fast));
    /* every nonempty block must be SeqAIJ, or a virtual (Hermitian-)transpose of one */
    for (i = 0; i < nest->nr && fast; ++i) {
      for (j = 0; j < nest->nc && fast; ++j) {
        Mat B = nest->m[i][j];
        if (B) {
          PetscCall(PetscObjectTypeCompare((PetscObject)B, MATSEQAIJ, &fast));
          if (!fast) {
            PetscBool istrans;

            PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &istrans));
            if (istrans) {
              Mat Bt;

              PetscCall(MatTransposeGetMat(B, &Bt));
              PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast));
            } else {
              PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHERMITIANTRANSPOSEVIRTUAL, &istrans));
              if (istrans) {
                Mat Bt;

                PetscCall(MatHermitianTransposeGetMat(B, &Bt));
                PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast));
              }
            }
          }
        }
      }
    }
    /* row ISs must be contiguous stride-1 blocks covering [0, M) in order */
    for (i = 0, nf = 0; i < nest->nr && fast; ++i) {
      PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.row[i], ISSTRIDE, &fast));
      if (fast) {
        PetscInt f, s;

        PetscCall(ISStrideGetInfo(nest->isglobal.row[i], &f, &s));
        if (f != nf || s != 1) {
          fast = PETSC_FALSE;
        } else {
          PetscCall(ISGetSize(nest->isglobal.row[i], &f));
          nf += f;
        }
      }
    }
    /* same requirement for the column ISs */
    for (i = 0, nf = 0; i < nest->nc && fast; ++i) {
      PetscCall(PetscObjectTypeCompare((PetscObject)nest->isglobal.col[i], ISSTRIDE, &fast));
      if (fast) {
        PetscInt f, s;

        PetscCall(ISStrideGetInfo(nest->isglobal.col[i], &f, &s));
        if (f != nf || s != 1) {
          fast = PETSC_FALSE;
        } else {
          PetscCall(ISGetSize(nest->isglobal.col[i], &f));
          nf += f;
        }
      }
    }
    if (fast) {
      PetscCall(MatConvert_Nest_SeqAIJ_fast(A, newtype, reuse, newmat));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, &n));
  PetscCall(MatGetOwnershipRangeColumn(A, &cstart, &cend));
  if (reuse == MAT_REUSE_MATRIX) C = *newmat;
  else {
    PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
    PetscCall(MatSetType(C, newtype));
    PetscCall(MatSetSizes(C, m, n, M, N));
  }
  /* single allocation holds both diagonal (dnnz) and off-diagonal (onnz) per-row counts */
  PetscCall(PetscMalloc1(2 * m, &dnnz));
  if (m) {
    onnz = dnnz + m;
    for (k = 0; k < m; k++) {
      dnnz[k] = 0;
      onnz[k] = 0;
    }
  }
  for (j = 0; j < nest->nc; ++j) {
    IS              bNis;
    PetscInt        bN;
    const PetscInt *bNindices;
    PetscBool       flg;
    /* Using global column indices and ISAllGather() is not scalable. */
    PetscCall(ISAllGather(nest->isglobal.col[j], &bNis));
    PetscCall(ISGetSize(bNis, &bN));
    PetscCall(ISGetIndices(bNis, &bNindices));
    for (i = 0; i < nest->nr; ++i) {
      PetscSF         bmsf;
      PetscSFNode    *iremote;
      Mat             B = nest->m[i][j], D = NULL;
      PetscInt        bm, *sub_dnnz, *sub_onnz, br;
      const PetscInt *bmindices;
      if (!B) continue;
      PetscCall(ISGetLocalSize(nest->isglobal.row[i], &bm));
      PetscCall(ISGetIndices(nest->isglobal.row[i], &bmindices));
      PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &bmsf));
      PetscCall(PetscMalloc1(bm, &iremote));
      PetscCall(PetscMalloc1(bm, &sub_dnnz));
      PetscCall(PetscMalloc1(bm, &sub_onnz));
      for (k = 0; k < bm; ++k) {
        sub_dnnz[k] = 0;
        sub_onnz[k] = 0;
      }
      /* materialize virtual (Hermitian-)transpose blocks so their rows can be inspected */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATTRANSPOSEVIRTUAL, MATHERMITIANTRANSPOSEVIRTUAL, ""));
      if (flg) {
        PetscTryMethod(B, "MatTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscTryMethod(B, "MatHermitianTransposeGetMat_C", (Mat, Mat *), (B, &D));
        PetscCall(MatConvert(B, ((PetscObject)D)->type_name, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /* SBAIJ blocks store only one triangle; convert to BAIJ to see full rows */
      PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
      if (flg) {
        if (D) PetscCall(MatConvert(D, MATBAIJ, MAT_INPLACE_MATRIX, &D));
        else PetscCall(MatConvert(B, MATBAIJ, MAT_INITIAL_MATRIX, &D));
        B = D;
      }
      /*
        Locate the owners for all of the locally-owned global row indices for this row block.
        These determine the roots of PetscSF used to communicate preallocation data to row owners.
        The roots correspond to the dnnz and onnz entries; thus, there are two roots per row.
       */
      PetscCall(MatGetOwnershipRange(B, &rstart, NULL));
      for (br = 0; br < bm; ++br) {
        PetscInt        row = bmindices[br], brncols, col;
        const PetscInt *brcols;
        PetscInt        rowrel = 0; /* row's relative index on its owner rank */
        PetscMPIInt     rowowner = 0;
        PetscCall(PetscLayoutFindOwnerIndex(A->rmap, row, &rowowner, &rowrel));
        /* how many roots */
        iremote[br].rank  = rowowner;
        iremote[br].index = rowrel; /* edge from bmdnnz to dnnz */
        /* get nonzero pattern */
        PetscCall(MatGetRow(B, br + rstart, &brncols, &brcols, NULL));
        for (k = 0; k < brncols; k++) {
          /* classify each nonzero as diagonal-block (owner's column range) or off-diagonal */
          col = bNindices[brcols[k]];
          if (col >= A->cmap->range[rowowner] && col < A->cmap->range[rowowner + 1]) {
            sub_dnnz[br]++;
          } else {
            sub_onnz[br]++;
          }
        }
        PetscCall(MatRestoreRow(B, br + rstart, &brncols, &brcols, NULL));
      }
      if (D) PetscCall(MatDestroy(&D));
      PetscCall(ISRestoreIndices(nest->isglobal.row[i], &bmindices));
      /* bsf will have to take care of disposing of bedges. */
      PetscCall(PetscSFSetGraph(bmsf, m, bm, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
      /* sum the per-block counts onto the owning rank's dnnz/onnz arrays */
      PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM));
      PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_dnnz, dnnz, MPI_SUM));
      PetscCall(PetscSFReduceBegin(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM));
      PetscCall(PetscSFReduceEnd(bmsf, MPIU_INT, sub_onnz, onnz, MPI_SUM));
      PetscCall(PetscFree(sub_dnnz));
      PetscCall(PetscFree(sub_onnz));
      PetscCall(PetscSFDestroy(&bmsf));
    }
    PetscCall(ISRestoreIndices(bNis, &bNindices));
    PetscCall(ISDestroy(&bNis));
  }
  /* Resize preallocation if overestimated */
  for (i = 0; i < m; i++) {
    dnnz[i] = PetscMin(dnnz[i], A->cmap->n);
    onnz[i] = PetscMin(onnz[i], A->cmap->N - A->cmap->n);
  }
  /* only the call matching C's actual type takes effect; the other is a no-op */
  PetscCall(MatSeqAIJSetPreallocation(C, 0, dnnz));
  PetscCall(MatMPIAIJSetPreallocation(C, 0, dnnz, 0, onnz));
  PetscCall(PetscFree(dnnz));
  PetscCall(MatAXPY_Dense_Nest(C, 1.0, A));
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &C));
  } else *newmat = C;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a MATNEST to (Seq/MPI)Dense by zeroing/creating a dense matrix of the same
   layout and adding the nest's entries via MatAXPY_Dense_Nest(). */
static PetscErrorCode MatConvert_Nest_Dense(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat      B;
  PetscInt m, n, M, N;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, &n));
  if (reuse == MAT_REUSE_MATRIX) {
    B = *newmat;
    PetscCall(MatZeroEntries(B));
  } else {
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), m, PETSC_DECIDE, M, N, NULL, &B));
  }
  PetscCall(MatAXPY_Dense_Nest(B, 1.0, A));
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else if (reuse == MAT_INITIAL_MATRIX) *newmat = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Report whether the nest supports an operation; for the mult variants every nonempty
   block must support the corresponding "add" operation, since MatMult_Nest et al. are
   built from MatMultAdd on the blocks. */
static PetscErrorCode MatHasOperation_Nest(Mat mat, MatOperation op, PetscBool *has)
{
  Mat_Nest    *bA =
                 (Mat_Nest *)mat->data;
  MatOperation opAdd;
  PetscInt     i, j, nr = bA->nr, nc = bA->nc;
  PetscBool    flg;
  PetscFunctionBegin;

  *has = PETSC_FALSE;
  if (op == MATOP_MULT || op == MATOP_MULT_ADD || op == MATOP_MULT_TRANSPOSE || op == MATOP_MULT_TRANSPOSE_ADD) {
    /* the nest-level mult kernels are assembled from the blocks' (transpose-)multadd */
    opAdd = (op == MATOP_MULT || op == MATOP_MULT_ADD ? MATOP_MULT_ADD : MATOP_MULT_TRANSPOSE_ADD);
    for (j = 0; j < nc; j++) {
      for (i = 0; i < nr; i++) {
        if (!bA->m[i][j]) continue;
        PetscCall(MatHasOperation(bA->m[i][j], opAdd, &flg));
        if (!flg) PetscFunctionReturn(PETSC_SUCCESS); /* one unsupported block disables the op */
      }
    }
  }
  if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATNEST - "nest" - Matrix type consisting of nested submatrices, each stored separately.

  Level: intermediate

  Notes:
  This matrix type permits scalable use of `PCFIELDSPLIT` and avoids the large memory costs of extracting submatrices.
  It allows the use of symmetric and block formats for parts of multi-physics simulations.
  It is usually used with `DMCOMPOSITE` and `DMCreateMatrix()`

  Each of the submatrices lives on the same MPI communicator as the original nest matrix (though they can have zero
  rows/columns on some processes.) Thus this is not meant for cases where the submatrices live on far fewer processes
  than the nest matrix.

.seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatCreate()`, `MatType`, `MatCreateNest()`, `MatNestSetSubMat()`, `MatNestGetSubMat()`,
          `VecCreateNest()`, `DMCreateMatrix()`, `DMCOMPOSITE`, `MatNestSetVecType()`, `MatNestGetLocalISs()`,
          `MatNestGetISs()`, `MatNestSetSubMats()`, `MatNestGetSubMats()`
M*/
PETSC_EXTERN PetscErrorCode MatCreate_Nest(Mat A)
{
  Mat_Nest *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  A->data = (void *)s;

  /* nr/nc of -1 mark the block layout as not yet set -- presumably until
     MatNestSetSubMats() is called; TODO(review): confirm against that setter */
  s->nr            = -1;
  s->nc            = -1;
  s->m             = NULL;
  s->splitassembly = PETSC_FALSE;

  PetscCall(PetscMemzero(A->ops, sizeof(*A->ops)));

  A->ops->mult                      = MatMult_Nest;
  A->ops->multadd                   = MatMultAdd_Nest;
  A->ops->multtranspose             = MatMultTranspose_Nest;
  A->ops->multtransposeadd          = MatMultTransposeAdd_Nest;
  A->ops->transpose                 = MatTranspose_Nest;
  A->ops->multhermitiantranspose    = MatMultHermitianTranspose_Nest;
  A->ops->multhermitiantransposeadd = MatMultHermitianTransposeAdd_Nest;
  A->ops->assemblybegin             = MatAssemblyBegin_Nest;
  A->ops->assemblyend               = MatAssemblyEnd_Nest;
  A->ops->zeroentries               = MatZeroEntries_Nest;
  A->ops->copy                      = MatCopy_Nest;
  A->ops->axpy                      = MatAXPY_Nest;
  A->ops->duplicate                 = MatDuplicate_Nest;
  A->ops->createsubmatrix           = MatCreateSubMatrix_Nest;
  A->ops->destroy                   = MatDestroy_Nest;
  A->ops->view                      = MatView_Nest;
  A->ops->getvecs                   = NULL; /* Use VECNEST by calling MatNestSetVecType(A,VECNEST) */
  A->ops->getlocalsubmatrix         = MatGetLocalSubMatrix_Nest;
  A->ops->restorelocalsubmatrix     = MatRestoreLocalSubMatrix_Nest;
  A->ops->getdiagonal               = MatGetDiagonal_Nest;
  A->ops->diagonalscale             = MatDiagonalScale_Nest;
  A->ops->scale                     = MatScale_Nest;
  A->ops->shift                     = MatShift_Nest;
  A->ops->diagonalset               = MatDiagonalSet_Nest;
  A->ops->setrandom                 = MatSetRandom_Nest;
  A->ops->hasoperation              = MatHasOperation_Nest;
  A->ops->missingdiagonal           = MatMissingDiagonal_Nest;

  A->spptr     = NULL;
  A->assembled = PETSC_FALSE;

  /* expose Nest api's */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMat_C", MatNestGetSubMat_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMat_C", MatNestSetSubMat_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSubMats_C", MatNestGetSubMats_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetSize_C", MatNestGetSize_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetISs_C", MatNestGetISs_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestGetLocalISs_C", MatNestGetLocalISs_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetVecType_C", MatNestSetVecType_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatNestSetSubMats_C", MatNestSetSubMats_Nest));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpiaij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqaij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_aij_C", MatConvert_Nest_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_is_C", MatConvert_Nest_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_mpidense_C", MatConvert_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_nest_seqdense_C", MatConvert_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_seqdense_C", MatProductSetFromOptions_Nest_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_nest_mpidense_C", MatProductSetFromOptions_Nest_Dense));

  PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATNEST));
  PetscFunctionReturn(PETSC_SUCCESS);
}