/*
  Defines the basic matrix operations for the AIJ (compressed row)
  matrix storage format.
*/

#include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
#include <petscblaslapack.h>
#include <petscbt.h>
#include <petsc/private/kernels/blocktranspose.h>

/* defines MatSetValues_Seq_Hash(), MatAssemblyEnd_Seq_Hash(), MatSetUp_Seq_Hash() */
#define TYPE AIJ
#define TYPE_BS
#include "../src/mat/impls/aij/seq/seqhashmatsetvalues.h"
#include "../src/mat/impls/aij/seq/seqhashmat.h"
#undef TYPE
#undef TYPE_BS

/* Processes the -mat_seqaij_type option and, if given, switches the matrix to the requested SeqAIJ subtype */
PetscErrorCode MatSeqAIJSetTypeFromOptions(Mat A)
{
  PetscBool flg;
  char      type[256];

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)A);
  PetscCall(PetscOptionsFList("-mat_seqaij_type", "Matrix SeqAIJ type", "MatSeqAIJSetType", MatSeqAIJList, "seqaij", type, 256, &flg));
  if (flg) PetscCall(MatSeqAIJSetType(A, type));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Computes a per-column reduction (norm, sum, or mean) over all stored entries.
  reductions[] must have length n (the number of columns); it is zeroed here first.
  Note: the MEAN variants divide by m, the number of rows, not by the per-column entry count.
*/
PetscErrorCode MatGetColumnReductions_SeqAIJ(Mat A, PetscInt type, PetscReal *reductions)
{
  PetscInt    i, m, n;
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &m, &n));
  PetscCall(PetscArrayzero(reductions, n));
  /* aij->i[m] is the total number of stored nonzeros; aij->j[i] maps entry i to its column */
  if (type == NORM_2) {
    /* accumulate |a|^2 per column; square roots are applied in the second pass below */
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscAbsScalar(aij->a[i] * aij->a[i]);
  } else if (type == NORM_1) {
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscAbsScalar(aij->a[i]);
  } else if (type == NORM_INFINITY) {
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] = PetscMax(PetscAbsScalar(aij->a[i]), reductions[aij->j[i]]);
  } else if (type == REDUCTION_SUM_REALPART || type == REDUCTION_MEAN_REALPART) {
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscRealPart(aij->a[i]);
  } else if (type == REDUCTION_SUM_IMAGINARYPART || type == REDUCTION_MEAN_IMAGINARYPART) {
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscImaginaryPart(aij->a[i]);
  } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Unknown reduction type");

  /* finalize: take square roots for the 2-norm, divide by row count for the means */
  if (type == NORM_2) {
    for (i = 0; i < n; i++) reductions[i] = PetscSqrtReal(reductions[i]);
  } else if (type == REDUCTION_MEAN_REALPART || type == REDUCTION_MEAN_IMAGINARYPART) {
    for (i = 0; i < n; i++) reductions[i] /= m;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Builds an index set of the rows that contain at least one entry outside the bs x bs
  diagonal block the row belongs to. Two passes: first count, then fill.
  Relies on column indices within each row being sorted (first/last entry checked only).
*/
PetscErrorCode MatFindOffBlockDiagonalEntries_SeqAIJ(Mat A, IS *is)
{
  Mat_SeqAIJ     *a = (Mat_SeqAIJ *)A->data;
  PetscInt        i, m = A->rmap->n, cnt = 0, bs = A->rmap->bs;
  const PetscInt *jj = a->j, *ii = a->i;
  PetscInt       *rows;

  PetscFunctionBegin;
  for (i = 0; i < m; i++) {
    /* non-empty row whose first entry is left of the block or last entry right of the block */
    if ((ii[i] != ii[i + 1]) && ((jj[ii[i]] < bs * (i / bs)) || (jj[ii[i + 1] - 1] > bs * ((i + bs) / bs) - 1))) cnt++;
  }
  PetscCall(PetscMalloc1(cnt, &rows));
  cnt = 0;
  for (i = 0; i < m; i++) {
    if ((ii[i] != ii[i + 1]) && ((jj[ii[i]] < bs * (i / bs)) || (jj[ii[i + 1] - 1] > bs * ((i + bs) / bs) - 1))) {
      rows[cnt] = i;
      cnt++;
    }
  }
  /* IS takes ownership of rows[] (PETSC_OWN_POINTER) */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, cnt, rows, PETSC_OWN_POINTER, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Returns (in *zrows, malloc'ed; caller takes ownership) the rows whose diagonal entry
  is either not present in the nonzero structure or stored with value exactly 0.0.
*/
PetscErrorCode MatFindZeroDiagonals_SeqAIJ_Private(Mat A, PetscInt *nrows, PetscInt **zrows)
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  const MatScalar *aa;
  PetscInt         i, m = A->rmap->n, cnt = 0;
  const PetscInt  *ii = a->i, *jj = a->j, *diag;
  PetscInt        *rows;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  PetscCall(MatMarkDiagonal_SeqAIJ(A)); /* ensures a->diag[] is populated */
  diag = a->diag;
  for (i = 0; i < m; i++) {
    /* diag[i] out of the row's range or not pointing at column i means the diagonal is missing */
    if ((diag[i] >= ii[i + 1]) || (jj[diag[i]] != i) || (aa[diag[i]] == 0.0)) cnt++;
  }
  PetscCall(PetscMalloc1(cnt, &rows));
  cnt = 0;
  for (i = 0; i < m; i++) {
    if ((diag[i] >= ii[i + 1]) || (jj[diag[i]] != i) || (aa[diag[i]] == 0.0)) rows[cnt++] = i;
  }
  *nrows = cnt;
  *zrows = rows;
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Wraps MatFindZeroDiagonals_SeqAIJ_Private(), packaging the zero-diagonal rows into an IS */
PetscErrorCode MatFindZeroDiagonals_SeqAIJ(Mat A, IS *zrows)
{
  PetscInt nrows, *rows;

  PetscFunctionBegin;
  *zrows = NULL;
  PetscCall(MatFindZeroDiagonals_SeqAIJ_Private(A, &nrows, &rows));
  /* the IS takes ownership of rows[] */
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), nrows, rows, PETSC_OWN_POINTER, zrows));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Builds an IS of the rows that have at least one stored nonzero value.
  If every row is nonzero, *keptrows is left NULL (meaning "all rows kept").
*/
PetscErrorCode MatFindNonzeroRows_SeqAIJ(Mat A, IS *keptrows)
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  const MatScalar *aa;
  PetscInt         m = A->rmap->n, cnt = 0;
  const PetscInt  *ii;
  PetscInt         n, i, j, *rows;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  *keptrows = NULL;
  ii        = a->i;
  /* first pass: cnt = number of rows that are structurally empty or all-zero valued */
  for (i = 0; i < m; i++) {
    n = ii[i + 1] - ii[i];
    if (!n) {
      cnt++;
      goto ok1;
    }
    for (j = ii[i]; j < ii[i + 1]; j++) {
      if (aa[j] != 0.0) goto ok1;
    }
    cnt++;
  ok1:;
  }
  /* no zero rows at all: keep *keptrows == NULL and return early */
  if (!cnt) {
    PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* second pass: collect the indices of the rows with some nonzero value */
  PetscCall(PetscMalloc1(A->rmap->n - cnt, &rows));
  cnt = 0;
  for (i = 0; i < m; i++) {
    n = ii[i + 1] - ii[i];
    if (!n) continue;
    for (j = ii[i]; j < ii[i + 1]; j++) {
      if (aa[j] != 0.0) {
        rows[cnt++] = i;
        break;
      }
    }
  }
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, cnt, rows, PETSC_OWN_POINTER, keptrows));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Sets or adds vector D onto the diagonal of Y. Takes the fast in-place path only when
  the matrix is assembled and every diagonal entry is present in the nonzero structure;
  otherwise falls back to MatDiagonalSet_Default() (which may insert new entries).
*/
PetscErrorCode MatDiagonalSet_SeqAIJ(Mat Y, Vec D, InsertMode is)
{
  Mat_SeqAIJ        *aij = (Mat_SeqAIJ *)Y->data;
  PetscInt           i, m = Y->rmap->n;
  const PetscInt    *diag;
  MatScalar         *aa;
  const PetscScalar *v;
  PetscBool          missing;

  PetscFunctionBegin;
  if (Y->assembled) {
    PetscCall(MatMissingDiagonal_SeqAIJ(Y, &missing, NULL));
    if (!missing) {
      diag = aij->diag;
      PetscCall(VecGetArrayRead(D, &v));
      PetscCall(MatSeqAIJGetArray(Y, &aa));
      if (is == INSERT_VALUES) {
        for (i = 0; i < m; i++) aa[diag[i]] = v[i];
      } else {
        for (i = 0; i < m; i++) aa[diag[i]] += v[i];
      }
      PetscCall(MatSeqAIJRestoreArray(Y, &aa));
      PetscCall(VecRestoreArrayRead(D, &v));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    /* diagonal structure will change; cached diag info is stale */
    PetscCall(MatSeqAIJInvalidateDiagonal(Y));
  }
  PetscCall(MatDiagonalSet_Default(Y, D, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Returns row-oriented compressed-row index arrays. Allocates copies only when a
  symmetrized structure or a 1-based (oshift == 1) shift is requested; otherwise
  hands out pointers into the matrix's own i/j arrays (do not free those --
  MatRestoreRowIJ_SeqAIJ() below knows which case applied).
*/
PetscErrorCode MatGetRowIJ_SeqAIJ(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *m, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, ishift;

  PetscFunctionBegin;
  if (m) *m = A->rmap->n;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);
  ishift = 0;
  if (symmetric && A->structurally_symmetric != PETSC_BOOL3_TRUE) {
    PetscCall(MatToSymmetricIJ_SeqAIJ(A->rmap->n, a->i, a->j, PETSC_TRUE, ishift, oshift, (PetscInt **)ia, (PetscInt **)ja));
  } else if (oshift == 1) {
    PetscInt *tia;
    PetscInt  nz = a->i[A->rmap->n];
    /* malloc space and add 1 to i and j indices */
    PetscCall(PetscMalloc1(A->rmap->n + 1, &tia));
    for (i = 0; i < A->rmap->n + 1; i++) tia[i] = a->i[i] + 1;
    *ia = tia;
    if (ja) {
      PetscInt *tja;
      PetscCall(PetscMalloc1(nz + 1, &tja));
      for (i = 0; i < nz; i++) tja[i] = a->j[i] + 1;
      *ja = tja;
    }
  } else {
    /* no copy needed: expose the internal arrays directly */
    *ia = a->i;
    if (ja) *ja = a->j;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Frees the arrays handed out by MatGetRowIJ_SeqAIJ() only in the cases where copies were made */
PetscErrorCode MatRestoreRowIJ_SeqAIJ(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  PetscFunctionBegin;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);
  /* must mirror the allocation conditions in MatGetRowIJ_SeqAIJ() */
  if ((symmetric && A->structurally_symmetric != PETSC_BOOL3_TRUE) || oshift == 1) {
    PetscCall(PetscFree(*ia));
    if (ja) PetscCall(PetscFree(*ja));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Builds column-oriented (CSC-style) index arrays for the matrix by counting entries
  per column and then scattering row indices. Always allocates; freed in
  MatRestoreColumnIJ_SeqAIJ().
*/
PetscErrorCode MatGetColumnIJ_SeqAIJ(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *nn, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, *collengths, *cia, *cja, n = A->cmap->n, m = A->rmap->n;
  PetscInt    nz = a->i[m], row, *jj, mr, col;

  PetscFunctionBegin;
  *nn = n;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);
  if (symmetric) {
    PetscCall(MatToSymmetricIJ_SeqAIJ(A->rmap->n, a->i, a->j, PETSC_TRUE, 0, oshift, (PetscInt **)ia, (PetscInt **)ja));
  } else {
    PetscCall(PetscCalloc1(n, &collengths));
    PetscCall(PetscMalloc1(n + 1, &cia));
    PetscCall(PetscMalloc1(nz, &cja));
    jj = a->j;
    /* count entries in each column */
    for (i = 0; i < nz; i++) collengths[jj[i]]++;
    /* prefix-sum into the column pointer array (with the requested index shift) */
    cia[0] = oshift;
    for (i = 0; i < n; i++) cia[i + 1] = cia[i] + collengths[i];
    PetscCall(PetscArrayzero(collengths, n)); /* reuse as per-column fill cursor */
    jj = a->j;
    for (row = 0; row < m; row++) {
      mr = a->i[row + 1] - a->i[row];
      for (i = 0; i < mr; i++) {
        col = *jj++;

        cja[cia[col] + collengths[col]++ - oshift] = row + oshift;
      }
    }
    PetscCall(PetscFree(collengths));
    *ia = cia;
    *ja = cja;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Frees the column-oriented arrays; unlike the RowIJ variant these are always allocated copies */
PetscErrorCode MatRestoreColumnIJ_SeqAIJ(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  PetscFunctionBegin;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscFree(*ia));
  PetscCall(PetscFree(*ja));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  MatGetColumnIJ_SeqAIJ_Color() and MatRestoreColumnIJ_SeqAIJ_Color() are customized from
  MatGetColumnIJ_SeqAIJ() and MatRestoreColumnIJ_SeqAIJ() by adding an output
  spidx[], index of a->a, to be used in MatTransposeColoringCreate_SeqAIJ() and MatFDColoringCreate_SeqXAIJ()
*/
/*
  Same as MatGetColumnIJ_SeqAIJ() (non-symmetric path only) but additionally returns
  spidx[k] = position in a->j / a->a of the k-th column-ordered entry, for coloring code.
*/
PetscErrorCode MatGetColumnIJ_SeqAIJ_Color(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *nn, const PetscInt *ia[], const PetscInt *ja[], PetscInt *spidx[], PetscBool *done)
{
  Mat_SeqAIJ     *a = (Mat_SeqAIJ *)A->data;
  PetscInt        i, *collengths, *cia, *cja, n = A->cmap->n, m = A->rmap->n;
  PetscInt        nz = a->i[m], row, mr, col, tmp;
  PetscInt       *cspidx;
  const PetscInt *jj;

  PetscFunctionBegin;
  *nn = n;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscCalloc1(n, &collengths));
  PetscCall(PetscMalloc1(n + 1, &cia));
  PetscCall(PetscMalloc1(nz, &cja));
  PetscCall(PetscMalloc1(nz, &cspidx));
  jj = a->j;
  /* count entries per column, then prefix-sum into the column pointer array */
  for (i = 0; i < nz; i++) collengths[jj[i]]++;
  cia[0] = oshift;
  for (i = 0; i < n; i++) cia[i + 1] = cia[i] + collengths[i];
  PetscCall(PetscArrayzero(collengths, n)); /* reuse as per-column fill cursor */
  jj = a->j;
  for (row = 0; row < m; row++) {
    mr = a->i[row + 1] - a->i[row];
    for (i = 0; i < mr; i++) {
      col = *jj++;
      tmp = cia[col] + collengths[col]++ - oshift;

      cspidx[tmp] = a->i[row] + i; /* index of a->j */
      cja[tmp]    = row + oshift;
    }
  }
  PetscCall(PetscFree(collengths));
  *ia    = cia;
  *ja    = cja;
  *spidx = cspidx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Companion restore: frees ia/ja via the base routine, then the extra spidx array */
PetscErrorCode MatRestoreColumnIJ_SeqAIJ_Color(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscInt *spidx[], PetscBool *done)
{
  PetscFunctionBegin;
  PetscCall(MatRestoreColumnIJ_SeqAIJ(A, oshift, symmetric, inodecompressed, n, ia, ja, done));
  PetscCall(PetscFree(*spidx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Overwrites the values of an entire existing row (nonzero structure unchanged) */
PetscErrorCode MatSetValuesRow_SeqAIJ(Mat A, PetscInt row, const PetscScalar v[])
{
  Mat_SeqAIJ  *a  = (Mat_SeqAIJ *)A->data;
  PetscInt    *ai = a->i;
  PetscScalar *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  /* v must supply exactly ai[row+1]-ai[row] values, matching the row's stored entries */
  PetscCall(PetscArraycpy(aa + ai[row], v, ai[row + 1] - ai[row]));
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  MatSeqAIJSetValuesLocalFast - An optimized version of MatSetValuesLocal() for SeqAIJ matrices with several assumptions

  - a single row of values is set with each call
  - no row or column indices are negative or (in error) larger than the number of rows or columns
  - the values are always added to the matrix, not set
  - no new locations are introduced in the nonzero structure of the matrix

  This does NOT assume the global column indices are sorted

*/

#include <petsc/private/isimpl.h>
PetscErrorCode MatSeqAIJSetValuesLocalFast(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode is)
{
  Mat_SeqAIJ     *a = (Mat_SeqAIJ *)A->data;
  PetscInt        low, high, t, row, nrow, i, col, l;
  const PetscInt *rp, *ai = a->i, *ailen = a->ilen, *aj = a->j;
  PetscInt        lastcol = -1;
  MatScalar      *ap, value, *aa;
  /* local-to-global maps are read directly; assumes both mappings are set */
  const PetscInt *ridx = A->rmap->mapping->indices, *cidx = A->cmap->mapping->indices;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  row  = ridx[im[0]]; /* only im[0] is used: a single row per call (see assumptions above) */
  rp   = aj + ai[row];
  ap   = aa + ai[row];
  nrow = ailen[row];
  low  = 0;
  high = nrow;
  for (l = 0; l < n; l++) { /* loop over added columns */
    col   = cidx[in[l]];
    value = v[l];

    /* shrink the search window only while columns arrive in increasing order */
    if (col <= lastcol) low = 0;
    else high = nrow;
    lastcol = col;
    /* binary search down to a window of <= 5, then scan linearly */
    while (high - low > 5) {
      t = (low + high) / 2;
      if (rp[t] > col) high = t;
      else low = t;
    }
    for (i = low; i < high; i++) {
      if (rp[i] == col) {
        ap[i] += value; /* always ADD_VALUES semantics (see assumptions above) */
        low = i + 1;
        break;
      }
    }
  }
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  /* NOTE(review): bare return rather than PetscFunctionReturn() -- inconsistent with the
     rest of this file; confirm whether this is intentional (skips the stack pop) */
  return PETSC_SUCCESS;
}

/*
  General MatSetValues() implementation for SeqAIJ: handles INSERT/ADD, negative
  (ignored) indices, optional ignoring of zero entries, and insertion of new
  nonzeros (with reallocation) when the structure allows it.
*/
PetscErrorCode MatSetValues_SeqAIJ(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode is)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt   *rp, k, low, high, t, ii, row, nrow, i, col, l, rmax, N;
  PetscInt   *imax = a->imax, *ai = a->i, *ailen = a->ilen;
  PetscInt   *aj = a->j, nonew = a->nonew, lastcol = -1;
  MatScalar  *ap = NULL, value = 0.0, *aa;
  PetscBool   ignorezeroentries = a->ignorezeroentries;
  PetscBool   roworiented       = a->roworiented;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (k = 0; k < m; k++) { /* loop over added rows */
    row = im[k];
    if (row < 0) continue; /* negative row indices are silently skipped */
    PetscCheck(row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, row, A->rmap->n - 1);
    rp = aj + ai[row];
    if (!A->structure_only) ap = aa + ai[row];
    rmax = imax[row]; /* allocated slots in this row */
    nrow = ailen[row]; /* currently used slots */
    low  = 0;
    high = nrow;
    for (l = 0; l < n; l++) { /* loop over added columns */
      if (in[l] < 0) continue; /* negative column indices are silently skipped */
      PetscCheck(in[l] < A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[l], A->cmap->n - 1);
      col = in[l];
      /* v is laid out row-major or column-major depending on MatSetOption(MAT_ROW_ORIENTED) */
      if (v && !A->structure_only) value = roworiented ? v[l + k * n] : v[k + l * m];
      /* never drop a zero on the diagonal: its structural slot is needed by factorizations */
      if (!A->structure_only && value == 0.0 && ignorezeroentries && is == ADD_VALUES && row != col) continue;

      /* shrink the binary-search window while columns arrive in increasing order */
      if (col <= lastcol) low = 0;
      else high = nrow;
      lastcol = col;
      while (high - low > 5) {
        t = (low + high) / 2;
        if (rp[t] > col) high = t;
        else low = t;
      }
      for (i = low; i < high; i++) {
        if (rp[i] > col) break; /* sorted row: passed the insertion point */
        if (rp[i] == col) {
          /* entry already present: update in place */
          if (!A->structure_only) {
            if (is == ADD_VALUES) {
              ap[i] += value;
              (void)PetscLogFlops(1.0);
            } else ap[i] = value;
          }
          low = i + 1;
          goto noinsert;
        }
      }
      /* entry not present: decide whether to insert a new nonzero at position i */
      if (value == 0.0 && ignorezeroentries && row != col) goto noinsert;
      if (nonew == 1) goto noinsert; /* MAT_NEW_NONZERO_LOCATIONS == FALSE: silently drop */
      PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at (%" PetscInt_FMT ",%" PetscInt_FMT ") in the matrix", row, col);
      /* may reallocate the row storage; these macros update aa/ai/aj/rp/ap/rmax in place */
      if (A->structure_only) {
        MatSeqXAIJReallocateAIJ_structure_only(A, A->rmap->n, 1, nrow, row, col, rmax, ai, aj, rp, imax, nonew, MatScalar);
      } else {
        MatSeqXAIJReallocateAIJ(A, A->rmap->n, 1, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
      }
      N = nrow++ - 1;
      a->nz++;
      high++;
      /* shift up all the later entries in this row */
      PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
      rp[i] = col;
      if (!A->structure_only) {
        PetscCall(PetscArraymove(ap + i + 1, ap + i, N - i + 1));
        ap[i] = value;
      }
      low = i + 1;
      A->nonzerostate++; /* nonzero pattern changed */
    noinsert:;
    }
    ailen[row] = nrow;
  }
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Fast-path MatSetValues() used after MatSeqAIJSetTotalPreallocation(): rows must be
  supplied in order starting from row zero, with sorted column indices, on a matrix
  that has never been assembled.
*/
PetscErrorCode MatSetValues_SeqAIJ_SortedFullNoPreallocation(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode is)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt   *rp, k, row;
  PetscInt   *ai = a->i;
  PetscInt   *aj = a->j;
  MatScalar  *aa, *ap;

  PetscFunctionBegin;
  PetscCheck(!A->was_assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot call on assembled matrix.");
  /* the total-preallocation bound set in MatSeqAIJSetTotalPreallocation() must not be exceeded */
  PetscCheck(m * n + a->nz <= a->maxnz, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of entries in matrix will be larger than maximum nonzeros allocated for %" PetscInt_FMT " in MatSeqAIJSetTotalPreallocation()", a->maxnz);

  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (k = 0; k < m; k++) { /* loop over added rows */
    row = im[k];
    rp  = aj + ai[row];
    ap  = aa + ai[row];

    /* whole row is copied verbatim; column indices are assumed sorted by the caller */
    PetscCall(PetscMemcpy(rp, in, n * sizeof(PetscInt)));
    if (!A->structure_only) {
      if (v) {
        PetscCall(PetscMemcpy(ap, v, n * sizeof(PetscScalar)));
        v += n;
      } else {
        PetscCall(PetscMemzero(ap, n * sizeof(PetscScalar)));
      }
    }
    /* row pointers are built incrementally as rows arrive in order */
    a->ilen[row]  = n;
    a->imax[row]  = n;
    a->i[row + 1] = a->i[row] + n;
    a->nz += n;
  }
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatSeqAIJSetTotalPreallocation - Sets an upper bound on the total number of expected nonzeros in the matrix.

  Input Parameters:
+ A       - the `MATSEQAIJ` matrix
- nztotal - bound on the number of nonzeros

  Level: advanced

  Notes:
  This can be called if you will be provided the matrix row by row (from row zero) with sorted column indices for each row.
  Simply call `MatSetValues()` after this call to provide the matrix entries in the usual manner. This matrix may be used
  as always with multiple matrix assemblies.

.seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MAT_SORTED_FULL`, `MatSetValues()`, `MatSeqAIJSetPreallocation()`
@*/
PetscErrorCode MatSeqAIJSetTotalPreallocation(Mat A, PetscInt nztotal)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  a->maxnz = nztotal;
  if (!a->imax) { PetscCall(PetscMalloc1(A->rmap->n, &a->imax)); }
  if (!a->ilen) {
    PetscCall(PetscMalloc1(A->rmap->n, &a->ilen));
  } else {
    PetscCall(PetscMemzero(a->ilen, A->rmap->n * sizeof(PetscInt)));
  }

  /* allocate the matrix space */
  if (A->structure_only) {
    PetscCall(PetscMalloc1(nztotal, &a->j));
    PetscCall(PetscMalloc1(A->rmap->n + 1, &a->i));
  } else {
    /* single allocation for values + column indices + row pointers */
    PetscCall(PetscMalloc3(nztotal, &a->a, nztotal, &a->j, A->rmap->n + 1, &a->i));
  }
  a->i[0] = 0;
  /* record how the arrays were allocated so the destructor frees them correctly */
  if (A->structure_only) {
    a->singlemalloc = PETSC_FALSE;
    a->free_a       = PETSC_FALSE;
  } else {
    a->singlemalloc = PETSC_TRUE;
    a->free_a       = PETSC_TRUE;
  }
  a->free_ij = PETSC_TRUE;
  /* route subsequent MatSetValues() calls through the sorted-full fast path */
  A->ops->setvalues = MatSetValues_SeqAIJ_SortedFullNoPreallocation;
  A->preallocated   = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Fast-path MatSetValues() for matrices flagged MAT_SORTED_FULL with conventional
  preallocation: each call provides one or more complete rows with sorted columns.
*/
PetscErrorCode MatSetValues_SeqAIJ_SortedFull(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode is)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt   *rp, k, row;
  PetscInt   *ai = a->i, *ailen = a->ilen;
  PetscInt   *aj = a->j;
  MatScalar  *aa, *ap;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (k = 0; k < m; k++) { /* loop over added rows */
    row = im[k];
    /* NOTE(review): the message prints n (the column count), not the row index -- confirm intended */
    PetscCheck(n <= a->imax[row], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Preallocation for row %" PetscInt_FMT " does not match number of columns provided", n);
    rp = aj + ai[row];
    ap = aa + ai[row];
    /* column structure is written only on first assembly; afterwards it is fixed */
    if (!A->was_assembled) PetscCall(PetscMemcpy(rp, in, n * sizeof(PetscInt)));
    if (!A->structure_only) {
      if (v) {
        PetscCall(PetscMemcpy(ap, v, n * sizeof(PetscScalar)));
        v += n;
      } else {
        PetscCall(PetscMemzero(ap, n * sizeof(PetscScalar)));
      }
    }
    ailen[row] = n;
    a->nz += n;
  }
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Extracts the m x n dense subblock given by (im, in) into v (row-major).
  Negative indices yield skipped slots; entries absent from the sparsity
  pattern are returned as 0.0.
*/
PetscErrorCode MatGetValues_SeqAIJ(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], PetscScalar v[])
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  PetscInt        *rp, k, low, high, t, row, nrow, i, col, l, *aj = a->j;
  PetscInt        *ai = a->i, *ailen = a->ilen;
  const MatScalar *ap, *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  for (k = 0; k < m; k++) { /* loop over rows */
    row = im[k];
    if (row < 0) {
      v += n;
      continue;
    } /* negative row */
    PetscCheck(row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, row, A->rmap->n - 1);
    rp   = aj + ai[row];
    ap   = aa + ai[row];
    nrow = ailen[row];
    for (l = 0; l < n; l++) { /* loop over columns */
      if (in[l] < 0) {
        v++;
        continue;
      } /* negative column */
      PetscCheck(in[l] < A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[l], A->cmap->n - 1);
      col  = in[l];
      high = nrow;
      low  = 0; /* assume unsorted */
      /* binary search narrows to a window of <= 5, then linear scan */
      while (high - low > 5) {
        t = (low + high) / 2;
        if (rp[t] > col) high = t;
        else low = t;
      }
      for (i = low; i < high; i++) {
        if (rp[i] > col) break;
        if (rp[i] == col) {
          *v++ = ap[i];
          goto finished;
        }
      }
      *v++ = 0.0; /* entry not in the sparsity pattern */
    finished:;
    }
  }
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Writes the matrix to a binary viewer: a 4-int header (classid, M, N, nz),
  then per-row lengths, column indices, and values.
*/
PetscErrorCode MatView_SeqAIJ_Binary(Mat mat, PetscViewer viewer)
{
  Mat_SeqAIJ *A = (Mat_SeqAIJ *)mat->data;
  const PetscScalar *av;
  PetscInt           header[4], M, N, m, nz, i;
  PetscInt          *rowlens;

  PetscFunctionBegin;
  PetscCall(PetscViewerSetUp(viewer));

  M  = mat->rmap->N;
  N  = mat->cmap->N;
  m  = mat->rmap->n;
  nz = A->nz;

  /* write matrix header */
  header[0] = MAT_FILE_CLASSID;
  header[1] = M;
  header[2] = N;
  header[3] = nz;
  PetscCall(PetscViewerBinaryWrite(viewer, header, 4, PETSC_INT));

  /* fill in and store row lengths */
  PetscCall(PetscMalloc1(m, &rowlens));
  for (i = 0; i < m; i++) rowlens[i] = A->i[i + 1] - A->i[i];
  PetscCall(PetscViewerBinaryWrite(viewer, rowlens, m, PETSC_INT));
  PetscCall(PetscFree(rowlens));
  /* store column indices */
  PetscCall(PetscViewerBinaryWrite(viewer, A->j, nz, PETSC_INT));
  /* store nonzero values */
  PetscCall(MatSeqAIJGetArrayRead(mat, &av));
  PetscCall(PetscViewerBinaryWrite(viewer, av, nz, PETSC_SCALAR));
  PetscCall(MatSeqAIJRestoreArrayRead(mat, &av));

  /* write block size option to the viewer's .info file */
  PetscCall(MatView_Binary_BlockSizes(mat, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ASCII view of a structure-only matrix: prints column indices per row, no values */
static PetscErrorCode MatView_SeqAIJ_ASCII_structonly(Mat A, PetscViewer viewer)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, k, m = A->rmap->N;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
  for (i = 0; i < m; i++) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "row %" PetscInt_FMT ":", i));
    for (k = a->i[i]; k < a->i[i + 1]; k++) PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ") ", a->j[k]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
  }
  PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

extern PetscErrorCode MatSeqAIJFactorInfo_Matlab(Mat, PetscViewer);

PetscErrorCode MatView_SeqAIJ_ASCII(Mat A, PetscViewer viewer)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ
*)A->data; 707 const PetscScalar *av; 708 PetscInt i, j, m = A->rmap->n; 709 const char *name; 710 PetscViewerFormat format; 711 712 PetscFunctionBegin; 713 if (A->structure_only) { 714 PetscCall(MatView_SeqAIJ_ASCII_structonly(A, viewer)); 715 PetscFunctionReturn(PETSC_SUCCESS); 716 } 717 718 PetscCall(PetscViewerGetFormat(viewer, &format)); 719 if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS); 720 721 /* trigger copy to CPU if needed */ 722 PetscCall(MatSeqAIJGetArrayRead(A, &av)); 723 PetscCall(MatSeqAIJRestoreArrayRead(A, &av)); 724 if (format == PETSC_VIEWER_ASCII_MATLAB) { 725 PetscInt nofinalvalue = 0; 726 if (m && ((a->i[m] == a->i[m - 1]) || (a->j[a->nz - 1] != A->cmap->n - 1))) { 727 /* Need a dummy value to ensure the dimension of the matrix. */ 728 nofinalvalue = 1; 729 } 730 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 731 PetscCall(PetscViewerASCIIPrintf(viewer, "%% Size = %" PetscInt_FMT " %" PetscInt_FMT " \n", m, A->cmap->n)); 732 PetscCall(PetscViewerASCIIPrintf(viewer, "%% Nonzeros = %" PetscInt_FMT " \n", a->nz)); 733 #if defined(PETSC_USE_COMPLEX) 734 PetscCall(PetscViewerASCIIPrintf(viewer, "zzz = zeros(%" PetscInt_FMT ",4);\n", a->nz + nofinalvalue)); 735 #else 736 PetscCall(PetscViewerASCIIPrintf(viewer, "zzz = zeros(%" PetscInt_FMT ",3);\n", a->nz + nofinalvalue)); 737 #endif 738 PetscCall(PetscViewerASCIIPrintf(viewer, "zzz = [\n")); 739 740 for (i = 0; i < m; i++) { 741 for (j = a->i[i]; j < a->i[i + 1]; j++) { 742 #if defined(PETSC_USE_COMPLEX) 743 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %18.16e %18.16e\n", i + 1, a->j[j] + 1, (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 744 #else 745 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %18.16e\n", i + 1, a->j[j] + 1, (double)a->a[j])); 746 #endif 747 } 748 } 749 if 
(nofinalvalue) { 750 #if defined(PETSC_USE_COMPLEX) 751 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %18.16e %18.16e\n", m, A->cmap->n, 0., 0.)); 752 #else 753 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %18.16e\n", m, A->cmap->n, 0.0)); 754 #endif 755 } 756 PetscCall(PetscObjectGetName((PetscObject)A, &name)); 757 PetscCall(PetscViewerASCIIPrintf(viewer, "];\n %s = spconvert(zzz);\n", name)); 758 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 759 } else if (format == PETSC_VIEWER_ASCII_COMMON) { 760 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 761 for (i = 0; i < m; i++) { 762 PetscCall(PetscViewerASCIIPrintf(viewer, "row %" PetscInt_FMT ":", i)); 763 for (j = a->i[i]; j < a->i[i + 1]; j++) { 764 #if defined(PETSC_USE_COMPLEX) 765 if (PetscImaginaryPart(a->a[j]) > 0.0 && PetscRealPart(a->a[j]) != 0.0) { 766 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 767 } else if (PetscImaginaryPart(a->a[j]) < 0.0 && PetscRealPart(a->a[j]) != 0.0) { 768 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)-PetscImaginaryPart(a->a[j]))); 769 } else if (PetscRealPart(a->a[j]) != 0.0) { 770 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(a->a[j]))); 771 } 772 #else 773 if (a->a[j] != 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)a->a[j])); 774 #endif 775 } 776 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 777 } 778 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 779 } else if (format == PETSC_VIEWER_ASCII_SYMMODU) { 780 PetscInt nzd = 0, fshift = 1, *sptr; 781 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 782 PetscCall(PetscMalloc1(m + 1, &sptr)); 783 for (i = 0; i < m; i++) { 784 sptr[i] = nzd + 
1; 785 for (j = a->i[i]; j < a->i[i + 1]; j++) { 786 if (a->j[j] >= i) { 787 #if defined(PETSC_USE_COMPLEX) 788 if (PetscImaginaryPart(a->a[j]) != 0.0 || PetscRealPart(a->a[j]) != 0.0) nzd++; 789 #else 790 if (a->a[j] != 0.0) nzd++; 791 #endif 792 } 793 } 794 } 795 sptr[m] = nzd + 1; 796 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT "\n\n", m, nzd)); 797 for (i = 0; i < m + 1; i += 6) { 798 if (i + 4 < m) { 799 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1], sptr[i + 2], sptr[i + 3], sptr[i + 4], sptr[i + 5])); 800 } else if (i + 3 < m) { 801 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1], sptr[i + 2], sptr[i + 3], sptr[i + 4])); 802 } else if (i + 2 < m) { 803 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1], sptr[i + 2], sptr[i + 3])); 804 } else if (i + 1 < m) { 805 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1], sptr[i + 2])); 806 } else if (i < m) { 807 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1])); 808 } else { 809 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT "\n", sptr[i])); 810 } 811 } 812 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 813 PetscCall(PetscFree(sptr)); 814 for (i = 0; i < m; i++) { 815 for (j = a->i[i]; j < a->i[i + 1]; j++) { 816 if (a->j[j] >= i) PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " ", a->j[j] + fshift)); 817 } 818 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 819 } 820 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 821 for (i = 0; i < m; i++) { 822 for (j = a->i[i]; j < a->i[i + 1]; j++) { 823 if (a->j[j] >= 
i) { 824 #if defined(PETSC_USE_COMPLEX) 825 if (PetscImaginaryPart(a->a[j]) != 0.0 || PetscRealPart(a->a[j]) != 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " %18.16e %18.16e ", (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 826 #else 827 if (a->a[j] != 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " %18.16e ", (double)a->a[j])); 828 #endif 829 } 830 } 831 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 832 } 833 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 834 } else if (format == PETSC_VIEWER_ASCII_DENSE) { 835 PetscInt cnt = 0, jcnt; 836 PetscScalar value; 837 #if defined(PETSC_USE_COMPLEX) 838 PetscBool realonly = PETSC_TRUE; 839 840 for (i = 0; i < a->i[m]; i++) { 841 if (PetscImaginaryPart(a->a[i]) != 0.0) { 842 realonly = PETSC_FALSE; 843 break; 844 } 845 } 846 #endif 847 848 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 849 for (i = 0; i < m; i++) { 850 jcnt = 0; 851 for (j = 0; j < A->cmap->n; j++) { 852 if (jcnt < a->i[i + 1] - a->i[i] && j == a->j[cnt]) { 853 value = a->a[cnt++]; 854 jcnt++; 855 } else { 856 value = 0.0; 857 } 858 #if defined(PETSC_USE_COMPLEX) 859 if (realonly) { 860 PetscCall(PetscViewerASCIIPrintf(viewer, " %7.5e ", (double)PetscRealPart(value))); 861 } else { 862 PetscCall(PetscViewerASCIIPrintf(viewer, " %7.5e+%7.5e i ", (double)PetscRealPart(value), (double)PetscImaginaryPart(value))); 863 } 864 #else 865 PetscCall(PetscViewerASCIIPrintf(viewer, " %7.5e ", (double)value)); 866 #endif 867 } 868 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 869 } 870 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 871 } else if (format == PETSC_VIEWER_ASCII_MATRIXMARKET) { 872 PetscInt fshift = 1; 873 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 874 #if defined(PETSC_USE_COMPLEX) 875 PetscCall(PetscViewerASCIIPrintf(viewer, "%%%%MatrixMarket matrix coordinate complex general\n")); 876 #else 877 PetscCall(PetscViewerASCIIPrintf(viewer, "%%%%MatrixMarket matrix coordinate real 
general\n")); 878 #endif 879 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, A->cmap->n, a->nz)); 880 for (i = 0; i < m; i++) { 881 for (j = a->i[i]; j < a->i[i + 1]; j++) { 882 #if defined(PETSC_USE_COMPLEX) 883 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %g %g\n", i + fshift, a->j[j] + fshift, (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 884 #else 885 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %g\n", i + fshift, a->j[j] + fshift, (double)a->a[j])); 886 #endif 887 } 888 } 889 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 890 } else { 891 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 892 if (A->factortype) { 893 for (i = 0; i < m; i++) { 894 PetscCall(PetscViewerASCIIPrintf(viewer, "row %" PetscInt_FMT ":", i)); 895 /* L part */ 896 for (j = a->i[i]; j < a->i[i + 1]; j++) { 897 #if defined(PETSC_USE_COMPLEX) 898 if (PetscImaginaryPart(a->a[j]) > 0.0) { 899 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 900 } else if (PetscImaginaryPart(a->a[j]) < 0.0) { 901 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)(-PetscImaginaryPart(a->a[j])))); 902 } else { 903 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(a->a[j]))); 904 } 905 #else 906 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)a->a[j])); 907 #endif 908 } 909 /* diagonal */ 910 j = a->diag[i]; 911 #if defined(PETSC_USE_COMPLEX) 912 if (PetscImaginaryPart(a->a[j]) > 0.0) { 913 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(1.0 / a->a[j]), (double)PetscImaginaryPart(1.0 / a->a[j]))); 914 } else if 
(PetscImaginaryPart(a->a[j]) < 0.0) { 915 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(1.0 / a->a[j]), (double)(-PetscImaginaryPart(1.0 / a->a[j])))); 916 } else { 917 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(1.0 / a->a[j]))); 918 } 919 #else 920 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)(1.0 / a->a[j]))); 921 #endif 922 923 /* U part */ 924 for (j = a->diag[i + 1] + 1; j < a->diag[i]; j++) { 925 #if defined(PETSC_USE_COMPLEX) 926 if (PetscImaginaryPart(a->a[j]) > 0.0) { 927 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 928 } else if (PetscImaginaryPart(a->a[j]) < 0.0) { 929 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)(-PetscImaginaryPart(a->a[j])))); 930 } else { 931 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(a->a[j]))); 932 } 933 #else 934 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)a->a[j])); 935 #endif 936 } 937 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 938 } 939 } else { 940 for (i = 0; i < m; i++) { 941 PetscCall(PetscViewerASCIIPrintf(viewer, "row %" PetscInt_FMT ":", i)); 942 for (j = a->i[i]; j < a->i[i + 1]; j++) { 943 #if defined(PETSC_USE_COMPLEX) 944 if (PetscImaginaryPart(a->a[j]) > 0.0) { 945 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 946 } else if (PetscImaginaryPart(a->a[j]) < 0.0) { 947 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)-PetscImaginaryPart(a->a[j]))); 948 } else { 949 
PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(a->a[j]))); 950 } 951 #else 952 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)a->a[j])); 953 #endif 954 } 955 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 956 } 957 } 958 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 959 } 960 PetscCall(PetscViewerFlush(viewer)); 961 PetscFunctionReturn(PETSC_SUCCESS); 962 } 963 964 #include <petscdraw.h> 965 PetscErrorCode MatView_SeqAIJ_Draw_Zoom(PetscDraw draw, void *Aa) 966 { 967 Mat A = (Mat)Aa; 968 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 969 PetscInt i, j, m = A->rmap->n; 970 int color; 971 PetscReal xl, yl, xr, yr, x_l, x_r, y_l, y_r; 972 PetscViewer viewer; 973 PetscViewerFormat format; 974 const PetscScalar *aa; 975 976 PetscFunctionBegin; 977 PetscCall(PetscObjectQuery((PetscObject)A, "Zoomviewer", (PetscObject *)&viewer)); 978 PetscCall(PetscViewerGetFormat(viewer, &format)); 979 PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr)); 980 981 /* loop over matrix elements drawing boxes */ 982 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 983 if (format != PETSC_VIEWER_DRAW_CONTOUR) { 984 PetscDrawCollectiveBegin(draw); 985 /* Blue for negative, Cyan for zero and Red for positive */ 986 color = PETSC_DRAW_BLUE; 987 for (i = 0; i < m; i++) { 988 y_l = m - i - 1.0; 989 y_r = y_l + 1.0; 990 for (j = a->i[i]; j < a->i[i + 1]; j++) { 991 x_l = a->j[j]; 992 x_r = x_l + 1.0; 993 if (PetscRealPart(aa[j]) >= 0.) continue; 994 PetscCall(PetscDrawRectangle(draw, x_l, y_l, x_r, y_r, color, color, color, color)); 995 } 996 } 997 color = PETSC_DRAW_CYAN; 998 for (i = 0; i < m; i++) { 999 y_l = m - i - 1.0; 1000 y_r = y_l + 1.0; 1001 for (j = a->i[i]; j < a->i[i + 1]; j++) { 1002 x_l = a->j[j]; 1003 x_r = x_l + 1.0; 1004 if (aa[j] != 0.) 
continue; 1005 PetscCall(PetscDrawRectangle(draw, x_l, y_l, x_r, y_r, color, color, color, color)); 1006 } 1007 } 1008 color = PETSC_DRAW_RED; 1009 for (i = 0; i < m; i++) { 1010 y_l = m - i - 1.0; 1011 y_r = y_l + 1.0; 1012 for (j = a->i[i]; j < a->i[i + 1]; j++) { 1013 x_l = a->j[j]; 1014 x_r = x_l + 1.0; 1015 if (PetscRealPart(aa[j]) <= 0.) continue; 1016 PetscCall(PetscDrawRectangle(draw, x_l, y_l, x_r, y_r, color, color, color, color)); 1017 } 1018 } 1019 PetscDrawCollectiveEnd(draw); 1020 } else { 1021 /* use contour shading to indicate magnitude of values */ 1022 /* first determine max of all nonzero values */ 1023 PetscReal minv = 0.0, maxv = 0.0; 1024 PetscInt nz = a->nz, count = 0; 1025 PetscDraw popup; 1026 1027 for (i = 0; i < nz; i++) { 1028 if (PetscAbsScalar(aa[i]) > maxv) maxv = PetscAbsScalar(aa[i]); 1029 } 1030 if (minv >= maxv) maxv = minv + PETSC_SMALL; 1031 PetscCall(PetscDrawGetPopup(draw, &popup)); 1032 PetscCall(PetscDrawScalePopup(popup, minv, maxv)); 1033 1034 PetscDrawCollectiveBegin(draw); 1035 for (i = 0; i < m; i++) { 1036 y_l = m - i - 1.0; 1037 y_r = y_l + 1.0; 1038 for (j = a->i[i]; j < a->i[i + 1]; j++) { 1039 x_l = a->j[j]; 1040 x_r = x_l + 1.0; 1041 color = PetscDrawRealToColor(PetscAbsScalar(aa[count]), minv, maxv); 1042 PetscCall(PetscDrawRectangle(draw, x_l, y_l, x_r, y_r, color, color, color, color)); 1043 count++; 1044 } 1045 } 1046 PetscDrawCollectiveEnd(draw); 1047 } 1048 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 1049 PetscFunctionReturn(PETSC_SUCCESS); 1050 } 1051 1052 #include <petscdraw.h> 1053 PetscErrorCode MatView_SeqAIJ_Draw(Mat A, PetscViewer viewer) 1054 { 1055 PetscDraw draw; 1056 PetscReal xr, yr, xl, yl, h, w; 1057 PetscBool isnull; 1058 1059 PetscFunctionBegin; 1060 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 1061 PetscCall(PetscDrawIsNull(draw, &isnull)); 1062 if (isnull) PetscFunctionReturn(PETSC_SUCCESS); 1063 1064 xr = A->cmap->n; 1065 yr = A->rmap->n; 1066 h = yr / 10.0; 1067 w = xr / 10.0; 
xr += w; /* pad the drawing box by 10% on every side */
  yr += h;
  xl = -w;
  yl = -h;
  PetscCall(PetscDrawSetCoordinates(draw, xl, yl, xr, yr));
  /* stash the viewer on the matrix so the zoom callback can query it back */
  PetscCall(PetscObjectCompose((PetscObject)A, "Zoomviewer", (PetscObject)viewer));
  PetscCall(PetscDrawZoom(draw, MatView_SeqAIJ_Draw_Zoom, A));
  PetscCall(PetscObjectCompose((PetscObject)A, "Zoomviewer", NULL));
  PetscCall(PetscDrawSave(draw));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Dispatches MatView() for SeqAIJ to the ASCII, binary, or draw viewer,
   then appends inode information for any viewer type.
*/
PetscErrorCode MatView_SeqAIJ(Mat A, PetscViewer viewer)
{
  PetscBool iascii, isbinary, isdraw;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) PetscCall(MatView_SeqAIJ_ASCII(A, viewer));
  else if (isbinary) PetscCall(MatView_SeqAIJ_Binary(A, viewer));
  else if (isdraw) PetscCall(MatView_SeqAIJ_Draw(A, viewer));
  /* inode viewing is in addition to (not instead of) the format-specific view */
  PetscCall(MatView_SeqAIJ_Inode(A, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Final assembly for SeqAIJ: compacts each row in place to squeeze out the
   unused preallocated slots, rebuilds the row pointers i[], resets ilen/imax,
   marks the diagonal, and (re)checks whether the compressed-row optimization
   should be used.
*/
PetscErrorCode MatAssemblyEnd_SeqAIJ(Mat A, MatAssemblyType mode)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    fshift = 0, i, *ai = a->i, *aj = a->j, *imax = a->imax;
  PetscInt    m = A->rmap->n, *ip, N, *ailen = a->ilen, rmax = 0;
  MatScalar  *aa = a->a, *ap;
  PetscReal   ratio = 0.6; /* threshold of nonzero rows below which compressed-row storage is used */

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  if (A->was_assembled && A->ass_nonzerostate == A->nonzerostate) {
    /* we need to respect users asking to use or not the inodes routine in between matrix assemblies */
    PetscCall(MatAssemblyEnd_SeqAIJ_Inode(A, mode));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i = 1; i < m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i - 1] - ailen[i - 1];
    rmax = PetscMax(rmax, ailen[i]);
    if (fshift) {
      ip = aj + ai[i];
      ap = aa + ai[i];
      N  = ailen[i];
      PetscCall(PetscArraymove(ip - fshift, ip, N));
      /* structure-only matrices carry no numerical values to move */
      if (!A->structure_only) PetscCall(PetscArraymove(ap - fshift, ap, N));
    }
    ai[i] = ai[i - 1] + ailen[i - 1];
  }
  if (m) {
    fshift += imax[m - 1] - ailen[m - 1];
    ai[m] = ai[m - 1] + ailen[m - 1];
  }
  /* reset ilen and imax for each row */
  a->nonzerorowcnt = 0;
  if (A->structure_only) {
    PetscCall(PetscFree(a->imax));
    PetscCall(PetscFree(a->ilen));
  } else { /* !A->structure_only */
    for (i = 0; i < m; i++) {
      ailen[i] = imax[i] = ai[i + 1] - ai[i];
      a->nonzerorowcnt += ((ai[i + 1] - ai[i]) > 0);
    }
  }
  a->nz = ai[m];
  /* with MAT_UNUSED_NONZERO_LOCATION_ERR set (nounused == -1), leftover slots are an error */
  PetscCheck(!fshift || a->nounused != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unused space detected in matrix: %" PetscInt_FMT " X %" PetscInt_FMT ", %" PetscInt_FMT " unneeded", m, A->cmap->n, fshift);

  PetscCall(MatMarkDiagonal_SeqAIJ(A));
  PetscCall(PetscInfo(A, "Matrix size: %" PetscInt_FMT " X %" PetscInt_FMT "; storage space: %" PetscInt_FMT " unneeded,%" PetscInt_FMT " used\n", m, A->cmap->n, fshift, a->nz));
  PetscCall(PetscInfo(A, "Number of mallocs during MatSetValues() is %" PetscInt_FMT "\n", a->reallocs));
  PetscCall(PetscInfo(A, "Maximum nonzeros in any row is %" PetscInt_FMT "\n", rmax));

  A->info.mallocs += a->reallocs;
  a->reallocs         = 0;
  A->info.nz_unneeded = (PetscReal)fshift;
  a->rmax             = rmax;

  if (!A->structure_only) PetscCall(MatCheckCompressedRow(A, a->nonzerorowcnt, &a->compressedrow, a->i, m, ratio));
  PetscCall(MatAssemblyEnd_SeqAIJ_Inode(A, mode));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Replaces every stored value by its real part (in-place).
*/
PetscErrorCode MatRealPart_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i,
nz = a->nz;
  MatScalar *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (i = 0; i < nz; i++) aa[i] = PetscRealPart(aa[i]);
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  /* values changed, so any cached factored/inverted diagonal is stale */
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Replaces every stored value by its imaginary part (in-place).
*/
PetscErrorCode MatImaginaryPart_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, nz = a->nz;
  MatScalar  *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (i = 0; i < nz; i++) aa[i] = PetscImaginaryPart(aa[i]);
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Zeros all stored values; the nonzero pattern (i[], j[]) is left intact.
*/
PetscErrorCode MatZeroEntries_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  MatScalar  *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayWrite(A, &aa));
  /* a->i[A->rmap->n] is the total number of stored entries */
  PetscCall(PetscArrayzero(aa, a->i[A->rmap->n]));
  PetscCall(MatSeqAIJRestoreArrayWrite(A, &aa));
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Destroys a SeqAIJ matrix: restores the saved operations table if the
   hash-based assembly path was active, frees all per-row metadata and the
   CSR arrays, and clears every composed function/object so a reused Mat
   does not keep stale pointers.
*/
PetscErrorCode MatDestroy_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (A->hash_active) {
    /* restore the ops table that was swapped out when hash assembly was enabled */
    A->ops[0] = a->cops;
    PetscCall(PetscHMapIJVDestroy(&a->ht));
    PetscCall(PetscFree(a->dnz));
    A->hash_active = PETSC_FALSE;
  }

#if defined(PETSC_USE_LOG)
  PetscCall(PetscLogObjectState((PetscObject)A, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, A->rmap->n, A->cmap->n, a->nz));
#endif
  PetscCall(MatSeqXAIJFreeAIJ(A, &a->a, &a->j, &a->i));
  PetscCall(ISDestroy(&a->row));
  PetscCall(ISDestroy(&a->col));
  PetscCall(PetscFree(a->diag));
  PetscCall(PetscFree(a->ibdiag));
  PetscCall(PetscFree(a->imax));
  PetscCall(PetscFree(a->ilen));
  PetscCall(PetscFree(a->ipre));
PetscCall(PetscFree3(a->idiag, a->mdiag, a->ssor_work)); 1224 PetscCall(PetscFree(a->solve_work)); 1225 PetscCall(ISDestroy(&a->icol)); 1226 PetscCall(PetscFree(a->saved_values)); 1227 PetscCall(PetscFree2(a->compressedrow.i, a->compressedrow.rindex)); 1228 PetscCall(MatDestroy_SeqAIJ_Inode(A)); 1229 PetscCall(PetscFree(A->data)); 1230 1231 /* MatMatMultNumeric_SeqAIJ_SeqAIJ_Sorted may allocate this. 1232 That function is so heavily used (sometimes in an hidden way through multnumeric function pointers) 1233 that is hard to properly add this data to the MatProduct data. We free it here to avoid 1234 users reusing the matrix object with different data to incur in obscure segmentation faults 1235 due to different matrix sizes */ 1236 PetscCall(PetscObjectCompose((PetscObject)A, "__PETSc__ab_dense", NULL)); 1237 1238 PetscCall(PetscObjectChangeTypeName((PetscObject)A, NULL)); 1239 PetscCall(PetscObjectComposeFunction((PetscObject)A, "PetscMatlabEnginePut_C", NULL)); 1240 PetscCall(PetscObjectComposeFunction((PetscObject)A, "PetscMatlabEngineGet_C", NULL)); 1241 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqAIJSetColumnIndices_C", NULL)); 1242 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatStoreValues_C", NULL)); 1243 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatRetrieveValues_C", NULL)); 1244 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqsbaij_C", NULL)); 1245 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqbaij_C", NULL)); 1246 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijperm_C", NULL)); 1247 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijsell_C", NULL)); 1248 #if defined(PETSC_HAVE_MKL_SPARSE) 1249 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijmkl_C", NULL)); 1250 #endif 1251 #if defined(PETSC_HAVE_CUDA) 1252 PetscCall(PetscObjectComposeFunction((PetscObject)A, 
"MatConvert_seqaij_seqaijcusparse_C", NULL)); 1253 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijcusparse_seqaij_C", NULL)); 1254 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_seqaijcusparse_C", NULL)); 1255 #endif 1256 #if defined(PETSC_HAVE_HIP) 1257 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijhipsparse_C", NULL)); 1258 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijhipsparse_seqaij_C", NULL)); 1259 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_seqaijhipsparse_C", NULL)); 1260 #endif 1261 #if defined(PETSC_HAVE_KOKKOS_KERNELS) 1262 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijkokkos_C", NULL)); 1263 #endif 1264 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijcrl_C", NULL)); 1265 #if defined(PETSC_HAVE_ELEMENTAL) 1266 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_elemental_C", NULL)); 1267 #endif 1268 #if defined(PETSC_HAVE_SCALAPACK) 1269 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_scalapack_C", NULL)); 1270 #endif 1271 #if defined(PETSC_HAVE_HYPRE) 1272 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_hypre_C", NULL)); 1273 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_transpose_seqaij_seqaij_C", NULL)); 1274 #endif 1275 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqdense_C", NULL)); 1276 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqsell_C", NULL)); 1277 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_is_C", NULL)); 1278 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatIsTranspose_C", NULL)); 1279 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatIsHermitianTranspose_C", NULL)); 1280 
PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqAIJSetPreallocation_C", NULL)); 1281 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatResetPreallocation_C", NULL)); 1282 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqAIJSetPreallocationCSR_C", NULL)); 1283 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatReorderForNonzeroDiagonal_C", NULL)); 1284 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_is_seqaij_C", NULL)); 1285 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqdense_seqaij_C", NULL)); 1286 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_seqaij_C", NULL)); 1287 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqAIJKron_C", NULL)); 1288 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL)); 1289 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL)); 1290 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL)); 1291 /* these calls do not belong here: the subclasses Duplicate/Destroy are wrong */ 1292 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaijsell_seqaij_C", NULL)); 1293 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaijperm_seqaij_C", NULL)); 1294 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijviennacl_C", NULL)); 1295 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijviennacl_seqdense_C", NULL)); 1296 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijviennacl_seqaij_C", NULL)); 1297 PetscFunctionReturn(PETSC_SUCCESS); 1298 } 1299 1300 PetscErrorCode MatSetOption_SeqAIJ(Mat A, MatOption op, PetscBool flg) 1301 { 1302 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 1303 1304 PetscFunctionBegin; 1305 switch (op) { 1306 case MAT_ROW_ORIENTED: 1307 a->roworiented = flg; 1308 break; 
case MAT_KEEP_NONZERO_PATTERN:
    a->keepnonzeropattern = flg;
    break;
  case MAT_NEW_NONZERO_LOCATIONS:
    /* nonew: 0 = allow new nonzeros, 1 = silently drop them */
    a->nonew = (flg ? 0 : 1);
    break;
  case MAT_NEW_NONZERO_LOCATION_ERR:
    /* nonew: -1 = error on insertion at a new location */
    a->nonew = (flg ? -1 : 0);
    break;
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
    /* nonew: -2 = error if a new location would force a reallocation */
    a->nonew = (flg ? -2 : 0);
    break;
  case MAT_UNUSED_NONZERO_LOCATION_ERR:
    a->nounused = (flg ? -1 : 0);
    break;
  case MAT_IGNORE_ZERO_ENTRIES:
    a->ignorezeroentries = flg;
    break;
  case MAT_SPD:
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_SYMMETRY_ETERNAL:
  case MAT_STRUCTURE_ONLY:
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
  case MAT_SPD_ETERNAL:
    /* if the diagonal matrix is square it inherits some of the properties above */
    break;
  case MAT_FORCE_DIAGONAL_ENTRIES:
  case MAT_IGNORE_OFF_PROC_ENTRIES:
  case MAT_USE_HASH_TABLE:
    /* meaningful only for parallel formats; ignored (with a log message) for SeqAIJ */
    PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
    break;
  case MAT_USE_INODES:
    PetscCall(MatSetOption_SeqAIJ_Inode(A, MAT_USE_INODES, flg));
    break;
  case MAT_SUBMAT_SINGLEIS:
    A->submat_singleis = flg;
    break;
  case MAT_SORTED_FULL:
    /* caller promises fully sorted, complete rows, enabling the fast insertion path */
    if (flg) A->ops->setvalues = MatSetValues_SeqAIJ_SortedFull;
    else A->ops->setvalues = MatSetValues_SeqAIJ;
    break;
  case MAT_FORM_EXPLICIT_TRANSPOSE:
    A->form_explicit_transpose = flg;
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "unknown option %d", op);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Extracts the diagonal of A into v.  For LU/ILU-factored matrices the
   stored diagonal holds the pivot, so its reciprocal is returned; otherwise
   each row is scanned for its diagonal entry (0.0 if the entry is absent).
*/
PetscErrorCode MatGetDiagonal_SeqAIJ(Mat A, Vec v)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  PetscInt           i, j, n, *ai = a->i, *aj = a->j;
  PetscScalar       *x;
  const PetscScalar *aa;

  PetscFunctionBegin;
  PetscCall(VecGetLocalSize(v, &n));
  PetscCheck(n == A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector");
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  if (A->factortype == MAT_FACTOR_ILU || A->factortype == MAT_FACTOR_LU) {
    PetscInt *diag = a->diag;
    PetscCall(VecGetArrayWrite(v, &x));
    /* factored storage keeps 1/pivot semantics: return the reciprocal of the stored value */
    for (i = 0; i < n; i++) x[i] = 1.0 / aa[diag[i]];
    PetscCall(VecRestoreArrayWrite(v, &x));
    PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(VecGetArrayWrite(v, &x));
  for (i = 0; i < n; i++) {
    x[i] = 0.0; /* default when row i has no stored diagonal entry */
    for (j = ai[i]; j < ai[i + 1]; j++) {
      if (aj[j] == i) {
        x[i] = aa[j];
        break;
      }
    }
  }
  PetscCall(VecRestoreArrayWrite(v, &x));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/aij/seq/ftn-kernels/fmult.h>
/*
   Computes yy = zz + A^T * xx by scattering alpha = x[row] into the columns
   of each row; optionally uses the Fortran kernel or compressed-row storage.
*/
PetscErrorCode MatMultTransposeAdd_SeqAIJ(Mat A, Vec xx, Vec zz, Vec yy)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  const MatScalar   *aa;
  PetscScalar       *y;
  const PetscScalar *x;
  PetscInt           m = A->rmap->n;
#if !defined(PETSC_USE_FORTRAN_KERNEL_MULTTRANSPOSEAIJ)
  const MatScalar  *v;
  PetscScalar       alpha;
  PetscInt          n, i, j;
  const PetscInt   *idx, *ii, *ridx = NULL;
  Mat_CompressedRow cprow    = a->compressedrow;
  PetscBool         usecprow = cprow.use;
#endif

  PetscFunctionBegin;
  if (zz != yy) PetscCall(VecCopy(zz, yy));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(VecGetArray(yy, &y));
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));

#if defined(PETSC_USE_FORTRAN_KERNEL_MULTTRANSPOSEAIJ)
  fortranmulttransposeaddaij_(&m, x, a->i, a->j, aa, y);
#else
  if (usecprow) {
    /* iterate only over the rows that contain nonzeros */
    m    = cprow.nrows;
    ii   = cprow.i;
    ridx = cprow.rindex;
  } else {
    ii = a->i;
  }
  for (i = 0; i < m; i++) {
    idx = a->j + ii[i];
    v   = aa + ii[i];
    n   = ii[i + 1] - ii[i];
    if (usecprow) {
      alpha = x[ridx[i]];
    } else {
      alpha = x[i];
    }
    /* scatter: row i of A contributes alpha * a(i,j) to y[j] */
    for (j = 0; j < n;
j++) y[idx[j]] += alpha * v[j];
  }
#endif
  PetscCall(PetscLogFlops(2.0 * a->nz));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecRestoreArray(yy, &y));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Computes yy = A^T * xx by zeroing yy and delegating to the add variant.
*/
PetscErrorCode MatMultTranspose_SeqAIJ(Mat A, Vec xx, Vec yy)
{
  PetscFunctionBegin;
  PetscCall(VecSet(yy, 0.0));
  PetscCall(MatMultTransposeAdd_SeqAIJ(A, xx, yy, yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/aij/seq/ftn-kernels/fmult.h>

/*
   Computes yy = A * xx.  Dispatches to the inode kernel when available;
   otherwise performs a row-wise sparse dot product, optionally over
   compressed-row storage or via the Fortran kernel.
*/
PetscErrorCode MatMult_SeqAIJ(Mat A, Vec xx, Vec yy)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  PetscScalar       *y;
  const PetscScalar *x;
  const MatScalar   *aa, *a_a;
  PetscInt           m = A->rmap->n;
  const PetscInt    *aj, *ii, *ridx = NULL;
  PetscInt           n, i;
  PetscScalar        sum;
  PetscBool          usecprow = a->compressedrow.use;

#if defined(PETSC_HAVE_PRAGMA_DISJOINT)
#pragma disjoint(*x, *y, *aa)
#endif

  PetscFunctionBegin;
  if (a->inode.use && a->inode.checked) {
    PetscCall(MatMult_SeqAIJ_Inode(A, xx, yy));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(MatSeqAIJGetArrayRead(A, &a_a));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(VecGetArray(yy, &y));
  ii = a->i;
  if (usecprow) { /* use compressed row format */
    /* rows skipped by the compressed format must still produce 0 in y */
    PetscCall(PetscArrayzero(y, m));
    m    = a->compressedrow.nrows;
    ii   = a->compressedrow.i;
    ridx = a->compressedrow.rindex;
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = 0.0;
      PetscSparseDensePlusDot(sum, x, aa, aj, n);
      /* for (j=0; j<n; j++) sum += (*aa++)*x[*aj++]; */
      y[*ridx++] = sum;
    }
  } else { /* do not use compressed row format */
#if defined(PETSC_USE_FORTRAN_KERNEL_MULTAIJ)
    aj = a->j;
    aa = a_a;
    fortranmultaij_(&m, x, ii, aj, aa, y);
#else
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = 0.0;
      PetscSparseDensePlusDot(sum, x, aa, aj, n);
      y[i] = sum;
    }
#endif
  }
  PetscCall(PetscLogFlops(2.0 * a->nz - a->nonzerorowcnt));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecRestoreArray(yy, &y));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &a_a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Like MatMult but combines the products within each row with
   PetscSparseDenseMaxDot instead of summation.
*/
PetscErrorCode MatMultMax_SeqAIJ(Mat A, Vec xx, Vec yy)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  PetscScalar       *y;
  const PetscScalar *x;
  const MatScalar   *aa, *a_a;
  PetscInt           m = A->rmap->n;
  const PetscInt    *aj, *ii, *ridx = NULL;
  PetscInt           n, i, nonzerorow = 0;
  PetscScalar        sum;
  PetscBool          usecprow = a->compressedrow.use;

#if defined(PETSC_HAVE_PRAGMA_DISJOINT)
#pragma disjoint(*x, *y, *aa)
#endif

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &a_a));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(VecGetArray(yy, &y));
  if (usecprow) { /* use compressed row format */
    m    = a->compressedrow.nrows;
    ii   = a->compressedrow.i;
    ridx = a->compressedrow.rindex;
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = 0.0;
      nonzerorow += (n > 0);
      PetscSparseDenseMaxDot(sum, x, aa, aj, n);
      /* for (j=0; j<n; j++) sum += (*aa++)*x[*aj++]; */
      y[*ridx++] = sum;
    }
  } else { /* do not use compressed row format */
    ii = a->i;
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = 0.0;
      nonzerorow += (n > 0);
      PetscSparseDenseMaxDot(sum, x, aa, aj, n);
      y[i] = sum;
    }
  }
  PetscCall(PetscLogFlops(2.0 * a->nz - nonzerorow));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecRestoreArray(yy, &y));
PetscCall(MatSeqAIJRestoreArrayRead(A, &a_a)); 1570 PetscFunctionReturn(PETSC_SUCCESS); 1571 } 1572 1573 PetscErrorCode MatMultAddMax_SeqAIJ(Mat A, Vec xx, Vec yy, Vec zz) 1574 { 1575 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 1576 PetscScalar *y, *z; 1577 const PetscScalar *x; 1578 const MatScalar *aa, *a_a; 1579 PetscInt m = A->rmap->n, *aj, *ii; 1580 PetscInt n, i, *ridx = NULL; 1581 PetscScalar sum; 1582 PetscBool usecprow = a->compressedrow.use; 1583 1584 PetscFunctionBegin; 1585 PetscCall(MatSeqAIJGetArrayRead(A, &a_a)); 1586 PetscCall(VecGetArrayRead(xx, &x)); 1587 PetscCall(VecGetArrayPair(yy, zz, &y, &z)); 1588 if (usecprow) { /* use compressed row format */ 1589 if (zz != yy) PetscCall(PetscArraycpy(z, y, m)); 1590 m = a->compressedrow.nrows; 1591 ii = a->compressedrow.i; 1592 ridx = a->compressedrow.rindex; 1593 for (i = 0; i < m; i++) { 1594 n = ii[i + 1] - ii[i]; 1595 aj = a->j + ii[i]; 1596 aa = a_a + ii[i]; 1597 sum = y[*ridx]; 1598 PetscSparseDenseMaxDot(sum, x, aa, aj, n); 1599 z[*ridx++] = sum; 1600 } 1601 } else { /* do not use compressed row format */ 1602 ii = a->i; 1603 for (i = 0; i < m; i++) { 1604 n = ii[i + 1] - ii[i]; 1605 aj = a->j + ii[i]; 1606 aa = a_a + ii[i]; 1607 sum = y[i]; 1608 PetscSparseDenseMaxDot(sum, x, aa, aj, n); 1609 z[i] = sum; 1610 } 1611 } 1612 PetscCall(PetscLogFlops(2.0 * a->nz)); 1613 PetscCall(VecRestoreArrayRead(xx, &x)); 1614 PetscCall(VecRestoreArrayPair(yy, zz, &y, &z)); 1615 PetscCall(MatSeqAIJRestoreArrayRead(A, &a_a)); 1616 PetscFunctionReturn(PETSC_SUCCESS); 1617 } 1618 1619 #include <../src/mat/impls/aij/seq/ftn-kernels/fmultadd.h> 1620 PetscErrorCode MatMultAdd_SeqAIJ(Mat A, Vec xx, Vec yy, Vec zz) 1621 { 1622 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 1623 PetscScalar *y, *z; 1624 const PetscScalar *x; 1625 const MatScalar *aa, *a_a; 1626 const PetscInt *aj, *ii, *ridx = NULL; 1627 PetscInt m = A->rmap->n, n, i; 1628 PetscScalar sum; 1629 PetscBool usecprow = a->compressedrow.use; 1630 1631 PetscFunctionBegin; 
1632 if (a->inode.use && a->inode.checked) { 1633 PetscCall(MatMultAdd_SeqAIJ_Inode(A, xx, yy, zz)); 1634 PetscFunctionReturn(PETSC_SUCCESS); 1635 } 1636 PetscCall(MatSeqAIJGetArrayRead(A, &a_a)); 1637 PetscCall(VecGetArrayRead(xx, &x)); 1638 PetscCall(VecGetArrayPair(yy, zz, &y, &z)); 1639 if (usecprow) { /* use compressed row format */ 1640 if (zz != yy) PetscCall(PetscArraycpy(z, y, m)); 1641 m = a->compressedrow.nrows; 1642 ii = a->compressedrow.i; 1643 ridx = a->compressedrow.rindex; 1644 for (i = 0; i < m; i++) { 1645 n = ii[i + 1] - ii[i]; 1646 aj = a->j + ii[i]; 1647 aa = a_a + ii[i]; 1648 sum = y[*ridx]; 1649 PetscSparseDensePlusDot(sum, x, aa, aj, n); 1650 z[*ridx++] = sum; 1651 } 1652 } else { /* do not use compressed row format */ 1653 ii = a->i; 1654 #if defined(PETSC_USE_FORTRAN_KERNEL_MULTADDAIJ) 1655 aj = a->j; 1656 aa = a_a; 1657 fortranmultaddaij_(&m, x, ii, aj, aa, y, z); 1658 #else 1659 for (i = 0; i < m; i++) { 1660 n = ii[i + 1] - ii[i]; 1661 aj = a->j + ii[i]; 1662 aa = a_a + ii[i]; 1663 sum = y[i]; 1664 PetscSparseDensePlusDot(sum, x, aa, aj, n); 1665 z[i] = sum; 1666 } 1667 #endif 1668 } 1669 PetscCall(PetscLogFlops(2.0 * a->nz)); 1670 PetscCall(VecRestoreArrayRead(xx, &x)); 1671 PetscCall(VecRestoreArrayPair(yy, zz, &y, &z)); 1672 PetscCall(MatSeqAIJRestoreArrayRead(A, &a_a)); 1673 PetscFunctionReturn(PETSC_SUCCESS); 1674 } 1675 1676 /* 1677 Adds diagonal pointers to sparse matrix structure. 
*/
PetscErrorCode MatMarkDiagonal_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, j, m = A->rmap->n;
  PetscBool   alreadySet = PETSC_TRUE;

  PetscFunctionBegin;
  if (!a->diag) {
    PetscCall(PetscMalloc1(m, &a->diag));
    alreadySet = PETSC_FALSE;
  }
  for (i = 0; i < A->rmap->n; i++) {
    /* If A's diagonal is already correctly set, this fast track enables cheap and repeated MatMarkDiagonal_SeqAIJ() calls */
    if (alreadySet) {
      PetscInt pos = a->diag[i];
      if (pos >= a->i[i] && pos < a->i[i + 1] && a->j[pos] == i) continue;
    }

    /* diag[i] == a->i[i+1] is the sentinel meaning "row i has no diagonal entry" */
    a->diag[i] = a->i[i + 1];
    for (j = a->i[i]; j < a->i[i + 1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Computes A = A + v*I.  When every diagonal entry is already stored the
   shift is applied in place via MatShift_Basic(); otherwise the matrix is
   reallocated with room for the missing diagonal entries and the old values
   are copied over before the shift is added.
*/
PetscErrorCode MatShift_SeqAIJ(Mat A, PetscScalar v)
{
  Mat_SeqAIJ     *a    = (Mat_SeqAIJ *)A->data;
  const PetscInt *diag = (const PetscInt *)a->diag;
  const PetscInt *ii   = (const PetscInt *)a->i;
  PetscInt        i, *mdiag = NULL;
  PetscInt        cnt = 0; /* how many diagonals are missing */

  PetscFunctionBegin;
  if (!A->preallocated || !a->nz) {
    /* empty matrix: preallocate one entry per row and let the basic path insert the diagonal */
    PetscCall(MatSeqAIJSetPreallocation(A, 1, NULL));
    PetscCall(MatShift_Basic(A, v));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if (a->diagonaldense) {
    cnt = 0;
  } else {
    PetscCall(PetscCalloc1(A->rmap->n, &mdiag));
    for (i = 0; i < A->rmap->n; i++) {
      if (i < A->cmap->n && diag[i] >= ii[i + 1]) { /* 'out of range' rows never have diagonals */
        cnt++;
        mdiag[i] = 1; /* mark row i as needing a new diagonal slot */
      }
    }
  }
  if (!cnt) {
    PetscCall(MatShift_Basic(A, v));
  } else {
    PetscScalar *olda = a->a; /* preserve pointers to current matrix nonzeros structure and values */
    PetscInt    *oldj = a->j, *oldi = a->i;
    PetscBool    singlemalloc = a->singlemalloc, free_a = a->free_a, free_ij = a->free_ij;

    a->a = NULL;
    a->j = NULL;
    a->i = NULL;
    /* increase the values in imax for each row where a diagonal is being inserted then reallocate the matrix data structures */
    for (i = 0; i < PetscMin(A->rmap->n, A->cmap->n); i++) a->imax[i] += mdiag[i];
    PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(A, 0, a->imax));

    /* copy old values into new matrix data structure */
    for (i = 0; i < A->rmap->n; i++) {
      PetscCall(MatSetValues(A, 1, &i, a->imax[i] - mdiag[i], &oldj[oldi[i]], &olda[oldi[i]], ADD_VALUES));
      if (i < A->cmap->n) PetscCall(MatSetValue(A, i, i, v, ADD_VALUES));
    }
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    /* free the old storage according to how it was originally allocated */
    if (singlemalloc) {
      PetscCall(PetscFree3(olda, oldj, oldi));
    } else {
      if (free_a) PetscCall(PetscFree(olda));
      if (free_ij) PetscCall(PetscFree(oldj));
      if (free_ij) PetscCall(PetscFree(oldi));
    }
  }
  PetscCall(PetscFree(mdiag));
  a->diagonaldense = PETSC_TRUE; /* every diagonal entry is now present */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
     Checks for missing diagonals
*/
PetscErrorCode MatMissingDiagonal_SeqAIJ(Mat A, PetscBool *missing, PetscInt *d)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt   *diag, *ii = a->i, i;

  PetscFunctionBegin;
  *missing = PETSC_FALSE;
  if (A->rmap->n > 0 && !ii) {
    /* no row pointers at all: the matrix has no entries */
    *missing = PETSC_TRUE;
    if (d) *d = 0;
    PetscCall(PetscInfo(A, "Matrix has no entries therefore is missing diagonal\n"));
  } else {
    PetscInt n;
    n    = PetscMin(A->rmap->n, A->cmap->n);
    diag = a->diag;
    for (i = 0; i < n; i++) {
      /* diag[i] >= ii[i+1] is the sentinel set by MatMarkDiagonal_SeqAIJ for an absent diagonal */
      if (diag[i] >= ii[i + 1]) {
        *missing = PETSC_TRUE;
        if (d) *d = i;
        PetscCall(PetscInfo(A, "Matrix is missing diagonal number %" PetscInt_FMT "\n", i));
        break;
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petscblaslapack.h>
#include <petsc/private/kernels/blockinvert.h>

/*
Note that values is allocated externally by the PC and then passed into this routine 1803 */ 1804 PetscErrorCode MatInvertVariableBlockDiagonal_SeqAIJ(Mat A, PetscInt nblocks, const PetscInt *bsizes, PetscScalar *diag) 1805 { 1806 PetscInt n = A->rmap->n, i, ncnt = 0, *indx, j, bsizemax = 0, *v_pivots; 1807 PetscBool allowzeropivot, zeropivotdetected = PETSC_FALSE; 1808 const PetscReal shift = 0.0; 1809 PetscInt ipvt[5]; 1810 PetscCount flops = 0; 1811 PetscScalar work[25], *v_work; 1812 1813 PetscFunctionBegin; 1814 allowzeropivot = PetscNot(A->erroriffailure); 1815 for (i = 0; i < nblocks; i++) ncnt += bsizes[i]; 1816 PetscCheck(ncnt == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Total blocksizes %" PetscInt_FMT " doesn't match number matrix rows %" PetscInt_FMT, ncnt, n); 1817 for (i = 0; i < nblocks; i++) bsizemax = PetscMax(bsizemax, bsizes[i]); 1818 PetscCall(PetscMalloc1(bsizemax, &indx)); 1819 if (bsizemax > 7) PetscCall(PetscMalloc2(bsizemax, &v_work, bsizemax, &v_pivots)); 1820 ncnt = 0; 1821 for (i = 0; i < nblocks; i++) { 1822 for (j = 0; j < bsizes[i]; j++) indx[j] = ncnt + j; 1823 PetscCall(MatGetValues(A, bsizes[i], indx, bsizes[i], indx, diag)); 1824 switch (bsizes[i]) { 1825 case 1: 1826 *diag = 1.0 / (*diag); 1827 break; 1828 case 2: 1829 PetscCall(PetscKernel_A_gets_inverse_A_2(diag, shift, allowzeropivot, &zeropivotdetected)); 1830 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1831 PetscCall(PetscKernel_A_gets_transpose_A_2(diag)); 1832 break; 1833 case 3: 1834 PetscCall(PetscKernel_A_gets_inverse_A_3(diag, shift, allowzeropivot, &zeropivotdetected)); 1835 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1836 PetscCall(PetscKernel_A_gets_transpose_A_3(diag)); 1837 break; 1838 case 4: 1839 PetscCall(PetscKernel_A_gets_inverse_A_4(diag, shift, allowzeropivot, &zeropivotdetected)); 1840 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1841 
PetscCall(PetscKernel_A_gets_transpose_A_4(diag)); 1842 break; 1843 case 5: 1844 PetscCall(PetscKernel_A_gets_inverse_A_5(diag, ipvt, work, shift, allowzeropivot, &zeropivotdetected)); 1845 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1846 PetscCall(PetscKernel_A_gets_transpose_A_5(diag)); 1847 break; 1848 case 6: 1849 PetscCall(PetscKernel_A_gets_inverse_A_6(diag, shift, allowzeropivot, &zeropivotdetected)); 1850 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1851 PetscCall(PetscKernel_A_gets_transpose_A_6(diag)); 1852 break; 1853 case 7: 1854 PetscCall(PetscKernel_A_gets_inverse_A_7(diag, shift, allowzeropivot, &zeropivotdetected)); 1855 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1856 PetscCall(PetscKernel_A_gets_transpose_A_7(diag)); 1857 break; 1858 default: 1859 PetscCall(PetscKernel_A_gets_inverse_A(bsizes[i], diag, v_pivots, v_work, allowzeropivot, &zeropivotdetected)); 1860 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1861 PetscCall(PetscKernel_A_gets_transpose_A_N(diag, bsizes[i])); 1862 } 1863 ncnt += bsizes[i]; 1864 diag += bsizes[i] * bsizes[i]; 1865 flops += 2 * PetscPowInt(bsizes[i], 3) / 3; 1866 } 1867 PetscCall(PetscLogFlops(flops)); 1868 if (bsizemax > 7) PetscCall(PetscFree2(v_work, v_pivots)); 1869 PetscCall(PetscFree(indx)); 1870 PetscFunctionReturn(PETSC_SUCCESS); 1871 } 1872 1873 /* 1874 Negative shift indicates do not generate an error if there is a zero diagonal, just invert it anyways 1875 */ 1876 PetscErrorCode MatInvertDiagonal_SeqAIJ(Mat A, PetscScalar omega, PetscScalar fshift) 1877 { 1878 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 1879 PetscInt i, *diag, m = A->rmap->n; 1880 const MatScalar *v; 1881 PetscScalar *idiag, *mdiag; 1882 1883 PetscFunctionBegin; 1884 if (a->idiagvalid) PetscFunctionReturn(PETSC_SUCCESS); 1885 PetscCall(MatMarkDiagonal_SeqAIJ(A)); 1886 diag = a->diag; 1887 if (!a->idiag) { PetscCall(PetscMalloc3(m, 
&a->idiag, m, &a->mdiag, m, &a->ssor_work)); } 1888 1889 mdiag = a->mdiag; 1890 idiag = a->idiag; 1891 PetscCall(MatSeqAIJGetArrayRead(A, &v)); 1892 if (omega == 1.0 && PetscRealPart(fshift) <= 0.0) { 1893 for (i = 0; i < m; i++) { 1894 mdiag[i] = v[diag[i]]; 1895 if (!PetscAbsScalar(mdiag[i])) { /* zero diagonal */ 1896 if (PetscRealPart(fshift)) { 1897 PetscCall(PetscInfo(A, "Zero diagonal on row %" PetscInt_FMT "\n", i)); 1898 A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1899 A->factorerror_zeropivot_value = 0.0; 1900 A->factorerror_zeropivot_row = i; 1901 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Zero diagonal on row %" PetscInt_FMT, i); 1902 } 1903 idiag[i] = 1.0 / v[diag[i]]; 1904 } 1905 PetscCall(PetscLogFlops(m)); 1906 } else { 1907 for (i = 0; i < m; i++) { 1908 mdiag[i] = v[diag[i]]; 1909 idiag[i] = omega / (fshift + v[diag[i]]); 1910 } 1911 PetscCall(PetscLogFlops(2.0 * m)); 1912 } 1913 a->idiagvalid = PETSC_TRUE; 1914 PetscCall(MatSeqAIJRestoreArrayRead(A, &v)); 1915 PetscFunctionReturn(PETSC_SUCCESS); 1916 } 1917 1918 #include <../src/mat/impls/aij/seq/ftn-kernels/frelax.h> 1919 PetscErrorCode MatSOR_SeqAIJ(Mat A, Vec bb, PetscReal omega, MatSORType flag, PetscReal fshift, PetscInt its, PetscInt lits, Vec xx) 1920 { 1921 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 1922 PetscScalar *x, d, sum, *t, scale; 1923 const MatScalar *v, *idiag = NULL, *mdiag, *aa; 1924 const PetscScalar *b, *bs, *xb, *ts; 1925 PetscInt n, m = A->rmap->n, i; 1926 const PetscInt *idx, *diag; 1927 1928 PetscFunctionBegin; 1929 if (a->inode.use && a->inode.checked && omega == 1.0 && fshift == 0.0) { 1930 PetscCall(MatSOR_SeqAIJ_Inode(A, bb, omega, flag, fshift, its, lits, xx)); 1931 PetscFunctionReturn(PETSC_SUCCESS); 1932 } 1933 its = its * lits; 1934 1935 if (fshift != a->fshift || omega != a->omega) a->idiagvalid = PETSC_FALSE; /* must recompute idiag[] */ 1936 if (!a->idiagvalid) PetscCall(MatInvertDiagonal_SeqAIJ(A, omega, fshift)); 1937 a->fshift = fshift; 1938 
a->omega = omega; 1939 1940 diag = a->diag; 1941 t = a->ssor_work; 1942 idiag = a->idiag; 1943 mdiag = a->mdiag; 1944 1945 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 1946 PetscCall(VecGetArray(xx, &x)); 1947 PetscCall(VecGetArrayRead(bb, &b)); 1948 /* We count flops by assuming the upper triangular and lower triangular parts have the same number of nonzeros */ 1949 if (flag == SOR_APPLY_UPPER) { 1950 /* apply (U + D/omega) to the vector */ 1951 bs = b; 1952 for (i = 0; i < m; i++) { 1953 d = fshift + mdiag[i]; 1954 n = a->i[i + 1] - diag[i] - 1; 1955 idx = a->j + diag[i] + 1; 1956 v = aa + diag[i] + 1; 1957 sum = b[i] * d / omega; 1958 PetscSparseDensePlusDot(sum, bs, v, idx, n); 1959 x[i] = sum; 1960 } 1961 PetscCall(VecRestoreArray(xx, &x)); 1962 PetscCall(VecRestoreArrayRead(bb, &b)); 1963 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 1964 PetscCall(PetscLogFlops(a->nz)); 1965 PetscFunctionReturn(PETSC_SUCCESS); 1966 } 1967 1968 PetscCheck(flag != SOR_APPLY_LOWER, PETSC_COMM_SELF, PETSC_ERR_SUP, "SOR_APPLY_LOWER is not implemented"); 1969 if (flag & SOR_EISENSTAT) { 1970 /* Let A = L + U + D; where L is lower triangular, 1971 U is upper triangular, E = D/omega; This routine applies 1972 1973 (L + E)^{-1} A (U + E)^{-1} 1974 1975 to a vector efficiently using Eisenstat's trick. 
1976 */ 1977 scale = (2.0 / omega) - 1.0; 1978 1979 /* x = (E + U)^{-1} b */ 1980 for (i = m - 1; i >= 0; i--) { 1981 n = a->i[i + 1] - diag[i] - 1; 1982 idx = a->j + diag[i] + 1; 1983 v = aa + diag[i] + 1; 1984 sum = b[i]; 1985 PetscSparseDenseMinusDot(sum, x, v, idx, n); 1986 x[i] = sum * idiag[i]; 1987 } 1988 1989 /* t = b - (2*E - D)x */ 1990 v = aa; 1991 for (i = 0; i < m; i++) t[i] = b[i] - scale * (v[*diag++]) * x[i]; 1992 1993 /* t = (E + L)^{-1}t */ 1994 ts = t; 1995 diag = a->diag; 1996 for (i = 0; i < m; i++) { 1997 n = diag[i] - a->i[i]; 1998 idx = a->j + a->i[i]; 1999 v = aa + a->i[i]; 2000 sum = t[i]; 2001 PetscSparseDenseMinusDot(sum, ts, v, idx, n); 2002 t[i] = sum * idiag[i]; 2003 /* x = x + t */ 2004 x[i] += t[i]; 2005 } 2006 2007 PetscCall(PetscLogFlops(6.0 * m - 1 + 2.0 * a->nz)); 2008 PetscCall(VecRestoreArray(xx, &x)); 2009 PetscCall(VecRestoreArrayRead(bb, &b)); 2010 PetscFunctionReturn(PETSC_SUCCESS); 2011 } 2012 if (flag & SOR_ZERO_INITIAL_GUESS) { 2013 if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) { 2014 for (i = 0; i < m; i++) { 2015 n = diag[i] - a->i[i]; 2016 idx = a->j + a->i[i]; 2017 v = aa + a->i[i]; 2018 sum = b[i]; 2019 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2020 t[i] = sum; 2021 x[i] = sum * idiag[i]; 2022 } 2023 xb = t; 2024 PetscCall(PetscLogFlops(a->nz)); 2025 } else xb = b; 2026 if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) { 2027 for (i = m - 1; i >= 0; i--) { 2028 n = a->i[i + 1] - diag[i] - 1; 2029 idx = a->j + diag[i] + 1; 2030 v = aa + diag[i] + 1; 2031 sum = xb[i]; 2032 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2033 if (xb == b) { 2034 x[i] = sum * idiag[i]; 2035 } else { 2036 x[i] = (1 - omega) * x[i] + sum * idiag[i]; /* omega in idiag */ 2037 } 2038 } 2039 PetscCall(PetscLogFlops(a->nz)); /* assumes 1/2 in upper */ 2040 } 2041 its--; 2042 } 2043 while (its--) { 2044 if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) { 2045 for (i = 0; i < m; i++) { 2046 /* 
lower */ 2047 n = diag[i] - a->i[i]; 2048 idx = a->j + a->i[i]; 2049 v = aa + a->i[i]; 2050 sum = b[i]; 2051 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2052 t[i] = sum; /* save application of the lower-triangular part */ 2053 /* upper */ 2054 n = a->i[i + 1] - diag[i] - 1; 2055 idx = a->j + diag[i] + 1; 2056 v = aa + diag[i] + 1; 2057 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2058 x[i] = (1. - omega) * x[i] + sum * idiag[i]; /* omega in idiag */ 2059 } 2060 xb = t; 2061 PetscCall(PetscLogFlops(2.0 * a->nz)); 2062 } else xb = b; 2063 if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) { 2064 for (i = m - 1; i >= 0; i--) { 2065 sum = xb[i]; 2066 if (xb == b) { 2067 /* whole matrix (no checkpointing available) */ 2068 n = a->i[i + 1] - a->i[i]; 2069 idx = a->j + a->i[i]; 2070 v = aa + a->i[i]; 2071 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2072 x[i] = (1. - omega) * x[i] + (sum + mdiag[i] * x[i]) * idiag[i]; 2073 } else { /* lower-triangular part has been saved, so only apply upper-triangular */ 2074 n = a->i[i + 1] - diag[i] - 1; 2075 idx = a->j + diag[i] + 1; 2076 v = aa + diag[i] + 1; 2077 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2078 x[i] = (1. 
- omega) * x[i] + sum * idiag[i]; /* omega in idiag */ 2079 } 2080 } 2081 if (xb == b) { 2082 PetscCall(PetscLogFlops(2.0 * a->nz)); 2083 } else { 2084 PetscCall(PetscLogFlops(a->nz)); /* assumes 1/2 in upper */ 2085 } 2086 } 2087 } 2088 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 2089 PetscCall(VecRestoreArray(xx, &x)); 2090 PetscCall(VecRestoreArrayRead(bb, &b)); 2091 PetscFunctionReturn(PETSC_SUCCESS); 2092 } 2093 2094 PetscErrorCode MatGetInfo_SeqAIJ(Mat A, MatInfoType flag, MatInfo *info) 2095 { 2096 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2097 2098 PetscFunctionBegin; 2099 info->block_size = 1.0; 2100 info->nz_allocated = a->maxnz; 2101 info->nz_used = a->nz; 2102 info->nz_unneeded = (a->maxnz - a->nz); 2103 info->assemblies = A->num_ass; 2104 info->mallocs = A->info.mallocs; 2105 info->memory = 0; /* REVIEW ME */ 2106 if (A->factortype) { 2107 info->fill_ratio_given = A->info.fill_ratio_given; 2108 info->fill_ratio_needed = A->info.fill_ratio_needed; 2109 info->factor_mallocs = A->info.factor_mallocs; 2110 } else { 2111 info->fill_ratio_given = 0; 2112 info->fill_ratio_needed = 0; 2113 info->factor_mallocs = 0; 2114 } 2115 PetscFunctionReturn(PETSC_SUCCESS); 2116 } 2117 2118 PetscErrorCode MatZeroRows_SeqAIJ(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b) 2119 { 2120 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2121 PetscInt i, m = A->rmap->n - 1; 2122 const PetscScalar *xx; 2123 PetscScalar *bb, *aa; 2124 PetscInt d = 0; 2125 2126 PetscFunctionBegin; 2127 if (x && b) { 2128 PetscCall(VecGetArrayRead(x, &xx)); 2129 PetscCall(VecGetArray(b, &bb)); 2130 for (i = 0; i < N; i++) { 2131 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2132 if (rows[i] >= A->cmap->n) continue; 2133 bb[rows[i]] = diag * xx[rows[i]]; 2134 } 2135 PetscCall(VecRestoreArrayRead(x, &xx)); 2136 PetscCall(VecRestoreArray(b, &bb)); 2137 } 2138 2139 PetscCall(MatSeqAIJGetArray(A, &aa)); 
2140 if (a->keepnonzeropattern) { 2141 for (i = 0; i < N; i++) { 2142 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2143 PetscCall(PetscArrayzero(&aa[a->i[rows[i]]], a->ilen[rows[i]])); 2144 } 2145 if (diag != 0.0) { 2146 for (i = 0; i < N; i++) { 2147 d = rows[i]; 2148 if (rows[i] >= A->cmap->n) continue; 2149 PetscCheck(a->diag[d] < a->i[d + 1], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is missing diagonal entry in the zeroed row %" PetscInt_FMT, d); 2150 } 2151 for (i = 0; i < N; i++) { 2152 if (rows[i] >= A->cmap->n) continue; 2153 aa[a->diag[rows[i]]] = diag; 2154 } 2155 } 2156 } else { 2157 if (diag != 0.0) { 2158 for (i = 0; i < N; i++) { 2159 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2160 if (a->ilen[rows[i]] > 0) { 2161 if (rows[i] >= A->cmap->n) { 2162 a->ilen[rows[i]] = 0; 2163 } else { 2164 a->ilen[rows[i]] = 1; 2165 aa[a->i[rows[i]]] = diag; 2166 a->j[a->i[rows[i]]] = rows[i]; 2167 } 2168 } else if (rows[i] < A->cmap->n) { /* in case row was completely empty */ 2169 PetscCall(MatSetValues_SeqAIJ(A, 1, &rows[i], 1, &rows[i], &diag, INSERT_VALUES)); 2170 } 2171 } 2172 } else { 2173 for (i = 0; i < N; i++) { 2174 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2175 a->ilen[rows[i]] = 0; 2176 } 2177 } 2178 A->nonzerostate++; 2179 } 2180 PetscCall(MatSeqAIJRestoreArray(A, &aa)); 2181 PetscUseTypeMethod(A, assemblyend, MAT_FINAL_ASSEMBLY); 2182 PetscFunctionReturn(PETSC_SUCCESS); 2183 } 2184 2185 PetscErrorCode MatZeroRowsColumns_SeqAIJ(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b) 2186 { 2187 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2188 PetscInt i, j, m = A->rmap->n - 1, d = 0; 2189 PetscBool missing, *zeroed, vecs = PETSC_FALSE; 2190 const PetscScalar *xx; 2191 
PetscScalar *bb, *aa; 2192 2193 PetscFunctionBegin; 2194 if (!N) PetscFunctionReturn(PETSC_SUCCESS); 2195 PetscCall(MatSeqAIJGetArray(A, &aa)); 2196 if (x && b) { 2197 PetscCall(VecGetArrayRead(x, &xx)); 2198 PetscCall(VecGetArray(b, &bb)); 2199 vecs = PETSC_TRUE; 2200 } 2201 PetscCall(PetscCalloc1(A->rmap->n, &zeroed)); 2202 for (i = 0; i < N; i++) { 2203 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2204 PetscCall(PetscArrayzero(&aa[a->i[rows[i]]], a->ilen[rows[i]])); 2205 2206 zeroed[rows[i]] = PETSC_TRUE; 2207 } 2208 for (i = 0; i < A->rmap->n; i++) { 2209 if (!zeroed[i]) { 2210 for (j = a->i[i]; j < a->i[i + 1]; j++) { 2211 if (a->j[j] < A->rmap->n && zeroed[a->j[j]]) { 2212 if (vecs) bb[i] -= aa[j] * xx[a->j[j]]; 2213 aa[j] = 0.0; 2214 } 2215 } 2216 } else if (vecs && i < A->cmap->N) bb[i] = diag * xx[i]; 2217 } 2218 if (x && b) { 2219 PetscCall(VecRestoreArrayRead(x, &xx)); 2220 PetscCall(VecRestoreArray(b, &bb)); 2221 } 2222 PetscCall(PetscFree(zeroed)); 2223 if (diag != 0.0) { 2224 PetscCall(MatMissingDiagonal_SeqAIJ(A, &missing, &d)); 2225 if (missing) { 2226 for (i = 0; i < N; i++) { 2227 if (rows[i] >= A->cmap->N) continue; 2228 PetscCheck(!a->nonew || rows[i] < d, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is missing diagonal entry in row %" PetscInt_FMT " (%" PetscInt_FMT ")", d, rows[i]); 2229 PetscCall(MatSetValues_SeqAIJ(A, 1, &rows[i], 1, &rows[i], &diag, INSERT_VALUES)); 2230 } 2231 } else { 2232 for (i = 0; i < N; i++) aa[a->diag[rows[i]]] = diag; 2233 } 2234 } 2235 PetscCall(MatSeqAIJRestoreArray(A, &aa)); 2236 PetscUseTypeMethod(A, assemblyend, MAT_FINAL_ASSEMBLY); 2237 PetscFunctionReturn(PETSC_SUCCESS); 2238 } 2239 2240 PetscErrorCode MatGetRow_SeqAIJ(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v) 2241 { 2242 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2243 const PetscScalar *aa; 2244 PetscInt *itmp; 2245 2246 PetscFunctionBegin; 
2247 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 2248 *nz = a->i[row + 1] - a->i[row]; 2249 if (v) *v = (PetscScalar *)(aa + a->i[row]); 2250 if (idx) { 2251 itmp = a->j + a->i[row]; 2252 if (*nz) *idx = itmp; 2253 else *idx = NULL; 2254 } 2255 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 2256 PetscFunctionReturn(PETSC_SUCCESS); 2257 } 2258 2259 PetscErrorCode MatRestoreRow_SeqAIJ(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v) 2260 { 2261 PetscFunctionBegin; 2262 if (nz) *nz = 0; 2263 if (idx) *idx = NULL; 2264 if (v) *v = NULL; 2265 PetscFunctionReturn(PETSC_SUCCESS); 2266 } 2267 2268 PetscErrorCode MatNorm_SeqAIJ(Mat A, NormType type, PetscReal *nrm) 2269 { 2270 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2271 const MatScalar *v; 2272 PetscReal sum = 0.0; 2273 PetscInt i, j; 2274 2275 PetscFunctionBegin; 2276 PetscCall(MatSeqAIJGetArrayRead(A, &v)); 2277 if (type == NORM_FROBENIUS) { 2278 #if defined(PETSC_USE_REAL___FP16) 2279 PetscBLASInt one = 1, nz = a->nz; 2280 PetscCallBLAS("BLASnrm2", *nrm = BLASnrm2_(&nz, v, &one)); 2281 #else 2282 for (i = 0; i < a->nz; i++) { 2283 sum += PetscRealPart(PetscConj(*v) * (*v)); 2284 v++; 2285 } 2286 *nrm = PetscSqrtReal(sum); 2287 #endif 2288 PetscCall(PetscLogFlops(2.0 * a->nz)); 2289 } else if (type == NORM_1) { 2290 PetscReal *tmp; 2291 PetscInt *jj = a->j; 2292 PetscCall(PetscCalloc1(A->cmap->n + 1, &tmp)); 2293 *nrm = 0.0; 2294 for (j = 0; j < a->nz; j++) { 2295 tmp[*jj++] += PetscAbsScalar(*v); 2296 v++; 2297 } 2298 for (j = 0; j < A->cmap->n; j++) { 2299 if (tmp[j] > *nrm) *nrm = tmp[j]; 2300 } 2301 PetscCall(PetscFree(tmp)); 2302 PetscCall(PetscLogFlops(PetscMax(a->nz - 1, 0))); 2303 } else if (type == NORM_INFINITY) { 2304 *nrm = 0.0; 2305 for (j = 0; j < A->rmap->n; j++) { 2306 const PetscScalar *v2 = v + a->i[j]; 2307 sum = 0.0; 2308 for (i = 0; i < a->i[j + 1] - a->i[j]; i++) { 2309 sum += PetscAbsScalar(*v2); 2310 v2++; 2311 } 2312 if (sum > *nrm) *nrm = sum; 2313 } 2314 
PetscCall(PetscLogFlops(PetscMax(a->nz - 1, 0))); 2315 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for two norm"); 2316 PetscCall(MatSeqAIJRestoreArrayRead(A, &v)); 2317 PetscFunctionReturn(PETSC_SUCCESS); 2318 } 2319 2320 PetscErrorCode MatIsTranspose_SeqAIJ(Mat A, Mat B, PetscReal tol, PetscBool *f) 2321 { 2322 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data, *bij = (Mat_SeqAIJ *)B->data; 2323 PetscInt *adx, *bdx, *aii, *bii, *aptr, *bptr; 2324 const MatScalar *va, *vb; 2325 PetscInt ma, na, mb, nb, i; 2326 2327 PetscFunctionBegin; 2328 PetscCall(MatGetSize(A, &ma, &na)); 2329 PetscCall(MatGetSize(B, &mb, &nb)); 2330 if (ma != nb || na != mb) { 2331 *f = PETSC_FALSE; 2332 PetscFunctionReturn(PETSC_SUCCESS); 2333 } 2334 PetscCall(MatSeqAIJGetArrayRead(A, &va)); 2335 PetscCall(MatSeqAIJGetArrayRead(B, &vb)); 2336 aii = aij->i; 2337 bii = bij->i; 2338 adx = aij->j; 2339 bdx = bij->j; 2340 PetscCall(PetscMalloc1(ma, &aptr)); 2341 PetscCall(PetscMalloc1(mb, &bptr)); 2342 for (i = 0; i < ma; i++) aptr[i] = aii[i]; 2343 for (i = 0; i < mb; i++) bptr[i] = bii[i]; 2344 2345 *f = PETSC_TRUE; 2346 for (i = 0; i < ma; i++) { 2347 while (aptr[i] < aii[i + 1]) { 2348 PetscInt idc, idr; 2349 PetscScalar vc, vr; 2350 /* column/row index/value */ 2351 idc = adx[aptr[i]]; 2352 idr = bdx[bptr[idc]]; 2353 vc = va[aptr[i]]; 2354 vr = vb[bptr[idc]]; 2355 if (i != idr || PetscAbsScalar(vc - vr) > tol) { 2356 *f = PETSC_FALSE; 2357 goto done; 2358 } else { 2359 aptr[i]++; 2360 if (B || i != idc) bptr[idc]++; 2361 } 2362 } 2363 } 2364 done: 2365 PetscCall(PetscFree(aptr)); 2366 PetscCall(PetscFree(bptr)); 2367 PetscCall(MatSeqAIJRestoreArrayRead(A, &va)); 2368 PetscCall(MatSeqAIJRestoreArrayRead(B, &vb)); 2369 PetscFunctionReturn(PETSC_SUCCESS); 2370 } 2371 2372 PetscErrorCode MatIsHermitianTranspose_SeqAIJ(Mat A, Mat B, PetscReal tol, PetscBool *f) 2373 { 2374 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data, *bij = (Mat_SeqAIJ *)B->data; 2375 PetscInt *adx, *bdx, *aii, *bii, *aptr, 
*bptr; 2376 MatScalar *va, *vb; 2377 PetscInt ma, na, mb, nb, i; 2378 2379 PetscFunctionBegin; 2380 PetscCall(MatGetSize(A, &ma, &na)); 2381 PetscCall(MatGetSize(B, &mb, &nb)); 2382 if (ma != nb || na != mb) { 2383 *f = PETSC_FALSE; 2384 PetscFunctionReturn(PETSC_SUCCESS); 2385 } 2386 aii = aij->i; 2387 bii = bij->i; 2388 adx = aij->j; 2389 bdx = bij->j; 2390 va = aij->a; 2391 vb = bij->a; 2392 PetscCall(PetscMalloc1(ma, &aptr)); 2393 PetscCall(PetscMalloc1(mb, &bptr)); 2394 for (i = 0; i < ma; i++) aptr[i] = aii[i]; 2395 for (i = 0; i < mb; i++) bptr[i] = bii[i]; 2396 2397 *f = PETSC_TRUE; 2398 for (i = 0; i < ma; i++) { 2399 while (aptr[i] < aii[i + 1]) { 2400 PetscInt idc, idr; 2401 PetscScalar vc, vr; 2402 /* column/row index/value */ 2403 idc = adx[aptr[i]]; 2404 idr = bdx[bptr[idc]]; 2405 vc = va[aptr[i]]; 2406 vr = vb[bptr[idc]]; 2407 if (i != idr || PetscAbsScalar(vc - PetscConj(vr)) > tol) { 2408 *f = PETSC_FALSE; 2409 goto done; 2410 } else { 2411 aptr[i]++; 2412 if (B || i != idc) bptr[idc]++; 2413 } 2414 } 2415 } 2416 done: 2417 PetscCall(PetscFree(aptr)); 2418 PetscCall(PetscFree(bptr)); 2419 PetscFunctionReturn(PETSC_SUCCESS); 2420 } 2421 2422 PetscErrorCode MatIsSymmetric_SeqAIJ(Mat A, PetscReal tol, PetscBool *f) 2423 { 2424 PetscFunctionBegin; 2425 PetscCall(MatIsTranspose_SeqAIJ(A, A, tol, f)); 2426 PetscFunctionReturn(PETSC_SUCCESS); 2427 } 2428 2429 PetscErrorCode MatIsHermitian_SeqAIJ(Mat A, PetscReal tol, PetscBool *f) 2430 { 2431 PetscFunctionBegin; 2432 PetscCall(MatIsHermitianTranspose_SeqAIJ(A, A, tol, f)); 2433 PetscFunctionReturn(PETSC_SUCCESS); 2434 } 2435 2436 PetscErrorCode MatDiagonalScale_SeqAIJ(Mat A, Vec ll, Vec rr) 2437 { 2438 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2439 const PetscScalar *l, *r; 2440 PetscScalar x; 2441 MatScalar *v; 2442 PetscInt i, j, m = A->rmap->n, n = A->cmap->n, M, nz = a->nz; 2443 const PetscInt *jj; 2444 2445 PetscFunctionBegin; 2446 if (ll) { 2447 /* The local size is used so that VecMPI can be passed to 
this routine 2448 by MatDiagonalScale_MPIAIJ */ 2449 PetscCall(VecGetLocalSize(ll, &m)); 2450 PetscCheck(m == A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Left scaling vector wrong length"); 2451 PetscCall(VecGetArrayRead(ll, &l)); 2452 PetscCall(MatSeqAIJGetArray(A, &v)); 2453 for (i = 0; i < m; i++) { 2454 x = l[i]; 2455 M = a->i[i + 1] - a->i[i]; 2456 for (j = 0; j < M; j++) (*v++) *= x; 2457 } 2458 PetscCall(VecRestoreArrayRead(ll, &l)); 2459 PetscCall(PetscLogFlops(nz)); 2460 PetscCall(MatSeqAIJRestoreArray(A, &v)); 2461 } 2462 if (rr) { 2463 PetscCall(VecGetLocalSize(rr, &n)); 2464 PetscCheck(n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Right scaling vector wrong length"); 2465 PetscCall(VecGetArrayRead(rr, &r)); 2466 PetscCall(MatSeqAIJGetArray(A, &v)); 2467 jj = a->j; 2468 for (i = 0; i < nz; i++) (*v++) *= r[*jj++]; 2469 PetscCall(MatSeqAIJRestoreArray(A, &v)); 2470 PetscCall(VecRestoreArrayRead(rr, &r)); 2471 PetscCall(PetscLogFlops(nz)); 2472 } 2473 PetscCall(MatSeqAIJInvalidateDiagonal(A)); 2474 PetscFunctionReturn(PETSC_SUCCESS); 2475 } 2476 2477 PetscErrorCode MatCreateSubMatrix_SeqAIJ(Mat A, IS isrow, IS iscol, PetscInt csize, MatReuse scall, Mat *B) 2478 { 2479 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data, *c; 2480 PetscInt *smap, i, k, kstart, kend, oldcols = A->cmap->n, *lens; 2481 PetscInt row, mat_i, *mat_j, tcol, first, step, *mat_ilen, sum, lensi; 2482 const PetscInt *irow, *icol; 2483 const PetscScalar *aa; 2484 PetscInt nrows, ncols; 2485 PetscInt *starts, *j_new, *i_new, *aj = a->j, *ai = a->i, ii, *ailen = a->ilen; 2486 MatScalar *a_new, *mat_a, *c_a; 2487 Mat C; 2488 PetscBool stride; 2489 2490 PetscFunctionBegin; 2491 PetscCall(ISGetIndices(isrow, &irow)); 2492 PetscCall(ISGetLocalSize(isrow, &nrows)); 2493 PetscCall(ISGetLocalSize(iscol, &ncols)); 2494 2495 PetscCall(PetscObjectTypeCompare((PetscObject)iscol, ISSTRIDE, &stride)); 2496 if (stride) { 2497 PetscCall(ISStrideGetInfo(iscol, &first, &step)); 2498 } else { 2499 first = 
0; 2500 step = 0; 2501 } 2502 if (stride && step == 1) { 2503 /* special case of contiguous rows */ 2504 PetscCall(PetscMalloc2(nrows, &lens, nrows, &starts)); 2505 /* loop over new rows determining lens and starting points */ 2506 for (i = 0; i < nrows; i++) { 2507 kstart = ai[irow[i]]; 2508 kend = kstart + ailen[irow[i]]; 2509 starts[i] = kstart; 2510 for (k = kstart; k < kend; k++) { 2511 if (aj[k] >= first) { 2512 starts[i] = k; 2513 break; 2514 } 2515 } 2516 sum = 0; 2517 while (k < kend) { 2518 if (aj[k++] >= first + ncols) break; 2519 sum++; 2520 } 2521 lens[i] = sum; 2522 } 2523 /* create submatrix */ 2524 if (scall == MAT_REUSE_MATRIX) { 2525 PetscInt n_cols, n_rows; 2526 PetscCall(MatGetSize(*B, &n_rows, &n_cols)); 2527 PetscCheck(n_rows == nrows && n_cols == ncols, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Reused submatrix wrong size"); 2528 PetscCall(MatZeroEntries(*B)); 2529 C = *B; 2530 } else { 2531 PetscInt rbs, cbs; 2532 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C)); 2533 PetscCall(MatSetSizes(C, nrows, ncols, PETSC_DETERMINE, PETSC_DETERMINE)); 2534 PetscCall(ISGetBlockSize(isrow, &rbs)); 2535 PetscCall(ISGetBlockSize(iscol, &cbs)); 2536 PetscCall(MatSetBlockSizes(C, rbs, cbs)); 2537 PetscCall(MatSetType(C, ((PetscObject)A)->type_name)); 2538 PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(C, 0, lens)); 2539 } 2540 c = (Mat_SeqAIJ *)C->data; 2541 2542 /* loop over rows inserting into submatrix */ 2543 PetscCall(MatSeqAIJGetArrayWrite(C, &a_new)); // Not 'a_new = c->a-new', since that raw usage ignores offload state of C 2544 j_new = c->j; 2545 i_new = c->i; 2546 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 2547 for (i = 0; i < nrows; i++) { 2548 ii = starts[i]; 2549 lensi = lens[i]; 2550 for (k = 0; k < lensi; k++) *j_new++ = aj[ii + k] - first; 2551 PetscCall(PetscArraycpy(a_new, aa + starts[i], lensi)); 2552 a_new += lensi; 2553 i_new[i + 1] = i_new[i] + lensi; 2554 c->ilen[i] = lensi; 2555 } 2556 PetscCall(MatSeqAIJRestoreArrayWrite(C, &a_new)); 
// Set C's offload state properly 2557 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 2558 PetscCall(PetscFree2(lens, starts)); 2559 } else { 2560 PetscCall(ISGetIndices(iscol, &icol)); 2561 PetscCall(PetscCalloc1(oldcols, &smap)); 2562 PetscCall(PetscMalloc1(1 + nrows, &lens)); 2563 for (i = 0; i < ncols; i++) { 2564 PetscCheck(icol[i] < oldcols, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Requesting column beyond largest column icol[%" PetscInt_FMT "] %" PetscInt_FMT " >= A->cmap->n %" PetscInt_FMT, i, icol[i], oldcols); 2565 smap[icol[i]] = i + 1; 2566 } 2567 2568 /* determine lens of each row */ 2569 for (i = 0; i < nrows; i++) { 2570 kstart = ai[irow[i]]; 2571 kend = kstart + a->ilen[irow[i]]; 2572 lens[i] = 0; 2573 for (k = kstart; k < kend; k++) { 2574 if (smap[aj[k]]) lens[i]++; 2575 } 2576 } 2577 /* Create and fill new matrix */ 2578 if (scall == MAT_REUSE_MATRIX) { 2579 PetscBool equal; 2580 2581 c = (Mat_SeqAIJ *)((*B)->data); 2582 PetscCheck((*B)->rmap->n == nrows && (*B)->cmap->n == ncols, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot reuse matrix. wrong size"); 2583 PetscCall(PetscArraycmp(c->ilen, lens, (*B)->rmap->n, &equal)); 2584 PetscCheck(equal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot reuse matrix. 
wrong no of nonzeros"); 2585 PetscCall(PetscArrayzero(c->ilen, (*B)->rmap->n)); 2586 C = *B; 2587 } else { 2588 PetscInt rbs, cbs; 2589 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C)); 2590 PetscCall(MatSetSizes(C, nrows, ncols, PETSC_DETERMINE, PETSC_DETERMINE)); 2591 PetscCall(ISGetBlockSize(isrow, &rbs)); 2592 PetscCall(ISGetBlockSize(iscol, &cbs)); 2593 PetscCall(MatSetBlockSizes(C, rbs, cbs)); 2594 PetscCall(MatSetType(C, ((PetscObject)A)->type_name)); 2595 PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(C, 0, lens)); 2596 } 2597 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 2598 2599 c = (Mat_SeqAIJ *)(C->data); 2600 PetscCall(MatSeqAIJGetArrayWrite(C, &c_a)); // Not 'c->a', since that raw usage ignores offload state of C 2601 for (i = 0; i < nrows; i++) { 2602 row = irow[i]; 2603 kstart = ai[row]; 2604 kend = kstart + a->ilen[row]; 2605 mat_i = c->i[i]; 2606 mat_j = c->j + mat_i; 2607 mat_a = c_a + mat_i; 2608 mat_ilen = c->ilen + i; 2609 for (k = kstart; k < kend; k++) { 2610 if ((tcol = smap[a->j[k]])) { 2611 *mat_j++ = tcol - 1; 2612 *mat_a++ = aa[k]; 2613 (*mat_ilen)++; 2614 } 2615 } 2616 } 2617 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 2618 /* Free work space */ 2619 PetscCall(ISRestoreIndices(iscol, &icol)); 2620 PetscCall(PetscFree(smap)); 2621 PetscCall(PetscFree(lens)); 2622 /* sort */ 2623 for (i = 0; i < nrows; i++) { 2624 PetscInt ilen; 2625 2626 mat_i = c->i[i]; 2627 mat_j = c->j + mat_i; 2628 mat_a = c_a + mat_i; 2629 ilen = c->ilen[i]; 2630 PetscCall(PetscSortIntWithScalarArray(ilen, mat_j, mat_a)); 2631 } 2632 PetscCall(MatSeqAIJRestoreArrayWrite(C, &c_a)); 2633 } 2634 #if defined(PETSC_HAVE_DEVICE) 2635 PetscCall(MatBindToCPU(C, A->boundtocpu)); 2636 #endif 2637 PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY)); 2638 PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY)); 2639 2640 PetscCall(ISRestoreIndices(isrow, &irow)); 2641 *B = C; 2642 PetscFunctionReturn(PETSC_SUCCESS); 2643 } 2644 2645 PetscErrorCode MatGetMultiProcBlock_SeqAIJ(Mat 
mat, MPI_Comm subComm, MatReuse scall, Mat *subMat) 2646 { 2647 Mat B; 2648 2649 PetscFunctionBegin; 2650 if (scall == MAT_INITIAL_MATRIX) { 2651 PetscCall(MatCreate(subComm, &B)); 2652 PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->n, mat->cmap->n)); 2653 PetscCall(MatSetBlockSizesFromMats(B, mat, mat)); 2654 PetscCall(MatSetType(B, MATSEQAIJ)); 2655 PetscCall(MatDuplicateNoCreate_SeqAIJ(B, mat, MAT_COPY_VALUES, PETSC_TRUE)); 2656 *subMat = B; 2657 } else { 2658 PetscCall(MatCopy_SeqAIJ(mat, *subMat, SAME_NONZERO_PATTERN)); 2659 } 2660 PetscFunctionReturn(PETSC_SUCCESS); 2661 } 2662 2663 PetscErrorCode MatILUFactor_SeqAIJ(Mat inA, IS row, IS col, const MatFactorInfo *info) 2664 { 2665 Mat_SeqAIJ *a = (Mat_SeqAIJ *)inA->data; 2666 Mat outA; 2667 PetscBool row_identity, col_identity; 2668 2669 PetscFunctionBegin; 2670 PetscCheck(info->levels == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only levels=0 supported for in-place ilu"); 2671 2672 PetscCall(ISIdentity(row, &row_identity)); 2673 PetscCall(ISIdentity(col, &col_identity)); 2674 2675 outA = inA; 2676 outA->factortype = MAT_FACTOR_LU; 2677 PetscCall(PetscFree(inA->solvertype)); 2678 PetscCall(PetscStrallocpy(MATSOLVERPETSC, &inA->solvertype)); 2679 2680 PetscCall(PetscObjectReference((PetscObject)row)); 2681 PetscCall(ISDestroy(&a->row)); 2682 2683 a->row = row; 2684 2685 PetscCall(PetscObjectReference((PetscObject)col)); 2686 PetscCall(ISDestroy(&a->col)); 2687 2688 a->col = col; 2689 2690 /* Create the inverse permutation so that it can be used in MatLUFactorNumeric() */ 2691 PetscCall(ISDestroy(&a->icol)); 2692 PetscCall(ISInvertPermutation(col, PETSC_DECIDE, &a->icol)); 2693 2694 if (!a->solve_work) { /* this matrix may have been factored before */ 2695 PetscCall(PetscMalloc1(inA->rmap->n + 1, &a->solve_work)); 2696 } 2697 2698 PetscCall(MatMarkDiagonal_SeqAIJ(inA)); 2699 if (row_identity && col_identity) { 2700 PetscCall(MatLUFactorNumeric_SeqAIJ_inplace(outA, inA, info)); 2701 } else { 2702 
PetscCall(MatLUFactorNumeric_SeqAIJ_InplaceWithPerm(outA, inA, info)); 2703 } 2704 PetscFunctionReturn(PETSC_SUCCESS); 2705 } 2706 2707 PetscErrorCode MatScale_SeqAIJ(Mat inA, PetscScalar alpha) 2708 { 2709 Mat_SeqAIJ *a = (Mat_SeqAIJ *)inA->data; 2710 PetscScalar *v; 2711 PetscBLASInt one = 1, bnz; 2712 2713 PetscFunctionBegin; 2714 PetscCall(MatSeqAIJGetArray(inA, &v)); 2715 PetscCall(PetscBLASIntCast(a->nz, &bnz)); 2716 PetscCallBLAS("BLASscal", BLASscal_(&bnz, &alpha, v, &one)); 2717 PetscCall(PetscLogFlops(a->nz)); 2718 PetscCall(MatSeqAIJRestoreArray(inA, &v)); 2719 PetscCall(MatSeqAIJInvalidateDiagonal(inA)); 2720 PetscFunctionReturn(PETSC_SUCCESS); 2721 } 2722 2723 PetscErrorCode MatDestroySubMatrix_Private(Mat_SubSppt *submatj) 2724 { 2725 PetscInt i; 2726 2727 PetscFunctionBegin; 2728 if (!submatj->id) { /* delete data that are linked only to submats[id=0] */ 2729 PetscCall(PetscFree4(submatj->sbuf1, submatj->ptr, submatj->tmp, submatj->ctr)); 2730 2731 for (i = 0; i < submatj->nrqr; ++i) PetscCall(PetscFree(submatj->sbuf2[i])); 2732 PetscCall(PetscFree3(submatj->sbuf2, submatj->req_size, submatj->req_source1)); 2733 2734 if (submatj->rbuf1) { 2735 PetscCall(PetscFree(submatj->rbuf1[0])); 2736 PetscCall(PetscFree(submatj->rbuf1)); 2737 } 2738 2739 for (i = 0; i < submatj->nrqs; ++i) PetscCall(PetscFree(submatj->rbuf3[i])); 2740 PetscCall(PetscFree3(submatj->req_source2, submatj->rbuf2, submatj->rbuf3)); 2741 PetscCall(PetscFree(submatj->pa)); 2742 } 2743 2744 #if defined(PETSC_USE_CTABLE) 2745 PetscCall(PetscHMapIDestroy(&submatj->rmap)); 2746 if (submatj->cmap_loc) PetscCall(PetscFree(submatj->cmap_loc)); 2747 PetscCall(PetscFree(submatj->rmap_loc)); 2748 #else 2749 PetscCall(PetscFree(submatj->rmap)); 2750 #endif 2751 2752 if (!submatj->allcolumns) { 2753 #if defined(PETSC_USE_CTABLE) 2754 PetscCall(PetscHMapIDestroy((PetscHMapI *)&submatj->cmap)); 2755 #else 2756 PetscCall(PetscFree(submatj->cmap)); 2757 #endif 2758 } 2759 
PetscCall(PetscFree(submatj->row2proc)); 2760 2761 PetscCall(PetscFree(submatj)); 2762 PetscFunctionReturn(PETSC_SUCCESS); 2763 } 2764 2765 PetscErrorCode MatDestroySubMatrix_SeqAIJ(Mat C) 2766 { 2767 Mat_SeqAIJ *c = (Mat_SeqAIJ *)C->data; 2768 Mat_SubSppt *submatj = c->submatis1; 2769 2770 PetscFunctionBegin; 2771 PetscCall((*submatj->destroy)(C)); 2772 PetscCall(MatDestroySubMatrix_Private(submatj)); 2773 PetscFunctionReturn(PETSC_SUCCESS); 2774 } 2775 2776 /* Note this has code duplication with MatDestroySubMatrices_SeqBAIJ() */ 2777 PetscErrorCode MatDestroySubMatrices_SeqAIJ(PetscInt n, Mat *mat[]) 2778 { 2779 PetscInt i; 2780 Mat C; 2781 Mat_SeqAIJ *c; 2782 Mat_SubSppt *submatj; 2783 2784 PetscFunctionBegin; 2785 for (i = 0; i < n; i++) { 2786 C = (*mat)[i]; 2787 c = (Mat_SeqAIJ *)C->data; 2788 submatj = c->submatis1; 2789 if (submatj) { 2790 if (--((PetscObject)C)->refct <= 0) { 2791 PetscCall(PetscFree(C->factorprefix)); 2792 PetscCall((*submatj->destroy)(C)); 2793 PetscCall(MatDestroySubMatrix_Private(submatj)); 2794 PetscCall(PetscFree(C->defaultvectype)); 2795 PetscCall(PetscFree(C->defaultrandtype)); 2796 PetscCall(PetscLayoutDestroy(&C->rmap)); 2797 PetscCall(PetscLayoutDestroy(&C->cmap)); 2798 PetscCall(PetscHeaderDestroy(&C)); 2799 } 2800 } else { 2801 PetscCall(MatDestroy(&C)); 2802 } 2803 } 2804 2805 /* Destroy Dummy submatrices created for reuse */ 2806 PetscCall(MatDestroySubMatrices_Dummy(n, mat)); 2807 2808 PetscCall(PetscFree(*mat)); 2809 PetscFunctionReturn(PETSC_SUCCESS); 2810 } 2811 2812 PetscErrorCode MatCreateSubMatrices_SeqAIJ(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *B[]) 2813 { 2814 PetscInt i; 2815 2816 PetscFunctionBegin; 2817 if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscCalloc1(n + 1, B)); 2818 2819 for (i = 0; i < n; i++) PetscCall(MatCreateSubMatrix_SeqAIJ(A, irow[i], icol[i], PETSC_DECIDE, scall, &(*B)[i])); 2820 PetscFunctionReturn(PETSC_SUCCESS); 2821 } 2822 2823 PetscErrorCode 
MatIncreaseOverlap_SeqAIJ(Mat A, PetscInt is_max, IS is[], PetscInt ov) 2824 { 2825 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2826 PetscInt row, i, j, k, l, ll, m, n, *nidx, isz, val; 2827 const PetscInt *idx; 2828 PetscInt start, end, *ai, *aj, bs = (A->rmap->bs > 0 && A->rmap->bs == A->cmap->bs) ? A->rmap->bs : 1; 2829 PetscBT table; 2830 2831 PetscFunctionBegin; 2832 m = A->rmap->n / bs; 2833 ai = a->i; 2834 aj = a->j; 2835 2836 PetscCheck(ov >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "illegal negative overlap value used"); 2837 2838 PetscCall(PetscMalloc1(m + 1, &nidx)); 2839 PetscCall(PetscBTCreate(m, &table)); 2840 2841 for (i = 0; i < is_max; i++) { 2842 /* Initialize the two local arrays */ 2843 isz = 0; 2844 PetscCall(PetscBTMemzero(m, table)); 2845 2846 /* Extract the indices, assume there can be duplicate entries */ 2847 PetscCall(ISGetIndices(is[i], &idx)); 2848 PetscCall(ISGetLocalSize(is[i], &n)); 2849 2850 if (bs > 1) { 2851 /* Enter these into the temp arrays. I.e., mark table[row], enter row into new index */ 2852 for (j = 0; j < n; ++j) { 2853 if (!PetscBTLookupSet(table, idx[j] / bs)) nidx[isz++] = idx[j] / bs; 2854 } 2855 PetscCall(ISRestoreIndices(is[i], &idx)); 2856 PetscCall(ISDestroy(&is[i])); 2857 2858 k = 0; 2859 for (j = 0; j < ov; j++) { /* for each overlap */ 2860 n = isz; 2861 for (; k < n; k++) { /* do only those rows in nidx[k], which are not done yet */ 2862 for (ll = 0; ll < bs; ll++) { 2863 row = bs * nidx[k] + ll; 2864 start = ai[row]; 2865 end = ai[row + 1]; 2866 for (l = start; l < end; l++) { 2867 val = aj[l] / bs; 2868 if (!PetscBTLookupSet(table, val)) nidx[isz++] = val; 2869 } 2870 } 2871 } 2872 } 2873 PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, isz, nidx, PETSC_COPY_VALUES, (is + i))); 2874 } else { 2875 /* Enter these into the temp arrays. 
I.e., mark table[row], enter row into new index */ 2876 for (j = 0; j < n; ++j) { 2877 if (!PetscBTLookupSet(table, idx[j])) nidx[isz++] = idx[j]; 2878 } 2879 PetscCall(ISRestoreIndices(is[i], &idx)); 2880 PetscCall(ISDestroy(&is[i])); 2881 2882 k = 0; 2883 for (j = 0; j < ov; j++) { /* for each overlap */ 2884 n = isz; 2885 for (; k < n; k++) { /* do only those rows in nidx[k], which are not done yet */ 2886 row = nidx[k]; 2887 start = ai[row]; 2888 end = ai[row + 1]; 2889 for (l = start; l < end; l++) { 2890 val = aj[l]; 2891 if (!PetscBTLookupSet(table, val)) nidx[isz++] = val; 2892 } 2893 } 2894 } 2895 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, isz, nidx, PETSC_COPY_VALUES, (is + i))); 2896 } 2897 } 2898 PetscCall(PetscBTDestroy(&table)); 2899 PetscCall(PetscFree(nidx)); 2900 PetscFunctionReturn(PETSC_SUCCESS); 2901 } 2902 2903 PetscErrorCode MatPermute_SeqAIJ(Mat A, IS rowp, IS colp, Mat *B) 2904 { 2905 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2906 PetscInt i, nz = 0, m = A->rmap->n, n = A->cmap->n; 2907 const PetscInt *row, *col; 2908 PetscInt *cnew, j, *lens; 2909 IS icolp, irowp; 2910 PetscInt *cwork = NULL; 2911 PetscScalar *vwork = NULL; 2912 2913 PetscFunctionBegin; 2914 PetscCall(ISInvertPermutation(rowp, PETSC_DECIDE, &irowp)); 2915 PetscCall(ISGetIndices(irowp, &row)); 2916 PetscCall(ISInvertPermutation(colp, PETSC_DECIDE, &icolp)); 2917 PetscCall(ISGetIndices(icolp, &col)); 2918 2919 /* determine lengths of permuted rows */ 2920 PetscCall(PetscMalloc1(m + 1, &lens)); 2921 for (i = 0; i < m; i++) lens[row[i]] = a->i[i + 1] - a->i[i]; 2922 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B)); 2923 PetscCall(MatSetSizes(*B, m, n, m, n)); 2924 PetscCall(MatSetBlockSizesFromMats(*B, A, A)); 2925 PetscCall(MatSetType(*B, ((PetscObject)A)->type_name)); 2926 PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(*B, 0, lens)); 2927 PetscCall(PetscFree(lens)); 2928 2929 PetscCall(PetscMalloc1(n, &cnew)); 2930 for (i = 0; i < m; i++) { 2931 PetscCall(MatGetRow_SeqAIJ(A, 
i, &nz, &cwork, &vwork)); 2932 for (j = 0; j < nz; j++) cnew[j] = col[cwork[j]]; 2933 PetscCall(MatSetValues_SeqAIJ(*B, 1, &row[i], nz, cnew, vwork, INSERT_VALUES)); 2934 PetscCall(MatRestoreRow_SeqAIJ(A, i, &nz, &cwork, &vwork)); 2935 } 2936 PetscCall(PetscFree(cnew)); 2937 2938 (*B)->assembled = PETSC_FALSE; 2939 2940 #if defined(PETSC_HAVE_DEVICE) 2941 PetscCall(MatBindToCPU(*B, A->boundtocpu)); 2942 #endif 2943 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 2944 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 2945 PetscCall(ISRestoreIndices(irowp, &row)); 2946 PetscCall(ISRestoreIndices(icolp, &col)); 2947 PetscCall(ISDestroy(&irowp)); 2948 PetscCall(ISDestroy(&icolp)); 2949 if (rowp == colp) PetscCall(MatPropagateSymmetryOptions(A, *B)); 2950 PetscFunctionReturn(PETSC_SUCCESS); 2951 } 2952 2953 PetscErrorCode MatCopy_SeqAIJ(Mat A, Mat B, MatStructure str) 2954 { 2955 PetscFunctionBegin; 2956 /* If the two matrices have the same copy implementation, use fast copy. */ 2957 if (str == SAME_NONZERO_PATTERN && (A->ops->copy == B->ops->copy)) { 2958 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2959 Mat_SeqAIJ *b = (Mat_SeqAIJ *)B->data; 2960 const PetscScalar *aa; 2961 2962 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 2963 PetscCheck(a->i[A->rmap->n] == b->i[B->rmap->n], PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of nonzeros in two matrices are different %" PetscInt_FMT " != %" PetscInt_FMT, a->i[A->rmap->n], b->i[B->rmap->n]); 2964 PetscCall(PetscArraycpy(b->a, aa, a->i[A->rmap->n])); 2965 PetscCall(PetscObjectStateIncrease((PetscObject)B)); 2966 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 2967 } else { 2968 PetscCall(MatCopy_Basic(A, B, str)); 2969 } 2970 PetscFunctionReturn(PETSC_SUCCESS); 2971 } 2972 2973 PETSC_INTERN PetscErrorCode MatSeqAIJGetArray_SeqAIJ(Mat A, PetscScalar *array[]) 2974 { 2975 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2976 2977 PetscFunctionBegin; 2978 *array = a->a; 2979 PetscFunctionReturn(PETSC_SUCCESS); 2980 } 2981 2982 PETSC_INTERN 
PetscErrorCode MatSeqAIJRestoreArray_SeqAIJ(Mat A, PetscScalar *array[]) 2983 { 2984 PetscFunctionBegin; 2985 *array = NULL; 2986 PetscFunctionReturn(PETSC_SUCCESS); 2987 } 2988 2989 /* 2990 Computes the number of nonzeros per row needed for preallocation when X and Y 2991 have different nonzero structure. 2992 */ 2993 PetscErrorCode MatAXPYGetPreallocation_SeqX_private(PetscInt m, const PetscInt *xi, const PetscInt *xj, const PetscInt *yi, const PetscInt *yj, PetscInt *nnz) 2994 { 2995 PetscInt i, j, k, nzx, nzy; 2996 2997 PetscFunctionBegin; 2998 /* Set the number of nonzeros in the new matrix */ 2999 for (i = 0; i < m; i++) { 3000 const PetscInt *xjj = xj + xi[i], *yjj = yj + yi[i]; 3001 nzx = xi[i + 1] - xi[i]; 3002 nzy = yi[i + 1] - yi[i]; 3003 nnz[i] = 0; 3004 for (j = 0, k = 0; j < nzx; j++) { /* Point in X */ 3005 for (; k < nzy && yjj[k] < xjj[j]; k++) nnz[i]++; /* Catch up to X */ 3006 if (k < nzy && yjj[k] == xjj[j]) k++; /* Skip duplicate */ 3007 nnz[i]++; 3008 } 3009 for (; k < nzy; k++) nnz[i]++; 3010 } 3011 PetscFunctionReturn(PETSC_SUCCESS); 3012 } 3013 3014 PetscErrorCode MatAXPYGetPreallocation_SeqAIJ(Mat Y, Mat X, PetscInt *nnz) 3015 { 3016 PetscInt m = Y->rmap->N; 3017 Mat_SeqAIJ *x = (Mat_SeqAIJ *)X->data; 3018 Mat_SeqAIJ *y = (Mat_SeqAIJ *)Y->data; 3019 3020 PetscFunctionBegin; 3021 /* Set the number of nonzeros in the new matrix */ 3022 PetscCall(MatAXPYGetPreallocation_SeqX_private(m, x->i, x->j, y->i, y->j, nnz)); 3023 PetscFunctionReturn(PETSC_SUCCESS); 3024 } 3025 3026 PetscErrorCode MatAXPY_SeqAIJ(Mat Y, PetscScalar a, Mat X, MatStructure str) 3027 { 3028 Mat_SeqAIJ *x = (Mat_SeqAIJ *)X->data, *y = (Mat_SeqAIJ *)Y->data; 3029 3030 PetscFunctionBegin; 3031 if (str == UNKNOWN_NONZERO_PATTERN || (PetscDefined(USE_DEBUG) && str == SAME_NONZERO_PATTERN)) { 3032 PetscBool e = x->nz == y->nz ? 
PETSC_TRUE : PETSC_FALSE; 3033 if (e) { 3034 PetscCall(PetscArraycmp(x->i, y->i, Y->rmap->n + 1, &e)); 3035 if (e) { 3036 PetscCall(PetscArraycmp(x->j, y->j, y->nz, &e)); 3037 if (e) str = SAME_NONZERO_PATTERN; 3038 } 3039 } 3040 if (!e) PetscCheck(str != SAME_NONZERO_PATTERN, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatStructure is not SAME_NONZERO_PATTERN"); 3041 } 3042 if (str == SAME_NONZERO_PATTERN) { 3043 const PetscScalar *xa; 3044 PetscScalar *ya, alpha = a; 3045 PetscBLASInt one = 1, bnz; 3046 3047 PetscCall(PetscBLASIntCast(x->nz, &bnz)); 3048 PetscCall(MatSeqAIJGetArray(Y, &ya)); 3049 PetscCall(MatSeqAIJGetArrayRead(X, &xa)); 3050 PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xa, &one, ya, &one)); 3051 PetscCall(MatSeqAIJRestoreArrayRead(X, &xa)); 3052 PetscCall(MatSeqAIJRestoreArray(Y, &ya)); 3053 PetscCall(PetscLogFlops(2.0 * bnz)); 3054 PetscCall(MatSeqAIJInvalidateDiagonal(Y)); 3055 PetscCall(PetscObjectStateIncrease((PetscObject)Y)); 3056 } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */ 3057 PetscCall(MatAXPY_Basic(Y, a, X, str)); 3058 } else { 3059 Mat B; 3060 PetscInt *nnz; 3061 PetscCall(PetscMalloc1(Y->rmap->N, &nnz)); 3062 PetscCall(MatCreate(PetscObjectComm((PetscObject)Y), &B)); 3063 PetscCall(PetscObjectSetName((PetscObject)B, ((PetscObject)Y)->name)); 3064 PetscCall(MatSetLayouts(B, Y->rmap, Y->cmap)); 3065 PetscCall(MatSetType(B, ((PetscObject)Y)->type_name)); 3066 PetscCall(MatAXPYGetPreallocation_SeqAIJ(Y, X, nnz)); 3067 PetscCall(MatSeqAIJSetPreallocation(B, 0, nnz)); 3068 PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str)); 3069 PetscCall(MatHeaderMerge(Y, &B)); 3070 PetscCall(MatSeqAIJCheckInode(Y)); 3071 PetscCall(PetscFree(nnz)); 3072 } 3073 PetscFunctionReturn(PETSC_SUCCESS); 3074 } 3075 3076 PETSC_INTERN PetscErrorCode MatConjugate_SeqAIJ(Mat mat) 3077 { 3078 #if defined(PETSC_USE_COMPLEX) 3079 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data; 3080 PetscInt i, nz; 3081 PetscScalar *a; 3082 3083 
PetscFunctionBegin; 3084 nz = aij->nz; 3085 PetscCall(MatSeqAIJGetArray(mat, &a)); 3086 for (i = 0; i < nz; i++) a[i] = PetscConj(a[i]); 3087 PetscCall(MatSeqAIJRestoreArray(mat, &a)); 3088 #else 3089 PetscFunctionBegin; 3090 #endif 3091 PetscFunctionReturn(PETSC_SUCCESS); 3092 } 3093 3094 PetscErrorCode MatGetRowMaxAbs_SeqAIJ(Mat A, Vec v, PetscInt idx[]) 3095 { 3096 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 3097 PetscInt i, j, m = A->rmap->n, *ai, *aj, ncols, n; 3098 PetscReal atmp; 3099 PetscScalar *x; 3100 const MatScalar *aa, *av; 3101 3102 PetscFunctionBegin; 3103 PetscCheck(!A->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix"); 3104 PetscCall(MatSeqAIJGetArrayRead(A, &av)); 3105 aa = av; 3106 ai = a->i; 3107 aj = a->j; 3108 3109 PetscCall(VecSet(v, 0.0)); 3110 PetscCall(VecGetArrayWrite(v, &x)); 3111 PetscCall(VecGetLocalSize(v, &n)); 3112 PetscCheck(n == A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector"); 3113 for (i = 0; i < m; i++) { 3114 ncols = ai[1] - ai[0]; 3115 ai++; 3116 for (j = 0; j < ncols; j++) { 3117 atmp = PetscAbsScalar(*aa); 3118 if (PetscAbsScalar(x[i]) < atmp) { 3119 x[i] = atmp; 3120 if (idx) idx[i] = *aj; 3121 } 3122 aa++; 3123 aj++; 3124 } 3125 } 3126 PetscCall(VecRestoreArrayWrite(v, &x)); 3127 PetscCall(MatSeqAIJRestoreArrayRead(A, &av)); 3128 PetscFunctionReturn(PETSC_SUCCESS); 3129 } 3130 3131 PetscErrorCode MatGetRowMax_SeqAIJ(Mat A, Vec v, PetscInt idx[]) 3132 { 3133 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 3134 PetscInt i, j, m = A->rmap->n, *ai, *aj, ncols, n; 3135 PetscScalar *x; 3136 const MatScalar *aa, *av; 3137 3138 PetscFunctionBegin; 3139 PetscCheck(!A->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix"); 3140 PetscCall(MatSeqAIJGetArrayRead(A, &av)); 3141 aa = av; 3142 ai = a->i; 3143 aj = a->j; 3144 3145 PetscCall(VecSet(v, 0.0)); 3146 PetscCall(VecGetArrayWrite(v, &x)); 3147 PetscCall(VecGetLocalSize(v, &n)); 3148 PetscCheck(n == 
A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector"); 3149 for (i = 0; i < m; i++) { 3150 ncols = ai[1] - ai[0]; 3151 ai++; 3152 if (ncols == A->cmap->n) { /* row is dense */ 3153 x[i] = *aa; 3154 if (idx) idx[i] = 0; 3155 } else { /* row is sparse so already KNOW maximum is 0.0 or higher */ 3156 x[i] = 0.0; 3157 if (idx) { 3158 for (j = 0; j < ncols; j++) { /* find first implicit 0.0 in the row */ 3159 if (aj[j] > j) { 3160 idx[i] = j; 3161 break; 3162 } 3163 } 3164 /* in case first implicit 0.0 in the row occurs at ncols-th column */ 3165 if (j == ncols && j < A->cmap->n) idx[i] = j; 3166 } 3167 } 3168 for (j = 0; j < ncols; j++) { 3169 if (PetscRealPart(x[i]) < PetscRealPart(*aa)) { 3170 x[i] = *aa; 3171 if (idx) idx[i] = *aj; 3172 } 3173 aa++; 3174 aj++; 3175 } 3176 } 3177 PetscCall(VecRestoreArrayWrite(v, &x)); 3178 PetscCall(MatSeqAIJRestoreArrayRead(A, &av)); 3179 PetscFunctionReturn(PETSC_SUCCESS); 3180 } 3181 3182 PetscErrorCode MatGetRowMinAbs_SeqAIJ(Mat A, Vec v, PetscInt idx[]) 3183 { 3184 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 3185 PetscInt i, j, m = A->rmap->n, *ai, *aj, ncols, n; 3186 PetscScalar *x; 3187 const MatScalar *aa, *av; 3188 3189 PetscFunctionBegin; 3190 PetscCall(MatSeqAIJGetArrayRead(A, &av)); 3191 aa = av; 3192 ai = a->i; 3193 aj = a->j; 3194 3195 PetscCall(VecSet(v, 0.0)); 3196 PetscCall(VecGetArrayWrite(v, &x)); 3197 PetscCall(VecGetLocalSize(v, &n)); 3198 PetscCheck(n == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector, %" PetscInt_FMT " vs. 
%" PetscInt_FMT " rows", m, n); 3199 for (i = 0; i < m; i++) { 3200 ncols = ai[1] - ai[0]; 3201 ai++; 3202 if (ncols == A->cmap->n) { /* row is dense */ 3203 x[i] = *aa; 3204 if (idx) idx[i] = 0; 3205 } else { /* row is sparse so already KNOW minimum is 0.0 or higher */ 3206 x[i] = 0.0; 3207 if (idx) { /* find first implicit 0.0 in the row */ 3208 for (j = 0; j < ncols; j++) { 3209 if (aj[j] > j) { 3210 idx[i] = j; 3211 break; 3212 } 3213 } 3214 /* in case first implicit 0.0 in the row occurs at ncols-th column */ 3215 if (j == ncols && j < A->cmap->n) idx[i] = j; 3216 } 3217 } 3218 for (j = 0; j < ncols; j++) { 3219 if (PetscAbsScalar(x[i]) > PetscAbsScalar(*aa)) { 3220 x[i] = *aa; 3221 if (idx) idx[i] = *aj; 3222 } 3223 aa++; 3224 aj++; 3225 } 3226 } 3227 PetscCall(VecRestoreArrayWrite(v, &x)); 3228 PetscCall(MatSeqAIJRestoreArrayRead(A, &av)); 3229 PetscFunctionReturn(PETSC_SUCCESS); 3230 } 3231 3232 PetscErrorCode MatGetRowMin_SeqAIJ(Mat A, Vec v, PetscInt idx[]) 3233 { 3234 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 3235 PetscInt i, j, m = A->rmap->n, ncols, n; 3236 const PetscInt *ai, *aj; 3237 PetscScalar *x; 3238 const MatScalar *aa, *av; 3239 3240 PetscFunctionBegin; 3241 PetscCheck(!A->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix"); 3242 PetscCall(MatSeqAIJGetArrayRead(A, &av)); 3243 aa = av; 3244 ai = a->i; 3245 aj = a->j; 3246 3247 PetscCall(VecSet(v, 0.0)); 3248 PetscCall(VecGetArrayWrite(v, &x)); 3249 PetscCall(VecGetLocalSize(v, &n)); 3250 PetscCheck(n == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector"); 3251 for (i = 0; i < m; i++) { 3252 ncols = ai[1] - ai[0]; 3253 ai++; 3254 if (ncols == A->cmap->n) { /* row is dense */ 3255 x[i] = *aa; 3256 if (idx) idx[i] = 0; 3257 } else { /* row is sparse so already KNOW minimum is 0.0 or lower */ 3258 x[i] = 0.0; 3259 if (idx) { /* find first implicit 0.0 in the row */ 3260 for (j = 0; j < ncols; j++) { 3261 if (aj[j] > j) { 3262 idx[i] = j; 3263 break; 
3264 } 3265 } 3266 /* in case first implicit 0.0 in the row occurs at ncols-th column */ 3267 if (j == ncols && j < A->cmap->n) idx[i] = j; 3268 } 3269 } 3270 for (j = 0; j < ncols; j++) { 3271 if (PetscRealPart(x[i]) > PetscRealPart(*aa)) { 3272 x[i] = *aa; 3273 if (idx) idx[i] = *aj; 3274 } 3275 aa++; 3276 aj++; 3277 } 3278 } 3279 PetscCall(VecRestoreArrayWrite(v, &x)); 3280 PetscCall(MatSeqAIJRestoreArrayRead(A, &av)); 3281 PetscFunctionReturn(PETSC_SUCCESS); 3282 } 3283 3284 PetscErrorCode MatInvertBlockDiagonal_SeqAIJ(Mat A, const PetscScalar **values) 3285 { 3286 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 3287 PetscInt i, bs = PetscAbs(A->rmap->bs), mbs = A->rmap->n / bs, ipvt[5], bs2 = bs * bs, *v_pivots, ij[7], *IJ, j; 3288 MatScalar *diag, work[25], *v_work; 3289 const PetscReal shift = 0.0; 3290 PetscBool allowzeropivot, zeropivotdetected = PETSC_FALSE; 3291 3292 PetscFunctionBegin; 3293 allowzeropivot = PetscNot(A->erroriffailure); 3294 if (a->ibdiagvalid) { 3295 if (values) *values = a->ibdiag; 3296 PetscFunctionReturn(PETSC_SUCCESS); 3297 } 3298 PetscCall(MatMarkDiagonal_SeqAIJ(A)); 3299 if (!a->ibdiag) { PetscCall(PetscMalloc1(bs2 * mbs, &a->ibdiag)); } 3300 diag = a->ibdiag; 3301 if (values) *values = a->ibdiag; 3302 /* factor and invert each block */ 3303 switch (bs) { 3304 case 1: 3305 for (i = 0; i < mbs; i++) { 3306 PetscCall(MatGetValues(A, 1, &i, 1, &i, diag + i)); 3307 if (PetscAbsScalar(diag[i] + shift) < PETSC_MACHINE_EPSILON) { 3308 if (allowzeropivot) { 3309 A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 3310 A->factorerror_zeropivot_value = PetscAbsScalar(diag[i]); 3311 A->factorerror_zeropivot_row = i; 3312 PetscCall(PetscInfo(A, "Zero pivot, row %" PetscInt_FMT " pivot %g tolerance %g\n", i, (double)PetscAbsScalar(diag[i]), (double)PETSC_MACHINE_EPSILON)); 3313 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_MAT_LU_ZRPVT, "Zero pivot, row %" PetscInt_FMT " pivot %g tolerance %g", i, (double)PetscAbsScalar(diag[i]), 
(double)PETSC_MACHINE_EPSILON); 3314 } 3315 diag[i] = (PetscScalar)1.0 / (diag[i] + shift); 3316 } 3317 break; 3318 case 2: 3319 for (i = 0; i < mbs; i++) { 3320 ij[0] = 2 * i; 3321 ij[1] = 2 * i + 1; 3322 PetscCall(MatGetValues(A, 2, ij, 2, ij, diag)); 3323 PetscCall(PetscKernel_A_gets_inverse_A_2(diag, shift, allowzeropivot, &zeropivotdetected)); 3324 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 3325 PetscCall(PetscKernel_A_gets_transpose_A_2(diag)); 3326 diag += 4; 3327 } 3328 break; 3329 case 3: 3330 for (i = 0; i < mbs; i++) { 3331 ij[0] = 3 * i; 3332 ij[1] = 3 * i + 1; 3333 ij[2] = 3 * i + 2; 3334 PetscCall(MatGetValues(A, 3, ij, 3, ij, diag)); 3335 PetscCall(PetscKernel_A_gets_inverse_A_3(diag, shift, allowzeropivot, &zeropivotdetected)); 3336 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 3337 PetscCall(PetscKernel_A_gets_transpose_A_3(diag)); 3338 diag += 9; 3339 } 3340 break; 3341 case 4: 3342 for (i = 0; i < mbs; i++) { 3343 ij[0] = 4 * i; 3344 ij[1] = 4 * i + 1; 3345 ij[2] = 4 * i + 2; 3346 ij[3] = 4 * i + 3; 3347 PetscCall(MatGetValues(A, 4, ij, 4, ij, diag)); 3348 PetscCall(PetscKernel_A_gets_inverse_A_4(diag, shift, allowzeropivot, &zeropivotdetected)); 3349 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 3350 PetscCall(PetscKernel_A_gets_transpose_A_4(diag)); 3351 diag += 16; 3352 } 3353 break; 3354 case 5: 3355 for (i = 0; i < mbs; i++) { 3356 ij[0] = 5 * i; 3357 ij[1] = 5 * i + 1; 3358 ij[2] = 5 * i + 2; 3359 ij[3] = 5 * i + 3; 3360 ij[4] = 5 * i + 4; 3361 PetscCall(MatGetValues(A, 5, ij, 5, ij, diag)); 3362 PetscCall(PetscKernel_A_gets_inverse_A_5(diag, ipvt, work, shift, allowzeropivot, &zeropivotdetected)); 3363 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 3364 PetscCall(PetscKernel_A_gets_transpose_A_5(diag)); 3365 diag += 25; 3366 } 3367 break; 3368 case 6: 3369 for (i = 0; i < mbs; i++) { 3370 ij[0] = 6 * i; 3371 ij[1] = 6 * i + 1; 
3372 ij[2] = 6 * i + 2; 3373 ij[3] = 6 * i + 3; 3374 ij[4] = 6 * i + 4; 3375 ij[5] = 6 * i + 5; 3376 PetscCall(MatGetValues(A, 6, ij, 6, ij, diag)); 3377 PetscCall(PetscKernel_A_gets_inverse_A_6(diag, shift, allowzeropivot, &zeropivotdetected)); 3378 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 3379 PetscCall(PetscKernel_A_gets_transpose_A_6(diag)); 3380 diag += 36; 3381 } 3382 break; 3383 case 7: 3384 for (i = 0; i < mbs; i++) { 3385 ij[0] = 7 * i; 3386 ij[1] = 7 * i + 1; 3387 ij[2] = 7 * i + 2; 3388 ij[3] = 7 * i + 3; 3389 ij[4] = 7 * i + 4; 3390 ij[5] = 7 * i + 5; 3391 ij[6] = 7 * i + 6; 3392 PetscCall(MatGetValues(A, 7, ij, 7, ij, diag)); 3393 PetscCall(PetscKernel_A_gets_inverse_A_7(diag, shift, allowzeropivot, &zeropivotdetected)); 3394 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 3395 PetscCall(PetscKernel_A_gets_transpose_A_7(diag)); 3396 diag += 49; 3397 } 3398 break; 3399 default: 3400 PetscCall(PetscMalloc3(bs, &v_work, bs, &v_pivots, bs, &IJ)); 3401 for (i = 0; i < mbs; i++) { 3402 for (j = 0; j < bs; j++) IJ[j] = bs * i + j; 3403 PetscCall(MatGetValues(A, bs, IJ, bs, IJ, diag)); 3404 PetscCall(PetscKernel_A_gets_inverse_A(bs, diag, v_pivots, v_work, allowzeropivot, &zeropivotdetected)); 3405 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 3406 PetscCall(PetscKernel_A_gets_transpose_A_N(diag, bs)); 3407 diag += bs2; 3408 } 3409 PetscCall(PetscFree3(v_work, v_pivots, IJ)); 3410 } 3411 a->ibdiagvalid = PETSC_TRUE; 3412 PetscFunctionReturn(PETSC_SUCCESS); 3413 } 3414 3415 static PetscErrorCode MatSetRandom_SeqAIJ(Mat x, PetscRandom rctx) 3416 { 3417 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)x->data; 3418 PetscScalar a, *aa; 3419 PetscInt m, n, i, j, col; 3420 3421 PetscFunctionBegin; 3422 if (!x->assembled) { 3423 PetscCall(MatGetSize(x, &m, &n)); 3424 for (i = 0; i < m; i++) { 3425 for (j = 0; j < aij->imax[i]; j++) { 3426 PetscCall(PetscRandomGetValue(rctx, &a)); 3427 col = 
(PetscInt)(n * PetscRealPart(a)); 3428 PetscCall(MatSetValues(x, 1, &i, 1, &col, &a, ADD_VALUES)); 3429 } 3430 } 3431 } else { 3432 PetscCall(MatSeqAIJGetArrayWrite(x, &aa)); 3433 for (i = 0; i < aij->nz; i++) PetscCall(PetscRandomGetValue(rctx, aa + i)); 3434 PetscCall(MatSeqAIJRestoreArrayWrite(x, &aa)); 3435 } 3436 PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY)); 3437 PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY)); 3438 PetscFunctionReturn(PETSC_SUCCESS); 3439 } 3440 3441 /* Like MatSetRandom_SeqAIJ, but do not set values on columns in range of [low, high) */ 3442 PetscErrorCode MatSetRandomSkipColumnRange_SeqAIJ_Private(Mat x, PetscInt low, PetscInt high, PetscRandom rctx) 3443 { 3444 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)x->data; 3445 PetscScalar a; 3446 PetscInt m, n, i, j, col, nskip; 3447 3448 PetscFunctionBegin; 3449 nskip = high - low; 3450 PetscCall(MatGetSize(x, &m, &n)); 3451 n -= nskip; /* shrink number of columns where nonzeros can be set */ 3452 for (i = 0; i < m; i++) { 3453 for (j = 0; j < aij->imax[i]; j++) { 3454 PetscCall(PetscRandomGetValue(rctx, &a)); 3455 col = (PetscInt)(n * PetscRealPart(a)); 3456 if (col >= low) col += nskip; /* shift col rightward to skip the hole */ 3457 PetscCall(MatSetValues(x, 1, &i, 1, &col, &a, ADD_VALUES)); 3458 } 3459 } 3460 PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY)); 3461 PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY)); 3462 PetscFunctionReturn(PETSC_SUCCESS); 3463 } 3464 3465 static struct _MatOps MatOps_Values = {MatSetValues_SeqAIJ, 3466 MatGetRow_SeqAIJ, 3467 MatRestoreRow_SeqAIJ, 3468 MatMult_SeqAIJ, 3469 /* 4*/ MatMultAdd_SeqAIJ, 3470 MatMultTranspose_SeqAIJ, 3471 MatMultTransposeAdd_SeqAIJ, 3472 NULL, 3473 NULL, 3474 NULL, 3475 /* 10*/ NULL, 3476 MatLUFactor_SeqAIJ, 3477 NULL, 3478 MatSOR_SeqAIJ, 3479 MatTranspose_SeqAIJ, 3480 /*1 5*/ MatGetInfo_SeqAIJ, 3481 MatEqual_SeqAIJ, 3482 MatGetDiagonal_SeqAIJ, 3483 MatDiagonalScale_SeqAIJ, 3484 MatNorm_SeqAIJ, 3485 /* 20*/ NULL, 3486 
MatAssemblyEnd_SeqAIJ, 3487 MatSetOption_SeqAIJ, 3488 MatZeroEntries_SeqAIJ, 3489 /* 24*/ MatZeroRows_SeqAIJ, 3490 NULL, 3491 NULL, 3492 NULL, 3493 NULL, 3494 /* 29*/ MatSetUp_Seq_Hash, 3495 NULL, 3496 NULL, 3497 NULL, 3498 NULL, 3499 /* 34*/ MatDuplicate_SeqAIJ, 3500 NULL, 3501 NULL, 3502 MatILUFactor_SeqAIJ, 3503 NULL, 3504 /* 39*/ MatAXPY_SeqAIJ, 3505 MatCreateSubMatrices_SeqAIJ, 3506 MatIncreaseOverlap_SeqAIJ, 3507 MatGetValues_SeqAIJ, 3508 MatCopy_SeqAIJ, 3509 /* 44*/ MatGetRowMax_SeqAIJ, 3510 MatScale_SeqAIJ, 3511 MatShift_SeqAIJ, 3512 MatDiagonalSet_SeqAIJ, 3513 MatZeroRowsColumns_SeqAIJ, 3514 /* 49*/ MatSetRandom_SeqAIJ, 3515 MatGetRowIJ_SeqAIJ, 3516 MatRestoreRowIJ_SeqAIJ, 3517 MatGetColumnIJ_SeqAIJ, 3518 MatRestoreColumnIJ_SeqAIJ, 3519 /* 54*/ MatFDColoringCreate_SeqXAIJ, 3520 NULL, 3521 NULL, 3522 MatPermute_SeqAIJ, 3523 NULL, 3524 /* 59*/ NULL, 3525 MatDestroy_SeqAIJ, 3526 MatView_SeqAIJ, 3527 NULL, 3528 NULL, 3529 /* 64*/ NULL, 3530 MatMatMatMultNumeric_SeqAIJ_SeqAIJ_SeqAIJ, 3531 NULL, 3532 NULL, 3533 NULL, 3534 /* 69*/ MatGetRowMaxAbs_SeqAIJ, 3535 MatGetRowMinAbs_SeqAIJ, 3536 NULL, 3537 NULL, 3538 NULL, 3539 /* 74*/ NULL, 3540 MatFDColoringApply_AIJ, 3541 NULL, 3542 NULL, 3543 NULL, 3544 /* 79*/ MatFindZeroDiagonals_SeqAIJ, 3545 NULL, 3546 NULL, 3547 NULL, 3548 MatLoad_SeqAIJ, 3549 /* 84*/ MatIsSymmetric_SeqAIJ, 3550 MatIsHermitian_SeqAIJ, 3551 NULL, 3552 NULL, 3553 NULL, 3554 /* 89*/ NULL, 3555 NULL, 3556 MatMatMultNumeric_SeqAIJ_SeqAIJ, 3557 NULL, 3558 NULL, 3559 /* 94*/ MatPtAPNumeric_SeqAIJ_SeqAIJ_SparseAxpy, 3560 NULL, 3561 NULL, 3562 MatMatTransposeMultNumeric_SeqAIJ_SeqAIJ, 3563 NULL, 3564 /* 99*/ MatProductSetFromOptions_SeqAIJ, 3565 NULL, 3566 NULL, 3567 MatConjugate_SeqAIJ, 3568 NULL, 3569 /*104*/ MatSetValuesRow_SeqAIJ, 3570 MatRealPart_SeqAIJ, 3571 MatImaginaryPart_SeqAIJ, 3572 NULL, 3573 NULL, 3574 /*109*/ MatMatSolve_SeqAIJ, 3575 NULL, 3576 MatGetRowMin_SeqAIJ, 3577 NULL, 3578 MatMissingDiagonal_SeqAIJ, 3579 /*114*/ NULL, 3580 NULL, 3581 
NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatGetMultiProcBlock_SeqAIJ,
                                       /*124*/ MatFindNonzeroRows_SeqAIJ,
                                       MatGetColumnReductions_SeqAIJ,
                                       MatInvertBlockDiagonal_SeqAIJ,
                                       MatInvertVariableBlockDiagonal_SeqAIJ,
                                       NULL,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       MatTransposeMatMultNumeric_SeqAIJ_SeqAIJ,
                                       MatTransposeColoringCreate_SeqAIJ,
                                       /*134*/ MatTransColoringApplySpToDen_SeqAIJ,
                                       MatTransColoringApplyDenToSp_SeqAIJ,
                                       NULL,
                                       NULL,
                                       MatRARtNumeric_SeqAIJ_SeqAIJ,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       MatFDColoringSetUp_SeqXAIJ,
                                       MatFindOffBlockDiagonalEntries_SeqAIJ,
                                       MatCreateMPIMatConcatenateSeqMat_SeqAIJ,
                                       /*145*/ MatDestroySubMatrices_SeqAIJ,
                                       NULL,
                                       NULL,
                                       MatCreateGraph_Simple_AIJ,
                                       NULL,
                                       /*150*/ MatTransposeSymbolic_SeqAIJ,
                                       MatEliminateZeros_SeqAIJ};

/* Copy user-supplied column indices straight into the CSR j[] array and mark every
   row as full (ilen[i] = imax[i]). Assumes the row offsets built during preallocation
   are final; see MatSeqAIJSetColumnIndices() for the user-facing contract. */
PetscErrorCode MatSeqAIJSetColumnIndices_SeqAIJ(Mat mat, PetscInt *indices)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data;
  PetscInt    i, nz, n;

  PetscFunctionBegin;
  nz = aij->maxnz; /* total preallocated entries; indices[] must provide exactly this many */
  n  = mat->rmap->n;
  for (i = 0; i < nz; i++) aij->j[i] = indices[i];
  aij->nz = nz;
  /* every row is declared full so MatSetValues() will only overwrite, never insert */
  for (i = 0; i < n; i++) aij->ilen[i] = aij->imax[i];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
 * Given a sparse matrix with global column indices, compact it by using a local column space.
 * The result matrix helps saving memory in other algorithms, such as MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
 */
PetscErrorCode MatSeqAIJCompactOutExtraColumns_SeqAIJ(Mat mat, ISLocalToGlobalMapping *mapping)
{
  Mat_SeqAIJ   *aij = (Mat_SeqAIJ *)mat->data;
  PetscHMapI    gid1_lid1;
  PetscHashIter tpos;
  PetscInt      gid, lid, i, ec, nz = aij->nz;
  PetscInt     *garray, *jj = aij->j;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscValidPointer(mapping, 2);
  /* use a table */
  PetscCall(PetscHMapICreateWithSize(mat->rmap->n, &gid1_lid1));
  /* first pass: assign a provisional (insertion-order) local id to each distinct global column */
  ec = 0;
  for (i = 0; i < nz; i++) {
    PetscInt data, gid1 = jj[i] + 1;
    PetscCall(PetscHMapIGetWithDefault(gid1_lid1, gid1, 0, &data));
    if (!data) {
      /* one based table */
      PetscCall(PetscHMapISet(gid1_lid1, gid1, ++ec));
    }
  }
  /* form array of columns we need */
  PetscCall(PetscMalloc1(ec, &garray));
  PetscHashIterBegin(gid1_lid1, tpos);
  while (!PetscHashIterAtEnd(gid1_lid1, tpos)) {
    PetscHashIterGetKey(gid1_lid1, tpos, gid);
    PetscHashIterGetVal(gid1_lid1, tpos, lid);
    PetscHashIterNext(gid1_lid1, tpos);
    gid--; /* undo the one-based offsets used in the table */
    lid--;
    garray[lid] = gid;
  }
  /* sort the global ids so local ids are monotone in global id, then rebuild the table */
  PetscCall(PetscSortInt(ec, garray)); /* sort, and rebuild */
  PetscCall(PetscHMapIClear(gid1_lid1));
  for (i = 0; i < ec; i++) PetscCall(PetscHMapISet(gid1_lid1, garray[i] + 1, i + 1));
  /* compact out the extra columns in B */
  for (i = 0; i < nz; i++) {
    PetscInt gid1 = jj[i] + 1;
    PetscCall(PetscHMapIGetWithDefault(gid1_lid1, gid1, 0, &lid));
    lid--;
    jj[i] = lid; /* rewrite column index in place: global -> compact local */
  }
  /* shrink the column layout to the ec columns actually used */
  PetscCall(PetscLayoutDestroy(&mat->cmap));
  PetscCall(PetscHMapIDestroy(&gid1_lid1));
  PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)mat), ec, ec, 1, &mat->cmap));
  /* garray ownership transfers to the mapping (PETSC_OWN_POINTER) */
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, mat->cmap->bs, mat->cmap->n, garray, PETSC_OWN_POINTER, mapping));
  PetscCall(ISLocalToGlobalMappingSetType(*mapping, ISLOCALTOGLOBALMAPPINGHASH));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatSeqAIJSetColumnIndices - Set the column indices for all the rows
  in the matrix.

  Input Parameters:
+ mat     - the `MATSEQAIJ` matrix
- indices - the column indices

  Level: advanced

  Notes:
  This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the matrix object to improve the performance
  of the `MatSetValues()` operation.

  You MUST have set the correct numbers of nonzeros per row in the call to
  `MatCreateSeqAIJ()`, and the columns indices MUST be sorted.

  MUST be called before any calls to `MatSetValues()`

  The indices should start with zero, not one.

.seealso: [](ch_matrices), `Mat`, `MATSEQAIJ`
@*/
PetscErrorCode MatSeqAIJSetColumnIndices(Mat mat, PetscInt *indices)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscValidIntPointer(indices, 2);
  PetscUseMethod(mat, "MatSeqAIJSetColumnIndices_C", (Mat, PetscInt *), (mat, indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Stash a copy of the numerical values (a[]) so MatRetrieveValues_SeqAIJ() can restore them.
   Requires MAT_NEW_NONZERO_LOCATIONS == PETSC_FALSE so the nonzero pattern (and hence nz) is frozen. */
PetscErrorCode MatStoreValues_SeqAIJ(Mat mat)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data;
  size_t      nz  = aij->i[mat->rmap->n]; /* total stored entries = last row offset */

  PetscFunctionBegin;
  PetscCheck(aij->nonew, PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call MatSetOption(A,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);first");

  /* allocate space for values if not already there */
  if (!aij->saved_values) { PetscCall(PetscMalloc1(nz + 1, &aij->saved_values)); }

  /* copy values over */
  PetscCall(PetscArraycpy(aij->saved_values, aij->a, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatStoreValues - Stashes a copy of the matrix values; this allows reusing of the linear part of a Jacobian, while
recomputing only the
  nonlinear portion.

  Logically Collective

  Input Parameter:
. mat - the matrix (currently only `MATAIJ` matrices support this option)

  Level: advanced

  Usage:
.vb
    Using SNES
    Create Jacobian matrix
    Set linear terms into matrix
    Apply boundary conditions to matrix, at this time matrix must have
    final nonzero structure (i.e. setting the nonlinear terms and applying
    boundary conditions again will not change the nonzero structure)
    MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);
    MatStoreValues(mat);
    Call SNESSetJacobian() with matrix
    In your Jacobian routine
      MatRetrieveValues(mat);
      Set nonlinear terms in matrix

    Without `SNESSolve()`, i.e. when you handle nonlinear solve yourself:
      // build linear portion of Jacobian
      MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);
      MatStoreValues(mat);
      loop over nonlinear iterations
        MatRetrieveValues(mat);
        // call MatSetValues(mat,...) to set nonlinear portion of Jacobian
        // call MatAssemblyBegin/End() on matrix
        Solve linear system with Jacobian
      endloop
.ve

  Notes:
  Matrix must already be assembled before calling this routine
  Must set the matrix option `MatSetOption`(mat,`MAT_NEW_NONZERO_LOCATIONS`,`PETSC_FALSE`); before
  calling this routine.

  When this is called multiple times it overwrites the previous set of stored values
  and does not allocate additional space.

.seealso: [](ch_matrices), `Mat`, `MatRetrieveValues()`
@*/
PetscErrorCode MatStoreValues(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* dispatch to the type-specific implementation (errors if the type provides none) */
  PetscUseMethod(mat, "MatStoreValues_C", (Mat), (mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Restore the numerical values previously stashed by MatStoreValues_SeqAIJ().
   The nonzero pattern must still be frozen and a stash must exist. */
PetscErrorCode MatRetrieveValues_SeqAIJ(Mat mat)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data;
  PetscInt    nz  = aij->i[mat->rmap->n]; /* total stored entries */

  PetscFunctionBegin;
  PetscCheck(aij->nonew, PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call MatSetOption(A,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);first");
  PetscCheck(aij->saved_values, PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call MatStoreValues(A);first");
  /* copy values over */
  PetscCall(PetscArraycpy(aij->a, aij->saved_values, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatRetrieveValues - Retrieves the copy of the matrix values that was stored with `MatStoreValues()`

  Logically Collective

  Input Parameter:
.
mat - the matrix (currently only `MATAIJ` matrices support this option)

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatStoreValues()`
@*/
PetscErrorCode MatRetrieveValues(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* dispatch to the type-specific implementation (errors if the type provides none) */
  PetscUseMethod(mat, "MatRetrieveValues_C", (Mat), (mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateSeqAIJ - Creates a sparse matrix in `MATSEQAIJ` (compressed row) format
  (the default parallel PETSc format). For good matrix assembly performance
  the user should preallocate the matrix storage by setting the parameter `nz`
  (or the array `nnz`).

  Collective

  Input Parameters:
+ comm - MPI communicator, set to `PETSC_COMM_SELF`
. m    - number of rows
. n    - number of columns
. nz   - number of nonzeros per row (same for all rows)
- nnz  - array containing the number of nonzeros in the various rows
         (possibly different for each row) or NULL

  Output Parameter:
. A - the matrix

  Options Database Keys:
+ -mat_no_inode            - Do not use inodes
- -mat_inode_limit <limit> - Sets inode limit (max limit=5)

  Level: intermediate

  Notes:
  If `nnz` is given then `nz` is ignored

  The `MATSEQAIJ` format, also called
  compressed row storage, is fully compatible with standard Fortran
  storage.  That is, the stored row and column indices can begin at
  either one (as in Fortran) or zero.

  Specify the preallocated storage with either `nz` or `nnz` (not both).
  Set `nz` = `PETSC_DEFAULT` and `nnz` = `NULL` for PETSc to control dynamic memory
  allocation.

  By default, this format uses inodes (identical nodes) when possible, to
  improve numerical efficiency of matrix-vector products and solves. We
  search for consecutive rows with the same nonzero structure, thereby
  reusing matrix information to achieve increased efficiency.

.seealso: [](ch_matrices), `Mat`, [Sparse Matrix Creation](sec_matsparse), `MatCreate()`, `MatCreateAIJ()`, `MatSetValues()`, `MatSeqAIJSetColumnIndices()`, `MatCreateSeqAIJWithArrays()`
@*/
PetscErrorCode MatCreateSeqAIJ(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt nz, const PetscInt nnz[], Mat *A)
{
  PetscFunctionBegin;
  /* convenience wrapper: create, size, type, then preallocate in one call */
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, n, m, n));
  PetscCall(MatSetType(*A, MATSEQAIJ));
  PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(*A, nz, nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJSetPreallocation - For good matrix assembly performance
  the user should preallocate the matrix storage by setting the parameter nz
  (or the array nnz).  By setting these parameters accurately, performance
  during matrix assembly can be increased by more than a factor of 50.

  Collective

  Input Parameters:
+ B   - The matrix
. nz  - number of nonzeros per row (same for all rows)
- nnz - array containing the number of nonzeros in the various rows
        (possibly different for each row) or NULL

  Options Database Keys:
+ -mat_no_inode            - Do not use inodes
- -mat_inode_limit <limit> - Sets inode limit (max limit=5)

  Level: intermediate

  Notes:
  If `nnz` is given then `nz` is ignored

  The `MATSEQAIJ` format, also called
  compressed row storage, is fully compatible with standard Fortran
  storage.  That is, the stored row and column indices can begin at
  either one (as in Fortran) or zero. See the users' manual for details.

  Specify the preallocated storage with either `nz` or `nnz` (not both).
  Set nz = `PETSC_DEFAULT` and `nnz` = `NULL` for PETSc to control dynamic memory
  allocation.

  You can call `MatGetInfo()` to get information on how effective the preallocation was;
  for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
  You can also run with the option -info and look for messages with the string
  malloc in them to see if additional memory allocation was needed.

  Developer Notes:
  Use nz of `MAT_SKIP_ALLOCATION` to not allocate any space for the matrix
  entries or columns indices

  By default, this format uses inodes (identical nodes) when possible, to
  improve numerical efficiency of matrix-vector products and solves. We
  search for consecutive rows with the same nonzero structure, thereby
  reusing matrix information to achieve increased efficiency.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateAIJ()`, `MatSetValues()`, `MatSeqAIJSetColumnIndices()`, `MatCreateSeqAIJWithArrays()`, `MatGetInfo()`,
          `MatSeqAIJSetTotalPreallocation()`
@*/
PetscErrorCode MatSeqAIJSetPreallocation(Mat B, PetscInt nz, const PetscInt nnz[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
  PetscValidType(B, 1);
  /* PetscTryMethod: silently a no-op for types that do not register the method */
  PetscTryMethod(B, "MatSeqAIJSetPreallocation_C", (Mat, PetscInt, const PetscInt[]), (B, nz, nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific preallocation: builds the CSR skeleton (i, j, optionally a) from either a
   uniform per-row count nz or a per-row array nnz. Also exits hash-based assembly mode. */
PetscErrorCode MatSeqAIJSetPreallocation_SeqAIJ(Mat B, PetscInt nz, const PetscInt *nnz)
{
  Mat_SeqAIJ *b              = (Mat_SeqAIJ *)B->data;
  PetscBool   skipallocation = PETSC_FALSE, realalloc = PETSC_FALSE;
  PetscInt    i;

  PetscFunctionBegin;
  /* leave hash-table MatSetValues mode: restore saved ops and free the hash storage */
  if (B->hash_active) {
    B->ops[0] = b->cops;
    PetscCall(PetscHMapIJVDestroy(&b->ht));
    PetscCall(PetscFree(b->dnz));
    B->hash_active = PETSC_FALSE;
  }
  if (nz >= 0 || nnz) realalloc = PETSC_TRUE;
  if (nz == MAT_SKIP_ALLOCATION) {
    skipallocation = PETSC_TRUE;
    nz             = 0;
  }
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));

  if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
  PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nz cannot be less than 0: value %" PetscInt_FMT, nz);
  /* per-row sanity checks are debug-only since they are O(n) */
  if (PetscUnlikelyDebug(nnz)) {
    for (i = 0; i < B->rmap->n; i++) {
      PetscCheck(nnz[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nnz cannot be less than 0: local row %" PetscInt_FMT " value %" PetscInt_FMT, i, nnz[i]);
      PetscCheck(nnz[i] <= B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nnz cannot be greater than row length: local row %" PetscInt_FMT " value %" PetscInt_FMT " rowlength %" PetscInt_FMT, i, nnz[i], B->cmap->n);
    }
  }

  B->preallocated = PETSC_TRUE;
  if (!skipallocation) {
    if (!b->imax) { PetscCall(PetscMalloc1(B->rmap->n, &b->imax)); }
    if (!b->ilen) {
      /* b->ilen will count nonzeros in each row so far. */
      PetscCall(PetscCalloc1(B->rmap->n, &b->ilen));
    } else {
      PetscCall(PetscMemzero(b->ilen, B->rmap->n * sizeof(PetscInt)));
    }
    if (!b->ipre) PetscCall(PetscMalloc1(B->rmap->n, &b->ipre));
    if (!nnz) {
      /* NOTE(review): nz was already normalized above, so this PETSC_DEFAULT branch
         appears unreachable; kept as-is to preserve behavior */
      if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 10;
      else if (nz < 0) nz = 1;
      nz = PetscMin(nz, B->cmap->n);
      for (i = 0; i < B->rmap->n; i++) b->imax[i] = nz;
      nz = nz * B->rmap->n;
    } else {
      /* accumulate in 64-bit to catch PetscInt overflow of the total count */
      PetscInt64 nz64 = 0;
      for (i = 0; i < B->rmap->n; i++) {
        b->imax[i] = nnz[i];
        nz64 += nnz[i];
      }
      PetscCall(PetscIntCast(nz64, &nz));
    }

    /* allocate the matrix space */
    /* FIXME: should B's old memory be unlogged? */
    PetscCall(MatSeqXAIJFreeAIJ(B, &b->a, &b->j, &b->i));
    if (B->structure_only) {
      /* structure-only matrices never store numerical values */
      PetscCall(PetscMalloc1(nz, &b->j));
      PetscCall(PetscMalloc1(B->rmap->n + 1, &b->i));
    } else {
      PetscCall(PetscMalloc3(nz, &b->a, nz, &b->j, B->rmap->n + 1, &b->i));
    }
    /* row offsets are the prefix sums of the per-row capacities */
    b->i[0] = 0;
    for (i = 1; i < B->rmap->n + 1; i++) b->i[i] = b->i[i - 1] + b->imax[i - 1];
    /* ownership flags tell MatSeqXAIJFreeAIJ how to release the arrays later */
    if (B->structure_only) {
      b->singlemalloc = PETSC_FALSE;
      b->free_a       = PETSC_FALSE;
    } else {
      b->singlemalloc = PETSC_TRUE;
      b->free_a       = PETSC_TRUE;
    }
    b->free_ij = PETSC_TRUE;
  } else {
    b->free_a  = PETSC_FALSE;
    b->free_ij = PETSC_FALSE;
  }

  if (b->ipre && nnz != b->ipre && b->imax) {
    /* reserve user-requested sparsity */
    PetscCall(PetscArraycpy(b->ipre, b->imax, B->rmap->n));
  }

  b->nz               = 0;
  b->maxnz            = nz;
  B->info.nz_unneeded = (double)b->maxnz;
  if (realalloc) PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  B->was_assembled = PETSC_FALSE;
  B->assembled     = PETSC_FALSE;
  /* We simply deem preallocation has changed nonzero state. Updating the state
     will give clients (like AIJKokkos) a chance to know something has happened.
  */
  B->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Reset the matrix to its originally preallocated (empty) state using the sparsity
   saved in a->ipre; a no-op when ilen already equals ipre (nothing was inserted). */
PetscErrorCode MatResetPreallocation_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a;
  PetscInt    i;
  PetscBool   skipreset;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);

  /* Check local size. If zero, then return */
  if (!A->rmap->n) PetscFunctionReturn(PETSC_SUCCESS);

  a = (Mat_SeqAIJ *)A->data;
  /* if no saved info, we error out */
  PetscCheck(a->ipre, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "No saved preallocation info ");

  PetscCheck(a->i && a->imax && a->ilen, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Memory info is incomplete, and can not reset preallocation ");

  PetscCall(PetscArraycmp(a->ipre, a->ilen, A->rmap->n, &skipreset));
  if (!skipreset) {
    /* restore per-row capacities and rebuild the prefix-sum row offsets */
    PetscCall(PetscArraycpy(a->imax, a->ipre, A->rmap->n));
    PetscCall(PetscArrayzero(a->ilen, A->rmap->n));
    a->i[0] = 0;
    for (i = 1; i < A->rmap->n + 1; i++) a->i[i] = a->i[i - 1] + a->imax[i - 1];
    A->preallocated     = PETSC_TRUE;
    a->nz               = 0;
    a->maxnz            = a->i[A->rmap->n];
    A->info.nz_unneeded = (double)a->maxnz;
    A->was_assembled    = PETSC_FALSE;
    A->assembled        = PETSC_FALSE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatSeqAIJSetPreallocationCSR - Allocates memory for a sparse sequential matrix in `MATSEQAIJ` format.

  Input Parameters:
+ B - the matrix
. i - the indices into j for the start of each row (starts with zero)
. j - the column indices for each row (starts with zero) these must be sorted for each row
- v - optional values in the matrix

  Level: developer

  Notes:
  The `i`,`j`,`v` values are COPIED with this routine; to avoid the copy use `MatCreateSeqAIJWithArrays()`

  This routine may be called multiple times with different nonzero patterns (or the same nonzero pattern). The nonzero
  structure will be the union of all the previous nonzero structures.

  Developer Notes:
  An optimization could be added to the implementation where it checks if the `i`, and `j` are identical to the current `i` and `j` and
  then just copies the `v` values directly with `PetscMemcpy()`.

  This routine could also take a `PetscCopyMode` argument to allow sharing the values instead of always copying them.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatSeqAIJSetPreallocation()`, `MATSEQAIJ`, `MatResetPreallocation()`
@*/
PetscErrorCode MatSeqAIJSetPreallocationCSR(Mat B, const PetscInt i[], const PetscInt j[], const PetscScalar v[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
  PetscValidType(B, 1);
  PetscTryMethod(B, "MatSeqAIJSetPreallocationCSR_C", (Mat, const PetscInt[], const PetscInt[], const PetscScalar[]), (B, i, j, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific CSR preallocation: derive per-row counts from the row offsets Ii,
   preallocate, then insert the (optional) values row by row and assemble. */
PetscErrorCode MatSeqAIJSetPreallocationCSR_SeqAIJ(Mat B, const PetscInt Ii[], const PetscInt J[], const PetscScalar v[])
{
  PetscInt  i;
  PetscInt  m, n;
  PetscInt  nz;
  PetscInt *nnz;

  PetscFunctionBegin;
  PetscCheck(Ii[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ii[0] must be 0 it is %" PetscInt_FMT, Ii[0]);

  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));

  PetscCall(MatGetSize(B, &m, &n));
  PetscCall(PetscMalloc1(m + 1, &nnz));
  for (i = 0; i < m; i++) {
    nz = Ii[i + 1] - Ii[i]; /* row i holds Ii[i+1]-Ii[i] entries */
    PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local row %" PetscInt_FMT " has a negative number of columns %" PetscInt_FMT, i, nz);
    nnz[i] = nz;
  }
  PetscCall(MatSeqAIJSetPreallocation(B, 0, nnz));
  PetscCall(PetscFree(nnz));

  /* v may be NULL: then only the nonzero structure is inserted (values zero) */
  for (i = 0; i < m; i++) PetscCall(MatSetValues_SeqAIJ(B, 1, &i, Ii[i + 1] - Ii[i], J + Ii[i], v ? v + Ii[i] : NULL, INSERT_VALUES));

  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatSeqAIJKron - Computes `C`, the Kronecker product of `A` and `B`.

  Input Parameters:
+ A     - left-hand side matrix
. B     - right-hand side matrix
- reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`

  Output Parameter:
. C - Kronecker product of `A` and `B`

  Level: intermediate

  Note:
  `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the product matrix has not changed from that last call to `MatSeqAIJKron()`.

.seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJ()`, `MATSEQAIJ`, `MATKAIJ`, `MatReuse`
@*/
PetscErrorCode MatSeqAIJKron(Mat A, Mat B, MatReuse reuse, Mat *C)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  PetscValidHeaderSpecific(B, MAT_CLASSID, 2);
  PetscValidType(B, 2);
  PetscValidPointer(C, 4);
  if (reuse == MAT_REUSE_MATRIX) {
    PetscValidHeaderSpecific(*C, MAT_CLASSID, 4);
    PetscValidType(*C, 4);
  }
  PetscTryMethod(A, "MatSeqAIJKron_C", (Mat, Mat, MatReuse, Mat *), (A, B, reuse, C));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Kronecker product of two MATSEQAIJ matrices. On MAT_INITIAL_MATRIX the CSR structure
   of C is built explicitly (block (m,p) of C holds a[m,:] (x) b[p,:]); on reuse only the
   values are refilled, relying on the structure being unchanged. */
PetscErrorCode MatSeqAIJKron_SeqAIJ(Mat A, Mat B, MatReuse reuse, Mat *C)
{
  Mat                newmat;
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  Mat_SeqAIJ        *b = (Mat_SeqAIJ *)B->data;
  PetscScalar       *v;
  const PetscScalar *aa, *ba;
  PetscInt          *i, *j, m, n, p, q, nnz = 0, am = A->rmap->n, bm = B->rmap->n, an = A->cmap->n, bn = B->cmap->n;
  PetscBool          flg;

  PetscFunctionBegin;
  PetscCheck(!A->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(A->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!B->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(B->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATSEQAIJ, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatType %s", ((PetscObject)B)->type_name);
  PetscCheck(reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatReuse %d", (int)reuse);
  if (reuse == MAT_INITIAL_MATRIX) {
    /* nnz(C) = nnz(A) * nnz(B); i has one offset per row of C plus the sentinel */
    PetscCall(PetscMalloc2(am * bm + 1, &i, a->i[am] * b->i[bm], &j));
    PetscCall(MatCreate(PETSC_COMM_SELF, &newmat));
    PetscCall(MatSetSizes(newmat, am * bm, an * bn, am * bm, an * bn));
    PetscCall(MatSetType(newmat, MATAIJ));
    i[0] = 0;
    for (m = 0; m < am; ++m) {
      for (p = 0; p < bm; ++p) {
        /* row m*bm+p of C pairs row m of A with row p of B */
        i[m * bm + p + 1] = i[m * bm + p] + (a->i[m + 1] - a->i[m]) * (b->i[p + 1] - b->i[p]);
        for (n = a->i[m]; n < a->i[m + 1]; ++n) {
          for (q = b->i[p]; q < b->i[p + 1]; ++q) j[nnz++] = a->j[n] * bn + b->j[q];
        }
      }
    }
    PetscCall(MatSeqAIJSetPreallocationCSR(newmat, i, j, NULL));
    *C = newmat;
    PetscCall(PetscFree2(i, j));
    nnz = 0; /* reset so the value-fill pass below starts at the beginning */
  }
  PetscCall(MatSeqAIJGetArray(*C, &v));
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  PetscCall(MatSeqAIJGetArrayRead(B, &ba));
  /* fill values in the same traversal order used to build the structure above */
  for (m = 0; m < am; ++m) {
    for (p = 0; p < bm; ++p) {
      for (n = a->i[m]; n < a->i[m + 1]; ++n) {
        for (q = b->i[p]; q < b->i[p + 1]; ++q) v[nnz++] = aa[n] * ba[q];
      }
    }
  }
  PetscCall(MatSeqAIJRestoreArray(*C, &v));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscCall(MatSeqAIJRestoreArrayRead(B, &ba));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/dense/seq/dense.h>
#include <petsc/private/kernels/petscaxpy.h>

/*
   Computes
(B'*A')' since computing B*A directly is untenable

           n                       p
        [       ]       [       ]         [       ]
      m [   A   ]  *  n [   B   ]   =   m [   C   ]
        [       ]       [       ]         [       ]

*/
PetscErrorCode MatMatMultNumeric_SeqDense_SeqAIJ(Mat A, Mat B, Mat C)
{
  Mat_SeqDense      *sub_a = (Mat_SeqDense *)A->data;
  Mat_SeqAIJ        *sub_b = (Mat_SeqAIJ *)B->data;
  Mat_SeqDense      *sub_c = (Mat_SeqDense *)C->data;
  PetscInt           i, j, n, m, q, p;
  const PetscInt    *ii, *idx;
  const PetscScalar *b, *a, *a_q;
  PetscScalar       *c, *c_q;
  PetscInt           clda = sub_c->lda; /* leading dimensions of the column-major dense arrays */
  PetscInt           alda = sub_a->lda;

  PetscFunctionBegin;
  m = A->rmap->n;
  n = A->cmap->n;
  p = B->cmap->n;
  a = sub_a->v;
  b = sub_b->a;
  c = sub_c->v;
  /* zero C; when lda == m the storage is contiguous and one memzero suffices */
  if (clda == m) {
    PetscCall(PetscArrayzero(c, m * p));
  } else {
    for (j = 0; j < p; j++)
      for (i = 0; i < m; i++) c[j * clda + i] = 0.0;
  }
  ii  = sub_b->i;
  idx = sub_b->j;
  /* for each nonzero B(i,k): C(:,k) += B(i,k) * A(:,i); b and idx walk B's CSR arrays in step */
  for (i = 0; i < n; i++) {
    q = ii[i + 1] - ii[i];
    while (q-- > 0) {
      c_q = c + clda * (*idx);
      a_q = a + alda * i;
      PetscKernelAXPY(c_q, *b, a_q, m);
      idx++;
      b++;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic stage for C = A*B with A dense and B AIJ: size C, make it dense,
   and install the matching numeric routine. */
PetscErrorCode MatMatMultSymbolic_SeqDense_SeqAIJ(Mat A, Mat B, PetscReal fill, Mat C)
{
  PetscInt  m = A->rmap->n, n = B->cmap->n;
  PetscBool cisdense;

  PetscFunctionBegin;
  PetscCheck(A->cmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "A->cmap->n %" PetscInt_FMT " != B->rmap->n %" PetscInt_FMT, A->cmap->n, B->rmap->n);
  PetscCall(MatSetSizes(C, m, n, m, n));
  PetscCall(MatSetBlockSizesFromMats(C, A, B));
  /* keep an existing dense subtype (e.g. CUDA/HIP) if C already has one */
  PetscCall(PetscObjectTypeCompareAny((PetscObject)C, &cisdense, MATSEQDENSE, MATSEQDENSECUDA, MATSEQDENSEHIP, ""));
  if (!cisdense) PetscCall(MatSetType(C, MATDENSE));
  PetscCall(MatSetUp(C));

  C->ops->matmultnumeric = MatMatMultNumeric_SeqDense_SeqAIJ;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
MATSEQAIJ - MATSEQAIJ = "seqaij" - A matrix type to be used for sequential sparse matrices,
  based on compressed sparse row format.

  Options Database Key:
. -mat_type seqaij - sets the matrix type to "seqaij" during a call to MatSetFromOptions()

  Level: beginner

  Notes:
  `MatSetValues()` may be called for this matrix type with a `NULL` argument for the numerical values,
  in this case the values associated with the rows and columns one passes in are set to zero
  in the matrix

  `MatSetOption`(,`MAT_STRUCTURE_ONLY`,`PETSC_TRUE`) may be called for this matrix type. In this case no
  space is allocated for the nonzero entries and any entries passed with `MatSetValues()` are ignored

  Developer Note:
  It would be nice if all matrix formats supported passing `NULL` in for the numerical values

.seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJ()`, `MatSetFromOptions()`, `MatSetType()`, `MatCreate()`, `MatType`, `MATSELL`, `MATSEQSELL`, `MATMPISELL`
M*/

/*MC
  MATAIJ - MATAIJ = "aij" - A matrix type to be used for sparse matrices.

  This matrix type is identical to `MATSEQAIJ` when constructed with a single process communicator,
  and `MATMPIAIJ` otherwise. As a result, for single process communicators,
  `MatSeqAIJSetPreallocation()` is supported, and similarly `MatMPIAIJSetPreallocation()` is supported
  for communicators controlling multiple processes. It is recommended that you call both of
  the above preallocation routines for simplicity.

  Options Database Key:
. -mat_type aij - sets the matrix type to "aij" during a call to `MatSetFromOptions()`

  Level: beginner

  Note:
  Subclasses include `MATAIJCUSPARSE`, `MATAIJPERM`, `MATAIJSELL`, `MATAIJMKL`, `MATAIJCRL`, and also automatically switches over to use inodes when
  enough exist.
4334 4335 .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MatCreateSeqAIJ()`, `MATSEQAIJ`, `MATMPIAIJ`, `MATSELL`, `MATSEQSELL`, `MATMPISELL` 4336 M*/ 4337 4338 /*MC 4339 MATAIJCRL - MATAIJCRL = "aijcrl" - A matrix type to be used for sparse matrices. 4340 4341 Options Database Key: 4342 . -mat_type aijcrl - sets the matrix type to "aijcrl" during a call to `MatSetFromOptions()` 4343 4344 Level: beginner 4345 4346 Note: 4347 This matrix type is identical to `MATSEQAIJCRL` when constructed with a single process communicator, 4348 and `MATMPIAIJCRL` otherwise. As a result, for single process communicators, 4349 `MatSeqAIJSetPreallocation()` is supported, and similarly `MatMPIAIJSetPreallocation()` is supported 4350 for communicators controlling multiple processes. It is recommended that you call both of 4351 the above preallocation routines for simplicity. 4352 4353 .seealso: [](ch_matrices), `Mat`, `MatCreateMPIAIJCRL`, `MATSEQAIJCRL`, `MATMPIAIJCRL`, `MATSEQAIJCRL`, `MATMPIAIJCRL` 4354 M*/ 4355 4356 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJCRL(Mat, MatType, MatReuse, Mat *); 4357 #if defined(PETSC_HAVE_ELEMENTAL) 4358 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat, MatType, MatReuse, Mat *); 4359 #endif 4360 #if defined(PETSC_HAVE_SCALAPACK) 4361 PETSC_INTERN PetscErrorCode MatConvert_AIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *); 4362 #endif 4363 #if defined(PETSC_HAVE_HYPRE) 4364 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType, MatReuse, Mat *); 4365 #endif 4366 4367 PETSC_EXTERN PetscErrorCode MatConvert_SeqAIJ_SeqSELL(Mat, MatType, MatReuse, Mat *); 4368 PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat, MatType, MatReuse, Mat *); 4369 PETSC_INTERN PetscErrorCode MatProductSetFromOptions_IS_XAIJ(Mat); 4370 4371 /*@C 4372 MatSeqAIJGetArray - gives read/write access to the array where the data for a `MATSEQAIJ` matrix is stored 4373 4374 Not Collective 4375 4376 Input Parameter: 4377 . 
   A - a `MATSEQAIJ` matrix

  Output Parameter:
. array - pointer to the data

  Level: intermediate

  Fortran Notes:
  `MatSeqAIJGetArray()` Fortran binding is deprecated (since PETSc 3.19), use `MatSeqAIJGetArrayF90()`

.seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArray()`, `MatSeqAIJGetArrayF90()`
@*/
PetscErrorCode MatSeqAIJGetArray(Mat A, PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  /* Subclasses (e.g. device formats) may override how the host array is produced;
     the plain SeqAIJ case simply hands out the internal value array. */
  if (aij->ops->getarray) {
    PetscCall((*aij->ops->getarray)(A, array));
  } else {
    *array = aij->a;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJRestoreArray - returns access to the array where the data for a `MATSEQAIJ` matrix is stored obtained by `MatSeqAIJGetArray()`

  Not Collective

  Input Parameters:
+ A     - a `MATSEQAIJ` matrix
- array - pointer to the data

  Level: intermediate

  Fortran Notes:
  `MatSeqAIJRestoreArray()` Fortran binding is deprecated (since PETSc 3.19), use `MatSeqAIJRestoreArrayF90()`

.seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArrayF90()`
@*/
PetscErrorCode MatSeqAIJRestoreArray(Mat A, PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->restorearray) {
    PetscCall((*aij->ops->restorearray)(A, array));
  } else {
    /* Zero the caller's pointer so accidental use after restore is caught quickly. */
    *array = NULL;
  }
  /* The caller had read/write access, so the values may have changed:
     drop cached diagonal information and bump the object state. */
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJGetArrayRead - gives read-only access to the array where the data for a `MATSEQAIJ` matrix is stored

  Not Collective; No Fortran Support

  Input Parameter:
. A - a `MATSEQAIJ` matrix

  Output Parameter:
. array - pointer to the data

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArrayRead()`
@*/
PetscErrorCode MatSeqAIJGetArrayRead(Mat A, const PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->getarrayread) {
    PetscCall((*aij->ops->getarrayread)(A, array));
  } else {
    *array = aij->a;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJRestoreArrayRead - restore the read-only access array obtained from `MatSeqAIJGetArrayRead()`

  Not Collective; No Fortran Support

  Input Parameter:
. A - a `MATSEQAIJ` matrix

  Output Parameter:
. array - pointer to the data

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJGetArrayRead()`
@*/
PetscErrorCode MatSeqAIJRestoreArrayRead(Mat A, const PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->restorearrayread) {
    PetscCall((*aij->ops->restorearrayread)(A, array));
  } else {
    *array = NULL;
  }
  /* Read-only access: no diagonal invalidation or state bump is needed here,
     in contrast to MatSeqAIJRestoreArray(). */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJGetArrayWrite - gives write-only access to the array where the data for a `MATSEQAIJ` matrix is stored

  Not Collective; No Fortran Support

  Input Parameter:
. A - a `MATSEQAIJ` matrix

  Output Parameter:
.
array - pointer to the data

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArrayRead()`
@*/
PetscErrorCode MatSeqAIJGetArrayWrite(Mat A, PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->getarraywrite) {
    PetscCall((*aij->ops->getarraywrite)(A, array));
  } else {
    *array = aij->a;
  }
  /* Write access is granted, so the values are assumed to change: invalidate the
     cached diagonal and bump the state already at Get time. */
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJRestoreArrayWrite - restore the write-only access array obtained from `MatSeqAIJGetArrayWrite()`

  Not Collective; No Fortran Support

  Input Parameter:
. A - a `MATSEQAIJ` matrix

  Output Parameter:
. array - pointer to the data

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJGetArrayRead()`
@*/
PetscErrorCode MatSeqAIJRestoreArrayWrite(Mat A, PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->restorearraywrite) {
    PetscCall((*aij->ops->restorearraywrite)(A, array));
  } else {
    *array = NULL;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJGetCSRAndMemType - Get the CSR arrays and the memory type of the `MATSEQAIJ` matrix

  Not Collective; No Fortran Support

  Input Parameter:
. mat - a matrix of type `MATSEQAIJ` or its subclasses

  Output Parameters:
+ i     - row map array of the matrix
. j     - column index array of the matrix
. a     - data array of the matrix
- mtype - memory type of the arrays

  Level: developer

  Notes:
  Any of the output parameters can be `NULL`, in which case the corresponding value is not returned.
  If mat is a device matrix, the arrays are on the device. Otherwise, they are on the host.

  One can call this routine on a preallocated but not assembled matrix to just get the memory of the CSR underneath the matrix.
  If the matrix is assembled, the data array `a` is guaranteed to have the latest values of the matrix.

.seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJGetArrayRead()`
@*/
PetscErrorCode MatSeqAIJGetCSRAndMemType(Mat mat, const PetscInt **i, const PetscInt **j, PetscScalar **a, PetscMemType *mtype)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data;

  PetscFunctionBegin;
  PetscCheck(mat->preallocated, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "matrix is not preallocated");
  if (aij->ops->getcsrandmemtype) {
    /* device subclasses return device pointers and their own memory type */
    PetscCall((*aij->ops->getcsrandmemtype)(mat, i, j, a, mtype));
  } else {
    if (i) *i = aij->i;
    if (j) *j = aij->j;
    if (a) *a = aij->a;
    if (mtype) *mtype = PETSC_MEMTYPE_HOST;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJGetMaxRowNonzeros - returns the maximum number of nonzeros in any row

  Not Collective

  Input Parameter:
. A - a `MATSEQAIJ` matrix

  Output Parameter:
.
nz - the maximum number of nonzeros in any row 4599 4600 Level: intermediate 4601 4602 .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArray()`, `MatSeqAIJGetArrayF90()` 4603 @*/ 4604 PetscErrorCode MatSeqAIJGetMaxRowNonzeros(Mat A, PetscInt *nz) 4605 { 4606 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data; 4607 4608 PetscFunctionBegin; 4609 *nz = aij->rmax; 4610 PetscFunctionReturn(PETSC_SUCCESS); 4611 } 4612 4613 static PetscErrorCode MatCOOStructDestroy_SeqAIJ(void *data) 4614 { 4615 MatCOOStruct_SeqAIJ *coo = (MatCOOStruct_SeqAIJ *)data; 4616 PetscFunctionBegin; 4617 PetscCall(PetscFree(coo->perm)); 4618 PetscCall(PetscFree(coo->jmap)); 4619 PetscCall(PetscFree(coo)); 4620 PetscFunctionReturn(PETSC_SUCCESS); 4621 } 4622 4623 PetscErrorCode MatSetPreallocationCOO_SeqAIJ(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[]) 4624 { 4625 MPI_Comm comm; 4626 PetscInt *i, *j; 4627 PetscInt M, N, row; 4628 PetscCount k, p, q, nneg, nnz, start, end; /* Index the coo array, so use PetscCount as their type */ 4629 PetscInt *Ai; /* Change to PetscCount once we use it for row pointers */ 4630 PetscInt *Aj; 4631 PetscScalar *Aa; 4632 Mat_SeqAIJ *seqaij = (Mat_SeqAIJ *)(mat->data); 4633 MatType rtype; 4634 PetscCount *perm, *jmap; 4635 PetscContainer container; 4636 MatCOOStruct_SeqAIJ *coo; 4637 4638 PetscFunctionBegin; 4639 PetscCall(PetscObjectGetComm((PetscObject)mat, &comm)); 4640 PetscCall(MatGetSize(mat, &M, &N)); 4641 i = coo_i; 4642 j = coo_j; 4643 PetscCall(PetscMalloc1(coo_n, &perm)); 4644 for (k = 0; k < coo_n; k++) { /* Ignore entries with negative row or col indices */ 4645 if (j[k] < 0) i[k] = -1; 4646 perm[k] = k; 4647 } 4648 4649 /* Sort by row */ 4650 PetscCall(PetscSortIntWithIntCountArrayPair(coo_n, i, j, perm)); 4651 4652 /* Advance k to the first row with a non-negative index */ 4653 for (k = 0; k < coo_n; k++) 4654 if (i[k] >= 0) break; 4655 nneg = k; 4656 PetscCall(PetscMalloc1(coo_n - nneg + 1, &jmap)); /* +1 to make a CSR-like data structure. 
jmap[i] originally is the number of repeats for i-th nonzero */ 4657 nnz = 0; /* Total number of unique nonzeros to be counted */ 4658 jmap++; /* Inc jmap by 1 for convenience */ 4659 4660 PetscCall(PetscCalloc1(M + 1, &Ai)); /* CSR of A */ 4661 PetscCall(PetscMalloc1(coo_n - nneg, &Aj)); /* We have at most coo_n-nneg unique nonzeros */ 4662 4663 /* Support for HYPRE */ 4664 PetscBool hypre; 4665 const char *name; 4666 PetscCall(PetscObjectGetName((PetscObject)mat, &name)); 4667 PetscCall(PetscStrcmp("_internal_COO_mat_for_hypre", name, &hypre)); 4668 4669 /* In each row, sort by column, then unique column indices to get row length */ 4670 Ai++; /* Inc by 1 for convenience */ 4671 q = 0; /* q-th unique nonzero, with q starting from 0 */ 4672 while (k < coo_n) { 4673 row = i[k]; 4674 start = k; /* [start,end) indices for this row */ 4675 while (k < coo_n && i[k] == row) k++; 4676 end = k; 4677 /* hack for HYPRE: swap min column to diag so that diagonal values will go first */ 4678 if (hypre) { 4679 PetscInt minj = PETSC_MAX_INT; 4680 PetscBool hasdiag = PETSC_FALSE; 4681 for (p = start; p < end; p++) { 4682 hasdiag = (PetscBool)(hasdiag || (j[p] == row)); 4683 minj = PetscMin(minj, j[p]); 4684 } 4685 if (hasdiag) { 4686 for (p = start; p < end; p++) { 4687 if (j[p] == minj) j[p] = row; 4688 else if (j[p] == row) j[p] = minj; 4689 } 4690 } 4691 } 4692 PetscCall(PetscSortIntWithCountArray(end - start, j + start, perm + start)); 4693 4694 /* Find number of unique col entries in this row */ 4695 Aj[q] = j[start]; /* Log the first nonzero in this row */ 4696 jmap[q] = 1; /* Number of repeats of this nonzero entry */ 4697 Ai[row] = 1; 4698 nnz++; 4699 4700 for (p = start + 1; p < end; p++) { /* Scan remaining nonzero in this row */ 4701 if (j[p] != j[p - 1]) { /* Meet a new nonzero */ 4702 q++; 4703 jmap[q] = 1; 4704 Aj[q] = j[p]; 4705 Ai[row]++; 4706 nnz++; 4707 } else { 4708 jmap[q]++; 4709 } 4710 } 4711 q++; /* Move to next row and thus next unique nonzero */ 4712 } 
4713 Ai--; /* Back to the beginning of Ai[] */ 4714 for (k = 0; k < M; k++) Ai[k + 1] += Ai[k]; 4715 jmap--; /* Back to the beginning of jmap[] */ 4716 jmap[0] = 0; 4717 for (k = 0; k < nnz; k++) jmap[k + 1] += jmap[k]; 4718 if (nnz < coo_n - nneg) { /* Realloc with actual number of unique nonzeros */ 4719 PetscCount *jmap_new; 4720 PetscInt *Aj_new; 4721 4722 PetscCall(PetscMalloc1(nnz + 1, &jmap_new)); 4723 PetscCall(PetscArraycpy(jmap_new, jmap, nnz + 1)); 4724 PetscCall(PetscFree(jmap)); 4725 jmap = jmap_new; 4726 4727 PetscCall(PetscMalloc1(nnz, &Aj_new)); 4728 PetscCall(PetscArraycpy(Aj_new, Aj, nnz)); 4729 PetscCall(PetscFree(Aj)); 4730 Aj = Aj_new; 4731 } 4732 4733 if (nneg) { /* Discard heading entries with negative indices in perm[], as we'll access it from index 0 in MatSetValuesCOO */ 4734 PetscCount *perm_new; 4735 4736 PetscCall(PetscMalloc1(coo_n - nneg, &perm_new)); 4737 PetscCall(PetscArraycpy(perm_new, perm + nneg, coo_n - nneg)); 4738 PetscCall(PetscFree(perm)); 4739 perm = perm_new; 4740 } 4741 4742 PetscCall(MatGetRootType_Private(mat, &rtype)); 4743 PetscCall(PetscCalloc1(nnz, &Aa)); /* Zero the matrix */ 4744 PetscCall(MatSetSeqAIJWithArrays_private(PETSC_COMM_SELF, M, N, Ai, Aj, Aa, rtype, mat)); 4745 4746 seqaij->singlemalloc = PETSC_FALSE; /* Ai, Aj and Aa are not allocated in one big malloc */ 4747 seqaij->free_a = seqaij->free_ij = PETSC_TRUE; /* Let newmat own Ai, Aj and Aa */ 4748 4749 // Put the COO struct in a container and then attach that to the matrix 4750 PetscCall(PetscMalloc1(1, &coo)); 4751 coo->nz = nnz; 4752 coo->n = coo_n; 4753 coo->Atot = coo_n - nneg; // Annz is seqaij->nz, so no need to record that again 4754 coo->jmap = jmap; // of length nnz+1 4755 coo->perm = perm; 4756 PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container)); 4757 PetscCall(PetscContainerSetPointer(container, coo)); 4758 PetscCall(PetscContainerSetUserDestroy(container, MatCOOStructDestroy_SeqAIJ)); 4759 
PetscCall(PetscObjectCompose((PetscObject)mat, "__PETSc_MatCOOStruct_Host", (PetscObject)container)); 4760 PetscCall(PetscContainerDestroy(&container)); 4761 PetscFunctionReturn(PETSC_SUCCESS); 4762 } 4763 4764 static PetscErrorCode MatSetValuesCOO_SeqAIJ(Mat A, const PetscScalar v[], InsertMode imode) 4765 { 4766 Mat_SeqAIJ *aseq = (Mat_SeqAIJ *)A->data; 4767 PetscCount i, j, Annz = aseq->nz; 4768 PetscCount *perm, *jmap; 4769 PetscScalar *Aa; 4770 PetscContainer container; 4771 MatCOOStruct_SeqAIJ *coo; 4772 4773 PetscFunctionBegin; 4774 PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_MatCOOStruct_Host", (PetscObject *)&container)); 4775 PetscCheck(container, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Not found MatCOOStruct on this matrix"); 4776 PetscCall(PetscContainerGetPointer(container, (void **)&coo)); 4777 perm = coo->perm; 4778 jmap = coo->jmap; 4779 PetscCall(MatSeqAIJGetArray(A, &Aa)); 4780 for (i = 0; i < Annz; i++) { 4781 PetscScalar sum = 0.0; 4782 for (j = jmap[i]; j < jmap[i + 1]; j++) sum += v[perm[j]]; 4783 Aa[i] = (imode == INSERT_VALUES ? 
0.0 : Aa[i]) + sum; 4784 } 4785 PetscCall(MatSeqAIJRestoreArray(A, &Aa)); 4786 PetscFunctionReturn(PETSC_SUCCESS); 4787 } 4788 4789 #if defined(PETSC_HAVE_CUDA) 4790 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJCUSPARSE(Mat, MatType, MatReuse, Mat *); 4791 #endif 4792 #if defined(PETSC_HAVE_HIP) 4793 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJHIPSPARSE(Mat, MatType, MatReuse, Mat *); 4794 #endif 4795 #if defined(PETSC_HAVE_KOKKOS_KERNELS) 4796 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJKokkos(Mat, MatType, MatReuse, Mat *); 4797 #endif 4798 4799 PETSC_EXTERN PetscErrorCode MatCreate_SeqAIJ(Mat B) 4800 { 4801 Mat_SeqAIJ *b; 4802 PetscMPIInt size; 4803 4804 PetscFunctionBegin; 4805 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &size)); 4806 PetscCheck(size <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Comm must be of size 1"); 4807 4808 PetscCall(PetscNew(&b)); 4809 4810 B->data = (void *)b; 4811 B->ops[0] = MatOps_Values; 4812 if (B->sortedfull) B->ops->setvalues = MatSetValues_SeqAIJ_SortedFull; 4813 4814 b->row = NULL; 4815 b->col = NULL; 4816 b->icol = NULL; 4817 b->reallocs = 0; 4818 b->ignorezeroentries = PETSC_FALSE; 4819 b->roworiented = PETSC_TRUE; 4820 b->nonew = 0; 4821 b->diag = NULL; 4822 b->solve_work = NULL; 4823 B->spptr = NULL; 4824 b->saved_values = NULL; 4825 b->idiag = NULL; 4826 b->mdiag = NULL; 4827 b->ssor_work = NULL; 4828 b->omega = 1.0; 4829 b->fshift = 0.0; 4830 b->idiagvalid = PETSC_FALSE; 4831 b->ibdiagvalid = PETSC_FALSE; 4832 b->keepnonzeropattern = PETSC_FALSE; 4833 4834 PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATSEQAIJ)); 4835 #if defined(PETSC_HAVE_MATLAB) 4836 PetscCall(PetscObjectComposeFunction((PetscObject)B, "PetscMatlabEnginePut_C", MatlabEnginePut_SeqAIJ)); 4837 PetscCall(PetscObjectComposeFunction((PetscObject)B, "PetscMatlabEngineGet_C", MatlabEngineGet_SeqAIJ)); 4838 #endif 4839 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqAIJSetColumnIndices_C", 
MatSeqAIJSetColumnIndices_SeqAIJ)); 4840 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatStoreValues_C", MatStoreValues_SeqAIJ)); 4841 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatRetrieveValues_C", MatRetrieveValues_SeqAIJ)); 4842 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqsbaij_C", MatConvert_SeqAIJ_SeqSBAIJ)); 4843 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqbaij_C", MatConvert_SeqAIJ_SeqBAIJ)); 4844 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijperm_C", MatConvert_SeqAIJ_SeqAIJPERM)); 4845 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijsell_C", MatConvert_SeqAIJ_SeqAIJSELL)); 4846 #if defined(PETSC_HAVE_MKL_SPARSE) 4847 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijmkl_C", MatConvert_SeqAIJ_SeqAIJMKL)); 4848 #endif 4849 #if defined(PETSC_HAVE_CUDA) 4850 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijcusparse_C", MatConvert_SeqAIJ_SeqAIJCUSPARSE)); 4851 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijcusparse_seqaij_C", MatProductSetFromOptions_SeqAIJ)); 4852 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_seqaijcusparse_C", MatProductSetFromOptions_SeqAIJ)); 4853 #endif 4854 #if defined(PETSC_HAVE_HIP) 4855 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijhipsparse_C", MatConvert_SeqAIJ_SeqAIJHIPSPARSE)); 4856 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijhipsparse_seqaij_C", MatProductSetFromOptions_SeqAIJ)); 4857 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_seqaijhipsparse_C", MatProductSetFromOptions_SeqAIJ)); 4858 #endif 4859 #if defined(PETSC_HAVE_KOKKOS_KERNELS) 4860 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijkokkos_C", 
MatConvert_SeqAIJ_SeqAIJKokkos)); 4861 #endif 4862 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijcrl_C", MatConvert_SeqAIJ_SeqAIJCRL)); 4863 #if defined(PETSC_HAVE_ELEMENTAL) 4864 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_elemental_C", MatConvert_SeqAIJ_Elemental)); 4865 #endif 4866 #if defined(PETSC_HAVE_SCALAPACK) 4867 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_scalapack_C", MatConvert_AIJ_ScaLAPACK)); 4868 #endif 4869 #if defined(PETSC_HAVE_HYPRE) 4870 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_hypre_C", MatConvert_AIJ_HYPRE)); 4871 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_transpose_seqaij_seqaij_C", MatProductSetFromOptions_Transpose_AIJ_AIJ)); 4872 #endif 4873 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqdense_C", MatConvert_SeqAIJ_SeqDense)); 4874 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqsell_C", MatConvert_SeqAIJ_SeqSELL)); 4875 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_is_C", MatConvert_XAIJ_IS)); 4876 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatIsTranspose_C", MatIsTranspose_SeqAIJ)); 4877 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatIsHermitianTranspose_C", MatIsTranspose_SeqAIJ)); 4878 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqAIJSetPreallocation_C", MatSeqAIJSetPreallocation_SeqAIJ)); 4879 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatResetPreallocation_C", MatResetPreallocation_SeqAIJ)); 4880 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqAIJSetPreallocationCSR_C", MatSeqAIJSetPreallocationCSR_SeqAIJ)); 4881 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatReorderForNonzeroDiagonal_C", MatReorderForNonzeroDiagonal_SeqAIJ)); 4882 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_is_seqaij_C", 
MatProductSetFromOptions_IS_XAIJ)); 4883 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqdense_seqaij_C", MatProductSetFromOptions_SeqDense_SeqAIJ)); 4884 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_seqaij_C", MatProductSetFromOptions_SeqAIJ)); 4885 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqAIJKron_C", MatSeqAIJKron_SeqAIJ)); 4886 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_SeqAIJ)); 4887 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_SeqAIJ)); 4888 PetscCall(MatCreate_SeqAIJ_Inode(B)); 4889 PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATSEQAIJ)); 4890 PetscCall(MatSeqAIJSetTypeFromOptions(B)); /* this allows changing the matrix subtype to say MATSEQAIJPERM */ 4891 PetscFunctionReturn(PETSC_SUCCESS); 4892 } 4893 4894 /* 4895 Given a matrix generated with MatGetFactor() duplicates all the information in A into C 4896 */ 4897 PetscErrorCode MatDuplicateNoCreate_SeqAIJ(Mat C, Mat A, MatDuplicateOption cpvalues, PetscBool mallocmatspace) 4898 { 4899 Mat_SeqAIJ *c = (Mat_SeqAIJ *)C->data, *a = (Mat_SeqAIJ *)A->data; 4900 PetscInt m = A->rmap->n, i; 4901 4902 PetscFunctionBegin; 4903 PetscCheck(A->assembled || cpvalues == MAT_DO_NOT_COPY_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot duplicate unassembled matrix"); 4904 4905 C->factortype = A->factortype; 4906 c->row = NULL; 4907 c->col = NULL; 4908 c->icol = NULL; 4909 c->reallocs = 0; 4910 4911 C->assembled = A->assembled; 4912 4913 if (A->preallocated) { 4914 PetscCall(PetscLayoutReference(A->rmap, &C->rmap)); 4915 PetscCall(PetscLayoutReference(A->cmap, &C->cmap)); 4916 4917 if (!A->hash_active) { 4918 PetscCall(PetscMalloc1(m, &c->imax)); 4919 PetscCall(PetscMemcpy(c->imax, a->imax, m * sizeof(PetscInt))); 4920 PetscCall(PetscMalloc1(m, &c->ilen)); 4921 PetscCall(PetscMemcpy(c->ilen, a->ilen, m * 
sizeof(PetscInt))); 4922 4923 /* allocate the matrix space */ 4924 if (mallocmatspace) { 4925 PetscCall(PetscMalloc3(a->i[m], &c->a, a->i[m], &c->j, m + 1, &c->i)); 4926 4927 c->singlemalloc = PETSC_TRUE; 4928 4929 PetscCall(PetscArraycpy(c->i, a->i, m + 1)); 4930 if (m > 0) { 4931 PetscCall(PetscArraycpy(c->j, a->j, a->i[m])); 4932 if (cpvalues == MAT_COPY_VALUES) { 4933 const PetscScalar *aa; 4934 4935 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 4936 PetscCall(PetscArraycpy(c->a, aa, a->i[m])); 4937 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 4938 } else { 4939 PetscCall(PetscArrayzero(c->a, a->i[m])); 4940 } 4941 } 4942 } 4943 C->preallocated = PETSC_TRUE; 4944 } else { 4945 PetscCheck(mallocmatspace, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Cannot malloc matrix memory from a non-preallocated matrix"); 4946 PetscCall(MatSetUp(C)); 4947 } 4948 4949 c->ignorezeroentries = a->ignorezeroentries; 4950 c->roworiented = a->roworiented; 4951 c->nonew = a->nonew; 4952 if (a->diag) { 4953 PetscCall(PetscMalloc1(m + 1, &c->diag)); 4954 PetscCall(PetscMemcpy(c->diag, a->diag, m * sizeof(PetscInt))); 4955 } else c->diag = NULL; 4956 4957 c->solve_work = NULL; 4958 c->saved_values = NULL; 4959 c->idiag = NULL; 4960 c->ssor_work = NULL; 4961 c->keepnonzeropattern = a->keepnonzeropattern; 4962 c->free_a = PETSC_TRUE; 4963 c->free_ij = PETSC_TRUE; 4964 4965 c->rmax = a->rmax; 4966 c->nz = a->nz; 4967 c->maxnz = a->nz; /* Since we allocate exactly the right amount */ 4968 4969 c->compressedrow.use = a->compressedrow.use; 4970 c->compressedrow.nrows = a->compressedrow.nrows; 4971 if (a->compressedrow.use) { 4972 i = a->compressedrow.nrows; 4973 PetscCall(PetscMalloc2(i + 1, &c->compressedrow.i, i, &c->compressedrow.rindex)); 4974 PetscCall(PetscArraycpy(c->compressedrow.i, a->compressedrow.i, i + 1)); 4975 PetscCall(PetscArraycpy(c->compressedrow.rindex, a->compressedrow.rindex, i)); 4976 } else { 4977 c->compressedrow.use = PETSC_FALSE; 4978 c->compressedrow.i = 
NULL; 4979 c->compressedrow.rindex = NULL; 4980 } 4981 c->nonzerorowcnt = a->nonzerorowcnt; 4982 C->nonzerostate = A->nonzerostate; 4983 4984 PetscCall(MatDuplicate_SeqAIJ_Inode(A, cpvalues, &C)); 4985 } 4986 PetscCall(PetscFunctionListDuplicate(((PetscObject)A)->qlist, &((PetscObject)C)->qlist)); 4987 PetscFunctionReturn(PETSC_SUCCESS); 4988 } 4989 4990 PetscErrorCode MatDuplicate_SeqAIJ(Mat A, MatDuplicateOption cpvalues, Mat *B) 4991 { 4992 PetscFunctionBegin; 4993 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B)); 4994 PetscCall(MatSetSizes(*B, A->rmap->n, A->cmap->n, A->rmap->n, A->cmap->n)); 4995 if (!(A->rmap->n % A->rmap->bs) && !(A->cmap->n % A->cmap->bs)) PetscCall(MatSetBlockSizesFromMats(*B, A, A)); 4996 PetscCall(MatSetType(*B, ((PetscObject)A)->type_name)); 4997 PetscCall(MatDuplicateNoCreate_SeqAIJ(*B, A, cpvalues, PETSC_TRUE)); 4998 PetscFunctionReturn(PETSC_SUCCESS); 4999 } 5000 5001 PetscErrorCode MatLoad_SeqAIJ(Mat newMat, PetscViewer viewer) 5002 { 5003 PetscBool isbinary, ishdf5; 5004 5005 PetscFunctionBegin; 5006 PetscValidHeaderSpecific(newMat, MAT_CLASSID, 1); 5007 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 5008 /* force binary viewer to load .info file if it has not yet done so */ 5009 PetscCall(PetscViewerSetUp(viewer)); 5010 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 5011 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5)); 5012 if (isbinary) { 5013 PetscCall(MatLoad_SeqAIJ_Binary(newMat, viewer)); 5014 } else if (ishdf5) { 5015 #if defined(PETSC_HAVE_HDF5) 5016 PetscCall(MatLoad_AIJ_HDF5(newMat, viewer)); 5017 #else 5018 SETERRQ(PetscObjectComm((PetscObject)newMat), PETSC_ERR_SUP, "HDF5 not supported in this build.\nPlease reconfigure using --download-hdf5"); 5019 #endif 5020 } else { 5021 SETERRQ(PetscObjectComm((PetscObject)newMat), PETSC_ERR_SUP, "Viewer type %s not yet supported for reading %s matrices", ((PetscObject)viewer)->type_name, 
((PetscObject)newMat)->type_name); 5022 } 5023 PetscFunctionReturn(PETSC_SUCCESS); 5024 } 5025 5026 PetscErrorCode MatLoad_SeqAIJ_Binary(Mat mat, PetscViewer viewer) 5027 { 5028 Mat_SeqAIJ *a = (Mat_SeqAIJ *)mat->data; 5029 PetscInt header[4], *rowlens, M, N, nz, sum, rows, cols, i; 5030 5031 PetscFunctionBegin; 5032 PetscCall(PetscViewerSetUp(viewer)); 5033 5034 /* read in matrix header */ 5035 PetscCall(PetscViewerBinaryRead(viewer, header, 4, NULL, PETSC_INT)); 5036 PetscCheck(header[0] == MAT_FILE_CLASSID, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a matrix object in file"); 5037 M = header[1]; 5038 N = header[2]; 5039 nz = header[3]; 5040 PetscCheck(M >= 0, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "Matrix row size (%" PetscInt_FMT ") in file is negative", M); 5041 PetscCheck(N >= 0, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "Matrix column size (%" PetscInt_FMT ") in file is negative", N); 5042 PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Matrix stored in special format on disk, cannot load as SeqAIJ"); 5043 5044 /* set block sizes from the viewer's .info file */ 5045 PetscCall(MatLoad_Binary_BlockSizes(mat, viewer)); 5046 /* set local and global sizes if not set already */ 5047 if (mat->rmap->n < 0) mat->rmap->n = M; 5048 if (mat->cmap->n < 0) mat->cmap->n = N; 5049 if (mat->rmap->N < 0) mat->rmap->N = M; 5050 if (mat->cmap->N < 0) mat->cmap->N = N; 5051 PetscCall(PetscLayoutSetUp(mat->rmap)); 5052 PetscCall(PetscLayoutSetUp(mat->cmap)); 5053 5054 /* check if the matrix sizes are correct */ 5055 PetscCall(MatGetSize(mat, &rows, &cols)); 5056 PetscCheck(M == rows && N == cols, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Matrix in file of different sizes (%" PetscInt_FMT ", %" PetscInt_FMT ") than the input matrix (%" PetscInt_FMT ", %" PetscInt_FMT ")", M, N, rows, cols); 5057 5058 /* read in row lengths */ 5059 PetscCall(PetscMalloc1(M, &rowlens)); 5060 
PetscCall(PetscViewerBinaryRead(viewer, rowlens, M, NULL, PETSC_INT)); 5061 /* check if sum(rowlens) is same as nz */ 5062 sum = 0; 5063 for (i = 0; i < M; i++) sum += rowlens[i]; 5064 PetscCheck(sum == nz, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Inconsistent matrix data in file: nonzeros = %" PetscInt_FMT ", sum-row-lengths = %" PetscInt_FMT, nz, sum); 5065 /* preallocate and check sizes */ 5066 PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(mat, 0, rowlens)); 5067 PetscCall(MatGetSize(mat, &rows, &cols)); 5068 PetscCheck(M == rows && N == cols, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Matrix in file of different length (%" PetscInt_FMT ", %" PetscInt_FMT ") than the input matrix (%" PetscInt_FMT ", %" PetscInt_FMT ")", M, N, rows, cols); 5069 /* store row lengths */ 5070 PetscCall(PetscArraycpy(a->ilen, rowlens, M)); 5071 PetscCall(PetscFree(rowlens)); 5072 5073 /* fill in "i" row pointers */ 5074 a->i[0] = 0; 5075 for (i = 0; i < M; i++) a->i[i + 1] = a->i[i] + a->ilen[i]; 5076 /* read in "j" column indices */ 5077 PetscCall(PetscViewerBinaryRead(viewer, a->j, nz, NULL, PETSC_INT)); 5078 /* read in "a" nonzero values */ 5079 PetscCall(PetscViewerBinaryRead(viewer, a->a, nz, NULL, PETSC_SCALAR)); 5080 5081 PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY)); 5082 PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); 5083 PetscFunctionReturn(PETSC_SUCCESS); 5084 } 5085 5086 PetscErrorCode MatEqual_SeqAIJ(Mat A, Mat B, PetscBool *flg) 5087 { 5088 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data, *b = (Mat_SeqAIJ *)B->data; 5089 const PetscScalar *aa, *ba; 5090 #if defined(PETSC_USE_COMPLEX) 5091 PetscInt k; 5092 #endif 5093 5094 PetscFunctionBegin; 5095 /* If the matrix dimensions are not equal,or no of nonzeros */ 5096 if ((A->rmap->n != B->rmap->n) || (A->cmap->n != B->cmap->n) || (a->nz != b->nz)) { 5097 *flg = PETSC_FALSE; 5098 PetscFunctionReturn(PETSC_SUCCESS); 5099 } 5100 5101 /* if the a->i are the same */ 5102 PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, 
flg)); 5103 if (!*flg) PetscFunctionReturn(PETSC_SUCCESS); 5104 5105 /* if a->j are the same */ 5106 PetscCall(PetscArraycmp(a->j, b->j, a->nz, flg)); 5107 if (!*flg) PetscFunctionReturn(PETSC_SUCCESS); 5108 5109 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 5110 PetscCall(MatSeqAIJGetArrayRead(B, &ba)); 5111 /* if a->a are the same */ 5112 #if defined(PETSC_USE_COMPLEX) 5113 for (k = 0; k < a->nz; k++) { 5114 if (PetscRealPart(aa[k]) != PetscRealPart(ba[k]) || PetscImaginaryPart(aa[k]) != PetscImaginaryPart(ba[k])) { 5115 *flg = PETSC_FALSE; 5116 PetscFunctionReturn(PETSC_SUCCESS); 5117 } 5118 } 5119 #else 5120 PetscCall(PetscArraycmp(aa, ba, a->nz, flg)); 5121 #endif 5122 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 5123 PetscCall(MatSeqAIJRestoreArrayRead(B, &ba)); 5124 PetscFunctionReturn(PETSC_SUCCESS); 5125 } 5126 5127 /*@ 5128 MatCreateSeqAIJWithArrays - Creates an sequential `MATSEQAIJ` matrix using matrix elements (in CSR format) 5129 provided by the user. 5130 5131 Collective 5132 5133 Input Parameters: 5134 + comm - must be an MPI communicator of size 1 5135 . m - number of rows 5136 . n - number of columns 5137 . i - row indices; that is i[0] = 0, i[row] = i[row-1] + number of elements in that row of the matrix 5138 . j - column indices 5139 - a - matrix values 5140 5141 Output Parameter: 5142 . mat - the matrix 5143 5144 Level: intermediate 5145 5146 Notes: 5147 The `i`, `j`, and `a` arrays are not copied by this routine, the user must free these arrays 5148 once the matrix is destroyed and not before 5149 5150 You cannot set new nonzero locations into this matrix, that will generate an error. 5151 5152 The `i` and `j` indices are 0 based 5153 5154 The format which is used for the sparse matrix input, is equivalent to a 5155 row-major ordering.. 
i.e for the following matrix, the input data expected is 5156 as shown 5157 .vb 5158 1 0 0 5159 2 0 3 5160 4 5 6 5161 5162 i = {0,1,3,6} [size = nrow+1 = 3+1] 5163 j = {0,0,2,0,1,2} [size = 6]; values must be sorted for each row 5164 v = {1,2,3,4,5,6} [size = 6] 5165 .ve 5166 5167 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateAIJ()`, `MatCreateSeqAIJ()`, `MatCreateMPIAIJWithArrays()`, `MatMPIAIJSetPreallocationCSR()` 5168 @*/ 5169 PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt i[], PetscInt j[], PetscScalar a[], Mat *mat) 5170 { 5171 PetscInt ii; 5172 Mat_SeqAIJ *aij; 5173 PetscInt jj; 5174 5175 PetscFunctionBegin; 5176 PetscCheck(m <= 0 || i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i (row indices) must start with 0"); 5177 PetscCall(MatCreate(comm, mat)); 5178 PetscCall(MatSetSizes(*mat, m, n, m, n)); 5179 /* PetscCall(MatSetBlockSizes(*mat,,)); */ 5180 PetscCall(MatSetType(*mat, MATSEQAIJ)); 5181 PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(*mat, MAT_SKIP_ALLOCATION, NULL)); 5182 aij = (Mat_SeqAIJ *)(*mat)->data; 5183 PetscCall(PetscMalloc1(m, &aij->imax)); 5184 PetscCall(PetscMalloc1(m, &aij->ilen)); 5185 5186 aij->i = i; 5187 aij->j = j; 5188 aij->a = a; 5189 aij->singlemalloc = PETSC_FALSE; 5190 aij->nonew = -1; /*this indicates that inserting a new value in the matrix that generates a new nonzero is an error*/ 5191 aij->free_a = PETSC_FALSE; 5192 aij->free_ij = PETSC_FALSE; 5193 5194 for (ii = 0, aij->nonzerorowcnt = 0, aij->rmax = 0; ii < m; ii++) { 5195 aij->ilen[ii] = aij->imax[ii] = i[ii + 1] - i[ii]; 5196 if (PetscDefined(USE_DEBUG)) { 5197 PetscCheck(i[ii + 1] - i[ii] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative row length in i (row indices) row = %" PetscInt_FMT " length = %" PetscInt_FMT, ii, i[ii + 1] - i[ii]); 5198 for (jj = i[ii] + 1; jj < i[ii + 1]; jj++) { 5199 PetscCheck(j[jj] >= j[jj - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column entry number %" 
PetscInt_FMT " (actual column %" PetscInt_FMT ") in row %" PetscInt_FMT " is not sorted", jj - i[ii], j[jj], ii); 5200 PetscCheck(j[jj] != j[jj - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column entry number %" PetscInt_FMT " (actual column %" PetscInt_FMT ") in row %" PetscInt_FMT " is identical to previous entry", jj - i[ii], j[jj], ii); 5201 } 5202 } 5203 } 5204 if (PetscDefined(USE_DEBUG)) { 5205 for (ii = 0; ii < aij->i[m]; ii++) { 5206 PetscCheck(j[ii] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative column index at location = %" PetscInt_FMT " index = %" PetscInt_FMT, ii, j[ii]); 5207 PetscCheck(j[ii] <= n - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index to large at location = %" PetscInt_FMT " index = %" PetscInt_FMT, ii, j[ii]); 5208 } 5209 } 5210 5211 PetscCall(MatAssemblyBegin(*mat, MAT_FINAL_ASSEMBLY)); 5212 PetscCall(MatAssemblyEnd(*mat, MAT_FINAL_ASSEMBLY)); 5213 PetscFunctionReturn(PETSC_SUCCESS); 5214 } 5215 5216 /*@ 5217 MatCreateSeqAIJFromTriple - Creates an sequential `MATSEQAIJ` matrix using matrix elements (in COO format) 5218 provided by the user. 5219 5220 Collective 5221 5222 Input Parameters: 5223 + comm - must be an MPI communicator of size 1 5224 . m - number of rows 5225 . n - number of columns 5226 . i - row indices 5227 . j - column indices 5228 . a - matrix values 5229 . nz - number of nonzeros 5230 - idx - if the `i` and `j` indices start with 1 use `PETSC_TRUE` otherwise use `PETSC_FALSE` 5231 5232 Output Parameter: 5233 . 
mat - the matrix

  Level: intermediate

  Example:
  For the following matrix, the input data expected is as shown (using 0 based indexing)
.vb
        1 0 0
        2 0 3
        4 5 6

        i =  {0,1,1,2,2,2}
        j =  {0,0,2,0,1,2}
        v =  {1,2,3,4,5,6}
.ve

  Note:
  Instead of using this function, users should also consider `MatSetPreallocationCOO()` and `MatSetValuesCOO()`, which allow repeated or remote entries,
  and are particularly useful in iterative applications.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateAIJ()`, `MatCreateSeqAIJ()`, `MatCreateSeqAIJWithArrays()`, `MatMPIAIJSetPreallocationCSR()`, `MatSetValuesCOO()`, `MatSetPreallocationCOO()`
@*/
PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt i[], PetscInt j[], PetscScalar a[], Mat *mat, PetscInt nz, PetscBool idx)
{
  PetscInt ii, *nnz, one = 1, row, col;

  PetscFunctionBegin;
  /* first pass over the triples: count entries per row for exact preallocation;
     !!idx subtracts 1 from the row index when the input is 1-based */
  PetscCall(PetscCalloc1(m, &nnz));
  for (ii = 0; ii < nz; ii++) nnz[i[ii] - !!idx] += 1;
  PetscCall(MatCreate(comm, mat));
  PetscCall(MatSetSizes(*mat, m, n, m, n));
  PetscCall(MatSetType(*mat, MATSEQAIJ));
  PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(*mat, 0, nnz));
  /* second pass: insert each triple; ADD_VALUES means repeated (i,j) pairs are summed */
  for (ii = 0; ii < nz; ii++) {
    if (idx) {
      /* convert 1-based (Fortran-style) indices to 0-based */
      row = i[ii] - 1;
      col = j[ii] - 1;
    } else {
      row = i[ii];
      col = j[ii];
    }
    PetscCall(MatSetValues(*mat, one, &row, one, &col, &a[ii], ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(*mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*mat, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree(nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Mark the cached (block) diagonal data of A as stale so it is recomputed on next use */
PetscErrorCode MatSeqAIJInvalidateDiagonal(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  a->idiagvalid  = PETSC_FALSE;
  a->ibdiagvalid = PETSC_FALSE;

  /* also invalidate the inode variant's cached diagonal */
  PetscCall(MatSeqAIJInvalidateDiagonal_Inode(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Assemble the sequential matrix inmat into a parallel matrix on comm; delegates to the MPIAIJ implementation */
PetscErrorCode MatCreateMPIMatConcatenateSeqMat_SeqAIJ(MPI_Comm comm, Mat inmat, PetscInt n, MatReuse scall, Mat *outmat)
{
  PetscFunctionBegin;
  PetscCall(MatCreateMPIMatConcatenateSeqMat_MPIAIJ(comm, inmat, n, scall, outmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Copy B into C's *local* index space using the permutations rowemb,colemb.
  The embeddings are supposed to be injections; the range of rowemb is a subset
  of [0,m), colemb is in [0,n), where m x n are C's local dimensions.
  If pattern == DIFFERENT_NONZERO_PATTERN, C is preallocated according to B.
*/
PetscErrorCode MatSetSeqMat_SeqAIJ(Mat C, IS rowemb, IS colemb, MatStructure pattern, Mat B)
{
  /* If making this function public, change the error returned in this function away from _PLIB. */
  Mat_SeqAIJ     *Baij;
  PetscBool       seqaij;
  PetscInt        m, n, *nz, i, j, count;
  PetscScalar     v;
  const PetscInt *rowindices, *colindices;

  PetscFunctionBegin;
  if (!B) PetscFunctionReturn(PETSC_SUCCESS);
  /* Check to make sure the target matrix (and embeddings) are compatible with C and each other. */
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)B, MATSEQAIJ, &seqaij));
  PetscCheck(seqaij, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Input matrix is of wrong type");
  if (rowemb) {
    PetscCall(ISGetLocalSize(rowemb, &m));
    PetscCheck(m == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Row IS of size %" PetscInt_FMT " is incompatible with matrix row size %" PetscInt_FMT, m, B->rmap->n);
  } else {
    /* no row embedding: B's rows map one-to-one onto C's rows */
    PetscCheck(C->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Input matrix is row-incompatible with the target matrix");
  }
  if (colemb) {
    PetscCall(ISGetLocalSize(colemb, &n));
    PetscCheck(n == B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Diag col IS of size %" PetscInt_FMT " is incompatible with input matrix col size %" PetscInt_FMT, n, B->cmap->n);
  } else {
    PetscCheck(C->cmap->n == B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Input matrix is col-incompatible with the target matrix");
  }

  Baij = (Mat_SeqAIJ *)(B->data);
  if (pattern == DIFFERENT_NONZERO_PATTERN) {
    /* preallocate C row-by-row to match B's row lengths */
    PetscCall(PetscMalloc1(B->rmap->n, &nz));
    for (i = 0; i < B->rmap->n; i++) nz[i] = Baij->i[i + 1] - Baij->i[i];
    PetscCall(MatSeqAIJSetPreallocation(C, 0, nz));
    PetscCall(PetscFree(nz));
  }
  if (pattern == SUBSET_NONZERO_PATTERN) PetscCall(MatZeroEntries(C));
  count      = 0;
  rowindices = NULL;
  colindices = NULL;
  if (rowemb) PetscCall(ISGetIndices(rowemb, &rowindices));
  if (colemb) PetscCall(ISGetIndices(colemb, &colindices));
  /* walk B's CSR structure, mapping each (row,col) through the embeddings before insertion into C */
  for (i = 0; i < B->rmap->n; i++) {
    PetscInt row;
    row = i;
    if (rowindices) row = rowindices[i];
    for (j = Baij->i[i]; j < Baij->i[i + 1]; j++) {
      PetscInt col;
      col = Baij->j[count];
      if (colindices) col = colindices[col];
      v = Baij->a[count];
      PetscCall(MatSetValues(C, 1, &row, 1, &col, &v, INSERT_VALUES));
      ++count;
    }
  }
  /* FIXME: set C's nonzerostate correctly. */
  /* Assembly for C is necessary. */
  C->preallocated  = PETSC_TRUE;
  C->assembled     = PETSC_TRUE;
  C->was_assembled = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Compress explicitly stored zero entries out of A's CSR arrays in place; explicit zeros on the diagonal are kept */
PetscErrorCode MatEliminateZeros_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  MatScalar  *aa = a->a;
  PetscInt    m = A->rmap->n, fshift = 0, fshift_prev = 0, i, k;
  PetscInt   *ailen = a->ilen, *imax = a->imax, *ai = a->i, *aj = a->j, rmax = 0;

  PetscFunctionBegin;
  PetscCheck(A->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot eliminate zeros for unassembled matrix");
  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i = 1; i <= m; i++) {
    /* move each nonzero entry back by the amount of zero slots (fshift) before it*/
    for (k = ai[i - 1]; k < ai[i]; k++) {
      if (aa[k] == 0 && aj[k] != i - 1) fshift++;
      else {
        if (aa[k] == 0 && aj[k] == i - 1) PetscCall(PetscInfo(A, "Keep the diagonal zero at row %" PetscInt_FMT "\n", i - 1));
        aa[k - fshift] = aa[k];
        aj[k - fshift] = aj[k];
      }
    }
    ai[i - 1] -= fshift_prev; // safe to update ai[i-1] now since it will not be used in the next iteration
    fshift_prev = fshift;
    /* reset ilen and imax for each row */
    ailen[i - 1] = imax[i - 1] = ai[i] - fshift - ai[i - 1];
    /* NOTE(review): nonzerorowcnt is accumulated here without being reset to 0 first; presumably the
       MatAssemblyEnd() below recomputes it — confirm, otherwise the count is added on top of the old value */
    a->nonzerorowcnt += ((ai[i] - fshift - ai[i - 1]) > 0);
    rmax = PetscMax(rmax, ailen[i - 1]);
  }
  if (m) {
    ai[m] -= fshift;
    a->nz = ai[m];
  }
  PetscCall(PetscInfo(A, "Matrix size: %" PetscInt_FMT " X %" PetscInt_FMT "; zeros eliminated: %" PetscInt_FMT "; nonzeros left: %" PetscInt_FMT "\n", m, A->cmap->n, fshift, a->nz));
  A->nonzerostate -= fshift;
  A->info.nz_unneeded += (PetscReal)fshift;
  a->rmax = rmax;
  /* the inode grouping depends on the nonzero pattern, so recompute it */
  if (a->inode.use && a->inode.checked) PetscCall(MatSeqAIJCheckInode(A));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
PetscFunctionReturn(PETSC_SUCCESS);
}

/* List of registered MATSEQAIJ subtypes, populated by MatSeqAIJRegister()/MatSeqAIJRegisterAll() */
PetscFunctionList MatSeqAIJList = NULL;

/*@C
  MatSeqAIJSetType - Converts a `MATSEQAIJ` matrix to a subtype

  Collective

  Input Parameters:
+ mat    - the matrix object
- matype - matrix type

  Options Database Key:
. -mat_seqaij_type <method> - for example seqaijcrl

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `PCSetType()`, `VecSetType()`, `MatCreate()`, `MatType`
@*/
PetscErrorCode MatSeqAIJSetType(Mat mat, MatType matype)
{
  PetscBool sametype;
  PetscErrorCode (*r)(Mat, MatType, MatReuse, Mat *);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, matype, &sametype));
  if (sametype) PetscFunctionReturn(PETSC_SUCCESS); /* already the requested type; nothing to do */

  PetscCall(PetscFunctionListFind(MatSeqAIJList, matype, &r));
  PetscCheck(r, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Mat type given: %s", matype);
  /* convert in place so the caller's Mat handle stays valid */
  PetscCall((*r)(mat, matype, MAT_INPLACE_MATRIX, &mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJRegister - Adds a new sub-matrix type for sequential `MATSEQAIJ` matrices

  Not Collective

  Input Parameters:
+ sname    - name of a new user-defined matrix type, for example `MATSEQAIJCRL`
- function - routine to convert to subtype

  Level: advanced

  Notes:
  `MatSeqAIJRegister()` may be called multiple times to add several user-defined matrix types.

  Then, your matrix can be chosen with the procedural interface at runtime via the option
$ -mat_seqaij_type my_mat

.seealso: [](ch_matrices), `Mat`, `MatSeqAIJRegisterAll()`
@*/
PetscErrorCode MatSeqAIJRegister(const char sname[], PetscErrorCode (*function)(Mat, MatType, MatReuse, Mat *))
{
  PetscFunctionBegin;
  PetscCall(MatInitializePackage());
  PetscCall(PetscFunctionListAdd(&MatSeqAIJList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscBool MatSeqAIJRegisterAllCalled = PETSC_FALSE;

/*@C
  MatSeqAIJRegisterAll - Registers all of the matrix subtypes of `MATSEQAIJ`

  Not Collective

  Level: advanced

  Note:
  This registers the versions of `MATSEQAIJ` for GPUs

.seealso: [](ch_matrices), `Mat`, `MatRegisterAll()`, `MatSeqAIJRegister()`
@*/
PetscErrorCode MatSeqAIJRegisterAll(void)
{
  PetscFunctionBegin;
  /* idempotent: register the subtypes only once */
  if (MatSeqAIJRegisterAllCalled) PetscFunctionReturn(PETSC_SUCCESS);
  MatSeqAIJRegisterAllCalled = PETSC_TRUE;

  PetscCall(MatSeqAIJRegister(MATSEQAIJCRL, MatConvert_SeqAIJ_SeqAIJCRL));
  PetscCall(MatSeqAIJRegister(MATSEQAIJPERM, MatConvert_SeqAIJ_SeqAIJPERM));
  PetscCall(MatSeqAIJRegister(MATSEQAIJSELL, MatConvert_SeqAIJ_SeqAIJSELL));
#if defined(PETSC_HAVE_MKL_SPARSE)
  PetscCall(MatSeqAIJRegister(MATSEQAIJMKL, MatConvert_SeqAIJ_SeqAIJMKL));
#endif
#if defined(PETSC_HAVE_CUDA)
  PetscCall(MatSeqAIJRegister(MATSEQAIJCUSPARSE, MatConvert_SeqAIJ_SeqAIJCUSPARSE));
#endif
#if defined(PETSC_HAVE_HIP)
  PetscCall(MatSeqAIJRegister(MATSEQAIJHIPSPARSE, MatConvert_SeqAIJ_SeqAIJHIPSPARSE));
#endif
#if defined(PETSC_HAVE_KOKKOS_KERNELS)
  PetscCall(MatSeqAIJRegister(MATSEQAIJKOKKOS, MatConvert_SeqAIJ_SeqAIJKokkos));
#endif
#if defined(PETSC_HAVE_VIENNACL) && defined(PETSC_HAVE_VIENNACL_NO_CUDA)
PetscCall(MatSeqAIJRegister(MATMPIAIJVIENNACL, MatConvert_SeqAIJ_SeqAIJViennaCL)); 5507 #endif 5508 PetscFunctionReturn(PETSC_SUCCESS); 5509 } 5510 5511 /* 5512 Special version for direct calls from Fortran 5513 */ 5514 #include <petsc/private/fortranimpl.h> 5515 #if defined(PETSC_HAVE_FORTRAN_CAPS) 5516 #define matsetvaluesseqaij_ MATSETVALUESSEQAIJ 5517 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) 5518 #define matsetvaluesseqaij_ matsetvaluesseqaij 5519 #endif 5520 5521 /* Change these macros so can be used in void function */ 5522 5523 /* Change these macros so can be used in void function */ 5524 /* Identical to PetscCallVoid, except it assigns to *_ierr */ 5525 #undef PetscCall 5526 #define PetscCall(...) \ 5527 do { \ 5528 PetscErrorCode ierr_msv_mpiaij = __VA_ARGS__; \ 5529 if (PetscUnlikely(ierr_msv_mpiaij)) { \ 5530 *_ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, ierr_msv_mpiaij, PETSC_ERROR_REPEAT, " "); \ 5531 return; \ 5532 } \ 5533 } while (0) 5534 5535 #undef SETERRQ 5536 #define SETERRQ(comm, ierr, ...) 
\ 5537 do { \ 5538 *_ierr = PetscError(comm, __LINE__, PETSC_FUNCTION_NAME, __FILE__, ierr, PETSC_ERROR_INITIAL, __VA_ARGS__); \ 5539 return; \ 5540 } while (0) 5541 5542 PETSC_EXTERN void matsetvaluesseqaij_(Mat *AA, PetscInt *mm, const PetscInt im[], PetscInt *nn, const PetscInt in[], const PetscScalar v[], InsertMode *isis, PetscErrorCode *_ierr) 5543 { 5544 Mat A = *AA; 5545 PetscInt m = *mm, n = *nn; 5546 InsertMode is = *isis; 5547 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 5548 PetscInt *rp, k, low, high, t, ii, row, nrow, i, col, l, rmax, N; 5549 PetscInt *imax, *ai, *ailen; 5550 PetscInt *aj, nonew = a->nonew, lastcol = -1; 5551 MatScalar *ap, value, *aa; 5552 PetscBool ignorezeroentries = a->ignorezeroentries; 5553 PetscBool roworiented = a->roworiented; 5554 5555 PetscFunctionBegin; 5556 MatCheckPreallocated(A, 1); 5557 imax = a->imax; 5558 ai = a->i; 5559 ailen = a->ilen; 5560 aj = a->j; 5561 aa = a->a; 5562 5563 for (k = 0; k < m; k++) { /* loop over added rows */ 5564 row = im[k]; 5565 if (row < 0) continue; 5566 PetscCheck(row < A->rmap->n, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large"); 5567 rp = aj + ai[row]; 5568 ap = aa + ai[row]; 5569 rmax = imax[row]; 5570 nrow = ailen[row]; 5571 low = 0; 5572 high = nrow; 5573 for (l = 0; l < n; l++) { /* loop over added columns */ 5574 if (in[l] < 0) continue; 5575 PetscCheck(in[l] < A->cmap->n, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Column too large"); 5576 col = in[l]; 5577 if (roworiented) value = v[l + k * n]; 5578 else value = v[k + l * m]; 5579 5580 if (value == 0.0 && ignorezeroentries && (is == ADD_VALUES)) continue; 5581 5582 if (col <= lastcol) low = 0; 5583 else high = nrow; 5584 lastcol = col; 5585 while (high - low > 5) { 5586 t = (low + high) / 2; 5587 if (rp[t] > col) high = t; 5588 else low = t; 5589 } 5590 for (i = low; i < high; i++) { 5591 if (rp[i] > col) break; 5592 if (rp[i] == col) { 5593 if (is == ADD_VALUES) ap[i] += value; 5594 else ap[i] 
= value; 5595 goto noinsert; 5596 } 5597 } 5598 if (value == 0.0 && ignorezeroentries) goto noinsert; 5599 if (nonew == 1) goto noinsert; 5600 PetscCheck(nonew != -1, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero in the matrix"); 5601 MatSeqXAIJReallocateAIJ(A, A->rmap->n, 1, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar); 5602 N = nrow++ - 1; 5603 a->nz++; 5604 high++; 5605 /* shift up all the later entries in this row */ 5606 for (ii = N; ii >= i; ii--) { 5607 rp[ii + 1] = rp[ii]; 5608 ap[ii + 1] = ap[ii]; 5609 } 5610 rp[i] = col; 5611 ap[i] = value; 5612 A->nonzerostate++; 5613 noinsert:; 5614 low = i + 1; 5615 } 5616 ailen[row] = nrow; 5617 } 5618 PetscFunctionReturnVoid(); 5619 } 5620 /* Undefining these here since they were redefined from their original definition above! No 5621 * other PETSc functions should be defined past this point, as it is impossible to recover the 5622 * original definitions */ 5623 #undef PetscCall 5624 #undef SETERRQ 5625