Home
last modified time | relevance | path

Searched refs:reductions (Results 1 – 24 of 24) sorted by relevance

/petsc/lib/petsc/bin/
H A Dpetsclogformat.py222 reductions = 0
224 reductions += localReductions[t]
225 return time,flops,messages,messagelens,reductions
/petsc/src/mat/utils/
H A Dgetcolv.c297 PetscErrorCode MatGetColumnReductions(Mat A, PetscInt type, PetscReal reductions[]) in MatGetColumnReductions() argument
301 PetscUseTypeMethod(A, getcolumnreductions, type, reductions); in MatGetColumnReductions()
/petsc/doc/changes/
H A D212.md17 - Support for global reductions on local functions
H A D216.md32 fewer reductions
H A D2028.md251 - Added PetscSum_Op to replace MPI_SUM for reductions with
H A D33.md103 reduction per iteration instead of two blocking reductions.
H A D316.md128 - Add `MatGetColumnReductions()` developer routine to calculate reductions over columns of a matrix
H A D32.md191 solves, intended for use when reductions are expensive such as
/petsc/src/snes/tutorials/output/
H A Dex19_tut_6.out37 Reduct: number of global reductions
42 %R - percent reductions in this phase
H A Dex19_tut_4.out37 Reduct: number of global reductions
42 %R - percent reductions in this phase
H A Dex19_tut_5.out37 Reduct: number of global reductions
42 %R - percent reductions in this phase
H A Dex19_tut_7.out37 Reduct: number of global reductions
42 %R - percent reductions in this phase
/petsc/src/mat/impls/dense/mpi/
H A Dmpidense.c1172 static PetscErrorCode MatGetColumnReductions_MPIDense(Mat A, PetscInt type, PetscReal *reductions) in MatGetColumnReductions_MPIDense() argument
1180 PetscCall(MatGetColumnReductions_SeqDense(a->A, (PetscInt)REDUCTION_SUM_REALPART, reductions)); in MatGetColumnReductions_MPIDense()
1182 PetscCall(MatGetColumnReductions_SeqDense(a->A, (PetscInt)REDUCTION_SUM_IMAGINARYPART, reductions)); in MatGetColumnReductions_MPIDense()
1184 PetscCall(MatGetColumnReductions_SeqDense(a->A, type, reductions)); in MatGetColumnReductions_MPIDense()
1187 for (i = 0; i < n; i++) reductions[i] *= reductions[i]; in MatGetColumnReductions_MPIDense()
1190 PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, reductions, n, MPIU_REAL, MPIU_MAX, A->hdr.comm)); in MatGetColumnReductions_MPIDense()
1192 PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, reductions, n, MPIU_REAL, MPIU_SUM, A->hdr.comm)); in MatGetColumnReductions_MPIDense()
1195 for (i = 0; i < n; i++) reductions[i] = PetscSqrtReal(reductions[i]); in MatGetColumnReductions_MPIDense()
1197 for (i = 0; i < n; i++) reductions[i] /= m; in MatGetColumnReductions_MPIDense()
/petsc/src/mat/impls/baij/seq/
H A Dbaij.c34 static PetscErrorCode MatGetColumnReductions_SeqBAIJ(Mat A, PetscInt type, PetscReal *reductions) in MatGetColumnReductions_SeqBAIJ() argument
42 PetscCall(PetscArrayzero(reductions, n)); in MatGetColumnReductions_SeqBAIJ()
47 reductions[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val); in MatGetColumnReductions_SeqBAIJ()
56 reductions[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val); in MatGetColumnReductions_SeqBAIJ()
66 reductions[col] = PetscMax(PetscAbsScalar(*a_val), reductions[col]); in MatGetColumnReductions_SeqBAIJ()
75 reductions[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscRealPart(*a_val); in MatGetColumnReductions_SeqBAIJ()
84 reductions[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscImaginaryPart(*a_val); in MatGetColumnReductions_SeqBAIJ()
91 for (PetscInt i = 0; i < n; i++) reductions[i] = PetscSqrtReal(reductions[i]); in MatGetColumnReductions_SeqBAIJ()
93 for (PetscInt i = 0; i < n; i++) reductions[i] /= m; in MatGetColumnReductions_SeqBAIJ()
/petsc/src/mat/impls/dense/seq/
H A Ddense.c2990 …_INTERN PetscErrorCode MatGetColumnReductions_SeqDense(Mat A, PetscInt type, PetscReal *reductions) in MatGetColumnReductions_SeqDense() argument
2997 PetscCall(PetscArrayzero(reductions, n)); in MatGetColumnReductions_SeqDense()
3001 for (j = 0; j < m; j++) reductions[i] += PetscAbsScalar(a[j] * a[j]); in MatGetColumnReductions_SeqDense()
3006 for (j = 0; j < m; j++) reductions[i] += PetscAbsScalar(a[j]); in MatGetColumnReductions_SeqDense()
3011 for (j = 0; j < m; j++) reductions[i] = PetscMax(PetscAbsScalar(a[j]), reductions[i]); in MatGetColumnReductions_SeqDense()
3016 for (j = 0; j < m; j++) reductions[i] += PetscRealPart(a[j]); in MatGetColumnReductions_SeqDense()
3021 for (j = 0; j < m; j++) reductions[i] += PetscImaginaryPart(a[j]); in MatGetColumnReductions_SeqDense()
3027 for (i = 0; i < n; i++) reductions[i] = PetscSqrtReal(reductions[i]); in MatGetColumnReductions_SeqDense()
3029 for (i = 0; i < n; i++) reductions[i] /= m; in MatGetColumnReductions_SeqDense()
/petsc/doc/manual/
H A Dprofiling.md230 length, and the number of global reductions.
247 Reduct: number of global reductions
252 %R - percent reductions in this phase
301 length (`%L`), and reductions (`%R`)) for each event relative to the
H A Dksp.md639 Standard Krylov methods have one or more global reductions resulting from the computations of inner…
640 These reductions need to block until all MPI processes have received the results. For a large numbe…
643 thus effectively "hiding" the time of the reductions. In addition, they may reduce the number of gl…
647 Special configuration of MPI may be necessary for reductions to make asynchronous progress, which i…
/petsc/doc/overview/
H A Dlinear_solve_table.md588 * - Flexible stabilized Bi-Conjugate Gradients with fewer reductions
/petsc/src/mat/impls/aij/seq/
H A Daij.c34 static PetscErrorCode MatGetColumnReductions_SeqAIJ(Mat A, PetscInt type, PetscReal *reductions) in MatGetColumnReductions_SeqAIJ() argument
41 PetscCall(PetscArrayzero(reductions, n)); in MatGetColumnReductions_SeqAIJ()
43 for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscAbsScalar(aij->a[i] * aij->a[i]); in MatGetColumnReductions_SeqAIJ()
45 for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscAbsScalar(aij->a[i]); in MatGetColumnReductions_SeqAIJ()
47 for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] = PetscMax(PetscAbsScalar(aij->a[i]), reductions[aij->j[i]]); in MatGetColumnReductions_SeqAIJ()
49 for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscRealPart(aij->a[i]); in MatGetColumnReductions_SeqAIJ()
51 for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscImaginaryPart(aij->a[i]); in MatGetColumnReductions_SeqAIJ()
55 for (i = 0; i < n; i++) reductions[i] = PetscSqrtReal(reductions[i]); in MatGetColumnReductions_SeqAIJ()
57 for (i = 0; i < n; i++) reductions[i] /= m; in MatGetColumnReductions_SeqAIJ()
/petsc/src/mat/impls/baij/mpi/
H A Dmpibaij.c2235 static PetscErrorCode MatGetColumnReductions_MPIBAIJ(Mat A, PetscInt type, PetscReal *reductions) in MatGetColumnReductions_MPIBAIJ() argument
2338 …PetscCallMPI(MPIU_Allreduce(work, reductions, N, MPIU_REAL, MPIU_MAX, PetscObjectComm((PetscObject… in MatGetColumnReductions_MPIBAIJ()
2340 …PetscCallMPI(MPIU_Allreduce(work, reductions, N, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject… in MatGetColumnReductions_MPIBAIJ()
2344 for (i = 0; i < N; i++) reductions[i] = PetscSqrtReal(reductions[i]); in MatGetColumnReductions_MPIBAIJ()
2346 for (i = 0; i < N; i++) reductions[i] /= m; in MatGetColumnReductions_MPIBAIJ()
/petsc/src/mat/impls/aij/mpi/
H A Dmpiaij.c301 static PetscErrorCode MatGetColumnReductions_MPIAIJ(Mat A, PetscInt type, PetscReal *reductions) in MatGetColumnReductions_MPIAIJ() argument
334 …PetscCallMPI(MPIU_Allreduce(work, reductions, n, MPIU_REAL, MPIU_MAX, PetscObjectComm((PetscObject… in MatGetColumnReductions_MPIAIJ()
336 …PetscCallMPI(MPIU_Allreduce(work, reductions, n, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject… in MatGetColumnReductions_MPIAIJ()
340 for (i = 0; i < n; i++) reductions[i] = PetscSqrtReal(reductions[i]); in MatGetColumnReductions_MPIAIJ()
342 for (i = 0; i < n; i++) reductions[i] /= m; in MatGetColumnReductions_MPIAIJ()
/petsc/doc/faq/
H A Dindex.md1408 require special MPI configuration to effectively overlap reductions with computation. In
/petsc/share/petsc/datafiles/meshes/
H A Dtestcase3D.cas2021 (dpm/multicomponent-maximum-step-reductions 500)
/petsc/doc/
H A Dpetsc.bib30477 title = {Toward a more comprehensive cost measure for {CO2}-reductions},