Home
last modified time | relevance | path

Searched refs:MPIEXEC (Results 1 – 25 of 25) sorted by relevance

/petsc/src/benchmarks/
H A D makefile 63 -@${MPIEXEC} -n 1 ./PetscTime
64 -@${MPIEXEC} -n 1 ./PetscGetTime
65 -@${MPIEXEC} -n 1 ./PetscGetCPUTime
66 -@${MPIEXEC} -n 1 ./MPI_Wtime
70 -@${MPIEXEC} -n 1 ./PLogEvent > /dev/null
71 -@${MPIEXEC} -n 1 ./PLogEvent -log_view > /dev/null
72 -@${MPIEXEC} -n 1 ./PLogEvent -log_mpe > /dev/null
76 -@${MPIEXEC} -n 1 ./PetscMalloc
77 -@${MPIEXEC} -n 1 ./PetscMalloc -malloc
81 -@${MPIEXEC} -n 1 ./PetscMemcpy
[all …]
/petsc/src/snes/tutorials/
H A D makefile 23 -${QUIET}${MPIEXEC} -n 1 ${MPIEXEC_TAIL} ./ex5f -snes_rtol 1e-4 > ex5f.tmp 2>&1;
35 …-${QUIET}${MPIEXEC} -n 1 ${MPIEXEC_TAIL} ./ex19 -da_refine 3 -pc_type mg -ksp_type fgmres > ex19.…
47 …-${QUIET}${MPIEXEC} -n 2 ${MPIEXEC_TAIL} ./ex19 -da_refine 3 -pc_type mg -ksp_type fgmres -snes_ty…
60 …-${QUIET}${MPIEXEC} -n 2 ${MPIEXEC_TAIL} ./ex19 -da_refine 3 -snes_monitor_short -ksp_norm_type un…
71 …CUDA_VISIBLE_DEVICES=$$USE_DEVICE ${MPIEXEC} -n 2 ${MPIEXEC_TAIL} ./ex19 -petsc_ci -dm_vec_type cu…
82 …HIP_VISIBLE_DEVICES=$$USE_DEVICE ${MPIEXEC} -n 2 ${MPIEXEC_TAIL} ./ex19 -petsc_ci -dm_vec_type hip…
92 …-${QUIET}${MPIEXEC} -n 1 ${MPIEXEC_TAIL} ./ex19 -snes_monitor -dm_mat_type seqaijcusparse -dm_vec_…
102 …-${QUIET}${MPIEXEC} -n 1 ${MPIEXEC_TAIL} ./ex19 -snes_monitor -dm_mat_type mpiaijhipsparse -dm_vec…
112 …-${QUIET}${MPIEXEC} -n 2 ${MPIEXEC_TAIL} ./ex19 -da_refine 3 -snes_monitor_short -pc_type ml > ex1…
122 …-${QUIET}${MPIEXEC} -n 2 ${MPIEXEC_TAIL} ./ex19 -pc_type fieldsplit -pc_fieldsplit_block_size 4 -p…
[all …]
/petsc/src/benchmarks/streams/
H A D makefile 45 …-@printf "Running streams with '${MPIEXEC} ${MPI_BINDING} ${MPI_BINDING_VIEW} -n <np> ./MPIVersion…
49 ${MPIEXEC} ${MPI_BINDING} ${MPI_BINDING_VIEW} -n $${i} ./MPIVersion | tee -a scaling.log; \
58 -@printf "Running streams with '${MPIEXEC} ${MPI_BINDING}' using 'NPMAX=${NPMAX}'\n"
60 ${MPIEXEC} ${MPI_BINDING} -n 1 -c$${i} -a$${i} -g1 ./CUDAVersion | tee -a scaling.log; \
64 ${MPIEXEC} ${MPI_BINDING} -n $${n} -c$${c} -a$${c} -g1 ./CUDAVersion | tee -a scaling.log; \
89 …-@if [ "${LSTOPO}foo" != "foo" ]; then ${MPIEXEC} ${MPI_BINDING} -n 1 ${LSTOPO} --no-icaches --no-…
/petsc/src/binding/petsc4py/demo/legacy/kspsolve/
H A D makefile 3 MPIEXEC= macro
18 ${MPIEXEC} ${PYTHON} ${SCRIPT1}.py
23 ${MPIEXEC} ${PYTHON} ${SCRIPT2}.py
/petsc/src/binding/petsc4py/demo/legacy/bratu3d/
H A D makefile 3 MPIEXEC= macro
17 ${MPIEXEC} ${PYTHON} bratu3d.py -fd -nx 7 -ny 8 -nz 9
21 ${MPIEXEC} ${PYTHON} bratu3d.py -mf -nx 9 -ny 8 -nz 7
/petsc/src/binding/petsc4py/demo/legacy/perftest/
H A D makefile 3 MPIEXEC= macro
19 ${MPIEXEC} ${PYTHON} ${SCRIPT}.py
24 ${MPIEXEC} ./${EXECUTABLE}.exe
/petsc/src/binding/petsc4py/demo/legacy/bratu2d/
H A D makefile 3 MPIEXEC= macro
23 ${MPIEXEC} ${PYTHON} bratu2d.py -impl python
33 ${MPIEXEC} ${PYTHON} bratu2d.py -impl fortran
/petsc/src/binding/petsc4py/demo/legacy/poisson3d/
H A D makefile 3 MPIEXEC= macro
24 ${MPIEXEC} ${PYTHON} ${SCRIPT}.py
32 ${MPIEXEC} ./${EXECUTABLE}.exe
/petsc/src/binding/petsc4py/demo/legacy/wrap-cython/
H A D makefile 3 MPIEXEC= macro
20 ${MPIEXEC} ${PYTHON} ${SCRIPT}.py
35 MPIEXEC= macro
/petsc/src/binding/petsc4py/demo/legacy/wrap-swig/
H A D makefile 3 MPIEXEC= macro
20 ${MPIEXEC} ${PYTHON} ${SCRIPT}.py
35 MPIEXEC= macro
/petsc/src/binding/petsc4py/demo/legacy/petsc-examples/ksp/
H A D makefile 3 MPIEXEC=mpiexec -n 2 macro
15 ${MPIEXEC} ${PYTHON} $@.py
/petsc/src/binding/petsc4py/demo/legacy/poisson2d/
H A D makefile 3 MPIEXEC= macro
14 ${MPIEXEC} ${PYTHON} poisson2d.py -nx 15 -ny 16
/petsc/src/binding/petsc4py/demo/legacy/binary-io/
H A D makefile 3 MPIEXEC= macro
14 ${MPIEXEC} ${PYTHON} matvecio.py
/petsc/src/binding/petsc4py/demo/legacy/wrap-f2py/
H A D makefile 3 MPIEXEC= macro
38 MPIEXEC= macro
/petsc/src/sys/tests/
H A D makefile 12 ${MPIEXEC} -n 2 ${MPIEXEC_TAIL} ${PYTHON_EXE} ./ex55.py > ex55.tmp 2>&1;
/petsc/src/vec/vec/tests/
H A D makefile 10 …-${QUIET}H5OUT=`mktemp -t petsc.h5.XXXXXX`; ${MPIEXEC} -n 1 ${MPIEXEC_TAIL} ./ex47 -filename $${H5…
/petsc/src/vec/vec/tutorials/
H A D makefile 14 -${QUIET}${MPIEXEC} -n 1 ${MPIEXEC_TAIL} ./ex31 > ex31.tmp 2>&1;
/petsc/src/ksp/ksp/tutorials/
H A D makefile 13 …-${QUIET}OMPI_MCA_mpi_warn_on_fork=0 ${MPIEXEC} -n 1 ${MPIEXEC_TAIL} ./ex100 -test > ex100.tmp 2>…
/petsc/src/dm/impls/swarm/tests/
H A D makefile 12 …-${MPIEXEC} -n ${NP} ${MPIEXEC_TAIL} ./ex2 -dim 2 -pc_type lu -faces ${N} -particles_cell ${P} -pe…
/petsc/src/ksp/ksp/tutorials/amrex/
H A D makefile 22 …-${QUIET}${MPIEXEC} -n 1 ${MPIEXEC_TAIL} ./amrex inputs.rt.2d.petsc | grep -E -v "(AMReX|Timers)" …
/petsc/src/binding/petsc4py/
H A D makefile 10 MPIEXEC = mpiexec macro
22 ${MPIEXEC} -n $* ${VALGRIND} ${PYTHON} ${PWD}/test/runtests.py ${opt}
/petsc/
H A D makefile 130 elif [ "${MPIEXEC}" = "/bin/false" ]; then\
255 …ALLTESTS_MAKEFILE} PETSC_ARCH=${PETSC_ARCH} PETSC_DIR=${PETSC_DIR} MPIEXEC="${MPIEXEC}" DATAFILESP…
H A D gmakefile 359 -@echo "Using mpiexec: ${MPIEXEC}"
H A D gmakefile.test 262 -$(call macos-firewall-register, $(MPIEXEC))
/petsc/lib/petsc/conf/
H A D rules_util.mk 19 -@echo ${MPIEXEC}