
Searched refs:proc (Results 1 – 25 of 55) sorted by relevance


/petsc/src/vec/is/ao/tests/output/
ex5_1.out
23 proc = 0 : 0 -> 7
24 proc = 0 : -1 -> -1
25 proc = 0 : 1 -> 6
26 proc = 0 : 2 -> 5
27 proc = 0 : -1 -> -1
28 proc = 0 : 4 -> 3
29 proc = 0 : 5 -> 2
30 proc = 0 : 6 -> 1
31 proc = 1 : -1 -> -1
32 proc = 1 : 8 -> 8
[all …]
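These lines come from an AO (application ordering) test: each "a -> b" record shows an application index on a rank being translated to the corresponding PETSc index, with negative entries passed through unchanged. Below is a minimal single-rank sketch of producing such a translation with the public AO interface; the index arrays are made up and do not reproduce ex5.

#include <petscao.h>

int main(int argc, char **argv)
{
  AO       ao;
  PetscInt app[]   = {0, 1, 2, 4};  /* hypothetical application indices owned by this rank */
  PetscInt petsc[] = {7, 6, 5, 3};  /* hypothetical PETSc indices they correspond to        */
  PetscInt in[]    = {0, -1, 1, 2}; /* indices to translate; negative entries pass through  */
  PetscInt out[]   = {0, -1, 1, 2}; /* translated in place below                            */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(AOCreateBasic(PETSC_COMM_WORLD, 4, app, petsc, &ao));
  PetscCall(AOApplicationToPetsc(ao, 4, out));
  for (PetscInt i = 0; i < 4; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "proc = 0 : %" PetscInt_FMT " -> %" PetscInt_FMT "\n", in[i], out[i]));
  PetscCall(AODestroy(&ao));
  PetscCall(PetscFinalize());
  return 0;
}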
/petsc/src/sys/memory/
mem.c
63 char proc[PETSC_MAX_PATH_LEN]; in PetscMemoryGetCurrentUsage() local
70 char proc[PETSC_MAX_PATH_LEN]; in PetscMemoryGetCurrentUsage()
79 PetscCall(PetscSNPrintf(proc, PETSC_STATIC_ARRAY_LENGTH(proc), "/proc/%d", getpid())); in PetscMemoryGetCurrentUsage()
80 …PetscCheck((fd = open(proc, O_RDONLY)) != -1, PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Unable to acc… in PetscMemoryGetCurrentUsage()
90 PetscCall(PetscSNPrintf(proc, PETSC_STATIC_ARRAY_LENGTH(proc), "/proc/%d/statm", getpid())); in PetscMemoryGetCurrentUsage()
91 …Check(file = fopen(proc, "r"), PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Unable to access system file… in PetscMemoryGetCurrentUsage()
92 …s) == 2, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to read two integers (mm and rss) from %s", proc); in PetscMemoryGetCurrentUsage()
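PetscMemoryGetCurrentUsage() builds the path "/proc/<pid>/statm", opens it, and reads the first two fields (total and resident pages) to report memory use on Linux. A stripped-down sketch of the same idea in plain C, without PETSc error handling, assuming a Linux /proc filesystem:

#include <stdio.h>
#include <unistd.h>

/* Return the resident set size of the calling process in bytes, or 0 on failure. */
static long long current_rss_bytes(void)
{
  char  path[64];
  long  size, resident;
  FILE *f;

  snprintf(path, sizeof(path), "/proc/%d/statm", (int)getpid());
  f = fopen(path, "r");
  if (!f) return 0;
  if (fscanf(f, "%ld %ld", &size, &resident) != 2) resident = 0; /* first two fields: total and resident pages */
  fclose(f);
  return (long long)resident * sysconf(_SC_PAGESIZE);            /* statm counts pages; convert to bytes        */
}

int main(void)
{
  printf("RSS: %lld bytes\n", current_rss_bytes());
  return 0;
}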
/petsc/src/tao/constrained/impls/ipm/
pdipm.c
282 PetscInt proc, nx_all, *nce_all = pdipm->nce_all; in TaoSNESJacobian_PDIPM() local
320 proc = 0; in TaoSNESJacobian_PDIPM()
322 while (aj[j] >= cranges[proc + 1]) proc++; in TaoSNESJacobian_PDIPM()
323 cols[0] = aj[j] - cranges[proc] + Jranges[proc]; in TaoSNESJacobian_PDIPM()
340 proc = 0; in TaoSNESJacobian_PDIPM()
342 while (aj[j] >= cranges[proc + 1]) proc++; in TaoSNESJacobian_PDIPM()
343 cols[0] = aj[j] - cranges[proc] + Jranges[proc]; in TaoSNESJacobian_PDIPM()
372 proc = 0; in TaoSNESJacobian_PDIPM()
374 while (aj[j] >= cranges[proc + 1]) proc++; in TaoSNESJacobian_PDIPM()
375 cols[0] = aj[j] - cranges[proc] + Jranges[proc]; in TaoSNESJacobian_PDIPM()
[all …]
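The recurring idiom in this hit, and in packm.c, mpimatmatmult.c, and mpiptap.c below, is a linear scan over ownership ranges: proc is advanced while the global column aj[j] lies past the end of proc's range, and the column is then re-expressed as its offset within the owner plus that owner's start in the combined layout. A minimal stand-alone sketch of the idiom, with hypothetical range arrays:

#include <stdio.h>

int main(void)
{
  /* hypothetical ownership ranges over 3 ranks: rank p owns [ranges[p], ranges[p+1]) */
  const int cranges[] = {0, 4, 9, 12};   /* ranges of the source layout                          */
  const int Jranges[] = {0, 10, 20, 30}; /* where each rank's block starts in the target layout  */
  const int cols_in[] = {2, 5, 11};

  for (int j = 0; j < 3; j++) {
    int proc = 0;
    while (cols_in[j] >= cranges[proc + 1]) proc++;       /* find the owning rank           */
    int col = cols_in[j] - cranges[proc] + Jranges[proc]; /* re-map into the target layout  */
    printf("col %d: owner %d, mapped to %d\n", cols_in[j], proc, col);
  }
  return 0;
}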
/petsc/src/dm/impls/composite/
packm.c
90 PetscMPIInt proc; in DMCreateMatrix_Composite_AIJ() local
113 proc = 0; in DMCreateMatrix_Composite_AIJ()
114 while (cols[j] >= rstarts[proc + 1]) proc++; in DMCreateMatrix_Composite_AIJ()
115 ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc]; in DMCreateMatrix_Composite_AIJ()
138 PetscMPIInt proc; in DMCreateMatrix_Composite_AIJ() local
159 proc = 0; in DMCreateMatrix_Composite_AIJ()
160 while (cols[j] >= rstarts[proc + 1]) proc++; in DMCreateMatrix_Composite_AIJ()
161 ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc]; in DMCreateMatrix_Composite_AIJ()
/petsc/src/mat/impls/aij/mpi/
mpimatmatmult.c
1274 PetscMPIInt tagi, tagj, *len_si, *len_s, *len_ri, nrecv, proc, nsend; in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable() local
1335 proc = 0; in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable()
1337 while (prmap[i] >= owners[proc + 1]) proc++; in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable()
1338 len_si[proc]++; /* num of rows in Co(=Pt*A) to be sent to [proc] */ in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable()
1339 len_s[proc] += coi[i + 1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */ in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable()
1345 for (proc = 0; proc < size; proc++) { in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable()
1346 owners_co[proc + 1] = owners_co[proc] + len_si[proc]; in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable()
1347 if (len_s[proc]) { in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable()
1349 len_si[proc] = 2 * (len_si[proc] + 1); /* length of buf_si to be sent to [proc] */ in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable()
1350 len += len_si[proc]; in MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable()
[all …]
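Before any data moves, this routine (and MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable below) makes a counting pass: each local result row is attributed to its destination rank, len_si accumulates rows and len_s accumulates nonzeros per rank, and a prefix sum over len_si gives the starting offsets (owners_co). A small sketch of that counting-plus-prefix-sum step, with hypothetical inputs:

#include <stdio.h>

int main(void)
{
  enum { SIZE = 3, NROWS = 5 };
  const int owners[SIZE + 1] = {0, 2, 4, 6};         /* hypothetical row ownership ranges          */
  const int prmap[NROWS]     = {0, 1, 3, 4, 5};      /* destination row of each local result row   */
  const int coi[NROWS + 1]   = {0, 2, 5, 6, 9, 10};  /* CSR row pointer: nonzeros per row          */
  int len_si[SIZE] = {0}, len_s[SIZE] = {0}, owners_co[SIZE + 1] = {0};

  int proc = 0;
  for (int i = 0; i < NROWS; i++) {
    while (prmap[i] >= owners[proc + 1]) proc++;     /* rows are sorted, so proc only advances      */
    len_si[proc]++;                                  /* one more row headed to this rank            */
    len_s[proc] += coi[i + 1] - coi[i];              /* and its nonzeros                            */
  }
  for (proc = 0; proc < SIZE; proc++) {
    owners_co[proc + 1] = owners_co[proc] + len_si[proc]; /* prefix sum: first row index per rank   */
    printf("rank %d: %d rows, %d nonzeros, rows start at %d\n", proc, len_si[proc], len_s[proc], owners_co[proc]);
  }
  return 0;
}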
mpiptap.c
225 PetscMPIInt tagi, tagj, *len_si, *len_s, *len_ri, nrecv, nsend, proc; in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable() local
398 proc = 0; in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
401 while (prmap[i] >= owners[proc + 1]) proc++; in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
402 len_si[proc]++; /* num of rows in Co(=Pt*AP) to be sent to [proc] */ in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
403 len_s[proc] += coi[i + 1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */ in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
409 for (proc = 0; proc < size; proc++) { in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
410 owners_co[proc + 1] = owners_co[proc] + len_si[proc]; in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
411 if (len_s[proc]) { in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
413 len_si[proc] = 2 * (len_si[proc] + 1); /* length of buf_si to be sent to [proc] */ in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
414 len += len_si[proc]; in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
[all …]
mpiov.c
504 PetscMPIInt size, rank, tag1, tag2, proc = 0, nrqs, *pa; in MatIncreaseOverlap_MPIAIJ_Once() local
540 PetscCall(PetscLayoutFindOwner(C->rmap, row, &proc)); in MatIncreaseOverlap_MPIAIJ_Once()
541 w4[proc]++; in MatIncreaseOverlap_MPIAIJ_Once()
647 PetscCall(PetscLayoutFindOwner(C->rmap, row, &proc)); in MatIncreaseOverlap_MPIAIJ_Once()
648 if (proc != rank) { /* copy to the outgoing buffer */ in MatIncreaseOverlap_MPIAIJ_Once()
649 ctr[proc]++; in MatIncreaseOverlap_MPIAIJ_Once()
650 *ptr[proc] = row; in MatIncreaseOverlap_MPIAIJ_Once()
651 ptr[proc]++; in MatIncreaseOverlap_MPIAIJ_Once()
720 proc = recv_status[i].MPI_SOURCE; in MatIncreaseOverlap_MPIAIJ_Once()
721 PetscCheck(proc == onodes1[i], PETSC_COMM_SELF, PETSC_ERR_PLIB, "MPI_SOURCE mismatch"); in MatIncreaseOverlap_MPIAIJ_Once()
[all …]
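Here the owner of a requested row is found with PetscLayoutFindOwner() on the matrix row layout rather than a hand-rolled scan; rows owned locally are handled in place, while the rest are staged in per-rank outgoing buffers (the baijov.c hits below use the same call with the row scaled by the block size). A hedged usage sketch of the same lookup against a vector's layout; the vector size and queried index are made up:

#include <petscvec.h>

int main(int argc, char **argv)
{
  Vec         x;
  PetscLayout map;
  PetscMPIInt rank, owner;
  PetscInt    row = 5; /* hypothetical global index to locate */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 12, &x));
  PetscCall(VecGetLayout(x, &map));
  PetscCall(PetscLayoutFindOwner(map, row, &owner));
  if (owner == rank) {
    /* handle the entry locally */
  } else {
    /* stage it in the outgoing buffer destined for 'owner' */
  }
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] global index %" PetscInt_FMT " is owned by rank %d\n", rank, row, owner));
  PetscCall(VecDestroy(&x));
  PetscCall(PetscFinalize());
  return 0;
}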
mpiaij.c
4654 PetscMPIInt proc, k; in MatCreateMPIAIJSumSeqAIJNumeric() local
4692 for (proc = 0, k = 0; proc < size; proc++) { in MatCreateMPIAIJSumSeqAIJNumeric()
4693 if (!len_s[proc]) continue; in MatCreateMPIAIJSumSeqAIJNumeric()
4694 i = owners[proc]; in MatCreateMPIAIJSumSeqAIJNumeric()
4695 … PetscCallMPI(MPIU_Isend(aa + ai[i], len_s[proc], MPIU_MATSCALAR, proc, taga, comm, s_waits + k)); in MatCreateMPIAIJSumSeqAIJNumeric()
4812 for (PetscMPIInt proc = 0; proc < size; proc++) { in MatCreateMPIAIJSumSeqAIJSymbolic() local
4813 len_si[proc] = 0; in MatCreateMPIAIJSumSeqAIJSymbolic()
4814 if (proc == rank) { in MatCreateMPIAIJSumSeqAIJSymbolic()
4815 len_s[proc] = 0; in MatCreateMPIAIJSumSeqAIJSymbolic()
4817 PetscCall(PetscMPIIntCast(owners[proc + 1] - owners[proc] + 1, &len_si[proc])); in MatCreateMPIAIJSumSeqAIJSymbolic()
[all …]
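After the counting pass, the numeric routine loops over all ranks, skips destinations with nothing to send, and posts one nonblocking send per remaining rank starting at that rank's offset in the packed array. A bare-MPI sketch of that skip-empty-destinations send loop; the buffers are hypothetical and the matching receives are omitted, so as written nothing is actually sent:

#include <mpi.h>
#include <stdlib.h>

int main(int argc, char **argv)
{
  int size, tag = 7;
  MPI_Init(&argc, &argv);
  MPI_Comm_size(MPI_COMM_WORLD, &size);

  double      *data   = calloc(64, sizeof(*data));         /* hypothetical packed send data      */
  int         *len_s  = calloc(size, sizeof(*len_s));      /* entries destined for each rank     */
  int         *offset = calloc(size + 1, sizeof(*offset)); /* where each rank's chunk starts     */
  MPI_Request *reqs   = malloc(size * sizeof(*reqs));
  int          nsend  = 0;

  /* ... a counting pass would fill len_s[] and offset[] here ... */

  for (int proc = 0; proc < size; proc++) {
    if (!len_s[proc]) continue;                             /* skip empty messages                */
    MPI_Isend(data + offset[proc], len_s[proc], MPI_DOUBLE, proc, tag, MPI_COMM_WORLD, &reqs[nsend++]);
  }
  MPI_Waitall(nsend, reqs, MPI_STATUSES_IGNORE);            /* matching receives omitted in this sketch */

  free(data); free(len_s); free(offset); free(reqs);
  MPI_Finalize();
  return 0;
}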
/petsc/src/dm/partitioner/impls/shell/
partshell.c
185 PetscInt proc, numPoints; in PetscPartitionerShellSetPartition() local
196 … for (proc = 0; proc < size; ++proc) PetscCall(PetscSectionSetDof(p->section, proc, sizes[proc])); in PetscPartitionerShellSetPartition()
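The shell partitioner simply records a user-supplied partition: one PetscSection dof per target rank, holding how many mesh points that rank should receive. A hedged sketch of handing such a partition to a shell PetscPartitioner; the sizes and point numbering are invented, and a real use would pass points of an actual DMPlex:

#include <petscpartitioner.h>

int main(int argc, char **argv)
{
  PetscPartitioner part;
  PetscMPIInt      size;
  /* hypothetical partition of 6 points over the ranks; shown for a 2-rank run */
  PetscInt         sizes[2]  = {3, 3};
  PetscInt         points[6] = {0, 2, 4, 1, 3, 5}; /* points for rank 0, then rank 1 */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscPartitionerCreate(PETSC_COMM_WORLD, &part));
  PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL));
  if (size == 2) PetscCall(PetscPartitionerShellSetPartition(part, size, sizes, points));
  PetscCall(PetscPartitionerDestroy(&part));
  PetscCall(PetscFinalize());
  return 0;
}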
/petsc/src/mat/impls/dense/mpi/
mmdense.c
55 PetscMPIInt rank, size, tag0, tag1, idex, end, i, proc, nrqs, *rtable, *pa, nrqr; in MatCreateSubMatrices_MPIDense_Local() local
112 proc = rtable[row]; in MatCreateSubMatrices_MPIDense_Local()
113 w4[proc]++; in MatCreateSubMatrices_MPIDense_Local()
190 proc = rtable[row]; in MatCreateSubMatrices_MPIDense_Local()
191 if (proc != rank) { /* copy to the outgoing buf*/ in MatCreateSubMatrices_MPIDense_Local()
192 ctr[proc]++; in MatCreateSubMatrices_MPIDense_Local()
193 *ptr[proc] = row; in MatCreateSubMatrices_MPIDense_Local()
194 ptr[proc]++; in MatCreateSubMatrices_MPIDense_Local()
304 proc = rtable[row]; in MatCreateSubMatrices_MPIDense_Local()
305 if (proc == rank) { in MatCreateSubMatrices_MPIDense_Local()
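Instead of scanning ranges on every lookup, this routine precomputes rtable[], a dense row-to-owner table, so each requested row resolves to its rank with a single array access before being kept locally or appended to an outgoing buffer. A sketch of building and using such a table from hypothetical ownership ranges:

#include <stdio.h>
#include <stdlib.h>

int main(void)
{
  enum { SIZE = 3 };
  const int owners[SIZE + 1] = {0, 4, 9, 12}; /* hypothetical row ownership ranges */
  const int nrows            = owners[SIZE];
  int      *rtable           = malloc(nrows * sizeof(*rtable));

  /* fill rtable once: rtable[row] = owning rank */
  for (int proc = 0; proc < SIZE; proc++)
    for (int row = owners[proc]; row < owners[proc + 1]; row++) rtable[row] = proc;

  /* later lookups are O(1) instead of a range scan */
  const int requested[] = {1, 6, 10};
  for (int i = 0; i < 3; i++) printf("row %d -> rank %d\n", requested[i], rtable[requested[i]]);

  free(rtable);
  return 0;
}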
/petsc/lib/petsc/bin/maint/petsclinter/petsclinter/
queue_main.py
166 proc = mp.current_process().name
167 print_prefix = proc + ' --'[:len('[ROOT]') - len(proc)]
/petsc/src/mat/impls/baij/mpi/
baijov.c
65 PetscMPIInt *onodes1, *olengths1, *onodes2, *olengths2, proc = -1; in MatIncreaseOverlap_MPIBAIJ_Once() local
97 PetscCall(PetscLayoutFindOwner(C->rmap, row * C->rmap->bs, &proc)); in MatIncreaseOverlap_MPIBAIJ_Once()
98 w4[proc]++; in MatIncreaseOverlap_MPIBAIJ_Once()
188 PetscCall(PetscLayoutFindOwner(C->rmap, row * C->rmap->bs, &proc)); in MatIncreaseOverlap_MPIBAIJ_Once()
189 if (proc != rank) { /* copy to the outgoing buffer */ in MatIncreaseOverlap_MPIBAIJ_Once()
190 ctr[proc]++; in MatIncreaseOverlap_MPIBAIJ_Once()
191 *ptr[proc] = row; in MatIncreaseOverlap_MPIBAIJ_Once()
192 ptr[proc]++; in MatIncreaseOverlap_MPIBAIJ_Once()
253 proc = onodes1[i]; in MatIncreaseOverlap_MPIBAIJ_Once()
254 PetscCall(PetscMPIIntCast(isz1[i], &rw1[proc])); in MatIncreaseOverlap_MPIBAIJ_Once()
[all …]
/petsc/src/mat/graphops/coarsen/impls/hem/
hem.c
543 PetscMPIInt proc = lghost_pe[ix], idx = -1; in MatCoarsenApply_HEM_private() local
546 if (comm_procs[k] == proc) idx = k; in MatCoarsenApply_HEM_private()
547 if (idx == -1) comm_procs[ncomm_procs++] = proc; in MatCoarsenApply_HEM_private()
796 PetscMPIInt proc = lghost_pe[ghost1_idx]; in MatCoarsenApply_HEM_private() local
799 … PetscCall(PetscCDAppendID(ghost_deleted_list, proc, ghost1_idx)); /* cache to send messages */ in MatCoarsenApply_HEM_private()
800 PetscCall(PetscCDAppendID(ghost_deleted_list, proc, lid0)); in MatCoarsenApply_HEM_private()
828 const PetscMPIInt proc = comm_procs[proc_idx]; in MatCoarsenApply_HEM_private() local
833 PetscCall(PetscCDCountAt(ghost_deleted_list, proc, &ndel)); in MatCoarsenApply_HEM_private()
844 PetscCall(PetscCDGetHeadPos(ghost_deleted_list, proc, &pos)); in MatCoarsenApply_HEM_private()
850 PetscCall(PetscCDGetNextPos(ghost_deleted_list, proc, &pos)); in MatCoarsenApply_HEM_private()
[all …]
/petsc/src/ts/characteristic/interface/
characteristic.c
388 Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y); in CharacteristicSolve()
406 if (c->neighbors[Qi.proc] == rank) { in CharacteristicSolve()
470 Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y); in CharacteristicSolve()
493 if (c->neighbors[c->queue[n].proc] == rank) { in CharacteristicSolve()
584 for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++; in CharacteristicSendCoordinatesBegin()
629 …rs[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc… in CharacteristicSendCoordinatesEnd()
666 … for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc)); in CharacteristicHeapSort()
679 … for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc)); in CharacteristicHeapSort()
696 else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2; in CharacteristicSiftDown()
699 if (queue[root].proc < queue[maxChild].proc) { in CharacteristicSiftDown()
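Each queued characteristic point carries the relative processor it must be gathered from (the proc member shown in characteristicimpl.h below), and the queue is heap-sorted on that field so entries destined for the same rank become contiguous before communication. A sketch of the same grouping using qsort instead of the hand-written heap; the queue item type here is hypothetical:

#include <stdio.h>
#include <stdlib.h>

typedef struct {
  int    proc; /* destination/source rank for this entry */
  double x, y; /* hypothetical payload                    */
} QueueItem;

static int compare_by_proc(const void *a, const void *b)
{
  const QueueItem *qa = a, *qb = b;
  return (qa->proc > qb->proc) - (qa->proc < qb->proc);
}

int main(void)
{
  QueueItem queue[] = {{2, 0.1, 0.2}, {0, 0.3, 0.4}, {1, 0.5, 0.6}, {0, 0.7, 0.8}};
  size_t    n       = sizeof(queue) / sizeof(queue[0]);

  qsort(queue, n, sizeof(queue[0]), compare_by_proc); /* group entries by destination rank */
  for (size_t i = 0; i < n; i++) printf("proc %d: (%g, %g)\n", queue[i].proc, queue[i].x, queue[i].y);
  return 0;
}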
/petsc/src/dm/impls/plex/
plexdistribute.c
356 PetscMPIInt size, proc, rank; in DMPlexCreateTwoSidedProcessSF() local
394 for (proc = 0, numNeighbors = 0; proc < size; ++proc) { in DMPlexCreateTwoSidedProcessSF()
395 if (PetscBTLookup(neighbors, proc)) ++numNeighbors; in DMPlexCreateTwoSidedProcessSF()
400 for (proc = 0, n = 0; proc < size; ++proc) { in DMPlexCreateTwoSidedProcessSF()
401 if (PetscBTLookup(neighbors, proc)) { in DMPlexCreateTwoSidedProcessSF()
402 ranksNew[n] = proc; in DMPlexCreateTwoSidedProcessSF()
403 localPointsNew[n] = proc; in DMPlexCreateTwoSidedProcessSF()
405 remotePointsNew[n].rank = proc; in DMPlexCreateTwoSidedProcessSF()
1880 PetscInt pStart, pEnd, proc, npoints, poff = 0, nranks; in DMPlexDistribute() local
1886 for (proc = pStart; proc < pEnd; proc++) { in DMPlexDistribute()
[all …]
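DMPlexCreateTwoSidedProcessSF marks neighboring ranks in a PetscBT bit array and then sweeps all ranks twice, once to count the set bits and once to record each neighbor in the arrays that back the process star forest. A small sketch of that count-then-collect pattern over a PetscBT; the single neighbor marked here is made up:

#include <petscsys.h>
#include <petscbt.h>

int main(int argc, char **argv)
{
  PetscBT     neighbors;
  PetscMPIInt size;
  PetscInt    numNeighbors = 0, n = 0, *ranks;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscBTCreate(size, &neighbors));
  PetscCall(PetscBTSet(neighbors, 0));                       /* hypothetical: rank 0 is a neighbor */

  for (PetscInt proc = 0; proc < size; ++proc)
    if (PetscBTLookup(neighbors, proc)) ++numNeighbors;      /* first pass: count                  */
  PetscCall(PetscMalloc1(numNeighbors, &ranks));
  for (PetscInt proc = 0; proc < size; ++proc)
    if (PetscBTLookup(neighbors, proc)) ranks[n++] = proc;   /* second pass: collect               */

  PetscCall(PetscPrintf(PETSC_COMM_SELF, "%" PetscInt_FMT " neighbors\n", numNeighbors));
  PetscCall(PetscFree(ranks));
  PetscCall(PetscBTDestroy(&neighbors));
  PetscCall(PetscFinalize());
  return 0;
}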
plexorient.c
622 PetscInt proc, nproc, seen, flippedA, flippedB, mismatch, numNeighbors, n; in DMPlexOrient() local
624 proc = procFIFO[pTop++]; in DMPlexOrient()
625 flippedA = PetscBTLookup(flippedProcs, proc) ? 1 : 0; in DMPlexOrient()
626 PetscCall(MatGetRow(G, proc, &numNeighbors, &neighbors, &ornt)); in DMPlexOrient()
635 … %" PetscInt_FMT " and %" PetscInt_FMT " do not match: Fault mesh is non-orientable", proc, nproc); in DMPlexOrient()
999 PetscInt proc, nproc, seen, flippedA, flippedB, mismatch, numNeighbors; in DMPlexOrientCells_Internal() local
1001 proc = procFIFO[pTop++]; in DMPlexOrientCells_Internal()
1002 flippedA = PetscBTLookup(flippedProcs, proc) ? 1 : 0; in DMPlexOrientCells_Internal()
1003 PetscCall(MatGetRow(G, proc, &numNeighbors, &neighbors, &ornt)); in DMPlexOrientCells_Internal()
1012 … %" PetscInt_FMT " and %" PetscInt_FMT " do not match: Fault mesh is non-orientable", proc, nproc); in DMPlexOrientCells_Internal()
/petsc/include/petsc/private/
characteristicimpl.h
24 …PetscMPIInt proc; /* Relative processor from which data is required (mapped to ab… member
/petsc/src/ksp/ksp/tutorials/output/
ex71_bddc_elast_4lev.out
42 Coarse eqs per proc (significant at the coarsest level): 1
155 Coarse eqs per proc (significant at the coarsest level): 1
274 Coarse eqs per proc (significant at the coarsest level): 1
ex59_bddc_fetidp_ml_2.out
42 Coarse eqs per proc (significant at the coarsest level): 1
153 Coarse eqs per proc (significant at the coarsest level): 1
ex59_bddc_fetidp_ml_1.out
42 Coarse eqs per proc (significant at the coarsest level): 1
153 Coarse eqs per proc (significant at the coarsest level): 1
ex71_bddc_elast_3lev.out
42 Coarse eqs per proc (significant at the coarsest level): 1
155 Coarse eqs per proc (significant at the coarsest level): 1
ex71_bddc_elast_3lev_alt.out
42 Coarse eqs per proc (significant at the coarsest level): 1
155 Coarse eqs per proc (significant at the coarsest level): 1
ex71_bddc_elast_deluxe_layers.out
42 Coarse eqs per proc (significant at the coarsest level): 1
/petsc/src/snes/tutorials/output/
ex12_p4est_full_q2_nonconformal_parallel_bddcfas.out
73 Coarse eqs per proc (significant at the coarsest level): 1
248 Coarse eqs per proc (significant at the coarsest level): 1
424 Coarse eqs per proc (significant at the coarsest level): 1
ex12_p4est_full_q2_nonconformal_parallel_bddcfas_alt.out
73 Coarse eqs per proc (significant at the coarsest level): 1
248 Coarse eqs per proc (significant at the coarsest level): 1
424 Coarse eqs per proc (significant at the coarsest level): 1
