static char help[] = "Test VecScatterCreateToZero, VecScatterCreateToAll\n\n";

#include <petscvec.h>
int main(int argc,char **argv)
{
  PetscInt    i,N=10,low,high;
  PetscMPIInt size,rank;
  Vec         x,y;
  VecScatter  vscat;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));

  PetscCall(VecCreate(PETSC_COMM_WORLD,&x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x,PETSC_DECIDE,N));
  PetscCall(VecGetOwnershipRange(x,&low,&high));
  PetscCall(PetscObjectSetName((PetscObject)x,"x"));

  /*-------------------------------------*/
  /*       VecScatterCreateToZero        */
  /*-------------------------------------*/

  /* MPI vec x = [0, 1, 2, .., N-1] */
  for (i=low; i<high; i++) PetscCall(VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nTesting VecScatterCreateToZero\n"));
  PetscCall(VecScatterCreateToZero(x,&vscat,&y));
  PetscCall(PetscObjectSetName((PetscObject)y,"y"));

  /* Test PetscSFBcastAndOp with op = MPI_REPLACE, which does y = x on rank 0 */
  PetscCall(VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  if (rank == 0) PetscCall(VecView(y,PETSC_VIEWER_STDOUT_SELF));

  /* Test PetscSFBcastAndOp with op = MPI_SUM, which does y += x */
  PetscCall(VecScatterBegin(vscat,x,y,ADD_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat,x,y,ADD_VALUES,SCATTER_FORWARD));
  if (rank == 0) PetscCall(VecView(y,PETSC_VIEWER_STDOUT_SELF));

  /* Test PetscSFReduce with op = MPI_REPLACE, which does x = y */
  PetscCall(VecScatterBegin(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));

  /* Test PetscSFReduce with op = MPI_SUM, which does x += y on x's local part on rank 0 */
  PetscCall(VecScatterBegin(vscat,y,x,ADD_VALUES,SCATTER_REVERSE_LOCAL));
  PetscCall(VecScatterEnd(vscat,y,x,ADD_VALUES,SCATTER_REVERSE_LOCAL));
  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));

  PetscCall(VecDestroy(&y));
  PetscCall(VecScatterDestroy(&vscat));

  /*-------------------------------------*/
  /*        VecScatterCreateToAll        */
  /*-------------------------------------*/
  for (i=low; i<high; i++) PetscCall(VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nTesting VecScatterCreateToAll\n"));

  PetscCall(VecScatterCreateToAll(x,&vscat,&y));
  PetscCall(PetscObjectSetName((PetscObject)y,"y"));

  /* Test PetscSFBcastAndOp with op = MPI_REPLACE, which does y = x on all ranks */
  PetscCall(VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  if (rank == 0) PetscCall(VecView(y,PETSC_VIEWER_STDOUT_SELF));

  /* Test PetscSFBcastAndOp with op = MPI_SUM, which does y += x */
  PetscCall(VecScatterBegin(vscat,x,y,ADD_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat,x,y,ADD_VALUES,SCATTER_FORWARD));
  if (rank == 0) PetscCall(VecView(y,PETSC_VIEWER_STDOUT_SELF));

  /* Test PetscSFReduce with op = MPI_REPLACE, which does x = y */
  PetscCall(VecScatterBegin(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));

  /* Test PetscSFReduce with op = MPI_SUM, which does x += size*y */
  PetscCall(VecScatterBegin(vscat,y,x,ADD_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd(vscat,y,x,ADD_VALUES,SCATTER_REVERSE));
  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(VecScatterDestroy(&vscat));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   testset:
     # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
     nsize: 2
     # Exact numbers really matter here
     diff_args: -j
     filter: grep -v "type"
     output_file: output/ex8_1.out

     test:
       suffix: 1_standard

     test:
       suffix: 1_cuda
       # sf_backend cuda is not needed if compiling only with cuda
       args: -vec_type cuda -sf_backend cuda
       requires: cuda

     test:
       suffix: 1_hip
       args: -vec_type hip -sf_backend hip
       requires: hip

     test:
       suffix: 1_cuda_aware_mpi
       # sf_backend cuda is not needed if compiling only with cuda
       args: -vec_type cuda -sf_backend cuda
       requires: cuda defined(PETSC_HAVE_MPI_GPU_AWARE)

   testset:
     # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
     nsize: 3
     # Exact numbers really matter here
     diff_args: -j
     filter: grep -v "type"
     output_file: output/ex8_2.out

     test:
       suffix: 2_standard

     test:
       suffix: 2_cuda
       # sf_backend cuda is not needed if compiling only with cuda
       args: -vec_type cuda -sf_backend cuda
       requires: cuda

     test:
       suffix: 2_hip
       # sf_backend hip is not needed if compiling only with hip
       args: -vec_type hip -sf_backend hip
       requires: hip

     test:
       suffix: 2_cuda_aware_mpi
       args: -vec_type cuda
       requires: cuda defined(PETSC_HAVE_MPI_GPU_AWARE)

TEST*/