static const char help[] = "Test freeing of MPI types in PetscSF\n\n";

#include <petscvec.h>
#include <petscsf.h>
#include <petscviewer.h>

int main(int argc, char **argv)
{
  PetscSF      sf;
  Vec          A, Aout;
  PetscScalar *bufA;
  PetscScalar *bufAout;
  PetscMPIInt  rank, size;
  PetscInt     nroots, nleaves;
  PetscInt     i;
  PetscInt    *ilocal;
  PetscSFNode *iremote;
  PetscBool    test_dupped_type;
  MPI_Datatype contig;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_USER, "Only coded for one MPI process");

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "PetscSF type freeing options", "none");
  test_dupped_type = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test_dupped_type", "Test dupped input type", "", test_dupped_type, &test_dupped_type, NULL));
  PetscOptionsEnd();

  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  PetscCall(PetscSFSetFromOptions(sf));

  /* One root and one leaf: a self-edge on rank 0 is enough to exercise type handling */
  nleaves = 1;
  nroots  = 1;
  PetscCall(PetscMalloc1(nleaves, &ilocal));

  for (i = 0; i < nleaves; i++) ilocal[i] = i;

  PetscCall(PetscMalloc1(nleaves, &iremote));
  iremote[0].rank  = 0;
  iremote[0].index = 0;
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sf));
  PetscCall(PetscSFView(sf, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecCreate(PETSC_COMM_WORLD, &A));
  PetscCall(VecSetSizes(A, 4, PETSC_DETERMINE));
  PetscCall(VecSetFromOptions(A));
  PetscCall(VecSetUp(A));

  PetscCall(VecDuplicate(A, &Aout));
  PetscCall(VecGetArray(A, &bufA));
  for (i = 0; i < 4; i++) bufA[i] = (PetscScalar)i;
  PetscCall(VecRestoreArray(A, &bufA));

  PetscCall(VecGetArrayRead(A, (const PetscScalar **)&bufA));
  PetscCall(VecGetArray(Aout, &bufAout));

  /* Communicate four scalars at a time through a committed contiguous type */
  PetscCallMPI(MPI_Type_contiguous(4, MPIU_SCALAR, &contig));
  PetscCallMPI(MPI_Type_commit(&contig));

  if (test_dupped_type) {
    /* Hand the SF a duplicate of the type and free the original right away */
    MPI_Datatype tmp;
    PetscCallMPI(MPI_Type_dup(contig, &tmp));
    PetscCallMPI(MPI_Type_free(&contig));
    contig = tmp;
  }
  /* Broadcast root values to the leaf repeatedly with the derived type */
  for (i = 0; i < 10000; i++) {
    PetscCall(PetscSFBcastBegin(sf, contig, bufA, bufAout, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, contig, bufA, bufAout, MPI_REPLACE));
  }
  PetscCall(VecRestoreArrayRead(A, (const PetscScalar **)&bufA));
  PetscCall(VecRestoreArray(Aout, &bufAout));

  PetscCall(VecView(Aout, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&A));
  PetscCall(VecDestroy(&Aout));
  PetscCall(PetscSFDestroy(&sf));
  /* The type must still be freeable after the SF that used it is gone */
  PetscCallMPI(MPI_Type_free(&contig));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      suffix: basic
      args: -sf_type basic

   test:
      suffix: basic_dupped
      args: -test_dupped_type -sf_type basic

   test:
      suffix: window
      filter: grep -v "type" | grep -v "sort"
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate dynamic}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: window_dupped
      filter: grep -v "type" | grep -v "sort"
      args: -test_dupped_type -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate dynamic}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: window_shared
      output_file: output/ex3_window.out
      filter: grep -v "type" | grep -v "sort"
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared
      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI_NUMVERSION)

   test:
      suffix: window_dupped_shared
      output_file: output/ex3_window_dupped.out
      filter: grep -v "type" | grep -v "sort"
      args: -test_dupped_type -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared
      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI_NUMVERSION)

TEST*/