static const char help[] = "Test freeing of MPI types in PetscSF\n\n";

#include <petscvec.h>
#include <petscsf.h>
#include <petscviewer.h>

/*
   Exercises PetscSF with a user-defined (committed) MPI datatype as the unit
   type, verifying that the SF handles freeing of such types: the type is used
   for many Bcast operations and freed only after the SF is destroyed.  With
   -test_dupped_type the original type is freed immediately after MPI_Type_dup
   and only the duplicate is handed to the SF.
*/
int main(int argc, char **argv) {
  PetscSF      sf;               /* star forest with a trivial 1-root/1-leaf graph */
  Vec          A, Aout;          /* source and destination vectors, 4 entries each */
  PetscScalar *bufA;
  PetscScalar *bufAout;
  PetscMPIInt  rank, size;
  PetscInt     nroots, nleaves;
  PetscInt     i;
  PetscInt    *ilocal;
  PetscSFNode *iremote;
  PetscBool    test_dupped_type; /* -test_dupped_type: use an MPI_Type_dup'ed copy */
  MPI_Datatype contig;           /* user type: 4 contiguous PetscScalars */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  /* The graph below hard-codes remote rank 0, so the test is serial-only. */
  PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_USER, "Only coded for one MPI process");

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "PetscSF type freeing options", "none");
  test_dupped_type = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test_dupped_type", "Test dupped input type", "", test_dupped_type, &test_dupped_type, NULL));
  PetscOptionsEnd();

  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  PetscCall(PetscSFSetFromOptions(sf));

  /* Build the simplest possible graph: one leaf pointing at (rank 0, root 0). */
  nleaves = 1;
  nroots = 1;
  PetscCall(PetscMalloc1(nleaves, &ilocal));

  for (i = 0; i < nleaves; i++) { ilocal[i] = i; }

  PetscCall(PetscMalloc1(nleaves, &iremote));
  iremote[0].rank = 0;
  iremote[0].index = 0;
  /* PETSC_OWN_POINTER: the SF takes ownership of ilocal and iremote and will
     free them, so no PetscFree for either below. */
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sf));
  PetscCall(PetscSFView(sf, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecCreate(PETSC_COMM_WORLD, &A));
  PetscCall(VecSetSizes(A, 4, PETSC_DETERMINE));
  PetscCall(VecSetFromOptions(A));
  PetscCall(VecSetUp(A));

  PetscCall(VecDuplicate(A, &Aout));
  PetscCall(VecGetArray(A, &bufA));
  for (i = 0; i < 4; i++) { bufA[i] = (PetscScalar)i; }
  PetscCall(VecRestoreArray(A, &bufA));

  PetscCall(VecGetArrayRead(A, (const PetscScalar **)&bufA));
  PetscCall(VecGetArray(Aout, &bufAout));

  /* One communication unit = 4 scalars, i.e. the entire local vector. */
  PetscCallMPI(MPI_Type_contiguous(4, MPIU_SCALAR, &contig));
  PetscCallMPI(MPI_Type_commit(&contig));

  if (test_dupped_type) {
    /* Give the SF only a duplicate; the original handle is freed right away,
       so the SF must work with the dup'ed type on its own. */
    MPI_Datatype tmp;
    PetscCallMPI(MPI_Type_dup(contig, &tmp));
    PetscCallMPI(MPI_Type_free(&contig));
    contig = tmp;
  }
  /* Repeat many times — presumably so that any per-operation leak of
     type-related resources becomes visible (NOTE(review): inferred from the
     help string "Test freeing of MPI types"). */
  for (i = 0; i < 10000; i++) {
    PetscCall(PetscSFBcastBegin(sf, contig, bufA, bufAout, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, contig, bufA, bufAout, MPI_REPLACE));
  }
  PetscCall(VecRestoreArrayRead(A, (const PetscScalar **)&bufA));
  PetscCall(VecRestoreArray(Aout, &bufAout));

  PetscCall(VecView(Aout, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&A));
  PetscCall(VecDestroy(&Aout));
  PetscCall(PetscSFDestroy(&sf));
  /* Free the user type only after the SF is gone. */
  PetscCallMPI(MPI_Type_free(&contig));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
     suffix: basic
     args: -sf_type basic

   test:
     suffix: basic_dupped
     args: -test_dupped_type -sf_type basic

   test:
     suffix: window
     filter: grep -v "type" | grep -v "sort"
     args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate dynamic}}
     requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
     suffix: window_dupped
     filter: grep -v "type" | grep -v "sort"
     args: -test_dupped_type -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate dynamic}}
     requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
     suffix: window_shared
     output_file: output/ex3_window.out
     filter: grep -v "type" | grep -v "sort"
     args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared
     requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI_NUMVERSION)

   test:
     suffix: window_dupped_shared
     output_file: output/ex3_window_dupped.out
     filter: grep -v "type" | grep -v "sort"
     args: -test_dupped_type -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared
     requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI_NUMVERSION)

TEST*/