static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0 number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";

#include <petscmat.h>

/* Gather a parallel IS onto every process, returning one sequential IS per rank
   and preserving the per-rank grouping of the indices */
PetscErrorCode ISAllGatherDisjoint(IS iis, IS **ois) {
  IS              *is2, is;
  const PetscInt  *idxs;
  PetscInt         i, ls, *sizes;
  PetscMPIInt      size;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)iis), &size));
  PetscCall(PetscMalloc1(size, &is2));
  PetscCall(PetscMalloc1(size, &sizes));
  PetscCall(ISGetLocalSize(iis, &ls));
  /* we don't have a public ISGetLayout */
  PetscCallMPI(MPI_Allgather(&ls, 1, MPIU_INT, sizes, 1, MPIU_INT, PetscObjectComm((PetscObject)iis)));
  PetscCall(ISAllGather(iis, &is));
  PetscCall(ISGetIndices(is, &idxs));
  for (i = 0, ls = 0; i < size; i++) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, sizes[i], idxs + ls, PETSC_COPY_VALUES, &is2[i]));
    ls += sizes[i];
  }
  PetscCall(ISRestoreIndices(is, &idxs));
  PetscCall(ISDestroy(&is));
  PetscCall(PetscFree(sizes));
  *ois = is2;
  PetscFunctionReturn(0);
}

int main(int argc, char **args) {
  PetscInt     nd = 2, ov = 1, ndpar, i, start, m, n, end, lsize;
  PetscMPIInt  rank;
  PetscBool    flg, useND = PETSC_FALSE;
  Mat          A, B;
  char         file[PETSC_MAX_PATH_LEN];
  PetscViewer  fd;
  IS          *is1, *is2;
  PetscRandom  r;
  PetscScalar  rand;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER, "Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-nested_dissection", &useND, NULL));

  /* Read the matrix as a parallel matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatLoad(A, fd));
  PetscCall(MatSetFromOptions(A));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the matrix again as a sequential matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatLoad(B, fd));
  PetscCall(MatSetFromOptions(B));
  PetscCall(PetscViewerDestroy(&fd));

  /* Create the IS corresponding to subdomains */
  if (useND) {
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;

    ndpar = 1;
    PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
    nd = (PetscInt)size;
    PetscCall(PetscMalloc1(ndpar, &is1));
    PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
    PetscCall(MatPartitioningSetAdjacency(part, A));
    PetscCall(MatPartitioningSetFromOptions(part));
    PetscCall(MatPartitioningApplyND(part, &ndmap));
    PetscCall(MatPartitioningDestroy(&part));
    PetscCall(ISBuildTwoSided(ndmap, NULL, &is1[0]));
    PetscCall(ISDestroy(&ndmap));
    PetscCall(ISAllGatherDisjoint(is1[0], &is2));
  } else {
    /* Create the random index sets: one contiguous random range of rows per domain */
    PetscCall(PetscMalloc1(nd, &is1));
    PetscCall(PetscMalloc1(nd, &is2));

    PetscCall(MatGetSize(A, &m, &n));
    PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
    PetscCall(PetscRandomSetFromOptions(r));
    for (i = 0; i < nd; i++) {
      PetscCall(PetscRandomGetValue(r, &rand));
      start = (PetscInt)(rand * m);
      PetscCall(PetscRandomGetValue(r, &rand));
      end   = (PetscInt)(rand * m);
      lsize = end - start;
      if (start > end) { /* swap the endpoints so the stride has nonnegative length */
        start = end;
        lsize = -lsize;
      }
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is1 + i));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is2 + i));
    }
    ndpar = nd;
    PetscCall(PetscRandomDestroy(&r));
  }
  PetscCall(MatIncreaseOverlap(A, ndpar, is1, ov));
  PetscCall(MatIncreaseOverlap(B, nd, is2, ov));
  if (useND) {
    IS *is;

    PetscCall(ISAllGatherDisjoint(is1[0], &is));
    PetscCall(ISDestroy(&is1[0]));
    PetscCall(PetscFree(is1));
    is1 = is;
  }
  /* Now see if the serial and parallel cases give the same answers */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISEqual(is1[i], is2[i], &flg));
    if (!flg) {
      PetscCall(ISViewFromOptions(is1[i], NULL, "-err_view"));
      PetscCall(ISViewFromOptions(is2[i], NULL, "-err_view"));
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "proc:[%d], i=%" PetscInt_FMT ", flg=%d", rank, i, (int)flg);
    }
  }

  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
         suffix: 1
         args: -nd 7
      test:
         requires: parmetis
         suffix: 1_nd
         args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
         suffix: 2
         args: -nd 7
      test:
         requires: parmetis
         suffix: 2_nd
         args: -nested_dissection -mat_partitioning_type parmetis

TEST*/
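/*
   A sample invocation, assembled from the first testset above. This is
   illustrative only: the executable name and DATAFILESPATH are assumptions
   that depend on your build and where the PETSc datafiles are checked out.

     mpiexec -n 5 ./ex40 -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2 -nd 7

   The nested-dissection variant additionally requires a ParMETIS-enabled build:

     mpiexec -n 5 ./ex40 -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2 \
       -nested_dissection -mat_partitioning_type parmetis
*/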