static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";

#include <petscmat.h>
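
/*
   Example invocation (illustrative, mirroring the test block at the bottom of this
   file; the executable name and the data file location are assumptions):

     mpiexec -n 3 ./ex42 -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1
*/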

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, i, j, m, n, *idx, lsize;
  PetscMPIInt rank;
  PetscBool   flg;
  Mat         A, B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS         *is1, *is2;
  PetscRandom r;
  PetscScalar rand;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));

  /* Read the matrix as a parallel (MPIAIJ) matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatLoad(A, fd));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the matrix again as a sequential (SEQAIJ) matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatLoad(B, fd));
  PetscCall(PetscViewerDestroy(&fd));

  /* Get the matrix size and create the random number generator */
  PetscCall(MatGetSize(A, &m, &n));
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
  PetscCall(PetscRandomSetFromOptions(r));
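  /* Note: the default PetscRandom interval is [0,1), so the scaled values used
     below give subdomain sizes and row indices in the range [0,m) */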

  /* Create the IS corresponding to subdomains */
  PetscCall(PetscMalloc1(nd, &is1));
  PetscCall(PetscMalloc1(nd, &is2));
  PetscCall(PetscMalloc1(m, &idx));

  /* Create the random Index Sets */
  for (i = 0; i < nd; i++) {
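    /* Each rank discards 'rank' leading random values, so the otherwise identical
       per-rank generators produce different subdomains on each rank */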
    for (j = 0; j < rank; j++) PetscCall(PetscRandomGetValue(r, &rand));
    PetscCall(PetscRandomGetValue(r, &rand));
    lsize = (PetscInt)(rand * m);
    for (j = 0; j < lsize; j++) {
      PetscCall(PetscRandomGetValue(r, &rand));
      idx[j] = (PetscInt)(rand * m);
    }
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize, idx, PETSC_COPY_VALUES, is1 + i));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize, idx, PETSC_COPY_VALUES, is2 + i));
  }
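  /* Grow each index set by 'ov' levels of overlap on both the parallel matrix A
     and the sequential matrix B, starting from identical index sets */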
  PetscCall(MatIncreaseOverlap(A, nd, is1, ov));
  PetscCall(MatIncreaseOverlap(B, nd, is2, ov));

  /* Now see if the serial and parallel cases give the same answers */
  for (i = 0; i < nd; ++i) {
    PetscInt sz1, sz2;
    PetscCall(ISEqual(is1[i], is2[i], &flg));
    PetscCall(ISGetSize(is1[i], &sz1));
    PetscCall(ISGetSize(is2[i], &sz2));
    PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "proc:[%d], i=%" PetscInt_FMT ", flg =%d sz1 = %" PetscInt_FMT " sz2 = %" PetscInt_FMT, rank, i, (int)flg, sz1, sz2);
  }

  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(PetscRandomDestroy(&r));
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFree(idx));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1
      output_file: output/empty.out

TEST*/