xref: /petsc/src/mat/tests/ex42.c (revision 76be6f4ff3bd4e251c19fc00ebbebfd58b6e7589)
1 
2 static char help[] = "Tests MatIncreaseOverlap() and MatCreateSubmatrices() for the parallel case.\n\
3 This example is similar to ex40.c; here the index sets used are random.\n\
4 Input arguments are:\n\
5   -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
6   -nd <size>      : > 0  no of domains per processor \n\
7   -ov <overlap>   : >=0  amount of overlap between domains\n\n";
8 
9 #include <petscmat.h>
10 
11 int main(int argc,char **args)
12 {
13   PetscInt       nd = 2,ov=1,i,j,lsize,m,n,*idx,bs;
14   PetscMPIInt    rank, size;
15   PetscBool      flg;
16   Mat            A,B,*submatA,*submatB;
17   char           file[PETSC_MAX_PATH_LEN];
18   PetscViewer    fd;
19   IS             *is1,*is2;
20   PetscRandom    r;
21   PetscBool      test_unsorted = PETSC_FALSE;
22   PetscScalar    rand;
23 
24   PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
25   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
26   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
27   PetscCall(PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),NULL));
28   PetscCall(PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL));
29   PetscCall(PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL));
30   PetscCall(PetscOptionsGetBool(NULL,NULL,"-test_unsorted",&test_unsorted,NULL));
31 
32   /* Read matrix A and RHS */
33   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd));
34   PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
35   PetscCall(MatSetType(A,MATAIJ));
36   PetscCall(MatSetFromOptions(A));
37   PetscCall(MatLoad(A,fd));
38   PetscCall(PetscViewerDestroy(&fd));
39 
40   /* Read the same matrix as a seq matrix B */
41   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd));
42   PetscCall(MatCreate(PETSC_COMM_SELF,&B));
43   PetscCall(MatSetType(B,MATSEQAIJ));
44   PetscCall(MatSetFromOptions(B));
45   PetscCall(MatLoad(B,fd));
46   PetscCall(PetscViewerDestroy(&fd));
47 
48   PetscCall(MatGetBlockSize(A,&bs));
49 
50   /* Create the Random no generator */
51   PetscCall(MatGetSize(A,&m,&n));
52   PetscCall(PetscRandomCreate(PETSC_COMM_SELF,&r));
53   PetscCall(PetscRandomSetFromOptions(r));
54 
55   /* Create the IS corresponding to subdomains */
56   PetscCall(PetscMalloc1(nd,&is1));
57   PetscCall(PetscMalloc1(nd,&is2));
58   PetscCall(PetscMalloc1(m ,&idx));
59   for (i = 0; i < m; i++) {idx[i] = i;}
60 
61   /* Create the random Index Sets */
62   for (i=0; i<nd; i++) {
63     /* Skip a few,so that the IS on different procs are diffeent*/
64     for (j=0; j<rank; j++) {
65       PetscCall(PetscRandomGetValue(r,&rand));
66     }
67     PetscCall(PetscRandomGetValue(r,&rand));
68     lsize = (PetscInt)(rand*(m/bs));
69     /* shuffle */
70     for (j=0; j<lsize; j++) {
71       PetscInt k, swap, l;
72 
73       PetscCall(PetscRandomGetValue(r,&rand));
74       k      = j + (PetscInt)(rand*((m/bs)-j));
75       for (l = 0; l < bs; l++) {
76         swap        = idx[bs*j+l];
77         idx[bs*j+l] = idx[bs*k+l];
78         idx[bs*k+l] = swap;
79       }
80     }
81     if (!test_unsorted) PetscCall(PetscSortInt(lsize*bs,idx));
82     PetscCall(ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is1+i));
83     PetscCall(ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is2+i));
84     PetscCall(ISSetBlockSize(is1[i],bs));
85     PetscCall(ISSetBlockSize(is2[i],bs));
86   }
87 
88   if (!test_unsorted) {
89     PetscCall(MatIncreaseOverlap(A,nd,is1,ov));
90     PetscCall(MatIncreaseOverlap(B,nd,is2,ov));
91 
92     for (i=0; i<nd; ++i) {
93       PetscCall(ISSort(is1[i]));
94       PetscCall(ISSort(is2[i]));
95     }
96   }
97 
98   PetscCall(MatCreateSubMatrices(A,nd,is1,is1,MAT_INITIAL_MATRIX,&submatA));
99   PetscCall(MatCreateSubMatrices(B,nd,is2,is2,MAT_INITIAL_MATRIX,&submatB));
100 
101   /* Now see if the serial and parallel case have the same answers */
102   for (i=0; i<nd; ++i) {
103     PetscCall(MatEqual(submatA[i],submatB[i],&flg));
104     PetscCheck(flg,PETSC_COMM_SELF,PETSC_ERR_PLIB,"%" PetscInt_FMT "-th parallel submatA != seq submatB",i);
105   }
106 
107   /* Free Allocated Memory */
108   for (i=0; i<nd; ++i) {
109     PetscCall(ISDestroy(&is1[i]));
110     PetscCall(ISDestroy(&is2[i]));
111   }
112   PetscCall(MatDestroySubMatrices(nd,&submatA));
113   PetscCall(MatDestroySubMatrices(nd,&submatB));
114 
115   PetscCall(PetscRandomDestroy(&r));
116   PetscCall(PetscFree(is1));
117   PetscCall(PetscFree(is2));
118   PetscCall(MatDestroy(&A));
119   PetscCall(MatDestroy(&B));
120   PetscCall(PetscFree(idx));
121   PetscCall(PetscFinalize());
122   return 0;
123 }
124 
/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2

   test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -ov 2
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex

   test:
      suffix: unsorted_baij_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_baij_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

   test:
      suffix: unsorted_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

TEST*/
163