xref: /petsc/src/mat/tests/ex42.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 
/* Help text shown by PetscInitialize() for -help; also documents the command-line options parsed in main().
   Note: the correct API name is MatCreateSubMatrices() (capital M), matching the call below. */
static char help[] = "Tests MatIncreaseOverlap() and MatCreateSubMatrices() for the parallel case.\n\
This example is similar to ex40.c; here the index sets used are random.\n\
Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  no of domains per processor \n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
8 
9 #include <petscmat.h>
10 
11 int main(int argc,char **args)
12 {
13   PetscInt       nd = 2,ov=1,i,j,lsize,m,n,*idx,bs;
14   PetscMPIInt    rank, size;
15   PetscBool      flg;
16   Mat            A,B,*submatA,*submatB;
17   char           file[PETSC_MAX_PATH_LEN];
18   PetscViewer    fd;
19   IS             *is1,*is2;
20   PetscRandom    r;
21   PetscBool      test_unsorted = PETSC_FALSE;
22   PetscScalar    rand;
23 
24   PetscFunctionBeginUser;
25   PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
26   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
27   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
28   PetscCall(PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),NULL));
29   PetscCall(PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL));
30   PetscCall(PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL));
31   PetscCall(PetscOptionsGetBool(NULL,NULL,"-test_unsorted",&test_unsorted,NULL));
32 
33   /* Read matrix A and RHS */
34   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd));
35   PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
36   PetscCall(MatSetType(A,MATAIJ));
37   PetscCall(MatSetFromOptions(A));
38   PetscCall(MatLoad(A,fd));
39   PetscCall(PetscViewerDestroy(&fd));
40 
41   /* Read the same matrix as a seq matrix B */
42   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd));
43   PetscCall(MatCreate(PETSC_COMM_SELF,&B));
44   PetscCall(MatSetType(B,MATSEQAIJ));
45   PetscCall(MatSetFromOptions(B));
46   PetscCall(MatLoad(B,fd));
47   PetscCall(PetscViewerDestroy(&fd));
48 
49   PetscCall(MatGetBlockSize(A,&bs));
50 
51   /* Create the Random no generator */
52   PetscCall(MatGetSize(A,&m,&n));
53   PetscCall(PetscRandomCreate(PETSC_COMM_SELF,&r));
54   PetscCall(PetscRandomSetFromOptions(r));
55 
56   /* Create the IS corresponding to subdomains */
57   PetscCall(PetscMalloc1(nd,&is1));
58   PetscCall(PetscMalloc1(nd,&is2));
59   PetscCall(PetscMalloc1(m ,&idx));
60   for (i = 0; i < m; i++) {idx[i] = i;}
61 
62   /* Create the random Index Sets */
63   for (i=0; i<nd; i++) {
64     /* Skip a few,so that the IS on different procs are diffeent*/
65     for (j=0; j<rank; j++) {
66       PetscCall(PetscRandomGetValue(r,&rand));
67     }
68     PetscCall(PetscRandomGetValue(r,&rand));
69     lsize = (PetscInt)(rand*(m/bs));
70     /* shuffle */
71     for (j=0; j<lsize; j++) {
72       PetscInt k, swap, l;
73 
74       PetscCall(PetscRandomGetValue(r,&rand));
75       k      = j + (PetscInt)(rand*((m/bs)-j));
76       for (l = 0; l < bs; l++) {
77         swap        = idx[bs*j+l];
78         idx[bs*j+l] = idx[bs*k+l];
79         idx[bs*k+l] = swap;
80       }
81     }
82     if (!test_unsorted) PetscCall(PetscSortInt(lsize*bs,idx));
83     PetscCall(ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is1+i));
84     PetscCall(ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is2+i));
85     PetscCall(ISSetBlockSize(is1[i],bs));
86     PetscCall(ISSetBlockSize(is2[i],bs));
87   }
88 
89   if (!test_unsorted) {
90     PetscCall(MatIncreaseOverlap(A,nd,is1,ov));
91     PetscCall(MatIncreaseOverlap(B,nd,is2,ov));
92 
93     for (i=0; i<nd; ++i) {
94       PetscCall(ISSort(is1[i]));
95       PetscCall(ISSort(is2[i]));
96     }
97   }
98 
99   PetscCall(MatCreateSubMatrices(A,nd,is1,is1,MAT_INITIAL_MATRIX,&submatA));
100   PetscCall(MatCreateSubMatrices(B,nd,is2,is2,MAT_INITIAL_MATRIX,&submatB));
101 
102   /* Now see if the serial and parallel case have the same answers */
103   for (i=0; i<nd; ++i) {
104     PetscCall(MatEqual(submatA[i],submatB[i],&flg));
105     PetscCheck(flg,PETSC_COMM_SELF,PETSC_ERR_PLIB,"%" PetscInt_FMT "-th parallel submatA != seq submatB",i);
106   }
107 
108   /* Free Allocated Memory */
109   for (i=0; i<nd; ++i) {
110     PetscCall(ISDestroy(&is1[i]));
111     PetscCall(ISDestroy(&is2[i]));
112   }
113   PetscCall(MatDestroySubMatrices(nd,&submatA));
114   PetscCall(MatDestroySubMatrices(nd,&submatB));
115 
116   PetscCall(PetscRandomDestroy(&r));
117   PetscCall(PetscFree(is1));
118   PetscCall(PetscFree(is2));
119   PetscCall(MatDestroy(&A));
120   PetscCall(MatDestroy(&B));
121   PetscCall(PetscFree(idx));
122   PetscCall(PetscFinalize());
123   return 0;
124 }
125 
126 /*TEST
127 
128    build:
129       requires: !complex
130 
131    test:
132       nsize: 3
133       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
134       args: -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2
135 
136    test:
137       suffix: 2
138       args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -ov 2
139       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
140 
141    test:
142       suffix: unsorted_baij_mpi
143       nsize: 3
144       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
145       args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted
146 
147    test:
148       suffix: unsorted_baij_seq
149       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
150       args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted
151 
152    test:
153       suffix: unsorted_mpi
154       nsize: 3
155       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
156       args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted
157 
158    test:
159       suffix: unsorted_seq
160       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
161       args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted
162 
163 TEST*/
164