xref: /petsc/src/mat/tests/ex41.c (revision 8fb5bd83c3955fefcf33a54e3bb66920a9fa884b)
1 
/* Help text printed by PetscInitialize() when the program is run with -help. */
static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  no of domains per processor \n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
7 
8 #include <petscmat.h>
9 
/*
   Loads the same binary matrix twice -- once as a parallel MPIAIJ matrix A and
   once as a sequential SEQAIJ matrix B -- builds nd random index sets per
   process, grows each set by ov levels of overlap with MatIncreaseOverlap()
   on both matrices, and checks that the parallel and sequential results agree.
*/
int main(int argc,char **args)
{
  PetscInt       nd = 2,ov = 1,i,j,m,n,*idx,lsize;
  PetscMPIInt    rank;
  PetscBool      flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  /* -f is mandatory: without the check below, file[] would be used uninitialized
     when the option is absent (as done in the companion example ex40.c) */
  PetscCall(PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),&flg));
  PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must indicate binary file with the -f option");
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL));

  /* Read matrix in parallel (MPIAIJ) */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetType(A,MATMPIAIJ));
  PetscCall(MatLoad(A,fd));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the same matrix again, sequentially on each process (SEQAIJ) */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd));
  PetscCall(MatCreate(PETSC_COMM_SELF,&B));
  PetscCall(MatSetType(B,MATSEQAIJ));
  PetscCall(MatLoad(B,fd));
  PetscCall(PetscViewerDestroy(&fd));

  /* Create the random number generator */
  PetscCall(MatGetSize(A,&m,&n));
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF,&r));
  PetscCall(PetscRandomSetFromOptions(r));

  /* Allocate the IS arrays and the scratch index buffer (at most m indices per set) */
  PetscCall(PetscMalloc1(nd,&is1));
  PetscCall(PetscMalloc1(nd,&is2));
  PetscCall(PetscMalloc1(m ,&idx));

  /* Create the random index sets; identical contents go into is1 (parallel
     test) and is2 (sequential reference) */
  for (i=0; i<nd; i++) {
    /* burn `rank` values so each process draws a different stream position */
    for (j=0; j<rank; j++) {
      PetscCall(PetscRandomGetValue(r,&rand));
    }
    PetscCall(PetscRandomGetValue(r,&rand));
    lsize = (PetscInt)(rand*m);          /* random subdomain size in [0,m) */
    for (j=0; j<lsize; j++) {
      PetscCall(PetscRandomGetValue(r,&rand));
      idx[j] = (PetscInt)(rand*m);       /* random global row index in [0,m) */
    }
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is1+i));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is2+i));
  }

  PetscCall(MatIncreaseOverlap(A,nd,is1,ov));
  PetscCall(MatIncreaseOverlap(B,nd,is2,ov));

  /* Now see if the serial and parallel cases have the same answers */
  for (i=0; i<nd; ++i) {
    PetscInt sz1,sz2;
    PetscCall(ISEqual(is1[i],is2[i],&flg));
    PetscCall(ISGetSize(is1[i],&sz1));
    PetscCall(ISGetSize(is2[i],&sz2));
    PetscCheck(flg,PETSC_COMM_SELF,PETSC_ERR_PLIB,"proc:[%d], i=%" PetscInt_FMT ", flg =%d  sz1 = %" PetscInt_FMT " sz2 = %" PetscInt_FMT,rank,i,(int)flg,sz1,sz2);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(PetscRandomDestroy(&r));
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFree(idx));
  PetscCall(PetscFinalize());
  return 0;
}
93 
94 /*TEST
95 
96    build:
97       requires: !complex
98 
99    test:
100       nsize: 3
101       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
102       args: -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1
103 
104 TEST*/
105