
static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of subdomains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between subdomains\n\n";
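
/*
   Example invocation, mirroring the TEST block at the bottom of this file
   (the data file path assumes the PETSc test datafiles are installed):

     mpiexec -n 3 ./ex41 -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1
*/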

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscInt       nd = 2,ov=1,i,j,m,n,*idx,lsize;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  CHKERRQ(PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),&flg));
  PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_USER,"Must indicate a file containing a binary matrix with the -f option");
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL));
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL));

  /* Read the matrix in parallel as an MPIAIJ matrix */
  CHKERRQ(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd));
  CHKERRQ(MatCreate(PETSC_COMM_WORLD,&A));
  CHKERRQ(MatSetType(A,MATMPIAIJ));
  CHKERRQ(MatLoad(A,fd));
  CHKERRQ(PetscViewerDestroy(&fd));

  /* Read the same matrix again, as a sequential SEQAIJ matrix on each process */
  CHKERRQ(PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd));
  CHKERRQ(MatCreate(PETSC_COMM_SELF,&B));
  CHKERRQ(MatSetType(B,MATSEQAIJ));
  CHKERRQ(MatLoad(B,fd));
  CHKERRQ(PetscViewerDestroy(&fd));

  /* Get the matrix dimensions and create the random number generator */
  CHKERRQ(MatGetSize(A,&m,&n));
  CHKERRQ(PetscRandomCreate(PETSC_COMM_SELF,&r));
  CHKERRQ(PetscRandomSetFromOptions(r));

  /* Allocate the arrays of index sets (one pair per subdomain) and a scratch index buffer */
  CHKERRQ(PetscMalloc1(nd,&is1));
  CHKERRQ(PetscMalloc1(nd,&is2));
  CHKERRQ(PetscMalloc1(m,&idx));
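  /* PetscRandomGetValue() returns values in the default interval [0,1), so the
     sizes and indices computed below are always strictly smaller than m and the
     scratch buffer idx, of length m, is large enough */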

  /* Create the random index sets */
  for (i=0; i<nd; i++) {
    /* Discard 'rank' values so that each process draws a different subsequence */
    for (j=0; j<rank; j++) {
      CHKERRQ(PetscRandomGetValue(r,&rand));
    }
    /* Pick a random subdomain size lsize < m */
    CHKERRQ(PetscRandomGetValue(r,&rand));
    lsize = (PetscInt)(rand*m);
    /* Fill the subdomain with lsize random (possibly repeated) row indices */
    for (j=0; j<lsize; j++) {
      CHKERRQ(PetscRandomGetValue(r,&rand));
      idx[j] = (PetscInt)(rand*m);
    }
    /* Two identical copies: is1 for the parallel matrix A, is2 for the sequential matrix B */
    CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is1+i));
    CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is2+i));
  }
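
  /*
     Increase the overlap of each subdomain through the parallel MPIAIJ code
     path (on A) and through the sequential SEQAIJ path (on B); both start
     from identical index sets, so the enlarged sets must agree
  */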
  CHKERRQ(MatIncreaseOverlap(A,nd,is1,ov));
  CHKERRQ(MatIncreaseOverlap(B,nd,is2,ov));

  /* Check that the sequential and parallel cases produce the same answers */
  for (i=0; i<nd; ++i) {
    PetscInt sz1,sz2;
    CHKERRQ(ISEqual(is1[i],is2[i],&flg));
    CHKERRQ(ISGetSize(is1[i],&sz1));
    CHKERRQ(ISGetSize(is2[i],&sz2));
    PetscCheck(flg,PETSC_COMM_SELF,PETSC_ERR_PLIB,"proc:[%d], i=%" PetscInt_FMT ", flg =%d  sz1 = %" PetscInt_FMT " sz2 = %" PetscInt_FMT,rank,i,(int)flg,sz1,sz2);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    CHKERRQ(ISDestroy(&is1[i]));
    CHKERRQ(ISDestroy(&is2[i]));
  }
  CHKERRQ(PetscRandomDestroy(&r));
  CHKERRQ(PetscFree(is1));
  CHKERRQ(PetscFree(is2));
  CHKERRQ(MatDestroy(&A));
  CHKERRQ(MatDestroy(&B));
  CHKERRQ(PetscFree(idx));
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1

TEST*/