xref: /petsc/src/mat/tests/ex40.c (revision 8fb5bd83c3955fefcf33a54e3bb66920a9fa884b)

static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor \n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
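
/*
  Example invocation (a sketch based on the TEST block at the end of this file;
  assumes DATAFILESPATH points at the PETSc datafiles repository):
    mpiexec -n 5 ./ex40 -f ${DATAFILESPATH}/matrices/arco1 -nd 7 -ov 2
*/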

#include <petscmat.h>

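/*
  ISAllGatherDisjoint - gathers the piece of an index set owned by each rank into an
  array of sequential index sets, one entry per rank, returned on every process.
  The caller must destroy each returned IS and free the array.
*/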
PetscErrorCode ISAllGatherDisjoint(IS iis, IS** ois)
{
  IS             *is2,is;
  const PetscInt *idxs;
  PetscInt       i, ls,*sizes;
  PetscMPIInt    size;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)iis),&size));
  PetscCall(PetscMalloc1(size,&is2));
  PetscCall(PetscMalloc1(size,&sizes));
  PetscCall(ISGetLocalSize(iis,&ls));
  /* we don't have a public ISGetLayout */
  PetscCallMPI(MPI_Allgather(&ls,1,MPIU_INT,sizes,1,MPIU_INT,PetscObjectComm((PetscObject)iis)));
  PetscCall(ISAllGather(iis,&is));
  PetscCall(ISGetIndices(is,&idxs));
  for (i = 0, ls = 0; i < size; i++) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF,sizes[i],idxs+ls,PETSC_COPY_VALUES,&is2[i]));
    ls += sizes[i];
  }
  PetscCall(ISRestoreIndices(is,&idxs));
  PetscCall(ISDestroy(&is));
  PetscCall(PetscFree(sizes));
  *ois = is2;
  PetscFunctionReturn(0);
}

int main(int argc,char **args)
{
  PetscInt       nd = 2,ov = 1,ndpar,i,start,m,n,end,lsize;
  PetscMPIInt    rank;
  PetscBool      flg, useND = PETSC_FALSE;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCall(PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),&flg));
  PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_USER,"Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-nested_dissection",&useND,NULL));

  /* Read matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetType(A,MATMPIAIJ));
  PetscCall(MatLoad(A,fd));
  PetscCall(MatSetFromOptions(A));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the matrix again as a sequential matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd));
  PetscCall(MatCreate(PETSC_COMM_SELF,&B));
  PetscCall(MatSetType(B,MATSEQAIJ));
  PetscCall(MatLoad(B,fd));
  PetscCall(MatSetFromOptions(B));
  PetscCall(PetscViewerDestroy(&fd));

  /* Create the IS corresponding to subdomains */
  if (useND) {
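    /* Nested dissection path: one subdomain per rank.  is1[0] holds the rows assigned to
       this rank by the ND partitioning; is2 collects every rank's subdomain so the
       sequential matrix B can be given the same nd = size subdomains below. */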
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;

    ndpar = 1;
    PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
    nd   = (PetscInt)size;
    PetscCall(PetscMalloc1(ndpar,&is1));
    PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD,&part));
    PetscCall(MatPartitioningSetAdjacency(part,A));
    PetscCall(MatPartitioningSetFromOptions(part));
    PetscCall(MatPartitioningApplyND(part,&ndmap));
    PetscCall(MatPartitioningDestroy(&part));
    PetscCall(ISBuildTwoSided(ndmap,NULL,&is1[0]));
    PetscCall(ISDestroy(&ndmap));
    PetscCall(ISAllGatherDisjoint(is1[0],&is2));
  } else {
    /* Create the random Index Sets */
    PetscCall(PetscMalloc1(nd,&is1));
    PetscCall(PetscMalloc1(nd,&is2));

    PetscCall(MatGetSize(A,&m,&n));
    PetscCall(PetscRandomCreate(PETSC_COMM_SELF,&r));
    PetscCall(PetscRandomSetFromOptions(r));
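    /* is1 (used with the parallel matrix A) and is2 (used with the sequential matrix B)
       start out identical, so any difference left after MatIncreaseOverlap() is an error */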
    for (i=0; i<nd; i++) {
      PetscCall(PetscRandomGetValue(r,&rand));
      start = (PetscInt)(rand*m);
      PetscCall(PetscRandomGetValue(r,&rand));
      end   = (PetscInt)(rand*m);
      lsize =  end - start;
      if (start > end) { start = end; lsize = -lsize;}
      PetscCall(ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is1+i));
      PetscCall(ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is2+i));
    }
    ndpar = nd;
    PetscCall(PetscRandomDestroy(&r));
  }
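  /* Grow each subdomain by ov layers of overlap: in parallel on A, sequentially on B */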
  PetscCall(MatIncreaseOverlap(A,ndpar,is1,ov));
  PetscCall(MatIncreaseOverlap(B,nd,is2,ov));
  if (useND) {
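    /* After the parallel overlap only is1[0] (this rank's subdomain) is available locally;
       gather all subdomains so is1 and is2 can be compared entry by entry below */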
    IS *is;

    PetscCall(ISAllGatherDisjoint(is1[0],&is));
    PetscCall(ISDestroy(&is1[0]));
    PetscCall(PetscFree(is1));
    is1 = is;
  }
  /* Now see if the serial and parallel case have the same answers */
  for (i=0; i<nd; ++i) {
    PetscCall(ISEqual(is1[i],is2[i],&flg));
    if (!flg) {
      PetscCall(ISViewFromOptions(is1[i],NULL,"-err_view"));
      PetscCall(ISViewFromOptions(is2[i],NULL,"-err_view"));
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"proc:[%d], i=%" PetscInt_FMT ", flg =%d",rank,i,(int)flg);
    }
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 1
        args: -nd 7
      test:
        requires: parmetis
        suffix: 1_nd
        args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 2
        args: -nd 7
      test:
        requires: parmetis
        suffix: 2_nd
        args: -nested_dissection -mat_partitioning_type parmetis

TEST*/