static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";

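/*
  Example invocation, taken from the test specification at the bottom of this
  file:

    mpiexec -n 5 ./ex40 -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2 -nd 7

  Add -nested_dissection (paired in the tests with -mat_partitioning_type
  parmetis) to derive the subdomains from a nested dissection ordering instead
  of random strides; pass -err_view to view any index sets that fail the
  comparison below.
*/
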
#include <petscmat.h>

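/*
  ISAllGatherDisjoint - Gathers a parallel index set onto every process and
  splits the result into an array of sequential index sets, one per rank,
  preserving the original rank boundaries.

  The caller is responsible for destroying each returned IS and freeing the
  array itself.
*/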
PetscErrorCode ISAllGatherDisjoint(IS iis, IS **ois)
{
  IS             *is2,is;
  const PetscInt *idxs;
  PetscInt       i,ls,*sizes;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBeginUser;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)iis),&size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size,&is2);CHKERRQ(ierr);
  ierr = PetscMalloc1(size,&sizes);CHKERRQ(ierr);
  ierr = ISGetLocalSize(iis,&ls);CHKERRQ(ierr);
  /* there is no public ISGetLayout, so gather the local sizes by hand */
  ierr = MPI_Allgather(&ls,1,MPIU_INT,sizes,1,MPIU_INT,PetscObjectComm((PetscObject)iis));CHKERRQ(ierr);
  ierr = ISAllGather(iis,&is);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxs);CHKERRQ(ierr);
  /* split the gathered indices back into one sequential IS per rank */
  for (i = 0, ls = 0; i < size; i++) {
    ierr = ISCreateGeneral(PETSC_COMM_SELF,sizes[i],idxs+ls,PETSC_COPY_VALUES,&is2[i]);CHKERRQ(ierr);
    ls += sizes[i];
  }
  ierr = ISRestoreIndices(is,&idxs);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = PetscFree(sizes);CHKERRQ(ierr);
  *ois = is2;
  PetscFunctionReturn(0);
}

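/*
  Illustrative usage sketch ('myis' stands for any parallel IS here, 'size'
  for the size of its communicator):

    IS *parts;
    ierr = ISAllGatherDisjoint(myis,&parts);CHKERRQ(ierr);
    for (i=0; i<size; i++) {ierr = ISDestroy(&parts[i]);CHKERRQ(ierr);}
    ierr = PetscFree(parts);CHKERRQ(ierr);
*/
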
int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscInt       nd = 2,ov = 1,ndpar,i,start,m,n,end,lsize;
  PetscMPIInt    rank;
  PetscBool      flg, useND = PETSC_FALSE;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Must use -f filename to indicate a file containing a PETSc binary matrix");
  ierr = PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-nested_dissection",&useND,NULL);CHKERRQ(ierr);

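  /*
     Load the same matrix twice: once distributed over PETSC_COMM_WORLD and
     once sequentially on every rank, so that the parallel and sequential
     MatIncreaseOverlap() results can be compared entry by entry below.
  */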
  /* Read matrix */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatLoad(A,fd);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);

  /* Read the matrix again as a sequential matrix */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
  ierr = MatLoad(B,fd);CHKERRQ(ierr);
  ierr = MatSetFromOptions(B);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);

  /* Create the IS corresponding to subdomains */
  if (useND) {
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;

    ndpar = 1;
    ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
    nd   = (PetscInt)size;
    ierr = PetscMalloc1(ndpar,&is1);CHKERRQ(ierr);
    ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
    ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
    ierr = MatPartitioningApplyND(part,&ndmap);CHKERRQ(ierr);
    ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
    ierr = ISBuildTwoSided(ndmap,NULL,&is1[0]);CHKERRQ(ierr);
    ierr = ISDestroy(&ndmap);CHKERRQ(ierr);
    ierr = ISAllGatherDisjoint(is1[0],&is2);CHKERRQ(ierr);
  } else {
    /* Create random index sets: identical strided ISs for the parallel and sequential runs */
    ierr = PetscMalloc1(nd,&is1);CHKERRQ(ierr);
    ierr = PetscMalloc1(nd,&is2);CHKERRQ(ierr);

    ierr = MatGetSize(A,&m,&n);CHKERRQ(ierr);
    ierr = PetscRandomCreate(PETSC_COMM_SELF,&r);CHKERRQ(ierr);
    ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
    for (i=0; i<nd; i++) {
      /* draw a random [start,end) row range; swap the endpoints if they come out reversed */
      ierr  = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
      start = (PetscInt)(rand*m);
      ierr  = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
      end   = (PetscInt)(rand*m);
      lsize = end - start;
      if (start > end) { start = end; lsize = -lsize; }
      ierr = ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is1+i);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is2+i);CHKERRQ(ierr);
    }
    ndpar = nd;
    ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
  }
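  /* Grow each subdomain by 'ov' levels of overlap, in parallel on A and
     sequentially on B; the resulting index sets should be identical */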
  ierr = MatIncreaseOverlap(A,ndpar,is1,ov);CHKERRQ(ierr);
  ierr = MatIncreaseOverlap(B,nd,is2,ov);CHKERRQ(ierr);
  if (useND) {
    IS *is;

    /* in the nested dissection case the parallel result is a single IS on
       PETSC_COMM_WORLD; gather it into per-rank pieces comparable with is2 */
    ierr = ISAllGatherDisjoint(is1[0],&is);CHKERRQ(ierr);
    ierr = ISDestroy(&is1[0]);CHKERRQ(ierr);
    ierr = PetscFree(is1);CHKERRQ(ierr);
    is1 = is;
  }
  /* Check that the serial and parallel cases give the same answers */
  for (i=0; i<nd; ++i) {
    ierr = ISEqual(is1[i],is2[i],&flg);CHKERRQ(ierr);
    if (!flg) {
      ierr = ISViewFromOptions(is1[i],NULL,"-err_view");CHKERRQ(ierr);
      ierr = ISViewFromOptions(is2[i],NULL,"-err_view");CHKERRQ(ierr);
      SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"proc:[%d], i=%D, flg=%d",rank,i,(int)flg);
    }
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ierr = ISDestroy(&is1[i]);CHKERRQ(ierr);
    ierr = ISDestroy(&is2[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(is1);CHKERRQ(ierr);
  ierr = PetscFree(is2);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 1
        args: -nd 7
      test:
        requires: parmetis
        suffix: 1_nd
        args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 2
        args: -nd 7
      test:
        requires: parmetis
        suffix: 2_nd
        args: -nested_dissection -mat_partitioning_type parmetis

TEST*/