xref: /petsc/src/mat/tests/ex170.c (revision daa037dfd3c3bec8dc8659548d2b20b07c1dc6de)
1 static char help[] = "Scalable algorithm for Connected Components problem.\n\
2 Entails changing the MatMult() for this matrix.\n\n\n";
3 
4 #include <petscmat.h>
5 
6 PETSC_EXTERN PetscErrorCode MatMultMax_SeqAIJ(Mat,Vec,Vec);
7 PETSC_EXTERN PetscErrorCode MatMultAddMax_SeqAIJ(Mat,Vec,Vec,Vec);
8 #include <../src/mat/impls/aij/mpi/mpiaij.h>
9 
10 /*
11   Paper with Ananth: Frobenius norm of band was a good proxy, but really want to know the rank outside
12 
13   LU for diagonal blocks must do shifting instead of pivoting, preferably shifting individual rows (like Pardiso)
14 
15   Draw picture of flow of reordering
16 
17   Measure Frobenius norm of the blocks being dropped by Truncated SPIKE (might be contaminated by pivoting in LU)
18 
19   Report on using Florida matrices (Maxim, Murat)
20 */
21 
22 /*
23 I have thought about how to do this. Here is a prototype algorithm. Let A be
24 the adjacency matrix (0 or 1), and let each component be identified by the
25 lowest numbered vertex in it. We initialize a vector c so that each vertex is
26 a component, c_i = i. Now we act on c with A, using a special product
27 
28   c = A * c
29 
30 where we replace addition with min. The fixed point of this operation is a vector
31 c which is the component for each vertex. The number of iterates is
32 
33   max_{components} depth of BFS tree for component
34 
35 We can accelerate this algorithm by preprocessing all local domains using the
36 same algorithm. Then the number of iterations is bounded by the depth of the BFS
37 tree for the graph on supervertices defined over local components, which is
38 bounded by p. In practice, this should be very fast.
39 */
40 
41 /* Only isolated vertices get a 1 on the diagonal */
42 PetscErrorCode CreateGraph(MPI_Comm comm, PetscInt testnum, Mat *A)
43 {
44   Mat            G;
45 
46   PetscFunctionBegin;
47   PetscCall(MatCreate(comm, &G));
48   /* The identity matrix */
49   switch (testnum) {
50   case 0:
51   {
52     Vec D;
53 
54     PetscCall(MatSetSizes(G, PETSC_DETERMINE, PETSC_DETERMINE, 5, 5));
55     PetscCall(MatSetUp(G));
56     PetscCall(MatCreateVecs(G, &D, NULL));
57     PetscCall(VecSet(D, 1.0));
58     PetscCall(MatDiagonalSet(G, D, INSERT_VALUES));
59     PetscCall(VecDestroy(&D));
60   }
61   break;
62   case 1:
63   {
64     PetscScalar vals[3] = {1.0, 1.0, 1.0};
65     PetscInt    cols[3];
66     PetscInt    rStart, rEnd, row;
67 
68     PetscCall(MatSetSizes(G, PETSC_DETERMINE, PETSC_DETERMINE, 5, 5));
69     PetscCall(MatSetFromOptions(G));
70     PetscCall(MatSeqAIJSetPreallocation(G, 2, NULL));
71     PetscCall(MatSetUp(G));
72     PetscCall(MatGetOwnershipRange(G, &rStart, &rEnd));
73     row  = 0;
74     cols[0] = 0; cols[1] = 1;
75     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
76     row  = 1;
77     cols[0] = 0; cols[1] = 1;
78     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
79     row  = 2;
80     cols[0] = 2; cols[1] = 3;
81     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
82     row  = 3;
83     cols[0] = 3; cols[1] = 4;
84     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
85     row  = 4;
86     cols[0] = 4; cols[1] = 2;
87     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
88     PetscCall(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY));
89     PetscCall(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY));
90   }
91   break;
92   case 2:
93   {
94     PetscScalar vals[3] = {1.0, 1.0, 1.0};
95     PetscInt    cols[3];
96     PetscInt    rStart, rEnd, row;
97 
98     PetscCall(MatSetSizes(G, PETSC_DETERMINE, PETSC_DETERMINE, 5, 5));
99     PetscCall(MatSetFromOptions(G));
100     PetscCall(MatSeqAIJSetPreallocation(G, 2, NULL));
101     PetscCall(MatSetUp(G));
102     PetscCall(MatGetOwnershipRange(G, &rStart, &rEnd));
103     row  = 0;
104     cols[0] = 0; cols[1] = 4;
105     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
106     row  = 1;
107     cols[0] = 1; cols[1] = 2;
108     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
109     row  = 2;
110     cols[0] = 2; cols[1] = 3;
111     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
112     row  = 3;
113     cols[0] = 3; cols[1] = 1;
114     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
115     row  = 4;
116     cols[0] = 0; cols[1] = 4;
117     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
118     PetscCall(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY));
119     PetscCall(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY));
120   }
121   break;
122   default:
123     SETERRQ(comm, PETSC_ERR_PLIB, "Unknown test %d", testnum);
124   }
125   *A = G;
126   PetscFunctionReturn(0);
127 }
128 
129 int main(int argc, char **argv)
130 {
131   MPI_Comm       comm;
132   Mat            A;    /* A graph */
133   Vec            c;    /* A vector giving the component of each vertex */
134   Vec            cold; /* The vector c from the last iteration */
135   PetscScalar   *carray;
136   PetscInt       testnum = 0;
137   PetscInt       V, vStart, vEnd, v, n;
138   PetscMPIInt    size;
139 
140   PetscCall(PetscInitialize(&argc, &argv, NULL,help));
141   comm = PETSC_COMM_WORLD;
142   PetscCallMPI(MPI_Comm_size(comm, &size));
143   /* Use matrix to encode a graph */
144   PetscCall(PetscOptionsGetInt(NULL,NULL, "-testnum", &testnum, NULL));
145   PetscCall(CreateGraph(comm, testnum, &A));
146   PetscCall(MatGetSize(A, &V, NULL));
147   /* Replace matrix-vector multiplication with one that calculates the minimum rather than the sum */
148   if (size == 1) {
149     PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)) MatMultMax_SeqAIJ));
150   } else {
151     Mat_MPIAIJ *a = (Mat_MPIAIJ *) A->data;
152 
153     PetscCall(MatShellSetOperation(a->A, MATOP_MULT, (void (*)) MatMultMax_SeqAIJ));
154     PetscCall(MatShellSetOperation(a->B, MATOP_MULT, (void (*)) MatMultMax_SeqAIJ));
155     PetscCall(MatShellSetOperation(a->B, MATOP_MULT_ADD, (void (*)) MatMultAddMax_SeqAIJ));
156   }
157   /* Initialize each vertex as a separate component */
158   PetscCall(MatCreateVecs(A, &c, NULL));
159   PetscCall(MatGetOwnershipRange(A, &vStart, &vEnd));
160   PetscCall(VecGetArray(c, &carray));
161   for (v = vStart; v < vEnd; ++v) {
162     carray[v-vStart] = v;
163   }
164   PetscCall(VecRestoreArray(c, &carray));
165   /* Preprocess in parallel to find local components */
166   /* Multiply until c does not change */
167   PetscCall(VecDuplicate(c, &cold));
168   for (v = 0; v < V; ++v) {
169     Vec       cnew = cold;
170     PetscBool stop;
171 
172     PetscCall(MatMult(A, c, cnew));
173     PetscCall(VecEqual(c, cnew, &stop));
174     if (stop) break;
175     cold = c;
176     c    = cnew;
177   }
178   /* Report */
179   PetscCall(VecUniqueEntries(c, &n, NULL));
180   PetscCall(PetscPrintf(comm, "Components: %d Iterations: %d\n", n, v));
181   PetscCall(VecView(c, PETSC_VIEWER_STDOUT_WORLD));
182   /* Cleanup */
183   PetscCall(VecDestroy(&c));
184   PetscCall(VecDestroy(&cold));
185   PetscCall(PetscFinalize());
186   return 0;
187 }
188