xref: /petsc/src/mat/tests/ex170.c (revision 58d68138c660dfb4e9f5b03334792cd4f2ffd7cc)
1 static char help[] = "Scalable algorithm for Connected Components problem.\n\
2 Entails changing the MatMult() for this matrix.\n\n\n";
3 
4 #include <petscmat.h>
5 
6 PETSC_EXTERN PetscErrorCode MatMultMax_SeqAIJ(Mat, Vec, Vec);
7 PETSC_EXTERN PetscErrorCode MatMultAddMax_SeqAIJ(Mat, Vec, Vec, Vec);
8 #include <../src/mat/impls/aij/mpi/mpiaij.h>
9 
10 /*
11   Paper with Ananth: Frobenius norm of band was good proxy, but really want to know the rank outside
12 
13   LU for diagonal blocks must do shifting instead of pivoting, preferably shifting individual rows (like Pardiso)
14 
15   Draw picture of flow of reordering
16 
17   Measure Forbenius norm of the blocks being dropped by Truncated SPIKE (might be contaminated by pivoting in LU)
18 
19   Report on using Florida matrices (Maxim, Murat)
20 */
21 
22 /*
23 I have thought about how to do this. Here is a prototype algorithm. Let A be
24 the adjacency matrix (0 or 1), and let each component be identified by the
25 lowest numbered vertex in it. We initialize a vector c so that each vertex is
26 a component, c_i = i. Now we act on c with A, using a special product
27 
28   c = A * c
29 
30 where we replace addition with min. The fixed point of this operation is a vector
31 c which is the component for each vertex. The number of iterates is
32 
33   max_{components} depth of BFS tree for component
34 
35 We can accelerate this algorithm by preprocessing all local domains using the
36 same algorithm. Then the number of iterations is bounded by the depth of the BFS
37 tree for the graph on supervertices defined over local components, which is
38 bounded by p. In practice, this should be very fast.
39 */
40 
41 /* Only isolated vertices get a 1 on the diagonal */
42 PetscErrorCode CreateGraph(MPI_Comm comm, PetscInt testnum, Mat *A) {
43   Mat G;
44 
45   PetscFunctionBegin;
46   PetscCall(MatCreate(comm, &G));
47   /* The identity matrix */
48   switch (testnum) {
49   case 0: {
50     Vec D;
51 
52     PetscCall(MatSetSizes(G, PETSC_DETERMINE, PETSC_DETERMINE, 5, 5));
53     PetscCall(MatSetUp(G));
54     PetscCall(MatCreateVecs(G, &D, NULL));
55     PetscCall(VecSet(D, 1.0));
56     PetscCall(MatDiagonalSet(G, D, INSERT_VALUES));
57     PetscCall(VecDestroy(&D));
58   } break;
59   case 1: {
60     PetscScalar vals[3] = {1.0, 1.0, 1.0};
61     PetscInt    cols[3];
62     PetscInt    rStart, rEnd, row;
63 
64     PetscCall(MatSetSizes(G, PETSC_DETERMINE, PETSC_DETERMINE, 5, 5));
65     PetscCall(MatSetFromOptions(G));
66     PetscCall(MatSeqAIJSetPreallocation(G, 2, NULL));
67     PetscCall(MatSetUp(G));
68     PetscCall(MatGetOwnershipRange(G, &rStart, &rEnd));
69     row     = 0;
70     cols[0] = 0;
71     cols[1] = 1;
72     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
73     row     = 1;
74     cols[0] = 0;
75     cols[1] = 1;
76     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
77     row     = 2;
78     cols[0] = 2;
79     cols[1] = 3;
80     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
81     row     = 3;
82     cols[0] = 3;
83     cols[1] = 4;
84     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
85     row     = 4;
86     cols[0] = 4;
87     cols[1] = 2;
88     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
89     PetscCall(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY));
90     PetscCall(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY));
91   } break;
92   case 2: {
93     PetscScalar vals[3] = {1.0, 1.0, 1.0};
94     PetscInt    cols[3];
95     PetscInt    rStart, rEnd, row;
96 
97     PetscCall(MatSetSizes(G, PETSC_DETERMINE, PETSC_DETERMINE, 5, 5));
98     PetscCall(MatSetFromOptions(G));
99     PetscCall(MatSeqAIJSetPreallocation(G, 2, NULL));
100     PetscCall(MatSetUp(G));
101     PetscCall(MatGetOwnershipRange(G, &rStart, &rEnd));
102     row     = 0;
103     cols[0] = 0;
104     cols[1] = 4;
105     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
106     row     = 1;
107     cols[0] = 1;
108     cols[1] = 2;
109     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
110     row     = 2;
111     cols[0] = 2;
112     cols[1] = 3;
113     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
114     row     = 3;
115     cols[0] = 3;
116     cols[1] = 1;
117     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
118     row     = 4;
119     cols[0] = 0;
120     cols[1] = 4;
121     if ((row >= rStart) && (row < rEnd)) PetscCall(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
122     PetscCall(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY));
123     PetscCall(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY));
124   } break;
125   default: SETERRQ(comm, PETSC_ERR_PLIB, "Unknown test %d", testnum);
126   }
127   *A = G;
128   PetscFunctionReturn(0);
129 }
130 
131 int main(int argc, char **argv) {
132   MPI_Comm     comm;
133   Mat          A;    /* A graph */
134   Vec          c;    /* A vector giving the component of each vertex */
135   Vec          cold; /* The vector c from the last iteration */
136   PetscScalar *carray;
137   PetscInt     testnum = 0;
138   PetscInt     V, vStart, vEnd, v, n;
139   PetscMPIInt  size;
140 
141   PetscFunctionBeginUser;
142   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
143   comm = PETSC_COMM_WORLD;
144   PetscCallMPI(MPI_Comm_size(comm, &size));
145   /* Use matrix to encode a graph */
146   PetscCall(PetscOptionsGetInt(NULL, NULL, "-testnum", &testnum, NULL));
147   PetscCall(CreateGraph(comm, testnum, &A));
148   PetscCall(MatGetSize(A, &V, NULL));
149   /* Replace matrix-vector multiplication with one that calculates the minimum rather than the sum */
150   if (size == 1) {
151     PetscCall(MatShellSetOperation(A, MATOP_MULT, (void(*))MatMultMax_SeqAIJ));
152   } else {
153     Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;
154 
155     PetscCall(MatShellSetOperation(a->A, MATOP_MULT, (void(*))MatMultMax_SeqAIJ));
156     PetscCall(MatShellSetOperation(a->B, MATOP_MULT, (void(*))MatMultMax_SeqAIJ));
157     PetscCall(MatShellSetOperation(a->B, MATOP_MULT_ADD, (void(*))MatMultAddMax_SeqAIJ));
158   }
159   /* Initialize each vertex as a separate component */
160   PetscCall(MatCreateVecs(A, &c, NULL));
161   PetscCall(MatGetOwnershipRange(A, &vStart, &vEnd));
162   PetscCall(VecGetArray(c, &carray));
163   for (v = vStart; v < vEnd; ++v) { carray[v - vStart] = v; }
164   PetscCall(VecRestoreArray(c, &carray));
165   /* Preprocess in parallel to find local components */
166   /* Multiply until c does not change */
167   PetscCall(VecDuplicate(c, &cold));
168   for (v = 0; v < V; ++v) {
169     Vec       cnew = cold;
170     PetscBool stop;
171 
172     PetscCall(MatMult(A, c, cnew));
173     PetscCall(VecEqual(c, cnew, &stop));
174     if (stop) break;
175     cold = c;
176     c    = cnew;
177   }
178   /* Report */
179   PetscCall(VecUniqueEntries(c, &n, NULL));
180   PetscCall(PetscPrintf(comm, "Components: %d Iterations: %d\n", n, v));
181   PetscCall(VecView(c, PETSC_VIEWER_STDOUT_WORLD));
182   /* Cleanup */
183   PetscCall(VecDestroy(&c));
184   PetscCall(VecDestroy(&cold));
185   PetscCall(PetscFinalize());
186   return 0;
187 }
188