xref: /petsc/src/mat/tests/ex193.c (revision 58d68138c660dfb4e9f5b03334792cd4f2ffd7cc)
1 /*
2  * ex193.c
3  *
4  *  Created on: Jul 29, 2015
5  *      Author: Fande Kong fdkong.jd@gmail.com
6  */
/*
 * This example demonstrates how to use the hierarchical partitioning approach.
 */
10 
11 #include <petscmat.h>
12 
13 static char help[] = "Illustrates use of hierarchical partitioning.\n";
14 
15 int main(int argc, char **args) {
16   Mat             A;    /* matrix */
17   PetscInt        m, n; /* mesh dimensions in x- and y- directions */
18   PetscInt        i, j, Ii, J, Istart, Iend;
19   PetscMPIInt     size;
20   PetscScalar     v;
21   MatPartitioning part;
22   IS              coarseparts, fineparts;
23   IS              is, isn, isrows;
24   MPI_Comm        comm;
25 
26   PetscFunctionBeginUser;
27   PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
28   comm = PETSC_COMM_WORLD;
29   PetscCallMPI(MPI_Comm_size(comm, &size));
30   PetscOptionsBegin(comm, NULL, "ex193", "hierarchical partitioning");
31   m = 15;
32   PetscCall(PetscOptionsInt("-M", "Number of mesh points in the x-direction", "partitioning", m, &m, NULL));
33   n = 15;
34   PetscCall(PetscOptionsInt("-N", "Number of mesh points in the y-direction", "partitioning", n, &n, NULL));
35   PetscOptionsEnd();
36 
37   /*
38      Assemble the matrix for the five point stencil (finite difference), YET AGAIN
39   */
40   PetscCall(MatCreate(comm, &A));
41   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
42   PetscCall(MatSetFromOptions(A));
43   PetscCall(MatSetUp(A));
44   PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
45   for (Ii = Istart; Ii < Iend; Ii++) {
46     v = -1.0;
47     i = Ii / n;
48     j = Ii - i * n;
49     if (i > 0) {
50       J = Ii - n;
51       PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
52     }
53     if (i < m - 1) {
54       J = Ii + n;
55       PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
56     }
57     if (j > 0) {
58       J = Ii - 1;
59       PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
60     }
61     if (j < n - 1) {
62       J = Ii + 1;
63       PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
64     }
65     v = 4.0;
66     PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
67   }
68   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
69   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
70   PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
71   /*
72    Partition the graph of the matrix
73   */
74   PetscCall(MatPartitioningCreate(comm, &part));
75   PetscCall(MatPartitioningSetAdjacency(part, A));
76   PetscCall(MatPartitioningSetType(part, MATPARTITIONINGHIERARCH));
77   PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part, 2));
78   PetscCall(MatPartitioningHierarchicalSetNfineparts(part, 4));
79   PetscCall(MatPartitioningSetFromOptions(part));
80   /* get new processor owner number of each vertex */
81   PetscCall(MatPartitioningApply(part, &is));
82   /* coarse parts */
83   PetscCall(MatPartitioningHierarchicalGetCoarseparts(part, &coarseparts));
84   PetscCall(ISView(coarseparts, PETSC_VIEWER_STDOUT_WORLD));
85   /* fine parts */
86   PetscCall(MatPartitioningHierarchicalGetFineparts(part, &fineparts));
87   PetscCall(ISView(fineparts, PETSC_VIEWER_STDOUT_WORLD));
88   /* partitioning */
89   PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
90   /* get new global number of each old global number */
91   PetscCall(ISPartitioningToNumbering(is, &isn));
92   PetscCall(ISView(isn, PETSC_VIEWER_STDOUT_WORLD));
93   PetscCall(ISBuildTwoSided(is, NULL, &isrows));
94   PetscCall(ISView(isrows, PETSC_VIEWER_STDOUT_WORLD));
95   PetscCall(ISDestroy(&is));
96   PetscCall(ISDestroy(&coarseparts));
97   PetscCall(ISDestroy(&fineparts));
98   PetscCall(ISDestroy(&isrows));
99   PetscCall(ISDestroy(&isn));
100   PetscCall(MatPartitioningDestroy(&part));
101   PetscCall(MatDestroy(&A));
102   PetscCall(PetscFinalize());
103   return 0;
104 }
105 
106 /*TEST
107 
108    test:
109       nsize: 4
110       args: -mat_partitioning_hierarchical_Nfineparts 2
111       requires: parmetis
      TODO: cannot run because parmetis does not reproduce across all machines, probably due to a nonportable random number generator
113 
114 TEST*/
115