static char help[] = "Partition a tiny grid using hierarchical partitioning.\n\n";

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             A;
  PetscMPIInt     rank,size;
  PetscInt        *ia,*ja;
  MatPartitioning part;
  IS              is,isn,isrows;
  IS              coarseparts,fineparts;
  MPI_Comm        comm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCheck(size == 4,comm,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 4 processors");
  PetscCallMPI(MPI_Comm_rank(comm,&rank));

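  /*
     Each process owns four consecutive vertices of a 4x4 structured grid
     (16 vertices, numbered row by row).  ia/ja hold the local rows of the
     grid's adjacency graph in CSR format: the neighbors of local vertex i
     are ja[ia[i]] .. ja[ia[i+1]-1].  ja is over-allocated to 16 entries so
     the same allocation suffices on every rank.
  */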
  PetscCall(PetscMalloc1(5,&ia));
  PetscCall(PetscMalloc1(16,&ja));
  if (rank == 0) {
    ja[0] = 1; ja[1] = 4; ja[2] = 0; ja[3] = 2; ja[4] = 5; ja[5] = 1; ja[6] = 3; ja[7] = 6;
    ja[8] = 2; ja[9] = 7;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  } else if (rank == 1) {
    ja[0] = 0; ja[1] = 5; ja[2] = 8; ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9; ja[7] = 2;
    ja[8] = 5; ja[9] = 7; ja[10] = 10; ja[11] = 3; ja[12] = 6; ja[13] = 11;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else if (rank == 2) {
    ja[0] = 4; ja[1] = 9; ja[2] = 12; ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13; ja[7] = 6;
    ja[8] = 9; ja[9] = 11; ja[10] = 14; ja[11] = 7; ja[12] = 10; ja[13] = 15;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else {
    ja[0] = 8; ja[1] = 13; ja[2] = 9; ja[3] = 12; ja[4] = 14; ja[5] = 10; ja[6] = 13; ja[7] = 15;
    ja[8] = 11; ja[9] = 14;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  }
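  /*
     MatCreateMPIAdj() does not copy ia and ja: the matrix takes ownership of
     both arrays and frees them when A is destroyed, which is why they are
     allocated with PetscMalloc1() and never freed explicitly here.
  */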
  PetscCall(MatCreateMPIAdj(comm,4,16,ia,ja,NULL,&A));
  PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD));
  /*
   Partition the graph of the matrix
  */
  PetscCall(MatPartitioningCreate(comm,&part));
  PetscCall(MatPartitioningSetAdjacency(part,A));
  PetscCall(MatPartitioningSetType(part,MATPARTITIONINGHIERARCH));
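  /*
     Hierarchical partitioning works in two stages: the graph is first split
     into coarse parts (e.g. one per compute node) and each coarse part is
     then split into fine parts (e.g. one per core within a node).
  */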
  PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part,2));
  PetscCall(MatPartitioningHierarchicalSetNfineparts(part,2));
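  /* 2 coarse parts, each refined into 2 fine parts, give 4 parts in total, one per MPI rank */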
  PetscCall(MatPartitioningSetFromOptions(part));
  /* get the new owning process of each vertex */
  PetscCall(MatPartitioningApply(part,&is));
  /* coarse part assigned to each local vertex */
  PetscCall(MatPartitioningHierarchicalGetCoarseparts(part,&coarseparts));
  PetscCall(ISView(coarseparts,PETSC_VIEWER_STDOUT_WORLD));
  /* fine part (the subdivision of a coarse part) assigned to each local vertex */
  PetscCall(MatPartitioningHierarchicalGetFineparts(part,&fineparts));
  PetscCall(ISView(fineparts,PETSC_VIEWER_STDOUT_WORLD));
  /* the resulting partitioning */
  PetscCall(ISView(is,PETSC_VIEWER_STDOUT_WORLD));
  /* get the new global number of each old global number */
  PetscCall(ISPartitioningToNumbering(is,&isn));
  PetscCall(ISView(isn,PETSC_VIEWER_STDOUT_WORLD));
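  /* build, on each process, the list of global row indices assigned to it by the partitioning */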
  PetscCall(ISBuildTwoSided(is,NULL,&isrows));
  PetscCall(ISView(isrows,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&coarseparts));
  PetscCall(ISDestroy(&fineparts));
  PetscCall(ISDestroy(&isrows));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatPartitioningDestroy(&part));
  /*
    Free work space.  All PETSc objects should be destroyed when they
    are no longer needed.
  */
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}
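
/*
   Example usage (a sketch; assumes a PETSc build configured with ParMETIS and
   that the executable built from this file is named ex82):
       mpiexec -n 4 ./ex82
*/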

/*TEST

   test:
      nsize: 4
      requires: parmetis
      TODO: tests cannot use parmetis because it produces different results on different machines

TEST*/