static char help[] = "Partition a tiny grid using hierarchical partitioning and increase overlap using MatIncreaseOverlapSplit.\n\n";

/*T
   Concepts: partitioning
   Processors: 4
T*/

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             A,B;
  PetscMPIInt     rank,size,membershipKey;
  PetscInt        *ia,*ja,*indices_sc,isrows_localsize;
  const PetscInt  *indices;
  MatPartitioning part;
  IS              is,isrows,isrows_sc;
  IS              coarseparts,fineparts;
  MPI_Comm        comm,scomm;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCheckFalse(size != 4,comm,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 4 processors");
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  /* Set up a small matrix: the adjacency graph of a 4x4 grid (16 vertices), stored in CSR
     form (ia = row pointers, ja = column indices) with 4 local rows on each of the 4 ranks */
  PetscCall(PetscMalloc1(5,&ia));
  PetscCall(PetscMalloc1(16,&ja));
  if (rank == 0) {
    ja[0] = 1; ja[1] = 4; ja[2] = 0; ja[3] = 2; ja[4] = 5; ja[5] = 1; ja[6] = 3; ja[7] = 6;
    ja[8] = 2; ja[9] = 7;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 0;
  } else if (rank == 1) {
    ja[0] = 0; ja[1] = 5; ja[2] = 8; ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9; ja[7] = 2;
    ja[8] = 5; ja[9] = 7; ja[10] = 10; ja[11] = 3; ja[12] = 6; ja[13] = 11;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 0;
  } else if (rank == 2) {
    ja[0] = 4; ja[1] = 9; ja[2] = 12; ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13; ja[7] = 6;
    ja[8] = 9; ja[9] = 11; ja[10] = 14; ja[11] = 7; ja[12] = 10; ja[13] = 15;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 1;
  } else {
    ja[0] = 8; ja[1] = 13; ja[2] = 9; ja[3] = 12; ja[4] = 14; ja[5] = 10; ja[6] = 13; ja[7] = 15;
    ja[8] = 11; ja[9] = 14;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 1;
  }
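  /* MatCreateMPIAdj() takes ownership of ia and ja (they are freed when A is destroyed),
     so they are not freed explicitly in this example */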
  PetscCall(MatCreateMPIAdj(comm,4,16,ia,ja,NULL,&A));
  PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD));
  /*
   Partition the graph of the matrix
  */
  PetscCall(MatPartitioningCreate(comm,&part));
  PetscCall(MatPartitioningSetAdjacency(part,A));
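  /* hierarchical partitioning: 2 coarse parts, each subdivided into 2 fine parts, so the
     16 vertices end up in 4 parts, one per rank */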
  PetscCall(MatPartitioningSetType(part,MATPARTITIONINGHIERARCH));
  PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part,2));
  PetscCall(MatPartitioningHierarchicalSetNfineparts(part,2));
  PetscCall(MatPartitioningSetFromOptions(part));
  /* get the new owning process of each local vertex */
  PetscCall(MatPartitioningApply(part,&is));
  /* coarse parts */
  PetscCall(MatPartitioningHierarchicalGetCoarseparts(part,&coarseparts));
  PetscCall(ISView(coarseparts,PETSC_VIEWER_STDOUT_WORLD));
  /* fine parts */
  PetscCall(MatPartitioningHierarchicalGetFineparts(part,&fineparts));
  PetscCall(ISView(fineparts,PETSC_VIEWER_STDOUT_WORLD));
  /* partitioning */
  PetscCall(ISView(is,PETSC_VIEWER_STDOUT_WORLD));
  /* compute the incoming rows: the global vertices assigned to this process by the partitioning */
  PetscCall(ISBuildTwoSided(is,NULL,&isrows));
  PetscCall(ISView(isrows,PETSC_VIEWER_STDOUT_WORLD));
  /* create a sub-communicator: membershipKey groups ranks {0,1} and {2,3} into two sub-communicators */
  PetscCallMPI(MPI_Comm_split(comm,membershipKey,rank,&scomm));
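  /* copy the local indices of isrows so that a new IS owning them can be created on the
     sub-communicator below */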
  PetscCall(ISGetLocalSize(isrows,&isrows_localsize));
  PetscCall(PetscMalloc1(isrows_localsize,&indices_sc));
  PetscCall(ISGetIndices(isrows,&indices));
  PetscCall(PetscArraycpy(indices_sc,indices,isrows_localsize));
  PetscCall(ISRestoreIndices(isrows,&indices));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&coarseparts));
  PetscCall(ISDestroy(&fineparts));
  PetscCall(ISDestroy(&isrows));
  PetscCall(MatPartitioningDestroy(&part));
  /* create a sub-IS on the sub-communicator; PETSC_OWN_POINTER hands ownership of indices_sc to the IS */
  PetscCall(ISCreateGeneral(scomm,isrows_localsize,indices_sc,PETSC_OWN_POINTER,&isrows_sc));
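  /* MatIncreaseOverlapSplit() is called below on an assembled MPIAIJ matrix, so convert the
     MATMPIADJ graph first */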
  PetscCall(MatConvert(A,MATMPIAIJ,MAT_INITIAL_MATRIX,&B));
  PetscCall(MatView(B,PETSC_VIEWER_STDOUT_WORLD));
  /* increase the overlap of the sub-index set by one layer, using the parallel matrix B that
     lives on the full communicator */
  PetscCall(MatIncreaseOverlapSplit(B,1,&isrows_sc,1));
  PetscCall(ISView(isrows_sc,NULL));
  PetscCall(ISDestroy(&isrows_sc));
  /*
    Free work space.  All PETSc objects should be destroyed when they
    are no longer needed.
  */
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  /* the sub-communicator created with MPI_Comm_split() must be freed by the user */
  PetscCallMPI(MPI_Comm_free(&scomm));
  PetscCall(PetscFinalize());
  return 0;
}