static char help[] = "Partition a tiny grid using hierarchical partitioning and increase the overlap using MatIncreaseOverlapSplit().\n\n";

/*T
   Concepts: partitioning
   Processors: 4
T*/
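
/*
  A sketch of how this example is typically run (assumes PETSc was configured
  with a parallel graph partitioner such as ParMETIS for the underlying
  coarse- and fine-level partitioning):

      mpiexec -n 4 ./ex83
*/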

/*
  Include "petscmat.h" so that we can use matrices. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             A,B;
  PetscErrorCode  ierr;
  PetscMPIInt     rank,size,membershipKey;
  PetscInt        *ia,*ja,*indices_sc,isrows_localsize;
  const PetscInt  *indices;
  MatPartitioning part;
  IS              is,isrows,isrows_sc;
  IS              coarseparts,fineparts;
  MPI_Comm        comm,scomm;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  comm = PETSC_COMM_WORLD;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size != 4) SETERRQ(comm,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 4 processors");
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* Set up a small test matrix: each process supplies the local CSR arrays (ia,ja)
     for 4 rows of the 16-vertex adjacency graph of a 4x4 grid */
  ierr = PetscMalloc1(5,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(16,&ja);CHKERRQ(ierr);
  if (!rank) {
    ja[0] = 1; ja[1] = 4; ja[2] = 0; ja[3] = 2; ja[4] = 5; ja[5] = 1; ja[6] = 3; ja[7] = 6;
    ja[8] = 2; ja[9] = 7;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 0;
  } else if (rank == 1) {
    ja[0] = 0; ja[1] = 5; ja[2] = 8; ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9; ja[7] = 2;
    ja[8] = 5; ja[9] = 7; ja[10] = 10; ja[11] = 3; ja[12] = 6; ja[13] = 11;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 0;
  } else if (rank == 2) {
    ja[0] = 4; ja[1] = 9; ja[2] = 12; ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13; ja[7] = 6;
    ja[8] = 9; ja[9] = 11; ja[10] = 14; ja[11] = 7; ja[12] = 10; ja[13] = 15;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 1;
  } else {
    ja[0] = 8; ja[1] = 13; ja[2] = 9; ja[3] = 12; ja[4] = 14; ja[5] = 10; ja[6] = 13; ja[7] = 15;
    ja[8] = 11; ja[9] = 14;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 1;
  }
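  /* MatCreateMPIAdj() takes ownership of ia and ja; they are freed when A is
     destroyed, so they must not be freed here */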
  ierr = MatCreateMPIAdj(comm,4,16,ia,ja,NULL,&A);CHKERRQ(ierr);
  ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  /*
   Partition the graph of the matrix hierarchically: 2 coarse parts, each of
   which is split into 2 fine parts, giving 4 parts in total
  */
  ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);
  ierr = MatPartitioningSetType(part,MATPARTITIONINGHIERARCH);CHKERRQ(ierr);
  ierr = MatPartitioningHierarchicalSetNcoarseparts(part,2);CHKERRQ(ierr);
  ierr = MatPartitioningHierarchicalSetNfineparts(part,2);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
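  /* The settings above can also be changed at run time, e.g. with
     -mat_partitioning_hierarchical_ncoarseparts and
     -mat_partitioning_hierarchical_nfineparts (assuming the option names
     registered by the hierarchical partitioner) */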
  /* get the new owning process of each local vertex */
  ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
  /* coarse parts */
  ierr = MatPartitioningHierarchicalGetCoarseparts(part,&coarseparts);CHKERRQ(ierr);
  ierr = ISView(coarseparts,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  /* fine parts */
  ierr = MatPartitioningHierarchicalGetFineparts(part,&fineparts);CHKERRQ(ierr);
  ierr = ISView(fineparts,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  /* the final partitioning */
  ierr = ISView(is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  /* use ISBuildTwoSided() to compute the rows each process will own after repartitioning */
  ierr = ISBuildTwoSided(is,NULL,&isrows);CHKERRQ(ierr);
  ierr = ISView(isrows,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  /* split the communicator into two sub-communicators, ranks {0,1} and {2,3},
     matching the coarse partition encoded in membershipKey */
  ierr = MPI_Comm_split(comm,membershipKey,rank,&scomm);CHKERRQ(ierr);
  /* copy the indices of isrows so they can be handed to an IS on the sub-communicator */
  ierr = ISGetLocalSize(isrows,&isrows_localsize);CHKERRQ(ierr);
  ierr = PetscMalloc1(isrows_localsize,&indices_sc);CHKERRQ(ierr);
  ierr = ISGetIndices(isrows,&indices);CHKERRQ(ierr);
  ierr = PetscArraycpy(indices_sc,indices,isrows_localsize);CHKERRQ(ierr);
  ierr = ISRestoreIndices(isrows,&indices);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = ISDestroy(&coarseparts);CHKERRQ(ierr);
  ierr = ISDestroy(&fineparts);CHKERRQ(ierr);
  ierr = ISDestroy(&isrows);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
  /* create a sub-IS on the sub-communicator; PETSC_OWN_POINTER passes ownership
     of indices_sc to the IS */
  ierr = ISCreateGeneral(scomm,isrows_localsize,indices_sc,PETSC_OWN_POINTER,&isrows_sc);CHKERRQ(ierr);
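  /* convert the adjacency matrix to AIJ format, which the overlap routine below
     operates on (the MATMPIADJ format itself is not supported there) */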
  ierr = MatConvert(A,MATMPIAIJ,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
  ierr = MatView(B,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  /* increase the overlap of the sub-domain index set by one layer; B lives on the
     world communicator while isrows_sc lives on the sub-communicator */
  ierr = MatIncreaseOverlapSplit(B,1,&isrows_sc,1);CHKERRQ(ierr);
  ierr = ISView(isrows_sc,NULL);CHKERRQ(ierr);
  ierr = ISDestroy(&isrows_sc);CHKERRQ(ierr);
  /*
    Free work space.  All PETSc objects should be destroyed when they
    are no longer needed.
  */
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&scomm);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
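
/* A sketch of the standard PETSc test harness block for this example;
   'requires: parmetis' is an assumption about the partitioner back end */
/*TEST

   test:
      nsize: 4
      requires: parmetis

TEST*/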