static char help[] = "Partition a tiny grid using hierarchical partitioning.\n\n";

/*T
   Concepts: partitioning
   Processors: 4
T*/

/*
  Include "petscmat.h" so that we can use matrices. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             A;
  PetscMPIInt     rank,size;
  PetscInt        *ia,*ja;
  MatPartitioning part;
  IS              is,isn,isrows;
  IS              coarseparts,fineparts;
  MPI_Comm        comm;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCheck(size == 4,comm,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 4 processors");
  PetscCallMPI(MPI_Comm_rank(comm,&rank));

  /*
     Each of the 4 processes supplies its 4 local rows of the adjacency graph of a
     4 x 4 grid (16 vertices) in CSR form: ia[] holds the row offsets and ja[] the
     neighbor (column) indices. MatCreateMPIAdj() takes ownership of ia and ja and
     frees them when the matrix is destroyed, so they are not freed here.
  */
  PetscCall(PetscMalloc1(5,&ia));
  PetscCall(PetscMalloc1(16,&ja));
  if (rank == 0) {
    /* rows 0-3 */
    ja[0] = 1; ja[1] = 4; ja[2] = 0; ja[3] = 2; ja[4] = 5; ja[5] = 1; ja[6] = 3; ja[7] = 6;
    ja[8] = 2; ja[9] = 7;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  } else if (rank == 1) {
    /* rows 4-7 */
    ja[0] = 0; ja[1] = 5; ja[2] = 8; ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9; ja[7] = 2;
    ja[8] = 5; ja[9] = 7; ja[10] = 10; ja[11] = 3; ja[12] = 6; ja[13] = 11;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else if (rank == 2) {
    /* rows 8-11 */
    ja[0] = 4; ja[1] = 9; ja[2] = 12; ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13; ja[7] = 6;
    ja[8] = 9; ja[9] = 11; ja[10] = 14; ja[11] = 7; ja[12] = 10; ja[13] = 15;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else {
    /* rows 12-15 */
    ja[0] = 8; ja[1] = 13; ja[2] = 9; ja[3] = 12; ja[4] = 14; ja[5] = 10; ja[6] = 13; ja[7] = 15;
    ja[8] = 11; ja[9] = 14;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  }
  PetscCall(MatCreateMPIAdj(comm,4,16,ia,ja,NULL,&A));
  PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD));

  /*
     Partition the graph of the matrix hierarchically: 2 coarse parts, each split
     into 2 fine parts, for 4 parts in total.
  */
  PetscCall(MatPartitioningCreate(comm,&part));
  PetscCall(MatPartitioningSetAdjacency(part,A));
  PetscCall(MatPartitioningSetType(part,MATPARTITIONINGHIERARCH));
  PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part,2));
  PetscCall(MatPartitioningHierarchicalSetNfineparts(part,2));
  PetscCall(MatPartitioningSetFromOptions(part));
  /* get the new owning process of each vertex */
  PetscCall(MatPartitioningApply(part,&is));
  /* coarse parts */
  PetscCall(MatPartitioningHierarchicalGetCoarseparts(part,&coarseparts));
  PetscCall(ISView(coarseparts,PETSC_VIEWER_STDOUT_WORLD));
  /* fine parts */
  PetscCall(MatPartitioningHierarchicalGetFineparts(part,&fineparts));
  PetscCall(ISView(fineparts,PETSC_VIEWER_STDOUT_WORLD));
  /* overall partitioning */
  PetscCall(ISView(is,PETSC_VIEWER_STDOUT_WORLD));
  /* get the new global number of each old global number */
  PetscCall(ISPartitioningToNumbering(is,&isn));
  PetscCall(ISView(isn,PETSC_VIEWER_STDOUT_WORLD));
  /* get the global rows each process would own under the new partitioning */
  PetscCall(ISBuildTwoSided(is,NULL,&isrows));
  PetscCall(ISView(isrows,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&coarseparts));
  PetscCall(ISDestroy(&fineparts));
  PetscCall(ISDestroy(&isrows));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatPartitioningDestroy(&part));
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4
      requires: parmetis
      TODO: tests cannot use parmetis because it produces different results on different machines

TEST*/
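
/*
   A minimal sketch of a manual run, assuming PETSc was built with ParMETIS and the
   executable is named "ex_hierarchical" (the binary name is an assumption for
   illustration). Because MatPartitioningSetFromOptions() is called above, the
   hierarchical partitioner settings can also be supplied on the command line:

       mpiexec -n 4 ./ex_hierarchical -mat_partitioning_hierarchical_ncoarseparts 2 \
                                      -mat_partitioning_hierarchical_nfineparts 2
*/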