1 2 static char help[] = "Partition a tiny grid using hierarchical partitioning.\n\n"; 3 4 /*T 5 Concepts: partitioning 6 Processors: 4 7 T*/ 8 9 /* 10 Include "petscmat.h" so that we can use matrices. Note that this file 11 automatically includes: 12 petscsys.h - base PETSc routines petscvec.h - vectors 13 petscmat.h - matrices 14 petscis.h - index sets 15 petscviewer.h - viewers 16 */ 17 #include <petscmat.h> 18 19 int main(int argc,char **args) 20 { 21 Mat A; 22 PetscErrorCode ierr; 23 PetscMPIInt rank,size; 24 PetscInt *ia,*ja; 25 MatPartitioning part; 26 IS is,isn,isrows; 27 IS coarseparts,fineparts; 28 MPI_Comm comm; 29 30 ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr; 31 comm = PETSC_COMM_WORLD; 32 ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr); 33 if (size != 4) SETERRQ(comm,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 4 processors"); 34 ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr); 35 36 ierr = PetscMalloc1(5,&ia);CHKERRQ(ierr); 37 ierr = PetscMalloc1(16,&ja);CHKERRQ(ierr); 38 if (!rank) { 39 ja[0] = 1; ja[1] = 4; ja[2] = 0; ja[3] = 2; ja[4] = 5; ja[5] = 1; ja[6] = 3; ja[7] = 6; 40 ja[8] = 2; ja[9] = 7; 41 ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10; 42 } else if (rank == 1) { 43 ja[0] = 0; ja[1] = 5; ja[2] = 8; ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9; ja[7] = 2; 44 ja[8] = 5; ja[9] = 7; ja[10] = 10; ja[11] = 3; ja[12] = 6; ja[13] = 11; 45 ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14; 46 } else if (rank == 2) { 47 ja[0] = 4; ja[1] = 9; ja[2] = 12; ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13; ja[7] = 6; 48 ja[8] = 9; ja[9] = 11; ja[10] = 14; ja[11] = 7; ja[12] = 10; ja[13] = 15; 49 ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14; 50 } else { 51 ja[0] = 8; ja[1] = 13; ja[2] = 9; ja[3] = 12; ja[4] = 14; ja[5] = 10; ja[6] = 13; ja[7] = 15; 52 ja[8] = 11; ja[9] = 14; 53 ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10; 54 } 55 ierr = MatCreateMPIAdj(comm,4,16,ia,ja,NULL,&A);CHKERRQ(ierr); 56 ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 57 /* 58 Partition the graph of the matrix 59 */ 60 ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr); 61 ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr); 62 ierr = MatPartitioningSetType(part,MATPARTITIONINGHIERARCH);CHKERRQ(ierr); 63 ierr = MatPartitioningHierarchicalSetNcoarseparts(part,2);CHKERRQ(ierr); 64 ierr = MatPartitioningHierarchicalSetNfineparts(part,2);CHKERRQ(ierr); 65 ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr); 66 /* get new processor owner number of each vertex */ 67 ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr); 68 /* coarse parts */ 69 ierr = MatPartitioningHierarchicalGetCoarseparts(part,&coarseparts);CHKERRQ(ierr); 70 ierr = ISView(coarseparts,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 71 /* fine parts */ 72 ierr = MatPartitioningHierarchicalGetFineparts(part,&fineparts);CHKERRQ(ierr); 73 ierr = ISView(fineparts,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 74 /* partitioning */ 75 ierr = ISView(is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 76 /* get new global number of each old global number */ 77 ierr = ISPartitioningToNumbering(is,&isn);CHKERRQ(ierr); 78 ierr = ISView(isn,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 79 ierr = ISBuildTwoSided(is,NULL,&isrows);CHKERRQ(ierr); 80 ierr = ISView(isrows,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 81 ierr = ISDestroy(&is);CHKERRQ(ierr); 82 ierr = ISDestroy(&coarseparts);CHKERRQ(ierr); 83 ierr = ISDestroy(&fineparts);CHKERRQ(ierr); 84 ierr = ISDestroy(&isrows);CHKERRQ(ierr); 85 ierr = ISDestroy(&isn);CHKERRQ(ierr); 86 ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr); 87 /* 88 Free work space. All PETSc objects should be destroyed when they 89 are no longer needed. 90 */ 91 ierr = MatDestroy(&A);CHKERRQ(ierr); 92 ierr = PetscFinalize(); 93 return ierr; 94 } 95 96 /*TEST 97 98 test: 99 nsize: 4 100 requires: parmetis 101 TODO: tests cannot use parmetis because it produces different results on different machines 102 103 TEST*/ 104