/*
 * ex193.c
 *
 * Created on: Jul 29, 2015
 * Author: Fande Kong fdkong.jd@gmail.com
 */
/*
 * An example demonstrates how to use hierarchical partitioning approach
 */

#include <petscmat.h>

static char help[] = "Illustrates use of hierarchical partitioning.\n";

/*
 * Builds the standard 5-point-stencil matrix for an m x n structured grid,
 * then partitions its adjacency graph with the hierarchical partitioner
 * (2 coarse parts, 4 fine parts by default), and views the resulting
 * coarse-part, fine-part, numbering, and row-ownership index sets.
 *
 * Runtime options: -M <int>, -N <int> set the grid dimensions (default 15x15).
 */
int main(int argc,char **args)
{
  Mat             A;                     /* matrix whose graph is partitioned */
  PetscInt        m,n;                   /* mesh dimensions in x- and y- directions */
  PetscInt        i,j,Ii,J,Istart,Iend;  /* grid indices, global row/col indices, local ownership range */
  PetscErrorCode  ierr;
  PetscMPIInt     size;                  /* NOTE(review): queried below but never otherwise used */
  PetscScalar     v;                     /* stencil entry value being inserted */
  MatPartitioning part;
  IS              coarseparts,fineparts; /* per-vertex coarse/fine part assignments */
  IS              is,isn,isrows;         /* overall partitioning, new numbering, owned rows */
  MPI_Comm        comm;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  /* PetscOptionsBegin/End are scope-opening macros, hence the explicit
     ierr = ...; PetscCall(ierr) form instead of wrapping them in PetscCall() */
  ierr = PetscOptionsBegin(comm,NULL,"ex193","hierarchical partitioning");PetscCall(ierr);
  m = 15;
  PetscCall(PetscOptionsInt("-M","Number of mesh points in the x-direction","partitioning",m,&m,NULL));
  n = 15;
  PetscCall(PetscOptionsInt("-N","Number of mesh points in the y-direction","partitioning",n,&n,NULL));
  ierr = PetscOptionsEnd();PetscCall(ierr);

  /*
     Assemble the matrix for the five point stencil (finite difference), YET AGAIN
  */
  PetscCall(MatCreate(comm,&A));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetOwnershipRange(A,&Istart,&Iend));
  for (Ii=Istart; Ii<Iend; Ii++) {
    /* map global row Ii to grid point (i,j); insert -1 for each existing neighbor */
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; PetscCall(MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES));}
    if (i<m-1) {J = Ii + n; PetscCall(MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES));}
    if (j>0)   {J = Ii - 1; PetscCall(MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES));}
    if (j<n-1) {J = Ii + 1; PetscCall(MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES));}
    /* diagonal entry of the 5-point Laplacian stencil */
    v = 4.0; PetscCall(MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD));
  /*
     Partition the graph of the matrix
  */
  PetscCall(MatPartitioningCreate(comm,&part));
  PetscCall(MatPartitioningSetAdjacency(part,A));
  PetscCall(MatPartitioningSetType(part,MATPARTITIONINGHIERARCH));
  PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part,2));
  PetscCall(MatPartitioningHierarchicalSetNfineparts(part,4));
  /* SetFromOptions comes after the explicit settings so command-line
     options (e.g. -mat_partitioning_hierarchical_Nfineparts) can override them */
  PetscCall(MatPartitioningSetFromOptions(part));
  /* get new processor owner number of each vertex */
  PetscCall(MatPartitioningApply(part,&is));
  /* coarse parts */
  PetscCall(MatPartitioningHierarchicalGetCoarseparts(part,&coarseparts));
  PetscCall(ISView(coarseparts,PETSC_VIEWER_STDOUT_WORLD));
  /* fine parts */
  PetscCall(MatPartitioningHierarchicalGetFineparts(part,&fineparts));
  PetscCall(ISView(fineparts,PETSC_VIEWER_STDOUT_WORLD));
  /* partitioning */
  PetscCall(ISView(is,PETSC_VIEWER_STDOUT_WORLD));
  /* get new global number of each old global number */
  PetscCall(ISPartitioningToNumbering(is,&isn));
  PetscCall(ISView(isn,PETSC_VIEWER_STDOUT_WORLD));
  /* invert the partitioning: which rows each process will own */
  PetscCall(ISBuildTwoSided(is,NULL,&isrows));
  PetscCall(ISView(isrows,PETSC_VIEWER_STDOUT_WORLD));
  /* destroy every object created above, in reverse dependency order */
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&coarseparts));
  PetscCall(ISDestroy(&fineparts));
  PetscCall(ISDestroy(&isrows));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatPartitioningDestroy(&part));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4
      args: -mat_partitioning_hierarchical_Nfineparts 2
      requires: parmetis
      TODO: cannot run because parmetis does not reproduce across all machines, probably due to nonportable random number generator

TEST*/