static char help[] = "Tests MatMeshToDual()\n\n";

/*T
   Concepts: Mat^mesh partitioning
   Processors: n
T*/

/*
  Include "petscmat.h" so that we can use matrices.
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets            petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             mesh,dual;
  PetscErrorCode  ierr;
  PetscInt        Nvertices = 6;   /* total number of vertices */
  PetscInt        ncells    = 2;   /* number of cells on this process */
  PetscInt        *ii,*jj;
  PetscMPIInt     size,rank;
  MatPartitioning part;
  IS              is;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  CHKERRMPI(MPI_Comm_size(MPI_COMM_WORLD,&size));
  PetscCheckFalse(size != 2,PETSC_COMM_WORLD,PETSC_ERR_SUP,"This example is for exactly two processes");
  CHKERRMPI(MPI_Comm_rank(MPI_COMM_WORLD,&rank));

  CHKERRQ(PetscMalloc1(3,&ii));
  CHKERRQ(PetscMalloc1(6,&jj));
  ii[0] = 0; ii[1] = 3; ii[2] = 6;
  if (rank == 0) {
    jj[0] = 0; jj[1] = 1; jj[2] = 2; jj[3] = 1; jj[4] = 2; jj[5] = 3;
  } else {
    jj[0] = 1; jj[1] = 4; jj[2] = 5; jj[3] = 1; jj[4] = 3; jj[5] = 5;
  }
  CHKERRQ(MatCreateMPIAdj(MPI_COMM_WORLD,ncells,Nvertices,ii,jj,NULL,&mesh));
  CHKERRQ(MatMeshToCellGraph(mesh,2,&dual));
  CHKERRQ(MatView(dual,PETSC_VIEWER_STDOUT_WORLD));

  CHKERRQ(MatPartitioningCreate(MPI_COMM_WORLD,&part));
  CHKERRQ(MatPartitioningSetAdjacency(part,dual));
  CHKERRQ(MatPartitioningSetFromOptions(part));
  CHKERRQ(MatPartitioningApply(part,&is));
  CHKERRQ(ISView(is,PETSC_VIEWER_STDOUT_WORLD));
  CHKERRQ(ISDestroy(&is));
  CHKERRQ(MatPartitioningDestroy(&part));

  CHKERRQ(MatDestroy(&mesh));
  CHKERRQ(MatDestroy(&dual));
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   build:
      requires: parmetis

   test:
      nsize: 2
      requires: parmetis

TEST*/
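
/*
   Sample invocation (a sketch only; the executable name ex11 is an assumption
   and the available partitioners depend on how PETSc was configured, here
   assuming a build with ParMETIS):

       mpiexec -n 2 ./ex11 -mat_partitioning_type parmetis

   MatPartitioningSetFromOptions() reads -mat_partitioning_type from the
   options database; if no type is given the default partitioner is used.
*/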