static char help[] = "Tests MPI parallel matrix creation. Tests MatCreateRedundantMatrix().\n\n";

#include <petscmat.h>

int main(int argc,char **args)
{
  Mat            C,Credundant;
  MatInfo        info;
  PetscMPIInt    rank,size,subsize;
  PetscInt       i,j,m = 3,n = 2,low,high,iglobal;
  PetscInt       Ii,J,ldim,nsubcomms;
  PetscErrorCode ierr;
  PetscBool      flg_info,flg_mat;
  PetscScalar    v,one = 1.0;
  Vec            x,y;
  MPI_Comm       subcomm;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
  CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  n = 2*size;

  CHKERRQ(MatCreate(PETSC_COMM_WORLD,&C));
  CHKERRQ(MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n));
  CHKERRQ(MatSetFromOptions(C));
  CHKERRQ(MatSetUp(C));

  /* Create the matrix for the five point stencil, YET AGAIN; each process
     sets the entries for its two grid columns */
  for (i=0; i<m; i++) {
    for (j=2*rank; j<2*rank+2; j++) {
      v = -1.0; Ii = j + n*i;
      if (i>0)   {J = Ii - n; CHKERRQ(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
      if (i<m-1) {J = Ii + n; CHKERRQ(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
      if (j>0)   {J = Ii - 1; CHKERRQ(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
      if (j<n-1) {J = Ii + 1; CHKERRQ(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
      v = 4.0; CHKERRQ(MatSetValues(C,1,&Ii,1,&Ii,&v,INSERT_VALUES));
    }
  }

  /* Add extra elements (to illustrate variants of MatGetInfo) */
  Ii = n; J = n-2; v = 100.0;
  CHKERRQ(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));
  Ii = n-2; J = n; v = 100.0;
  CHKERRQ(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));

  CHKERRQ(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));

  /* Form vectors */
  CHKERRQ(MatCreateVecs(C,&x,&y));
  CHKERRQ(VecGetLocalSize(x,&ldim));
  CHKERRQ(VecGetOwnershipRange(x,&low,&high));
  for (i=0; i<ldim; i++) {
    iglobal = i + low;
    v       = one*((PetscReal)i) + 100.0*rank; /* entries encode the owning rank */
    CHKERRQ(VecSetValues(x,1,&iglobal,&v,INSERT_VALUES));
  }
  CHKERRQ(VecAssemblyBegin(x));
  CHKERRQ(VecAssemblyEnd(x));

  CHKERRQ(MatMult(C,x,y));

  CHKERRQ(PetscOptionsHasName(NULL,NULL,"-view_info",&flg_info));
  if (flg_info) {
    CHKERRQ(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO));
    CHKERRQ(MatView(C,PETSC_VIEWER_STDOUT_WORLD));

    CHKERRQ(MatGetInfo(C,MAT_GLOBAL_SUM,&info));
    CHKERRQ(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"matrix information (global sums):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n",(PetscInt)info.nz_used,(PetscInt)info.nz_allocated));
    CHKERRQ(MatGetInfo(C,MAT_GLOBAL_MAX,&info));
    CHKERRQ(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"matrix information (global max):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n",(PetscInt)info.nz_used,(PetscInt)info.nz_allocated));
  }

  CHKERRQ(PetscOptionsHasName(NULL,NULL,"-view_mat",&flg_mat));
  if (flg_mat) {
    CHKERRQ(MatView(C,PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Test MatCreateRedundantMatrix(): with MPI_COMM_NULL, PETSc splits the
     communicator into nsubcomms subcommunicators itself; the second call
     exercises MAT_REUSE_MATRIX on the same result matrix */
  nsubcomms = size;
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-nsubcomms",&nsubcomms,NULL));
  CHKERRQ(MatCreateRedundantMatrix(C,nsubcomms,MPI_COMM_NULL,MAT_INITIAL_MATRIX,&Credundant));
  CHKERRQ(MatCreateRedundantMatrix(C,nsubcomms,MPI_COMM_NULL,MAT_REUSE_MATRIX,&Credundant));

  CHKERRQ(PetscObjectGetComm((PetscObject)Credundant,&subcomm));
  CHKERRMPI(MPI_Comm_size(subcomm,&subsize));

  if (subsize==2 && flg_mat) {
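    /* each subcommunicator of size 2 views its own copy of the redundant matrix */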
    CHKERRQ(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(subcomm),"\n[%d] Credundant:\n",rank));
    CHKERRQ(MatView(Credundant,PETSC_VIEWER_STDOUT_(subcomm)));
  }
  CHKERRQ(MatDestroy(&Credundant));

  /* Test MatCreateRedundantMatrix() with user-provided subcomm */
  {
    PetscSubcomm psubcomm;

    CHKERRQ(PetscSubcommCreate(PETSC_COMM_WORLD,&psubcomm));
    CHKERRQ(PetscSubcommSetNumber(psubcomm,nsubcomms));
    CHKERRQ(PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS));
    /* enable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced' */
    CHKERRQ(PetscSubcommSetFromOptions(psubcomm));

    CHKERRQ(MatCreateRedundantMatrix(C,nsubcomms,PetscSubcommChild(psubcomm),MAT_INITIAL_MATRIX,&Credundant));
    CHKERRQ(MatCreateRedundantMatrix(C,nsubcomms,PetscSubcommChild(psubcomm),MAT_REUSE_MATRIX,&Credundant));

    CHKERRQ(PetscSubcommDestroy(&psubcomm));
    CHKERRQ(MatDestroy(&Credundant));
  }

  CHKERRQ(VecDestroy(&x));
  CHKERRQ(VecDestroy(&y));
  CHKERRQ(MatDestroy(&C));
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 3
      args: -view_info

   test:
      suffix: 2
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 3
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type contiguous

   test:
      suffix: 3_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat

   test:
      suffix: 3_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat

   test:
      suffix: 3_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat

   test:
      suffix: 4_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat -psubcomm_type interlaced

TEST*/