static char help[] = "Tests MPI parallel matrix creation and MatCreateRedundantMatrix().\n\n";

#include <petscmat.h>

int main(int argc,char **args)
{
  Mat         C,Credundant;
  MatInfo     info;
  PetscMPIInt rank,size,subsize;
  PetscInt    i,j,m = 3,n = 2,low,high,iglobal;
  PetscInt    Ii,J,ldim,nsubcomms;
  PetscBool   flg_info,flg_mat;
  PetscScalar v,one = 1.0;
  Vec         x,y;
  MPI_Comm    subcomm;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  n = 2*size;   /* grid is m x n; each rank generates the entries for two grid columns */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&C));
  PetscCall(MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));

  /* Create the matrix for the five point stencil, YET AGAIN */
  for (i=0; i<m; i++) {
    for (j=2*rank; j<2*rank+2; j++) {
      v = -1.0; Ii = j + n*i;
      if (i>0)   {J = Ii - n; PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
      if (i<m-1) {J = Ii + n; PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
      if (j>0)   {J = Ii - 1; PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
      if (j<n-1) {J = Ii + 1; PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
      v = 4.0; PetscCall(MatSetValues(C,1,&Ii,1,&Ii,&v,INSERT_VALUES));
    }
  }

  /* Add extra elements (to illustrate variants of MatGetInfo) */
  Ii = n;   J = n-2; v = 100.0;
  PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));
  Ii = n-2; J = n;   v = 100.0;
  PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));

  PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));

  /* Form vectors */
  PetscCall(MatCreateVecs(C,&x,&y));
  PetscCall(VecGetLocalSize(x,&ldim));
  PetscCall(VecGetOwnershipRange(x,&low,&high));
  for (i=0; i<ldim; i++) {
    iglobal = i + low;
    v       = one*((PetscReal)i) + 100.0*rank;
    PetscCall(VecSetValues(x,1,&iglobal,&v,INSERT_VALUES));
  }
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  PetscCall(MatMult(C,x,y));

  PetscCall(PetscOptionsHasName(NULL,NULL,"-view_info",&flg_info));
  if (flg_info) {
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO));
    PetscCall(MatView(C,PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(MatGetInfo(C,MAT_GLOBAL_SUM,&info));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"matrix information (global sums):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n",(PetscInt)info.nz_used,(PetscInt)info.nz_allocated));
    PetscCall(MatGetInfo(C,MAT_GLOBAL_MAX,&info));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"matrix information (global max):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n",(PetscInt)info.nz_used,(PetscInt)info.nz_allocated));
  }

  PetscCall(PetscOptionsHasName(NULL,NULL,"-view_mat",&flg_mat));
  if (flg_mat) {
    PetscCall(MatView(C,PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Test MatCreateRedundantMatrix(): pass MPI_COMM_NULL so PETSc creates the subcommunicators */
  nsubcomms = size;
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-nsubcomms",&nsubcomms,NULL));
  PetscCall(MatCreateRedundantMatrix(C,nsubcomms,MPI_COMM_NULL,MAT_INITIAL_MATRIX,&Credundant));
  PetscCall(MatCreateRedundantMatrix(C,nsubcomms,MPI_COMM_NULL,MAT_REUSE_MATRIX,&Credundant));

  PetscCall(PetscObjectGetComm((PetscObject)Credundant,&subcomm));
  PetscCallMPI(MPI_Comm_size(subcomm,&subsize));
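  /* Each of the nsubcomms subcommunicators now holds a complete (redundant) copy of C;
     only ranks whose subcommunicator has exactly 2 processes view their copy below */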
  if (subsize==2 && flg_mat) {
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(subcomm),"\n[%d] Credundant:\n",rank));
    PetscCall(MatView(Credundant,PETSC_VIEWER_STDOUT_(subcomm)));
  }
  PetscCall(MatDestroy(&Credundant));

  /* Test MatCreateRedundantMatrix() with user-provided subcomm */
  {
    PetscSubcomm psubcomm;

    PetscCall(PetscSubcommCreate(PETSC_COMM_WORLD,&psubcomm));
    PetscCall(PetscSubcommSetNumber(psubcomm,nsubcomms));
    PetscCall(PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS));
    /* enable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced' */
    PetscCall(PetscSubcommSetFromOptions(psubcomm));

    PetscCall(MatCreateRedundantMatrix(C,nsubcomms,PetscSubcommChild(psubcomm),MAT_INITIAL_MATRIX,&Credundant));
    PetscCall(MatCreateRedundantMatrix(C,nsubcomms,PetscSubcommChild(psubcomm),MAT_REUSE_MATRIX,&Credundant));

    PetscCall(PetscSubcommDestroy(&psubcomm));
    PetscCall(MatDestroy(&Credundant));
  }

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 3
      args: -view_info

   test:
      suffix: 2
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 3
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type contiguous

   test:
      suffix: 3_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat

   test:
      suffix: 3_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat

   test:
      suffix: 3_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat

   test:
      suffix: 4_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat -psubcomm_type interlaced

TEST*/