static char help[] ="Tests sequential and parallel MatMatMatMult() and MatPtAP(). Modified from ex96.c \n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";

/*
  Example of usage: mpiexec -n 3 ./ex41 -Mx 10 -My 10 -Mz 10
*/

#include <petscdm.h>
#include <petscdmda.h>

/* User-defined application contexts */

/* Per-level (fine or coarse) grid data */
typedef struct {
  PetscInt mx,my,mz;       /* number of grid points in the x, y and z directions */
  Vec      localX,localF;  /* local vectors with ghost region */
  DM       da;             /* distributed array describing the grid */
  Vec      x,b,r;          /* global vectors */
  Mat      J;              /* Jacobian on grid */
} GridCtx;

/* Two-level (fine + coarse) application context */
typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;          /* refinement ratio between fine and coarse grid lines */
  Mat      Ii;             /* interpolation from coarse to fine */
} AppCtx;

#define COARSE_LEVEL 0
#define FINE_LEVEL   1

/*
   Mm_ratio - ratio of grid lines between fine and coarse grids.
37 */ 38 int main(int argc,char **argv) 39 { 40 PetscErrorCode ierr; 41 AppCtx user; 42 PetscMPIInt size,rank; 43 PetscInt m,n,M,N,i,nrows; 44 PetscScalar one = 1.0; 45 PetscReal fill=2.0; 46 Mat A,P,R,C,PtAP,D; 47 PetscScalar *array; 48 PetscRandom rdm; 49 PetscBool Test_3D=PETSC_FALSE,flg; 50 const PetscInt *ia,*ja; 51 52 ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr; 53 CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size)); 54 CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank)); 55 56 /* Get size of fine grids and coarse grids */ 57 user.ratio = 2; 58 user.coarse.mx = 4; user.coarse.my = 4; user.coarse.mz = 4; 59 60 CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-Mx",&user.coarse.mx,NULL)); 61 CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-My",&user.coarse.my,NULL)); 62 CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-Mz",&user.coarse.mz,NULL)); 63 CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-ratio",&user.ratio,NULL)); 64 if (user.coarse.mz) Test_3D = PETSC_TRUE; 65 66 user.fine.mx = user.ratio*(user.coarse.mx-1)+1; 67 user.fine.my = user.ratio*(user.coarse.my-1)+1; 68 user.fine.mz = user.ratio*(user.coarse.mz-1)+1; 69 70 if (rank == 0) { 71 if (!Test_3D) { 72 CHKERRQ(PetscPrintf(PETSC_COMM_SELF,"coarse grids: %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT "\n",user.coarse.mx,user.coarse.my,user.fine.mx,user.fine.my)); 73 } else { 74 CHKERRQ(PetscPrintf(PETSC_COMM_SELF,"coarse grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n",user.coarse.mx,user.coarse.my,user.coarse.mz,user.fine.mx,user.fine.my,user.fine.mz)); 75 } 76 } 77 78 /* Set up distributed array for fine grid */ 79 if (!Test_3D) { 80 CHKERRQ(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.fine.da)); 81 } else { 82 ierr = 
DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,user.fine.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE, 83 1,1,NULL,NULL,NULL,&user.fine.da);CHKERRQ(ierr); 84 } 85 CHKERRQ(DMSetFromOptions(user.fine.da)); 86 CHKERRQ(DMSetUp(user.fine.da)); 87 88 /* Create and set A at fine grids */ 89 CHKERRQ(DMSetMatType(user.fine.da,MATAIJ)); 90 CHKERRQ(DMCreateMatrix(user.fine.da,&A)); 91 CHKERRQ(MatGetLocalSize(A,&m,&n)); 92 CHKERRQ(MatGetSize(A,&M,&N)); 93 94 /* set val=one to A (replace with random values!) */ 95 CHKERRQ(PetscRandomCreate(PETSC_COMM_WORLD,&rdm)); 96 CHKERRQ(PetscRandomSetFromOptions(rdm)); 97 if (size == 1) { 98 CHKERRQ(MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg)); 99 if (flg) { 100 CHKERRQ(MatSeqAIJGetArray(A,&array)); 101 for (i=0; i<ia[nrows]; i++) array[i] = one; 102 CHKERRQ(MatSeqAIJRestoreArray(A,&array)); 103 } 104 CHKERRQ(MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg)); 105 } else { 106 Mat AA,AB; 107 CHKERRQ(MatMPIAIJGetSeqAIJ(A,&AA,&AB,NULL)); 108 CHKERRQ(MatGetRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg)); 109 if (flg) { 110 CHKERRQ(MatSeqAIJGetArray(AA,&array)); 111 for (i=0; i<ia[nrows]; i++) array[i] = one; 112 CHKERRQ(MatSeqAIJRestoreArray(AA,&array)); 113 } 114 CHKERRQ(MatRestoreRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg)); 115 CHKERRQ(MatGetRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg)); 116 if (flg) { 117 CHKERRQ(MatSeqAIJGetArray(AB,&array)); 118 for (i=0; i<ia[nrows]; i++) array[i] = one; 119 CHKERRQ(MatSeqAIJRestoreArray(AB,&array)); 120 } 121 CHKERRQ(MatRestoreRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg)); 122 } 123 /* Set up distributed array for coarse grid */ 124 if (!Test_3D) { 125 CHKERRQ(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.coarse.da)); 126 } else { 127 
CHKERRQ(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,user.coarse.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,NULL,&user.coarse.da)); 128 } 129 CHKERRQ(DMSetFromOptions(user.coarse.da)); 130 CHKERRQ(DMSetUp(user.coarse.da)); 131 132 /* Create interpolation between the fine and coarse grids */ 133 CHKERRQ(DMCreateInterpolation(user.coarse.da,user.fine.da,&P,NULL)); 134 135 /* Get R = P^T */ 136 CHKERRQ(MatTranspose(P,MAT_INITIAL_MATRIX,&R)); 137 138 /* C = R*A*P */ 139 /* Developer's API */ 140 CHKERRQ(MatProductCreate(R,A,P,&D)); 141 CHKERRQ(MatProductSetType(D,MATPRODUCT_ABC)); 142 CHKERRQ(MatProductSetFromOptions(D)); 143 CHKERRQ(MatProductSymbolic(D)); 144 CHKERRQ(MatProductNumeric(D)); 145 CHKERRQ(MatProductNumeric(D)); /* Test reuse symbolic D */ 146 147 /* User's API */ 148 { /* Test MatMatMatMult_Basic() */ 149 Mat Adense,Cdense; 150 CHKERRQ(MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense)); 151 CHKERRQ(MatMatMatMult(R,Adense,P,MAT_INITIAL_MATRIX,fill,&Cdense)); 152 CHKERRQ(MatMatMatMult(R,Adense,P,MAT_REUSE_MATRIX,fill,&Cdense)); 153 154 CHKERRQ(MatMultEqual(D,Cdense,10,&flg)); 155 PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"D*v != Cdense*v"); 156 CHKERRQ(MatDestroy(&Adense)); 157 CHKERRQ(MatDestroy(&Cdense)); 158 } 159 160 CHKERRQ(MatMatMatMult(R,A,P,MAT_INITIAL_MATRIX,fill,&C)); 161 CHKERRQ(MatMatMatMult(R,A,P,MAT_REUSE_MATRIX,fill,&C)); 162 CHKERRQ(MatProductClear(C)); 163 164 /* Test D == C */ 165 CHKERRQ(MatEqual(D,C,&flg)); 166 PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"D != C"); 167 168 /* Test C == PtAP */ 169 CHKERRQ(MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&PtAP)); 170 CHKERRQ(MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&PtAP)); 171 CHKERRQ(MatEqual(C,PtAP,&flg)); 172 PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"C != PtAP"); 173 CHKERRQ(MatDestroy(&PtAP)); 174 175 /* Clean up */ 176 CHKERRQ(MatDestroy(&A)); 177 CHKERRQ(PetscRandomDestroy(&rdm)); 178 
CHKERRQ(DMDestroy(&user.fine.da)); 179 CHKERRQ(DMDestroy(&user.coarse.da)); 180 CHKERRQ(MatDestroy(&P)); 181 CHKERRQ(MatDestroy(&R)); 182 CHKERRQ(MatDestroy(&C)); 183 CHKERRQ(MatDestroy(&D)); 184 ierr = PetscFinalize(); 185 return ierr; 186 } 187 188 /*TEST 189 190 test: 191 192 test: 193 suffix: 2 194 nsize: 2 195 args: -matmatmatmult_via scalable 196 197 test: 198 suffix: 3 199 nsize: 2 200 args: -matmatmatmult_via nonscalable 201 output_file: output/ex111_1.out 202 203 TEST*/ 204