static char help[] = "Tests reusing MPI parallel matrices and MatGetValues().\n\
To test the parallel matrix assembly, this example intentionally lays out\n\
the matrix across processors differently from the way it is assembled.\n\
This example uses bilinear elements on the unit square. Input arguments are:\n\
  -m <size> : problem size\n\n";

#include <petscmat.h>

/*
  FormElementStiffness - Fills Ke (row-major 4x4) with the element stiffness
  matrix of the Laplacian on a square bilinear (Q1) element.

  Input Parameter:
.  H - scale factor; the caller passes h*h, the element area

  Output Parameter:
.  Ke - the 16 stiffness entries

  Note: every row of Ke sums to zero (H/6 - H/8 + H/12 - H/8 = 0), so the
  assembled matrix annihilates the constant vector; main() relies on this
  for its correctness check.
*/
int FormElementStiffness(PetscReal H,PetscScalar *Ke)
{
  PetscFunctionBegin;
  Ke[0]  = H/6.0;   Ke[1]  = -.125*H; Ke[2]  = H/12.0;  Ke[3]  = -.125*H;
  Ke[4]  = -.125*H; Ke[5]  = H/6.0;   Ke[6]  = -.125*H; Ke[7]  = H/12.0;
  Ke[8]  = H/12.0;  Ke[9]  = -.125*H; Ke[10] = H/6.0;   Ke[11] = -.125*H;
  Ke[12] = -.125*H; Ke[13] = H/12.0;  Ke[14] = -.125*H; Ke[15] = H/6.0;
  PetscFunctionReturn(0);
}

int main(int argc,char **args)
{
  Mat            C;                 /* global stiffness matrix, assembled twice */
  Vec            u,b;               /* test vectors: u = all ones, b = C*u */
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;
  PetscInt       i,m = 5,N,start,end,M,idx[4];
  PetscInt       j,nrsub,ncsub,*rsub,*csub,mystart,myend;
  PetscBool      flg;
  PetscScalar    one = 1.0,Ke[16],*vals;
  PetscReal      h,norm;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);CHKERRQ(ierr);

  N = (m+1)*(m+1); /* dimension of matrix: one node per grid point */
  M = m*m;         /* number of elements */
  h = 1.0/m;       /* mesh width */
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr);

  /* Create stiffness matrix; PETSc decides the parallel row layout */
  ierr = MatCreate(PETSC_COMM_WORLD,&C);CHKERRQ(ierr);
  ierr = MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,N,N);CHKERRQ(ierr);
  ierr = MatSetFromOptions(C);CHKERRQ(ierr);
  ierr = MatSetUp(C);CHKERRQ(ierr);

  /* Balanced split of the M elements over the ranks: the first M%size ranks
     each own one extra element. This element ownership deliberately differs
     from the row ownership PETSc chose for C (see help text), so assembly
     exercises off-process MatSetValues communication. */
  start = rank*(M/size) + ((M%size) < rank ? (M%size) : rank);
  end   = start + M/size + ((M%size) > rank);

  /* Form the element stiffness for the Laplacian */
  ierr = FormElementStiffness(h*h,Ke);CHKERRQ(ierr);
  for (i=start; i<end; i++) {
    /* location of lower left corner of element */
    /* node numbers for the four corners of element: idx[0] lower-left,
       idx[1] lower-right, idx[2] upper-right, idx[3] upper-left */
    idx[0] = (m+1)*(i/m) + (i % m);
    idx[1] = idx[0]+1; idx[2] = idx[1] + m + 1; idx[3] = idx[2] - 1;
    ierr = MatSetValues(C,4,idx,4,idx,Ke,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Assemble the matrix again, reusing the already-assembled C; this is the
     "reusing MPI parallel matrices" part of the test */
  ierr = MatZeroEntries(C);CHKERRQ(ierr);

  for (i=start; i<end; i++) {
    /* location of lower left corner of element */
    /* node numbers for the four corners of element */
    idx[0] = (m+1)*(i/m) + (i % m);
    idx[1] = idx[0]+1; idx[2] = idx[1] + m + 1; idx[3] = idx[2] - 1;
    ierr = MatSetValues(C,4,idx,4,idx,Ke,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Create test vectors */
  ierr = VecCreate(PETSC_COMM_WORLD,&u);CHKERRQ(ierr);
  ierr = VecSetSizes(u,PETSC_DECIDE,N);CHKERRQ(ierr);
  ierr = VecSetFromOptions(u);CHKERRQ(ierr);
  ierr = VecDuplicate(u,&b);CHKERRQ(ierr);
  ierr = VecSet(u,one);CHKERRQ(ierr);

  /* Check error: every row of the element stiffness sums to zero, so the
     assembled C must annihilate the constant vector; ||C*1|| ~ 0 */
  ierr = MatMult(C,u,b);CHKERRQ(ierr);
  ierr = VecNorm(b,NORM_2,&norm);CHKERRQ(ierr);
  if (norm > PETSC_SQRT_MACHINE_EPSILON) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"Norm of error b %g should be near 0\n",(double)norm);CHKERRQ(ierr);
  }

  /* Now test MatGetValues() */
  ierr = PetscOptionsHasName(NULL,NULL,"-get_values",&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatGetOwnershipRange(C,&mystart,&myend);CHKERRQ(ierr);
    nrsub = myend - mystart; ncsub = 4;
    ierr = PetscMalloc1(nrsub*ncsub,&vals);CHKERRQ(ierr);
    ierr = PetscMalloc1(nrsub,&rsub);CHKERRQ(ierr);
    ierr = PetscMalloc1(ncsub,&csub);CHKERRQ(ierr);
    /* request all locally owned rows, listed in decreasing order */
    for (i=myend-1; i>=mystart; i--) rsub[myend-i-1] = i;
    /* columns mystart+8, mystart+6, mystart+4, mystart+2 (decreasing, stride 2).
       NOTE(review): for small -m with many ranks, mystart+2*ncsub can exceed
       N-1 on the last rank; valid for the default m=5 with nsize 4 — confirm
       before changing the test configuration. */
    for (i=0; i<ncsub; i++) csub[i] = 2*(ncsub-i) + mystart;
    /* MatGetValues() may only access locally owned rows, hence rsub above */
    ierr = MatGetValues(C,nrsub,rsub,ncsub,csub,vals);CHKERRQ(ierr);
    ierr = MatView(C,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"processor number %d: start=%D, end=%D, mystart=%D, myend=%D\n",rank,start,end,mystart,myend);CHKERRQ(ierr);
    for (i=0; i<nrsub; i++) {
      for (j=0; j<ncsub; j++) {
        /* with complex scalars print the imaginary part only when nonzero */
        if (PetscImaginaryPart(vals[i*ncsub+j]) != 0.0) {
          ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"  C[%D, %D] = %g + %g i\n",rsub[i],csub[j],(double)PetscRealPart(vals[i*ncsub+j]),(double)PetscImaginaryPart(vals[i*ncsub+j]));CHKERRQ(ierr);
        } else {
          ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"  C[%D, %D] = %g\n",rsub[i],csub[j],(double)PetscRealPart(vals[i*ncsub+j]));CHKERRQ(ierr);
        }
      }
    }
    ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
    ierr = PetscFree(rsub);CHKERRQ(ierr);
    ierr = PetscFree(csub);CHKERRQ(ierr);
    ierr = PetscFree(vals);CHKERRQ(ierr);
  }

  /* Free data structures */
  ierr = VecDestroy(&u);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  ierr = MatDestroy(&C);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 4

TEST*/