static char help[] = "Tests reusing MPI parallel matrices and MatGetValues().\n\
To test the parallel matrix assembly, this example intentionally lays out\n\
the matrix across processors differently from the way it is assembled.\n\
This example uses bilinear elements on the unit square. Input arguments are:\n\
  -m <size> : problem size\n\n";

#include <petscmat.h>

/* 4x4 (row-major) element stiffness matrix; each row sums to zero, so the assembled matrix annihilates constant vectors */
int FormElementStiffness(PetscReal H, PetscScalar *Ke)
{
  PetscFunctionBegin;
  Ke[0]  = H / 6.0;
  Ke[1]  = -.125 * H;
  Ke[2]  = H / 12.0;
  Ke[3]  = -.125 * H;
  Ke[4]  = -.125 * H;
  Ke[5]  = H / 6.0;
  Ke[6]  = -.125 * H;
  Ke[7]  = H / 12.0;
  Ke[8]  = H / 12.0;
  Ke[9]  = -.125 * H;
  Ke[10] = H / 6.0;
  Ke[11] = -.125 * H;
  Ke[12] = -.125 * H;
  Ke[13] = H / 12.0;
  Ke[14] = -.125 * H;
  Ke[15] = H / 6.0;
  PetscFunctionReturn(0);
}

int main(int argc, char **args)
{
  Mat         C;
  Vec         u, b;
  PetscMPIInt size, rank;
  PetscInt    i, m = 5, N, start, end, M, idx[4];
  PetscInt    j, nrsub, ncsub, *rsub, *csub, mystart, myend;
  PetscBool   flg;
  PetscScalar one = 1.0, Ke[16], *vals;
  PetscReal   h, norm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));

  N = (m + 1) * (m + 1); /* dimension of matrix */
  M = m * m;             /* number of elements */
  h = 1.0 / m;           /* mesh width */
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  /* Create stiffness matrix */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
  PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, N, N));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));
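  /*
     Distribute the M elements across ranks as evenly as possible: every rank
     gets M/size elements and the first M%size ranks get one extra, i.e.
     start = rank*(M/size) + min(M%size, rank). Worked example (an
     illustration only, not values used by the test): M = 7, size = 3 gives
     M/size = 2 and M%size = 1, so the ranks own element ranges
     [0,3), [3,5), [5,7).
  */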
  start = rank * (M / size) + ((M % size) < rank ? (M % size) : rank);
  end   = start + M / size + ((M % size) > rank);

  /* Form the element stiffness for the Laplacian */
  PetscCall(FormElementStiffness(h * h, Ke));
  for (i = start; i < end; i++) {
    /* lower-left corner of element i is grid point (i % m, i / m); idx[] holds the node numbers of its four corners */
    idx[0] = (m + 1) * (i / m) + (i % m);
    idx[1] = idx[0] + 1;
    idx[2] = idx[1] + m + 1;
    idx[3] = idx[2] - 1;
    PetscCall(MatSetValues(C, 4, idx, 4, idx, Ke, ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  /* Assemble the matrix again */
  PetscCall(MatZeroEntries(C));

  for (i = start; i < end; i++) {
    /* lower-left corner of element i is grid point (i % m, i / m); idx[] holds the node numbers of its four corners */
    idx[0] = (m + 1) * (i / m) + (i % m);
    idx[1] = idx[0] + 1;
    idx[2] = idx[1] + m + 1;
    idx[3] = idx[2] - 1;
    PetscCall(MatSetValues(C, 4, idx, 4, idx, Ke, ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  /* Create test vectors */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, N));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecSet(u, one));

  /* Check error: every row of C sums to zero, so C*u should vanish for constant u */
  PetscCall(MatMult(C, u, b));
  PetscCall(VecNorm(b, NORM_2, &norm));
  if (norm > PETSC_SQRT_MACHINE_EPSILON) { PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error b %g should be near 0\n", (double)norm)); }

  /* Now test MatGetValues() */
  PetscCall(PetscOptionsHasName(NULL, NULL, "-get_values", &flg));
  if (flg) {
    PetscCall(MatGetOwnershipRange(C, &mystart, &myend));
    nrsub = myend - mystart;
    ncsub = 4;
    PetscCall(PetscMalloc1(nrsub * ncsub, &vals));
    PetscCall(PetscMalloc1(nrsub, &rsub));
    PetscCall(PetscMalloc1(ncsub, &csub));
    /* request the locally owned rows in reverse order, and four scattered columns */
    for (i = myend - 1; i >= mystart; i--) rsub[myend - i - 1] = i;
    for (i = 0; i < ncsub; i++) csub[i] = 2 * (ncsub - i) + mystart;
    PetscCall(MatGetValues(C, nrsub, rsub, ncsub, csub, vals));
    PetscCall(MatView(C, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "processor number %d: start=%" PetscInt_FMT ", end=%" PetscInt_FMT ", mystart=%" PetscInt_FMT ", myend=%" PetscInt_FMT "\n", rank, start, end, mystart, myend));
    for (i = 0; i < nrsub; i++) {
      for (j = 0; j < ncsub; j++) {
        if (PetscImaginaryPart(vals[i * ncsub + j]) != 0.0) {
          PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g + %g i\n", rsub[i], csub[j], (double)PetscRealPart(vals[i * ncsub + j]), (double)PetscImaginaryPart(vals[i * ncsub + j])));
        } else {
          PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g\n", rsub[i], csub[j], (double)PetscRealPart(vals[i * ncsub + j])));
        }
      }
    }
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
    PetscCall(PetscFree(rsub));
    PetscCall(PetscFree(csub));
    PetscCall(PetscFree(vals));
  }

  /* Free data structures */
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4

TEST*/
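/*
   Example invocation (a sketch; the executable name "ex_matreuse" is a
   placeholder, since the actual name depends on how this file is built):

       mpiexec -n 4 ./ex_matreuse -m 5 -get_values

   The norm check is silent unless C*u fails to vanish; with -get_values each
   rank additionally prints the sampled 4-column block of its owned rows.
*/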