xref: /petsc/src/mat/tutorials/ex3.c (revision f97672e55eacc8688507b9471cd7ec2664d7f203)
/* Usage string shown by -help; passed to PetscInitialize() below. */
static char help[] = "Illustration of MatIS using a 1D Laplacian assembly\n\n";
2 
3 /*
4   MatIS means that the matrix is not assembled. The easiest way to think of this (for me) is that processes do not have
5   to hold full matrix rows. One process can hold part of row i, and another process can hold another part. However, there
6   are still the same number of global rows. The local size here is not the size of the local IS block, which we call the
7   overlap size, since that is a property only of MatIS. It is the size of the local piece of the vector you multiply in
8   MatMult(). This allows PETSc to understand the parallel layout of the Vec, and how it matches the Mat. If you only know
9   the overlap size when assembling, it is best to use PETSC_DECIDE for the local size in the creation routine, so that PETSc
10   automatically partitions the unknowns.
11 
12   Each P_1 element matrix for a cell will be
13 
14     /  1 -1 \
15     \ -1  1 /
16 
17   so that the assembled matrix has a tridiagonal [-1, 2, -1] pattern. We will use 1 cell per process for illustration,
18   and allow PETSc to decide the ownership.
19 */
20 
21 #include <petscmat.h>
22 
23 int main(int argc, char **argv) {
24   MPI_Comm               comm;
25   Mat                    A;
26   Vec                    x, y;
27   ISLocalToGlobalMapping map;
28   PetscScalar            elemMat[4] = {1.0, -1.0, -1.0, 1.0};
29   PetscReal              error;
30   PetscInt               overlapSize = 2, globalIdx[2];
31   PetscMPIInt            rank, size;
32 
33   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
34   comm = PETSC_COMM_WORLD;
35   PetscCallMPI(MPI_Comm_rank(comm, &rank));
36   PetscCallMPI(MPI_Comm_size(comm, &size));
37   /* Create local-to-global map */
38   globalIdx[0] = rank;
39   globalIdx[1] = rank+1;
40   PetscCall(ISLocalToGlobalMappingCreate(comm, 1, overlapSize, globalIdx, PETSC_COPY_VALUES, &map));
41   /* Create matrix */
42   PetscCall(MatCreateIS(comm, 1, PETSC_DECIDE, PETSC_DECIDE, size+1, size+1, map, map, &A));
43   PetscCall(PetscObjectSetName((PetscObject) A, "A"));
44   PetscCall(ISLocalToGlobalMappingDestroy(&map));
45   PetscCall(MatISSetPreallocation(A, overlapSize, NULL, overlapSize, NULL));
46   PetscCall(MatSetValues(A, 2, globalIdx, 2, globalIdx, elemMat, ADD_VALUES));
47   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
48   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
49   /* Check that the constant vector is in the nullspace */
50   PetscCall(MatCreateVecs(A, &x, &y));
51   PetscCall(VecSet(x, 1.0));
52   PetscCall(PetscObjectSetName((PetscObject) x, "x"));
53   PetscCall(VecViewFromOptions(x, NULL, "-x_view"));
54   PetscCall(MatMult(A, x, y));
55   PetscCall(PetscObjectSetName((PetscObject) y, "y"));
56   PetscCall(VecViewFromOptions(y, NULL, "-y_view"));
57   PetscCall(VecNorm(y, NORM_2, &error));
58   PetscCheck(error <= PETSC_SMALL,comm, PETSC_ERR_ARG_WRONG, "Invalid output, x should be in the nullspace of A");
59   /* Check that an interior unit vector gets mapped to something of 1-norm 4 */
60   if (size > 1) {
61     PetscCall(VecSet(x, 0.0));
62     PetscCall(VecSetValue(x, 1, 1.0, INSERT_VALUES));
63     PetscCall(VecAssemblyBegin(x));
64     PetscCall(VecAssemblyEnd(x));
65     PetscCall(MatMult(A, x, y));
66     PetscCall(VecNorm(y, NORM_1, &error));
67     PetscCheck(PetscAbsReal(error - 4) <= PETSC_SMALL,comm, PETSC_ERR_ARG_WRONG, "Invalid output for matrix multiply");
68   }
69   /* Cleanup */
70   PetscCall(MatDestroy(&A));
71   PetscCall(VecDestroy(&x));
72   PetscCall(VecDestroy(&y));
73   PetscCall(PetscFinalize());
74   return 0;
75 }
76 
77 /*TEST
78 
79   test:
80     suffix: 0
81     requires:
82     args:
83 
84   test:
85     suffix: 1
86     nsize: 3
87     args:
88 
89   test:
90     suffix: 2
91     nsize: 7
92     args:
93 
94 TEST*/
95