xref: /petsc/src/mat/tests/ex19.c (revision ffa8c5705e8ab2cf85ee1d14dbe507a6e2eb5283)
1 
/* Help text shown by the -help option: states the test's purpose (reusing a
   parallel matrix and exercising MatGetValues()) and the single input flag. */
static char help[] = "Tests reusing MPI parallel matrices and MatGetValues().\n\
To test the parallel matrix assembly, this example intentionally lays out\n\
the matrix across processors differently from the way it is assembled.\n\
This example uses bilinear elements on the unit square.  Input arguments are:\n\
  -m <size> : problem size\n\n";
7 
8 #include <petscmat.h>
9 
10 int FormElementStiffness(PetscReal H,PetscScalar *Ke)
11 {
12   PetscFunctionBegin;
13   Ke[0]  = H/6.0;    Ke[1]  = -.125*H; Ke[2]  = H/12.0;   Ke[3]  = -.125*H;
14   Ke[4]  = -.125*H;  Ke[5]  = H/6.0;   Ke[6]  = -.125*H;  Ke[7]  = H/12.0;
15   Ke[8]  = H/12.0;   Ke[9]  = -.125*H; Ke[10] = H/6.0;    Ke[11] = -.125*H;
16   Ke[12] = -.125*H;  Ke[13] = H/12.0;  Ke[14] = -.125*H;  Ke[15] = H/6.0;
17   PetscFunctionReturn(0);
18 }
19 
int main(int argc,char **args)
{
  Mat            C;                  /* global stiffness matrix, N x N */
  Vec            u,b;                /* u = vector of all ones, b = C*u */
  PetscMPIInt    size,rank;
  PetscInt       i,m = 5,N,start,end,M,idx[4];
  PetscInt       j,nrsub,ncsub,*rsub,*csub,mystart,myend;
  PetscBool      flg;
  PetscScalar    one = 1.0,Ke[16],*vals;
  PetscReal      h,norm;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));

  N    = (m+1)*(m+1); /* dimension of matrix */
  M    = m*m;      /* number of elements */
  h    = 1.0/m;    /* mesh width */
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));

  /* Create stiffness matrix; row layout is whatever PETSC_DECIDE picks */
  PetscCall(MatCreate(PETSC_COMM_WORLD,&C));
  PetscCall(MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,N,N));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));

  /* Split the M elements across ranks as evenly as possible (the first
     M%size ranks get one extra element).  This element partition is
     intentionally unrelated to the matrix's row partition, so many of the
     MatSetValues() calls below target off-process rows, exercising the
     parallel assembly/communication path. */
  start = rank*(M/size) + ((M%size) < rank ? (M%size) : rank);
  end   = start + M/size + ((M%size) > rank);

  /* Form the element stiffness for the Laplacian */
  PetscCall(FormElementStiffness(h*h,Ke));
  for (i=start; i<end; i++) {
    /* location of lower left corner of element */
    /* node numbers for the four corners of element */
    idx[0] = (m+1)*(i/m) + (i % m);
    idx[1] = idx[0]+1; idx[2] = idx[1] + m + 1; idx[3] = idx[2] - 1;
    PetscCall(MatSetValues(C,4,idx,4,idx,Ke,ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));

  /* Assemble the matrix again: zero all entries and repeat the identical
     element loop, testing that an already-assembled parallel matrix can be
     reused for a second assembly cycle (the point of this example). */
  PetscCall(MatZeroEntries(C));

  for (i=start; i<end; i++) {
    /* location of lower left corner of element */
    /* node numbers for the four corners of element */
    idx[0] = (m+1)*(i/m) + (i % m);
    idx[1] = idx[0]+1; idx[2] = idx[1] + m + 1; idx[3] = idx[2] - 1;
    PetscCall(MatSetValues(C,4,idx,4,idx,Ke,ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));

  /* Create test vectors */
  PetscCall(VecCreate(PETSC_COMM_WORLD,&u));
  PetscCall(VecSetSizes(u,PETSC_DECIDE,N));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u,&b));
  PetscCall(VecSet(u,one));

  /* Check error: each row of the element-stiffness Laplacian sums to zero,
     so b = C*u with u == 1 should be (numerically) the zero vector. */
  PetscCall(MatMult(C,u,b));
  PetscCall(VecNorm(b,NORM_2,&norm));
  if (norm > PETSC_SQRT_MACHINE_EPSILON) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Norm of error b %g should be near 0\n",(double)norm));
  }

  /* Now test MatGetValues() (only when -get_values is given).
     rsub lists this rank's owned rows in DESCENDING order; csub holds the
     four columns mystart+8, mystart+6, mystart+4, mystart+2.
     NOTE(review): MatGetValues() can only read locally owned rows, which is
     why the row set is restricted to [mystart,myend). */
  PetscCall(PetscOptionsHasName(NULL,NULL,"-get_values",&flg));
  if (flg) {
    PetscCall(MatGetOwnershipRange(C,&mystart,&myend));
    nrsub = myend - mystart; ncsub = 4;
    PetscCall(PetscMalloc1(nrsub*ncsub,&vals));
    PetscCall(PetscMalloc1(nrsub,&rsub));
    PetscCall(PetscMalloc1(ncsub,&csub));
    for (i=myend-1; i>=mystart; i--) rsub[myend-i-1] = i;
    for (i=0; i<ncsub; i++) csub[i] = 2*(ncsub-i) + mystart;
    PetscCall(MatGetValues(C,nrsub,rsub,ncsub,csub,vals));
    PetscCall(MatView(C,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD,"processor number %d: start=%" PetscInt_FMT ", end=%" PetscInt_FMT ", mystart=%" PetscInt_FMT ", myend=%" PetscInt_FMT "\n",rank,start,end,mystart,myend));
    /* Print the fetched submatrix; the imaginary part is only nonzero (and
       only printed) in complex-scalar builds of PETSc. */
    for (i=0; i<nrsub; i++) {
      for (j=0; j<ncsub; j++) {
        if (PetscImaginaryPart(vals[i*ncsub+j]) != 0.0) {
          PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD,"  C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g + %g i\n",rsub[i],csub[j],(double)PetscRealPart(vals[i*ncsub+j]),(double)PetscImaginaryPart(vals[i*ncsub+j])));
        } else {
          PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD,"  C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g\n",rsub[i],csub[j],(double)PetscRealPart(vals[i*ncsub+j])));
        }
      }
    }
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT));
    PetscCall(PetscFree(rsub));
    PetscCall(PetscFree(csub));
    PetscCall(PetscFree(vals));
  }

  /* Free data structures */
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}
123 
124 /*TEST
125 
126    test:
127       nsize: 4
128 
129 TEST*/
130