static char help[] = "Test ViennaCL Matrix Conversions";

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscMPIInt rank,size;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&args,NULL,help));

  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));

  /* Construct a sequential ViennaCL matrix and vector */
  if (rank == 0) {
    Mat A_vcl;
    Vec v_vcl,r_vcl;
    PetscInt n = 17,m = 31,nz = 5,i,cnt,j;
    const PetscScalar val = 1.0;

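    /* MatCreateSeqAIJViennaCL() creates a matrix of type MATSEQAIJVIENNACL: AIJ storage on the host
       that is mirrored to a ViennaCL device matrix when it is first needed. With nnz = NULL, nz
       nonzeros are preallocated for every row. */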
    PetscCall(MatCreateSeqAIJViennaCL(PETSC_COMM_SELF,m,n,nz,NULL,&A_vcl));

    /* Add nz arbitrary entries per row in arbitrary columns */
    for (i=0;i<m;++i) {
      for (cnt = 0; cnt<nz; ++cnt) {
        j = (19 * cnt + (7*i + 3)) % n;
        PetscCall(MatSetValue(A_vcl,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A_vcl,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A_vcl,MAT_FINAL_ASSEMBLY));

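    /* VecCreateSeqViennaCL() gives vectors of type VECSEQVIENNACL whose data lives on the host and is
       copied to the ViennaCL device representation on demand, so the MatMult() below runs through the
       ViennaCL back end. */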
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,n,&v_vcl));
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,m,&r_vcl));
    PetscCall(VecSet(v_vcl,val));

    PetscCall(MatMult(A_vcl,v_vcl,r_vcl));

    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(MatDestroy(&A_vcl));
  }

  /* Create a sequential AIJ matrix on rank 0, convert it to a new ViennaCL matrix, and apply it to a ViennaCL vector */
  if (rank == 0) {
    Mat               A,A_vcl;
    Vec               v,r,v_vcl,r_vcl,d_vcl;
    PetscInt          n=17,m=31,nz=5,i,cnt,j;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,m,n,nz,NULL,&A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    for (i=0;i<m;++i) {
      for (cnt = 0; cnt<nz; ++cnt) {
        j = (19 * cnt + (7*i + 3)) % n;
        PetscCall(MatSetValue(A,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A,"Sequential CPU Matrix"));

    PetscCall(VecCreateSeq(PETSC_COMM_SELF,n,&v));
    PetscCall(VecCreateSeq(PETSC_COMM_SELF,m,&r));
    PetscCall(PetscObjectSetName((PetscObject)r,"CPU result vector"));
    PetscCall(VecSet(v,val));
    PetscCall(MatMult(A,v,r));

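    /* MatConvert() with MAT_INITIAL_MATRIX leaves A untouched and creates a second matrix A_vcl of
       type MATSEQAIJVIENNACL holding the same entries, so the CPU and ViennaCL products can be
       compared below. */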
    PetscCall(MatConvert(A,MATSEQAIJVIENNACL,MAT_INITIAL_MATRIX,&A_vcl));
    PetscCall(PetscObjectSetName((PetscObject)A_vcl,"New ViennaCL Matrix"));

    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,n,&v_vcl));
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,m,&r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl,"ViennaCL result vector"));
    PetscCall(VecSet(v_vcl,val));
    PetscCall(MatMult(A_vcl,v_vcl,r_vcl));

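    /* Compare the CPU result r with the ViennaCL result r_vcl: copy r into a ViennaCL work vector,
       subtract r_vcl, and require the infinity norm of the difference to stay below tol. */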
    PetscCall(VecDuplicate(r_vcl,&d_vcl));
    PetscCall(VecCopy(r,d_vcl));
    PetscCall(VecAXPY(d_vcl,-1.0,r_vcl));
    PetscCall(VecNorm(d_vcl,NORM_INFINITY,&dnorm));
    PetscCheck(dnorm <= tol,PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"Sequential CPU and ViennaCL vector results incompatible with inf norm greater than tolerance of %g",(double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
    PetscCall(MatDestroy(&A_vcl));
  }

  /* Create a sequential AIJ matrix on rank 0, convert it in place to a ViennaCL matrix, and apply it to a ViennaCL vector */
  if (rank == 0) {
    Mat               A;
    Vec               v,r,v_vcl,r_vcl,d_vcl;
    PetscInt          n=17,m=31,nz=5,i,cnt,j;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,m,n,nz,NULL,&A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    for (i=0;i<m;++i) {
      for (cnt = 0; cnt<nz; ++cnt) {
        j = (19 * cnt + (7*i + 3)) % n;
        PetscCall(MatSetValue(A,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A,"Sequential CPU Matrix"));

    PetscCall(VecCreateSeq(PETSC_COMM_SELF,n,&v));
    PetscCall(VecCreateSeq(PETSC_COMM_SELF,m,&r));
    PetscCall(PetscObjectSetName((PetscObject)r,"CPU result vector"));
    PetscCall(VecSet(v,val));
    PetscCall(MatMult(A,v,r));

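    /* MAT_INPLACE_MATRIX converts A itself: after the call the same Mat object has type
       MATSEQAIJVIENNACL, so subsequent MatMult() calls on A use the ViennaCL back end. */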
    PetscCall(MatConvert(A,MATSEQAIJVIENNACL,MAT_INPLACE_MATRIX,&A));
    PetscCall(PetscObjectSetName((PetscObject)A,"Converted ViennaCL Matrix"));

    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,n,&v_vcl));
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,m,&r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl,"ViennaCL result vector"));
    PetscCall(VecSet(v_vcl,val));
    PetscCall(MatMult(A,v_vcl,r_vcl));

    /* Compare the CPU result computed before the conversion with the ViennaCL result */
    PetscCall(VecDuplicate(r_vcl,&d_vcl));
    PetscCall(VecCopy(r,d_vcl));
    PetscCall(VecAXPY(d_vcl,-1.0,r_vcl));
    PetscCall(VecNorm(d_vcl,NORM_INFINITY,&dnorm));
    PetscCheck(dnorm <= tol,PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"Sequential CPU and in-place converted ViennaCL vector results incompatible with inf norm greater than tolerance of %g",(double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
  }

  /* Create a parallel AIJ matrix, convert it to a new ViennaCL matrix, and apply it to a ViennaCL vector */
  if (size > 1) {
    Mat               A,A_vcl;
    Vec               v,r,v_vcl,r_vcl,d_vcl;
    PetscInt          N=17,M=31,nz=5,i,cnt,j,rlow,rhigh;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol=1e-5;

    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DETERMINE,PETSC_DETERMINE,M,N,nz,NULL,nz,NULL,&A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    PetscCall(MatGetOwnershipRange(A,&rlow,&rhigh));
    for (i=rlow;i<rhigh;++i) {
      for (cnt = 0; cnt<nz; ++cnt) {
        j = (19 * cnt + (7*i + 3)) % N;
        PetscCall(MatSetValue(A,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A,"MPI CPU Matrix"));

    PetscCall(MatCreateVecs(A,&v,&r));
    PetscCall(PetscObjectSetName((PetscObject)r,"MPI CPU result vector"));
    PetscCall(VecSet(v,val));
    PetscCall(MatMult(A,v,r));

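    /* The parallel analogue of the sequential case: MAT_INITIAL_MATRIX keeps A and produces a new
       matrix of type MATMPIAIJVIENNACL, whose local blocks use the ViennaCL format on each rank. */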
    PetscCall(MatConvert(A,MATMPIAIJVIENNACL,MAT_INITIAL_MATRIX,&A_vcl));
    PetscCall(PetscObjectSetName((PetscObject)A_vcl,"New MPI ViennaCL Matrix"));

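    /* MatCreateVecs() returns vectors with layouts and type matching the matrix, so v_vcl and r_vcl
       are ViennaCL vectors here, whereas v and r above were plain MPI vectors. */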
    PetscCall(MatCreateVecs(A_vcl,&v_vcl,&r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl,"ViennaCL result vector"));
    PetscCall(VecSet(v_vcl,val));
    PetscCall(MatMult(A_vcl,v_vcl,r_vcl));

    /* Compare the MPI CPU result with the MPI ViennaCL result */
    PetscCall(VecDuplicate(r_vcl,&d_vcl));
    PetscCall(VecCopy(r,d_vcl));
    PetscCall(VecAXPY(d_vcl,-1.0,r_vcl));
    PetscCall(VecNorm(d_vcl,NORM_INFINITY,&dnorm));
    PetscCheck(dnorm <= tol,PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"MPI CPU and MPI ViennaCL vector results incompatible with inf norm greater than tolerance of %g",(double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
    PetscCall(MatDestroy(&A_vcl));
  }

  /* Create a parallel AIJ matrix, convert it in place to a ViennaCL matrix, and apply it to a ViennaCL vector */
  if (size > 1) {
    Mat               A;
    Vec               v,r,v_vcl,r_vcl,d_vcl;
    PetscInt          N=17,M=31,nz=5,i,cnt,j,rlow,rhigh;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol=1e-5;

    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DETERMINE,PETSC_DETERMINE,M,N,nz,NULL,nz,NULL,&A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    PetscCall(MatGetOwnershipRange(A,&rlow,&rhigh));
    for (i=rlow;i<rhigh;++i) {
      for (cnt = 0; cnt<nz; ++cnt) {
        j = (19 * cnt + (7*i + 3)) % N;
        PetscCall(MatSetValue(A,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A,"MPI CPU Matrix"));

    PetscCall(MatCreateVecs(A,&v,&r));
    PetscCall(PetscObjectSetName((PetscObject)r,"MPI CPU result vector"));
    PetscCall(VecSet(v,val));
    PetscCall(MatMult(A,v,r));

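    /* In-place conversion: A itself becomes a MATMPIAIJVIENNACL matrix, so MatCreateVecs() below now
       yields ViennaCL vectors and the MatMult() runs through ViennaCL. */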
    PetscCall(MatConvert(A,MATMPIAIJVIENNACL,MAT_INPLACE_MATRIX,&A));
    PetscCall(PetscObjectSetName((PetscObject)A,"Converted MPI ViennaCL Matrix"));

    PetscCall(MatCreateVecs(A,&v_vcl,&r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl,"ViennaCL result vector"));
    PetscCall(VecSet(v_vcl,val));
    PetscCall(MatMult(A,v_vcl,r_vcl));

    /* Compare the MPI CPU result computed before the conversion with the MPI ViennaCL result */
    PetscCall(VecDuplicate(r_vcl,&d_vcl));
    PetscCall(VecCopy(r,d_vcl));
    PetscCall(VecAXPY(d_vcl,-1.0,r_vcl));
    PetscCall(VecNorm(d_vcl,NORM_INFINITY,&dnorm));
    PetscCheck(dnorm <= tol,PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"MPI CPU and in-place converted MPI ViennaCL vector results incompatible with inf norm greater than tolerance of %g",(double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
  }

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: viennacl

   test:
      output_file: output/ex204.out

   test:
      suffix: 2
      nsize: 2
      output_file: output/ex204.out

TEST*/