static char help[] = "Test ViennaCL Matrix Conversions";

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscMPIInt rank,size;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));

  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));

  /* Construct a sequential ViennaCL matrix and vector */
  if (rank == 0) {
    Mat               A_vcl;
    Vec               v_vcl,r_vcl;
    PetscInt          n = 17,m = 31,nz = 5,i,cnt,j;
    const PetscScalar val = 1.0;

    PetscCall(MatCreateSeqAIJViennaCL(PETSC_COMM_SELF,m,n,nz,NULL,&A_vcl));

    /* Add nz arbitrary entries per row in arbitrary columns */
    for (i=0;i<m;++i) {
      for (cnt=0;cnt<nz;++cnt) {
        j = (19 * cnt + (7*i + 3)) % n;
        PetscCall(MatSetValue(A_vcl,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A_vcl,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A_vcl,MAT_FINAL_ASSEMBLY));

    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,n,&v_vcl));
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,m,&r_vcl));
    PetscCall(VecSet(v_vcl,val));

    PetscCall(MatMult(A_vcl,v_vcl,r_vcl));
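    /* Sanity-check sketch (an editorial addition, not required for the conversion
       tests below): with v_vcl all ones and the nz column indices per row distinct
       (the stride 19 mod 17 = 2 never repeats within nz = 5 steps), entry i of
       r_vcl is the row sum of the inserted values. This assumes VecGetArrayRead()
       on a ViennaCL vector migrates the data to the host, PETSc's usual fallback. */
    {
      const PetscScalar *ra;
      PetscScalar       expected;

      PetscCall(VecGetArrayRead(r_vcl,&ra));
      for (i=0;i<m;++i) {
        expected = 0.0;
        for (cnt=0;cnt<nz;++cnt) {
          j         = (19 * cnt + (7*i + 3)) % n;
          expected += (PetscScalar)(0.3 * i + j);
        }
        PetscCheckFalse(PetscAbsScalar(ra[i] - expected) > 1e-10,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Unexpected entry in ViennaCL MatMult result");
      }
      PetscCall(VecRestoreArrayRead(r_vcl,&ra));
    }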
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(MatDestroy(&A_vcl));
  }

  /* Create a sequential AIJ matrix on rank 0, convert it to a new ViennaCL matrix, and apply it to a ViennaCL vector */
  if (rank == 0) {
    Mat               A,A_vcl;
    Vec               v,r,v_vcl,r_vcl,d_vcl;
    PetscInt          n = 17,m = 31,nz = 5,i,cnt,j;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,m,n,nz,NULL,&A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    for (i=0;i<m;++i) {
      for (cnt=0;cnt<nz;++cnt) {
        j = (19 * cnt + (7*i + 3)) % n;
        PetscCall(MatSetValue(A,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A,"Sequential CPU Matrix"));

    PetscCall(VecCreateSeq(PETSC_COMM_SELF,n,&v));
    PetscCall(VecCreateSeq(PETSC_COMM_SELF,m,&r));
    PetscCall(PetscObjectSetName((PetscObject)r,"CPU result vector"));
    PetscCall(VecSet(v,val));
    PetscCall(MatMult(A,v,r));

    PetscCall(MatConvert(A,MATSEQAIJVIENNACL,MAT_INITIAL_MATRIX,&A_vcl));
    PetscCall(PetscObjectSetName((PetscObject)A_vcl,"New ViennaCL Matrix"));

    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,n,&v_vcl));
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,m,&r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl,"ViennaCL result vector"));
    PetscCall(VecSet(v_vcl,val));
    PetscCall(MatMult(A_vcl,v_vcl,r_vcl));

    /* Compare the CPU result with the ViennaCL result */
    PetscCall(VecDuplicate(r_vcl,&d_vcl));
    PetscCall(VecCopy(r,d_vcl));
    PetscCall(VecAXPY(d_vcl,-1.0,r_vcl));
    PetscCall(VecNorm(d_vcl,NORM_INFINITY,&dnorm));
    PetscCheckFalse(dnorm > tol,PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"Sequential CPU and sequential ViennaCL vector results incompatible with inf norm greater than tolerance of %g",(double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
    PetscCall(MatDestroy(&A_vcl));
  }

  /* Create a sequential AIJ matrix on rank 0, convert it in-place to a ViennaCL matrix, and apply it to a ViennaCL vector */
  if (rank == 0) {
    Mat               A;
    Vec               v,r,v_vcl,r_vcl,d_vcl;
    PetscInt          n = 17,m = 31,nz = 5,i,cnt,j;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,m,n,nz,NULL,&A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    for (i=0;i<m;++i) {
      for (cnt=0;cnt<nz;++cnt) {
        j = (19 * cnt + (7*i + 3)) % n;
        PetscCall(MatSetValue(A,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A,"Sequential CPU Matrix"));

    PetscCall(VecCreateSeq(PETSC_COMM_SELF,n,&v));
    PetscCall(VecCreateSeq(PETSC_COMM_SELF,m,&r));
    PetscCall(PetscObjectSetName((PetscObject)r,"CPU result vector"));
    PetscCall(VecSet(v,val));
    PetscCall(MatMult(A,v,r));

    PetscCall(MatConvert(A,MATSEQAIJVIENNACL,MAT_INPLACE_MATRIX,&A));
    PetscCall(PetscObjectSetName((PetscObject)A,"Converted ViennaCL Matrix"));

    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,n,&v_vcl));
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF,m,&r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl,"ViennaCL result vector"));
    PetscCall(VecSet(v_vcl,val));
    PetscCall(MatMult(A,v_vcl,r_vcl));

    /* Compare the CPU result with the ViennaCL result */
    PetscCall(VecDuplicate(r_vcl,&d_vcl));
    PetscCall(VecCopy(r,d_vcl));
    PetscCall(VecAXPY(d_vcl,-1.0,r_vcl));
    PetscCall(VecNorm(d_vcl,NORM_INFINITY,&dnorm));
    PetscCheckFalse(dnorm > tol,PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"Sequential CPU and sequential ViennaCL vector results incompatible with inf norm greater than tolerance of %g",(double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
  }

  /* Create a parallel AIJ matrix, convert it to a new ViennaCL matrix, and apply it to a ViennaCL vector */
  if (size > 1) {
    Mat               A,A_vcl;
    Vec               v,r,v_vcl,r_vcl,d_vcl;
    PetscInt          N = 17,M = 31,nz = 5,i,cnt,j,rlow,rhigh;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DETERMINE,PETSC_DETERMINE,M,N,nz,NULL,nz,NULL,&A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    PetscCall(MatGetOwnershipRange(A,&rlow,&rhigh));
    for (i=rlow;i<rhigh;++i) {
      for (cnt=0;cnt<nz;++cnt) {
        j = (19 * cnt + (7*i + 3)) % N;
        PetscCall(MatSetValue(A,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A,"MPI CPU Matrix"));

    PetscCall(MatCreateVecs(A,&v,&r));
    PetscCall(PetscObjectSetName((PetscObject)r,"MPI CPU result vector"));
    PetscCall(VecSet(v,val));
    PetscCall(MatMult(A,v,r));

    PetscCall(MatConvert(A,MATMPIAIJVIENNACL,MAT_INITIAL_MATRIX,&A_vcl));
    PetscCall(PetscObjectSetName((PetscObject)A_vcl,"New MPI ViennaCL Matrix"));

    PetscCall(MatCreateVecs(A_vcl,&v_vcl,&r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl,"ViennaCL result vector"));
    PetscCall(VecSet(v_vcl,val));
    PetscCall(MatMult(A_vcl,v_vcl,r_vcl));

    /* Compare the CPU result with the ViennaCL result */
    PetscCall(VecDuplicate(r_vcl,&d_vcl));
    PetscCall(VecCopy(r,d_vcl));
    PetscCall(VecAXPY(d_vcl,-1.0,r_vcl));
    PetscCall(VecNorm(d_vcl,NORM_INFINITY,&dnorm));
    PetscCheckFalse(dnorm > tol,PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"MPI CPU and MPI ViennaCL vector results incompatible with inf norm greater than tolerance of %g",(double)tol);
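    /* Sanity-check sketch (an editorial addition): confirm that MatConvert() with
       MAT_INITIAL_MATRIX really produced a ViennaCL matrix type, using the standard
       PetscObjectTypeCompare() type query. */
    {
      PetscBool is_vcl;

      PetscCall(PetscObjectTypeCompare((PetscObject)A_vcl,MATMPIAIJVIENNACL,&is_vcl));
      PetscCheckFalse(!is_vcl,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"MatConvert did not produce a MATMPIAIJVIENNACL matrix");
    }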
    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
    PetscCall(MatDestroy(&A_vcl));
  }

  /* Create a parallel AIJ matrix, convert it in-place to a ViennaCL matrix, and apply it to a ViennaCL vector */
  if (size > 1) {
    Mat               A;
    Vec               v,r,v_vcl,r_vcl,d_vcl;
    PetscInt          N = 17,M = 31,nz = 5,i,cnt,j,rlow,rhigh;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DETERMINE,PETSC_DETERMINE,M,N,nz,NULL,nz,NULL,&A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    PetscCall(MatGetOwnershipRange(A,&rlow,&rhigh));
    for (i=rlow;i<rhigh;++i) {
      for (cnt=0;cnt<nz;++cnt) {
        j = (19 * cnt + (7*i + 3)) % N;
        PetscCall(MatSetValue(A,i,j,(PetscScalar)(0.3 * i + j),INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A,"MPI CPU Matrix"));

    PetscCall(MatCreateVecs(A,&v,&r));
    PetscCall(PetscObjectSetName((PetscObject)r,"MPI CPU result vector"));
    PetscCall(VecSet(v,val));
    PetscCall(MatMult(A,v,r));

    PetscCall(MatConvert(A,MATMPIAIJVIENNACL,MAT_INPLACE_MATRIX,&A));
    PetscCall(PetscObjectSetName((PetscObject)A,"Converted MPI ViennaCL Matrix"));

    PetscCall(MatCreateVecs(A,&v_vcl,&r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl,"ViennaCL result vector"));
    PetscCall(VecSet(v_vcl,val));
    PetscCall(MatMult(A,v_vcl,r_vcl));

    /* Compare the CPU result with the ViennaCL result */
    PetscCall(VecDuplicate(r_vcl,&d_vcl));
    PetscCall(VecCopy(r,d_vcl));
    PetscCall(VecAXPY(d_vcl,-1.0,r_vcl));
    PetscCall(VecNorm(d_vcl,NORM_INFINITY,&dnorm));
    PetscCheckFalse(dnorm > tol,PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"MPI CPU and MPI ViennaCL vector results incompatible with inf norm greater than tolerance of %g",(double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
  }

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: viennacl

   test:
      output_file: output/ex204.out

   test:
      suffix: 2
      nsize: 2
      output_file: output/ex204.out

TEST*/