static char help[] = "Tests MatDenseGetArray() and MatView()/MatLoad() with binary viewers.\n\n";

#include <petscmat.h>
#include <petscviewer.h>

static PetscErrorCode CheckValues(Mat A,PetscBool one)
{
  const PetscScalar *array;
  PetscInt          M,N,rstart,rend,lda,i,j;

  PetscFunctionBegin;
  CHKERRQ(MatDenseGetArrayRead(A,&array));
  CHKERRQ(MatDenseGetLDA(A,&lda));
  CHKERRQ(MatGetSize(A,&M,&N));
  CHKERRQ(MatGetOwnershipRange(A,&rstart,&rend));
  for (i=rstart; i<rend; i++) {
    for (j=0; j<N; j++) {
      PetscInt  ii = i - rstart, jj = j;
      PetscReal v  = (PetscReal)(one ? 1 : (1 + i + j*M));
      PetscReal w  = PetscRealPart(array[ii + jj*lda]);
      PetscCheckFalse(PetscAbsReal(v-w) > 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ") should be %g, got %g",i,j,(double)v,(double)w);
    }
  }
  CHKERRQ(MatDenseRestoreArrayRead(A,&array));
  PetscFunctionReturn(0);
}

#define CheckValuesIJ(A)  CheckValues(A,PETSC_FALSE)
#define CheckValuesOne(A) CheckValues(A,PETSC_TRUE)

int main(int argc,char **args)
{
  Mat            A;
  PetscInt       i,j,M = 4,N = 3,rstart,rend;
  PetscErrorCode ierr;
  PetscScalar    *array;
  char           mattype[256];
  PetscViewer    view;

  ierr = PetscInitialize(&argc,&args,NULL,help);if (ierr) return ierr;
  CHKERRQ(PetscStrcpy(mattype,MATMPIDENSE));
  CHKERRQ(PetscOptionsGetString(NULL,NULL,"-mat_type",mattype,sizeof(mattype),NULL));
  /*
     Create a parallel dense matrix shared by all processors
  */
  CHKERRQ(MatCreateDense(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,NULL,&A));
  CHKERRQ(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatConvert(A,mattype,MAT_INPLACE_MATRIX,&A));
  /*
     Set values into the matrix
  */
  for (i=0; i<M; i++) {
    for (j=0; j<N; j++) {
      PetscScalar v = (PetscReal)(1 + i + j*M);
      CHKERRQ(MatSetValues(A,1,&i,1,&j,&v,INSERT_VALUES));
    }
  }
  CHKERRQ(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatScale(A,2.0));
  CHKERRQ(MatScale(A,1.0/2.0));

  /*
     Store the binary matrix to a file
  */
  CHKERRQ(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_WRITE,&view));
  for (i=0; i<2; i++) {
    CHKERRQ(MatView(A,view));
    CHKERRQ(PetscViewerPushFormat(view,PETSC_VIEWER_NATIVE));
    CHKERRQ(MatView(A,view));
    CHKERRQ(PetscViewerPopFormat(view));
  }
  CHKERRQ(PetscViewerDestroy(&view));
  CHKERRQ(MatDestroy(&A));

  /*
     Now reload the matrix and check its values
  */
  CHKERRQ(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&view));
  CHKERRQ(MatCreate(PETSC_COMM_WORLD,&A));
  CHKERRQ(MatSetType(A,mattype));
  for (i=0; i<4; i++) {
    if (i > 0) CHKERRQ(MatZeroEntries(A));
    CHKERRQ(MatLoad(A,view));
    CHKERRQ(CheckValuesIJ(A));
  }
  CHKERRQ(PetscViewerDestroy(&view));

  CHKERRQ(MatGetOwnershipRange(A,&rstart,&rend));
  CHKERRQ(PetscMalloc1((rend-rstart)*N,&array));
  for (i=0; i<(rend-rstart)*N; i++) array[i] = (PetscReal)1;
  CHKERRQ(MatDensePlaceArray(A,array));
  CHKERRQ(MatScale(A,2.0));
  CHKERRQ(MatScale(A,1.0/2.0));
  CHKERRQ(CheckValuesOne(A));
  CHKERRQ(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_WRITE,&view));
  CHKERRQ(MatView(A,view));
  CHKERRQ(MatDenseResetArray(A));
  CHKERRQ(PetscFree(array));
  CHKERRQ(CheckValuesIJ(A));
  CHKERRQ(PetscViewerBinarySetSkipHeader(view,PETSC_TRUE));
  CHKERRQ(MatView(A,view));
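  /*
     The file now holds the all-ones matrix (written with a header while the
     placed array was active) followed by a raw, headerless dump of the
     original 1+i+j*M values; the reads below consume them in that order.
  */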
  CHKERRQ(PetscViewerBinarySetSkipHeader(view,PETSC_FALSE));
  CHKERRQ(PetscViewerDestroy(&view));
  CHKERRQ(MatDestroy(&A));

  CHKERRQ(MatCreate(PETSC_COMM_WORLD,&A));
  CHKERRQ(MatSetType(A,mattype));
  CHKERRQ(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&view));
  CHKERRQ(MatLoad(A,view));
  CHKERRQ(CheckValuesOne(A));
  CHKERRQ(MatZeroEntries(A));
  CHKERRQ(PetscViewerBinarySetSkipHeader(view,PETSC_TRUE));
  CHKERRQ(MatLoad(A,view));
  CHKERRQ(PetscViewerBinarySetSkipHeader(view,PETSC_FALSE));
  CHKERRQ(CheckValuesIJ(A));
  CHKERRQ(PetscViewerDestroy(&view));
  CHKERRQ(MatDestroy(&A));

  {
    PetscInt m = PETSC_DECIDE, n = PETSC_DECIDE;
    CHKERRQ(PetscSplitOwnership(PETSC_COMM_WORLD,&m,&M));
    CHKERRQ(PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N));
    /* TODO: MatCreateDense requires data!=NULL at all processes! */
    CHKERRQ(PetscMalloc1(m*N+1,&array));

    CHKERRQ(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&view));
    CHKERRQ(MatCreateDense(PETSC_COMM_WORLD,m,n,M,N,array,&A));
    CHKERRQ(MatLoad(A,view));
    CHKERRQ(CheckValuesOne(A));
    CHKERRQ(PetscViewerBinarySetSkipHeader(view,PETSC_TRUE));
    CHKERRQ(MatLoad(A,view));
    CHKERRQ(PetscViewerBinarySetSkipHeader(view,PETSC_FALSE));
    CHKERRQ(CheckValuesIJ(A));
    CHKERRQ(MatDestroy(&A));
    CHKERRQ(PetscViewerDestroy(&view));

    CHKERRQ(MatCreateDense(PETSC_COMM_WORLD,m,n,M,N,array,&A));
    CHKERRQ(CheckValuesIJ(A));
    CHKERRQ(MatDestroy(&A));

    CHKERRQ(PetscFree(array));
  }

  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   testset:
     args: -viewer_binary_mpiio 0
     output_file: output/ex16.out
     test:
       suffix: stdio_1
       nsize: 1
       args: -mat_type seqdense
     test:
       suffix: stdio_2
       nsize: 2
     test:
       suffix: stdio_3
       nsize: 3
     test:
       suffix: stdio_4
       nsize: 4
     test:
       suffix: stdio_5
       nsize: 5
     test:
       requires: cuda
       args: -mat_type seqdensecuda
       suffix: stdio_cuda_1
       nsize: 1
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: stdio_cuda_2
       nsize: 2
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: stdio_cuda_3
       nsize: 3
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: stdio_cuda_4
       nsize: 4
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: stdio_cuda_5
       nsize: 5

   testset:
     requires: mpiio
     args: -viewer_binary_mpiio 1
     output_file: output/ex16.out
     test:
       suffix: mpiio_1
       nsize: 1
     test:
       suffix: mpiio_2
       nsize: 2
     test:
       suffix: mpiio_3
       nsize: 3
     test:
       suffix: mpiio_4
       nsize: 4
     test:
       suffix: mpiio_5
       nsize: 5
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_1
       nsize: 1
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_2
       nsize: 2
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_3
       nsize: 3
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_4
       nsize: 4
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_5
       nsize: 5

TEST*/
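/*
   A minimal sketch of running this test by hand, assuming the executable is
   built as ex16 (as the output_file entries above suggest):

     mpiexec -n 2 ./ex16 -mat_type mpidense -viewer_binary_mpiio 0

   -mat_type is read through PetscOptionsGetString() in main(), and
   -viewer_binary_mpiio toggles MPI-IO in the PETSc binary viewer.
*/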