static char help[] = "Tests MatDenseGetArray() and MatView()/MatLoad() with binary viewers.\n\n";

#include <petscmat.h>
#include <petscviewer.h>

/* Verify every locally owned entry: all ones if 'one' is true, otherwise 1+i+j*M */
static PetscErrorCode CheckValues(Mat A,PetscBool one)
{
  const PetscScalar *array;
  PetscInt          M,N,rstart,rend,lda,i,j;

  PetscFunctionBegin;
  PetscCall(MatDenseGetArrayRead(A,&array));
  PetscCall(MatDenseGetLDA(A,&lda));
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  for (i=rstart; i<rend; i++) {
    for (j=0; j<N; j++) {
      PetscInt  ii = i - rstart, jj = j;
      PetscReal v  = (PetscReal)(one ? 1 : (1 + i + j*M));
      PetscReal w  = PetscRealPart(array[ii + jj*lda]);
      /* binary round-trips are exact, so demand exact equality */
      PetscCheck(PetscAbsReal(v-w) <= 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ") should be %g, got %g",i,j,(double)v,(double)w);
    }
  }
  PetscCall(MatDenseRestoreArrayRead(A,&array));
  PetscFunctionReturn(0);
}

#define CheckValuesIJ(A)  CheckValues(A,PETSC_FALSE)
#define CheckValuesOne(A) CheckValues(A,PETSC_TRUE)

int main(int argc,char **args)
{
  Mat         A;
  PetscInt    i,j,M = 4,N = 3,rstart,rend;
  PetscScalar *array;
  char        mattype[256];
  PetscViewer view;

  PetscCall(PetscInitialize(&argc,&args,NULL,help));
  PetscCall(PetscStrcpy(mattype,MATMPIDENSE));
  PetscCall(PetscOptionsGetString(NULL,NULL,"-mat_type",mattype,sizeof(mattype),NULL));
  /*
     Create a parallel dense matrix distributed across all processes
  */
  PetscCall(MatCreateDense(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,NULL,&A));
  PetscCall(MatConvert(A,mattype,MAT_INPLACE_MATRIX,&A));
  /*
     Set entry (i,j) to 1+i+j*M
  */
  for (i=0; i<M; i++) {
    for (j=0; j<N; j++) {
      PetscScalar v = (PetscScalar)(1 + i + j*M);
      PetscCall(MatSetValues(A,1,&i,1,&j,&v,INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  /* scale by 2 and back: a no-op on the values that still exercises MatScale() */
  PetscCall(MatScale(A,2.0));
  PetscCall(MatScale(A,1.0/2.0));

  /*
     Store the matrix to a binary file four times, alternating the default
     and PETSC_VIEWER_NATIVE formats
  */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_WRITE,&view));
  for (i=0; i<2; i++) {
    PetscCall(MatView(A,view));
    PetscCall(PetscViewerPushFormat(view,PETSC_VIEWER_NATIVE));
    PetscCall(MatView(A,view));
    PetscCall(PetscViewerPopFormat(view));
  }
  PetscCall(PetscViewerDestroy(&view));
  PetscCall(MatDestroy(&A));

  /*
     Reload the matrix once per stored copy and check its values
  */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&view));
  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetType(A,mattype));
  for (i=0; i<4; i++) {
    if (i > 0) PetscCall(MatZeroEntries(A));
    PetscCall(MatLoad(A,view));
    PetscCall(CheckValuesIJ(A));
  }
  PetscCall(PetscViewerDestroy(&view));

  /* Place a user-provided array of ones, then write both it and the original values */
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  PetscCall(PetscMalloc1((rend-rstart)*N,&array));
  for (i=0; i<(rend-rstart)*N; i++) array[i] = (PetscScalar)1;
  PetscCall(MatDensePlaceArray(A,array));
  PetscCall(MatScale(A,2.0));
  PetscCall(MatScale(A,1.0/2.0));
  PetscCall(CheckValuesOne(A));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_WRITE,&view));
  PetscCall(MatView(A,view));
  PetscCall(MatDenseResetArray(A));
  PetscCall(PetscFree(array));
  PetscCall(CheckValuesIJ(A));
  PetscCall(PetscViewerBinarySetSkipHeader(view,PETSC_TRUE));
  PetscCall(MatView(A,view));
  PetscCall(PetscViewerBinarySetSkipHeader(view,PETSC_FALSE));
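  /* "matrix.dat" now holds one header-wrapped matrix of ones followed by a raw,
     header-less block of the 1+i+j*M values.  The reloads below mirror that
     layout: a plain MatLoad() consumes the header-wrapped copy, then a MatLoad()
     with skip-header set reads the raw block.  The raw read works only because
     the earlier load has already fixed the matrix sizes; a header-less file
     carries no dimensions to size the matrix from. */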
  PetscCall(PetscViewerDestroy(&view));
  PetscCall(MatDestroy(&A));

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetType(A,mattype));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&view));
  PetscCall(MatLoad(A,view));
  PetscCall(CheckValuesOne(A));
  PetscCall(MatZeroEntries(A));
  PetscCall(PetscViewerBinarySetSkipHeader(view,PETSC_TRUE));
  PetscCall(MatLoad(A,view));
  PetscCall(PetscViewerBinarySetSkipHeader(view,PETSC_FALSE));
  PetscCall(CheckValuesIJ(A));
  PetscCall(PetscViewerDestroy(&view));
  PetscCall(MatDestroy(&A));

  /* Repeat the reloads with a matrix wrapping a user-provided array */
  {
    PetscInt m = PETSC_DECIDE, n = PETSC_DECIDE;
    PetscCall(PetscSplitOwnership(PETSC_COMM_WORLD,&m,&M));
    PetscCall(PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N));
    /* TODO: MatCreateDense requires data!=NULL at all processes!
       The +1 keeps the allocation non-NULL even when m*N is zero. */
    PetscCall(PetscMalloc1(m*N+1,&array));

    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&view));
    PetscCall(MatCreateDense(PETSC_COMM_WORLD,m,n,M,N,array,&A));
    PetscCall(MatLoad(A,view));
    PetscCall(CheckValuesOne(A));
    PetscCall(PetscViewerBinarySetSkipHeader(view,PETSC_TRUE));
    PetscCall(MatLoad(A,view));
    PetscCall(PetscViewerBinarySetSkipHeader(view,PETSC_FALSE));
    PetscCall(CheckValuesIJ(A));
    PetscCall(MatDestroy(&A));
    PetscCall(PetscViewerDestroy(&view));

    /* the user array still holds the values filled in by the last load */
    PetscCall(MatCreateDense(PETSC_COMM_WORLD,m,n,M,N,array,&A));
    PetscCall(CheckValuesIJ(A));
    PetscCall(MatDestroy(&A));

    PetscCall(PetscFree(array));
  }

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   testset:
      args: -viewer_binary_mpiio 0
      output_file: output/ex16.out
      test:
         suffix: stdio_1
         nsize: 1
         args: -mat_type seqdense
      test:
         suffix: stdio_2
         nsize: 2
      test:
         suffix: stdio_3
         nsize: 3
      test:
         suffix: stdio_4
         nsize: 4
      test:
         suffix: stdio_5
         nsize: 5
      test:
         requires: cuda
         args: -mat_type seqdensecuda
         suffix: stdio_cuda_1
         nsize: 1
      test:
         requires: cuda
         args: -mat_type mpidensecuda
         suffix: stdio_cuda_2
         nsize: 2
      test:
         requires: cuda
         args: -mat_type mpidensecuda
         suffix: stdio_cuda_3
         nsize: 3
      test:
         requires: cuda
         args: -mat_type mpidensecuda
         suffix: stdio_cuda_4
         nsize: 4
      test:
         requires: cuda
         args: -mat_type mpidensecuda
         suffix: stdio_cuda_5
         nsize: 5

   testset:
      requires: mpiio
      args: -viewer_binary_mpiio 1
      output_file: output/ex16.out
      test:
         suffix: mpiio_1
         nsize: 1
      test:
         suffix: mpiio_2
         nsize: 2
      test:
         suffix: mpiio_3
         nsize: 3
      test:
         suffix: mpiio_4
         nsize: 4
      test:
         suffix: mpiio_5
         nsize: 5
      test:
         requires: cuda
         args: -mat_type mpidensecuda
         suffix: mpiio_cuda_1
         nsize: 1
      test:
         requires: cuda
         args: -mat_type mpidensecuda
         suffix: mpiio_cuda_2
         nsize: 2
      test:
         requires: cuda
         args: -mat_type mpidensecuda
         suffix: mpiio_cuda_3
         nsize: 3
      test:
         requires: cuda
         args: -mat_type mpidensecuda
         suffix: mpiio_cuda_4
         nsize: 4
      test:
         requires: cuda
         args: -mat_type mpidensecuda
         suffix: mpiio_cuda_5
         nsize: 5

TEST*/
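/*
   A manual run mirroring the stdio_2 configuration above might look like
   (executable name and MPI launcher are build-dependent):

     mpiexec -n 2 ./ex16 -viewer_binary_mpiio 0

   and the CUDA variants add e.g. -mat_type mpidensecuda.
*/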