#include <../src/mat/impls/aij/seq/aij.h>
#include <petsc/private/isimpl.h>
#include <petsc/private/vecimpl.h>
#include <petsclayouthdf5.h>

PetscErrorCode MatLoad_AIJ_HDF5(Mat mat, PetscViewer viewer)
{
  PetscViewerFormat format;
  const PetscInt *i_glob = NULL;
  PetscInt *i = NULL;
  const PetscInt *j = NULL;
  const PetscScalar *a = NULL;
  char *a_name = NULL, *i_name = NULL, *j_name = NULL, *c_name = NULL;
  const char *mat_name = NULL;
  PetscInt p, m, M, N;
  PetscInt bs = mat->rmap->bs;
  PetscInt *range;
  PetscBool flg;
  IS is_i = NULL, is_j = NULL;
  Vec vec_a = NULL;
  PetscLayout jmap = NULL;
  MPI_Comm comm;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(viewer, &format));
  switch (format) {
  case PETSC_VIEWER_HDF5_PETSC:
  case PETSC_VIEWER_DEFAULT:
  case PETSC_VIEWER_NATIVE:
  case PETSC_VIEWER_HDF5_MAT:
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "PetscViewerFormat %s not supported for HDF5 input.", PetscViewerFormats[format]);
  }

  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscObjectGetName((PetscObject)mat, &mat_name));
  if (format == PETSC_VIEWER_HDF5_MAT) {
    PetscCall(PetscStrallocpy("jc", &i_name));
    PetscCall(PetscStrallocpy("ir", &j_name));
    PetscCall(PetscStrallocpy("data", &a_name));
    PetscCall(PetscStrallocpy("MATLAB_sparse", &c_name));
  } else {
    /* TODO Once corresponding MatView is implemented, change the names to i,j,a */
    /* TODO Maybe there could be both namings in the file, using "symbolic link" features of HDF5. */
    PetscCall(PetscStrallocpy("jc", &i_name));
    PetscCall(PetscStrallocpy("ir", &j_name));
    PetscCall(PetscStrallocpy("data", &a_name));
    PetscCall(PetscStrallocpy("MATLAB_sparse", &c_name));
  }
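
  /* Note: jc/ir/data are the dataset names MATLAB uses for a sparse matrix, which it stores in
     compressed sparse column (CSC) format. For PETSC_VIEWER_HDF5_MAT the arrays are therefore
     read below as if they described the CSR pattern of the transposed matrix, and the result is
     transposed back in place after assembly (unless the matrix is marked symmetric). */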

  PetscOptionsBegin(comm, NULL, "Options for loading matrix from HDF5", "Mat");
  PetscCall(PetscOptionsInt("-matload_block_size", "Set the blocksize used to store the matrix", "MatLoad", bs, &bs, &flg));
  PetscOptionsEnd();
  if (flg) PetscCall(MatSetBlockSize(mat, bs));

  PetscCall(PetscViewerHDF5PushGroup(viewer, mat_name));
  PetscCall(PetscViewerHDF5ReadAttribute(viewer, NULL, c_name, PETSC_INT, NULL, &N));
  PetscCall(PetscViewerHDF5ReadSizes(viewer, i_name, NULL, &M));
  --M; /* i has size M+1 as there is global number of nonzeros stored at the end */

  if (format == PETSC_VIEWER_HDF5_MAT && mat->symmetric != PETSC_BOOL3_TRUE) {
    /* Swap the row and column layouts of the still-unallocated matrix. We want to avoid calling MatTranspose() just to transpose the sparsity pattern and layout. */
    PetscLayout tmp;
    PetscCheck(!mat->preallocated, comm, PETSC_ERR_SUP, "Not for preallocated matrix - we would need to transpose it here which we want to avoid");
    tmp = mat->rmap;
    mat->rmap = mat->cmap;
    mat->cmap = tmp;
  }

  /* If the global sizes are already set, check that they are consistent with those given in the file */
  PetscCheck(mat->rmap->N < 0 || mat->rmap->N == M, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows: Matrix in file has (%" PetscInt_FMT ") and input matrix has (%" PetscInt_FMT ")", mat->rmap->N, M);
  PetscCheck(mat->cmap->N < 0 || mat->cmap->N == N, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols: Matrix in file has (%" PetscInt_FMT ") and input matrix has (%" PetscInt_FMT ")", mat->cmap->N, N);

  /* Determine ownership of all (block) rows and columns */
  mat->rmap->N = M;
  mat->cmap->N = N;
  PetscCall(PetscLayoutSetUp(mat->rmap));
  PetscCall(PetscLayoutSetUp(mat->cmap));
  m = mat->rmap->n;

  /* Read array i (the CSR row pointer array) */
  PetscCall(PetscMalloc1(m + 1, &i)); /* allocate i with one more position for the local number of nonzeros on each rank */
  i[0] = i[m] = 0; /* make the last entry always defined - the code block below overwrites it only on the last rank */
  if (rank == size - 1) m++; /* in the loaded array i_glob, only the last rank has one more position with the global number of nonzeros */
  M++; /* i_glob thus has global length M + 1 */
  PetscCall(ISCreate(comm, &is_i));
  PetscCall(PetscObjectSetName((PetscObject)is_i, i_name));
  PetscCall(PetscLayoutSetLocalSize(is_i->map, m));
  PetscCall(PetscLayoutSetSize(is_i->map, M));
  PetscCall(ISLoad(is_i, viewer));
  PetscCall(ISGetIndices(is_i, &i_glob));
  PetscCall(PetscArraycpy(i, i_glob, m));

  /* Reset m and M to the matrix sizes */
  m = mat->rmap->n;
  M--;

  /* Create PetscLayout for j and a vectors; construct ranges first */
  PetscCall(PetscMalloc1(size + 1, &range));
  PetscCallMPI(MPI_Allgather(i, 1, MPIU_INT, range, 1, MPIU_INT, comm));
  /* Last rank has global number of nonzeros (= length of j and a arrays) in i[m] (last i entry) so broadcast it */
  range[size] = i[m];
  PetscCallMPI(MPI_Bcast(&range[size], 1, MPIU_INT, size - 1, comm));
  for (p = size - 1; p > 0; p--) {
    if (!range[p]) range[p] = range[p + 1]; /* for ranks with 0 rows, take the value from the next processor */
  }
  i[m] = range[rank + 1]; /* i[m] (last i entry) is equal to next rank's offset */
  /* Deduce rstart, rend, n and N from the ranges */
  PetscCall(PetscLayoutCreateFromRanges(comm, range, PETSC_OWN_POINTER, 1, &jmap));
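  /* jmap now partitions the global array of nonzeros: rank p owns entries range[p] .. range[p+1]-1 of the j and a arrays, i.e. exactly the nonzeros of its local rows */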

  /* Convert global to local indexing of rows */
  for (p = 1; p < m + 1; ++p) i[p] -= i[0];
  i[0] = 0;
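  /* For example (hypothetical numbers): a rank whose first local row starts at global nonzero offset i[0] = 1000 and which owns 250 local nonzeros ends up with i[0] = 0 and i[m] = 250, i.e. a purely local CSR row pointer array */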

  /* Read array j (array of column indices) */
  PetscCall(ISCreate(comm, &is_j));
  PetscCall(PetscObjectSetName((PetscObject)is_j, j_name));
  PetscCall(PetscLayoutDuplicate(jmap, &is_j->map));
  PetscCall(ISLoad(is_j, viewer));
  PetscCall(ISGetIndices(is_j, &j));

  /* Read array a (array of values) */
  PetscCall(VecCreate(comm, &vec_a));
  PetscCall(PetscObjectSetName((PetscObject)vec_a, a_name));
  PetscCall(PetscLayoutDuplicate(jmap, &vec_a->map));
  PetscCall(VecLoad(vec_a, viewer));
  PetscCall(VecGetArrayRead(vec_a, &a));

  /* populate matrix */
  if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocationCSR(mat, i, j, a));
  PetscCall(MatMPIAIJSetPreallocationCSR(mat, i, j, a));
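  /* The two preallocation routines above are dispatched with PetscTryMethod, so each is a no-op when mat is not of the corresponding (sequential or MPI) AIJ type; calling both covers the uniprocess and parallel cases */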
  /*
  PetscCall(MatSeqBAIJSetPreallocationCSR(mat,bs,i,j,a));
  PetscCall(MatMPIBAIJSetPreallocationCSR(mat,bs,i,j,a));
  */

  if (format == PETSC_VIEWER_HDF5_MAT && mat->symmetric != PETSC_BOOL3_TRUE) {
    /* Transpose the input matrix back */
    PetscCall(MatTranspose(mat, MAT_INPLACE_MATRIX, &mat));
  }

  PetscCall(PetscViewerHDF5PopGroup(viewer));
  PetscCall(PetscFree(i_name));
  PetscCall(PetscFree(j_name));
  PetscCall(PetscFree(a_name));
  PetscCall(PetscFree(c_name));
  PetscCall(PetscLayoutDestroy(&jmap));
  PetscCall(PetscFree(i));
  PetscCall(ISRestoreIndices(is_i, &i_glob));
  PetscCall(ISRestoreIndices(is_j, &j));
  PetscCall(VecRestoreArrayRead(vec_a, &a));
  PetscCall(ISDestroy(&is_i));
  PetscCall(ISDestroy(&is_j));
  PetscCall(VecDestroy(&vec_a));
  PetscFunctionReturn(PETSC_SUCCESS);
}
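
/*
  Illustrative usage sketch (not part of the original source): how this loader is typically
  reached through the public MatLoad() interface. The file name "matrix.h5" and the object
  name "A" are hypothetical; the calls assume PETSc was configured with HDF5 support. For a
  MATLAB v7.3 file, the object name must match the variable name under which the sparse
  matrix was saved.

    Mat         A;
    PetscViewer viewer;

    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetType(A, MATAIJ));
    PetscCall(PetscObjectSetName((PetscObject)A, "A"));
    PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, "matrix.h5", FILE_MODE_READ, &viewer));
    PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_HDF5_MAT));
    PetscCall(MatLoad(A, viewer));
    PetscCall(PetscViewerPopFormat(viewer));
    PetscCall(PetscViewerDestroy(&viewer));
*/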