/*
   Used by MPIAIJ, MPIBAIJ, and MPISBAIJ to reduce code duplication

     define TYPE to AIJ, BAIJ, or SBAIJ
            additionally define TYPE_SBAIJ for SBAIJ matrices
*/

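/*
   MatSetValues_MPI_Hash - the hash-phase MatSetValues(): values for locally owned rows are inserted
   into the hash-based sequential matrices a->A (the "diagonal" block, indexed by local column) and
   a->B (the "off-diagonal" block, indexed by global column during the hash phase); values destined
   for rows owned by other MPI ranks are collected in the matrix stash and communicated at assembly
*/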
static PetscErrorCode MatSetValues_MPI_Hash(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;
  const PetscInt rStart         = A->rmap->rstart;
  const PetscInt rEnd           = A->rmap->rend;
  const PetscInt cStart         = A->cmap->rstart;
  const PetscInt cEnd           = A->cmap->rend;
#if defined(TYPE_SBAIJ)
  const PetscInt bs = A->rmap->bs;
#endif
  const PetscBool ignorezeroentries = ((Mat_SeqAIJ *)a->A->data)->ignorezeroentries;

  PetscFunctionBegin;
  for (PetscInt r = 0; r < m; ++r) {
    PetscScalar value;
    if (rows[r] < 0) continue;
    if (rows[r] < rStart || rows[r] >= rEnd) {
      /* off-process row: stash the values so they can be sent to the owning rank at assembly time */
      PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[r]);
      if (!a->donotstash) {
        A->assembled = PETSC_FALSE;
        if (a->roworiented) {
          PetscCall(MatStashValuesRow_Private(&A->stash, rows[r], n, cols, PetscSafePointerPlusOffset(values, r * n), (PetscBool)(ignorezeroentries && (addv == ADD_VALUES))));
        } else {
          PetscCall(MatStashValuesCol_Private(&A->stash, rows[r], n, cols, PetscSafePointerPlusOffset(values, r), m, (PetscBool)(ignorezeroentries && (addv == ADD_VALUES))));
        }
      }
    } else {
      for (PetscInt c = 0; c < n; ++c) {
#if defined(TYPE_SBAIJ)
        /* SBAIJ stores only the upper block triangle; skip entries below the block diagonal */
        if (cols[c] / bs < rows[r] / bs) continue;
#else
        if (cols[c] < 0) continue;
#endif
        value = values ? (a->roworiented ? values[r * n + c] : values[r + m * c]) : 0;
        if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES) && rows[r] != cols[c]) continue;
        if (cols[c] >= cStart && cols[c] < cEnd) {
          /* column lies in the local ownership range: insert into the diagonal block */
          PetscCall(MatSetValue(a->A, rows[r] - rStart, cols[c] - cStart, value, addv));
        } else if (!ignorezeroentries || value != 0.0) {
          /* MPIAIJ never inserts a zero value into B when ignoring zero entries */
          PetscCall(MatSetValue(a->B, rows[r] - rStart, cols[c], value, addv));
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

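/*
   MatAssemblyBegin_MPI_Hash - starts communicating the stashed off-process entries to the ranks that own them
*/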
static PetscErrorCode MatAssemblyBegin_MPI_Hash(Mat A, PETSC_UNUSED MatAssemblyType type)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;
  PetscInt nstash, reallocs;

  PetscFunctionBegin;
  if (a->donotstash || A->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

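/*
   MatFinishScatterAndSetValues_MPI_Hash - receives the stashed off-process entries and inserts each
   contiguous run of values belonging to one row with a single call to MatSetValues_MPI_Hash()
*/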
static PetscErrorCode MatFinishScatterAndSetValues_MPI_Hash(Mat A)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;
  PetscMPIInt  n;
  PetscScalar *val;
  PetscInt    *row, *col;
  PetscInt     j, ncols, flg, rstart;

  PetscFunctionBegin;
  if (!a->donotstash && !A->nooffprocentries) {
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (PetscInt i = 0; i < n;) {
        /* Now identify the consecutive values belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_MPI_Hash(A, 1, row + i, ncols, col + i, val + i, A->insertmode));
        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&A->stash));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

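/*
   MatAssemblyEnd_MPI_Hash - completes the stash scatter and value insertion; on MAT_FINAL_ASSEMBLY it
   restores the matrix operations saved in MatSetUp_MPI_Hash() and assembles a->A, a->B, and then A itself
*/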
static PetscErrorCode MatAssemblyEnd_MPI_Hash(Mat A, MatAssemblyType type)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;

  PetscFunctionBegin;
  PetscCall(MatFinishScatterAndSetValues_MPI_Hash(A));
  if (type != MAT_FINAL_ASSEMBLY) PetscFunctionReturn(PETSC_SUCCESS);

  A->insertmode = NOT_SET_VALUES; /* this was set by the previous calls to MatSetValues() */

  A->ops[0]      = a->cops;
  A->hash_active = PETSC_FALSE;

  PetscCall(MatAssemblyBegin(a->A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(a->A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyBegin(a->B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(a->B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

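/*
   MatCopyHashToXAIJ_MPI_Hash - copies the values accumulated in the hash-based blocks of A into the
   corresponding blocks of B and assembles B
*/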
static PetscErrorCode MatCopyHashToXAIJ_MPI_Hash(Mat A, Mat B)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data, *b = (PetscConcat(Mat_MPI, TYPE) *)B->data;

  PetscFunctionBegin;
  /* there is no harm in performing the stash scatters for A now, even if A != B */
  PetscCall(MatAssemblyBegin_MPI_Hash(A, /*unused*/ MAT_FINAL_ASSEMBLY));
  PetscCall(MatFinishScatterAndSetValues_MPI_Hash(A));

  PetscCall(MatCopyHashToXAIJ(a->A, b->A));
  PetscCall(MatCopyHashToXAIJ(a->B, b->B));
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

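/*
   MatDestroy_MPI_Hash - destroys the stash and the hash-based sequential blocks, then calls the
   original destroy routine saved in a->cops
*/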
static PetscErrorCode MatDestroy_MPI_Hash(Mat A)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;

  PetscFunctionBegin;
  PetscCall(MatStashDestroy_Private(&A->stash));
  PetscCall(MatDestroy(&a->A));
  PetscCall(MatDestroy(&a->B));
  PetscCall((*a->cops.destroy)(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

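/*
   MatZeroEntries_MPI_Hash - intentionally a no-op during the hash-based insertion phase
*/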
static PetscErrorCode MatZeroEntries_MPI_Hash(PETSC_UNUSED Mat A)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

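/*
   MatSetRandom_MPI_Hash - errors out: setting random values requires the matrix to be preallocated first
*/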
static PetscErrorCode MatSetRandom_MPI_Hash(Mat A, PETSC_UNUSED PetscRandom r)
{
  SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Must set preallocation first");
}

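/*
   MatSetUp_MPI_Hash - sets up the layouts, creates the hash-based sequential blocks a->A and a->B,
   saves the current matrix operations in a->cops, and installs the _MPI_Hash operations above

   A minimal usage sketch (an assumption for illustration, not taken from this header): the hash-based
   path is used when an MPI matrix is set up without any preallocation, e.g.

     MatCreate(PETSC_COMM_WORLD, &A);
     MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, M, N);
     MatSetType(A, MATMPIAIJ);
     MatSetUp(A);                            // no preallocation: the hash-phase operations are installed
     MatSetValues(A, 1, &i, 1, &j, &v, INSERT_VALUES);
     MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);  // restores the standard operations
*/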
static PetscErrorCode MatSetUp_MPI_Hash(Mat A)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;
  PetscMPIInt size;
#if !defined(TYPE_AIJ)
  PetscInt bs;
#endif

  PetscFunctionBegin;
  PetscCall(PetscInfo(A, "Using hash-based MatSetValues() because no preallocation was provided\n"));
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  if (A->rmap->bs < 1) A->rmap->bs = 1;
  if (A->cmap->bs < 1) A->cmap->bs = 1;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));

#if !defined(TYPE_AIJ)
  PetscCall(MatGetBlockSize(A, &bs));
  /* these values are normally set in MatMPIBAIJSetPreallocation()/MatMPISBAIJSetPreallocation() */
  a->bs2 = bs * bs;
  a->mbs = A->rmap->n / bs;
  a->nbs = A->cmap->n / bs;
  a->Mbs = A->rmap->N / bs;
  a->Nbs = A->cmap->N / bs;

  for (PetscInt i = 0; i <= a->size; i++) a->rangebs[i] = A->rmap->range[i] / bs;
  a->rstartbs = A->rmap->rstart / bs;
  a->rendbs   = A->rmap->rend / bs;
  a->cstartbs = A->cmap->rstart / bs;
  a->cendbs   = A->cmap->rend / bs;
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), A->rmap->bs, &A->bstash));
#endif

  /* the "diagonal" block: a sequential matrix over the local rows and local columns */
  PetscCall(MatCreate(PETSC_COMM_SELF, &a->A));
  PetscCall(MatSetSizes(a->A, A->rmap->n, A->cmap->n, A->rmap->n, A->cmap->n));
  PetscCall(MatSetBlockSizesFromMats(a->A, A, A));
#if defined(SUB_TYPE_CUSPARSE)
  PetscCall(MatSetType(a->A, MATSEQAIJCUSPARSE));
#else
  PetscCall(MatSetType(a->A, PetscConcat(MATSEQ, TYPE)));
#endif
  PetscCall(MatSetUp(a->A));

  /* the "off-diagonal" block: local rows by all global columns (empty on one rank); columns are
     addressed by their global index during the hash phase */
  PetscCall(MatCreate(PETSC_COMM_SELF, &a->B));
  PetscCall(MatSetSizes(a->B, A->rmap->n, size > 1 ? A->cmap->N : 0, A->rmap->n, size > 1 ? A->cmap->N : 0));
  PetscCall(MatSetBlockSizesFromMats(a->B, A, A));
#if defined(TYPE_SBAIJ)
  PetscCall(MatSetType(a->B, MATSEQBAIJ));
#else
  #if defined(SUB_TYPE_CUSPARSE)
  PetscCall(MatSetType(a->B, MATSEQAIJCUSPARSE));
  #else
  PetscCall(MatSetType(a->B, PetscConcat(MATSEQ, TYPE)));
  #endif
#endif
  PetscCall(MatSetUp(a->B));

  /* keep a record of the operations so they can be reset when the hash handling is complete */
  a->cops                  = A->ops[0];
  A->ops->assemblybegin    = MatAssemblyBegin_MPI_Hash;
  A->ops->assemblyend      = MatAssemblyEnd_MPI_Hash;
  A->ops->setvalues        = MatSetValues_MPI_Hash;
  A->ops->destroy          = MatDestroy_MPI_Hash;
  A->ops->zeroentries      = MatZeroEntries_MPI_Hash;
  A->ops->setrandom        = MatSetRandom_MPI_Hash;
  A->ops->copyhashtoxaij   = MatCopyHashToXAIJ_MPI_Hash;
  A->ops->setvaluesblocked = NULL;

  A->preallocated = PETSC_TRUE;
  A->hash_active  = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
233