#include <../src/dm/impls/composite/packimpl.h> /*I "petscdmcomposite.h" I*/

static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm, Mat *J)
{
  const DM_Composite           *com = (DM_Composite *)dm->data;
  const struct DMCompositeLink *rlink, *clink;
  IS                           *isg;
  Mat                          *submats;
  PetscInt                      i, j, n;

  PetscFunctionBegin;
  n = com->nDM; /* Total number of entries */

  /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
   * checking and allows ISEqual to compare by identity instead of by contents. */
  PetscCall(DMCompositeGetGlobalISs(dm, &isg));

  /* Get submatrices */
  PetscCall(PetscMalloc1(n * n, &submats));
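  /* Only the diagonal blocks are created, one per packed DM; off-diagonal entries are
     left NULL, which MatNest treats as zero blocks. */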
  for (i = 0, rlink = com->next; rlink; i++, rlink = rlink->next) {
    for (j = 0, clink = com->next; clink; j++, clink = clink->next) {
      Mat sub = NULL;
      if (i == j) PetscCall(DMCreateMatrix(rlink->dm, &sub));
      else PetscCheck(!com->FormCoupleLocations, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Cannot manage off-diagonal parts yet");
      submats[i * n + j] = sub;
    }
  }

  PetscCall(MatCreateNest(PetscObjectComm((PetscObject)dm), n, isg, n, isg, submats, J));

  /* Disown references; MatCreateNest() has taken its own references to the index sets and submatrices */
  for (i = 0; i < n; i++) PetscCall(ISDestroy(&isg[i]));
  PetscCall(PetscFree(isg));

  for (i = 0; i < n * n; i++) {
    if (submats[i]) PetscCall(MatDestroy(&submats[i]));
  }
  PetscCall(PetscFree(submats));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm, Mat *J)
{
  DM_Composite           *com = (DM_Composite *)dm->data;
  struct DMCompositeLink *next;
  PetscInt                m, *dnz, *onz, i, j, mA;
  Mat                     Atmp;
  PetscMPIInt             rank;
  PetscBool               dense = PETSC_FALSE;

  PetscFunctionBegin;
  /* use global vector to determine layout needed for matrix */
  m = com->n;

  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, m, m, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(*J, dm->mattype));

  /*
     Extremely inefficient but will compute entire Jacobian for testing
  */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dmcomposite_dense_jacobian", &dense, NULL));
  if (dense) {
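    /* Preallocate every row at full width and insert explicit zeros everywhere, so any
       entry of the Jacobian can be set later without triggering new allocations. */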
    PetscInt     rstart, rend, *indices;
    PetscScalar *values;

    mA = com->N;
    PetscCall(MatMPIAIJSetPreallocation(*J, mA, NULL, mA - m, NULL));
    PetscCall(MatSeqAIJSetPreallocation(*J, mA, NULL));

    PetscCall(MatGetOwnershipRange(*J, &rstart, &rend));
    PetscCall(PetscMalloc2(mA, &values, mA, &indices));
    PetscCall(PetscArrayzero(values, mA));
    for (i = 0; i < mA; i++) indices[i] = i;
    for (i = rstart; i < rend; i++) PetscCall(MatSetValues(*J, 1, &i, mA, indices, values, INSERT_VALUES));
    PetscCall(PetscFree2(values, indices));
    PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
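  /* the caller asked to skip preallocation, so return the matrix with only its type and sizes set */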
  if (dm->prealloc_skip) PetscFunctionReturn(PETSC_SUCCESS);
  MatPreallocateBegin(PetscObjectComm((PetscObject)dm), m, m, dnz, onz);
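  /* Note: MatPreallocateBegin() is a macro that opens a scope closed by MatPreallocateEnd();
     it allocates and zeroes dnz/onz and declares the __rstart/__nrows/__start/__end
     variables referenced in the FormCoupleLocations() call below. */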
  /* loop over packed objects, handling one at a time */
  next = com->next;
  while (next) {
    PetscInt        nc, rstart, *ccols, maxnc;
    const PetscInt *cols, *rstarts;
    PetscMPIInt     proc;
    MatType         tmp, mat_type_old;

    /* force AIJ matrix to allow queries for preallocation */
    PetscCall(DMGetMatType(next->dm, &tmp));
    PetscCall(PetscStrallocpy(tmp, (char **)&mat_type_old));
    PetscCall(DMSetMatType(next->dm, MATAIJ));
    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));

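    /* first pass: find the widest row so a single column buffer can be reused below */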
    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, &cols, NULL));
      /* remap the columns, taking into account how much they are shifted on each process */
      for (j = 0; j < nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      PetscCall(MatPreallocateSet(com->rstart + next->rstart + i, nc, ccols, dnz, onz));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, &cols, NULL));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    PetscCall(DMSetMatType(next->dm, mat_type_old));
    PetscCall(PetscFree(mat_type_old));
    next = next->next;
  }
  if (com->FormCoupleLocations) PetscCall((*com->FormCoupleLocations)(dm, NULL, dnz, onz, __rstart, __nrows, __start, __end));
  PetscCall(MatMPIAIJSetPreallocation(*J, 0, dnz, 0, onz));
  PetscCall(MatSeqAIJSetPreallocation(*J, 0, dnz));
  MatPreallocateEnd(dnz, onz);

  if (dm->prealloc_only) PetscFunctionReturn(PETSC_SUCCESS);

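  /* second sweep: insert the sub-DM matrices' explicit zeros so the diagonal blocks of
     the composite matrix carry the full nonzero pattern from the start */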
  next = com->next;
  while (next) {
    PetscInt           nc, rstart, row, maxnc, *ccols;
    const PetscInt    *cols, *rstarts;
    const PetscScalar *values;
    PetscMPIInt        proc;
    MatType            tmp, mat_type_old;

    /* force an AIJ matrix so rows can be queried while zeroing the initial matrix */
    PetscCall(DMGetMatType(next->dm, &tmp));
    PetscCall(PetscStrallocpy(tmp, (char **)&mat_type_old));
    PetscCall(DMSetMatType(next->dm, MATAIJ));
    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));
    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, &cols, &values));
      for (j = 0; j < nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      row = com->rstart + next->rstart + i;
      PetscCall(MatSetValues(*J, 1, &row, nc, ccols, values, INSERT_VALUES));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, &cols, &values));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    PetscCall(DMSetMatType(next->dm, mat_type_old));
    PetscCall(PetscFree(mat_type_old));
    next = next->next;
  }
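  /* give the user-provided coupling callback a chance to insert its entries now that the matrix exists */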
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    PetscCall(MatGetOwnershipRange(*J, &__rstart, NULL));
    PetscCall((*com->FormCoupleLocations)(dm, *J, NULL, NULL, __rstart, 0, 0, 0));
  }
  PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateMatrix_Composite(DM dm, Mat *J)
{
  PetscBool              usenest;
  ISLocalToGlobalMapping ltogmap;

  PetscFunctionBegin;
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMSetUp(dm));
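  /* dispatch on the requested matrix type: MATNEST builds a block matrix from the packed
     DMs' matrices, anything else goes through the AIJ preallocation path */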
  PetscCall(PetscStrcmp(dm->mattype, MATNEST, &usenest));
  if (usenest) PetscCall(DMCreateMatrix_Composite_Nest(dm, J));
  else PetscCall(DMCreateMatrix_Composite_AIJ(dm, J));

  PetscCall(DMGetLocalToGlobalMapping(dm, &ltogmap));
  PetscCall(MatSetLocalToGlobalMapping(*J, ltogmap, ltogmap));
  PetscCall(MatSetDM(*J, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
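
/* Usage sketch (an assumption about the typical call flow, not part of this file):
   the composite matrix is normally obtained through the generic DM interface, e.g.

     DM  pack;
     Mat J;
     DMCompositeCreate(PETSC_COMM_WORLD, &pack);
     DMCompositeAddDM(pack, dm1);  // dm1, dm2: previously created sub-DMs (hypothetical names)
     DMCompositeAddDM(pack, dm2);
     DMSetMatType(pack, MATNEST);  // or MATAIJ for the monolithic preallocated format
     DMCreateMatrix(pack, &J);     // dispatches to DMCreateMatrix_Composite() above

   MatSetValuesLocal() can then be used on J through the local-to-global mapping set above. */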