xref: /petsc/src/dm/impls/composite/packm.c (revision fbf9dbe564678ed6eff1806adbc4c4f01b9743f4)

#include <../src/dm/impls/composite/packimpl.h> /*I  "petscdmcomposite.h"  I*/

static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm, Mat *J)
{
  const DM_Composite           *com = (DM_Composite *)dm->data;
  const struct DMCompositeLink *rlink, *clink;
  IS                           *isg;
  Mat                          *submats;
  PetscInt                      i, j, n;

  PetscFunctionBegin;
  n = com->nDM; /* Total number of entries */

  /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
   * checking and allows ISEqual to compare by identity instead of by contents. */
  PetscCall(DMCompositeGetGlobalISs(dm, &isg));
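  /* isg[i] is the global index set of the i-th packed DM; MatCreateNest() takes its own references to
     these index sets (and to the submatrices), so the local references are released further below */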

  /* Get submatrices */
  PetscCall(PetscMalloc1(n * n, &submats));
  for (i = 0, rlink = com->next; rlink; i++, rlink = rlink->next) {
    for (j = 0, clink = com->next; clink; j++, clink = clink->next) {
      Mat sub = NULL;
      if (i == j) {
        PetscCall(DMCreateMatrix(rlink->dm, &sub));
      } else PetscCheck(!com->FormCoupleLocations, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Cannot manage off-diagonal parts yet");
      submats[i * n + j] = sub;
    }
  }
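  /* submats is a row-major n-by-n array; off-diagonal entries are left NULL, which MatCreateNest()
     treats as zero blocks */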

  PetscCall(MatCreateNest(PetscObjectComm((PetscObject)dm), n, isg, n, isg, submats, J));

  /* Disown references */
  for (i = 0; i < n; i++) PetscCall(ISDestroy(&isg[i]));
  PetscCall(PetscFree(isg));

  for (i = 0; i < n * n; i++) {
    if (submats[i]) PetscCall(MatDestroy(&submats[i]));
  }
  PetscCall(PetscFree(submats));
  PetscFunctionReturn(PETSC_SUCCESS);
}
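
/*
  Illustrative usage sketch (not part of this file; pack, dmA, dmB, and J are hypothetical names):
  selecting the MATNEST format so that DMCreateMatrix() ends up in DMCreateMatrix_Composite_Nest().

    DM  pack;
    Mat J;

    PetscCall(DMCompositeCreate(PETSC_COMM_WORLD, &pack));
    PetscCall(DMCompositeAddDM(pack, dmA));
    PetscCall(DMCompositeAddDM(pack, dmB));
    PetscCall(DMSetMatType(pack, MATNEST)); // or run with -dm_mat_type nest
    PetscCall(DMCreateMatrix(pack, &J));    // dispatches through DMCreateMatrix_Composite() below
*/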

static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm, Mat *J)
{
  DM_Composite           *com = (DM_Composite *)dm->data;
  struct DMCompositeLink *next;
  PetscInt                m, *dnz, *onz, i, j, mA;
  Mat                     Atmp;
  PetscMPIInt             rank;
  PetscBool               dense = PETSC_FALSE;

  PetscFunctionBegin;
  /* use global vector to determine layout needed for matrix */
  m = com->n;

  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, m, m, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(*J, dm->mattype));

  /*
     Extremely inefficient, but computes the entire (dense) Jacobian; intended only for testing
  */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dmcomposite_dense_jacobian", &dense, NULL));
  if (dense) {
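    /* Preallocate every local row for the full global number of columns and insert explicit zeros
       everywhere, so the matrix can hold an arbitrary (dense) Jacobian */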
    PetscInt     rstart, rend, *indices;
    PetscScalar *values;

    mA = com->N;
    PetscCall(MatMPIAIJSetPreallocation(*J, mA, NULL, mA - m, NULL));
    PetscCall(MatSeqAIJSetPreallocation(*J, mA, NULL));

    PetscCall(MatGetOwnershipRange(*J, &rstart, &rend));
    PetscCall(PetscMalloc2(mA, &values, mA, &indices));
    PetscCall(PetscArrayzero(values, mA));
    for (i = 0; i < mA; i++) indices[i] = i;
    for (i = rstart; i < rend; i++) PetscCall(MatSetValues(*J, 1, &i, mA, indices, values, INSERT_VALUES));
    PetscCall(PetscFree2(values, indices));
    PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  MatPreallocateBegin(PetscObjectComm((PetscObject)dm), m, m, dnz, onz);
  /* loop over the packed objects, handling one at a time */
  next = com->next;
  while (next) {
    PetscInt        nc, rstart, *ccols, maxnc;
    const PetscInt *cols, *rstarts;
    PetscMPIInt     proc;

    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));
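    /* rstart/rstarts describe the row layout of the sub-matrix Atmp, while next->grstarts records
       where this sub-DM's rows begin, on each process, in the composite (packed) global numbering */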

    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
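    /* maxnc is the widest row of the sub-matrix; a single scratch array of that size is reused to
       hold the remapped column indices of each row */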
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, &cols, NULL));
      /* remap the columns from the sub-matrix numbering to the composite numbering, taking into
         account how much each process's block is shifted */
      for (j = 0; j < nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      PetscCall(MatPreallocateSet(com->rstart + next->rstart + i, nc, ccols, dnz, onz));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, &cols, NULL));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    next = next->next;
  }
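  /* Optional user hook (set with DMCompositeSetCoupling()) that adds the coupling between the packed
     DMs to the preallocation counts; it is called again below, with the matrix, to insert the values */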
  if (com->FormCoupleLocations) PetscCall((*com->FormCoupleLocations)(dm, NULL, dnz, onz, __rstart, __nrows, __start, __end));
  PetscCall(MatMPIAIJSetPreallocation(*J, 0, dnz, 0, onz));
  PetscCall(MatSeqAIJSetPreallocation(*J, 0, dnz));
  MatPreallocateEnd(dnz, onz);

  if (dm->prealloc_only) PetscFunctionReturn(PETSC_SUCCESS);

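  /* Second pass: extract each row of every sub-matrix again, remap the columns exactly as in the
     preallocation pass, and insert the values into the composite matrix */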
  next = com->next;
  while (next) {
    PetscInt           nc, rstart, row, maxnc, *ccols;
    const PetscInt    *cols, *rstarts;
    const PetscScalar *values;
    PetscMPIInt        proc;

    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));
    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, (const PetscInt **)&cols, &values));
      for (j = 0; j < nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      row = com->rstart + next->rstart + i;
      PetscCall(MatSetValues(*J, 1, &row, nc, ccols, values, INSERT_VALUES));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, (const PetscInt **)&cols, &values));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    PetscCall(MatGetOwnershipRange(*J, &__rstart, NULL));
    PetscCall((*com->FormCoupleLocations)(dm, *J, NULL, NULL, __rstart, 0, 0, 0));
  }
  PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
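
/*
  Note on dm->prealloc_only above: a caller may ask for a preallocated but unfilled matrix, in which
  case DMCreateMatrix_Composite_AIJ() returns right after the preallocation phase. Illustrative
  sketch (pack is a hypothetical DMCOMPOSITE):

    PetscCall(DMSetMatrixPreallocateOnly(pack, PETSC_TRUE));
    PetscCall(DMCreateMatrix(pack, &J)); // J is preallocated; no values inserted, not assembled
*/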

PetscErrorCode DMCreateMatrix_Composite(DM dm, Mat *J)
{
  PetscBool              usenest;
  ISLocalToGlobalMapping ltogmap;

  PetscFunctionBegin;
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMSetUp(dm));
  PetscCall(PetscStrcmp(dm->mattype, MATNEST, &usenest));
  if (usenest) PetscCall(DMCreateMatrix_Composite_Nest(dm, J));
  else PetscCall(DMCreateMatrix_Composite_AIJ(dm, J));

  PetscCall(DMGetLocalToGlobalMapping(dm, &ltogmap));
  PetscCall(MatSetLocalToGlobalMapping(*J, ltogmap, ltogmap));
  PetscCall(MatSetDM(*J, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
187