xref: /petsc/src/dm/impls/composite/packm.c (revision daad07d386296cdcbb87925ef5f1432ee4a24ec4)
1 
2 #include <../src/dm/impls/composite/packimpl.h>       /*I  "petscdmcomposite.h"  I*/
3 
4 static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm,Mat *J)
5 {
6   const DM_Composite           *com = (DM_Composite*)dm->data;
7   const struct DMCompositeLink *rlink,*clink;
8   IS                           *isg;
9   Mat                          *submats;
10   PetscInt                     i,j,n;
11 
12   PetscFunctionBegin;
13   n = com->nDM;                 /* Total number of entries */
14 
15   /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
16    * checking and allows ISEqual to compare by identity instead of by contents. */
17   PetscCall(DMCompositeGetGlobalISs(dm,&isg));
18 
19   /* Get submatrices */
20   PetscCall(PetscMalloc1(n*n,&submats));
21   for (i=0,rlink=com->next; rlink; i++,rlink=rlink->next) {
22     for (j=0,clink=com->next; clink; j++,clink=clink->next) {
23       Mat sub = NULL;
24       if (i == j) {
25         PetscCall(DMCreateMatrix(rlink->dm,&sub));
26       } else PetscCheck(!com->FormCoupleLocations,PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Cannot manage off-diagonal parts yet");
27       submats[i*n+j] = sub;
28     }
29   }
30 
31   PetscCall(MatCreateNest(PetscObjectComm((PetscObject)dm),n,isg,n,isg,submats,J));
32 
33   /* Disown references */
34   for (i=0; i<n; i++) PetscCall(ISDestroy(&isg[i]));
35   PetscCall(PetscFree(isg));
36 
37   for (i=0; i<n*n; i++) {
38     if (submats[i]) PetscCall(MatDestroy(&submats[i]));
39   }
40   PetscCall(PetscFree(submats));
41   PetscFunctionReturn(0);
42 }
43 
/* Build a monolithic (Seq/MPI)AIJ matrix for a DMComposite by creating each
   sub-DM's matrix, remapping its column indices into the packed global
   numbering, and copying the entries in.  Two passes over the sub-DMs are
   made: one to compute preallocation, one to insert values. */
static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm,Mat *J)
{
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next;
  PetscInt               m,*dnz,*onz,i,j,mA;
  Mat                    Atmp;
  PetscMPIInt            rank;
  PetscBool              dense = PETSC_FALSE;

  PetscFunctionBegin;
  /* use global vector to determine layout needed for matrix */
  m = com->n;

  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm),J));
  PetscCall(MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE));
  PetscCall(MatSetType(*J,dm->mattype));

  /*
     Extremely inefficient but will compute entire Jacobian for testing
  */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options,((PetscObject)dm)->prefix,"-dmcomposite_dense_jacobian",&dense,NULL));
  if (dense) {
    PetscInt    rstart,rend,*indices;
    PetscScalar *values;

    /* Preallocate every column in every local row (fully dense in AIJ format) */
    mA   = com->N;
    PetscCall(MatMPIAIJSetPreallocation(*J,mA,NULL,mA-m,NULL));
    PetscCall(MatSeqAIJSetPreallocation(*J,mA,NULL));

    /* Insert explicit zeros everywhere so the nonzero pattern is fully populated */
    PetscCall(MatGetOwnershipRange(*J,&rstart,&rend));
    PetscCall(PetscMalloc2(mA,&values,mA,&indices));
    PetscCall(PetscArrayzero(values,mA));
    for (i=0; i<mA; i++) indices[i] = i;
    for (i=rstart; i<rend; i++) {
      PetscCall(MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES));
    }
    PetscCall(PetscFree2(values,indices));
    PetscCall(MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(0);
  }

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank));
  /* NOTE: MatPreallocateBegin is a macro that opens a scope and declares helper
     variables (including __rstart, __nrows, __start, __end) used below; it must
     be paired with MatPreallocateEnd, which frees dnz/onz and closes the scope. */
  MatPreallocateBegin(PetscObjectComm((PetscObject)dm),m,m,dnz,onz);
  /* loop over packed objects, handling one at at time */
  next = com->next;
  while (next) {
    PetscInt       nc,rstart,*ccols,maxnc;
    const PetscInt *cols,*rstarts;
    PetscMPIInt    proc;

    /* Create the sub-DM's matrix only to read off its nonzero pattern */
    PetscCall(DMCreateMatrix(next->dm,&Atmp));
    PetscCall(MatGetOwnershipRange(Atmp,&rstart,NULL));
    PetscCall(MatGetOwnershipRanges(Atmp,&rstarts));
    PetscCall(MatGetLocalSize(Atmp,&mA,NULL));

    /* First sweep: find the widest row so one scratch buffer suffices */
    maxnc = 0;
    for (i=0; i<mA; i++) {
      PetscCall(MatGetRow(Atmp,rstart+i,&nc,NULL,NULL));
      maxnc = PetscMax(nc,maxnc);
      PetscCall(MatRestoreRow(Atmp,rstart+i,&nc,NULL,NULL));
    }
    PetscCall(PetscMalloc1(maxnc,&ccols));
    for (i=0; i<mA; i++) {
      PetscCall(MatGetRow(Atmp,rstart+i,&nc,&cols,NULL));
      /* remap the columns taking into how much they are shifted on each process */
      for (j=0; j<nc; j++) {
        /* Find the rank owning column cols[j] in the sub-matrix layout, then
           translate it to the packed global numbering via next->grstarts */
        proc = 0;
        while (cols[j] >= rstarts[proc+1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      /* Record this row's diagonal/off-diagonal nonzero counts in dnz/onz */
      PetscCall(MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz));
      PetscCall(MatRestoreRow(Atmp,rstart+i,&nc,&cols,NULL));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    next = next->next;
  }
  /* __rstart, __nrows, __start, __end are provided by the MatPreallocateBegin macro above */
  if (com->FormCoupleLocations) PetscCall((*com->FormCoupleLocations)(dm,NULL,dnz,onz,__rstart,__nrows,__start,__end));
  /* Preallocation must happen before MatPreallocateEnd, which frees dnz/onz */
  PetscCall(MatMPIAIJSetPreallocation(*J,0,dnz,0,onz));
  PetscCall(MatSeqAIJSetPreallocation(*J,0,dnz));
  MatPreallocateEnd(dnz,onz);

  if (dm->prealloc_only) PetscFunctionReturn(0);

  /* Second pass: same traversal and column remapping, this time copying values */
  next = com->next;
  while (next) {
    PetscInt          nc,rstart,row,maxnc,*ccols;
    const PetscInt    *cols,*rstarts;
    const PetscScalar *values;
    PetscMPIInt       proc;

    PetscCall(DMCreateMatrix(next->dm,&Atmp));
    PetscCall(MatGetOwnershipRange(Atmp,&rstart,NULL));
    PetscCall(MatGetOwnershipRanges(Atmp,&rstarts));
    PetscCall(MatGetLocalSize(Atmp,&mA,NULL));
    maxnc = 0;
    for (i=0; i<mA; i++) {
      PetscCall(MatGetRow(Atmp,rstart+i,&nc,NULL,NULL));
      maxnc = PetscMax(nc,maxnc);
      PetscCall(MatRestoreRow(Atmp,rstart+i,&nc,NULL,NULL));
    }
    PetscCall(PetscMalloc1(maxnc,&ccols));
    for (i=0; i<mA; i++) {
      PetscCall(MatGetRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values));
      for (j=0; j<nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc+1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      row  = com->rstart+next->rstart+i;
      PetscCall(MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES));
      PetscCall(MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    next = next->next;
  }
  /* Let the user fill in the coupling entries between sub-DMs, if registered */
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    PetscCall(MatGetOwnershipRange(*J,&__rstart,NULL));
    PetscCall((*com->FormCoupleLocations)(dm,*J,NULL,NULL,__rstart,0,0,0));
  }
  PetscCall(MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
171 
172 PetscErrorCode DMCreateMatrix_Composite(DM dm,Mat *J)
173 {
174   PetscBool              usenest;
175   ISLocalToGlobalMapping ltogmap;
176 
177   PetscFunctionBegin;
178   PetscCall(DMSetFromOptions(dm));
179   PetscCall(DMSetUp(dm));
180   PetscCall(PetscStrcmp(dm->mattype,MATNEST,&usenest));
181   if (usenest) PetscCall(DMCreateMatrix_Composite_Nest(dm,J));
182   else PetscCall(DMCreateMatrix_Composite_AIJ(dm,J));
183 
184   PetscCall(DMGetLocalToGlobalMapping(dm,&ltogmap));
185   PetscCall(MatSetLocalToGlobalMapping(*J,ltogmap,ltogmap));
186   PetscCall(MatSetDM(*J,dm));
187   PetscFunctionReturn(0);
188 }
189