xref: /petsc/src/dm/impls/composite/packm.c (revision 28b400f66ebc7ae0049166a2294dfcd3df27e64b)
1 
2 #include <../src/dm/impls/composite/packimpl.h>       /*I  "petscdmcomposite.h"  I*/
3 
4 static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm,Mat *J)
5 {
6   const DM_Composite           *com = (DM_Composite*)dm->data;
7   const struct DMCompositeLink *rlink,*clink;
8   IS                           *isg;
9   Mat                          *submats;
10   PetscInt                     i,j,n;
11 
12   PetscFunctionBegin;
13   n = com->nDM;                 /* Total number of entries */
14 
15   /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
16    * checking and allows ISEqual to compare by identity instead of by contents. */
17   CHKERRQ(DMCompositeGetGlobalISs(dm,&isg));
18 
19   /* Get submatrices */
20   CHKERRQ(PetscMalloc1(n*n,&submats));
21   for (i=0,rlink=com->next; rlink; i++,rlink=rlink->next) {
22     for (j=0,clink=com->next; clink; j++,clink=clink->next) {
23       Mat sub = NULL;
24       if (i == j) {
25         CHKERRQ(DMCreateMatrix(rlink->dm,&sub));
26       } else PetscCheck(!com->FormCoupleLocations,PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Cannot manage off-diagonal parts yet");
27       submats[i*n+j] = sub;
28     }
29   }
30 
31   CHKERRQ(MatCreateNest(PetscObjectComm((PetscObject)dm),n,isg,n,isg,submats,J));
32 
33   /* Disown references */
34   for (i=0; i<n; i++) CHKERRQ(ISDestroy(&isg[i]));
35   CHKERRQ(PetscFree(isg));
36 
37   for (i=0; i<n*n; i++) {
38     if (submats[i]) CHKERRQ(MatDestroy(&submats[i]));
39   }
40   CHKERRQ(PetscFree(submats));
41   PetscFunctionReturn(0);
42 }
43 
/* Build the DMComposite Jacobian as a single (Seq/MPI)AIJ matrix.
   Pass 1 preallocates from the sparsity of each sub-DM's matrix (remapping
   sub-DM column indices to the packed global numbering); pass 2 copies the
   sub-DM matrix entries into the packed matrix.  With the
   -dmcomposite_dense_jacobian option a fully dense AIJ matrix is created
   instead (testing only).  Off-diagonal coupling locations, if any, are
   delegated to com->FormCoupleLocations. */
static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm,Mat *J)
{
  PetscErrorCode         ierr;  /* needed explicitly: MatPreallocateInitialize/Finalize are macros that assign to it */
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next;
  PetscInt               m,*dnz,*onz,i,j,mA;
  Mat                    Atmp;
  PetscMPIInt            rank;
  PetscBool              dense = PETSC_FALSE;

  PetscFunctionBegin;
  /* use global vector to determine layout needed for matrix */
  m = com->n;

  CHKERRQ(MatCreate(PetscObjectComm((PetscObject)dm),J));
  CHKERRQ(MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE));
  CHKERRQ(MatSetType(*J,dm->mattype));

  /*
     Extremely inefficient but will compute entire Jacobian for testing
  */
  CHKERRQ(PetscOptionsGetBool(((PetscObject)dm)->options,((PetscObject)dm)->prefix,"-dmcomposite_dense_jacobian",&dense,NULL));
  if (dense) {
    PetscInt    rstart,rend,*indices;
    PetscScalar *values;

    /* Preallocate every row fully dense (com->N = global size) and insert
       explicit zeros everywhere so any entry can later be set. */
    mA   = com->N;
    CHKERRQ(MatMPIAIJSetPreallocation(*J,mA,NULL,mA-m,NULL));
    CHKERRQ(MatSeqAIJSetPreallocation(*J,mA,NULL));

    CHKERRQ(MatGetOwnershipRange(*J,&rstart,&rend));
    CHKERRQ(PetscMalloc2(mA,&values,mA,&indices));
    CHKERRQ(PetscArrayzero(values,mA));
    for (i=0; i<mA; i++) indices[i] = i;
    for (i=rstart; i<rend; i++) {
      CHKERRQ(MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES));
    }
    CHKERRQ(PetscFree2(values,indices));
    CHKERRQ(MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY));
    CHKERRQ(MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(0);
  }

  CHKERRMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank));
  /* This macro declares the local arrays dnz/onz plus hidden helper variables
     (__rstart, __nrows, __start, __end) used below; hence the bare ierr form. */
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)dm),m,m,dnz,onz);CHKERRQ(ierr);
  /* Pass 1: loop over packed objects, handling one at a time, to count nonzeros */
  next = com->next;
  while (next) {
    PetscInt       nc,rstart,*ccols,maxnc;
    const PetscInt *cols,*rstarts;
    PetscMPIInt    proc;

    CHKERRQ(DMCreateMatrix(next->dm,&Atmp));
    CHKERRQ(MatGetOwnershipRange(Atmp,&rstart,NULL));
    CHKERRQ(MatGetOwnershipRanges(Atmp,&rstarts));
    CHKERRQ(MatGetLocalSize(Atmp,&mA,NULL));

    /* widest row of the sub-matrix determines the scratch-column buffer size */
    maxnc = 0;
    for (i=0; i<mA; i++) {
      CHKERRQ(MatGetRow(Atmp,rstart+i,&nc,NULL,NULL));
      maxnc = PetscMax(nc,maxnc);
      CHKERRQ(MatRestoreRow(Atmp,rstart+i,&nc,NULL,NULL));
    }
    CHKERRQ(PetscMalloc1(maxnc,&ccols));
    for (i=0; i<mA; i++) {
      CHKERRQ(MatGetRow(Atmp,rstart+i,&nc,&cols,NULL));
      /* remap the columns taking into account how much they are shifted on each process */
      for (j=0; j<nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc+1]) proc++;  /* find owning process of this column */
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      CHKERRQ(MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz));
      CHKERRQ(MatRestoreRow(Atmp,rstart+i,&nc,&cols,NULL));
    }
    CHKERRQ(PetscFree(ccols));
    CHKERRQ(MatDestroy(&Atmp));
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    /* __rstart, __nrows, __start, __end are provided by MatPreallocateInitialize above */
    CHKERRQ((*com->FormCoupleLocations)(dm,NULL,dnz,onz,__rstart,__nrows,__start,__end));
  }
  CHKERRQ(MatMPIAIJSetPreallocation(*J,0,dnz,0,onz));
  CHKERRQ(MatSeqAIJSetPreallocation(*J,0,dnz));
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  if (dm->prealloc_only) PetscFunctionReturn(0);

  /* Pass 2: copy the values of each sub-DM matrix into the packed matrix,
     using the same column remapping as in pass 1 */
  next = com->next;
  while (next) {
    PetscInt          nc,rstart,row,maxnc,*ccols;
    const PetscInt    *cols,*rstarts;
    const PetscScalar *values;
    PetscMPIInt       proc;

    CHKERRQ(DMCreateMatrix(next->dm,&Atmp));
    CHKERRQ(MatGetOwnershipRange(Atmp,&rstart,NULL));
    CHKERRQ(MatGetOwnershipRanges(Atmp,&rstarts));
    CHKERRQ(MatGetLocalSize(Atmp,&mA,NULL));
    maxnc = 0;
    for (i=0; i<mA; i++) {
      CHKERRQ(MatGetRow(Atmp,rstart+i,&nc,NULL,NULL));
      maxnc = PetscMax(nc,maxnc);
      CHKERRQ(MatRestoreRow(Atmp,rstart+i,&nc,NULL,NULL));
    }
    CHKERRQ(PetscMalloc1(maxnc,&ccols));
    for (i=0; i<mA; i++) {
      CHKERRQ(MatGetRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values));
      for (j=0; j<nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc+1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      row  = com->rstart+next->rstart+i;
      CHKERRQ(MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES));
      CHKERRQ(MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values));
    }
    CHKERRQ(PetscFree(ccols));
    CHKERRQ(MatDestroy(&Atmp));
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    CHKERRQ(MatGetOwnershipRange(*J,&__rstart,NULL));
    CHKERRQ((*com->FormCoupleLocations)(dm,*J,NULL,NULL,__rstart,0,0,0));
  }
  CHKERRQ(MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
174 
175 PetscErrorCode DMCreateMatrix_Composite(DM dm,Mat *J)
176 {
177   PetscBool              usenest;
178   ISLocalToGlobalMapping ltogmap;
179 
180   PetscFunctionBegin;
181   CHKERRQ(DMSetFromOptions(dm));
182   CHKERRQ(DMSetUp(dm));
183   CHKERRQ(PetscStrcmp(dm->mattype,MATNEST,&usenest));
184   if (usenest) {
185     CHKERRQ(DMCreateMatrix_Composite_Nest(dm,J));
186   } else {
187     CHKERRQ(DMCreateMatrix_Composite_AIJ(dm,J));
188   }
189 
190   CHKERRQ(DMGetLocalToGlobalMapping(dm,&ltogmap));
191   CHKERRQ(MatSetLocalToGlobalMapping(*J,ltogmap,ltogmap));
192   CHKERRQ(MatSetDM(*J,dm));
193   PetscFunctionReturn(0);
194 }
195