xref: /petsc/src/mat/impls/elemental/matelem.cxx (revision efa12513287cff49a2b9648ae83199dcbfaad71a)
1 #include <petsc/private/petscelemental.h>
2 
3 /*
4     The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
5   is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
6 */
7 static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
8 
9 static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
10 {
11   PetscErrorCode ierr;
12   Mat_Elemental  *a = (Mat_Elemental*)A->data;
13   PetscBool      iascii;
14 
15   PetscFunctionBegin;
16   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
17   if (iascii) {
18     PetscViewerFormat format;
19     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20     if (format == PETSC_VIEWER_ASCII_INFO) {
21       /* call elemental viewing function */
22       ierr = PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");CHKERRQ(ierr);
23       ierr = PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());CHKERRQ(ierr);
24       ierr = PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());CHKERRQ(ierr);
25       if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
26         /* call elemental viewing function */
27         ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");CHKERRQ(ierr);
28       }
29 
30     } else if (format == PETSC_VIEWER_DEFAULT) {
31       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
32       El::Print( *a->emat, "Elemental matrix (cyclic ordering)");
33       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
34       if (A->factortype == MAT_FACTOR_NONE){
35         Mat Adense;
36         ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
37         ierr = MatView(Adense,viewer);CHKERRQ(ierr);
38         ierr = MatDestroy(&Adense);CHKERRQ(ierr);
39       }
40     } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Format");
41   } else {
42     /* convert to dense format and call MatView() */
43     Mat Adense;
44     ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
45     ierr = MatView(Adense,viewer);CHKERRQ(ierr);
46     ierr = MatDestroy(&Adense);CHKERRQ(ierr);
47   }
48   PetscFunctionReturn(0);
49 }
50 
/*
   MatGetInfo_Elemental - Reports matrix storage statistics.

   Only the locally allocated entry count is available from Elemental
   (AllocatedMemory()); nz_used is assumed equal to nz_allocated since the
   storage is dense. NOTE(review): MAT_GLOBAL_MAX is not implemented (the
   branch is empty) and MAT_GLOBAL_SUM returns the LOCAL count without the
   MPI reduction sketched in the commented-out code — callers requesting
   global info get per-rank numbers. TODO: confirm whether this is acceptable
   or the reductions should be completed.
*/
static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  PetscFunctionBegin;
  info->block_size = 1.0;

  if (flag == MAT_LOCAL) {
    info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated;
  } else if (flag == MAT_GLOBAL_MAX) {
    //ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
    /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
  } else if (flag == MAT_GLOBAL_SUM) {
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
    info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
    //ierr = MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
  }

  /* dense storage: no unneeded entries, fill ratios are meaningless */
  info->nz_unneeded       = 0.0;
  info->assemblies        = A->num_ass;
  info->mallocs           = 0;
  info->memory            = ((PetscObject)A)->mem;
  info->fill_ratio_given  = 0; /* determined by Elemental */
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(0);
}
82 
83 PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
84 {
85   Mat_Elemental  *a = (Mat_Elemental*)A->data;
86 
87   PetscFunctionBegin;
88   switch (op) {
89   case MAT_NEW_NONZERO_LOCATIONS:
90   case MAT_NEW_NONZERO_LOCATION_ERR:
91   case MAT_NEW_NONZERO_ALLOCATION_ERR:
92   case MAT_SYMMETRIC:
93   case MAT_SORTED_FULL:
94   case MAT_HERMITIAN:
95     break;
96   case MAT_ROW_ORIENTED:
97     a->roworiented = flg;
98     break;
99   default:
100     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
101   }
102   PetscFunctionReturn(0);
103 }
104 
/*
   MatSetValues_Elemental - Inserts or adds a logically dense block of values.

   Each (row,col) in PETSc numbering is translated to Elemental's cyclic
   numbering via P2RO (PETSc -> rank/offset) then RO2E (rank/offset ->
   Elemental index). Locally owned entries are Set/Updated immediately.
   Off-process entries are only supported with ADD_VALUES: a first pass
   counts them so that exactly numQueues slots can be Reserve()d, and a
   second pass queues each one with QueueUpdate (the queued updates are
   presumably flushed during assembly — not visible in this block; confirm
   against MatAssemblyEnd).

   The row-oriented and column-oriented branches are identical except for
   the loop nesting order and the vals indexing (vals[i*nc+j] vs vals[i+j*nr]).
   Negative row/column indices are skipped, matching PETSc convention.
*/
static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  PetscFunctionBegin;
  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    /* Pass 1: apply local entries, count off-process entries */
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol)){ /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* Pass 2: queue the off-process updates now that space is reserved */
    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues);
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i*nc+j]);
        }
      }
    }
  } else { /* columnoriented */
    /* Same two-pass structure with the loops swapped and vals column-major */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol)){ /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues);
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);

      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i+j*nr]);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
202 
203 static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
204 {
205   Mat_Elemental         *a = (Mat_Elemental*)A->data;
206   PetscErrorCode        ierr;
207   const PetscElemScalar *x;
208   PetscElemScalar       *y;
209   PetscElemScalar       one = 1,zero = 0;
210 
211   PetscFunctionBegin;
212   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
213   ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
214   { /* Scoping so that constructor is called before pointer is returned */
215     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
216     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
217     ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
218     El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
219   }
220   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
221   ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
222   PetscFunctionReturn(0);
223 }
224 
225 static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
226 {
227   Mat_Elemental         *a = (Mat_Elemental*)A->data;
228   PetscErrorCode        ierr;
229   const PetscElemScalar *x;
230   PetscElemScalar       *y;
231   PetscElemScalar       one = 1,zero = 0;
232 
233   PetscFunctionBegin;
234   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
235   ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
236   { /* Scoping so that constructor is called before pointer is returned */
237     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
238     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
239     ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
240     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
241   }
242   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
243   ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
244   PetscFunctionReturn(0);
245 }
246 
247 static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
248 {
249   Mat_Elemental         *a = (Mat_Elemental*)A->data;
250   PetscErrorCode        ierr;
251   const PetscElemScalar *x;
252   PetscElemScalar       *z;
253   PetscElemScalar       one = 1;
254 
255   PetscFunctionBegin;
256   if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
257   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
258   ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
259   { /* Scoping so that constructor is called before pointer is returned */
260     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
261     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
262     ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
263     El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
264   }
265   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
266   ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
267   PetscFunctionReturn(0);
268 }
269 
270 static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
271 {
272   Mat_Elemental         *a = (Mat_Elemental*)A->data;
273   PetscErrorCode        ierr;
274   const PetscElemScalar *x;
275   PetscElemScalar       *z;
276   PetscElemScalar       one = 1;
277 
278   PetscFunctionBegin;
279   if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
280   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
281   ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
282   { /* Scoping so that constructor is called before pointer is returned */
283     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
284     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
285     ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
286     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
287   }
288   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
289   ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
290   PetscFunctionReturn(0);
291 }
292 
293 PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
294 {
295   Mat_Elemental    *a = (Mat_Elemental*)A->data;
296   Mat_Elemental    *b = (Mat_Elemental*)B->data;
297   Mat_Elemental    *c = (Mat_Elemental*)C->data;
298   PetscElemScalar  one = 1,zero = 0;
299 
300   PetscFunctionBegin;
301   { /* Scoping so that constructor is called before pointer is returned */
302     El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
303   }
304   C->assembled = PETSC_TRUE;
305   PetscFunctionReturn(0);
306 }
307 
308 PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat Ce)
309 {
310   PetscErrorCode ierr;
311 
312   PetscFunctionBegin;
313   ierr = MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
314   ierr = MatSetType(Ce,MATELEMENTAL);CHKERRQ(ierr);
315   ierr = MatSetUp(Ce);CHKERRQ(ierr);
316   Ce->ops->matmultnumeric = MatMatMultNumeric_Elemental;
317   PetscFunctionReturn(0);
318 }
319 
320 static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
321 {
322   Mat_Elemental      *a = (Mat_Elemental*)A->data;
323   Mat_Elemental      *b = (Mat_Elemental*)B->data;
324   Mat_Elemental      *c = (Mat_Elemental*)C->data;
325   PetscElemScalar    one = 1,zero = 0;
326 
327   PetscFunctionBegin;
328   { /* Scoping so that constructor is called before pointer is returned */
329     El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
330   }
331   C->assembled = PETSC_TRUE;
332   PetscFunctionReturn(0);
333 }
334 
335 static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat C)
336 {
337   PetscErrorCode ierr;
338 
339   PetscFunctionBegin;
340   ierr = MatSetSizes(C,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
341   ierr = MatSetType(C,MATELEMENTAL);CHKERRQ(ierr);
342   ierr = MatSetUp(C);CHKERRQ(ierr);
343   PetscFunctionReturn(0);
344 }
345 
346 /* --------------------------------------- */
347 static PetscErrorCode MatProductSetFromOptions_Elemental_AB(Mat C)
348 {
349   PetscFunctionBegin;
350   C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental;
351   C->ops->productsymbolic = MatProductSymbolic_AB;
352   PetscFunctionReturn(0);
353 }
354 
355 static PetscErrorCode MatProductSetFromOptions_Elemental_ABt(Mat C)
356 {
357   PetscFunctionBegin;
358   C->ops->mattransposemultsymbolic = MatMatTransposeMultSymbolic_Elemental;
359   C->ops->productsymbolic          = MatProductSymbolic_ABt;
360   PetscFunctionReturn(0);
361 }
362 
363 PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Elemental(Mat C)
364 {
365   PetscErrorCode ierr;
366   Mat_Product    *product = C->product;
367 
368   PetscFunctionBegin;
369   switch (product->type) {
370   case MATPRODUCT_AB:
371     ierr = MatProductSetFromOptions_Elemental_AB(C);CHKERRQ(ierr);
372     break;
373   case MATPRODUCT_ABt:
374     ierr = MatProductSetFromOptions_Elemental_ABt(C);CHKERRQ(ierr);
375     break;
376   default:
377     break;
378   }
379   PetscFunctionReturn(0);
380 }
381 
382 PetscErrorCode MatMatMultNumeric_Elemental_MPIDense(Mat A,Mat B,Mat C)
383 {
384   Mat            Be,Ce;
385   PetscErrorCode ierr;
386 
387   PetscFunctionBegin;
388   ierr = MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&Be);CHKERRQ(ierr);
389   ierr = MatMatMult(A,Be,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Ce);CHKERRQ(ierr);
390   ierr = MatConvert(Ce,MATMPIDENSE,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr);
391   ierr = MatDestroy(&Be);CHKERRQ(ierr);
392   ierr = MatDestroy(&Ce);CHKERRQ(ierr);
393   PetscFunctionReturn(0);
394 }
395 
396 PetscErrorCode MatMatMultSymbolic_Elemental_MPIDense(Mat A,Mat B,PetscReal fill,Mat C)
397 {
398   PetscErrorCode ierr;
399 
400   PetscFunctionBegin;
401   ierr = MatSetSizes(C,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
402   ierr = MatSetType(C,MATMPIDENSE);CHKERRQ(ierr);
403   ierr = MatSetUp(C);CHKERRQ(ierr);
404   C->ops->matmultnumeric = MatMatMultNumeric_Elemental_MPIDense;
405   PetscFunctionReturn(0);
406 }
407 
408 PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense_AB(Mat C)
409 {
410   PetscFunctionBegin;
411   C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental_MPIDense;
412   C->ops->productsymbolic = MatProductSymbolic_AB;
413   PetscFunctionReturn(0);
414 }
415 
416 PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense(Mat C)
417 {
418   PetscErrorCode ierr;
419   Mat_Product    *product = C->product;
420 
421   PetscFunctionBegin;
422   if (product->type == MATPRODUCT_AB) {
423     ierr = MatProductSetFromOptions_Elemental_MPIDense_AB(C);CHKERRQ(ierr);
424   }
425   PetscFunctionReturn(0);
426 }
427 /* --------------------------------------- */
428 
/*
   MatGetDiagonal_Elemental - Extracts the diagonal of A into vector D.

   Every process iterates over all nD = min(nrows,ncols) diagonal entries,
   translating each global index i to Elemental coordinates for both the
   row (dimension 0) and column (dimension 1), reading the value with
   emat->Get, and inserting it into D. NOTE(review): Get appears to be
   called by all ranks for every entry, which looks collective/O(nD) per
   rank — confirm against Elemental's DistMatrix::Get semantics.
*/
static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
{
  PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscErrorCode  ierr;
  PetscElemScalar v;
  MPI_Comm        comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nrows,&ncols);CHKERRQ(ierr);
  nD = nrows>ncols ? ncols : nrows;   /* diagonal length of a possibly rectangular matrix */
  for (i=0; i<nD; i++) {
    PetscInt erow,ecol;
    /* PETSc global index -> (rank,offset) -> Elemental index, for row then column */
    P2RO(A,0,i,&rrank,&ridx);
    RO2E(A,0,rrank,ridx,&erow);
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    P2RO(A,1,i,&crank,&cidx);
    RO2E(A,1,crank,cidx,&ecol);
    if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
    v = a->emat->Get(erow,ecol);
    ierr = VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(D);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(D);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
456 
/*
   MatDiagonalScale_Elemental - Computes X = diag(L)*X*diag(R).

   Either vector may be NULL (that side is skipped). Each vector's array is
   attached zero-copy as a [VC,STAR] Elemental distributed vector, used in
   El::DiagonalScale, and released before the PETSc array is restored.
   Note the C++ scoping: each `de` is block-local so its destructor runs
   before VecRestoreArrayRead — do not hoist these declarations.
*/
static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
{
  Mat_Elemental         *x = (Mat_Elemental*)X->data;
  const PetscElemScalar *d;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  if (R) {
    /* right scaling operates on columns, hence cmap sizes */
    ierr = VecGetArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
    El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
    ierr = VecRestoreArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
  }
  if (L) {
    /* left scaling operates on rows, hence rmap sizes */
    ierr = VecGetArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
    El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
    ierr = VecRestoreArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
480 
/*
   MatMissingDiagonal_Elemental - Dense storage always has every diagonal
   entry, so *missing is always PETSC_FALSE; the index output d is left
   untouched (it is only meaningful when an entry is missing).
*/
static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
{
  PetscFunctionBegin;
  *missing = PETSC_FALSE;
  PetscFunctionReturn(0);
}
487 
488 static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
489 {
490   Mat_Elemental  *x = (Mat_Elemental*)X->data;
491 
492   PetscFunctionBegin;
493   El::Scale((PetscElemScalar)a,*x->emat);
494   PetscFunctionReturn(0);
495 }
496 
497 /*
498   MatAXPY - Computes Y = a*X + Y.
499 */
500 static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
501 {
502   Mat_Elemental  *x = (Mat_Elemental*)X->data;
503   Mat_Elemental  *y = (Mat_Elemental*)Y->data;
504   PetscErrorCode ierr;
505 
506   PetscFunctionBegin;
507   El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
508   ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
509   PetscFunctionReturn(0);
510 }
511 
512 static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
513 {
514   Mat_Elemental *a=(Mat_Elemental*)A->data;
515   Mat_Elemental *b=(Mat_Elemental*)B->data;
516   PetscErrorCode ierr;
517 
518   PetscFunctionBegin;
519   El::Copy(*a->emat,*b->emat);
520   ierr = PetscObjectStateIncrease((PetscObject)B);CHKERRQ(ierr);
521   PetscFunctionReturn(0);
522 }
523 
524 static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
525 {
526   Mat            Be;
527   MPI_Comm       comm;
528   Mat_Elemental  *a=(Mat_Elemental*)A->data;
529   PetscErrorCode ierr;
530 
531   PetscFunctionBegin;
532   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
533   ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
534   ierr = MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
535   ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
536   ierr = MatSetUp(Be);CHKERRQ(ierr);
537   *B = Be;
538   if (op == MAT_COPY_VALUES) {
539     Mat_Elemental *b=(Mat_Elemental*)Be->data;
540     El::Copy(*a->emat,*b->emat);
541   }
542   Be->assembled = PETSC_TRUE;
543   PetscFunctionReturn(0);
544 }
545 
/*
   MatTranspose_Elemental - Computes B = A^T. Only out-of-place is
   supported: MAT_INPLACE_MATRIX is rejected; with MAT_INITIAL_MATRIX a new
   matrix is created, with MAT_REUSE_MATRIX the caller-supplied *B is
   overwritten. NOTE(review): `Be = *B` reads *B before the reuse mode is
   checked — for MAT_INITIAL_MATRIX the caller's *B may be uninitialized,
   but the read value is discarded by the MatCreate below; confirm callers
   always pass a valid pointer-to-Mat.
*/
static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    /* note the swapped local sizes: B is n x m when A is m x n */
    ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
    ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
    ierr = MatSetUp(Be);CHKERRQ(ierr);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Transpose(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
569 
570 static PetscErrorCode MatConjugate_Elemental(Mat A)
571 {
572   Mat_Elemental  *a = (Mat_Elemental*)A->data;
573 
574   PetscFunctionBegin;
575   El::Conjugate(*a->emat);
576   PetscFunctionReturn(0);
577 }
578 
579 static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
580 {
581   Mat            Be = *B;
582   PetscErrorCode ierr;
583   MPI_Comm       comm;
584   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;
585 
586   PetscFunctionBegin;
587   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
588   /* Only out-of-place supported */
589   if (reuse == MAT_INITIAL_MATRIX){
590     ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
591     ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
592     ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
593     ierr = MatSetUp(Be);CHKERRQ(ierr);
594     *B = Be;
595   }
596   b = (Mat_Elemental*)Be->data;
597   El::Adjoint(*a->emat,*b->emat);
598   Be->assembled = PETSC_TRUE;
599   PetscFunctionReturn(0);
600 }
601 
/*
   MatSolve_Elemental - Solves A*X = B using a previously computed LU or
   Cholesky factorization stored in A.

   B is copied into X, X's array is attached as a [VC,STAR] Elemental
   vector, then redistributed into [MC,MR] (which the SolveAfter routines
   operate on), solved, and copied back. The LU path dispatches on the
   pivoting strategy chosen at creation time: 0 = none, 1 = partial
   (permutation P), 2 = full (permutations P and Q).
*/
static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
{
  Mat_Elemental     *a = (Mat_Elemental*)A->data;
  PetscErrorCode    ierr;
  PetscElemScalar   *x;
  PetscInt          pivoting = a->pivoting;

  PetscFunctionBegin;
  ierr = VecCopy(B,X);CHKERRQ(ierr);
  ierr = VecGetArray(X,(PetscScalar **)&x);CHKERRQ(ierr);

  El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
  xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
  El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe); /* redistribute for SolveAfter */
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
    } else { /* pivoting == 2 */
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  El::Copy(xer,xe); /* redistribute the solution back into X's attached storage */

  ierr = VecRestoreArray(X,(PetscScalar **)&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
638 
639 static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
640 {
641   PetscErrorCode    ierr;
642 
643   PetscFunctionBegin;
644   ierr = MatSolve_Elemental(A,B,X);CHKERRQ(ierr);
645   ierr = VecAXPY(X,1,Y);CHKERRQ(ierr);
646   PetscFunctionReturn(0);
647 }
648 
/*
   MatMatSolve_Elemental - Solves A*X = B for a matrix of right-hand sides.

   If X is not of type MATELEMENTAL, B is converted into a temporary
   Elemental matrix C, the solve happens in C, and C is converted back into
   X at the end. If X is already Elemental, B's entries are copied directly
   into X and the solve happens in place there. The factor-type dispatch
   mirrors MatSolve_Elemental (LU with pivoting 0/1/2, or Cholesky).
*/
static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  Mat_Elemental  *x;
  Mat            C;
  PetscInt       pivoting = a->pivoting;
  PetscBool      flg;
  MatType        type;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetType(X,&type);CHKERRQ(ierr);
  ierr = PetscStrcmp(type,MATELEMENTAL,&flg);CHKERRQ(ierr);
  if (!flg) {
    /* X is not Elemental: solve in a converted copy of B, convert back below */
    ierr = MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&C);CHKERRQ(ierr);
    x = (Mat_Elemental*)C->data;
  } else {
    x = (Mat_Elemental*)X->data;
    El::Copy(*((Mat_Elemental*)B->data)->emat,*x->emat);
  }
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  if (!flg) {
    ierr = MatConvert(C,type,MAT_REUSE_MATRIX,&X);CHKERRQ(ierr);
    ierr = MatDestroy(&C);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
692 
693 static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
694 {
695   Mat_Elemental  *a = (Mat_Elemental*)A->data;
696   PetscErrorCode ierr;
697   PetscInt       pivoting = a->pivoting;
698 
699   PetscFunctionBegin;
700   if (pivoting == 0) {
701     El::LU(*a->emat);
702   } else if (pivoting == 1) {
703     El::LU(*a->emat,*a->P);
704   } else {
705     El::LU(*a->emat,*a->P,*a->Q);
706   }
707   A->factortype = MAT_FACTOR_LU;
708   A->assembled  = PETSC_TRUE;
709 
710   ierr = PetscFree(A->solvertype);CHKERRQ(ierr);
711   ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);CHKERRQ(ierr);
712   PetscFunctionReturn(0);
713 }
714 
715 static PetscErrorCode  MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
716 {
717   PetscErrorCode ierr;
718 
719   PetscFunctionBegin;
720   ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
721   ierr = MatLUFactor_Elemental(F,0,0,info);CHKERRQ(ierr);
722   PetscFunctionReturn(0);
723 }
724 
/*
   MatLUFactorSymbolic_Elemental - Intentional no-op: dense factorization
   needs no symbolic phase, and F is already created and allocated by
   MatGetFactor_elemental_elemental().
*/
static PetscErrorCode  MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  PetscFunctionReturn(0);
}
731 
732 static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
733 {
734   Mat_Elemental  *a = (Mat_Elemental*)A->data;
735   El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;
736   PetscErrorCode ierr;
737 
738   PetscFunctionBegin;
739   El::Cholesky(El::UPPER,*a->emat);
740   A->factortype = MAT_FACTOR_CHOLESKY;
741   A->assembled  = PETSC_TRUE;
742 
743   ierr = PetscFree(A->solvertype);CHKERRQ(ierr);
744   ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);CHKERRQ(ierr);
745   PetscFunctionReturn(0);
746 }
747 
748 static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
749 {
750   PetscErrorCode ierr;
751 
752   PetscFunctionBegin;
753   ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
754   ierr = MatCholeskyFactor_Elemental(F,0,info);CHKERRQ(ierr);
755   PetscFunctionReturn(0);
756 }
757 
/*
   MatCholeskyFactorSymbolic_Elemental - Intentional no-op: dense
   factorization needs no symbolic phase, and F is already created and
   allocated by MatGetFactor_elemental_elemental().
*/
static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  PetscFunctionReturn(0);
}
764 
/* Query callback composed on factor matrices: reports the solver package name. */
PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERELEMENTAL;
  PetscFunctionReturn(0);
}
771 
/*
   MatGetFactor_elemental_elemental - Creates the factor matrix F for an
   LU or Cholesky factorization of A. F is a fully set-up Elemental matrix
   with the same local sizes as A; the symbolic factorization routines are
   therefore no-ops. The solver-type query function is composed on F so
   MatFactorGetSolverType() works on the result.
*/
static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = MatSetType(B,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(B);CHKERRQ(ierr);
  B->factortype = ftype;
  ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);CHKERRQ(ierr);
  *F            = B;
  PetscFunctionReturn(0);
}
791 
792 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
793 {
794   PetscErrorCode ierr;
795 
796   PetscFunctionBegin;
797   ierr = MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_LU,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
798   ierr = MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
799   PetscFunctionReturn(0);
800 }
801 
802 static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
803 {
804   Mat_Elemental *a=(Mat_Elemental*)A->data;
805 
806   PetscFunctionBegin;
807   switch (type){
808   case NORM_1:
809     *nrm = El::OneNorm(*a->emat);
810     break;
811   case NORM_FROBENIUS:
812     *nrm = El::FrobeniusNorm(*a->emat);
813     break;
814   case NORM_INFINITY:
815     *nrm = El::InfinityNorm(*a->emat);
816     break;
817   default:
818     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
819   }
820   PetscFunctionReturn(0);
821 }
822 
823 static PetscErrorCode MatZeroEntries_Elemental(Mat A)
824 {
825   Mat_Elemental *a=(Mat_Elemental*)A->data;
826 
827   PetscFunctionBegin;
828   El::Zero(*a->emat);
829   PetscFunctionReturn(0);
830 }
831 
/*
   MatGetOwnershipIS_Elemental - Builds index sets listing the global PETSc
   row (and/or column) indices whose entries are stored locally under
   Elemental's cyclic distribution. Either output may be NULL to skip it.

   Local Elemental index i corresponds to global Elemental index
   shift + i*stride; that is mapped back to a PETSc global index with
   E2RO (Elemental -> rank/offset) then RO2P (rank/offset -> PETSc).
   The malloc'd index array is handed to the IS with PETSC_OWN_POINTER,
   so the IS takes over freeing it.
*/
static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,m,shift,stride,*idx;

  PetscFunctionBegin;
  if (rows) {
    m = a->emat->LocalHeight();
    shift = a->emat->ColShift();
    stride = a->emat->ColStride();
    ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,0,shift+i*stride,&rank,&offset);
      RO2P(A,0,rank,offset,&idx[i]);
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);CHKERRQ(ierr);
  }
  if (cols) {
    m = a->emat->LocalWidth();
    shift = a->emat->RowShift();
    stride = a->emat->RowStride();
    ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,1,shift+i*stride,&rank,&offset);
      RO2P(A,1,rank,offset,&idx[i]);
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
865 
/*
  Convert an Elemental matrix to MATDENSE.

  Each process walks the PETSc indices it owns under the Elemental cyclic
  distribution (from MatGetOwnershipIS), translates them back to Elemental
  local indices, reads the local entry, and inserts it into the dense matrix
  at the corresponding PETSc (row,col).  Supports MAT_INITIAL_MATRIX,
  MAT_REUSE_MATRIX (B must already be a compatible dense matrix) and
  MAT_INPLACE_MATRIX (A's header is replaced).
*/
static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
{
  Mat                Bmpi;
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  MPI_Comm           comm;
  PetscErrorCode     ierr;
  IS                 isrows,iscols;
  PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
  const PetscInt     *rows,*cols;
  PetscElemScalar    v;
  const El::Grid     &grid = a->emat->Grid();

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    Bmpi = *B;
  } else {
    ierr = MatCreate(comm,&Bmpi);CHKERRQ(ierr);
    ierr = MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = MatSetType(Bmpi,MATDENSE);CHKERRQ(ierr);
    ierr = MatSetUp(Bmpi);CHKERRQ(ierr);
  }

  /* Get local entries of A */
  ierr = MatGetOwnershipIS(A,&isrows,&iscols);CHKERRQ(ierr);
  ierr = ISGetLocalSize(isrows,&nrows);CHKERRQ(ierr);
  ierr = ISGetIndices(isrows,&rows);CHKERRQ(ierr);
  ierr = ISGetLocalSize(iscols,&ncols);CHKERRQ(ierr);
  ierr = ISGetIndices(iscols,&cols);CHKERRQ(ierr);

  /* The two branches differ only in loop nesting order (row-major vs column-major traversal) */
  if (a->roworiented) {
    for (i=0; i<nrows; i++) {
      P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<ncols; j++) {
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

        /* erow/ecol are owned here, so integer division by the process-grid
           strides yields the Elemental local indices */
        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        ierr = MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  } else { /* column-oriented */
    for (j=0; j<ncols; j++) {
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nrows; i++) {
        P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        ierr = MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&Bmpi);CHKERRQ(ierr);
  } else {
    *B = Bmpi;
  }
  ierr = ISDestroy(&isrows);CHKERRQ(ierr);
  ierr = ISDestroy(&iscols);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
941 
942 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
943 {
944   Mat               mat_elemental;
945   PetscErrorCode    ierr;
946   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
947   const PetscInt    *cols;
948   const PetscScalar *vals;
949 
950   PetscFunctionBegin;
951   if (reuse == MAT_REUSE_MATRIX) {
952     mat_elemental = *newmat;
953     ierr = MatZeroEntries(mat_elemental);CHKERRQ(ierr);
954   } else {
955     ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
956     ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
957     ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
958     ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
959   }
960   for (row=0; row<M; row++) {
961     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
962     /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
963     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
964     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
965   }
966   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
967   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
968 
969   if (reuse == MAT_INPLACE_MATRIX) {
970     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
971   } else {
972     *newmat = mat_elemental;
973   }
974   PetscFunctionReturn(0);
975 }
976 
977 PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
978 {
979   Mat               mat_elemental;
980   PetscErrorCode    ierr;
981   PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
982   const PetscInt    *cols;
983   const PetscScalar *vals;
984 
985   PetscFunctionBegin;
986   if (reuse == MAT_REUSE_MATRIX) {
987     mat_elemental = *newmat;
988     ierr = MatZeroEntries(mat_elemental);CHKERRQ(ierr);
989   } else {
990     ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
991     ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
992     ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
993     ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
994   }
995   for (row=rstart; row<rend; row++) {
996     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
997     for (j=0; j<ncols; j++) {
998       /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
999       ierr = MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);CHKERRQ(ierr);
1000     }
1001     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1002   }
1003   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1004   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1005 
1006   if (reuse == MAT_INPLACE_MATRIX) {
1007     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1008   } else {
1009     *newmat = mat_elemental;
1010   }
1011   PetscFunctionReturn(0);
1012 }
1013 
/*
  Convert a sequential symmetric block AIJ (SBAIJ) matrix to MATELEMENTAL.

  SBAIJ stores only the upper triangle, so each stored row is inserted as-is
  and the strictly-lower mirror entries are generated explicitly, conjugated
  when the matrix is marked Hermitian.  ADD_VALUES is required because the
  PETSc-Elemental interface implements off-process insertion via axpy.
*/
PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    ierr = MatZeroEntries(mat_elemental);CHKERRQ(ierr);
  } else {
    ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
    ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
    ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
    ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
  }
  /* expose the stored upper-triangular rows for MatGetRow below */
  ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
  for (row=0; row<M; row++) {
    ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      PetscScalar v;
      if (cols[j] == row) continue;  /* diagonal was already inserted once */
      v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
      ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
  }
  ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
  } else {
    *newmat = mat_elemental;
  }
  PetscFunctionReturn(0);
}
1056 
/*
  Convert a parallel symmetric block AIJ (SBAIJ) matrix to MATELEMENTAL.

  Same scheme as the SeqSBAIJ converter above, restricted to the locally
  owned row range [rstart,rend): the stored upper triangle is inserted as-is
  and the strictly-lower mirror entries are generated explicitly, conjugated
  when the matrix is marked Hermitian.  ADD_VALUES is required because the
  PETSc-Elemental interface implements off-process insertion via axpy.
*/
PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    ierr = MatZeroEntries(mat_elemental);CHKERRQ(ierr);
  } else {
    ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
    ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
    ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
    ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
  }
  /* expose the stored upper-triangular rows for MatGetRow below */
  ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
  for (row=rstart; row<rend; row++) {
    ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      PetscScalar v;
      if (cols[j] == row) continue;  /* diagonal was already inserted once */
      v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
      ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
  }
  ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
  } else {
    *newmat = mat_elemental;
  }
  PetscFunctionReturn(0);
}
1099 
1100 static PetscErrorCode MatDestroy_Elemental(Mat A)
1101 {
1102   Mat_Elemental      *a = (Mat_Elemental*)A->data;
1103   PetscErrorCode     ierr;
1104   Mat_Elemental_Grid *commgrid;
1105   PetscBool          flg;
1106   MPI_Comm           icomm;
1107 
1108   PetscFunctionBegin;
1109   delete a->emat;
1110   delete a->P;
1111   delete a->Q;
1112 
1113   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1114   ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
1115   ierr = MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRMPI(ierr);
1116   if (--commgrid->grid_refct == 0) {
1117     delete commgrid->grid;
1118     ierr = PetscFree(commgrid);CHKERRQ(ierr);
1119     ierr = MPI_Comm_free_keyval(&Petsc_Elemental_keyval);CHKERRMPI(ierr);
1120   }
1121   ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
1122   ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);CHKERRQ(ierr);
1123   ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);CHKERRQ(ierr);
1124   ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",NULL);CHKERRQ(ierr);
1125   ierr = PetscFree(A->data);CHKERRQ(ierr);
1126   PetscFunctionReturn(0);
1127 }
1128 
1129 PetscErrorCode MatSetUp_Elemental(Mat A)
1130 {
1131   Mat_Elemental  *a = (Mat_Elemental*)A->data;
1132   PetscErrorCode ierr;
1133   MPI_Comm       comm;
1134   PetscMPIInt    rsize,csize;
1135   PetscInt       n;
1136 
1137   PetscFunctionBegin;
1138   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1139   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1140 
1141   /* Check if local row and column sizes are equally distributed.
1142      Jed: Elemental uses "element" cyclic ordering so the sizes need to match that
1143      exactly.  The strategy in MatElemental is for PETSc to implicitly permute to block ordering (like would be returned by
1144      PetscSplitOwnership(comm,&n,&N), at which point Elemental matrices can act on PETSc vectors without redistributing the vectors. */
1145   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1146   n = PETSC_DECIDE;
1147   ierr = PetscSplitOwnership(comm,&n,&A->rmap->N);CHKERRQ(ierr);
1148   if (n != A->rmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local row size %D of ELEMENTAL matrix must be equally distributed",A->rmap->n);
1149 
1150   n = PETSC_DECIDE;
1151   ierr = PetscSplitOwnership(comm,&n,&A->cmap->N);CHKERRQ(ierr);
1152   if (n != A->cmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local column size %D of ELEMENTAL matrix must be equally distributed",A->cmap->n);
1153 
1154   a->emat->Resize(A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1155   El::Zero(*a->emat);
1156 
1157   ierr = MPI_Comm_size(A->rmap->comm,&rsize);CHKERRMPI(ierr);
1158   ierr = MPI_Comm_size(A->cmap->comm,&csize);CHKERRMPI(ierr);
1159   if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
1160   a->commsize = rsize;
1161   a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
1162   a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
1163   a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
1164   a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
1165   PetscFunctionReturn(0);
1166 }
1167 
1168 PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
1169 {
1170   Mat_Elemental  *a = (Mat_Elemental*)A->data;
1171 
1172   PetscFunctionBegin;
1173   /* printf("Calling ProcessQueues\n"); */
1174   a->emat->ProcessQueues();
1175   /* printf("Finished ProcessQueues\n"); */
1176   PetscFunctionReturn(0);
1177 }
1178 
/* End assembly: intentionally a no-op — all queued updates are flushed in MatAssemblyBegin_Elemental */
PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  PetscFunctionBegin;
  /* Currently does nothing */
  PetscFunctionReturn(0);
}
1185 
1186 PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1187 {
1188   PetscErrorCode ierr;
1189   Mat            Adense,Ae;
1190   MPI_Comm       comm;
1191 
1192   PetscFunctionBegin;
1193   ierr = PetscObjectGetComm((PetscObject)newMat,&comm);CHKERRQ(ierr);
1194   ierr = MatCreate(comm,&Adense);CHKERRQ(ierr);
1195   ierr = MatSetType(Adense,MATDENSE);CHKERRQ(ierr);
1196   ierr = MatLoad(Adense,viewer);CHKERRQ(ierr);
1197   ierr = MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);CHKERRQ(ierr);
1198   ierr = MatDestroy(&Adense);CHKERRQ(ierr);
1199   ierr = MatHeaderReplace(newMat,&Ae);CHKERRQ(ierr);
1200   PetscFunctionReturn(0);
1201 }
1202 
1203 /* -------------------------------------------------------------------*/
/* Operation table for MATELEMENTAL.  Entries are positional: slot k must
   correspond to the k-th member of struct _MatOps (the /\*nn*\/ markers give
   the slot index); a 0 means the operation is not implemented for this type.
   Do not reorder entries. */
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       0,
       0,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ MatProductSetFromOptions_Elemental,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       MatMissingDiagonal_Elemental,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0,
       0,
/*140*/0,
       0,
       0,
       0,
       0,
/*145*/0,
       0,
       0
};
1354 
1355 /*MC
1356    MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package
1357 
1358   Use ./configure --download-elemental to install PETSc to use Elemental
1359 
1360   Use -pc_type lu -pc_factor_mat_solver_type elemental to use this direct solver
1361 
1362    Options Database Keys:
1363 + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1364 - -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix
1365 
1366   Level: beginner
1367 
1368 .seealso: MATDENSE
1369 M*/
1370 
1371 PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
1372 {
1373   Mat_Elemental      *a;
1374   PetscErrorCode     ierr;
1375   PetscBool          flg,flg1;
1376   Mat_Elemental_Grid *commgrid;
1377   MPI_Comm           icomm;
1378   PetscInt           optv1;
1379 
1380   PetscFunctionBegin;
1381   ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1382   A->insertmode = NOT_SET_VALUES;
1383 
1384   ierr = PetscNewLog(A,&a);CHKERRQ(ierr);
1385   A->data = (void*)a;
1386 
1387   /* Set up the elemental matrix */
1388   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1389 
1390   /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
1391   if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
1392     ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);CHKERRMPI(ierr);
1393   }
1394   ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
1395   ierr = MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRMPI(ierr);
1396   if (!flg) {
1397     ierr = PetscNewLog(A,&commgrid);CHKERRQ(ierr);
1398 
1399     ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");CHKERRQ(ierr);
1400     /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
1401     ierr = PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);CHKERRQ(ierr);
1402     if (flg1) {
1403       if (El::mpi::Size(cxxcomm) % optv1) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
1404       commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
1405     } else {
1406       commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
1407       /* printf("new commgrid->grid = %p\n",commgrid->grid);  -- memory leak revealed by valgrind? */
1408     }
1409     commgrid->grid_refct = 1;
1410     ierr = MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);CHKERRMPI(ierr);
1411 
1412     a->pivoting    = 1;
1413     ierr = PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);CHKERRQ(ierr);
1414 
1415     ierr = PetscOptionsEnd();CHKERRQ(ierr);
1416   } else {
1417     commgrid->grid_refct++;
1418   }
1419   ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
1420   a->grid        = commgrid->grid;
1421   a->emat        = new El::DistMatrix<PetscElemScalar>(*a->grid);
1422   a->roworiented = PETSC_TRUE;
1423 
1424   ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);CHKERRQ(ierr);
1425   ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",MatProductSetFromOptions_Elemental_MPIDense);CHKERRQ(ierr);
1426   ierr = PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);CHKERRQ(ierr);
1427   PetscFunctionReturn(0);
1428 }
1429