xref: /petsc/src/mat/impls/elemental/matelem.cxx (revision 4fc747eaadbeca11629f314a99edccbc2ed7b3d3)
1 #include <../src/mat/impls/elemental/matelemimpl.h> /*I "petscmat.h" I*/
2 
/*
    The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
  is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
*/
/* MPI_KEYVAL_INVALID marks the keyval as "not yet created"; it is created lazily
   (the creation site is not in this part of the file). */
static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "PetscElementalInitializePackage"
11 /*@C
12    PetscElementalInitializePackage - Initialize Elemental package
13 
14    Logically Collective
15 
16    Level: developer
17 
18 .seealso: MATELEMENTAL, PetscElementalFinalizePackage()
19 @*/
20 PetscErrorCode PetscElementalInitializePackage(void)
21 {
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   if (El::Initialized()) PetscFunctionReturn(0);
26   El::Initialize();   /* called by the 1st call of MatCreate_Elemental */
27   ierr = PetscRegisterFinalize(PetscElementalFinalizePackage);CHKERRQ(ierr);
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PetscElementalFinalizePackage"
33 /*@C
34    PetscElementalFinalizePackage - Finalize Elemental package
35 
36    Logically Collective
37 
38    Level: developer
39 
40 .seealso: MATELEMENTAL, PetscElementalInitializePackage()
41 @*/
42 PetscErrorCode PetscElementalFinalizePackage(void)
43 {
44   PetscFunctionBegin;
45   El::Finalize();  /* called by PetscFinalize() */
46   PetscFunctionReturn(0);
47 }
48 
49 #undef __FUNCT__
50 #define __FUNCT__ "MatView_Elemental"
51 static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
52 {
53   PetscErrorCode ierr;
54   Mat_Elemental  *a = (Mat_Elemental*)A->data;
55   PetscBool      iascii;
56 
57   PetscFunctionBegin;
58   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
59   if (iascii) {
60     PetscViewerFormat format;
61     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
62     if (format == PETSC_VIEWER_ASCII_INFO) {
63       /* call elemental viewing function */
64       ierr = PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");CHKERRQ(ierr);
65       ierr = PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());CHKERRQ(ierr);
66       ierr = PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());CHKERRQ(ierr);
67       if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
68         /* call elemental viewing function */
69         ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");CHKERRQ(ierr);
70       }
71 
72     } else if (format == PETSC_VIEWER_DEFAULT) {
73       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
74       El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
75       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
76       if (A->factortype == MAT_FACTOR_NONE){
77         Mat Adense;
78         ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");CHKERRQ(ierr);
79         ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
80         ierr = MatView(Adense,viewer);CHKERRQ(ierr);
81         ierr = MatDestroy(&Adense);CHKERRQ(ierr);
82       }
83     } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Format");
84   } else {
85     /* convert to dense format and call MatView() */
86     Mat Adense;
87     ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");CHKERRQ(ierr);
88     ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
89     ierr = MatView(Adense,viewer);CHKERRQ(ierr);
90     ierr = MatDestroy(&Adense);CHKERRQ(ierr);
91   }
92   PetscFunctionReturn(0);
93 }
94 
95 #undef __FUNCT__
96 #define __FUNCT__ "MatGetInfo_Elemental"
97 static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
98 {
99   Mat_Elemental  *a = (Mat_Elemental*)A->data;
100 
101   PetscFunctionBegin;
102   info->block_size = 1.0;
103 
104   if (flag == MAT_LOCAL) {
105     info->nz_allocated   = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
106     info->nz_used        = info->nz_allocated;
107   } else if (flag == MAT_GLOBAL_MAX) {
108     //ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
109     /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
110     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
111   } else if (flag == MAT_GLOBAL_SUM) {
112     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
113     info->nz_allocated   = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
114     info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
115     //ierr = MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
116     //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
117   }
118 
119   info->nz_unneeded       = 0.0;
120   info->assemblies        = (double)A->num_ass;
121   info->mallocs           = 0;
122   info->memory            = ((PetscObject)A)->mem;
123   info->fill_ratio_given  = 0; /* determined by Elemental */
124   info->fill_ratio_needed = 0;
125   info->factor_mallocs    = 0;
126   PetscFunctionReturn(0);
127 }
128 
129 #undef __FUNCT__
130 #define __FUNCT__ "MatSetOption_Elemental"
131 PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
132 {
133   Mat_Elemental  *a = (Mat_Elemental*)A->data;
134 
135   PetscFunctionBegin;
136   switch (op) {
137   case MAT_NEW_NONZERO_LOCATIONS:
138   case MAT_NEW_NONZERO_LOCATION_ERR:
139   case MAT_NEW_NONZERO_ALLOCATION_ERR:
140   case MAT_ROW_ORIENTED:
141     a->roworiented = flg;
142     break;
143   case MAT_SYMMETRIC:
144     break;
145   default:
146     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
147   }
148   PetscFunctionReturn(0);
149 }
150 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_Elemental"
/*
   MatSetValues_Elemental - Insert or add a logically dense block of values.

   Two passes are made over the (rows,cols) rectangle:
     pass 1: entries that are local to this process (emat->IsLocal) are
             Set/Updated immediately; off-process entries are only counted
             (numQueues) and must use ADD_VALUES;
     pass 2: after Reserve(numQueues), every off-process entry is queued with
             QueueUpdate() for communication at assembly time.
   Global PETSc indices are mapped to Elemental indices via P2RO (PETSc ->
   rank/offset) followed by RO2E (rank/offset -> Elemental).
   Negative row/column indices are silently skipped (usual PETSc convention).
   a->roworiented selects whether vals is read row-major (vals[i*nc+j]) or
   column-major (vals[i+j*nr]).
*/
static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  PetscFunctionBegin;
  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    /* Pass 1: apply local entries, count off-process ones */
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* Pass 2: queue the off-process updates counted above.  Index validity was
       already checked in pass 1, so no SETERRQ here. */
    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues );
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i*nc+j] );
        }
      }
    }
  } else { /* columnoriented */
    /* Same two-pass protocol, but vals is read column-major: vals[i+j*nr] */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues );
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);

      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i+j*nr] );
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
250 
251 #undef __FUNCT__
252 #define __FUNCT__ "MatMult_Elemental"
253 static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
254 {
255   Mat_Elemental         *a = (Mat_Elemental*)A->data;
256   PetscErrorCode        ierr;
257   const PetscElemScalar *x;
258   PetscElemScalar       *y;
259   PetscElemScalar       one = 1,zero = 0;
260 
261   PetscFunctionBegin;
262   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
263   ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
264   { /* Scoping so that constructor is called before pointer is returned */
265     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
266     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
267     ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
268     El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
269   }
270   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
271   ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
272   PetscFunctionReturn(0);
273 }
274 
275 #undef __FUNCT__
276 #define __FUNCT__ "MatMultTranspose_Elemental"
277 static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
278 {
279   Mat_Elemental         *a = (Mat_Elemental*)A->data;
280   PetscErrorCode        ierr;
281   const PetscElemScalar *x;
282   PetscElemScalar       *y;
283   PetscElemScalar       one = 1,zero = 0;
284 
285   PetscFunctionBegin;
286   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
287   ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
288   { /* Scoping so that constructor is called before pointer is returned */
289     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
290     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
291     ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
292     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
293   }
294   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
295   ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
296   PetscFunctionReturn(0);
297 }
298 
299 #undef __FUNCT__
300 #define __FUNCT__ "MatMultAdd_Elemental"
301 static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
302 {
303   Mat_Elemental         *a = (Mat_Elemental*)A->data;
304   PetscErrorCode        ierr;
305   const PetscElemScalar *x;
306   PetscElemScalar       *z;
307   PetscElemScalar       one = 1;
308 
309   PetscFunctionBegin;
310   if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
311   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
312   ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
313   { /* Scoping so that constructor is called before pointer is returned */
314     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
315     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
316     ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
317     El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
318   }
319   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
320   ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
321   PetscFunctionReturn(0);
322 }
323 
324 #undef __FUNCT__
325 #define __FUNCT__ "MatMultTransposeAdd_Elemental"
326 static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
327 {
328   Mat_Elemental         *a = (Mat_Elemental*)A->data;
329   PetscErrorCode        ierr;
330   const PetscElemScalar *x;
331   PetscElemScalar       *z;
332   PetscElemScalar       one = 1;
333 
334   PetscFunctionBegin;
335   if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
336   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
337   ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
338   { /* Scoping so that constructor is called before pointer is returned */
339     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
340     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
341     ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
342     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
343   }
344   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
345   ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
346   PetscFunctionReturn(0);
347 }
348 
349 #undef __FUNCT__
350 #define __FUNCT__ "MatMatMultNumeric_Elemental"
351 static PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
352 {
353   Mat_Elemental    *a = (Mat_Elemental*)A->data;
354   Mat_Elemental    *b = (Mat_Elemental*)B->data;
355   Mat_Elemental    *c = (Mat_Elemental*)C->data;
356   PetscElemScalar  one = 1,zero = 0;
357 
358   PetscFunctionBegin;
359   { /* Scoping so that constructor is called before pointer is returned */
360     El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
361   }
362   C->assembled = PETSC_TRUE;
363   PetscFunctionReturn(0);
364 }
365 
366 #undef __FUNCT__
367 #define __FUNCT__ "MatMatMultSymbolic_Elemental"
368 static PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
369 {
370   PetscErrorCode ierr;
371   Mat            Ce;
372   MPI_Comm       comm;
373 
374   PetscFunctionBegin;
375   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
376   ierr = MatCreate(comm,&Ce);CHKERRQ(ierr);
377   ierr = MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
378   ierr = MatSetType(Ce,MATELEMENTAL);CHKERRQ(ierr);
379   ierr = MatSetUp(Ce);CHKERRQ(ierr);
380   *C = Ce;
381   PetscFunctionReturn(0);
382 }
383 
384 #undef __FUNCT__
385 #define __FUNCT__ "MatMatMult_Elemental"
386 static PetscErrorCode MatMatMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
387 {
388   PetscErrorCode ierr;
389 
390   PetscFunctionBegin;
391   if (scall == MAT_INITIAL_MATRIX){
392     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
393     ierr = MatMatMultSymbolic_Elemental(A,B,1.0,C);CHKERRQ(ierr);
394     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
395   }
396   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
397   ierr = MatMatMultNumeric_Elemental(A,B,*C);CHKERRQ(ierr);
398   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
399   PetscFunctionReturn(0);
400 }
401 
402 #undef __FUNCT__
403 #define __FUNCT__ "MatMatTransposeMultNumeric_Elemental"
404 static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
405 {
406   Mat_Elemental      *a = (Mat_Elemental*)A->data;
407   Mat_Elemental      *b = (Mat_Elemental*)B->data;
408   Mat_Elemental      *c = (Mat_Elemental*)C->data;
409   PetscElemScalar    one = 1,zero = 0;
410 
411   PetscFunctionBegin;
412   { /* Scoping so that constructor is called before pointer is returned */
413     El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
414   }
415   C->assembled = PETSC_TRUE;
416   PetscFunctionReturn(0);
417 }
418 
419 #undef __FUNCT__
420 #define __FUNCT__ "MatMatTransposeMultSymbolic_Elemental"
421 static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
422 {
423   PetscErrorCode ierr;
424   Mat            Ce;
425   MPI_Comm       comm;
426 
427   PetscFunctionBegin;
428   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
429   ierr = MatCreate(comm,&Ce);CHKERRQ(ierr);
430   ierr = MatSetSizes(Ce,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
431   ierr = MatSetType(Ce,MATELEMENTAL);CHKERRQ(ierr);
432   ierr = MatSetUp(Ce);CHKERRQ(ierr);
433   *C = Ce;
434   PetscFunctionReturn(0);
435 }
436 
437 #undef __FUNCT__
438 #define __FUNCT__ "MatMatTransposeMult_Elemental"
439 static PetscErrorCode MatMatTransposeMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
440 {
441   PetscErrorCode ierr;
442 
443   PetscFunctionBegin;
444   if (scall == MAT_INITIAL_MATRIX){
445     ierr = PetscLogEventBegin(MAT_MatTransposeMultSymbolic,A,B,0,0);CHKERRQ(ierr);
446     ierr = MatMatMultSymbolic_Elemental(A,B,1.0,C);CHKERRQ(ierr);
447     ierr = PetscLogEventEnd(MAT_MatTransposeMultSymbolic,A,B,0,0);CHKERRQ(ierr);
448   }
449   ierr = PetscLogEventBegin(MAT_MatTransposeMultNumeric,A,B,0,0);CHKERRQ(ierr);
450   ierr = MatMatTransposeMultNumeric_Elemental(A,B,*C);CHKERRQ(ierr);
451   ierr = PetscLogEventEnd(MAT_MatTransposeMultNumeric,A,B,0,0);CHKERRQ(ierr);
452   PetscFunctionReturn(0);
453 }
454 
#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_Elemental"
/*
   MatGetDiagonal_Elemental - Extract the diagonal of A into vector D.

   Every process loops over all min(nrows,ncols) diagonal entries, translating
   each global PETSc index to Elemental coordinates (P2RO + RO2E) for both the
   row and the column dimension, then reads the entry with emat->Get().
   NOTE(review): Get() on a DistMatrix appears to involve communication, making
   this O(n) collective per entry — confirm before using on large matrices.
*/
static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
{
  PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscErrorCode  ierr;
  PetscElemScalar v;
  MPI_Comm        comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nrows,&ncols);CHKERRQ(ierr);
  /* diagonal length of a possibly rectangular matrix */
  nD = nrows>ncols ? ncols : nrows;
  for (i=0; i<nD; i++) {
    PetscInt erow,ecol;
    P2RO(A,0,i,&rrank,&ridx);
    RO2E(A,0,rrank,ridx,&erow);
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    P2RO(A,1,i,&crank,&cidx);
    RO2E(A,1,crank,cidx,&ecol);
    if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
    v = a->emat->Get(erow,ecol);
    ierr = VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(D);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(D);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
484 
485 #undef __FUNCT__
486 #define __FUNCT__ "MatDiagonalScale_Elemental"
487 static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
488 {
489   Mat_Elemental         *x = (Mat_Elemental*)X->data;
490   const PetscElemScalar *d;
491   PetscErrorCode        ierr;
492 
493   PetscFunctionBegin;
494   if (R) {
495     ierr = VecGetArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
496     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
497     de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
498     El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
499     ierr = VecRestoreArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
500   }
501   if (L) {
502     ierr = VecGetArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
503     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
504     de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
505     El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
506     ierr = VecRestoreArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
507   }
508   PetscFunctionReturn(0);
509 }
510 
511 #undef __FUNCT__
512 #define __FUNCT__ "MatScale_Elemental"
513 static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
514 {
515   Mat_Elemental  *x = (Mat_Elemental*)X->data;
516 
517   PetscFunctionBegin;
518   El::Scale((PetscElemScalar)a,*x->emat);
519   PetscFunctionReturn(0);
520 }
521 
522 /*
523   MatAXPY - Computes Y = a*X + Y.
524 */
525 #undef __FUNCT__
526 #define __FUNCT__ "MatAXPY_Elemental"
527 static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
528 {
529   Mat_Elemental  *x = (Mat_Elemental*)X->data;
530   Mat_Elemental  *y = (Mat_Elemental*)Y->data;
531   PetscErrorCode ierr;
532 
533   PetscFunctionBegin;
534   El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
535   ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
536   PetscFunctionReturn(0);
537 }
538 
539 #undef __FUNCT__
540 #define __FUNCT__ "MatCopy_Elemental"
541 static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
542 {
543   Mat_Elemental *a=(Mat_Elemental*)A->data;
544   Mat_Elemental *b=(Mat_Elemental*)B->data;
545 
546   PetscFunctionBegin;
547   El::Copy(*a->emat,*b->emat);
548   PetscFunctionReturn(0);
549 }
550 
551 #undef __FUNCT__
552 #define __FUNCT__ "MatDuplicate_Elemental"
553 static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
554 {
555   Mat            Be;
556   MPI_Comm       comm;
557   Mat_Elemental  *a=(Mat_Elemental*)A->data;
558   PetscErrorCode ierr;
559 
560   PetscFunctionBegin;
561   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
562   ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
563   ierr = MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
564   ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
565   ierr = MatSetUp(Be);CHKERRQ(ierr);
566   *B = Be;
567   if (op == MAT_COPY_VALUES) {
568     Mat_Elemental *b=(Mat_Elemental*)Be->data;
569     El::Copy(*a->emat,*b->emat);
570   }
571   Be->assembled = PETSC_TRUE;
572   PetscFunctionReturn(0);
573 }
574 
575 #undef __FUNCT__
576 #define __FUNCT__ "MatTranspose_Elemental"
577 static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
578 {
579   Mat            Be = *B;
580   PetscErrorCode ierr;
581   MPI_Comm       comm;
582   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;
583 
584   PetscFunctionBegin;
585   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
586   /* Only out-of-place supported */
587   if (reuse == MAT_INITIAL_MATRIX){
588     ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
589     ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
590     ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
591     ierr = MatSetUp(Be);CHKERRQ(ierr);
592     *B = Be;
593   }
594   b = (Mat_Elemental*)Be->data;
595   El::Transpose(*a->emat,*b->emat);
596   Be->assembled = PETSC_TRUE;
597   PetscFunctionReturn(0);
598 }
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "MatConjugate_Elemental"
602 static PetscErrorCode MatConjugate_Elemental(Mat A)
603 {
604   Mat_Elemental  *a = (Mat_Elemental*)A->data;
605 
606   PetscFunctionBegin;
607   El::Conjugate(*a->emat);
608   PetscFunctionReturn(0);
609 }
610 
611 #undef __FUNCT__
612 #define __FUNCT__ "MatHermitianTranspose_Elemental"
613 static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
614 {
615   Mat            Be = *B;
616   PetscErrorCode ierr;
617   MPI_Comm       comm;
618   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;
619 
620   PetscFunctionBegin;
621   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
622   /* Only out-of-place supported */
623   if (reuse == MAT_INITIAL_MATRIX){
624     ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
625     ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
626     ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
627     ierr = MatSetUp(Be);CHKERRQ(ierr);
628     *B = Be;
629   }
630   b = (Mat_Elemental*)Be->data;
631   El::Adjoint(*a->emat,*b->emat);
632   Be->assembled = PETSC_TRUE;
633   PetscFunctionReturn(0);
634 }
635 
#undef __FUNCT__
#define __FUNCT__ "MatSolve_Elemental"
/*
   MatSolve_Elemental - Solve A*X = B using the factorization stored in A.

   B is first copied into X, X's array is viewed as a [VC,STAR] Elemental
   vector, redistributed into a [MC,MR] copy for the triangular solves, and the
   result is copied back into the attached view (and hence into X's array).
   LU solves use the stored pivot vector when one was computed (pivoted LU),
   otherwise the pivot-free variant; Cholesky solves assume the UPPER factor.
*/
static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
{
  Mat_Elemental     *a = (Mat_Elemental*)A->data;
  PetscErrorCode    ierr;
  PetscElemScalar   *x;

  PetscFunctionBegin;
  ierr = VecCopy(B,X);CHKERRQ(ierr);
  ierr = VecGetArray(X,(PetscScalar **)&x);CHKERRQ(ierr);
  /* zero-copy view of X's array in Elemental's [VC,STAR] distribution */
  El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
  xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
  /* redistribute into [MC,MR], the distribution expected by the solvers */
  El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if ((*a->pivot).AllocatedMemory()) {
      /* pivoted LU: apply the row permutation during the solve */
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,xer);
      El::Copy(xer,xe);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
      El::Copy(xer,xe);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
    El::Copy(xer,xe);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  ierr = VecRestoreArray(X,(PetscScalar **)&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
671 
672 #undef __FUNCT__
673 #define __FUNCT__ "MatSolveAdd_Elemental"
674 static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
675 {
676   PetscErrorCode    ierr;
677 
678   PetscFunctionBegin;
679   ierr = MatSolve_Elemental(A,B,X);CHKERRQ(ierr);
680   ierr = VecAXPY(X,1,Y);CHKERRQ(ierr);
681   PetscFunctionReturn(0);
682 }
683 
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_Elemental"
/*
   MatMatSolve_Elemental - Solve A*X = B for a matrix of right-hand sides.

   B is copied into X, then the stored factors in A are applied to X in place.
   Unlike MatSolve_Elemental() no redistribution is needed here: X is already
   an Elemental matrix in the factors' distribution.
*/
static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;
  Mat_Elemental *x=(Mat_Elemental*)X->data;

  PetscFunctionBegin;
  El::Copy(*b->emat,*x->emat);
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if ((*a->pivot).AllocatedMemory()) {
      /* pivoted LU: apply the row permutation during the solve */
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,*x->emat);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  PetscFunctionReturn(0);
}
711 
#undef __FUNCT__
#define __FUNCT__ "MatLUFactor_Elemental"
/*
   MatLUFactor_Elemental - In-place LU factorization of A.

   NOTE(review): info->dtcol (nominally the column-pivot tolerance) is
   interpreted here as a boolean switch — nonzero selects pivoted LU (pivots
   stored in a->pivot), zero selects LU without pivoting.  Confirm this is the
   intended use of dtcol.  row/col permutations are ignored.
*/
static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (info->dtcol){
    El::LU(*a->emat,*a->pivot);
  } else {
    El::LU(*a->emat);
  }
  A->factortype = MAT_FACTOR_LU;
  A->assembled  = PETSC_TRUE;

  /* record which solver produced the factors */
  ierr = PetscFree(A->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
732 
733 #undef __FUNCT__
734 #define __FUNCT__ "MatLUFactorNumeric_Elemental"
735 static PetscErrorCode  MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
736 {
737   PetscErrorCode ierr;
738 
739   PetscFunctionBegin;
740   ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
741   ierr = MatLUFactor_Elemental(F,0,0,info);CHKERRQ(ierr);
742   PetscFunctionReturn(0);
743 }
744 
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_Elemental"
/* Symbolic LU is a no-op: dense storage needs no fill analysis. */
static PetscErrorCode  MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* F is create and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  PetscFunctionReturn(0);
}
753 
754 #undef __FUNCT__
755 #define __FUNCT__ "MatCholeskyFactor_Elemental"
756 static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
757 {
758   Mat_Elemental  *a = (Mat_Elemental*)A->data;
759   El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;
760   PetscErrorCode ierr;
761 
762   PetscFunctionBegin;
763   El::Cholesky(El::UPPER,*a->emat);
764   A->factortype = MAT_FACTOR_CHOLESKY;
765   A->assembled  = PETSC_TRUE;
766 
767   ierr = PetscFree(A->solvertype);CHKERRQ(ierr);
768   ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);CHKERRQ(ierr);
769   PetscFunctionReturn(0);
770 }
771 
772 #undef __FUNCT__
773 #define __FUNCT__ "MatCholeskyFactorNumeric_Elemental"
774 static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
775 {
776   PetscErrorCode ierr;
777 
778   PetscFunctionBegin;
779   ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
780   ierr = MatCholeskyFactor_Elemental(F,0,info);CHKERRQ(ierr);
781   PetscFunctionReturn(0);
782 }
783 
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_Elemental"
/* Symbolic Cholesky is a no-op: dense storage needs no fill analysis. */
static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* F is create and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  PetscFunctionReturn(0);
}
792 
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_elemental_elemental"
/* Query hook composed on factor matrices: reports MATSOLVERELEMENTAL. */
PetscErrorCode MatFactorGetSolverPackage_elemental_elemental(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERELEMENTAL;
  PetscFunctionReturn(0);
}
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "MatGetFactor_elemental_elemental"
804 static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
805 {
806   Mat            B;
807   PetscErrorCode ierr;
808 
809   PetscFunctionBegin;
810   /* Create the factorization matrix */
811   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
812   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
813   ierr = MatSetType(B,MATELEMENTAL);CHKERRQ(ierr);
814   ierr = MatSetUp(B);CHKERRQ(ierr);
815   B->factortype = ftype;
816   ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
817   ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);CHKERRQ(ierr);
818 
819   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_elemental_elemental);CHKERRQ(ierr);
820   *F            = B;
821   PetscFunctionReturn(0);
822 }
823 
#undef __FUNCT__
#define __FUNCT__ "MatSolverPackageRegister_Elemental"
/* Register the Elemental LU and Cholesky factorizations in PETSc's solver-package table. */
PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_Elemental(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Both factor types are served by the same factory routine */
  ierr = MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_LU,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
  ierr = MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
835 
836 #undef __FUNCT__
837 #define __FUNCT__ "MatNorm_Elemental"
838 static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
839 {
840   Mat_Elemental *a=(Mat_Elemental*)A->data;
841 
842   PetscFunctionBegin;
843   switch (type){
844   case NORM_1:
845     *nrm = El::OneNorm(*a->emat);
846     break;
847   case NORM_FROBENIUS:
848     *nrm = El::FrobeniusNorm(*a->emat);
849     break;
850   case NORM_INFINITY:
851     *nrm = El::InfinityNorm(*a->emat);
852     break;
853   default:
854     printf("Error: unsupported norm type!\n");
855   }
856   PetscFunctionReturn(0);
857 }
858 
#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_Elemental"
/* Set every entry of the Elemental matrix to zero. */
static PetscErrorCode MatZeroEntries_Elemental(Mat A)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  PetscFunctionBegin;
  El::Zero(*a->emat);
  PetscFunctionReturn(0);
}
869 
870 #undef __FUNCT__
871 #define __FUNCT__ "MatGetOwnershipIS_Elemental"
872 static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
873 {
874   Mat_Elemental  *a = (Mat_Elemental*)A->data;
875   PetscErrorCode ierr;
876   PetscInt       i,m,shift,stride,*idx;
877 
878   PetscFunctionBegin;
879   if (rows) {
880     m = a->emat->LocalHeight();
881     shift = a->emat->ColShift();
882     stride = a->emat->ColStride();
883     ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
884     for (i=0; i<m; i++) {
885       PetscInt rank,offset;
886       E2RO(A,0,shift+i*stride,&rank,&offset);
887       RO2P(A,0,rank,offset,&idx[i]);
888     }
889     ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);CHKERRQ(ierr);
890   }
891   if (cols) {
892     m = a->emat->LocalWidth();
893     shift = a->emat->RowShift();
894     stride = a->emat->RowStride();
895     ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
896     for (i=0; i<m; i++) {
897       PetscInt rank,offset;
898       E2RO(A,1,shift+i*stride,&rank,&offset);
899       RO2P(A,1,rank,offset,&idx[i]);
900     }
901     ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);CHKERRQ(ierr);
902   }
903   PetscFunctionReturn(0);
904 }
905 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_Elemental_Dense"
/*
   Convert a MATELEMENTAL matrix to MATDENSE.  Each process enumerates the global
   PETSc rows/columns it owns in the Elemental distribution (via MatGetOwnershipIS())
   and copies the locally stored values into the dense matrix with MatSetValues().
   reuse == MAT_REUSE_MATRIX fills the existing *B; MAT_INPLACE_MATRIX replaces A's header.
*/
static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
{
  Mat                Bmpi;
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  MPI_Comm           comm;
  PetscErrorCode     ierr;
  IS                 isrows,iscols;
  PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
  const PetscInt     *rows,*cols;
  PetscElemScalar    v;
  const El::Grid     &grid = a->emat->Grid();

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    Bmpi = *B;
  } else {
    ierr = MatCreate(comm,&Bmpi);CHKERRQ(ierr);
    ierr = MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = MatSetType(Bmpi,MATDENSE);CHKERRQ(ierr);
    ierr = MatSetUp(Bmpi);CHKERRQ(ierr);
  }

  /* Get the global PETSc indices of the entries stored locally by this process */
  ierr = MatGetOwnershipIS(A,&isrows,&iscols);CHKERRQ(ierr);
  ierr = ISGetLocalSize(isrows,&nrows);CHKERRQ(ierr);
  ierr = ISGetIndices(isrows,&rows);CHKERRQ(ierr);
  ierr = ISGetLocalSize(iscols,&ncols);CHKERRQ(ierr);
  ierr = ISGetIndices(iscols,&cols);CHKERRQ(ierr);

  if (a->roworiented) {
    for (i=0; i<nrows; i++) {
      P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<ncols; j++) {
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

        /* NOTE(review): dividing by the process-grid dimensions assumes the default
           element-wise cyclic [MC,MR] distribution -- confirm if other distributions are allowed */
        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        ierr = MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  } else { /* column-oriented: identical translation, loops swapped */
    for (j=0; j<ncols; j++) {
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nrows; i++) {
        P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        ierr = MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&Bmpi);CHKERRQ(ierr);
  } else {
    *B = Bmpi;
  }
  ierr = ISDestroy(&isrows);CHKERRQ(ierr);
  ierr = ISDestroy(&iscols);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
983 
984 #undef __FUNCT__
985 #define __FUNCT__ "MatConvert_SeqAIJ_Elemental"
986 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
987 {
988   Mat               mat_elemental;
989   PetscErrorCode    ierr;
990   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
991   const PetscInt    *cols;
992   const PetscScalar *vals;
993 
994   PetscFunctionBegin;
995   ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
996   ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
997   ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
998   ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
999   for (row=0; row<M; row++) {
1000     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1001     /* PETSc-Elemental interaface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1002     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
1003     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1004   }
1005   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1006   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1007 
1008   if (reuse == MAT_INPLACE_MATRIX) {
1009     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1010   } else {
1011     *newmat = mat_elemental;
1012   }
1013   PetscFunctionReturn(0);
1014 }
1015 
1016 #undef __FUNCT__
1017 #define __FUNCT__ "MatConvert_MPIAIJ_Elemental"
1018 PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1019 {
1020   Mat               mat_elemental;
1021   PetscErrorCode    ierr;
1022   PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
1023   const PetscInt    *cols;
1024   const PetscScalar *vals;
1025 
1026   PetscFunctionBegin;
1027   ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
1028   ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1029   ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
1030   ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
1031   for (row=rstart; row<rend; row++) {
1032     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1033     for (j=0; j<ncols; j++) {
1034       /* PETSc-Elemental interaface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1035       ierr = MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);CHKERRQ(ierr);
1036     }
1037     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1038   }
1039   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1040   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1041 
1042   if (reuse == MAT_INPLACE_MATRIX) {
1043     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1044   } else {
1045     *newmat = mat_elemental;
1046   }
1047   PetscFunctionReturn(0);
1048 }
1049 
1050 #undef __FUNCT__
1051 #define __FUNCT__ "MatConvert_SeqSBAIJ_Elemental"
1052 PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1053 {
1054   Mat               mat_elemental;
1055   PetscErrorCode    ierr;
1056   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
1057   const PetscInt    *cols;
1058   const PetscScalar *vals;
1059 
1060   PetscFunctionBegin;
1061   ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
1062   ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
1063   ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
1064   ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
1065   ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
1066   for (row=0; row<M; row++) {
1067     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1068     /* PETSc-Elemental interaface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1069     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
1070     for (j=0; j<ncols; j++) { /* lower triangular part */
1071       if (cols[j] == row) continue;
1072       ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);CHKERRQ(ierr);
1073     }
1074     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1075   }
1076   ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
1077   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1078   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1079 
1080   if (reuse == MAT_INPLACE_MATRIX) {
1081     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1082   } else {
1083     *newmat = mat_elemental;
1084   }
1085   PetscFunctionReturn(0);
1086 }
1087 
1088 #undef __FUNCT__
1089 #define __FUNCT__ "MatConvert_MPISBAIJ_Elemental"
1090 PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1091 {
1092   Mat               mat_elemental;
1093   PetscErrorCode    ierr;
1094   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
1095   const PetscInt    *cols;
1096   const PetscScalar *vals;
1097 
1098   PetscFunctionBegin;
1099   ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
1100   ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
1101   ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
1102   ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
1103   ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
1104   for (row=rstart; row<rend; row++) {
1105     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1106     /* PETSc-Elemental interaface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1107     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
1108     for (j=0; j<ncols; j++) { /* lower triangular part */
1109       if (cols[j] == row) continue;
1110       ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);CHKERRQ(ierr);
1111     }
1112     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1113   }
1114   ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
1115   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1116   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1117 
1118   if (reuse == MAT_INPLACE_MATRIX) {
1119     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1120   } else {
1121     *newmat = mat_elemental;
1122   }
1123   PetscFunctionReturn(0);
1124 }
1125 
1126 #undef __FUNCT__
1127 #define __FUNCT__ "MatDestroy_Elemental"
1128 static PetscErrorCode MatDestroy_Elemental(Mat A)
1129 {
1130   Mat_Elemental      *a = (Mat_Elemental*)A->data;
1131   PetscErrorCode     ierr;
1132   Mat_Elemental_Grid *commgrid;
1133   PetscBool          flg;
1134   MPI_Comm           icomm;
1135 
1136   PetscFunctionBegin;
1137   delete a->emat;
1138   delete a->pivot;
1139 
1140   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1141   ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
1142   ierr = MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRQ(ierr);
1143   if (--commgrid->grid_refct == 0) {
1144     delete commgrid->grid;
1145     ierr = PetscFree(commgrid);CHKERRQ(ierr);
1146     ierr = MPI_Keyval_free(&Petsc_Elemental_keyval);CHKERRQ(ierr);
1147   }
1148   ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
1149   ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);CHKERRQ(ierr);
1150   ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
1151   ierr = PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",NULL);CHKERRQ(ierr);
1152   ierr = PetscFree(A->data);CHKERRQ(ierr);
1153   PetscFunctionReturn(0);
1154 }
1155 
1156 #undef __FUNCT__
1157 #define __FUNCT__ "MatSetUp_Elemental"
1158 PetscErrorCode MatSetUp_Elemental(Mat A)
1159 {
1160   Mat_Elemental  *a = (Mat_Elemental*)A->data;
1161   PetscErrorCode ierr;
1162   PetscMPIInt    rsize,csize;
1163 
1164   PetscFunctionBegin;
1165   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1166   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1167 
1168   a->emat->Resize(A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1169   El::Zero(*a->emat);
1170 
1171   ierr = MPI_Comm_size(A->rmap->comm,&rsize);CHKERRQ(ierr);
1172   ierr = MPI_Comm_size(A->cmap->comm,&csize);CHKERRQ(ierr);
1173   if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
1174   a->commsize = rsize;
1175   a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
1176   a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
1177   a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
1178   a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
1179   PetscFunctionReturn(0);
1180 }
1181 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_Elemental"
/* Complete assembly by processing Elemental's queued (possibly off-process) updates. */
PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  PetscFunctionBegin;
  /* Apply all updates queued by MatSetValues_Elemental; presumably collective over the grid -- confirm against Elemental docs */
  a->emat->ProcessQueues();
  PetscFunctionReturn(0);
}
1194 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_Elemental"
/* Nothing to do: all assembly work happens in MatAssemblyBegin_Elemental(). */
PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  PetscFunctionBegin;
  /* Currently does nothing */
  PetscFunctionReturn(0);
}
1203 
1204 #undef __FUNCT__
1205 #define __FUNCT__ "MatLoad_Elemental"
1206 PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1207 {
1208   PetscErrorCode ierr;
1209   Mat            Adense,Ae;
1210   MPI_Comm       comm;
1211 
1212   PetscFunctionBegin;
1213   ierr = PetscObjectGetComm((PetscObject)newMat,&comm);CHKERRQ(ierr);
1214   ierr = MatCreate(comm,&Adense);CHKERRQ(ierr);
1215   ierr = MatSetType(Adense,MATDENSE);CHKERRQ(ierr);
1216   ierr = MatLoad(Adense,viewer);CHKERRQ(ierr);
1217   ierr = MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);CHKERRQ(ierr);
1218   ierr = MatDestroy(&Adense);CHKERRQ(ierr);
1219   ierr = MatHeaderReplace(newMat,&Ae);CHKERRQ(ierr);
1220   PetscFunctionReturn(0);
1221 }
1222 
#undef __FUNCT__
#define __FUNCT__ "MatElementalHermitianGenDefEig_Elemental"
/*
   Solve the Hermitian-definite generalized eigenproblem for the pencil (A,B)
   with El::HermitianGenDefEig(), then wrap the eigenvalue vector w and the
   eigenvector matrix X in newly created MATELEMENTAL matrices *evals and *evec.
*/
PetscErrorCode MatElementalHermitianGenDefEig_Elemental(El::Pencil eigtype,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
{
  PetscErrorCode ierr;
  Mat_Elemental  *a=(Mat_Elemental*)A->data,*b=(Mat_Elemental*)B->data,*x;
  MPI_Comm       comm;
  Mat            EVAL;
  Mat_Elemental  *e;

  PetscFunctionBegin;
  /* Compute eigenvalues and eigenvectors */
  El::DistMatrix<PetscElemScalar,El::VR,El::STAR> w( *a->grid ); /* holding eigenvalues */
  El::DistMatrix<PetscElemScalar>                 X( *a->grid ); /* holding eigenvectors */
  El::HermitianGenDefEig(eigtype,uplo,*a->emat,*b->emat,w,X,sort,subset,ctrl);
  /* El::Print(w, "Eigenvalues"); */

  /* Wrap X into a PETSc MATELEMENTAL matrix; the empty assemble makes the Mat usable */
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,evec);CHKERRQ(ierr);
  ierr = MatSetSizes(*evec,PETSC_DECIDE,PETSC_DECIDE,X.Height(),X.Width());CHKERRQ(ierr);
  ierr = MatSetType(*evec,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetFromOptions(*evec);CHKERRQ(ierr);
  ierr = MatSetUp(*evec);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*evec,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*evec,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  x = (Mat_Elemental*)(*evec)->data;
  /* NOTE(review): the previous emat allocated by MatCreate is overwritten by value
     here; the commented-out delete suggests a suspected leak -- confirm ownership */
  //delete x->emat; //-- memory leak???
  *x->emat = X;

  /* Wrap w the same way */
  ierr = MatCreate(comm,&EVAL);CHKERRQ(ierr);
  ierr = MatSetSizes(EVAL,PETSC_DECIDE,PETSC_DECIDE,w.Height(),w.Width());CHKERRQ(ierr);
  ierr = MatSetType(EVAL,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetFromOptions(EVAL);CHKERRQ(ierr);
  ierr = MatSetUp(EVAL);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(EVAL,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(EVAL,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  e         = (Mat_Elemental*)EVAL->data;
  *e->emat = w; //-- memory leak???
  *evals   = EVAL;

#if defined(MV)
  /* Test correctness norm = || - A*X + B*X*w || */
  {
    PetscElemScalar alpha,beta;
    El::DistMatrix<PetscElemScalar> Y(*a->grid); //tmp matrix
    alpha = 1.0; beta=0.0;
    El::Gemm(El::NORMAL,El::NORMAL,alpha,*b->emat,X,beta,Y); //Y = B*X
    El::DiagonalScale(El::RIGHT,El::NORMAL, w, Y); //Y = Y*w
    alpha = -1.0; beta=1.0;
    El::Gemm(El::NORMAL,El::NORMAL,alpha,*a->emat,X,beta,Y); //Y = - A*X + B*X*w

    PetscElemScalar norm = El::FrobeniusNorm(Y);
    if ((*a->grid).Rank()==0) printf("  norm (- A*X + B*X*w) = %g\n",norm);
  }

  {
    PetscMPIInt rank;
    ierr = MPI_Comm_rank(comm,&rank);
    printf("w: [%d] [%d, %d %d] %d; X: %d %d\n",rank,w.DistRank(),w.ColRank(),w.RowRank(),w.LocalHeight(),X.LocalHeight(),X.LocalWidth());
  }
#endif
  PetscFunctionReturn(0);
}
1288 
#undef __FUNCT__
#define __FUNCT__ "MatElementalHermitianGenDefEig"
/*@
  MatElementalHermitianGenDefEig - Compute the set of eigenvalues of the Hermitian-definite matrix pencil determined by the subset structure

   Logically Collective on Mat

   Input Parameters:
+  type   - the Elemental pencil type
.  uplo   - which triangle of A and B to access
.  A      - first matrix of the pencil, of type MATELEMENTAL
.  B      - second matrix of the pencil, of type MATELEMENTAL
.  sort   - Elemental sorting option for the eigenvalues
.  subset - Elemental structure selecting which eigenpairs to compute
-  ctrl   - Elemental eigensolver control parameters

   Output Parameters:
+  evals - new matrix holding the computed eigenvalues
-  evec  - new matrix holding the computed eigenvectors

   Notes:
   Dispatches to the implementation composed on A as "MatElementalHermitianGenDefEig_C".

   Level: beginner

   References:
.      Elemental Users' Guide

@*/
PetscErrorCode MatElementalHermitianGenDefEig(El::Pencil type,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscUseMethod(A,"MatElementalHermitianGenDefEig_C",(El::Pencil,El::UpperOrLower,Mat,Mat,Mat*,Mat*,El::SortType,El::HermitianEigSubset<PetscElemScalar>,const El::HermitianEigCtrl<PetscElemScalar>),(type,uplo,A,B,evals,evec,sort,subset,ctrl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1310 
/* -------------------------------------------------------------------*/
/* Function table for MATELEMENTAL; the numeric comments are slot indices into struct _MatOps,
   and 0 marks an operation this type does not implement. */
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ MatMatMult_Elemental,
       MatMatMultSymbolic_Elemental,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       MatMatTransposeMult_Elemental,
       MatMatTransposeMultSymbolic_Elemental,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ 0,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       0,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0
};
1453 
1454 /*MC
1455    MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package
1456 
1457   Use ./configure --download-elemental to install PETSc to use Elemental
1458 
  Use -pc_type lu -pc_factor_mat_solver_package elemental to use this direct solver
1460 
1461    Options Database Keys:
1462 + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1463 - -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix
1464 
1465   Level: beginner
1466 
1467 .seealso: MATDENSE
1468 M*/
1469 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_Elemental"
/*
   Create a MATELEMENTAL matrix.  The El::Grid is shared by every Elemental
   matrix on the same communicator: it is cached as an MPI attribute on the
   (duplicated) communicator and reference counted, so the last
   MatDestroy_Elemental() on that communicator frees it.
*/
PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
{
  Mat_Elemental      *a;
  PetscErrorCode     ierr;
  PetscBool          flg,flg1;
  Mat_Elemental_Grid *commgrid;
  MPI_Comm           icomm;
  PetscInt           optv1;

  PetscFunctionBegin;
  /* First MatCreate_Elemental call initializes the Elemental library itself */
  ierr = PetscElementalInitializePackage();CHKERRQ(ierr);
  ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->insertmode = NOT_SET_VALUES;

  ierr = PetscNewLog(A,&a);CHKERRQ(ierr);
  A->data = (void*)a;

  /* Set up the elemental matrix */
  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

  /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
  if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
    ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);CHKERRQ(ierr);
    /* ierr = MPI_Comm_create_Keyval(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0); -- new version? */
  }
  ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
  ierr = MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRQ(ierr);
  if (!flg) {
    /* First Elemental matrix on this communicator: build and cache the grid */
    ierr = PetscNewLog(A,&commgrid);CHKERRQ(ierr);

    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");CHKERRQ(ierr);
    /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
    ierr = PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);CHKERRQ(ierr);
    if (flg1) {
      if (El::mpi::Size(cxxcomm) % optv1 != 0) {
        SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
      }
      commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
    } else {
      commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
      /* NOTE(review): valgrind reportedly flagged this allocation as a leak -- confirm it is freed in MatDestroy_Elemental() */
    }
    commgrid->grid_refct = 1;
    ierr = MPI_Attr_put(icomm,Petsc_Elemental_keyval,(void*)commgrid);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  } else {
    /* Reuse the cached grid, bumping its reference count */
    commgrid->grid_refct++;
  }
  ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
  a->grid      = commgrid->grid;
  a->emat      = new El::DistMatrix<PetscElemScalar>(*a->grid);
  a->pivot     = new El::DistMatrix<PetscInt,El::VC,El::STAR>(*a->grid);
  a->roworiented = PETSC_TRUE;

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",MatElementalHermitianGenDefEig_Elemental);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1532