xref: /petsc/src/mat/impls/elemental/matelem.cxx (revision 32d7a74457282dfd68951f2085bdc26a145ec093)
1 #include <../src/mat/impls/elemental/matelemimpl.h> /*I "petscmat.h" I*/
2 
3 /*
4     The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
5   is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
6 */
7 static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "PetscElementalInitializePackage"
11 /*@C
12    PetscElementalInitializePackage - Initialize Elemental package
13 
14    Logically Collective
15 
16    Level: developer
17 
18 .seealso: MATELEMENTAL, PetscElementalFinalizePackage()
19 @*/
20 PetscErrorCode PetscElementalInitializePackage(void)
21 {
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   if (El::Initialized()) PetscFunctionReturn(0);
26   El::Initialize();   /* called by the 1st call of MatCreate_Elemental */
27   ierr = PetscRegisterFinalize(PetscElementalFinalizePackage);CHKERRQ(ierr);
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PetscElementalFinalizePackage"
33 /*@C
34    PetscElementalFinalizePackage - Finalize Elemental package
35 
36    Logically Collective
37 
38    Level: developer
39 
40 .seealso: MATELEMENTAL, PetscElementalInitializePackage()
41 @*/
42 PetscErrorCode PetscElementalFinalizePackage(void)
43 {
44   PetscFunctionBegin;
45   El::Finalize();  /* called by PetscFinalize() */
46   PetscFunctionReturn(0);
47 }
48 
49 #undef __FUNCT__
50 #define __FUNCT__ "MatView_Elemental"
51 static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
52 {
53   PetscErrorCode ierr;
54   Mat_Elemental  *a = (Mat_Elemental*)A->data;
55   PetscBool      iascii;
56 
57   PetscFunctionBegin;
58   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
59   if (iascii) {
60     PetscViewerFormat format;
61     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
62     if (format == PETSC_VIEWER_ASCII_INFO) {
63       /* call elemental viewing function */
64       ierr = PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");CHKERRQ(ierr);
65       ierr = PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());CHKERRQ(ierr);
66       ierr = PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());CHKERRQ(ierr);
67       if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
68         /* call elemental viewing function */
69         ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");CHKERRQ(ierr);
70       }
71 
72     } else if (format == PETSC_VIEWER_DEFAULT) {
73       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
74       El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
75       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
76       if (A->factortype == MAT_FACTOR_NONE){
77         Mat Adense;
78         ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");CHKERRQ(ierr);
79         ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
80         ierr = MatView(Adense,viewer);CHKERRQ(ierr);
81         ierr = MatDestroy(&Adense);CHKERRQ(ierr);
82       }
83     } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Format");
84   } else {
85     /* convert to dense format and call MatView() */
86     Mat Adense;
87     ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");CHKERRQ(ierr);
88     ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
89     ierr = MatView(Adense,viewer);CHKERRQ(ierr);
90     ierr = MatDestroy(&Adense);CHKERRQ(ierr);
91   }
92   PetscFunctionReturn(0);
93 }
94 
95 #undef __FUNCT__
96 #define __FUNCT__ "MatGetInfo_Elemental"
97 static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
98 {
99   Mat_Elemental  *a = (Mat_Elemental*)A->data;
100 
101   PetscFunctionBegin;
102   info->block_size = 1.0;
103 
104   if (flag == MAT_LOCAL) {
105     info->nz_allocated   = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
106     info->nz_used        = info->nz_allocated;
107   } else if (flag == MAT_GLOBAL_MAX) {
108     //ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
109     /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
110     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
111   } else if (flag == MAT_GLOBAL_SUM) {
112     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
113     info->nz_allocated   = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
114     info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
115     //ierr = MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
116     //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
117   }
118 
119   info->nz_unneeded       = 0.0;
120   info->assemblies        = (double)A->num_ass;
121   info->mallocs           = 0;
122   info->memory            = ((PetscObject)A)->mem;
123   info->fill_ratio_given  = 0; /* determined by Elemental */
124   info->fill_ratio_needed = 0;
125   info->factor_mallocs    = 0;
126   PetscFunctionReturn(0);
127 }
128 
129 #undef __FUNCT__
130 #define __FUNCT__ "MatSetOption_Elemental"
131 PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
132 {
133   Mat_Elemental  *a = (Mat_Elemental*)A->data;
134   PetscErrorCode ierr;
135 
136   PetscFunctionBegin;
137   switch (op) {
138   case MAT_NEW_NONZERO_LOCATIONS:
139   case MAT_NEW_NONZERO_LOCATION_ERR:
140   case MAT_NEW_NONZERO_ALLOCATION_ERR:
141   case MAT_ROW_ORIENTED:
142     a->roworiented = flg;
143     break;
144   default:
145     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
146   }
147   PetscFunctionReturn(0);
148 }
149 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_Elemental"
/*
   MatSetValues_Elemental - Inserts or adds a logically dense block of values.

   Two passes over the (rows,cols) index pairs:
     pass 1: local entries are written/updated immediately via Set()/Update(),
             while off-process entries are only counted (numQueues);
             off-process entries require imode == ADD_VALUES.
     pass 2: Reserve(numQueues) then QueueUpdate() each off-process entry;
             the queued updates are flushed later (at assembly time).
   Negative row/column indices are skipped, per PETSc convention.
   The row-oriented and column-oriented branches differ only in the loop
   nesting order and in how vals[] is indexed (i*nc+j vs i+j*nr).
*/
static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  PetscFunctionBegin;
  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      /* translate PETSc global index -> (rank,offset) -> Elemental index */
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
          /* only ADD_VALUES can be queued; INSERT of a remote entry is unsupported */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* local entry: apply immediately; vals is row-major here */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* second pass: queue exactly the off-process updates counted above */
    a->emat->Reserve( numQueues );
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
          a->emat->QueueUpdate( erow, ecol, vals[i*nc+j] );
        }
      }
    }
  } else { /* columnoriented */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* local entry: apply immediately; vals is column-major here */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* second pass: queue exactly the off-process updates counted above */
    a->emat->Reserve( numQueues );
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);

      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
          a->emat->QueueUpdate( erow, ecol, vals[i+j*nr] );
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
249 
250 #undef __FUNCT__
251 #define __FUNCT__ "MatMult_Elemental"
252 static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
253 {
254   Mat_Elemental         *a = (Mat_Elemental*)A->data;
255   PetscErrorCode        ierr;
256   const PetscElemScalar *x;
257   PetscElemScalar       *y;
258   PetscElemScalar       one = 1,zero = 0;
259 
260   PetscFunctionBegin;
261   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
262   ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
263   { /* Scoping so that constructor is called before pointer is returned */
264     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
265     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
266     ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
267     El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
268   }
269   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
270   ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
271   PetscFunctionReturn(0);
272 }
273 
274 #undef __FUNCT__
275 #define __FUNCT__ "MatMultTranspose_Elemental"
276 static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
277 {
278   Mat_Elemental         *a = (Mat_Elemental*)A->data;
279   PetscErrorCode        ierr;
280   const PetscElemScalar *x;
281   PetscElemScalar       *y;
282   PetscElemScalar       one = 1,zero = 0;
283 
284   PetscFunctionBegin;
285   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
286   ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
287   { /* Scoping so that constructor is called before pointer is returned */
288     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
289     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
290     ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
291     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
292   }
293   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
294   ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
295   PetscFunctionReturn(0);
296 }
297 
298 #undef __FUNCT__
299 #define __FUNCT__ "MatMultAdd_Elemental"
300 static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
301 {
302   Mat_Elemental         *a = (Mat_Elemental*)A->data;
303   PetscErrorCode        ierr;
304   const PetscElemScalar *x;
305   PetscElemScalar       *z;
306   PetscElemScalar       one = 1;
307 
308   PetscFunctionBegin;
309   if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
310   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
311   ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
312   { /* Scoping so that constructor is called before pointer is returned */
313     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
314     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
315     ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
316     El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
317   }
318   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
319   ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
320   PetscFunctionReturn(0);
321 }
322 
323 #undef __FUNCT__
324 #define __FUNCT__ "MatMultTransposeAdd_Elemental"
325 static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
326 {
327   Mat_Elemental         *a = (Mat_Elemental*)A->data;
328   PetscErrorCode        ierr;
329   const PetscElemScalar *x;
330   PetscElemScalar       *z;
331   PetscElemScalar       one = 1;
332 
333   PetscFunctionBegin;
334   if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
335   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
336   ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
337   { /* Scoping so that constructor is called before pointer is returned */
338     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
339     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
340     ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
341     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
342   }
343   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
344   ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
345   PetscFunctionReturn(0);
346 }
347 
348 #undef __FUNCT__
349 #define __FUNCT__ "MatMatMultNumeric_Elemental"
350 static PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
351 {
352   Mat_Elemental    *a = (Mat_Elemental*)A->data;
353   Mat_Elemental    *b = (Mat_Elemental*)B->data;
354   Mat_Elemental    *c = (Mat_Elemental*)C->data;
355   PetscElemScalar  one = 1,zero = 0;
356 
357   PetscFunctionBegin;
358   { /* Scoping so that constructor is called before pointer is returned */
359     El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
360   }
361   C->assembled = PETSC_TRUE;
362   PetscFunctionReturn(0);
363 }
364 
365 #undef __FUNCT__
366 #define __FUNCT__ "MatMatMultSymbolic_Elemental"
367 static PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
368 {
369   PetscErrorCode ierr;
370   Mat            Ce;
371   MPI_Comm       comm;
372 
373   PetscFunctionBegin;
374   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
375   ierr = MatCreate(comm,&Ce);CHKERRQ(ierr);
376   ierr = MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
377   ierr = MatSetType(Ce,MATELEMENTAL);CHKERRQ(ierr);
378   ierr = MatSetUp(Ce);CHKERRQ(ierr);
379   *C = Ce;
380   PetscFunctionReturn(0);
381 }
382 
383 #undef __FUNCT__
384 #define __FUNCT__ "MatMatMult_Elemental"
385 static PetscErrorCode MatMatMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
386 {
387   PetscErrorCode ierr;
388 
389   PetscFunctionBegin;
390   if (scall == MAT_INITIAL_MATRIX){
391     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
392     ierr = MatMatMultSymbolic_Elemental(A,B,1.0,C);CHKERRQ(ierr);
393     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
394   }
395   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
396   ierr = MatMatMultNumeric_Elemental(A,B,*C);CHKERRQ(ierr);
397   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
398   PetscFunctionReturn(0);
399 }
400 
401 #undef __FUNCT__
402 #define __FUNCT__ "MatMatTransposeMultNumeric_Elemental"
403 static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
404 {
405   Mat_Elemental      *a = (Mat_Elemental*)A->data;
406   Mat_Elemental      *b = (Mat_Elemental*)B->data;
407   Mat_Elemental      *c = (Mat_Elemental*)C->data;
408   PetscElemScalar    one = 1,zero = 0;
409 
410   PetscFunctionBegin;
411   { /* Scoping so that constructor is called before pointer is returned */
412     El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
413   }
414   C->assembled = PETSC_TRUE;
415   PetscFunctionReturn(0);
416 }
417 
418 #undef __FUNCT__
419 #define __FUNCT__ "MatMatTransposeMultSymbolic_Elemental"
420 static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
421 {
422   PetscErrorCode ierr;
423   Mat            Ce;
424   MPI_Comm       comm;
425 
426   PetscFunctionBegin;
427   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
428   ierr = MatCreate(comm,&Ce);CHKERRQ(ierr);
429   ierr = MatSetSizes(Ce,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
430   ierr = MatSetType(Ce,MATELEMENTAL);CHKERRQ(ierr);
431   ierr = MatSetUp(Ce);CHKERRQ(ierr);
432   *C = Ce;
433   PetscFunctionReturn(0);
434 }
435 
436 #undef __FUNCT__
437 #define __FUNCT__ "MatMatTransposeMult_Elemental"
438 static PetscErrorCode MatMatTransposeMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
439 {
440   PetscErrorCode ierr;
441 
442   PetscFunctionBegin;
443   if (scall == MAT_INITIAL_MATRIX){
444     ierr = PetscLogEventBegin(MAT_MatTransposeMultSymbolic,A,B,0,0);CHKERRQ(ierr);
445     ierr = MatMatMultSymbolic_Elemental(A,B,1.0,C);CHKERRQ(ierr);
446     ierr = PetscLogEventEnd(MAT_MatTransposeMultSymbolic,A,B,0,0);CHKERRQ(ierr);
447   }
448   ierr = PetscLogEventBegin(MAT_MatTransposeMultNumeric,A,B,0,0);CHKERRQ(ierr);
449   ierr = MatMatTransposeMultNumeric_Elemental(A,B,*C);CHKERRQ(ierr);
450   ierr = PetscLogEventEnd(MAT_MatTransposeMultNumeric,A,B,0,0);CHKERRQ(ierr);
451   PetscFunctionReturn(0);
452 }
453 
#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_Elemental"
/*
   MatGetDiagonal_Elemental - Extracts the diagonal into vector D.

   Every process walks all min(nrows,ncols) diagonal entries, translating each
   PETSc index to Elemental (row,col) coordinates and fetching the value with
   emat->Get().  NOTE(review): Get() on a DistMatrix appears to involve
   communication for non-local entries, so this loop is collective and O(nD)
   global operations — confirm before restructuring.
*/
static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
{
  PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscErrorCode  ierr;
  PetscElemScalar v;
  MPI_Comm        comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nrows,&ncols);CHKERRQ(ierr);
  /* diagonal length of a (possibly rectangular) matrix */
  nD = nrows>ncols ? ncols : nrows;
  for (i=0; i<nD; i++) {
    PetscInt erow,ecol;
    /* PETSc global index -> (rank,offset) -> Elemental index, rows then cols */
    P2RO(A,0,i,&rrank,&ridx);
    RO2E(A,0,rrank,ridx,&erow);
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    P2RO(A,1,i,&crank,&cidx);
    RO2E(A,1,crank,cidx,&ecol);
    if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
    v = a->emat->Get(erow,ecol);
    ierr = VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(D);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(D);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
483 
484 #undef __FUNCT__
485 #define __FUNCT__ "MatDiagonalScale_Elemental"
486 static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
487 {
488   Mat_Elemental         *x = (Mat_Elemental*)X->data;
489   const PetscElemScalar *d;
490   PetscErrorCode        ierr;
491 
492   PetscFunctionBegin;
493   if (R) {
494     ierr = VecGetArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
495     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
496     de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
497     El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
498     ierr = VecRestoreArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
499   }
500   if (L) {
501     ierr = VecGetArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
502     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
503     de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
504     El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
505     ierr = VecRestoreArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
506   }
507   PetscFunctionReturn(0);
508 }
509 
510 #undef __FUNCT__
511 #define __FUNCT__ "MatScale_Elemental"
512 static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
513 {
514   Mat_Elemental  *x = (Mat_Elemental*)X->data;
515 
516   PetscFunctionBegin;
517   El::Scale((PetscElemScalar)a,*x->emat);
518   PetscFunctionReturn(0);
519 }
520 
521 /*
522   MatAXPY - Computes Y = a*X + Y.
523 */
524 #undef __FUNCT__
525 #define __FUNCT__ "MatAXPY_Elemental"
526 static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
527 {
528   Mat_Elemental  *x = (Mat_Elemental*)X->data;
529   Mat_Elemental  *y = (Mat_Elemental*)Y->data;
530   PetscErrorCode ierr;
531 
532   PetscFunctionBegin;
533   El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
534   ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
535   PetscFunctionReturn(0);
536 }
537 
538 #undef __FUNCT__
539 #define __FUNCT__ "MatCopy_Elemental"
540 static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
541 {
542   Mat_Elemental *a=(Mat_Elemental*)A->data;
543   Mat_Elemental *b=(Mat_Elemental*)B->data;
544 
545   PetscFunctionBegin;
546   El::Copy(*a->emat,*b->emat);
547   PetscFunctionReturn(0);
548 }
549 
550 #undef __FUNCT__
551 #define __FUNCT__ "MatDuplicate_Elemental"
552 static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
553 {
554   Mat            Be;
555   MPI_Comm       comm;
556   Mat_Elemental  *a=(Mat_Elemental*)A->data;
557   PetscErrorCode ierr;
558 
559   PetscFunctionBegin;
560   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
561   ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
562   ierr = MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
563   ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
564   ierr = MatSetUp(Be);CHKERRQ(ierr);
565   *B = Be;
566   if (op == MAT_COPY_VALUES) {
567     Mat_Elemental *b=(Mat_Elemental*)Be->data;
568     El::Copy(*a->emat,*b->emat);
569   }
570   Be->assembled = PETSC_TRUE;
571   PetscFunctionReturn(0);
572 }
573 
574 #undef __FUNCT__
575 #define __FUNCT__ "MatTranspose_Elemental"
576 static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
577 {
578   Mat            Be = *B;
579   PetscErrorCode ierr;
580   MPI_Comm       comm;
581   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;
582 
583   PetscFunctionBegin;
584   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
585   /* Only out-of-place supported */
586   if (reuse == MAT_INITIAL_MATRIX){
587     ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
588     ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
589     ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
590     ierr = MatSetUp(Be);CHKERRQ(ierr);
591     *B = Be;
592   }
593   b = (Mat_Elemental*)Be->data;
594   El::Transpose(*a->emat,*b->emat);
595   Be->assembled = PETSC_TRUE;
596   PetscFunctionReturn(0);
597 }
598 
599 #undef __FUNCT__
600 #define __FUNCT__ "MatConjugate_Elemental"
601 static PetscErrorCode MatConjugate_Elemental(Mat A)
602 {
603   Mat_Elemental  *a = (Mat_Elemental*)A->data;
604 
605   PetscFunctionBegin;
606   El::Conjugate(*a->emat);
607   PetscFunctionReturn(0);
608 }
609 
610 #undef __FUNCT__
611 #define __FUNCT__ "MatHermitianTranspose_Elemental"
612 static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
613 {
614   Mat            Be = *B;
615   PetscErrorCode ierr;
616   MPI_Comm       comm;
617   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;
618 
619   PetscFunctionBegin;
620   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
621   /* Only out-of-place supported */
622   if (reuse == MAT_INITIAL_MATRIX){
623     ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
624     ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
625     ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
626     ierr = MatSetUp(Be);CHKERRQ(ierr);
627     *B = Be;
628   }
629   b = (Mat_Elemental*)Be->data;
630   El::Adjoint(*a->emat,*b->emat);
631   Be->assembled = PETSC_TRUE;
632   PetscFunctionReturn(0);
633 }
634 
635 #undef __FUNCT__
636 #define __FUNCT__ "MatSolve_Elemental"
637 static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
638 {
639   Mat_Elemental     *a = (Mat_Elemental*)A->data;
640   PetscErrorCode    ierr;
641   PetscElemScalar   *x;
642 
643   PetscFunctionBegin;
644   ierr = VecCopy(B,X);CHKERRQ(ierr);
645   ierr = VecGetArray(X,(PetscScalar **)&x);CHKERRQ(ierr);
646   El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
647   xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
648   El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
649   switch (A->factortype) {
650   case MAT_FACTOR_LU:
651     if ((*a->pivot).AllocatedMemory()) {
652       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,xer);
653       El::Copy(xer,xe);
654     } else {
655       El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
656       El::Copy(xer,xe);
657     }
658     break;
659   case MAT_FACTOR_CHOLESKY:
660     El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
661     El::Copy(xer,xe);
662     break;
663   default:
664     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
665     break;
666   }
667   ierr = VecRestoreArray(X,(PetscScalar **)&x);CHKERRQ(ierr);
668   PetscFunctionReturn(0);
669 }
670 
671 #undef __FUNCT__
672 #define __FUNCT__ "MatSolveAdd_Elemental"
673 static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
674 {
675   PetscErrorCode    ierr;
676 
677   PetscFunctionBegin;
678   ierr = MatSolve_Elemental(A,B,X);CHKERRQ(ierr);
679   ierr = VecAXPY(X,1,Y);CHKERRQ(ierr);
680   PetscFunctionReturn(0);
681 }
682 
683 #undef __FUNCT__
684 #define __FUNCT__ "MatMatSolve_Elemental"
685 static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
686 {
687   Mat_Elemental *a=(Mat_Elemental*)A->data;
688   Mat_Elemental *b=(Mat_Elemental*)B->data;
689   Mat_Elemental *x=(Mat_Elemental*)X->data;
690 
691   PetscFunctionBegin;
692   El::Copy(*b->emat,*x->emat);
693   switch (A->factortype) {
694   case MAT_FACTOR_LU:
695     if ((*a->pivot).AllocatedMemory()) {
696       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,*x->emat);
697     } else {
698       El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
699     }
700     break;
701   case MAT_FACTOR_CHOLESKY:
702     El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
703     break;
704   default:
705     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
706     break;
707   }
708   PetscFunctionReturn(0);
709 }
710 
711 #undef __FUNCT__
712 #define __FUNCT__ "MatLUFactor_Elemental"
713 static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
714 {
715   Mat_Elemental  *a = (Mat_Elemental*)A->data;
716 
717   PetscFunctionBegin;
718   if (info->dtcol){
719     El::LU(*a->emat,*a->pivot);
720   } else {
721     El::LU(*a->emat);
722   }
723   A->factortype = MAT_FACTOR_LU;
724   A->assembled  = PETSC_TRUE;
725   PetscFunctionReturn(0);
726 }
727 
728 #undef __FUNCT__
729 #define __FUNCT__ "MatLUFactorNumeric_Elemental"
730 static PetscErrorCode  MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
731 {
732   PetscErrorCode ierr;
733 
734   PetscFunctionBegin;
735   ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
736   ierr = MatLUFactor_Elemental(F,0,0,info);CHKERRQ(ierr);
737   PetscFunctionReturn(0);
738 }
739 
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_Elemental"
/* No-op: dense Elemental factorizations need no symbolic phase. */
static PetscErrorCode  MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* F is created and allocated by MatGetFactor_elemental_elemental(); skip this routine. */
  PetscFunctionReturn(0);
}
748 
749 #undef __FUNCT__
750 #define __FUNCT__ "MatCholeskyFactor_Elemental"
751 static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
752 {
753   Mat_Elemental  *a = (Mat_Elemental*)A->data;
754   El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;
755 
756   PetscFunctionBegin;
757   El::Cholesky(El::UPPER,*a->emat);
758   A->factortype = MAT_FACTOR_CHOLESKY;
759   A->assembled  = PETSC_TRUE;
760   PetscFunctionReturn(0);
761 }
762 
763 #undef __FUNCT__
764 #define __FUNCT__ "MatCholeskyFactorNumeric_Elemental"
765 static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
766 {
767   PetscErrorCode ierr;
768 
769   PetscFunctionBegin;
770   ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
771   ierr = MatCholeskyFactor_Elemental(F,0,info);CHKERRQ(ierr);
772   PetscFunctionReturn(0);
773 }
774 
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_Elemental"
/* No-op: dense Elemental factorizations need no symbolic phase. */
static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* F is created and allocated by MatGetFactor_elemental_elemental(); skip this routine. */
  PetscFunctionReturn(0);
}
783 
784 #undef __FUNCT__
785 #define __FUNCT__ "MatFactorGetSolverPackage_elemental_elemental"
786 PetscErrorCode MatFactorGetSolverPackage_elemental_elemental(Mat A,const MatSolverPackage *type)
787 {
788   PetscFunctionBegin;
789   *type = MATSOLVERELEMENTAL;
790   PetscFunctionReturn(0);
791 }
792 
793 #undef __FUNCT__
794 #define __FUNCT__ "MatGetFactor_elemental_elemental"
795 static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
796 {
797   Mat            B;
798   PetscErrorCode ierr;
799 
800   PetscFunctionBegin;
801   /* Create the factorization matrix */
802   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
803   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
804   ierr = MatSetType(B,MATELEMENTAL);CHKERRQ(ierr);
805   ierr = MatSetUp(B);CHKERRQ(ierr);
806   B->factortype = ftype;
807   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_elemental_elemental);CHKERRQ(ierr);
808   *F            = B;
809   PetscFunctionReturn(0);
810 }
811 
812 #undef __FUNCT__
813 #define __FUNCT__ "MatSolverPackageRegister_Elemental"
814 PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_Elemental(void)
815 {
816   PetscErrorCode ierr;
817 
818   PetscFunctionBegin;
819   ierr = MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_LU,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
820   ierr = MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
821   PetscFunctionReturn(0);
822 }
823 
824 #undef __FUNCT__
825 #define __FUNCT__ "MatNorm_Elemental"
826 static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
827 {
828   Mat_Elemental *a=(Mat_Elemental*)A->data;
829 
830   PetscFunctionBegin;
831   switch (type){
832   case NORM_1:
833     *nrm = El::OneNorm(*a->emat);
834     break;
835   case NORM_FROBENIUS:
836     *nrm = El::FrobeniusNorm(*a->emat);
837     break;
838   case NORM_INFINITY:
839     *nrm = El::InfinityNorm(*a->emat);
840     break;
841   default:
842     printf("Error: unsupported norm type!\n");
843   }
844   PetscFunctionReturn(0);
845 }
846 
#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_Elemental"
/*
   MatZeroEntries_Elemental - Sets every entry of the distributed Elemental
   matrix to zero (the storage itself is kept).
*/
static PetscErrorCode MatZeroEntries_Elemental(Mat A)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  PetscFunctionBegin;
  El::Zero(*a->emat);
  PetscFunctionReturn(0);
}
857 
858 #undef __FUNCT__
859 #define __FUNCT__ "MatGetOwnershipIS_Elemental"
860 static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
861 {
862   Mat_Elemental  *a = (Mat_Elemental*)A->data;
863   PetscErrorCode ierr;
864   PetscInt       i,m,shift,stride,*idx;
865 
866   PetscFunctionBegin;
867   if (rows) {
868     m = a->emat->LocalHeight();
869     shift = a->emat->ColShift();
870     stride = a->emat->ColStride();
871     ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
872     for (i=0; i<m; i++) {
873       PetscInt rank,offset;
874       E2RO(A,0,shift+i*stride,&rank,&offset);
875       RO2P(A,0,rank,offset,&idx[i]);
876     }
877     ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);CHKERRQ(ierr);
878   }
879   if (cols) {
880     m = a->emat->LocalWidth();
881     shift = a->emat->RowShift();
882     stride = a->emat->RowStride();
883     ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
884     for (i=0; i<m; i++) {
885       PetscInt rank,offset;
886       E2RO(A,1,shift+i*stride,&rank,&offset);
887       RO2P(A,1,rank,offset,&idx[i]);
888     }
889     ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);CHKERRQ(ierr);
890   }
891   PetscFunctionReturn(0);
892 }
893 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_Elemental_Dense"
/*
   MatConvert_Elemental_Dense - Converts a MATELEMENTAL matrix to MATDENSE by
   translating each locally owned (row,col) pair through the
   PETSc <-> (rank,offset) <-> Elemental index maps and copying the entry.

   NOTE(review): elrow/elcol are computed as erow/MCSize() and ecol/MRSize(),
   i.e. the global Elemental index divided by the process-grid dimension --
   confirm this matches El::DistMatrix's LocalRow()/LocalCol() for the
   [MC,MR] distribution before relying on it elsewhere.
*/
static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
{
  Mat                Bmpi;
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  MPI_Comm           comm;
  PetscErrorCode     ierr;
  IS                 isrows,iscols;
  PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
  const PetscInt     *rows,*cols;
  PetscElemScalar    v;
  const El::Grid     &grid = a->emat->Grid();

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    Bmpi = *B;
  } else {
    /* Fresh dense matrix with the same local layout as A */
    ierr = MatCreate(comm,&Bmpi);CHKERRQ(ierr);
    ierr = MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = MatSetType(Bmpi,MATDENSE);CHKERRQ(ierr);
    ierr = MatSetUp(Bmpi);CHKERRQ(ierr);
  }

  /* Get the PETSc global indices owned locally under the Elemental distribution */
  ierr = MatGetOwnershipIS(A,&isrows,&iscols);CHKERRQ(ierr);
  ierr = ISGetLocalSize(isrows,&nrows);CHKERRQ(ierr);
  ierr = ISGetIndices(isrows,&rows);CHKERRQ(ierr);
  ierr = ISGetLocalSize(iscols,&ncols);CHKERRQ(ierr);
  ierr = ISGetIndices(iscols,&cols);CHKERRQ(ierr);

  for (i=0; i<nrows; i++) {
    P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
    RO2E(A,0,rrank,ridx,&erow);
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    for (j=0; j<ncols; j++) {
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

      elrow = erow / grid.MCSize(); /* Elemental local row index */
      elcol = ecol / grid.MRSize(); /* Elemental local column index */
      v = a->emat->GetLocal(elrow,elcol);
      ierr = MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    /* Replace A's internals with the dense matrix in place */
    ierr = MatHeaderReplace(A,&Bmpi);CHKERRQ(ierr);
  } else {
    *B = Bmpi;
  }
  ierr = ISDestroy(&isrows);CHKERRQ(ierr);
  ierr = ISDestroy(&iscols);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
953 
954 #undef __FUNCT__
955 #define __FUNCT__ "MatConvert_SeqAIJ_Elemental"
956 PETSC_EXTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
957 {
958   Mat               mat_elemental;
959   PetscErrorCode    ierr;
960   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
961   const PetscInt    *cols;
962   const PetscScalar *vals;
963 
964   PetscFunctionBegin;
965   ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
966   ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
967   ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
968   ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
969   for (row=0; row<M; row++) {
970     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
971     /* PETSc-Elemental interaface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
972     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
973     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
974   }
975   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
976   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
977 
978   if (reuse == MAT_INPLACE_MATRIX) {
979     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
980   } else {
981     *newmat = mat_elemental;
982   }
983   PetscFunctionReturn(0);
984 }
985 
986 #undef __FUNCT__
987 #define __FUNCT__ "MatConvert_MPIAIJ_Elemental"
988 PETSC_EXTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
989 {
990   Mat               mat_elemental;
991   PetscErrorCode    ierr;
992   PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
993   const PetscInt    *cols;
994   const PetscScalar *vals;
995 
996   PetscFunctionBegin;
997   ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
998   ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
999   ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
1000   ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
1001   for (row=rstart; row<rend; row++) {
1002     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1003     for (j=0; j<ncols; j++) {
1004       /* PETSc-Elemental interaface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1005       ierr = MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);CHKERRQ(ierr);
1006     }
1007     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1008   }
1009   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1010   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1011 
1012   if (reuse == MAT_INPLACE_MATRIX) {
1013     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1014   } else {
1015     *newmat = mat_elemental;
1016   }
1017   PetscFunctionReturn(0);
1018 }
1019 
1020 #undef __FUNCT__
1021 #define __FUNCT__ "MatConvert_SeqSBAIJ_Elemental"
1022 PETSC_EXTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1023 {
1024   Mat               mat_elemental;
1025   PetscErrorCode    ierr;
1026   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
1027   const PetscInt    *cols;
1028   const PetscScalar *vals;
1029 
1030   PetscFunctionBegin;
1031   ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
1032   ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
1033   ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
1034   ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
1035   ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
1036   for (row=0; row<M; row++) {
1037     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1038     /* PETSc-Elemental interaface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1039     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
1040     for (j=0; j<ncols; j++) { /* lower triangular part */
1041       if (cols[j] == row) continue;
1042       ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);CHKERRQ(ierr);
1043     }
1044     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1045   }
1046   ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
1047   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1048   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1049 
1050   if (reuse == MAT_INPLACE_MATRIX) {
1051     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1052   } else {
1053     *newmat = mat_elemental;
1054   }
1055   PetscFunctionReturn(0);
1056 }
1057 
1058 #undef __FUNCT__
1059 #define __FUNCT__ "MatConvert_MPISBAIJ_Elemental"
1060 PETSC_EXTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1061 {
1062   Mat               mat_elemental;
1063   PetscErrorCode    ierr;
1064   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
1065   const PetscInt    *cols;
1066   const PetscScalar *vals;
1067 
1068   PetscFunctionBegin;
1069   ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
1070   ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
1071   ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
1072   ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
1073   ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
1074   for (row=rstart; row<rend; row++) {
1075     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1076     /* PETSc-Elemental interaface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1077     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
1078     for (j=0; j<ncols; j++) { /* lower triangular part */
1079       if (cols[j] == row) continue;
1080       ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);CHKERRQ(ierr);
1081     }
1082     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1083   }
1084   ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
1085   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1086   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1087 
1088   if (reuse == MAT_INPLACE_MATRIX) {
1089     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1090   } else {
1091     *newmat = mat_elemental;
1092   }
1093   PetscFunctionReturn(0);
1094 }
1095 
1096 #undef __FUNCT__
1097 #define __FUNCT__ "MatDestroy_Elemental"
1098 static PetscErrorCode MatDestroy_Elemental(Mat A)
1099 {
1100   Mat_Elemental      *a = (Mat_Elemental*)A->data;
1101   PetscErrorCode     ierr;
1102   Mat_Elemental_Grid *commgrid;
1103   PetscBool          flg;
1104   MPI_Comm           icomm;
1105 
1106   PetscFunctionBegin;
1107   delete a->emat;
1108   delete a->pivot;
1109 
1110   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1111   ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
1112   ierr = MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRQ(ierr);
1113   if (--commgrid->grid_refct == 0) {
1114     delete commgrid->grid;
1115     ierr = PetscFree(commgrid);CHKERRQ(ierr);
1116     ierr = MPI_Keyval_free(&Petsc_Elemental_keyval);CHKERRQ(ierr);
1117   }
1118   ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
1119   ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);CHKERRQ(ierr);
1120   ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
1121   ierr = PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",NULL);CHKERRQ(ierr);
1122   ierr = PetscFree(A->data);CHKERRQ(ierr);
1123   PetscFunctionReturn(0);
1124 }
1125 
1126 #undef __FUNCT__
1127 #define __FUNCT__ "MatSetUp_Elemental"
1128 PetscErrorCode MatSetUp_Elemental(Mat A)
1129 {
1130   Mat_Elemental  *a = (Mat_Elemental*)A->data;
1131   PetscErrorCode ierr;
1132   PetscMPIInt    rsize,csize;
1133 
1134   PetscFunctionBegin;
1135   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1136   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1137 
1138   a->emat->Resize(A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1139   El::Zero(*a->emat);
1140 
1141   ierr = MPI_Comm_size(A->rmap->comm,&rsize);CHKERRQ(ierr);
1142   ierr = MPI_Comm_size(A->cmap->comm,&csize);CHKERRQ(ierr);
1143   if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
1144   a->commsize = rsize;
1145   a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
1146   a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
1147   a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
1148   a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
1149   PetscFunctionReturn(0);
1150 }
1151 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_Elemental"
/*
   MatAssemblyBegin_Elemental - Flushes the queued local/remote updates
   accumulated by MatSetValues into the distributed Elemental matrix.
*/
PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  PetscFunctionBegin;
  /* ProcessQueues() communicates and applies all queued updates (collective) */
  a->emat->ProcessQueues();
  PetscFunctionReturn(0);
}
1164 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_Elemental"
/*
   MatAssemblyEnd_Elemental - No-op; all assembly work is completed in
   MatAssemblyBegin_Elemental().
*/
PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  PetscFunctionBegin;
  /* Currently does nothing */
  PetscFunctionReturn(0);
}
1173 
1174 #undef __FUNCT__
1175 #define __FUNCT__ "MatLoad_Elemental"
1176 PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1177 {
1178   PetscErrorCode ierr;
1179   Mat            Adense,Ae;
1180   MPI_Comm       comm;
1181 
1182   PetscFunctionBegin;
1183   ierr = PetscObjectGetComm((PetscObject)newMat,&comm);CHKERRQ(ierr);
1184   ierr = MatCreate(comm,&Adense);CHKERRQ(ierr);
1185   ierr = MatSetType(Adense,MATDENSE);CHKERRQ(ierr);
1186   ierr = MatLoad(Adense,viewer);CHKERRQ(ierr);
1187   ierr = MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);CHKERRQ(ierr);
1188   ierr = MatDestroy(&Adense);CHKERRQ(ierr);
1189   ierr = MatHeaderReplace(newMat,&Ae);CHKERRQ(ierr);
1190   PetscFunctionReturn(0);
1191 }
1192 
#undef __FUNCT__
#define __FUNCT__ "MatElementalHermitianGenDefEig_Elemental"
/*
   MatElementalHermitianGenDefEig_Elemental - Solves the Hermitian-definite
   generalized eigenproblem for the pencil (A,B) via El::HermitianGenDefEig
   and wraps the eigenvalues (w) and eigenvectors (X) in new MATELEMENTAL
   matrices returned through evals and evec.
*/
PetscErrorCode MatElementalHermitianGenDefEig_Elemental(El::Pencil eigtype,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
{
  PetscErrorCode ierr;
  Mat_Elemental  *a=(Mat_Elemental*)A->data,*b=(Mat_Elemental*)B->data,*x;
  MPI_Comm       comm;
  Mat            EVAL;
  Mat_Elemental  *e;

  PetscFunctionBegin;
  /* Compute eigenvalues and eigenvectors on A's grid */
  El::DistMatrix<PetscElemScalar,El::VR,El::STAR> w( *a->grid ); /* holding eigenvalues */
  El::DistMatrix<PetscElemScalar>                 X( *a->grid ); /* holding eigenvectors */
  El::HermitianGenDefEig(eigtype,uplo,*a->emat,*b->emat,w,X,sort,subset,ctrl);
  /* El::Print(w, "Eigenvalues"); */

  /* Wrap X into a MATELEMENTAL matrix for the eigenvectors */
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,evec);CHKERRQ(ierr);
  ierr = MatSetSizes(*evec,PETSC_DECIDE,PETSC_DECIDE,X.Height(),X.Width());CHKERRQ(ierr);
  ierr = MatSetType(*evec,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetFromOptions(*evec);CHKERRQ(ierr);
  ierr = MatSetUp(*evec);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*evec,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*evec,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  x = (Mat_Elemental*)(*evec)->data;
  /* NOTE(review): the emat created by MatSetUp is overwritten by assignment
     below without being deleted first -- possible leak of its buffers; the
     commented-out delete suggests this was already suspected.  Confirm
     El::DistMatrix assignment semantics before changing. */
  //delete x->emat; //-- memory leak???
  *x->emat = X;

  /* Wrap w into a MATELEMENTAL matrix for the eigenvalues */
  ierr = MatCreate(comm,&EVAL);CHKERRQ(ierr);
  ierr = MatSetSizes(EVAL,PETSC_DECIDE,PETSC_DECIDE,w.Height(),w.Width());CHKERRQ(ierr);
  ierr = MatSetType(EVAL,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetFromOptions(EVAL);CHKERRQ(ierr);
  ierr = MatSetUp(EVAL);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(EVAL,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(EVAL,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  e         = (Mat_Elemental*)EVAL->data;
  *e->emat = w; //-- memory leak??? (same concern as above)
  *evals   = EVAL;

#if defined(MV)
  /* Debug-only check: norm = || - A*X + B*X*w || should be ~0 */
  {
    PetscElemScalar alpha,beta;
    El::DistMatrix<PetscElemScalar> Y(*a->grid); //tmp matrix
    alpha = 1.0; beta=0.0;
    El::Gemm(El::NORMAL,El::NORMAL,alpha,*b->emat,X,beta,Y); //Y = B*X
    El::DiagonalScale(El::RIGHT,El::NORMAL, w, Y); //Y = Y*w
    alpha = -1.0; beta=1.0;
    El::Gemm(El::NORMAL,El::NORMAL,alpha,*a->emat,X,beta,Y); //Y = - A*X + B*X*w

    PetscElemScalar norm = El::FrobeniusNorm(Y);
    if ((*a->grid).Rank()==0) printf("  norm (- A*X + B*X*w) = %g\n",norm);
  }

  {
    PetscMPIInt rank;
    ierr = MPI_Comm_rank(comm,&rank); /* NOTE(review): missing CHKERRQ (debug-only path) */
    printf("w: [%d] [%d, %d %d] %d; X: %d %d\n",rank,w.DistRank(),w.ColRank(),w.RowRank(),w.LocalHeight(),X.LocalHeight(),X.LocalWidth());
  }
#endif
  PetscFunctionReturn(0);
}
1258 
#undef __FUNCT__
#define __FUNCT__ "MatElementalHermitianGenDefEig"
/*@
  MatElementalHermitianGenDefEig - Computes the eigenvalues (and eigenvectors)
  of the Hermitian-definite matrix pencil (A,B) selected by the subset structure

   Logically Collective on Mat

   Input Parameters:
+  type   - pencil type (El::Pencil)
.  uplo   - which triangle of A and B to access (El::UpperOrLower)
.  A, B   - the MATELEMENTAL matrices defining the pencil
.  sort   - eigenvalue sorting criterion (El::SortType)
.  subset - selects which eigenpairs to compute
-  ctrl   - Elemental eigensolver control parameters

   Output Parameters:
+  evals - new MATELEMENTAL matrix holding the computed eigenvalues
-  evec  - new MATELEMENTAL matrix holding the computed eigenvectors

   Level: beginner

   References: Elemental Users' Guide
@*/
PetscErrorCode MatElementalHermitianGenDefEig(El::Pencil type,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(A,"MatElementalHermitianGenDefEig_C",(El::Pencil,El::UpperOrLower,Mat,Mat,Mat*,Mat*,El::SortType,El::HermitianEigSubset<PetscElemScalar>,const El::HermitianEigCtrl<PetscElemScalar>),(type,uplo,A,B,evals,evec,sort,subset,ctrl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1278 
/* -------------------------------------------------------------------*/
/*
   MatOps_Values - Positional function table for MATELEMENTAL.  Each slot
   corresponds to a fixed entry of struct _MatOps (the numeric comments mark
   every fifth slot); a 0 means the operation is not supported.  Do NOT
   reorder entries: the mapping is purely positional.
*/
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ MatMatMult_Elemental,
       MatMatMultSymbolic_Elemental,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       MatMatTransposeMult_Elemental,
       MatMatTransposeMultSymbolic_Elemental,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ 0,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       0,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0
};
1421 
1422 /*MC
1423    MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package
1424 
1425   Use ./configure --download-elemental to install PETSc to use Elemental
1426 
  Use -pc_type lu -pc_factor_mat_solver_package elemental to use this direct solver
1428 
1429    Options Database Keys:
1430 + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1431 - -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix
1432 
1433   Level: beginner
1434 
1435 .seealso: MATDENSE
1436 M*/
1437 
1438 #undef __FUNCT__
1439 #define __FUNCT__ "MatCreate_Elemental"
1440 PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
1441 {
1442   Mat_Elemental      *a;
1443   PetscErrorCode     ierr;
1444   PetscBool          flg,flg1;
1445   Mat_Elemental_Grid *commgrid;
1446   MPI_Comm           icomm;
1447   PetscInt           optv1;
1448 
1449   PetscFunctionBegin;
1450   ierr = PetscElementalInitializePackage();CHKERRQ(ierr);
1451   ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1452   A->insertmode = NOT_SET_VALUES;
1453 
1454   ierr = PetscNewLog(A,&a);CHKERRQ(ierr);
1455   A->data = (void*)a;
1456 
1457   /* Set up the elemental matrix */
1458   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1459 
1460   /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
1461   if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
1462     ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
1463     /* ierr = MPI_Comm_create_Keyval(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0); -- new version? */
1464   }
1465   ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
1466   ierr = MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRQ(ierr);
1467   if (!flg) {
1468     ierr = PetscNewLog(A,&commgrid);CHKERRQ(ierr);
1469 
1470     ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");CHKERRQ(ierr);
1471     /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
1472     ierr = PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);CHKERRQ(ierr);
1473     if (flg1) {
1474       if (El::mpi::Size(cxxcomm) % optv1 != 0) {
1475         SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
1476       }
1477       commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
1478     } else {
1479       commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
1480       /* printf("new commgrid->grid = %p\n",commgrid->grid);  -- memory leak revealed by valgrind? */
1481     }
1482     commgrid->grid_refct = 1;
1483     ierr = MPI_Attr_put(icomm,Petsc_Elemental_keyval,(void*)commgrid);CHKERRQ(ierr);
1484     PetscOptionsEnd();
1485   } else {
1486     commgrid->grid_refct++;
1487   }
1488   ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
1489   a->grid      = commgrid->grid;
1490   a->emat      = new El::DistMatrix<PetscElemScalar>(*a->grid);
1491   a->pivot     = new El::DistMatrix<PetscInt,El::VC,El::STAR>(*a->grid);
1492   a->roworiented = PETSC_TRUE;
1493 
1494   ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);CHKERRQ(ierr);
1495   ierr = PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",MatElementalHermitianGenDefEig_Elemental);CHKERRQ(ierr);
1496 
1497   ierr = PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);CHKERRQ(ierr);
1498   PetscFunctionReturn(0);
1499 }
1500