#define PETSCKSP_DLL

/*************************************xxt.c************************************
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/
#include "../src/ksp/pc/impls/tfs/tfs.h"

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xxt_solver_info {
  PetscInt n, m, n_global, m_global;
  PetscInt nnz, max_nnz, msg_buf_sz;
  PetscInt *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt nsolves;
  PetscScalar tot_solve_time;
} xxt_info;

typedef struct matvec_info {
  PetscInt n, m, n_global, m_global;
  PetscInt *local2global;
  gs_ADT gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xxt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xxt=0;
static PetscInt n_xxt_handles=0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscInt xxt_generate(xxt_ADT xxt_handle);
static PetscInt do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data);

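/*************************************xxt.c************************************
Typical use of the public interface, as a hedged sketch (not compiled, not
part of the original module): create the handle, factor once against a
user-supplied local matvec, apply to as many right-hand sides as needed,
then free. The names my_matvec(), my_grid, and example_usage() are
illustrative placeholders.
**************************************xxt.c***********************************/
#if 0
static PetscErrorCode my_matvec(void *grid_data, PetscScalar *in, PetscScalar *out)
{
  /* out = A_local . in, supplied by the caller */
  return 0;
}

static void example_usage(PetscInt *local2global, PetscInt n, PetscInt m, void *my_grid, PetscScalar *x, PetscScalar *b)
{
  xxt_ADT handle = XXT_new();

  XXT_factor(handle, local2global, n, m, (void*)my_matvec, my_grid); /* one-time setup */
  XXT_solve(handle, x, b);    /* copies b into x, then solves in place     */
  XXT_solve(handle, x, NULL); /* or solve in place when x already holds b  */
  XXT_free(handle);
}
#endif
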
/**************************************xxt.c***********************************/
xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... pot. problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL; xxt_handle->mvi = NULL;

  return(xxt_handle);
}

/**************************************xxt.c***********************************/
PetscInt XXT_factor(xxt_ADT xxt_handle,     /* prev. allocated xxt handle  */
                    PetscInt *local2global, /* global column mapping       */
                    PetscInt n,             /* local num rows              */
                    PetscInt m,             /* local num cols              */
                    void *matvec,           /* b_loc=A_local.x_loc         */
                    void *grid_data         /* grid data for matvec        */
                    )
{
  comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xxt_handle->level=i_log2_num_nodes))!=num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<i_log2_num_nodes,num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns=0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return(do_xxt_factor(xxt_handle));
}

/**************************************xxt.c***********************************/
PetscInt XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b)
    {rvec_copy(x,b,xxt_handle->mvi->n);}
  do_xxt_solve(xxt_handle,x);

  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_free(xxt_ADT xxt_handle)
{
  comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  gs_free(xxt_handle->mvi->gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_stats(xxt_ADT xxt_handle)
{
  PetscInt  op[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt vals[9], work[9];
  PetscScalar fvals[3], fwork[3];
  PetscErrorCode ierr;

  comm_init();
  check_handle(xxt_handle);

  /* if factorization not done there are no stats */
  if (!xxt_handle->info||!xxt_handle->mvi)
    {
      if (!my_id)
        {ierr = PetscPrintf(PETSC_COMM_WORLD,"XXT_stats() :: no stats available!\n");CHKERRQ(ierr);}
      return 1;
    }

  vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]
    =xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++;
  grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!my_id)
    {
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_nnz=%D\n",my_id,vals[0]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_nnz=%D\n",my_id,vals[1]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_nnz=%g\n",my_id,1.0*vals[2]/num_nodes);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_nnz=%D\n",my_id,vals[2]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(2d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.5)));CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(3d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.6667)));CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_n  =%D\n",my_id,vals[3]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_n  =%D\n",my_id,vals[4]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_n  =%g\n",my_id,1.0*vals[5]/num_nodes);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_n  =%D\n",my_id,vals[5]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_buf=%D\n",my_id,vals[6]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_buf=%D\n",my_id,vals[7]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_buf=%g\n",my_id,1.0*vals[8]/num_nodes);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_slv=%g\n",my_id,fvals[0]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_slv=%g\n",my_id,fvals[1]);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_slv=%g\n",my_id,fvals[2]/num_nodes);CHKERRQ(ierr);
    }

  return(0);
}

/*************************************xxt.c************************************

Description: get A_local, the local portion of the global coarse matrix,
a row-distributed n x m matrix with n<m.
   o my_ml holds address of ML struct associated w/ A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
   gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
(a hedged sketch of such a callback appears after do_xxt_factor() below)
**************************************xxt.c***********************************/
static PetscInt do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}

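/*************************************xxt.c************************************
A hedged sketch of the mylocmatvec calling convention described above; the
names my_grid_t and my_local_matvec are illustrative, not part of this
module. Note that do_matvec() below hands the callback grid_data as its
first argument, so the callback sees the caller's grid struct, not the
mv_info wrapper.
**************************************xxt.c***********************************/
#if 0
typedef struct { PetscScalar *work; /* whatever applying A_local requires */ } my_grid_t;

static PetscErrorCode my_local_matvec(void *data, PetscScalar *in, PetscScalar *out)
{
  my_grid_t *grid = (my_grid_t*)data;
  /* compute out = A_local . in using grid: n outputs from m inputs */
  return 0;
}
#endif
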
/**************************************xxt.c***********************************/
static PetscInt xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt i,j,k,idex;
  PetscInt dim, col;
  PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt *segs;
  PetscInt op[] = {GL_ADD,0};
  PetscInt off, len;
  PetscScalar *x_ptr;
  PetscInt *iptr, flag;
  PetscInt start=0, end, work;
  PetscInt op2[] = {GL_MIN,0};
  gs_ADT gs_handle;
  PetscInt *nsep, *lnsep, *fo;
  PetscInt a_n=xxt_handle->mvi->n;
  PetscInt a_m=xxt_handle->mvi->m;
  PetscInt *a_local2global=xxt_handle->mvi->local2global;
  PetscInt level;
  PetscInt xxt_nnz=0, xxt_max_nnz=0;
  PetscInt n, m;
  PetscInt *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  PetscInt n_global;
  PetscInt xxt_zero_nnz=0;
  PetscInt xxt_zero_nnz_0=0;
  PetscBLASInt i1 = 1,dlen;
  PetscScalar dm1 = -1.0;
  PetscErrorCode ierr;

  n=xxt_handle->mvi->n;
  nsep=xxt_handle->info->nsep;
  lnsep=xxt_handle->info->lnsep;
  fo=xxt_handle->info->fo;
  end=lnsep[0];
  level=xxt_handle->level;
  gs_handle=xxt_handle->mvi->gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++)
    {j+=nsep[i];}

  m = j-xxt_handle->ns;
  if (m!=j)
    {ierr = PetscPrintf(PETSC_COMM_WORLD,"xxt_generate() :: null space exists %D %D %D\n",m,j,xxt_handle->ns);CHKERRQ(ierr);}

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  col_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  col_vals    = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
  for (i=j=0; i<m; i++, j+=2)
    {
      col_indices[j]=col_indices[j+1]=col_sz[i]=-1;
      col_vals[i] = NULL;
    }
  col_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  ivec_zero(stages,level+1);
  ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++)
    {segs[i+1] += segs[i];}
  stages[0] = segs[0];

  /* temporary vectors */
  u  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar *) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++)
    {j+=nsep[i];}

  /* storage for sparse x values */
  n_global = xxt_handle->info->n_global;
  /* initial fill guess: ~2.5*n_global^(5/3) (cf. the C(3d) statistic in
     XXT_stats) plus separator replication, split across processors;
     grown by 2x on demand below */
  xxt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/num_nodes;
  x = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar));
  xxt_nnz = 0;

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
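  /* Overview of the loop below (summary comment, not from the original):
     for each coarse dof, taken in the firing order fo[] chosen by
     det_separators(), we
       1. fire a unit vector v at the owning column,
       2. form u = A.v and project out all previously accepted columns:
          uu = X^T.u (local dots + ssgl_radd), z = X.uu, v = v - z,
       3. normalize so that v^T.A.v = 1, i.e. v = v/sqrt(v^T.A.v),
       4. store the nonzero run of v as a new compressed column of X.
     The result is X with X^T.A.X = I, so A^{-1} = X.X^T on this space. */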
  for (dim=i=j=0;i<m;i++)
    {
      /* time to move to the next level? */
      while (i==segs[dim]) {
        if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
        stages[dim++]=i;
        end+=lnsep[dim];
      }
      stages[dim]=i;

      /* which column are we firing? */
      /* i.e. set v_l */
      /* use new seps and do global min across hc to determine which one to fire */
      col = (start<end) ? fo[start] : INT_MAX;
      giop_hc(&col,&work,1,op2,dim);

      /* shouldn't need this */
      if (col==INT_MAX)
        {
          ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
          continue;
        }

      /* do I own it? I should */
      rvec_zero(v ,a_m);
      if (col==fo[start])
        {
          start++;
          idex=ivec_linear_search(col, a_local2global, a_n);
          if (idex!=-1){
            v[idex] = 1.0; j++;
          } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
        }
      else
        {
          idex=ivec_linear_search(col, a_local2global, a_m);
          if (idex!=-1)
            {v[idex] = 1.0;}
        }

      /* perform u = A.v_l */
      rvec_zero(u,n);
      do_matvec(xxt_handle->mvi,v,u);

      /* uu = X^T.u_l (local portion) */
      /* technically only need to zero out first i entries */
      /* later turn this into an XXT_solve call ? */
      rvec_zero(uu,m);
      x_ptr=x;
      iptr = col_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;
          dlen = PetscBLASIntCast(len);
          uu[k] = BLASdot_(&dlen,u+off,&i1,x_ptr,&i1);
          x_ptr+=len;
        }

      /* uu = X^T.u_l (comm portion) */
      ssgl_radd(uu, w, dim, stages);

      /* z = X.uu */
      rvec_zero(z,n);
      x_ptr=x;
      iptr = col_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;
          dlen = PetscBLASIntCast(len);
          BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
          x_ptr+=len;
        }

      /* compute v_l = v_l - z */
      rvec_zero(v+a_n,a_m-a_n);
      dlen = PetscBLASIntCast(n);
      BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);

      /* compute u_l = A.v_l */
      if (a_n!=a_m)
        {gs_gop_hc(gs_handle,v,"+\0",dim);}
      rvec_zero(u,n);
      do_matvec(xxt_handle->mvi,v,u);

      /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
      dlen = PetscBLASIntCast(n);
      alpha = BLASdot_(&dlen,u,&i1,v,&i1);
      /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
      grop_hc(&alpha, &alpha_w, 1, op, dim);

      alpha = (PetscScalar) sqrt((double)alpha);

      /* check for small alpha                             */
      /* LATER use this to detect and determine null space */
      if (fabs(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

      /* compute v_l = v_l/sqrt(alpha) */
      rvec_scale(v,1.0/alpha,n);

      /* add newly generated column, v_l, to X */
      flag = 1;
      off=len=0;
      for (k=0; k<n; k++)
        {
          if (v[k]!=0.0)
            {
              len=k;
              if (flag)
                {off=k; flag=0;}
            }
        }

      len -= (off-1);

      if (len>0)
        {
          if ((xxt_nnz+len)>xxt_max_nnz)
            {
              ierr = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
              xxt_max_nnz *= 2;
              x_ptr = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar));
              rvec_copy(x_ptr,x,xxt_nnz);
              free(x);
              x = x_ptr;
              x_ptr+=xxt_nnz;
            }
          xxt_nnz += len;
          rvec_copy(x_ptr,v+off,len);

          /* keep track of number of zeros */
          if (dim)
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xxt_zero_nnz++;}
                }
            }
          else
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xxt_zero_nnz_0++;}
                }
            }
          col_indices[2*i] = off;
          col_sz[i] = col_indices[2*i+1] = len;
          col_vals[i] = x_ptr;
        }
      else
        {
          col_indices[2*i] = 0;
          col_sz[i] = col_indices[2*i+1] = 0;
          col_vals[i] = x_ptr;
        }
    }

  /* close off stages for execution phase */
  while (dim!=level)
    {
      stages[dim++]=i;
      ierr = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
    }
  stages[dim]=i;

  xxt_handle->info->n=xxt_handle->mvi->n;
  xxt_handle->info->m=m;
  xxt_handle->info->nnz=xxt_nnz;
  xxt_handle->info->max_nnz=xxt_max_nnz;
  xxt_handle->info->msg_buf_sz=stages[level]-stages[0];
  xxt_handle->info->solve_uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->solve_w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->x=x;
  xxt_handle->info->col_vals=col_vals;
  xxt_handle->info->col_sz=col_sz;
  xxt_handle->info->col_indices=col_indices;
  xxt_handle->info->stages=stages;
  xxt_handle->info->nsolves=0;
  xxt_handle->info->tot_solve_time=0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}

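/*************************************xxt.c************************************
What do_xxt_solve() computes (summary, not from the original): with the
columns of X generated A-orthonormal above (X^T.A.X = I), the coarse solve
is the two-pass application

  uc <- X . (X^T . uc),   i.e.   A^{-1}.b = X.X^T.b,

where the X^T pass is the dot-product loop plus the staged hypercube
reduction ssgl_radd(), and the X pass is the axpy loop.
**************************************xxt.c***********************************/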
/**************************************xxt.c***********************************/
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt off, len, *iptr;
  PetscInt level       =xxt_handle->level;
  PetscInt n           =xxt_handle->info->n;
  PetscInt m           =xxt_handle->info->m;
  PetscInt *stages     =xxt_handle->info->stages;
  PetscInt *col_indices=xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu=xxt_handle->info->solve_uu;
  PetscScalar *solve_w =xxt_handle->info->solve_w;
  PetscScalar *x       =xxt_handle->info->x;
  PetscBLASInt i1 = 1,dlen;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,x_ptr,&i1);
    }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) {ssgl_radd(uu_ptr, solve_w, level, stages);}

  rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
    }
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (!xxt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xxt_handle);

  vals[0]=vals[1]=xxt_handle->id;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xxt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1], xxt_handle->id);
  PetscFunctionReturn(0);
}

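/*************************************xxt.c************************************
Overview of det_separators() (summary comment, not from the original): the
processors are treated as a log2(P)-level hypercube. Working top down, each
half of the current sub-cube fires 1s into the shared dofs (gs_gop_hc); a
dof that sees signal from the other half lies on the cut and is marked as a
separator for that level. Separator dofs are recorded in the firing order
fo[], with per-level counts in lnsep[]/nsep[]; level 0 mops up the purely
local remainder. The shared (dof-replicated) and non-shared cases differ
only in how the counts are weighted.
**************************************xxt.c***********************************/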
/**************************************xxt.c***********************************/
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt i, ct, id;
  PetscInt mask, edge, *iptr;
  PetscInt *dir, *used;
  PetscInt sum[4], w[4];
  PetscScalar rsum[4], rw[4];
  PetscInt op[] = {GL_ADD,0};
  PetscScalar *lhs, *rhs;
  PetscInt *nsep, *lnsep, *fo, nfo=0;
  gs_ADT gs_handle=xxt_handle->mvi->gs_handle;
  PetscInt *local2global=xxt_handle->mvi->local2global;
  PetscInt n=xxt_handle->mvi->n;
  PetscInt m=xxt_handle->mvi->m;
  PetscInt level=xxt_handle->level;
  PetscInt shared=FALSE;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  ivec_zero(dir  ,level+1);
  ivec_zero(nsep ,level+1);
  ivec_zero(lnsep,level+1);
  ivec_set (fo   ,-1,n+1);
  ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  rvec_zero(lhs,m);
  rvec_set(lhs,1.0,n);
  gs_gop_hc(gs_handle,lhs,"+\0",level);
  rvec_zero(rsum,2);
  for (ct=i=0;i<n;i++)
    {
      if (lhs[i]!=0.0)
        {rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];}
    }
  grop_hc(rsum,rw,2,op,level);
  /* nudge up so the float-to-int truncation below is safe */
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  if (fabs(rsum[0]-rsum[1])>EPS)
    {shared=TRUE;}

  xxt_handle->info->n_global=xxt_handle->info->m_global=(PetscInt) rsum[0];
  xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
  if (shared)
    {
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
          gs_gop_hc(gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
          gs_gop_hc(gs_handle,rhs,"+\0",edge);

          for (i=0;i<n;i++)
            {
              if (id< mask)
                {
                  if (lhs[i]!=0.0)
                    {lhs[i]=1.0;}
                }
              if (id>=mask)
                {
                  if (rhs[i]!=0.0)
                    {rhs[i]=1.0;}
                }
            }

          if (id< mask)
            {gs_gop_hc(gs_handle,lhs,"+\0",edge-1);}
          else
            {gs_gop_hc(gs_handle,rhs,"+\0",edge-1);}

          /* count number of dofs I own that have signal and not in sep set */
          rvec_zero(rsum,4);
          for (ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if (id< mask)
                    {
                      if (lhs[i]!=0.0)
                        {sum[0]++; rsum[0]+=1.0/lhs[i];}
                    }
                  /* number of dofs to be marked on rhs hc */
                  if (id>=mask)
                    {
                      if (rhs[i]!=0.0)
                        {sum[1]++; rsum[1]+=1.0/rhs[i];}
                    }
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
          giop_hc(sum,w,4,op,edge);
          grop_hc(rsum,rw,4,op,edge);
          rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

          if (id<mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(lhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]=(PetscInt) rsum[0];
              dir [edge]=LEFT;
            }

          if (id>=mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(rhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]= (PetscInt) rsum[1];
              dir [edge]=RIGHT;
            }

          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this?                */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }
  else
    {
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
          gs_gop_hc(gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
          gs_gop_hc(gs_handle,rhs,"+\0",edge);

          /* count number of dofs I own that have signal and not in sep set */
          for (ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if ((id< mask)&&(lhs[i]!=0.0)) {sum[0]++;}
                  /* number of dofs to be marked on rhs hc */
                  if ((id>=mask)&&(rhs[i]!=0.0)) {sum[1]++;}
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          giop_hc(sum,w,4,op,edge);

          /* lhs hc wins */
          if (sum[2]>=sum[3])
            {
              if (id<mask)
                {
                  /* mark dofs I own that have signal and not in sep set */
                  for (ct=i=0;i<n;i++)
                    {
                      if ((!used[i])&&(lhs[i]!=0.0))
                        {
                          ct++; nfo++;
                          *--iptr = local2global[i];
                          used[i]=edge;
                        }
                    }
                  if (ct>1) {ivec_sort(iptr,ct);}
                  lnsep[edge]=ct;
                }
              nsep[edge]=sum[0];
              dir [edge]=LEFT;
            }
          /* rhs hc wins */
          else
            {
              if (id>=mask)
                {
                  /* mark dofs I own that have signal and not in sep set */
                  for (ct=i=0;i<n;i++)
                    {
                      if ((!used[i])&&(rhs[i]!=0.0))
                        {
                          ct++; nfo++;
                          *--iptr = local2global[i];
                          used[i]=edge;
                        }
                    }
                  if (ct>1) {ivec_sort(iptr,ct);}
                  lnsep[edge]=ct;
                }
              nsep[edge]=sum[1];
              dir [edge]=RIGHT;
            }
          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this?                */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }

  /* level 0 is on processor case - so mark the remainder */
  for (ct=i=0;i<n;i++)
    {
      if (!used[i])
        {
          ct++; nfo++;
          *--iptr = local2global[i];
          used[i]=edge;
        }
    }
  if (ct>1) {ivec_sort(iptr,ct);}
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=LEFT;

  xxt_handle->info->nsep=nsep;
  xxt_handle->info->lnsep=lnsep;
  xxt_handle->info->fo=fo;
  xxt_handle->info->nfo=nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data)
{
  mv_info *mvi;

  mvi = (mv_info*)malloc(sizeof(mv_info));
  mvi->n=n;
  mvi->m=m;
  mvi->n_global=-1;
  mvi->m_global=-1;
  mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt));
  ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
  mvi->grid_data=grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->gs_handle = gs_init(local2global, m, num_nodes);

  return(mvi);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  /* note: the user callback receives grid_data, not the mv_info wrapper */
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}