#define PETSCKSP_DLL

/*************************************xxt.c************************************
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/
#include "src/ksp/pc/impls/tfs/tfs.h"

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xxt_solver_info {
  PetscInt n, m, n_global, m_global;
  PetscInt nnz, max_nnz, msg_buf_sz;
  PetscInt *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt nsolves;
  PetscScalar tot_solve_time;
} xxt_info;
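
/*
   Note on storage: the factor X is held sparsely by columns.  Column i
   occupies a contiguous run of x; col_indices holds an (offset,length)
   pair per column, terminated by -1; col_sz[i] repeats the length and
   col_vals[i] points at the column's first entry inside x (see
   xxt_generate() and do_xxt_solve() below).
*/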

typedef struct matvec_info {
  PetscInt n, m, n_global, m_global;
  PetscInt *local2global;
  gs_ADT gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xxt_CDT{
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xxt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xxt=0;
static PetscInt n_xxt_handles=0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscInt xxt_generate(xxt_ADT xxt_handle);
static PetscInt do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data);

/**************************************xxt.c***********************************/
xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... pot. problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL; xxt_handle->mvi  = NULL;

  return(xxt_handle);
}

/**************************************xxt.c***********************************/
PetscInt XXT_factor(xxt_ADT xxt_handle,     /* prev. allocated xxt handle  */
                    PetscInt *local2global, /* global column mapping       */
                    PetscInt n,             /* local num rows              */
                    PetscInt m,             /* local num cols              */
                    void *matvec,           /* b_loc=A_local.x_loc         */
                    void *grid_data)        /* grid data for matvec        */
{
  comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xxt_handle->level=i_log2_num_nodes))!=num_nodes)
    {SETERRQ2(PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<i_log2_num_nodes,num_nodes);}

  /* space for X info */
  xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi  = set_mvi(local2global, n, m, matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns=0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return(do_xxt_factor(xxt_handle));
}
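
/*
   Typical lifecycle (a sketch, not code from this file; l2g, my_matvec and
   my_grid are hypothetical caller-side names):

     xxt_ADT h = XXT_new();
     XXT_factor(h, l2g, n, m, (void*)my_matvec, (void*)my_grid);
     XXT_solve(h, x, b);      x <- A^{-1}.b; pass b=NULL if rhs already in x
     XXT_free(h);

   Note the restriction checked above: the number of processors must be a
   power of two, and all of MPI_COMM_WORLD participates.
*/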

/**************************************xxt.c***********************************/
PetscInt XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b)
    {rvec_copy(x,b,xxt_handle->mvi->n);}
  do_xxt_solve(xxt_handle,x);

  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_free(xxt_ADT xxt_handle)
{
  comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  gs_free(xxt_handle->mvi->gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}
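
/*
   Note on the reductions in XXT_stats(): three copies of each local
   quantity (nnz, n, msg_buf_sz) are stored and reduced with giop()/grop();
   the leading NON_UNIFORM entry in op[]/fop[] flags that the remaining
   entries supply a per-element operation, so each triple comes back as the
   global min, max, and sum, from which the averages below are printed.
*/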

/**************************************xxt.c***********************************/
PetscInt XXT_stats(xxt_ADT xxt_handle)
{
  PetscInt  op[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];
  PetscErrorCode ierr;

  comm_init();
  check_handle(xxt_handle);

  /* if factorization not done there are no stats */
  if (!xxt_handle->info||!xxt_handle->mvi)
    {
      if (!my_id)
        {ierr = PetscPrintf(PETSC_COMM_WORLD,"XXT_stats() :: no stats available!\n");}
      return 1;
    }

  vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]
    =xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++;
  grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!my_id)
    {
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_nnz=%D\n",my_id,vals[0]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_nnz=%D\n",my_id,vals[1]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_nnz=%g\n",my_id,1.0*vals[2]/num_nodes);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_nnz=%D\n",my_id,vals[2]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(2d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.5)));
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(3d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.6667)));
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_n  =%D\n",my_id,vals[3]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_n  =%D\n",my_id,vals[4]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_n  =%g\n",my_id,1.0*vals[5]/num_nodes);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_n  =%D\n",my_id,vals[5]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_buf=%D\n",my_id,vals[6]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_buf=%D\n",my_id,vals[7]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_buf=%g\n",my_id,1.0*vals[8]/num_nodes);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_slv=%g\n",my_id,fvals[0]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_slv=%g\n",my_id,fvals[1]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_slv=%g\n",my_id,fvals[2]/num_nodes);
    }

  return(0);
}

/*************************************xxt.c************************************

Description: get A_local, the local portion of the global coarse matrix,
a row-distributed n x m matrix with n < m.
   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed
   using gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xxt.c***********************************/
static PetscInt do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}
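
/*
   For illustration, a local matvec conforming to the interface sketched
   above could look like the following (a sketch only; MyGrid and all names
   here are hypothetical, not part of tfs):

     typedef struct {PetscInt n; PetscInt *ia, *ja; PetscScalar *a;} MyGrid;

     static PetscErrorCode my_matvec(void *data, PetscScalar *in, PetscScalar *out)
     {
       MyGrid *g = (MyGrid*)data;
       PetscInt i, k;
       for (i=0; i<g->n; i++)
         {
           out[i] = 0.0;
           for (k=g->ia[i]; k<g->ia[i+1]; k++)
             {out[i] += g->a[k]*in[g->ja[k]];}
         }
       return 0;
     }

   It is passed (cast to void*) as the matvec argument of XXT_factor(),
   with the grid struct as grid_data; do_matvec() below forwards grid_data
   as the callback's first argument.
*/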

/**************************************xxt.c***********************************/
static PetscInt xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt i,j,k,idex;
  PetscInt dim, col;
  PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt *segs;
  PetscInt op[] = {GL_ADD,0};
  PetscInt off, len;
  PetscScalar *x_ptr;
  PetscInt *iptr, flag;
  PetscInt start=0, end, work;
  PetscInt op2[] = {GL_MIN,0};
  gs_ADT gs_handle;
  PetscInt *nsep, *lnsep, *fo;
  PetscInt a_n=xxt_handle->mvi->n;
  PetscInt a_m=xxt_handle->mvi->m;
  PetscInt *a_local2global=xxt_handle->mvi->local2global;
  PetscInt level;
  PetscInt xxt_nnz=0, xxt_max_nnz=0;
  PetscInt n, m;
  PetscInt *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  PetscInt n_global;
  PetscInt xxt_zero_nnz=0;
  PetscInt xxt_zero_nnz_0=0;
  PetscBLASInt i1 = 1,dlen;
  PetscScalar dm1 = -1.0;
  PetscErrorCode ierr;

  n=xxt_handle->mvi->n;
  nsep=xxt_handle->info->nsep;
  lnsep=xxt_handle->info->lnsep;
  fo=xxt_handle->info->fo;
  end=lnsep[0];
  level=xxt_handle->level;
  gs_handle=xxt_handle->mvi->gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++)
    {j+=nsep[i];}

  m = j-xxt_handle->ns;
  if (m!=j)
    {ierr = PetscPrintf(PETSC_COMM_WORLD,"xxt_generate() :: null space exists %D %D %D\n",m,j,xxt_handle->ns);}

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz = (PetscInt*) malloc(m*sizeof(PetscInt));
  col_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  col_vals = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
  for (i=j=0; i<m; i++, j+=2)
    {
      col_indices[j]=col_indices[j+1]=col_sz[i]=-1;
      col_vals[i] = NULL;
    }
  col_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  ivec_zero(stages,level+1);
  ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++)
    {segs[i+1] += segs[i];}
  stages[0] = segs[0];

  /* temporary vectors  */
  u  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar *) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++)
    {j+=nsep[i];}

  /* storage for sparse x values */
  n_global = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/num_nodes;
  x = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar));
  xxt_nnz = 0;

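  /*
     In effect, each pass below takes the fired separator column, starts
     from the unit vector v = e_col, A-orthogonalizes it against the
     columns already in X (uu = X^T.A.e_col, z = X.uu, v = e_col - z),
     and scales by 1/sqrt(v^T.A.v).  The columns built this way satisfy
     X^T.A.X = I, so the inverse is applied as A^{-1} = X.X^T in
     do_xxt_solve().
  */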
  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0;i<m;i++)
    {
      /* time to move to the next level? */
      while (i==segs[dim])
        {
          if (dim==level)
            {SETERRQ(PETSC_ERR_PLIB,"dim about to exceed level\n"); break;}

          stages[dim++]=i;
          end+=lnsep[dim];
        }
      stages[dim]=i;

      /* which column are we firing? */
      /* i.e. set v_l */
      /* use new seps and do global min across hc to determine which one to fire */
      (start<end) ? (col=fo[start]) : (col=INT_MAX);
      giop_hc(&col,&work,1,op2,dim);

      /* shouldn't need this */
      if (col==INT_MAX)
        {
          ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
          continue;
        }

      /* do I own it? I should */
      rvec_zero(v ,a_m);
      if (col==fo[start])
        {
          start++;
          idex=ivec_linear_search(col, a_local2global, a_n);
          if (idex!=-1)
            {v[idex] = 1.0; j++;}
          else
            {SETERRQ(PETSC_ERR_PLIB,"NOT FOUND!\n");}
        }
      else
        {
          idex=ivec_linear_search(col, a_local2global, a_m);
          if (idex!=-1)
            {v[idex] = 1.0;}
        }

      /* perform u = A.v_l */
      rvec_zero(u,n);
      do_matvec(xxt_handle->mvi,v,u);

      /* uu = X^T.u_l (local portion) */
      /* technically only need to zero out first i entries */
      /* later turn this into an XXT_solve call ? */
      rvec_zero(uu,m);
      x_ptr=x;
      iptr = col_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;
          dlen = PetscBLASIntCast(len);
          uu[k] = BLASdot_(&dlen,u+off,&i1,x_ptr,&i1);
          x_ptr+=len;
        }

      /* uu = X^T.u_l (comm portion) */
      ssgl_radd  (uu, w, dim, stages);

      /* z = X.uu */
      rvec_zero(z,n);
      x_ptr=x;
      iptr = col_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;
          dlen = PetscBLASIntCast(len);
          BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
          x_ptr+=len;
        }

      /* compute v_l = v_l - z */
      rvec_zero(v+a_n,a_m-a_n);
      dlen = PetscBLASIntCast(n);
      BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);

      /* compute u_l = A.v_l */
      if (a_n!=a_m)
        {gs_gop_hc(gs_handle,v,"+\0",dim);}
      rvec_zero(u,n);
      do_matvec(xxt_handle->mvi,v,u);

      /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
      dlen = PetscBLASIntCast(n);
      alpha = BLASdot_(&dlen,u,&i1,v,&i1);
      /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
      grop_hc(&alpha, &alpha_w, 1, op, dim);

      alpha = (PetscScalar) sqrt((double)alpha);

      /* check for small alpha                             */
      /* LATER use this to detect and determine null space */
      if (fabs(alpha)<1.0e-14)
        {SETERRQ1(PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);}

      /* compute v_l = v_l/sqrt(alpha) */
      rvec_scale(v,1.0/alpha,n);

      /* add newly generated column, v_l, to X */
      flag = 1;
      off=len=0;
      for (k=0; k<n; k++)
        {
          if (v[k]!=0.0)
            {
              len=k;
              if (flag)
                {off=k; flag=0;}
            }
        }

      len -= (off-1);

      if (len>0)
        {
          if ((xxt_nnz+len)>xxt_max_nnz)
            {
              ierr = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
              xxt_max_nnz *= 2;
              x_ptr = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar));
              rvec_copy(x_ptr,x,xxt_nnz);
              free(x);
              x = x_ptr;
              x_ptr+=xxt_nnz;
            }
          xxt_nnz += len;
          rvec_copy(x_ptr,v+off,len);

          /* keep track of number of zeros */
          if (dim)
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xxt_zero_nnz++;}
                }
            }
          else
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xxt_zero_nnz_0++;}
                }
            }
          col_indices[2*i] = off;
          col_sz[i] = col_indices[2*i+1] = len;
          col_vals[i] = x_ptr;
        }
      else
        {
          col_indices[2*i] = 0;
          col_sz[i] = col_indices[2*i+1] = 0;
          col_vals[i] = x_ptr;
        }
    }

  /* close off stages for execution phase */
  while (dim!=level)
    {
      stages[dim++]=i;
      ierr = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
    }
  stages[dim]=i;

  xxt_handle->info->n=xxt_handle->mvi->n;
  xxt_handle->info->m=m;
  xxt_handle->info->nnz=xxt_nnz;
  xxt_handle->info->max_nnz=xxt_max_nnz;
  xxt_handle->info->msg_buf_sz=stages[level]-stages[0];
  xxt_handle->info->solve_uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->solve_w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->x=x;
  xxt_handle->info->col_vals=col_vals;
  xxt_handle->info->col_sz=col_sz;
  xxt_handle->info->col_indices=col_indices;
  xxt_handle->info->stages=stages;
  xxt_handle->info->nsolves=0;
  xxt_handle->info->tot_solve_time=0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}

/**************************************xxt.c***********************************/
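/*
   Apply the factored inverse: uc is overwritten with X.(X^T.uc), which by
   construction of X (X^T.A.X = I) equals A^{-1}.uc.  The first pass forms
   the local dot products uu = X^T.uc, ssgl_radd() sums uu across the
   processor tree stage by stage, and the second pass expands uc = X.uu.
*/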
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt off, len, *iptr;
  PetscInt level       =xxt_handle->level;
  PetscInt n           =xxt_handle->info->n;
  PetscInt m           =xxt_handle->info->m;
  PetscInt *stages     =xxt_handle->info->stages;
  PetscInt *col_indices=xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu=xxt_handle->info->solve_uu;
  PetscScalar *solve_w =xxt_handle->info->solve_w;
  PetscScalar *x       =xxt_handle->info->x;
  PetscBLASInt i1 = 1,dlen;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,x_ptr,&i1);
    }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) {ssgl_radd(uu_ptr, solve_w, level, stages);}

  rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
    }
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (xxt_handle==NULL)
    {SETERRQ(PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL\n");}

  vals[0]=vals[1]=xxt_handle->id;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xxt_handle->id<=0))
    {SETERRQ3(PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1],xxt_handle->id);}
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
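/*
   In outline: separators are found by recursive bisection of the processor
   hypercube, top down.  At each edge one half of the hc fires a signal of
   ones through the gs handle; locally owned dofs on the other half that
   receive a nonzero are shared across the cut and join that level's
   separator set.  Dofs never marked belong to level 0 (purely local).  The
   firing order fo[] is written back to front, so level-0 dofs end up first
   and the top-level separator last - exactly the order in which
   xxt_generate() fires columns.
*/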
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt i, ct, id;
  PetscInt mask, edge, *iptr;
  PetscInt *dir, *used;
  PetscInt sum[4], w[4];
  PetscScalar rsum[4], rw[4];
  PetscInt op[] = {GL_ADD,0};
  PetscScalar *lhs, *rhs;
  PetscInt *nsep, *lnsep, *fo, nfo=0;
  gs_ADT gs_handle=xxt_handle->mvi->gs_handle;
  PetscInt *local2global=xxt_handle->mvi->local2global;
  PetscInt  n=xxt_handle->mvi->n;
  PetscInt  m=xxt_handle->mvi->m;
  PetscInt level=xxt_handle->level;
  PetscInt shared=FALSE;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  ivec_zero(dir  ,level+1);
  ivec_zero(nsep ,level+1);
  ivec_zero(lnsep,level+1);
  ivec_set (fo   ,-1,n+1);
  ivec_zero(used,n);

  lhs  = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs  = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  rvec_zero(lhs,m);
  rvec_set(lhs,1.0,n);
  gs_gop_hc(gs_handle,lhs,"+\0",level);
  rvec_zero(rsum,2);
  for (ct=i=0;i<n;i++)
    {
      if (lhs[i]!=0.0)
        {rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];}
    }
  grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  if (fabs(rsum[0]-rsum[1])>EPS)
    {shared=TRUE;}

  xxt_handle->info->n_global=xxt_handle->info->m_global=(PetscInt) rsum[0];
  xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
  if (shared)
    {
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
          gs_gop_hc(gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
          gs_gop_hc(gs_handle,rhs,"+\0",edge);

          for (i=0;i<n;i++)
            {
              if (id< mask)
                {
                  if (lhs[i]!=0.0)
                    {lhs[i]=1.0;}
                }
              if (id>=mask)
                {
                  if (rhs[i]!=0.0)
                    {rhs[i]=1.0;}
                }
            }

          if (id< mask)
            {gs_gop_hc(gs_handle,lhs,"+\0",edge-1);}
          else
            {gs_gop_hc(gs_handle,rhs,"+\0",edge-1);}

          /* count number of dofs I own that have signal and not in sep set */
          rvec_zero(rsum,4);
          for (ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if (id< mask)
                    {
                      if (lhs[i]!=0.0)
                        {sum[0]++; rsum[0]+=1.0/lhs[i];}
                    }
                  /* number of dofs to be marked on rhs hc */
                  if (id>=mask)
                    {
                      if (rhs[i]!=0.0)
                        {sum[1]++; rsum[1]+=1.0/rhs[i];}
                    }
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
          giop_hc(sum,w,4,op,edge);
          grop_hc(rsum,rw,4,op,edge);
          rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

          if (id<mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(lhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n)
                        {SETERRQ(PETSC_ERR_PLIB,"nfo about to exceed n\n");}

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]=(PetscInt) rsum[0];
              dir [edge]=LEFT;
            }

          if (id>=mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(rhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n)
                        {SETERRQ(PETSC_ERR_PLIB,"nfo about to exceed n\n");}

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]= (PetscInt) rsum[1];
              dir [edge]=RIGHT;
            }

          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this?                */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }
  else
    {
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
          gs_gop_hc(gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
          gs_gop_hc(gs_handle,rhs,"+\0",edge);

          /* count number of dofs I own that have signal and not in sep set */
          for (ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if ((id< mask)&&(lhs[i]!=0.0)) {sum[0]++;}
                  /* number of dofs to be marked on rhs hc */
                  if ((id>=mask)&&(rhs[i]!=0.0)) {sum[1]++;}
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          giop_hc(sum,w,4,op,edge);

          /* lhs hc wins */
          if (sum[2]>=sum[3])
            {
              if (id<mask)
                {
                  /* mark dofs I own that have signal and not in sep set */
                  for (ct=i=0;i<n;i++)
                    {
                      if ((!used[i])&&(lhs[i]!=0.0))
                        {
                          ct++; nfo++;
                          *--iptr = local2global[i];
                          used[i]=edge;
                        }
                    }
                  if (ct>1) {ivec_sort(iptr,ct);}
                  lnsep[edge]=ct;
                }
              nsep[edge]=sum[0];
              dir [edge]=LEFT;
            }
          /* rhs hc wins */
          else
            {
              if (id>=mask)
                {
                  /* mark dofs I own that have signal and not in sep set */
                  for (ct=i=0;i<n;i++)
                    {
                      if ((!used[i])&&(rhs[i]!=0.0))
                        {
                          ct++; nfo++;
                          *--iptr = local2global[i];
                          used[i]=edge;
                        }
                    }
                  if (ct>1) {ivec_sort(iptr,ct);}
                  lnsep[edge]=ct;
                }
              nsep[edge]=sum[1];
              dir [edge]=RIGHT;
            }
          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this?                */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }

  /* level 0 is on processor case - so mark the remainder */
  for (ct=i=0;i<n;i++)
    {
      if (!used[i])
        {
          ct++; nfo++;
          *--iptr = local2global[i];
          used[i]=edge;
        }
    }
  if (ct>1) {ivec_sort(iptr,ct);}
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=LEFT;

  xxt_handle->info->nsep=nsep;
  xxt_handle->info->lnsep=lnsep;
  xxt_handle->info->fo=fo;
  xxt_handle->info->nfo=nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data)
{
  mv_info *mvi;

  mvi = (mv_info*)malloc(sizeof(mv_info));
  mvi->n=n;
  mvi->m=m;
  mvi->n_global=-1;
  mvi->m_global=-1;
  mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt));
  ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
  mvi->grid_data=grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->gs_handle = gs_init(local2global, m, num_nodes);

  return(mvi);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}
884