/*************************************xxt.c************************************
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xxt_solver_info {
  PetscInt    n, m, n_global, m_global;
  PetscInt    nnz, max_nnz, msg_buf_sz;
  PetscInt    *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt    *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt    nsolves;
  PetscScalar tot_solve_time;
} xxt_info;

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt     *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xxt_info *info;
  mv_info  *mvi;
};
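
/* Overview (summarizing xxt_generate() and do_xxt_solve() below): the solver
   builds a sparse matrix X whose columns are A-conjugate and normalized, i.e.
   X^T A X = I, so that A^{-1} = X X^T.  Factorization generates one column of
   X per separator dof, in firing order; a solve is then two sparse passes,
       u = X (X^T b),
   with one round of staged inter-processor summation in between. */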

static PetscInt n_xxt        =0;
static PetscInt n_xxt_handles=0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle);
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data);

/**************************************xxt.c***********************************/
xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... potential problem here (n_xxt is never decremented) */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL; xxt_handle->mvi = NULL;

  return(xxt_handle);
}

/**************************************xxt.c***********************************/
PetscErrorCode XXT_factor(xxt_ADT xxt_handle,     /* prev. allocated xxt handle  */
                          PetscInt *local2global, /* global column mapping       */
                          PetscInt n,             /* local num rows              */
                          PetscInt m,             /* local num cols              */
                          PetscErrorCode (*matvec)(void*,PetscScalar*,PetscScalar*), /* b_loc=A_local.x_loc */
                          void *grid_data)        /* grid data for matvec        */
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* only 2^k processor counts for now, and all nodes participating */
  if ((1<<(xxt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank deficiency */
  xxt_handle->ns=0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return(do_xxt_factor(xxt_handle));
}

/**************************************xxt.c***********************************/
PetscErrorCode XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b) PCTFS_rvec_copy(x,b,xxt_handle->mvi->n);
  return do_xxt_solve(xxt_handle,x);
}
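
/* A minimal usage sketch (hypothetical caller: my_map, my_n, my_m, my_matvec,
   and my_grid are placeholders supplied by the application, not part of this
   file):

     xxt_ADT handle = XXT_new();
     XXT_factor(handle, my_map, my_n, my_m, my_matvec, my_grid); // build X
     XXT_solve(handle, x, b);     // x = A^{-1} b
     XXT_solve(handle, x2, NULL); // in place: x2 holds b on entry
     XXT_free(handle);
*/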

/**************************************xxt.c***********************************/
PetscInt XXT_free(xxt_ADT xxt_handle)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if a NULL pointer is passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}

/**************************************xxt.c***********************************/
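/* Reports min/max/avg/total nnz in X, local rows n, and message-buffer size
   across processors, plus min/max/avg solve time.  C(2d) and C(3d) below are
   the measured constants in nnz = C*n^{3/2} and nnz = C*n^{5/3}, the expected
   fill for 2D and 3D problems respectively (matching the exponents used in
   the prints). */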
PetscErrorCode XXT_stats(xxt_ADT xxt_handle)
{
  PetscInt       op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       vals[9],  work[9];
  PetscScalar    fvals[3], fwork[3];
  PetscErrorCode ierr;

  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* if the factorization has not been done there are no stats */
  if (!xxt_handle->info||!xxt_handle->mvi) {
    if (!PCTFS_my_id) { ierr = PetscPrintf(PETSC_COMM_WORLD,"XXT_stats() :: no stats available!\n");CHKERRQ(ierr); }
    return 1;
  }

  vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]=xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++;
  PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!PCTFS_my_id) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_nnz=%D\n",PCTFS_my_id,vals[0]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_nnz=%D\n",PCTFS_my_id,vals[1]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_nnz=%D\n",PCTFS_my_id,vals[2]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(2d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.5)));CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(3d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.6667)));CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_n  =%D\n",PCTFS_my_id,vals[3]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_n  =%D\n",PCTFS_my_id,vals[4]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_n  =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_n  =%D\n",PCTFS_my_id,vals[5]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_buf=%D\n",PCTFS_my_id,vals[6]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_buf=%D\n",PCTFS_my_id,vals[7]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_slv=%g\n",PCTFS_my_id,fvals[0]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_slv=%g\n",PCTFS_my_id,fvals[1]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);CHKERRQ(ierr);
  }

  return(0);
}

/*************************************xxt.c************************************

Description: get A_local, the local portion of the global coarse matrix,
a row-distributed nxm matrix with n<m.
   o my_ml holds the address of the ML struct associated with A_local and
     the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1)
   o local2global holds the global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
     PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec(void *data, double *in, double *out)
**************************************xxt.c***********************************/
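/* A sketch of a conforming matvec callback (hypothetical: MyGrid and its
   diagonal A_local are placeholders for whatever the application provides):

     typedef struct { PetscInt n; PetscScalar *diag; } MyGrid;

     static PetscErrorCode my_matvec(void *data, PetscScalar *in, PetscScalar *out)
     {
       MyGrid   *g = (MyGrid*)data;  // the grid_data handed to XXT_factor()
       PetscInt i;
       for (i=0; i<g->n; i++) out[i] = g->diag[i]*in[i];  // out = A_local.in
       return 0;
     }
*/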
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}

/**************************************xxt.c***********************************/
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt       i,j,k,idex;
  PetscInt       dim, col;
  PetscScalar    *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt       *segs;
  PetscInt       op[] = {GL_ADD,0};
  PetscInt       off, len;
  PetscScalar    *x_ptr;
  PetscInt       *iptr, flag;
  PetscInt       start=0, end, work;
  PetscInt       op2[] = {GL_MIN,0};
  PCTFS_gs_ADT   PCTFS_gs_handle;
  PetscInt       *nsep, *lnsep, *fo;
  PetscInt       a_n            =xxt_handle->mvi->n;
  PetscInt       a_m            =xxt_handle->mvi->m;
  PetscInt       *a_local2global=xxt_handle->mvi->local2global;
  PetscInt       level;
  PetscInt       xxt_nnz=0, xxt_max_nnz=0;
  PetscInt       n, m;
  PetscInt       *col_sz, *col_indices, *stages;
  PetscScalar    **col_vals, *x;
  PetscInt       n_global;
  PetscInt       xxt_zero_nnz  =0;
  PetscInt       xxt_zero_nnz_0=0;
  PetscBLASInt   i1            = 1,dlen;
  PetscScalar    dm1           = -1.0;
  PetscErrorCode ierr;

  n               = xxt_handle->mvi->n;
  nsep            = xxt_handle->info->nsep;
  lnsep           = xxt_handle->info->lnsep;
  fo              = xxt_handle->info->fo;
  end             = lnsep[0];
  level           = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++) j+=nsep[i];

  m = j-xxt_handle->ns;
  if (m!=j) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"xxt_generate() :: null space exists %D %D %D\n",m,j,xxt_handle->ns);CHKERRQ(ierr);
  }

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  col_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  col_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    col_indices[j]=col_indices[j+1]=col_sz[i]=-1;
    col_vals[i]   = NULL;
  }
  col_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++) segs[i+1] += segs[i];
  stages[0] = segs[0];
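
  /* After the prefix sum, segs[d] is the number of X columns generated once
     all separators up through hypercube level d have fired; stages[] records
     the column index reached at each level and is later consumed by
     PCTFS_ssgl_radd() to stage the inter-processor sums. */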

  /* temporary vectors  */
  u  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++) j+=nsep[i];

  /* storage for sparse x values */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5*PetscPowReal(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x           = (PetscScalar*) malloc(xxt_max_nnz*sizeof(PetscScalar));
  xxt_nnz     = 0;
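
  /* Column generation, in outline: for each separator dof `col` (taken in
     firing order), start from the unit vector v = e_col, A-conjugate it
     against the columns already in X,
         v <- v - X (X^T A v),
     normalize by alpha = v^T A v,
         v <- v / sqrt(alpha),
     and append the nonzero run of v as the next column of X.  The
     xxt_max_nnz set above is only a heuristic estimate (2.5*n_global^{5/3}
     plus separator replication); the buffer is doubled on demand below. */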

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0; i<m; i++) {
    /* time to move to the next level? */
    while (i==segs[dim]) {
      if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
      stages[dim++]=i;
      end         +=lnsep[dim];
    }
    stages[dim]=i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start<end) ? (col=fo[start]) : (col=INT_MAX);
    PCTFS_giop_hc(&col,&work,1,op2,dim);

    /* shouldn't need this */
    if (col==INT_MAX) {
      ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v,a_m);
    if (col==fo[start]) {
      start++;
      idex=PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idex!=-1) {
        v[idex] = 1.0; j++;
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
    } else {
      idex=PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex!=-1) v[idex] = 1.0;
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* uu = X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call? */
    PCTFS_rvec_zero(uu,m);
    x_ptr=x;
    iptr = col_indices;
    for (k=0; k<i; k++) {
      off   = *iptr++;
      len   = *iptr++;
      ierr  = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
      PetscStackCallBLAS("BLASdot",uu[k] = BLASdot_(&dlen,u+off,&i1,x_ptr,&i1));
      x_ptr+=len;
    }

    /* uu = X^T.u_l (comm portion) */
    ierr = PCTFS_ssgl_radd(uu, w, dim, stages);CHKERRQ(ierr);

    /* z = X.uu */
    PCTFS_rvec_zero(z,n);
    x_ptr=x;
    iptr = col_indices;
    for (k=0; k<i; k++) {
      off  = *iptr++;
      len  = *iptr++;
      ierr = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
      PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1));
      x_ptr+=len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v+a_n,a_m-a_n);
    ierr = PetscBLASIntCast(n,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1));

    /* compute u_l = A.v_l */
    if (a_n!=a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim);
    PCTFS_rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* compute alpha = v_l^T.u_l - local portion */
    ierr = PetscBLASIntCast(n,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASdot",alpha = BLASdot_(&dlen,u,&i1,v,&i1));
    /* compute alpha = v_l^T.u_l - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    if (PetscAbsScalar(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v,1.0/alpha,n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off=len=0;
    for (k=0; k<n; k++) {
      if (v[k]!=0.0) {
        len=k;
        if (flag) { off=k; flag=0; }
      }
    }

    /* convert last-nonzero index into run length: len = last - off + 1 */
    len -= (off-1);

    if (len>0) {
      if ((xxt_nnz+len)>xxt_max_nnz) {
        ierr         = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
        xxt_max_nnz *= 2;
        x_ptr        = (PetscScalar*) malloc(xxt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr,x,xxt_nnz);
        free(x);
        x     = x_ptr;
        x_ptr+=xxt_nnz;
      }
      xxt_nnz += len;
      PCTFS_rvec_copy(x_ptr,v+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xxt_zero_nnz++;
        }
      } else {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xxt_zero_nnz_0++;
        }
      }
      col_indices[2*i] = off;
      col_sz[i] = col_indices[2*i+1] = len;
      col_vals[i] = x_ptr;
    } else {
      col_indices[2*i] = 0;
      col_sz[i]        = col_indices[2*i+1] = 0;
      col_vals[i]      = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim!=level) {
    stages[dim++] = i;
    ierr          = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
  }
  stages[dim]=i;

  xxt_handle->info->n              = xxt_handle->mvi->n;
  xxt_handle->info->m              = m;
  xxt_handle->info->nnz            = xxt_nnz;
  xxt_handle->info->max_nnz        = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz     = stages[level]-stages[0];
  xxt_handle->info->solve_uu       = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->solve_w        = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->x              = x;
  xxt_handle->info->col_vals       = col_vals;
  xxt_handle->info->col_sz         = col_sz;
  xxt_handle->info->col_indices    = col_indices;
  xxt_handle->info->stages         = stages;
  xxt_handle->info->nsolves        = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}

/**************************************xxt.c***********************************/
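/* Apply the factored inverse in place: uc <- X (X^T uc).  The "Y^T" in the
   comments below is a holdover from the companion nonsymmetric XYT solver
   (xyt.c); here Y = X. */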
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt       off, len, *iptr;
  PetscInt       level        = xxt_handle->level;
  PetscInt       n            = xxt_handle->info->n;
  PetscInt       m            = xxt_handle->info->m;
  PetscInt       *stages      = xxt_handle->info->stages;
  PetscInt       *col_indices = xxt_handle->info->col_indices;
  PetscScalar    *x_ptr, *uu_ptr;
  PetscScalar    *solve_uu = xxt_handle->info->solve_uu;
  PetscScalar    *solve_w  = xxt_handle->info->solve_w;
  PetscScalar    *x        = xxt_handle->info->x;
  PetscBLASInt   i1        = 1,dlen;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  PCTFS_rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) {
    off       =*iptr++;
    len       =*iptr++;
    ierr      = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASdot",*uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,x_ptr,&i1));
  }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) {ierr = PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages);CHKERRQ(ierr);}

  PCTFS_rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) {
    off  =*iptr++;
    len  =*iptr++;
    ierr = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1));
  }
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (!xxt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xxt_handle);

  vals[0]=vals[1]=xxt_handle->id;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xxt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1],xxt_handle->id);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
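/* Separator determination, in outline: walk the processor hypercube from the
   top level down.  At each level the two halves exchange indicator vectors
   via PCTFS_gs_gop_hc(); dofs that receive a signal from the opposite half
   lie on the interface and join that level's separator set, recorded in
   firing order in fo[].  The `shared` branch handles dofs replicated across
   processors: shared is set when the global sums of 1/multiplicity and of
   multiplicity disagree, i.e. some dof lives on more than one processor. */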
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt     *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD,0};
  PetscScalar  *lhs, *rhs;
  PetscInt     *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt     *local2global   = xxt_handle->mvi->local2global;
  PetscInt     n               = xxt_handle->mvi->n;
  PetscInt     m               = xxt_handle->mvi->m;
  PetscInt     level           = xxt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir,level+1);
  PCTFS_ivec_zero(nsep,level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo,-1,n+1);
  PCTFS_ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  PCTFS_rvec_zero(rsum,2);
  for (i=0; i<n; i++) {
    if (lhs[i]!=0.0) {
      rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];
    }
  }
  PCTFS_grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  if (PetscAbsScalar(rsum[0]-rsum[1])>EPS) shared=1;

  xxt_handle->info->n_global=xxt_handle->info->m_global=(PetscInt) rsum[0];
  xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
  if (shared) {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {

      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      for (i=0;i<n;i++) {
        if (id< mask) {
          if (lhs[i]!=0.0) lhs[i]=1.0;
        }
        if (id>=mask) {
          if (rhs[i]!=0.0) rhs[i]=1.0;
        }
      }

      if (id< mask) PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge-1);
      else          PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge-1);

      /* count number of dofs I own that have signal and are not in the sep set */
      PCTFS_rvec_zero(rsum,4);
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id< mask) {
            if (lhs[i]!=0.0) { sum[0]++; rsum[0]+=1.0/lhs[i]; }
          }
          /* number of dofs to be marked on rhs hc */
          if (id>=mask) {
            if (rhs[i]!=0.0) { sum[1]++; rsum[1]+=1.0/rhs[i]; }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);
      PCTFS_grop_hc(rsum,rw,4,op,edge);
      rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

      if (id<mask) {
        /* mark dofs I own that have signal and are not in the sep set */
        for (ct=i=0;i<n;i++) {
          if ((!used[i])&&(lhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct>1) PCTFS_ivec_sort(iptr,ct);

        lnsep[edge]=ct;
        nsep[edge]=(PetscInt) rsum[0];
        dir [edge]=LEFT;
      }

      if (id>=mask) {
        /* mark dofs I own that have signal and are not in the sep set */
        for (ct=i=0;i<n;i++) {
          if ((!used[i])&&(rhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct>1) PCTFS_ivec_sort(iptr,ct);

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt) rsum[1];
        dir [edge]  = RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  } else {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      /* count number of dofs I own that have signal and are not in the sep set */
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id< mask)&&(lhs[i]!=0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id>=mask)&&(rhs[i]!=0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);

      /* lhs hc wins */
      if (sum[2]>=sum[3]) {
        if (id<mask) {
          /* mark dofs I own that have signal and are not in the sep set */
          for (ct=i=0;i<n;i++) {
            if ((!used[i])&&(lhs[i]!=0.0)) {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i]=edge;
            }
          }
          if (ct>1) PCTFS_ivec_sort(iptr,ct);
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[0];
        dir [edge]=LEFT;
      } else { /* rhs hc wins */
        if (id>=mask) {
          /* mark dofs I own that have signal and are not in the sep set */
          for (ct=i=0;i<n;i++) {
            if ((!used[i])&&(rhs[i]!=0.0)) {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i]=edge;
            }
          }
          if (ct>1) PCTFS_ivec_sort(iptr,ct);
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[1];
        dir [edge]=RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  }

  /* level 0 is the on-processor case - so mark the remainder */
  for (ct=i=0; i<n; i++) {
    if (!used[i]) {
      ct++; nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct>1) PCTFS_ivec_sort(iptr,ct);
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=LEFT;

  xxt_handle->info->nsep  = nsep;
  xxt_handle->info->lnsep = lnsep;
  xxt_handle->info->fo    = fo;
  xxt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global,PetscInt n,PetscInt m,PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*),void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info*)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt*)malloc((m+1)*sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;   /* sentinel */
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return(mvi);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  /* the callback actually receives the user's grid_data; the mv_info* type in
     its prototype comes from the cast made in XXT_factor() */
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}
794