
/*
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
*/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0

typedef struct xxt_solver_info {
  PetscInt      n, m, n_global, m_global;
  PetscInt      nnz, max_nnz, msg_buf_sz;
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt     *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt      nsolves;
  PetscScalar   tot_solve_time;
} xxt_info;

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt    *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt  id;
  PetscInt  ns;
  PetscInt  level;
  xxt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xxt         = 0;
static PetscInt n_xxt_handles = 0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle);
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle);
static mv_info       *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);

xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt; it is never decremented, so a very long-running code could overflow it */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL;
  xxt_handle->mvi  = NULL;

  return (xxt_handle);
}

PetscErrorCode XXT_factor(xxt_ADT   xxt_handle,                                           /* prev. allocated xxt  handle */
                          PetscInt *local2global,                                         /* global column mapping       */
                          PetscInt  n,                                                    /* local num rows              */
                          PetscInt  m,                                                    /* local num cols              */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc         */
                          void *grid_data)                                                /* grid data for matvec        */
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xxt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %d != %d", 1 << PCTFS_i_log2_num_nodes, PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info *)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode(*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns = 0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return (do_xxt_factor(xxt_handle));
}

PetscErrorCode XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b) PCTFS_rvec_copy(x, b, xxt_handle->mvi->n);
  return do_xxt_solve(xxt_handle, x);
}

PetscInt XXT_free(xxt_ADT xxt_handle)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return (0);
}
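
/* A minimal usage sketch of the full XXT lifecycle, kept out of the build
   with #if 0: `my_matvec`, `my_local2global`, and `my_grid_data` are
   hypothetical stand-ins for whatever the caller supplies (see the matvec
   sketch further below). */
#if 0
static PetscErrorCode example_coarse_solve(PetscInt *my_local2global, PetscInt n, PetscInt m, void *my_grid_data, PetscScalar *x, PetscScalar *b)
{
  xxt_ADT handle = XXT_new();                                         /* allocate an empty handle          */
  XXT_factor(handle, my_local2global, n, m, my_matvec, my_grid_data); /* build X with X^T.A.X = I          */
  XXT_solve(handle, x, b);                                            /* x = X.X^T.b, i.e. x = A^{-1}.b    */
  XXT_free(handle);                                                   /* release factor and matvec storage */
  return 0;
}
#endif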

/* This function is currently unused */
PetscErrorCode XXT_stats(xxt_ADT xxt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* if factorization not done there are no stats */
  if (!xxt_handle->info || !xxt_handle->mvi) {
    if (!PCTFS_my_id) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "XXT_stats() :: no stats available!\n"));
    PetscFunctionReturn(0);
  }

  vals[0] = vals[1] = vals[2] = xxt_handle->info->nnz;
  vals[3] = vals[4] = vals[5] = xxt_handle->mvi->n;
  vals[6] = vals[7] = vals[8] = xxt_handle->info->msg_buf_sz;
  PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op);

  fvals[0] = fvals[1] = fvals[2] = xxt_handle->info->tot_solve_time / xxt_handle->info->nsolves++;
  PCTFS_grop(fvals, fwork, PETSC_STATIC_ARRAY_LENGTH(fop) - 1, fop);

  if (!PCTFS_my_id) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xxt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[0]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xxt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[1]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xxt_nnz=%g\n", PCTFS_my_id, (double)(1.0 * vals[2] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: tot   xxt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[2]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: xxt   C(2d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.5)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: xxt   C(3d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.6667)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xxt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[3]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xxt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[4]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xxt_n  =%g\n", PCTFS_my_id, (double)(1.0 * vals[5] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: tot   xxt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[5]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xxt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[6]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xxt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[7]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xxt_buf=%g\n", PCTFS_my_id, (double)(1.0 * vals[8] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xxt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[0])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xxt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[1])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xxt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[2] / PCTFS_num_nodes)));
  }
  PetscFunctionReturn(0);
}

/*
Description: A_local is the local portion of the global coarse matrix, a
row-distributed n x m matrix with n < m.
   o my_ml holds the address of the ML struct associated with A_local and the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1)
   o local2global holds the global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed
     using PCTFS_gs_init/gop)

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
*/
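
/* A sketch of a callback matching the mylocmatvec contract above, kept out of
   the build with #if 0 and written against a hypothetical CSR-style struct
   `my_grid`; XXT only requires that the callback compute out = A_local . in
   on the local rows, with the signature XXT_factor() expects. */
#if 0
typedef struct {
  PetscInt     n;              /* local row count       */
  PetscInt    *row_ptr, *cols; /* CSR row pointers/cols */
  PetscScalar *vals;           /* CSR nonzero values    */
} my_grid;

static PetscErrorCode my_matvec(void *data, PetscScalar *in, PetscScalar *out)
{
  my_grid *g = (my_grid *)data; /* the grid_data pointer handed to XXT_factor() */

  for (PetscInt row = 0; row < g->n; row++) {
    out[row] = 0.0;
    for (PetscInt k = g->row_ptr[row]; k < g->row_ptr[row + 1]; k++) out[row] += g->vals[k] * in[g->cols[k]];
  }
  return 0;
}
#endif
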
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}

static PetscErrorCode xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt      i, j, k, idex;
  PetscInt      dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt      op[] = {GL_ADD, 0};
  PetscInt      off, len;
  PetscScalar  *x_ptr;
  PetscInt     *iptr, flag;
  PetscInt      start = 0, end, work;
  PetscInt      op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT  PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt      a_n            = xxt_handle->mvi->n;
  PetscInt      a_m            = xxt_handle->mvi->m;
  PetscInt     *a_local2global = xxt_handle->mvi->local2global;
  PetscInt      level;
  PetscInt      xxt_nnz = 0, xxt_max_nnz = 0;
  PetscInt      n, m;
  PetscInt     *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  PetscInt      n_global;
  PetscBLASInt  i1  = 1, dlen;
  PetscScalar   dm1 = -1.0;

  n               = xxt_handle->mvi->n;
  nsep            = xxt_handle->info->nsep;
  lnsep           = xxt_handle->info->lnsep;
  fo              = xxt_handle->info->fo;
  end             = lnsep[0];
  level           = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xxt_handle->ns;
  if (m != j) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xxt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xxt_handle->ns));

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  col_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  col_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    col_indices[j] = col_indices[j + 1] = col_sz[i] = -1;
    col_vals[i]                                     = NULL;
  }
  col_indices[j] = -1;
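
  /* Storage sketch: column i of X lives in the contiguous run
     col_vals[i][0..col_sz[i]-1], starting at local row col_indices[2*i],
     with col_indices[2*i+1] == col_sz[i]; e.g. a column whose nonzeros
     occupy local rows 3..5 is recorded as the pair (off,len) = (3,3).
     The trailing -1 written above terminates the (off,len) list that
     do_xxt_solve() walks. */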

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PCTFS_ivec_zero(stages, level + 1);
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];
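  /* e.g. with level = 2 and nsep = {4, 2, 3} this gives segs = {4, 6, 9}:
     segs[dim] is the column index at which sub-hc dim is exhausted, which is
     exactly what the firing loop below tests against */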

  /* temporary vectors  */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x           = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
  xxt_nnz     = 0;

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
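  /* Each pass below A-orthogonalizes a fired unit vector e_col against the
     columns generated so far (Gram-Schmidt in the A inner product):
         v  = e_col - X.X^T.A.e_col
         v /= sqrt(v^T.A.v)
     so that X^T.A.X = I once all m columns are fired, giving A^{-1} = X.X^T
     as applied in do_xxt_solve() */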
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    col = (start < end) ? fo[start] : INT_MAX;
    PCTFS_giop_hc(&col, &work, 1, op2, dim);

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(0, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v, a_m);
    if (col == fo[start]) {
      start++;
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idex != -1) {
        v[idex] = 1.0;
        j++;
      } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
    } else {
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex != -1) v[idex] = 1.0;
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u, n);
    do_matvec(xxt_handle->mvi, v, u);

    /* uu =  X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    PCTFS_rvec_zero(uu, m);
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, x_ptr, &i1));
      x_ptr += len;
    }

    /* uu = X^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PCTFS_rvec_zero(z, n);
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v + a_n, a_m - a_n);
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim);
    PCTFS_rvec_zero(u, n);
    do_matvec(xxt_handle->mvi, v, u);

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, v, &i1));
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v, 1.0 / alpha, n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);

    if (len > 0) {
      if ((xxt_nnz + len) > xxt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for X by 2x!\n"));
        xxt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr, x, xxt_nnz);
        free(x);
        x = x_ptr;
        x_ptr += xxt_nnz;
      }
      xxt_nnz += len;
      PCTFS_rvec_copy(x_ptr, v + off, len);

      col_indices[2 * i] = off;
      col_sz[i] = col_indices[2 * i + 1] = len;
      col_vals[i]                        = x_ptr;
    } else {
      col_indices[2 * i] = 0;
      col_sz[i] = col_indices[2 * i + 1] = 0;
      col_vals[i]                        = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xxt_handle->info->n              = xxt_handle->mvi->n;
  xxt_handle->info->m              = m;
  xxt_handle->info->nnz            = xxt_nnz;
  xxt_handle->info->max_nnz        = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz     = stages[level] - stages[0];
  xxt_handle->info->solve_uu       = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->solve_w        = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->x              = x;
  xxt_handle->info->col_vals       = col_vals;
  xxt_handle->info->col_sz         = col_sz;
  xxt_handle->info->col_indices    = col_indices;
  xxt_handle->info->stages         = stages;
  xxt_handle->info->nsolves        = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return (0);
}

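/* Apply the factorization in place: uc <- X.X^T.uc.  The first pass forms the
   local pieces of beta = X^T.uc, PCTFS_ssgl_radd() completes beta across the
   hypercube stages, and the second pass accumulates uc = X.beta. */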
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level       = xxt_handle->level;
  PetscInt     n           = xxt_handle->info->n;
  PetscInt     m           = xxt_handle->info->m;
  PetscInt    *stages      = xxt_handle->info->stages;
  PetscInt    *col_indices = xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu = xxt_handle->info->solve_uu;
  PetscScalar *solve_w  = xxt_handle->info->solve_w;
  PetscScalar *x        = xxt_handle->info->x;
  PetscBLASInt i1       = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PCTFS_rvec_zero(uu_ptr, m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, x_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));

  PCTFS_rvec_zero(uc, n);

  /* x = X.uu */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xxt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", xxt_handle);

  vals[0] = vals[1] = xxt_handle->id;
  PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op);
  PetscCheck(!(vals[0] != vals[1]) && !(xxt_handle->id <= 0), PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xxt_handle->id);
  PetscFunctionReturn(0);
}

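/* Determine separators by recursive bisection over the processor hypercube:
   at each edge the two halves (split on the current mask bit) fire ones and
   collect responses via PCTFS_gs_gop_hc(), so a dof that sees a signal from
   the other half lies on the interface and joins the separator set; the
   winning half is chosen for load balance, and the firing order fo[] is
   accumulated from the top level down. */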
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xxt_handle->mvi->local2global;
  PetscInt     n               = xxt_handle->mvi->n;
  PetscInt     m               = xxt_handle->mvi->m;
  PetscInt     level           = xxt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PCTFS_ivec_zero(dir, level + 1);
  PCTFS_ivec_zero(nsep, level + 1);
  PCTFS_ivec_zero(lnsep, level + 1);
  PCTFS_ivec_set(fo, -1, n + 1);
  PCTFS_ivec_zero(used, n);

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs, m);
  PCTFS_rvec_set(lhs, 1.0, n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level);
  PCTFS_rvec_zero(rsum, 2);
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
  }
  PCTFS_grop_hc(rsum, rw, 2, op, level);
  rsum[0] += 0.1;
  rsum[1] += 0.1;

  if (PetscAbsScalar(rsum[0] - rsum[1]) > EPS) shared = 1;

  xxt_handle->info->n_global = xxt_handle->info->m_global = (PetscInt)rsum[0];
  xxt_handle->mvi->n_global = xxt_handle->mvi->m_global = (PetscInt)rsum[0];

  /* determine separator sets top down */
  if (shared) {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge);

      for (i = 0; i < n; i++) {
        if (id < mask) {
          if (lhs[i] != 0.0) lhs[i] = 1.0;
        }
        if (id >= mask) {
          if (rhs[i] != 0.0) rhs[i] = 1.0;
        }
      }

      if (id < mask) PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge - 1);
      else PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge - 1);

      /* count number of dofs I own that have signal and not in sep set */
      PCTFS_rvec_zero(rsum, 4);
      for (PCTFS_ivec_zero(sum, 4), ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id < mask) {
            if (lhs[i] != 0.0) {
              sum[0]++;
              rsum[0] += 1.0 / lhs[i];
            }
          }
          /* number of dofs to be marked on rhs hc */
          if (id >= mask) {
            if (rhs[i] != 0.0) {
              sum[1]++;
              rsum[1] += 1.0 / rhs[i];
            }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      (id < mask) ? (rsum[2] = ct) : (rsum[3] = ct);
      PCTFS_giop_hc(sum, w, 4, op, edge);
      PCTFS_grop_hc(rsum, rw, 4, op, edge);
      rsum[0] += 0.1;
      rsum[1] += 0.1;
      rsum[2] += 0.1;
      rsum[3] += 0.1;

      if (id < mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (lhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PCTFS_ivec_sort(iptr, ct);

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[0];
        dir[edge]   = LEFT;
      }

      if (id >= mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (rhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PCTFS_ivec_sort(iptr, ct);

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[1];
        dir[edge]   = RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  } else {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum, 4), ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      PCTFS_giop_hc(sum, w, 4, op, edge);

      /* lhs hc wins */
      if (sum[2] >= sum[3]) {
        if (id < mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (lhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PCTFS_ivec_sort(iptr, ct);
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[0];
        dir[edge]  = LEFT;
      } else { /* rhs hc wins */
        if (id >= mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (rhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PCTFS_ivec_sort(iptr, ct);
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[1];
        dir[edge]  = RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PCTFS_ivec_sort(iptr, ct);
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = LEFT;

  xxt_handle->info->nsep  = nsep;
  xxt_handle->info->lnsep = lnsep;
  xxt_handle->info->fo    = fo;
  xxt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info *)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return (mvi);
}

static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  /* the stored matvec was cast from the user's (void *, ...) signature in XXT_factor(),
     so grid_data is passed through as its context argument */
  A->matvec((mv_info *)A->grid_data, v, u);
  PetscFunctionReturn(0);
}
783