/*
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
*/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0

typedef struct xyt_solver_info {
  PetscInt      n, m, n_global, m_global;
  PetscInt      nnz, max_nnz, msg_buf_sz;
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt     *xcol_sz, *xcol_indices;
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt      nsolves;
  PetscScalar   tot_solve_time;
} xyt_info;
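
/*
   Storage layout (inferred from xyt_generate() and do_xyt_solve() below): the
   n x m factors X and Y are stored by column in the flat arrays x and y. For
   column i, xcol_indices[2*i] is the offset of its first nonzero and
   xcol_indices[2*i+1] (== xcol_sz[i]) is the length of the contiguous nonzero
   run; xcol_vals[i] points at that run inside x. The index list is -1
   terminated so the solver can walk it without knowing m. The y arrays mirror
   this layout.
*/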

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt    *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;

struct xyt_CDT {
  PetscInt  id;
  PetscInt  ns;
  PetscInt  level;
  xyt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xyt         = 0;
static PetscInt n_xyt_handles = 0;

/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle);
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle);
static mv_info       *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);

xyt_ADT XYT_new(void)
{
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... potential overflow problem here */
  n_xyt_handles++;
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return xyt_handle;
}

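/*
   Hedged usage sketch (illustrative only: my_map, nloc, mloc, my_matvec, and
   my_grid are hypothetical caller-side names); the TFS preconditioner in this
   directory drives these routines in essentially this order:

     xyt_ADT handle = XYT_new();
     PetscCall(XYT_factor(handle, my_map, nloc, mloc, my_matvec, my_grid));
     PetscCall(XYT_solve(handle, x_loc, b_loc)); // on return x_loc approximates A^{-1} b_loc
     PetscCall(XYT_free(handle));
*/
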
PetscErrorCode XYT_factor(xyt_ADT   xyt_handle,                                           /* prev. allocated xyt  handle */
                          PetscInt *local2global,                                         /* global column mapping       */
                          PetscInt  n,                                                    /* local num rows              */
                          PetscInt  m,                                                    /* local num cols              */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc         */
                          void *grid_data)                                                /* grid data for matvec        */
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xyt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %" PetscInt_FMT " != %" PetscInt_FMT, (PetscInt)(1 << PCTFS_i_log2_num_nodes), PCTFS_num_nodes);

  /* space for X info */
  xyt_handle->info = (xyt_info *)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode(*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank deficiency */
  xyt_handle->ns = 0;

  /* determine separators and generate firing order - NB xyt info set here */
  PetscCall(det_separators(xyt_handle));

  PetscCall(do_xyt_factor(xyt_handle));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* need to copy b into x? */
  if (b) PetscCall(PCTFS_rvec_copy(x, b, xyt_handle->mvi->n));
  PetscCall(do_xyt_solve(xyt_handle, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XYT_free(xyt_ADT xyt_handle)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  PetscCall(PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle));
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* This function is currently not used */
PetscErrorCode XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* if factorization not done there are no stats */
  if (!xyt_handle->info || !xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "XYT_stats() :: no stats available!\n"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

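  /* each quantity is replicated three times so that a single PCTFS_giop()/
     PCTFS_grop() pass returns its min, max, and sum (see op[]/fop[] above) */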
  vals[0] = vals[1] = vals[2] = xyt_handle->info->nnz;
  vals[3] = vals[4] = vals[5] = xyt_handle->mvi->n;
  vals[6] = vals[7] = vals[8] = xyt_handle->info->msg_buf_sz;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));

  fvals[0] = fvals[1] = fvals[2] = xyt_handle->info->tot_solve_time / xyt_handle->info->nsolves++;
  PetscCall(PCTFS_grop(fvals, fwork, PETSC_STATIC_ARRAY_LENGTH(fop) - 1, fop));

  if (!PCTFS_my_id) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min   xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[0]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max   xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[1]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg   xyt_nnz=%g\n", PCTFS_my_id, (double)(1.0 * vals[2] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: tot   xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[2]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: xyt   C(2d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.5)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: xyt   C(3d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.6667)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min   xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[3]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max   xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[4]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg   xyt_n  =%g\n", PCTFS_my_id, (double)(1.0 * vals[5] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: tot   xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[5]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min   xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[6]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max   xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[7]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg   xyt_buf=%g\n", PCTFS_my_id, (double)(1.0 * vals[8] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min   xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[0])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max   xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[1])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg   xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[2] / PCTFS_num_nodes)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*

Description: get A_local, the local portion of the global coarse matrix,
a row-distributed nxm matrix with n<m.
   o my_ml holds the address of the ML struct associated with A_local and the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1)
   o local2global holds the global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
   PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
*/
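
/*
   Overview (a sketch of what xyt_generate() and do_xyt_solve() below do):
   columns x_i of X and y_i of Y are generated in separator "firing" order so
   that Y^T A X = I. Each new column starts from a unit vector e_col, is
   A-orthogonalized against the previous columns (v -= X Y^T A v), and both v
   and u = A v are scaled by 1/sqrt(u^T u). A solve is then two sparse
   products around one hypercube reduction:

     uu = Y^T b   (local dots, summed via PCTFS_ssgl_radd)
     x  = X uu

   i.e. the action of A^{-1} is applied as X Y^T.
*/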
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}

static PetscErrorCode xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt      i, j, k, idx;
  PetscInt      dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt      op[] = {GL_ADD, 0};
  PetscInt      off, len;
  PetscScalar  *x_ptr, *y_ptr;
  PetscInt     *iptr, flag;
  PetscInt      start = 0, end, work;
  PetscInt      op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT  PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt      a_n            = xyt_handle->mvi->n;
  PetscInt      a_m            = xyt_handle->mvi->m;
  PetscInt     *a_local2global = xyt_handle->mvi->local2global;
  PetscInt      level;
  PetscInt      n, m;
  PetscInt     *xcol_sz, *xcol_indices, *stages;
  PetscScalar **xcol_vals, *x;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt      n_global;
  PetscInt      xt_nnz = 0, xt_max_nnz = 0;
  PetscInt      yt_nnz = 0, yt_max_nnz = 0;
  PetscBLASInt  i1  = 1, dlen;
  PetscScalar   dm1 = -1.0;

  PetscFunctionBegin;
  n               = xyt_handle->mvi->n;
  nsep            = xyt_handle->info->nsep;
  lnsep           = xyt_handle->info->lnsep;
  fo              = xyt_handle->info->fo;
  end             = lnsep[0];
  level           = xyt_handle->level;
  PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xyt_handle->ns;
  if (m != j) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xyt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xyt_handle->ns));

  PetscCall(PetscInfo(0, "xyt_generate() :: X(%" PetscInt_FMT ",%" PetscInt_FMT ")\n", n, m));

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  xcol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  xcol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    xcol_indices[j] = xcol_indices[j + 1] = xcol_sz[i] = -1;
    xcol_vals[i]                                       = NULL;
  }
  xcol_indices[j] = -1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  ycol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  ycol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    ycol_indices[j] = ycol_indices[j + 1] = ycol_sz[i] = -1;
    ycol_vals[i]                                       = NULL;
  }
  ycol_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PetscCall(PCTFS_ivec_zero(stages, level + 1));
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors  */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
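  /* the initial size below is a heuristic: O(n_global^{5/3}) global fill
     (typical of nested-dissection style factors in 3d) plus the separator
     replication term, split evenly across processors; x and y are doubled
     on demand during generation if the guess proves too small */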
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x                       = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
  y                       = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
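  /* each iteration fires one separator dof (global column col): set v = e_col,
     apply A, project out previously generated columns (uu = Y^T A v, v -= X uu),
     reapply A, and scale v and u = A v by 1/sqrt(u^T u); the surviving nonzero
     spans of v and u become column i of X and Y respectively */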
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    col = (start < end) ? fo[start] : INT_MAX;
    PetscCall(PCTFS_giop_hc(&col, &work, 1, op2, dim));

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(0, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PetscCall(PCTFS_rvec_zero(v, a_m));
    if (col == fo[start]) {
      start++;
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idx != -1) {
        v[idx] = 1.0;
        j++;
      } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
    } else {
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idx != -1) v[idx] = 1.0;
    }

    /* perform u = A.v_l */
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xyt_handle->mvi, v, u));

    /* uu = Y^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XYT_solve call ? */
    PetscCall(PCTFS_rvec_zero(uu, m));
    y_ptr = y;
    iptr  = ycol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, y_ptr, &i1));
      y_ptr += len;
    }

    /* uu = Y^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PetscCall(PCTFS_rvec_zero(z, n));
    x_ptr = x;
    iptr  = xcol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PetscCall(PCTFS_rvec_zero(v + a_n, a_m - a_n));
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim));
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xyt_handle->mvi, v, u));

    /* compute alpha = u_l^T.u_l - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, u, &i1));
    /* compute alpha = u_l^T.u_l - comm portion */
    PetscCall(PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim));

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) and u_l = u_l/sqrt(alpha) */
    PetscCall(PCTFS_rvec_scale(v, 1.0 / alpha, n));
    PetscCall(PCTFS_rvec_scale(u, 1.0 / alpha, n));

    /* add newly generated column, v_l, to X */
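    /* compress the column to its contiguous nonzero span: off is the index of
       the first nonzero and, after the adjustment below, len = last - off + 1;
       the identical compression is applied to u_l for Y further down */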
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);

    if (len > 0) {
      if ((xt_nnz + len) > xt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for X by 2x!\n"));
        xt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(x_ptr, x, xt_nnz));
        free(x);
        x = x_ptr;
        x_ptr += xt_nnz;
      }
      xt_nnz += len;
      PetscCall(PCTFS_rvec_copy(x_ptr, v + off, len));

      xcol_indices[2 * i] = off;
      xcol_sz[i] = xcol_indices[2 * i + 1] = len;
      xcol_vals[i]                         = x_ptr;
    } else {
      xcol_indices[2 * i] = 0;
      xcol_sz[i] = xcol_indices[2 * i + 1] = 0;
      xcol_vals[i]                         = x_ptr;
    }

    /* add newly generated column, u_l, to Y */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (u[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);

    if (len > 0) {
      if ((yt_nnz + len) > yt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for Y by 2x!\n"));
        yt_max_nnz *= 2;
        y_ptr = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(y_ptr, y, yt_nnz));
        free(y);
        y = y_ptr;
        y_ptr += yt_nnz;
      }
      yt_nnz += len;
      PetscCall(PCTFS_rvec_copy(y_ptr, u + off, len));

      ycol_indices[2 * i] = off;
      ycol_sz[i] = ycol_indices[2 * i + 1] = len;
      ycol_vals[i]                         = y_ptr;
    } else {
      ycol_indices[2 * i] = 0;
      ycol_sz[i] = ycol_indices[2 * i + 1] = 0;
      ycol_vals[i]                         = y_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xyt_handle->info->n            = xyt_handle->mvi->n;
  xyt_handle->info->m            = m;
  xyt_handle->info->nnz          = xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz      = xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz   = stages[level] - stages[0];
  xyt_handle->info->solve_uu     = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->solve_w      = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->x            = x;
  xyt_handle->info->xcol_vals    = xcol_vals;
  xyt_handle->info->xcol_sz      = xcol_sz;
  xyt_handle->info->xcol_indices = xcol_indices;
  xyt_handle->info->stages       = stages;
  xyt_handle->info->y            = y;
  xyt_handle->info->ycol_vals    = ycol_vals;
  xyt_handle->info->ycol_sz      = ycol_sz;
  xyt_handle->info->ycol_indices = ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  PetscFunctionReturn(PETSC_SUCCESS);
}

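/*
   Apply the factored inverse in place: uc <- X (Y^T uc). Each loop walks a
   -1-terminated list of (off,len) pairs describing the contiguous nonzero
   span of one stored column; the intermediate uu is summed across the
   hypercube by PCTFS_ssgl_radd() between the two products.
*/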
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level        = xyt_handle->level;
  PetscInt     n            = xyt_handle->info->n;
  PetscInt     m            = xyt_handle->info->m;
  PetscInt    *stages       = xyt_handle->info->stages;
  PetscInt    *xcol_indices = xyt_handle->info->xcol_indices;
  PetscInt    *ycol_indices = xyt_handle->info->ycol_indices;
  PetscScalar *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar *solve_uu = xyt_handle->info->solve_uu;
  PetscScalar *solve_w  = xyt_handle->info->solve_w;
  PetscScalar *x        = xyt_handle->info->x;
  PetscScalar *y        = xyt_handle->info->y;
  PetscBLASInt i1       = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PetscCall(PCTFS_rvec_zero(uu_ptr, m));

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (y_ptr = y, iptr = ycol_indices; *iptr != -1; y_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, y_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));
  PetscCall(PCTFS_rvec_zero(uc, n));

  /* x = X.uu */
  for (x_ptr = x, iptr = xcol_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode check_handle(xyt_ADT xyt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xyt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", (void *)xyt_handle);

  vals[0] = vals[1] = xyt_handle->id;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));
  PetscCheck(vals[0] == vals[1] && xyt_handle->id > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xyt_handle->id);
  PetscFunctionReturn(PETSC_SUCCESS);
}

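/*
   Separator determination walks the hypercube top down: at each level the
   active processors split into a lower and an upper half (via mask), each
   half fires a vector of ones through the gather-scatter, and any unmarked
   locally owned dof that receives a signal from the opposite half lies on
   the interface. Since the operator is nonsymmetric, dofs from both halves
   are kept (separators of width 2), appended from the back of the firing
   order fo[], and the recursion continues on the lower half until only the
   purely local level-0 dofs remain.
*/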
static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xyt_handle->mvi->local2global;
  PetscInt     n               = xyt_handle->mvi->n;
  PetscInt     m               = xyt_handle->mvi->m;
  PetscInt     level           = xyt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PetscCall(PCTFS_ivec_zero(dir, level + 1));
  PetscCall(PCTFS_ivec_zero(nsep, level + 1));
  PetscCall(PCTFS_ivec_zero(lnsep, level + 1));
  PetscCall(PCTFS_ivec_set(fo, -1, n + 1));
  PetscCall(PCTFS_ivec_zero(used, n));

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

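  /* count unique global dofs: each processor fires ones on its n owned dofs;
     after the gather-scatter lhs[i] holds the multiplicity of dof i, so
     summing 1/lhs[i] counts each global dof exactly once (the 0.1 added
     below guards against float truncation when casting to PetscInt) */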
  /* determine the # of unique dof */
  PetscCall(PCTFS_rvec_zero(lhs, m));
  PetscCall(PCTFS_rvec_set(lhs, 1.0, n));
  PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level));
  PetscCall(PetscInfo(0, "done first PCTFS_gs_gop_hc\n"));
  PetscCall(PCTFS_rvec_zero(rsum, 2));
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
    if (lhs[i] != 1.0) shared = 1;
  }

  PetscCall(PCTFS_grop_hc(rsum, rw, 2, op, level));
  rsum[0] += 0.1;
  rsum[1] += 0.1;

  xyt_handle->info->n_global = xyt_handle->info->m_global = (PetscInt)rsum[0];
  xyt_handle->mvi->n_global = xyt_handle->mvi->m_global = (PetscInt)rsum[0];

  /* determine separator sets top down */
  PetscCheck(!shared, PETSC_COMM_SELF, PETSC_ERR_PLIB, "shared dof separator determination not ready ... see hmt!!!");
  /* solution is to do as in the symmetric shared case but then */
  /* pick the sub-hc with the most free dofs and do a mat-vec   */
  /* and pick up the responses on the other sub-hc from the     */
  /* initial separator set obtained from the symm. shared case  */
  /* [dead code deleted since it is unlikely to be completed] */
  for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
    /* set rsh of hc, fire, and collect lhs responses */
    PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
    PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

    /* set lsh of hc, fire, and collect rhs responses */
    PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
    PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

    /* count number of dofs I own that have signal and are not in the sep set */
    PetscCall(PCTFS_ivec_zero(sum, 4));
    for (ct = i = 0; i < n; i++) {
      if (!used[i]) {
        /* number of unmarked dofs on node */
        ct++;
        /* number of dofs to be marked on lhs hc */
        if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
        /* number of dofs to be marked on rhs hc */
        if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
      }
    }

    /* for the non-symmetric case we need separators of width 2 */
    /* so take both sides */
    (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
    PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));

    ct = 0;
    if (id < mask) {
      /* mark dofs I own that have signal and are not in the sep set */
      for (i = 0; i < n; i++) {
        if ((!used[i]) && (lhs[i] != 0.0)) {
          ct++;
          nfo++;
          *--iptr = local2global[i];
          used[i] = edge;
        }
      }
      /* LSH hc summation of ct should be sum[0] */
    } else {
      /* mark dofs I own that have signal and are not in the sep set */
      for (i = 0; i < n; i++) {
        if ((!used[i]) && (rhs[i] != 0.0)) {
          ct++;
          nfo++;
          *--iptr = local2global[i];
          used[i] = edge;
        }
      }
      /* RSH hc summation of ct should be sum[1] */
    }

    if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
    lnsep[edge] = ct;
    nsep[edge]  = sum[0] + sum[1];
    dir[edge]   = BOTH;

    /* LATER or we can recur on these to order seps at this level */
    /* do we need full set of separators for this?                */

    /* fold rhs hc into lower */
    if (id >= mask) id -= mask;
  }

  /* level 0 is the on-processor case - so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = BOTH;

  xyt_handle->info->nsep  = nsep;
  xyt_handle->info->lnsep = lnsep;
  xyt_handle->info->fo    = fo;
  xyt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info *)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));

  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return mvi;
}

static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  PetscCall(A->matvec((mv_info *)A->grid_data, v, u));
  PetscFunctionReturn(PETSC_SUCCESS);
}
753