/*
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL 60439               |Chicago, IL 60637               |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
*/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0

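/*
  A note on the layout below (an interpretive summary, not upstream
  documentation): xyt_info holds sparse factors X and Y such that the solve
  applies A^{-1} as X.Y^T (see do_xyt_solve()).  Each factor is stored by
  columns: xcol_vals[i] points into the flat array x at the nonzero run of
  column i, and xcol_indices holds (offset, length) pairs, one pair per
  column, terminated by -1.  The ycol_* fields mirror this for Y.
*/
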
typedef struct xyt_solver_info {
  PetscInt      n, m, n_global, m_global;
  PetscInt      nnz, max_nnz, msg_buf_sz;
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt     *xcol_sz, *xcol_indices;
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt      nsolves;
  PetscScalar   tot_solve_time;
} xyt_info;

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt    *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;

struct xyt_CDT {
  PetscInt  id;
  PetscInt  ns;
  PetscInt  level;
  xyt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xyt         = 0;
static PetscInt n_xyt_handles = 0;

/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle);
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);

xyt_ADT XYT_new(void)
{
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... potential problem here since the id counter only grows */
  n_xyt_handles++;
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return xyt_handle;
}
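
/*
  Typical life cycle of a handle (an illustrative sketch, not code from this
  file; local2global, n, m, matvec, and grid_data stand for whatever the
  caller's coarse grid provides):

    xyt_ADT handle = XYT_new();
    XYT_factor(handle, local2global, n, m, matvec, grid_data);
    XYT_solve(handle, x, b);   (* x = A^{-1}.b; repeat for each rhs *)
    XYT_free(handle);
*/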

PetscErrorCode XYT_factor(xyt_ADT xyt_handle,   /* prev. allocated xyt handle */
                          PetscInt *local2global, /* global column mapping */
                          PetscInt n,             /* local num rows */
                          PetscInt m,             /* local num cols */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc */
                          void *grid_data)        /* grid data for matvec */
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xyt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %" PetscInt_FMT " != %" PetscInt_FMT, (PetscInt)(1 << PCTFS_i_log2_num_nodes), PCTFS_num_nodes);

  /* space for X info */
  xyt_handle->info = (xyt_info *)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xyt_handle->ns = 0;

  /* determine separators and generate firing order - NB xyt info set here */
  PetscCall(det_separators(xyt_handle));

  PetscCall(do_xyt_factor(xyt_handle));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* need to copy b into x? */
  if (b) PetscCall(PCTFS_rvec_copy(x, b, xyt_handle->mvi->n));
  PetscCall(do_xyt_solve(xyt_handle, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XYT_free(xyt_ADT xyt_handle)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  PetscCall(PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle));
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* This function is currently not used */
PetscErrorCode XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* if factorization not done there are no stats */
  if (!xyt_handle->info || !xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "XYT_stats() :: no stats available!\n"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  vals[0] = vals[1] = vals[2] = xyt_handle->info->nnz;
  vals[3] = vals[4] = vals[5] = xyt_handle->mvi->n;
  vals[6] = vals[7] = vals[8] = xyt_handle->info->msg_buf_sz;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));

  fvals[0] = fvals[1] = fvals[2] = xyt_handle->info->tot_solve_time / xyt_handle->info->nsolves++;
  PetscCall(PCTFS_grop(fvals, fwork, PETSC_STATIC_ARRAY_LENGTH(fop) - 1, fop));

  if (!PCTFS_my_id) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[0]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[1]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg xyt_nnz=%g\n", PCTFS_my_id, (double)(1.0 * vals[2] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: tot xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[2]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: xyt C(2d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.5)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: xyt C(3d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.6667)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[3]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[4]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg xyt_n  =%g\n", PCTFS_my_id, (double)(1.0 * vals[5] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: tot xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[5]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[6]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[7]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg xyt_buf=%g\n", PCTFS_my_id, (double)(1.0 * vals[8] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[0])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[1])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[2] / PCTFS_num_nodes)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
Description: get A_local, the local portion of the global coarse matrix,
which is a row-distributed nxm matrix w/ n<m.
   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed
     using PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
*/
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}
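
/*
  What xyt_generate() builds (an interpretive sketch of the method, hedged
  rather than authoritative): columns x_i of X and y_i = A.x_i of Y are
  produced one separator dof at a time so that the y_i come out orthonormal,
  i.e. Y^T.Y = Y^T.A.X = I.  Each pass fires a canonical vector v for the
  chosen dof, orthogonalizes it against the columns built so far, and
  normalizes:

    v <- v - X.(Y^T.(A.v)),   u = A.v,   alpha = sqrt(u^T.u),
    x_i = v/alpha,            y_i = u/alpha

  With Y = A.X orthonormal, A^{-1}.b can then be applied as X.(Y^T.b),
  which is exactly what do_xyt_solve() does.
*/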

static PetscErrorCode xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt      i, j, k, idx;
  PetscInt      dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt      op[] = {GL_ADD, 0};
  PetscInt      off, len;
  PetscScalar  *x_ptr, *y_ptr;
  PetscInt     *iptr, flag;
  PetscInt      start = 0, end, work;
  PetscInt      op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT  PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt      a_n            = xyt_handle->mvi->n;
  PetscInt      a_m            = xyt_handle->mvi->m;
  PetscInt     *a_local2global = xyt_handle->mvi->local2global;
  PetscInt      level;
  PetscInt      n, m;
  PetscInt     *xcol_sz, *xcol_indices, *stages;
  PetscScalar **xcol_vals, *x;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt      n_global;
  PetscInt      xt_nnz = 0, xt_max_nnz = 0;
  PetscInt      yt_nnz = 0, yt_max_nnz = 0;
  PetscBLASInt  i1 = 1, dlen;
  PetscScalar   dm1 = -1.0;

  n               = xyt_handle->mvi->n;
  nsep            = xyt_handle->info->nsep;
  lnsep           = xyt_handle->info->lnsep;
  fo              = xyt_handle->info->fo;
  end             = lnsep[0];
  level           = xyt_handle->level;
  PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xyt_handle->ns;
  if (m != j) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xyt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xyt_handle->ns));

  PetscCall(PetscInfo(0, "xyt_generate() :: X(%" PetscInt_FMT ",%" PetscInt_FMT ")\n", n, m));

  /* get and initialize storage for x local */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  xcol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  xcol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    xcol_indices[j] = xcol_indices[j + 1] = xcol_sz[i] = -1;
    xcol_vals[i]    = NULL;
  }
  xcol_indices[j] = -1;

  /* get and initialize storage for y local */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  ycol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  ycol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    ycol_indices[j] = ycol_indices[j + 1] = ycol_sz[i] = -1;
    ycol_vals[i]    = NULL;
  }
  ycol_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PetscCall(PCTFS_ivec_zero(stages, level + 1));
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];
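
  /* segs[] is now a running total: segs[dim] is the number of separator dofs
     through level dim, i.e. where that level's columns end; stages[] records
     the column count at which each level closes and later drives the staged
     communication in PCTFS_ssgl_radd() */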

  /* temporary vectors */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x          = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
  y          = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));
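
  /* the nnz estimate above is only a heuristic (roughly n_global^(5/3)
     scaling plus the separator replication term j*n/2, split across
     processors); if a factor outgrows it, the buffers are doubled on the
     fly in the generation loop below */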

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start < end) ? (col = fo[start]) : (col = INT_MAX);
    PetscCall(PCTFS_giop_hc(&col, &work, 1, op2, dim));

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(0, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PetscCall(PCTFS_rvec_zero(v, a_m));
    if (col == fo[start]) {
      start++;
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      PetscCheck(idx != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
      v[idx] = 1.0;
      j++;
    } else {
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idx != -1) v[idx] = 1.0;
    }

    /* perform u = A.v_l */
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xyt_handle->mvi, v, u));

    /* uu = Y^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XYT_solve call ? */
    PetscCall(PCTFS_rvec_zero(uu, m));
    y_ptr = y;
    iptr  = ycol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, y_ptr, &i1));
      y_ptr += len;
    }

    /* uu = Y^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PetscCall(PCTFS_rvec_zero(z, n));
    x_ptr = x;
    iptr  = xcol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PetscCall(PCTFS_rvec_zero(v + a_n, a_m - a_n));
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim));
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xyt_handle->mvi, v, u));

    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, u, &i1));
    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
    PetscCall(PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim));

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) */
    PetscCall(PCTFS_rvec_scale(v, 1.0 / alpha, n));
    PetscCall(PCTFS_rvec_scale(u, 1.0 / alpha, n));

    /* add newly generated column, v_l, to X */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

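    /* off/len scan: off now holds the index of the first nonzero of v and
       len the index of the last; the next line turns len into the length of
       that contiguous span, which is all that gets stored */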
    len -= (off - 1);

    if (len > 0) {
      if ((xt_nnz + len) > xt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for X by 2x!\n"));
        xt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(x_ptr, x, xt_nnz));
        free(x);
        x = x_ptr;
        x_ptr += xt_nnz;
      }
      xt_nnz += len;
      PetscCall(PCTFS_rvec_copy(x_ptr, v + off, len));

      xcol_indices[2 * i] = off;
      xcol_sz[i] = xcol_indices[2 * i + 1] = len;
      xcol_vals[i]                         = x_ptr;
    } else {
      xcol_indices[2 * i] = 0;
      xcol_sz[i] = xcol_indices[2 * i + 1] = 0;
      xcol_vals[i]                         = x_ptr;
    }

    /* add newly generated column, u_l, to Y */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (u[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);

    if (len > 0) {
      if ((yt_nnz + len) > yt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for Y by 2x!\n"));
        yt_max_nnz *= 2;
        y_ptr = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(y_ptr, y, yt_nnz));
        free(y);
        y = y_ptr;
        y_ptr += yt_nnz;
      }
      yt_nnz += len;
      PetscCall(PCTFS_rvec_copy(y_ptr, u + off, len));

      ycol_indices[2 * i] = off;
      ycol_sz[i] = ycol_indices[2 * i + 1] = len;
      ycol_vals[i]                         = y_ptr;
    } else {
      ycol_indices[2 * i] = 0;
      ycol_sz[i] = ycol_indices[2 * i + 1] = 0;
      ycol_vals[i]                         = y_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xyt_handle->info->n            = xyt_handle->mvi->n;
  xyt_handle->info->m            = m;
  xyt_handle->info->nnz          = xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz      = xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz   = stages[level] - stages[0];
  xyt_handle->info->solve_uu     = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->solve_w      = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->x            = x;
  xyt_handle->info->xcol_vals    = xcol_vals;
  xyt_handle->info->xcol_sz      = xcol_sz;
  xyt_handle->info->xcol_indices = xcol_indices;
  xyt_handle->info->stages       = stages;
  xyt_handle->info->y            = y;
  xyt_handle->info->ycol_vals    = ycol_vals;
  xyt_handle->info->ycol_sz      = ycol_sz;
  xyt_handle->info->ycol_indices = ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return PETSC_SUCCESS;
}

static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level        = xyt_handle->level;
  PetscInt     n            = xyt_handle->info->n;
  PetscInt     m            = xyt_handle->info->m;
  PetscInt    *stages       = xyt_handle->info->stages;
  PetscInt    *xcol_indices = xyt_handle->info->xcol_indices;
  PetscInt    *ycol_indices = xyt_handle->info->ycol_indices;
  PetscScalar *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar *solve_uu = xyt_handle->info->solve_uu;
  PetscScalar *solve_w  = xyt_handle->info->solve_w;
  PetscScalar *x        = xyt_handle->info->x;
  PetscScalar *y        = xyt_handle->info->y;
  PetscBLASInt i1       = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PetscCall(PCTFS_rvec_zero(uu_ptr, m));

  /* x = X.Y^T.b */
  /* uu = Y^T.b */
  for (y_ptr = y, iptr = ycol_indices; *iptr != -1; y_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, y_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));
  PetscCall(PCTFS_rvec_zero(uc, n));

  /* x = X.uu */
  for (x_ptr = x, iptr = xcol_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode check_handle(xyt_ADT xyt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xyt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", (void *)xyt_handle);

  vals[0] = vals[1] = xyt_handle->id;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));
  PetscCheck(!(vals[0] != vals[1]) && !(xyt_handle->id <= 0), PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xyt_handle->id);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xyt_handle->mvi->local2global;
  PetscInt     n               = xyt_handle->mvi->n;
  PetscInt     m               = xyt_handle->mvi->m;
  PetscInt     level           = xyt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PetscCall(PCTFS_ivec_zero(dir, level + 1));
  PetscCall(PCTFS_ivec_zero(nsep, level + 1));
  PetscCall(PCTFS_ivec_zero(lnsep, level + 1));
  PetscCall(PCTFS_ivec_set(fo, -1, n + 1));
  PetscCall(PCTFS_ivec_zero(used, n));

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof */
  PetscCall(PCTFS_rvec_zero(lhs, m));
  PetscCall(PCTFS_rvec_set(lhs, 1.0, n));
  PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level));
  PetscCall(PetscInfo(0, "done first PCTFS_gs_gop_hc\n"));
  PetscCall(PCTFS_rvec_zero(rsum, 2));
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
    if (lhs[i] != 1.0) shared = 1;
  }

  PetscCall(PCTFS_grop_hc(rsum, rw, 2, op, level));
  rsum[0] += 0.1;
  rsum[1] += 0.1;

  xyt_handle->info->n_global = xyt_handle->info->m_global = (PetscInt)rsum[0];
  xyt_handle->mvi->n_global = xyt_handle->mvi->m_global = (PetscInt)rsum[0];

  /* determine separator sets top down */
  PetscCheck(!shared, PETSC_COMM_SELF, PETSC_ERR_PLIB, "shared dof separator determination not ready ... see hmt!!!");
  /* solution is to do as in the symmetric shared case but then */
  /* pick the sub-hc with the most free dofs and do a mat-vec   */
  /* and pick up the responses on the other sub-hc from the     */
  /* initial separator set obtained from the symm. shared case  */
  /* [dead code deleted since it is unlikely to be completed]   */
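  /* what the loop below does (an interpretive summary): working top down,
     each pass over edge splits the current hypercube in half on bit `mask`
     of the processor id.  One half fires all-ones into the gather/scatter
     while the other listens, and vice versa; any unmarked dof that picks up
     a signal from the other half sits on the interface, so it joins this
     level's separator, is pushed onto the firing order fo[] (built back to
     front), and is marked in used[].  Both sides are kept because the
     non-symmetric case needs separators of width 2. */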
  for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
    /* set rsh of hc, fire, and collect lhs responses */
    PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
    PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

    /* set lsh of hc, fire, and collect rhs responses */
    PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
    PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

    /* count number of dofs I own that have signal and not in sep set */
    PetscCall(PCTFS_ivec_zero(sum, 4));
    for (ct = i = 0; i < n; i++) {
      if (!used[i]) {
        /* number of unmarked dofs on node */
        ct++;
        /* number of dofs to be marked on lhs hc */
        if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
        /* number of dofs to be marked on rhs hc */
        if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
      }
    }

    /* for the non-symmetric case we need separators of width 2 */
    /* so take both sides */
    (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
    PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));

    ct = 0;
    if (id < mask) {
      /* mark dofs I own that have signal and not in sep set */
      for (i = 0; i < n; i++) {
        if ((!used[i]) && (lhs[i] != 0.0)) {
          ct++;
          nfo++;
          *--iptr = local2global[i];
          used[i] = edge;
        }
      }
      /* LSH hc summation of ct should be sum[0] */
    } else {
      /* mark dofs I own that have signal and not in sep set */
      for (i = 0; i < n; i++) {
        if ((!used[i]) && (rhs[i] != 0.0)) {
          ct++;
          nfo++;
          *--iptr = local2global[i];
          used[i] = edge;
        }
      }
      /* RSH hc summation of ct should be sum[1] */
    }

    if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
    lnsep[edge] = ct;
    nsep[edge]  = sum[0] + sum[1];
    dir[edge]   = BOTH;

    /* LATER or we can recur on these to order seps at this level */
    /* do we need full set of separators for this? */

    /* fold rhs hc into lower */
    if (id >= mask) id -= mask;
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = BOTH;

  xyt_handle->info->nsep  = nsep;
  xyt_handle->info->lnsep = lnsep;
  xyt_handle->info->fo    = fo;
  xyt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info *)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));

  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return mvi;
}

static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
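  /* the user's matvec was registered in XYT_factor() with a void* first
     argument and cast to take mv_info*; passing grid_data here hands the
     callee back its own context, so the mv_info* type on this call exists
     only to satisfy the compiler */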
  PetscCall(A->matvec((mv_info *)A->grid_data, v, u));
  PetscFunctionReturn(PETSC_SUCCESS);
}