/*************************************xxt.c************************************
Module Name: xxt
Module Info: parallel XXT coarse-grid solver (sparse factor X with X.X^T
applied as the inverse of the assembled coarse operator; factor built by
`xxt_generate`, applied by `do_xxt_solve`).

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

/* hypercube-half direction tags used by det_separators() */
#define LEFT  -1
#define RIGHT 1
#define BOTH  0

/* Per-handle factorization data: the sparse factor X stored column-wise. */
typedef struct xxt_solver_info {
  PetscInt      n, m, n_global, m_global;   /* local rows/cols of X and their global counts */
  PetscInt      nnz, max_nnz, msg_buf_sz;   /* nonzeros in x[], capacity of x[], comm buffer size */
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages; /* separator sizes (global/local), firing order, stage offsets */
  PetscInt     *col_sz, *col_indices;       /* per-column length; (offset,length) pairs, -1 terminated */
  PetscScalar **col_vals, *x, *solve_uu, *solve_w; /* column pointers into x[]; packed values; solve scratch */
  PetscInt      nsolves;                    /* number of solves performed (for stats averaging) */
  PetscScalar   tot_solve_time;             /* accumulated solve time (for stats averaging) */
} xxt_info;

/* Matvec context: local-to-global map plus the user's A_local matvec. */
typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;    /* local rows n, local cols m (n<=m); globals set by det_separators */
  PetscInt    *local2global;                /* global id of local index i; copy owned by this struct */
  PCTFS_gs_ADT PCTFS_gs_handle;             /* gather/scatter handle over local2global */
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;                          /* opaque user context handed to matvec (see do_matvec) */
} mv_info;

struct xxt_CDT {
  PetscInt  id;       /* unique handle id (from rolling counter n_xxt) */
  PetscInt  ns;       /* null-space dimension; always 0 here (full rank assumed) */
  PetscInt  level;    /* log2(#nodes); set in XXT_factor */
  xxt_info *info;     /* factorization data; NULL until XXT_factor */
  mv_info  *mvi;      /* matvec context; NULL until XXT_factor */
};

static PetscInt n_xxt         = 0; /* rolling id counter - never decremented (see XXT_new note) */
static PetscInt n_xxt_handles = 0; /* live handle count */

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle);
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle);
static mv_info       *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);

/**************************************xxt.c***********************************/
/* Allocate an empty handle; info/mvi stay NULL until XXT_factor() fills them.
   NOTE: malloc result is unchecked, matching the module's allocation style. */
xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... pot. problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL;
  xxt_handle->mvi  = NULL;

  return (xxt_handle);
}

/**************************************xxt.c***********************************/
/* Build the XXT factorization: validate the handle, require a power-of-two
   number of nodes, set up the matvec context, compute separators, then
   generate the sparse factor X. */
PetscErrorCode XXT_factor(xxt_ADT xxt_handle,                                          /* prev. allocated xxt  handle */
                          PetscInt *local2global,                                      /* global column mapping       */
                          PetscInt  n,                                                 /* local num rows              */
                          PetscInt  m,                                                 /* local num cols              */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc      */
                          void *grid_data)                                             /* grid data for matvec        */
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xxt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %d != %d", 1 << PCTFS_i_log2_num_nodes, PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info *)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  /* NOTE(review): the (void*)-taking matvec is cast to an (mv_info*)-taking
     function pointer; do_matvec() later passes grid_data in that slot, so the
     callback effectively still receives its own context - confirm against the
     registered matvec's actual signature. */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode(*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns = 0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return (do_xxt_factor(xxt_handle));
}

/**************************************xxt.c***********************************/
/* Solve using the prebuilt factor: x <- solution. If b is non-NULL it is
   copied into x first; otherwise x is used as the rhs in place. */
PetscErrorCode XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b) PCTFS_rvec_copy(x, b, xxt_handle->mvi->n);
  return do_xxt_solve(xxt_handle, x);
}

/**************************************xxt.c***********************************/
/* Release all storage owned by the handle (factor arrays, matvec context,
   gs handle, the handle itself). Assumes XXT_factor() completed, since
   info/mvi members are dereferenced unconditionally. */
PetscInt XXT_free(xxt_ADT xxt_handle)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return (0);
}

/**************************************xxt.c***********************************/
/* Print global min/max/avg/total of nnz, local size, and message-buffer size,
   plus min/max/avg solve time, reduced over all nodes; rank 0 prints. */
PetscErrorCode XXT_stats(xxt_ADT xxt_handle)
{
  /* op[0]=NON_UNIFORM tells PCTFS_giop each entry has its own reduction;
     the following 9 ops line up with vals[0..8] (min/max/add triples). */
  PetscInt    op[]  = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* if factorization not done there are no stats */
  if (!xxt_handle->info || !xxt_handle->mvi) {
    if (!PCTFS_my_id) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "XXT_stats() :: no stats available!\n"));
    PetscFunctionReturn(1);
  }

  vals[0] = vals[1] = vals[2] = xxt_handle->info->nnz;
  vals[3] = vals[4] = vals[5] = xxt_handle->mvi->n;
  vals[6] = vals[7] = vals[8] = xxt_handle->info->msg_buf_sz;
  PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op);

  /* NOTE(review): nsolves++ here means calling XXT_stats mutates the
     average's denominator; division by zero if no solve was ever run -
     presumably stats are only printed after at least one solve. */
  fvals[0] = fvals[1] = fvals[2] = xxt_handle->info->tot_solve_time / xxt_handle->info->nsolves++;
  PCTFS_grop(fvals, fwork, PETSC_STATIC_ARRAY_LENGTH(fop) - 1, fop);

  if (!PCTFS_my_id) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xxt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[0]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xxt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[1]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xxt_nnz=%g\n", PCTFS_my_id, (double)(1.0 * vals[2] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: tot   xxt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[2]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: xxt   C(2d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.5)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: xxt   C(3d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.6667)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xxt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[3]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xxt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[4]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xxt_n  =%g\n", PCTFS_my_id, (double)(1.0 * vals[5] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: tot   xxt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[5]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xxt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[6]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xxt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[7]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xxt_buf=%g\n", PCTFS_my_id, (double)(1.0 * vals[8] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xxt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[0])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xxt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[1])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xxt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[2] / PCTFS_num_nodes)));
  }
  PetscFunctionReturn(0);
}

/*************************************xxt.c************************************

Description: get A_local, local portion of global coarse matrix which
is a row dist. nxm matrix w/ n<m.
   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
     PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xxt.c***********************************/
/* Thin wrapper kept for symmetry with the description above; all the work
   happens in xxt_generate(). */
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}

/**************************************xxt.c***********************************/
/* Core factor generation: "fire" separator columns in the global order fo[],
   orthogonalize each new column v against the existing columns of X via
   uu = X^T.(A.v), z = X.uu, v <- (v - z)/sqrt(v^T.A.v), and append the
   nonzero run of v to the packed storage x[]. */
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt      i, j, k, idex;
  PetscInt      dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt      op[] = {GL_ADD, 0};
  PetscInt      off, len;
  PetscScalar  *x_ptr;
  PetscInt     *iptr, flag;
  PetscInt      start = 0, end, work;
  PetscInt      op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT  PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt      a_n            = xxt_handle->mvi->n;
  PetscInt      a_m            = xxt_handle->mvi->m;
  PetscInt     *a_local2global = xxt_handle->mvi->local2global;
  PetscInt      level;
  PetscInt      xxt_nnz = 0, xxt_max_nnz = 0;
  PetscInt      n, m;
  PetscInt     *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  PetscInt      n_global;
  PetscBLASInt  i1 = 1, dlen;
  PetscScalar   dm1 = -1.0;

  n               = xxt_handle->mvi->n;
  nsep            = xxt_handle->info->nsep;
  lnsep           = xxt_handle->info->lnsep;
  fo              = xxt_handle->info->fo;
  end             = lnsep[0];
  level           = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xxt_handle->ns;
  if (m != j) { PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xxt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xxt_handle->ns)); }

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  /* col_indices holds (offset,length) pairs per column, terminated by -1 */
  col_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  col_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  col_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    col_indices[j] = col_indices[j + 1] = col_sz[i] = -1;
    col_vals[i]    = NULL;
  }
  col_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PCTFS_ivec_zero(stages, level + 1);
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i]; /* prefix sums -> column index where each level starts */
  stages[0] = segs[0];

  /* temporary vectors  */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  /* NOTE(review): heuristic capacity estimate; growth below doubles only
     once per overflow - assumes a single column never exceeds the current
     capacity on its own (TODO confirm). */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x           = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
  xxt_nnz     = 0;

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start < end) ? (col = fo[start]) : (col = INT_MAX);
    PCTFS_giop_hc(&col, &work, 1, op2, dim);

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(0, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v, a_m);
    if (col == fo[start]) {
      start++;
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idex != -1) {
        v[idex] = 1.0;
        j++;
      } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
    } else {
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex != -1) v[idex] = 1.0;
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u, n);
    do_matvec(xxt_handle->mvi, v, u);

    /* uu =  X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    PCTFS_rvec_zero(uu, m);
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, x_ptr, &i1));
      x_ptr += len;
    }

    /* uu = X^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PCTFS_rvec_zero(z, n);
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v + a_n, a_m - a_n);
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim);
    PCTFS_rvec_zero(u, n);
    do_matvec(xxt_handle->mvi, v, u);

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, v, &i1));
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v, 1.0 / alpha, n);

    /* add newly generated column, v_l, to X */
    /* scan for the first (off) and last (len, reused) nonzero of v */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1); /* len becomes run length: last - first + 1 */

    if (len > 0) {
      if ((xxt_nnz + len) > xxt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for X by 2x!\n"));
        xxt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr, x, xxt_nnz);
        free(x);
        x = x_ptr;
        x_ptr += xxt_nnz;
      }
      /* NOTE(review): in the non-grow path x_ptr already points at
         x + xxt_nnz because the two k-loops above advanced it over every
         stored column - intentional but subtle. */
      xxt_nnz += len;
      PCTFS_rvec_copy(x_ptr, v + off, len);

      col_indices[2 * i] = off;
      col_sz[i] = col_indices[2 * i + 1] = len;
      col_vals[i]                        = x_ptr;
    } else {
      col_indices[2 * i] = 0;
      col_sz[i] = col_indices[2 * i + 1] = 0;
      col_vals[i]                        = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xxt_handle->info->n           = xxt_handle->mvi->n;
  xxt_handle->info->m           = m;
  xxt_handle->info->nnz         = xxt_nnz;
  xxt_handle->info->max_nnz     = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz  = stages[level] - stages[0];
  xxt_handle->info->solve_uu    = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->solve_w     = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->x           = x;
  xxt_handle->info->col_vals    = col_vals;
  xxt_handle->info->col_sz      = col_sz;
  xxt_handle->info->col_indices = col_indices;
  xxt_handle->info->stages      = stages;
  xxt_handle->info->nsolves     = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return (0);
}

/**************************************xxt.c***********************************/
/* Apply the factor: uc <- X.(X^T.uc), with the cross-processor part of the
   X^T product handled by PCTFS_ssgl_radd over the precomputed stages. */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level       = xxt_handle->level;
  PetscInt     n           = xxt_handle->info->n;
  PetscInt     m           = xxt_handle->info->m;
  PetscInt    *stages      = xxt_handle->info->stages;
  PetscInt    *col_indices = xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu = xxt_handle->info->solve_uu;
  PetscScalar *solve_w  = xxt_handle->info->solve_w;
  PetscScalar *x        = xxt_handle->info->x;
  PetscBLASInt i1 = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PCTFS_rvec_zero(uu_ptr, m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b   */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, x_ptr, &i1));
  }

  /* comunication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));

  PCTFS_rvec_zero(uc, n);

  /* x = X.uu */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
/* Sanity check: handle is non-NULL, its id is positive, and all ranks agree
   on the id (global min == max via PCTFS_giop). */
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xxt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", xxt_handle);

  vals[0] = vals[1] = xxt_handle->id;
  PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op);
  PetscCheck(!(vals[0] != vals[1]) && !(xxt_handle->id <= 0), PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xxt_handle->id);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
/* Recursive-bisection separator detection over the processor hypercube,
   top down: at each edge (hypercube dimension) the dofs that receive signal
   from the opposite half form the separator; they are recorded into fo[]
   (filled from the end backwards) and marked in used[]. The `shared` branch
   handles dofs replicated across the interface (weighted counts via 1/lhs);
   the unshared branch picks the half with the most unmarked dofs. */
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xxt_handle->mvi->local2global;
  PetscInt     n               = xxt_handle->mvi->n;
  PetscInt     m               = xxt_handle->mvi->m;
  PetscInt     level           = xxt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PCTFS_ivec_zero(dir, level + 1);
  PCTFS_ivec_zero(nsep, level + 1);
  PCTFS_ivec_zero(lnsep, level + 1);
  PCTFS_ivec_set(fo, -1, n + 1);
  PCTFS_ivec_zero(used, n);

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof */
  /* rsum[0] counts each dof once (1/multiplicity); rsum[1] counts with
     multiplicity - if they differ some dofs are shared across procs */
  PCTFS_rvec_zero(lhs, m);
  PCTFS_rvec_set(lhs, 1.0, n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level);
  PCTFS_rvec_zero(rsum, 2);
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
  }
  PCTFS_grop_hc(rsum, rw, 2, op, level);
  rsum[0] += 0.1; /* guard against float truncation on the (PetscInt) casts below */
  rsum[1] += 0.1;

  if (PetscAbsScalar(rsum[0] - rsum[1]) > EPS) shared = 1;

  xxt_handle->info->n_global = xxt_handle->info->m_global = (PetscInt)rsum[0];
  xxt_handle->mvi->n_global = xxt_handle->mvi->m_global = (PetscInt)rsum[0];

  /* determine separator sets top down */
  if (shared) {
    /* NOTE(review): distributed-dof case - should converge to the
       unshared branch's behavior when each dof lives on one proc */
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge);

      for (i = 0; i < n; i++) {
        if (id < mask) {
          if (lhs[i] != 0.0) lhs[i] = 1.0;
        }
        if (id >= mask) {
          if (rhs[i] != 0.0) rhs[i] = 1.0;
        }
      }

      if (id < mask) PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge - 1);
      else PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge - 1);

      /* count number of dofs I own that have signal and not in sep set */
      PCTFS_rvec_zero(rsum, 4);
      for (PCTFS_ivec_zero(sum, 4), ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id < mask) {
            if (lhs[i] != 0.0) {
              sum[0]++;
              rsum[0] += 1.0 / lhs[i];
            }
          }
          /* number of dofs to be marked on rhs hc */
          if (id >= mask) {
            if (rhs[i] != 0.0) {
              sum[1]++;
              rsum[1] += 1.0 / rhs[i];
            }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      (id < mask) ? (rsum[2] = ct) : (rsum[3] = ct);
      PCTFS_giop_hc(sum, w, 4, op, edge);
      PCTFS_grop_hc(rsum, rw, 4, op, edge);
      rsum[0] += 0.1;
      rsum[1] += 0.1;
      rsum[2] += 0.1;
      rsum[3] += 0.1;

      if (id < mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (lhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PCTFS_ivec_sort(iptr, ct);

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[0];
        dir[edge]   = LEFT;
      }

      if (id >= mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (rhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PCTFS_ivec_sort(iptr, ct);

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[1];
        dir[edge]   = RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  } else {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum, 4), ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      PCTFS_giop_hc(sum, w, 4, op, edge);

      /* lhs hc wins */
      if (sum[2] >= sum[3]) {
        if (id < mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (lhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PCTFS_ivec_sort(iptr, ct);
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[0];
        dir[edge]  = LEFT;
      } else { /* rhs hc wins */
        if (id >= mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (rhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PCTFS_ivec_sort(iptr, ct);
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[1];
        dir[edge]  = RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  }

  /* level 0 is on processor case - so mark the remainder */
  /* note: edge == 0 here (loop exit condition), so used[i] is set to 0 -
     distinguishable from unvisited only because every dof gets visited */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PCTFS_ivec_sort(iptr, ct);
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = LEFT;

  xxt_handle->info->nsep  = nsep;
  xxt_handle->info->lnsep = lnsep;
  xxt_handle->info->fo    = fo;
  xxt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
/* Build the matvec context: copy local2global (with an INT_MAX sentinel at
   index m) and create the gather/scatter handle used for restricted matvecs.
   Ownership of the copy and the gs handle passes to the caller (freed in
   XXT_free). */
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info *)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1; /* filled in later by det_separators() */
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return (mvi);
}

/**************************************xxt.c***********************************/
/* Invoke the user matvec: u = A_local.v.
   NOTE(review): grid_data is cast to mv_info* only to satisfy the stored
   function-pointer type; the callback originally took (void *) (see the cast
   in XXT_factor), so it receives its own opaque context - confirm no callee
   actually dereferences it as an mv_info. */
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  A->matvec((mv_info *)A->grid_data, v, u);
  PetscFunctionReturn(0);
}