1 2 /*************************************xxt.c************************************ 3 Module Name: xxt 4 Module Info: 5 6 author: Henry M. Tufo III 7 e-mail: hmt@asci.uchicago.edu 8 contact: 9 +--------------------------------+--------------------------------+ 10 |MCS Division - Building 221 |Department of Computer Science | 11 |Argonne National Laboratory |Ryerson 152 | 12 |9700 S. Cass Avenue |The University of Chicago | 13 |Argonne, IL 60439 |Chicago, IL 60637 | 14 |(630) 252-5354/5986 ph/fx |(773) 702-6019/8487 ph/fx | 15 +--------------------------------+--------------------------------+ 16 17 Last Modification: 3.20.01 18 **************************************xxt.c***********************************/ 19 #include <../src/ksp/pc/impls/tfs/tfs.h> 20 21 #define LEFT -1 22 #define RIGHT 1 23 #define BOTH 0 24 25 typedef struct xxt_solver_info { 26 PetscInt n, m, n_global, m_global; 27 PetscInt nnz, max_nnz, msg_buf_sz; 28 PetscInt *nsep, *lnsep, *fo, nfo, *stages; 29 PetscInt *col_sz, *col_indices; 30 PetscScalar **col_vals, *x, *solve_uu, *solve_w; 31 PetscInt nsolves; 32 PetscScalar tot_solve_time; 33 } xxt_info; 34 35 typedef struct matvec_info { 36 PetscInt n, m, n_global, m_global; 37 PetscInt *local2global; 38 PCTFS_gs_ADT PCTFS_gs_handle; 39 PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*); 40 void *grid_data; 41 } mv_info; 42 43 struct xxt_CDT{ 44 PetscInt id; 45 PetscInt ns; 46 PetscInt level; 47 xxt_info *info; 48 mv_info *mvi; 49 }; 50 51 static PetscInt n_xxt=0; 52 static PetscInt n_xxt_handles=0; 53 54 /* prototypes */ 55 static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs); 56 static PetscErrorCode check_handle(xxt_ADT xxt_handle); 57 static PetscErrorCode det_separators(xxt_ADT xxt_handle); 58 static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u); 59 static PetscInt xxt_generate(xxt_ADT xxt_handle); 60 static PetscInt do_xxt_factor(xxt_ADT xxt_handle); 61 static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data); 62 63 /**************************************xxt.c***********************************/ 64 xxt_ADT XXT_new(void) 65 { 66 xxt_ADT xxt_handle; 67 68 /* rolling count on n_xxt ... pot. problem here */ 69 n_xxt_handles++; 70 xxt_handle = (xxt_ADT)malloc(sizeof(struct xxt_CDT)); 71 xxt_handle->id = ++n_xxt; 72 xxt_handle->info = NULL; xxt_handle->mvi = NULL; 73 74 return(xxt_handle); 75 } 76 77 /**************************************xxt.c***********************************/ 78 PetscInt XXT_factor(xxt_ADT xxt_handle, /* prev. allocated xxt handle */ 79 PetscInt *local2global, /* global column mapping */ 80 PetscInt n, /* local num rows */ 81 PetscInt m, /* local num cols */ 82 void *matvec, /* b_loc=A_local.x_loc */ 83 void *grid_data /* grid data for matvec */ 84 ) 85 { 86 PCTFS_comm_init(); 87 check_handle(xxt_handle); 88 89 /* only 2^k for now and all nodes participating */ 90 if ((1<<(xxt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes); 91 92 /* space for X info */ 93 xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info)); 94 95 /* set up matvec handles */ 96 xxt_handle->mvi = set_mvi(local2global, n, m, matvec, grid_data); 97 98 /* matrix is assumed to be of full rank */ 99 /* LATER we can reset to indicate rank def. */ 100 xxt_handle->ns=0; 101 102 /* determine separators and generate firing order - NB xxt info set here */ 103 det_separators(xxt_handle); 104 105 return(do_xxt_factor(xxt_handle)); 106 } 107 108 /**************************************xxt.c***********************************/ 109 PetscInt XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b) 110 { 111 112 PCTFS_comm_init(); 113 check_handle(xxt_handle); 114 115 /* need to copy b into x? */ 116 if (b) {PCTFS_rvec_copy(x,b,xxt_handle->mvi->n);} 117 do_xxt_solve(xxt_handle,x); 118 119 return(0); 120 } 121 122 /**************************************xxt.c***********************************/ 123 PetscInt XXT_free(xxt_ADT xxt_handle) 124 { 125 126 PCTFS_comm_init(); 127 check_handle(xxt_handle); 128 n_xxt_handles--; 129 130 free(xxt_handle->info->nsep); 131 free(xxt_handle->info->lnsep); 132 free(xxt_handle->info->fo); 133 free(xxt_handle->info->stages); 134 free(xxt_handle->info->solve_uu); 135 free(xxt_handle->info->solve_w); 136 free(xxt_handle->info->x); 137 free(xxt_handle->info->col_vals); 138 free(xxt_handle->info->col_sz); 139 free(xxt_handle->info->col_indices); 140 free(xxt_handle->info); 141 free(xxt_handle->mvi->local2global); 142 PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle); 143 free(xxt_handle->mvi); 144 free(xxt_handle); 145 146 /* if the check fails we nuke */ 147 /* if NULL pointer passed to free we nuke */ 148 /* if the calls to free fail that's not my problem */ 149 return(0); 150 } 151 152 /**************************************xxt.c***********************************/ 153 PetscInt XXT_stats(xxt_ADT xxt_handle) 154 { 155 PetscInt op[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD}; 156 PetscInt fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD}; 157 PetscInt vals[9], work[9]; 158 PetscScalar fvals[3], fwork[3]; 159 PetscErrorCode ierr; 160 161 PCTFS_comm_init(); 162 check_handle(xxt_handle); 163 164 /* if factorization not done there are no stats */ 165 if (!xxt_handle->info||!xxt_handle->mvi) { 166 if (!PCTFS_my_id) { ierr = PetscPrintf(PETSC_COMM_WORLD,"XXT_stats() :: no stats available!\n");CHKERRQ(ierr); } 167 return 1; 168 } 169 170 vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz; 171 vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n; 172 vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz; 173 PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op); 174 175 fvals[0]=fvals[1]=fvals[2] 176 =xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++; 177 PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop); 178 179 if (!PCTFS_my_id) { 180 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_nnz=%D\n",PCTFS_my_id,vals[0]);CHKERRQ(ierr); 181 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_nnz=%D\n",PCTFS_my_id,vals[1]);CHKERRQ(ierr); 182 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);CHKERRQ(ierr); 183 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xxt_nnz=%D\n",PCTFS_my_id,vals[2]);CHKERRQ(ierr); 184 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt C(2d) =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.5)));CHKERRQ(ierr); 185 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt C(3d) =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.6667)));CHKERRQ(ierr); 186 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_n =%D\n",PCTFS_my_id,vals[3]);CHKERRQ(ierr); 187 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_n =%D\n",PCTFS_my_id,vals[4]);CHKERRQ(ierr); 188 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_n =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);CHKERRQ(ierr); 189 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xxt_n =%D\n",PCTFS_my_id,vals[5]);CHKERRQ(ierr); 190 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_buf=%D\n",PCTFS_my_id,vals[6]);CHKERRQ(ierr); 191 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_buf=%D\n",PCTFS_my_id,vals[7]);CHKERRQ(ierr); 192 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);CHKERRQ(ierr); 193 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_slv=%g\n",PCTFS_my_id,fvals[0]);CHKERRQ(ierr); 194 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_slv=%g\n",PCTFS_my_id,fvals[1]);CHKERRQ(ierr); 195 ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);CHKERRQ(ierr); 196 } 197 198 return(0); 199 } 200 201 /*************************************xxt.c************************************ 202 203 Description: get A_local, local portion of global coarse matrix which 204 is a row dist. nxm matrix w/ n<m. 205 o my_ml holds address of ML struct associated w/A_local and coarse grid 206 o local2global holds global number of column i (i=0,...,m-1) 207 o local2global holds global number of row i (i=0,...,n-1) 208 o mylocmatvec performs A_local . vec_local (note that gs is performed using 209 PCTFS_gs_init/gop). 210 211 mylocmatvec = my_ml->Amat[grid_tag].matvec->external; 212 mylocmatvec (void :: void *data, double *in, double *out) 213 **************************************xxt.c***********************************/ 214 static PetscInt do_xxt_factor(xxt_ADT xxt_handle) 215 { 216 return xxt_generate(xxt_handle); 217 } 218 219 /**************************************xxt.c***********************************/ 220 static PetscInt xxt_generate(xxt_ADT xxt_handle) 221 { 222 PetscInt i,j,k,idex; 223 PetscInt dim, col; 224 PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w; 225 PetscInt *segs; 226 PetscInt op[] = {GL_ADD,0}; 227 PetscInt off, len; 228 PetscScalar *x_ptr; 229 PetscInt *iptr, flag; 230 PetscInt start=0, end, work; 231 PetscInt op2[] = {GL_MIN,0}; 232 PCTFS_gs_ADT PCTFS_gs_handle; 233 PetscInt *nsep, *lnsep, *fo; 234 PetscInt a_n=xxt_handle->mvi->n; 235 PetscInt a_m=xxt_handle->mvi->m; 236 PetscInt *a_local2global=xxt_handle->mvi->local2global; 237 PetscInt level; 238 PetscInt xxt_nnz=0, xxt_max_nnz=0; 239 PetscInt n, m; 240 PetscInt *col_sz, *col_indices, *stages; 241 PetscScalar **col_vals, *x; 242 PetscInt n_global; 243 PetscInt xxt_zero_nnz=0; 244 PetscInt xxt_zero_nnz_0=0; 245 PetscBLASInt i1 = 1,dlen; 246 PetscScalar dm1 = -1.0; 247 PetscErrorCode ierr; 248 249 n=xxt_handle->mvi->n; 250 nsep=xxt_handle->info->nsep; 251 lnsep=xxt_handle->info->lnsep; 252 fo=xxt_handle->info->fo; 253 end=lnsep[0]; 254 level=xxt_handle->level; 255 PCTFS_gs_handle=xxt_handle->mvi->PCTFS_gs_handle; 256 257 /* is there a null space? */ 258 /* LATER add in ability to detect null space by checking alpha */ 259 for (i=0, j=0; i<=level; i++) 260 {j+=nsep[i];} 261 262 m = j-xxt_handle->ns; 263 if (m!=j) { 264 ierr = PetscPrintf(PETSC_COMM_WORLD,"xxt_generate() :: null space exists %D %D %D\n",m,j,xxt_handle->ns);CHKERRQ(ierr); 265 } 266 267 /* get and initialize storage for x local */ 268 /* note that x local is nxm and stored by columns */ 269 col_sz = (PetscInt*) malloc(m*sizeof(PetscInt)); 270 col_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt)); 271 col_vals = (PetscScalar **) malloc(m*sizeof(PetscScalar *)); 272 for (i=j=0; i<m; i++, j+=2) { 273 col_indices[j]=col_indices[j+1]=col_sz[i]=-1; 274 col_vals[i] = NULL; 275 } 276 col_indices[j]=-1; 277 278 /* size of separators for each sub-hc working from bottom of tree to top */ 279 /* this looks like nsep[]=segments */ 280 stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt)); 281 segs = (PetscInt*) malloc((level+1)*sizeof(PetscInt)); 282 PCTFS_ivec_zero(stages,level+1); 283 PCTFS_ivec_copy(segs,nsep,level+1); 284 for (i=0; i<level; i++) { segs[i+1] += segs[i]; } 285 stages[0] = segs[0]; 286 287 /* temporary vectors */ 288 u = (PetscScalar *) malloc(n*sizeof(PetscScalar)); 289 z = (PetscScalar *) malloc(n*sizeof(PetscScalar)); 290 v = (PetscScalar *) malloc(a_m*sizeof(PetscScalar)); 291 uu = (PetscScalar *) malloc(m*sizeof(PetscScalar)); 292 w = (PetscScalar *) malloc(m*sizeof(PetscScalar)); 293 294 /* extra nnz due to replication of vertices across separators */ 295 for (i=1, j=0; i<=level; i++) {j+=nsep[i];} 296 297 /* storage for sparse x values */ 298 n_global = xxt_handle->info->n_global; 299 xxt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes; 300 x = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar)); 301 xxt_nnz = 0; 302 303 /* LATER - can embed next sep to fire in gs */ 304 /* time to make the donuts - generate X factor */ 305 for (dim=i=j=0;i<m;i++) 306 { 307 /* time to move to the next level? */ 308 while (i==segs[dim]) { 309 if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n"); 310 stages[dim++]=i; 311 end+=lnsep[dim]; 312 } 313 stages[dim]=i; 314 315 /* which column are we firing? */ 316 /* i.e. set v_l */ 317 /* use new seps and do global min across hc to determine which one to fire */ 318 (start<end) ? (col=fo[start]) : (col=INT_MAX); 319 PCTFS_giop_hc(&col,&work,1,op2,dim); 320 321 /* shouldn't need this */ 322 if (col==INT_MAX) { 323 ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr); 324 continue; 325 } 326 327 /* do I own it? I should */ 328 PCTFS_rvec_zero(v ,a_m); 329 if (col==fo[start]) { 330 start++; 331 idex=PCTFS_ivec_linear_search(col, a_local2global, a_n); 332 if (idex!=-1) { 333 v[idex] = 1.0; j++; 334 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n"); 335 } else { 336 idex=PCTFS_ivec_linear_search(col, a_local2global, a_m); 337 if (idex!=-1) 338 {v[idex] = 1.0;} 339 } 340 341 /* perform u = A.v_l */ 342 PCTFS_rvec_zero(u,n); 343 do_matvec(xxt_handle->mvi,v,u); 344 345 /* uu = X^T.u_l (local portion) */ 346 /* technically only need to zero out first i entries */ 347 /* later turn this into an XXT_solve call ? */ 348 PCTFS_rvec_zero(uu,m); 349 x_ptr=x; 350 iptr = col_indices; 351 for (k=0; k<i; k++) { 352 off = *iptr++; 353 len = *iptr++; 354 dlen = PetscBLASIntCast(len); 355 uu[k] = BLASdot_(&dlen,u+off,&i1,x_ptr,&i1); 356 x_ptr+=len; 357 } 358 359 360 /* uu = X^T.u_l (comm portion) */ 361 PCTFS_ssgl_radd (uu, w, dim, stages); 362 363 /* z = X.uu */ 364 PCTFS_rvec_zero(z,n); 365 x_ptr=x; 366 iptr = col_indices; 367 for (k=0; k<i; k++) { 368 off = *iptr++; 369 len = *iptr++; 370 dlen = PetscBLASIntCast(len); 371 BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1); 372 x_ptr+=len; 373 } 374 375 /* compute v_l = v_l - z */ 376 PCTFS_rvec_zero(v+a_n,a_m-a_n); 377 dlen = PetscBLASIntCast(n); 378 BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1); 379 380 /* compute u_l = A.v_l */ 381 if (a_n!=a_m) { PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim); } 382 PCTFS_rvec_zero(u,n); 383 do_matvec(xxt_handle->mvi,v,u); 384 385 /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */ 386 dlen = PetscBLASIntCast(n); 387 alpha = BLASdot_(&dlen,u,&i1,v,&i1); 388 /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */ 389 PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim); 390 391 alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha); 392 393 /* check for small alpha */ 394 /* LATER use this to detect and determine null space */ 395 if (fabs(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha); 396 397 /* compute v_l = v_l/sqrt(alpha) */ 398 PCTFS_rvec_scale(v,1.0/alpha,n); 399 400 /* add newly generated column, v_l, to X */ 401 flag = 1; 402 off=len=0; 403 for (k=0; k<n; k++) { 404 if (v[k]!=0.0) { 405 len=k; 406 if (flag) { off=k; flag=0; } 407 } 408 } 409 410 len -= (off-1); 411 412 if (len>0) { 413 if ((xxt_nnz+len)>xxt_max_nnz) { 414 ierr = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr); 415 xxt_max_nnz *= 2; 416 x_ptr = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar)); 417 PCTFS_rvec_copy(x_ptr,x,xxt_nnz); 418 free(x); 419 x = x_ptr; 420 x_ptr+=xxt_nnz; 421 } 422 xxt_nnz += len; 423 PCTFS_rvec_copy(x_ptr,v+off,len); 424 425 /* keep track of number of zeros */ 426 if (dim) { 427 for (k=0; k<len; k++) { 428 if (x_ptr[k]==0.0) { xxt_zero_nnz++; } 429 } 430 } else { 431 for (k=0; k<len; k++) { 432 if (x_ptr[k]==0.0) {xxt_zero_nnz_0++;} 433 } 434 } 435 col_indices[2*i] = off; 436 col_sz[i] = col_indices[2*i+1] = len; 437 col_vals[i] = x_ptr; 438 } 439 else { 440 col_indices[2*i] = 0; 441 col_sz[i] = col_indices[2*i+1] = 0; 442 col_vals[i] = x_ptr; 443 } 444 } 445 446 /* close off stages for execution phase */ 447 while (dim!=level) { 448 stages[dim++]=i; 449 ierr = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr); 450 } 451 stages[dim]=i; 452 453 xxt_handle->info->n=xxt_handle->mvi->n; 454 xxt_handle->info->m=m; 455 xxt_handle->info->nnz=xxt_nnz; 456 xxt_handle->info->max_nnz=xxt_max_nnz; 457 xxt_handle->info->msg_buf_sz=stages[level]-stages[0]; 458 xxt_handle->info->solve_uu = (PetscScalar *) malloc(m*sizeof(PetscScalar)); 459 xxt_handle->info->solve_w = (PetscScalar *) malloc(m*sizeof(PetscScalar)); 460 xxt_handle->info->x=x; 461 xxt_handle->info->col_vals=col_vals; 462 xxt_handle->info->col_sz=col_sz; 463 xxt_handle->info->col_indices=col_indices; 464 xxt_handle->info->stages=stages; 465 xxt_handle->info->nsolves=0; 466 xxt_handle->info->tot_solve_time=0.0; 467 468 free(segs); 469 free(u); 470 free(v); 471 free(uu); 472 free(z); 473 free(w); 474 475 return(0); 476 } 477 478 /**************************************xxt.c***********************************/ 479 static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc) 480 { 481 PetscInt off, len, *iptr; 482 PetscInt level =xxt_handle->level; 483 PetscInt n =xxt_handle->info->n; 484 PetscInt m =xxt_handle->info->m; 485 PetscInt *stages =xxt_handle->info->stages; 486 PetscInt *col_indices=xxt_handle->info->col_indices; 487 PetscScalar *x_ptr, *uu_ptr; 488 PetscScalar *solve_uu=xxt_handle->info->solve_uu; 489 PetscScalar *solve_w =xxt_handle->info->solve_w; 490 PetscScalar *x =xxt_handle->info->x; 491 PetscBLASInt i1 = 1,dlen; 492 493 PetscFunctionBegin; 494 uu_ptr=solve_uu; 495 PCTFS_rvec_zero(uu_ptr,m); 496 497 /* x = X.Y^T.b */ 498 /* uu = Y^T.b */ 499 for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) { 500 off=*iptr++; len=*iptr++; 501 dlen = PetscBLASIntCast(len); 502 *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,x_ptr,&i1); 503 } 504 505 /* comunication of beta */ 506 uu_ptr=solve_uu; 507 if (level) { PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages); } 508 509 PCTFS_rvec_zero(uc,n); 510 511 /* x = X.uu */ 512 for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) { 513 off=*iptr++; len=*iptr++; 514 dlen = PetscBLASIntCast(len); 515 BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1); 516 } 517 PetscFunctionReturn(0); 518 } 519 520 /**************************************xxt.c***********************************/ 521 static PetscErrorCode check_handle(xxt_ADT xxt_handle) 522 { 523 PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX}; 524 525 PetscFunctionBegin; 526 if (!xxt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xxt_handle); 527 528 vals[0]=vals[1]=xxt_handle->id; 529 PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op); 530 if ((vals[0]!=vals[1])||(xxt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1], xxt_handle->id); 531 PetscFunctionReturn(0); 532 } 533 534 /**************************************xxt.c***********************************/ 535 static PetscErrorCode det_separators(xxt_ADT xxt_handle) 536 { 537 PetscInt i, ct, id; 538 PetscInt mask, edge, *iptr; 539 PetscInt *dir, *used; 540 PetscInt sum[4], w[4]; 541 PetscScalar rsum[4], rw[4]; 542 PetscInt op[] = {GL_ADD,0}; 543 PetscScalar *lhs, *rhs; 544 PetscInt *nsep, *lnsep, *fo, nfo=0; 545 PCTFS_gs_ADT PCTFS_gs_handle=xxt_handle->mvi->PCTFS_gs_handle; 546 PetscInt *local2global=xxt_handle->mvi->local2global; 547 PetscInt n=xxt_handle->mvi->n; 548 PetscInt m=xxt_handle->mvi->m; 549 PetscInt level=xxt_handle->level; 550 PetscInt shared=0; 551 552 PetscFunctionBegin; 553 dir = (PetscInt*)malloc(sizeof(PetscInt)*(level+1)); 554 nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1)); 555 lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1)); 556 fo = (PetscInt*)malloc(sizeof(PetscInt)*(n+1)); 557 used = (PetscInt*)malloc(sizeof(PetscInt)*n); 558 559 PCTFS_ivec_zero(dir ,level+1); 560 PCTFS_ivec_zero(nsep ,level+1); 561 PCTFS_ivec_zero(lnsep,level+1); 562 PCTFS_ivec_set (fo ,-1,n+1); 563 PCTFS_ivec_zero(used,n); 564 565 lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m); 566 rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m); 567 568 /* determine the # of unique dof */ 569 PCTFS_rvec_zero(lhs,m); 570 PCTFS_rvec_set(lhs,1.0,n); 571 PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level); 572 PCTFS_rvec_zero(rsum,2); 573 for (ct=i=0;i<n;i++) { 574 if (lhs[i]!=0.0) 575 {rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];} 576 } 577 PCTFS_grop_hc(rsum,rw,2,op,level); 578 rsum[0]+=0.1; 579 rsum[1]+=0.1; 580 581 if (fabs(rsum[0]-rsum[1])>EPS) { shared=1; } 582 583 xxt_handle->info->n_global=xxt_handle->info->m_global=(PetscInt) rsum[0]; 584 xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(PetscInt) rsum[0]; 585 586 /* determine separator sets top down */ 587 if (shared) 588 { 589 for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) { 590 591 /* set rsh of hc, fire, and collect lhs responses */ 592 (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m); 593 PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge); 594 595 /* set lsh of hc, fire, and collect rhs responses */ 596 (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m); 597 PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge); 598 599 for (i=0;i<n;i++) { 600 if (id< mask) { 601 if (lhs[i]!=0.0) { lhs[i]=1.0; } 602 } 603 604 if (id>=mask) { 605 if (rhs[i]!=0.0) { rhs[i]=1.0; } 606 } 607 } 608 609 if (id< mask) { PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge-1); } 610 else { PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge-1); } 611 612 /* count number of dofs I own that have signal and not in sep set */ 613 PCTFS_rvec_zero(rsum,4); 614 for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) { 615 616 if (!used[i]) { 617 /* number of unmarked dofs on node */ 618 ct++; 619 /* number of dofs to be marked on lhs hc */ 620 if (id< mask) { 621 if (lhs[i]!=0.0) { sum[0]++; rsum[0]+=1.0/lhs[i]; } 622 } 623 /* number of dofs to be marked on rhs hc */ 624 if (id>=mask) { 625 if (rhs[i]!=0.0) { sum[1]++; rsum[1]+=1.0/rhs[i]; } 626 } 627 } 628 629 } 630 631 /* go for load balance - choose half with most unmarked dofs, bias LHS */ 632 (id<mask) ? (sum[2]=ct) : (sum[3]=ct); 633 (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct); 634 PCTFS_giop_hc(sum,w,4,op,edge); 635 PCTFS_grop_hc(rsum,rw,4,op,edge); 636 rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1; 637 638 if (id<mask) { 639 /* mark dofs I own that have signal and not in sep set */ 640 for (ct=i=0;i<n;i++) { 641 if ((!used[i])&&(lhs[i]!=0.0)) { 642 ct++; nfo++; 643 644 if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n"); 645 646 *--iptr = local2global[i]; 647 used[i]=edge; 648 } 649 } 650 if (ct>1) {PCTFS_ivec_sort(iptr,ct);} 651 652 lnsep[edge]=ct; 653 nsep[edge]=(PetscInt) rsum[0]; 654 dir [edge]=LEFT; 655 } 656 657 if (id>=mask) { 658 /* mark dofs I own that have signal and not in sep set */ 659 for (ct=i=0;i<n;i++) { 660 if ((!used[i])&&(rhs[i]!=0.0)) { 661 ct++; nfo++; 662 663 if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n"); 664 665 *--iptr = local2global[i]; 666 used[i]=edge; 667 } 668 } 669 if (ct>1) { PCTFS_ivec_sort(iptr,ct); } 670 671 lnsep[edge]=ct; 672 nsep[edge]= (PetscInt) rsum[1]; 673 dir [edge]=RIGHT; 674 } 675 676 /* LATER or we can recur on these to order seps at this level */ 677 /* do we need full set of separators for this? */ 678 679 /* fold rhs hc into lower */ 680 if (id>=mask) { id-=mask; } 681 } 682 } else { 683 for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) { 684 /* set rsh of hc, fire, and collect lhs responses */ 685 (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m); 686 PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge); 687 688 /* set lsh of hc, fire, and collect rhs responses */ 689 (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m); 690 PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge); 691 692 /* count number of dofs I own that have signal and not in sep set */ 693 for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) { 694 if (!used[i]) { 695 /* number of unmarked dofs on node */ 696 ct++; 697 /* number of dofs to be marked on lhs hc */ 698 if ((id< mask)&&(lhs[i]!=0.0)) { sum[0]++; } 699 /* number of dofs to be marked on rhs hc */ 700 if ((id>=mask)&&(rhs[i]!=0.0)) { sum[1]++; } 701 } 702 } 703 704 /* go for load balance - choose half with most unmarked dofs, bias LHS */ 705 (id<mask) ? (sum[2]=ct) : (sum[3]=ct); 706 PCTFS_giop_hc(sum,w,4,op,edge); 707 708 /* lhs hc wins */ 709 if (sum[2]>=sum[3]) { 710 if (id<mask) { 711 /* mark dofs I own that have signal and not in sep set */ 712 for (ct=i=0;i<n;i++) { 713 if ((!used[i])&&(lhs[i]!=0.0)) { 714 ct++; nfo++; 715 *--iptr = local2global[i]; 716 used[i]=edge; 717 } 718 } 719 if (ct>1) { PCTFS_ivec_sort(iptr,ct); } 720 lnsep[edge]=ct; 721 } 722 nsep[edge]=sum[0]; 723 dir [edge]=LEFT; 724 } else { /* rhs hc wins */ 725 if (id>=mask) { 726 /* mark dofs I own that have signal and not in sep set */ 727 for (ct=i=0;i<n;i++) { 728 if ((!used[i])&&(rhs[i]!=0.0)) { 729 ct++; nfo++; 730 *--iptr = local2global[i]; 731 used[i]=edge; 732 } 733 } 734 if (ct>1) {PCTFS_ivec_sort(iptr,ct);} 735 lnsep[edge]=ct; 736 } 737 nsep[edge]=sum[1]; 738 dir [edge]=RIGHT; 739 } 740 /* LATER or we can recur on these to order seps at this level */ 741 /* do we need full set of separators for this? */ 742 743 /* fold rhs hc into lower */ 744 if (id>=mask) { id-=mask; } 745 } 746 } 747 748 749 /* level 0 is on processor case - so mark the remainder */ 750 for (ct=i=0;i<n;i++) { 751 if (!used[i]) { 752 ct++; nfo++; 753 *--iptr = local2global[i]; 754 used[i]=edge; 755 } 756 } 757 if (ct>1) { PCTFS_ivec_sort(iptr,ct); } 758 lnsep[edge]=ct; 759 nsep [edge]=ct; 760 dir [edge]=LEFT; 761 762 xxt_handle->info->nsep=nsep; 763 xxt_handle->info->lnsep=lnsep; 764 xxt_handle->info->fo=fo; 765 xxt_handle->info->nfo=nfo; 766 767 free(dir); 768 free(lhs); 769 free(rhs); 770 free(used); 771 PetscFunctionReturn(0); 772 } 773 774 /**************************************xxt.c***********************************/ 775 static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data) 776 { 777 mv_info *mvi; 778 779 780 mvi = (mv_info*)malloc(sizeof(mv_info)); 781 mvi->n=n; 782 mvi->m=m; 783 mvi->n_global=-1; 784 mvi->m_global=-1; 785 mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt)); 786 PCTFS_ivec_copy(mvi->local2global,local2global,m); 787 mvi->local2global[m] = INT_MAX; 788 mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec; 789 mvi->grid_data=grid_data; 790 791 /* set xxt communication handle to perform restricted matvec */ 792 mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes); 793 794 return(mvi); 795 } 796 797 /**************************************xxt.c***********************************/ 798 static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u) 799 { 800 PetscFunctionBegin; 801 A->matvec((mv_info*)A->grid_data,v,u); 802 PetscFunctionReturn(0); 803 } 804 805 806 807