#define PETSCKSP_DLL

/*************************************xxt.c************************************
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL 60439               |Chicago, IL 60637               |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/
#include "../src/ksp/pc/impls/tfs/tfs.h"

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xxt_solver_info {
  PetscInt    n, m, n_global, m_global;
  PetscInt    nnz, max_nnz, msg_buf_sz;
  PetscInt    *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt    *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt    nsolves;
  PetscScalar tot_solve_time;
} xxt_info;

typedef struct matvec_info {
  PetscInt       n, m, n_global, m_global;
  PetscInt       *local2global;
  gs_ADT         gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void           *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xxt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xxt         = 0;
static PetscInt n_xxt_handles = 0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscInt xxt_generate(xxt_ADT xxt_handle);
static PetscInt do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data);

/**************************************xxt.c***********************************/
xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... pot. problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL;
  xxt_handle->mvi  = NULL;

  return(xxt_handle);
}
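/*
   Typical call sequence (a hedged sketch based only on the public entry
   points in this file; "my_matvec" and "my_grid" stand in for hypothetical
   user-supplied objects):

     xxt_ADT handle = XXT_new();
     XXT_factor(handle, local2global, n, m, (void*)my_matvec, my_grid);
     XXT_solve(handle, x, b);     (copies b into x, then solves in place)
     XXT_stats(handle);
     XXT_free(handle);
*/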
/**************************************xxt.c***********************************/
PetscInt XXT_factor(xxt_ADT xxt_handle,     /* prev. allocated xxt handle */
                    PetscInt *local2global, /* global column mapping      */
                    PetscInt n,             /* local num rows             */
                    PetscInt m,             /* local num cols             */
                    void *matvec,           /* b_loc=A_local.x_loc        */
                    void *grid_data)        /* grid data for matvec       */
{
  comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xxt_handle->level=i_log2_num_nodes))!=num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<i_log2_num_nodes,num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns = 0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return(do_xxt_factor(xxt_handle));
}

/**************************************xxt.c***********************************/
PetscInt XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b) {rvec_copy(x,b,xxt_handle->mvi->n);}
  do_xxt_solve(xxt_handle,x);

  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_free(xxt_ADT xxt_handle)
{
  comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  gs_free(xxt_handle->mvi->gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}
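/*
   XXT_stats gathers min/max/total over all processors (via giop/grop) for
   the factor's nnz, the local problem size n, and the message-buffer size,
   plus min/max/avg solve time, and prints the results from processor 0.
*/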
/**************************************xxt.c***********************************/
PetscInt XXT_stats(xxt_ADT xxt_handle)
{
  PetscInt       op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       vals[9], work[9];
  PetscScalar    fvals[3], fwork[3];
  PetscErrorCode ierr;

  comm_init();
  check_handle(xxt_handle);

  /* if factorization not done there are no stats */
  if (!xxt_handle->info||!xxt_handle->mvi)
  {
    if (!my_id) {ierr = PetscPrintf(PETSC_COMM_WORLD,"XXT_stats() :: no stats available!\n");CHKERRQ(ierr);}
    return 1;
  }

  vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]=xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++;
  grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!my_id)
  {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_nnz=%D\n",my_id,vals[0]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_nnz=%D\n",my_id,vals[1]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_nnz=%g\n",my_id,1.0*vals[2]/num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xxt_nnz=%D\n",my_id,vals[2]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt C(2d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.5)));CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt C(3d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.6667)));CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_n  =%D\n",my_id,vals[3]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_n  =%D\n",my_id,vals[4]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_n  =%g\n",my_id,1.0*vals[5]/num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xxt_n  =%D\n",my_id,vals[5]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_buf=%D\n",my_id,vals[6]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_buf=%D\n",my_id,vals[7]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_buf=%g\n",my_id,1.0*vals[8]/num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_slv=%g\n",my_id,fvals[0]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_slv=%g\n",my_id,fvals[1]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_slv=%g\n",my_id,fvals[2]/num_nodes);CHKERRQ(ierr);
  }

  return(0);
}

/*************************************xxt.c************************************

Description: get A_local, the local portion of the global coarse matrix --
a row-distributed n x m matrix with n < m.
 o my_ml holds the address of the ML struct associated with A_local and the
   coarse grid
 o local2global holds the global number of column i (i=0,...,m-1)
 o local2global holds the global number of row i    (i=0,...,n-1)
 o mylocmatvec performs A_local . vec_local (note that gs is performed using
   gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xxt.c***********************************/
static PetscInt do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}
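/*
   For illustration, a conforming local matvec callback might look like the
   sketch below; "my_grid" and "my_apply_A_local" are hypothetical user code,
   and only the "void *data, in, out" shape comes from the block above:

     static PetscErrorCode my_matvec(void *grid_data, PetscScalar *in, PetscScalar *out)
     {
       my_grid *g = (my_grid*)grid_data;
       my_apply_A_local(g, in, out);     out = A_local . in
       return 0;
     }
*/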
/**************************************xxt.c***********************************/
static PetscInt xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt       i,j,k,idex;
  PetscInt       dim, col;
  PetscScalar    *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt       *segs;
  PetscInt       op[] = {GL_ADD,0};
  PetscInt       off, len;
  PetscScalar    *x_ptr;
  PetscInt       *iptr, flag;
  PetscInt       start=0, end, work;
  PetscInt       op2[] = {GL_MIN,0};
  gs_ADT         gs_handle;
  PetscInt       *nsep, *lnsep, *fo;
  PetscInt       a_n=xxt_handle->mvi->n;
  PetscInt       a_m=xxt_handle->mvi->m;
  PetscInt       *a_local2global=xxt_handle->mvi->local2global;
  PetscInt       level;
  PetscInt       xxt_nnz=0, xxt_max_nnz=0;
  PetscInt       n, m;
  PetscInt       *col_sz, *col_indices, *stages;
  PetscScalar    **col_vals, *x;
  PetscInt       n_global;
  PetscInt       xxt_zero_nnz=0;
  PetscInt       xxt_zero_nnz_0=0;
  PetscBLASInt   i1 = 1,dlen;
  PetscScalar    dm1 = -1.0;
  PetscErrorCode ierr;

  n         = xxt_handle->mvi->n;
  nsep      = xxt_handle->info->nsep;
  lnsep     = xxt_handle->info->lnsep;
  fo        = xxt_handle->info->fo;
  end       = lnsep[0];
  level     = xxt_handle->level;
  gs_handle = xxt_handle->mvi->gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++)
  {j+=nsep[i];}

  m = j-xxt_handle->ns;
  if (m!=j)
  {ierr = PetscPrintf(PETSC_COMM_WORLD,"xxt_generate() :: null space exists %D %D %D\n",m,j,xxt_handle->ns);}

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz      = (PetscInt*)malloc(m*sizeof(PetscInt));
  col_indices = (PetscInt*)malloc((2*m+1)*sizeof(PetscInt));
  col_vals    = (PetscScalar**)malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2)
  {
    col_indices[j]=col_indices[j+1]=col_sz[i]=-1;
    col_vals[i] = NULL;
  }
  col_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*)malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*)malloc((level+1)*sizeof(PetscInt));
  ivec_zero(stages,level+1);
  ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++)
  {segs[i+1] += segs[i];}
  stages[0] = segs[0];

  /* temporary vectors */
  u  = (PetscScalar*)malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*)malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*)malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*)malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*)malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++)
  {j+=nsep[i];}

  /* storage for sparse x values */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/num_nodes;
  x           = (PetscScalar*)malloc(xxt_max_nnz*sizeof(PetscScalar));
  xxt_nnz     = 0;
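  /* Overview of the loop below: each pass fires one separator dof (chosen by
     a global min across the active sub-hypercube), forms the corresponding
     unit vector v, computes u = A.v, and A-conjugates v against the columns
     already in X: uu = X^T.u locally plus a comm step, z = X.uu, then
     v <- (v - z)/sqrt(v^T.A.v).  The result is a factor X with X^T A X = I,
     which is exactly what do_xxt_solve() exploits. */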
  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0;i<m;i++)
  {
    /* time to move to the next level? */
    while (i==segs[dim]) {
      if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
      stages[dim++]=i;
      end+=lnsep[dim];
    }
    stages[dim]=i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start<end) ? (col=fo[start]) : (col=INT_MAX);
    giop_hc(&col,&work,1,op2,dim);

    /* shouldn't need this */
    if (col==INT_MAX)
    {
      ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
      continue;
    }

    /* do I own it? I should */
    rvec_zero(v ,a_m);
    if (col==fo[start])
    {
      start++;
      idex=ivec_linear_search(col, a_local2global, a_n);
      if (idex!=-1)
      {v[idex] = 1.0; j++;}
      else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
    }
    else
    {
      idex=ivec_linear_search(col, a_local2global, a_m);
      if (idex!=-1)
      {v[idex] = 1.0;}
    }

    /* perform u = A.v_l */
    rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* uu = X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    rvec_zero(uu,m);
    x_ptr=x;
    iptr = col_indices;
    for (k=0; k<i; k++)
    {
      off   = *iptr++;
      len   = *iptr++;
      dlen  = PetscBLASIntCast(len);
      uu[k] = BLASdot_(&dlen,u+off,&i1,x_ptr,&i1);
      x_ptr+=len;
    }

    /* uu = X^T.u_l (comm portion) */
    ssgl_radd(uu, w, dim, stages);

    /* z = X.uu */
    rvec_zero(z,n);
    x_ptr=x;
    iptr = col_indices;
    for (k=0; k<i; k++)
    {
      off  = *iptr++;
      len  = *iptr++;
      dlen = PetscBLASIntCast(len);
      BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
      x_ptr+=len;
    }

    /* compute v_l = v_l - z */
    rvec_zero(v+a_n,a_m-a_n);
    dlen = PetscBLASIntCast(n);
    BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);

    /* compute u_l = A.v_l */
    if (a_n!=a_m)
    {gs_gop_hc(gs_handle,v,"+\0",dim);}
    rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    dlen  = PetscBLASIntCast(n);
    alpha = BLASdot_(&dlen,u,&i1,v,&i1);
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar)sqrt((double)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    if (fabs(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

    /* compute v_l = v_l/sqrt(alpha) */
    rvec_scale(v,1.0/alpha,n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off=len=0;
    for (k=0; k<n; k++)
    {
      if (v[k]!=0.0)
      {
        len=k;
        if (flag)
        {off=k; flag=0;}
      }
    }

    len -= (off-1);

    if (len>0)
    {
      if ((xxt_nnz+len)>xxt_max_nnz)
      {
        ierr = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
        xxt_max_nnz *= 2;
        x_ptr = (PetscScalar*)malloc(xxt_max_nnz*sizeof(PetscScalar));
        rvec_copy(x_ptr,x,xxt_nnz);
        free(x);
        x = x_ptr;
        x_ptr+=xxt_nnz;
      }
      /* note: in the non-realloc path the z = X.uu loop above left x_ptr
         pointing one past the last stored value, so the copy appends in place */
      xxt_nnz += len;
      rvec_copy(x_ptr,v+off,len);

      /* keep track of number of zeros */
      if (dim)
      {
        for (k=0; k<len; k++)
        {
          if (x_ptr[k]==0.0)
          {xxt_zero_nnz++;}
        }
      }
      else
      {
        for (k=0; k<len; k++)
        {
          if (x_ptr[k]==0.0)
          {xxt_zero_nnz_0++;}
        }
      }
      col_indices[2*i] = off;
      col_sz[i] = col_indices[2*i+1] = len;
      col_vals[i] = x_ptr;
    }
    else
    {
      col_indices[2*i] = 0;
      col_sz[i] = col_indices[2*i+1] = 0;
      col_vals[i] = x_ptr;
    }
  }
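  /* Storage recap: X is kept column-compressed -- col_indices holds (off,len)
     pairs terminated by -1, with column i's len values packed consecutively
     in x.  The col_vals pointers into x can go stale when the buffer is
     reallocated above, but the solver reads only x and col_indices, never
     col_vals. */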
  /* close off stages for execution phase */
  while (dim!=level)
  {
    stages[dim++]=i;
    ierr = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
  }
  stages[dim]=i;

  xxt_handle->info->n              = xxt_handle->mvi->n;
  xxt_handle->info->m              = m;
  xxt_handle->info->nnz            = xxt_nnz;
  xxt_handle->info->max_nnz        = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz     = stages[level]-stages[0];
  xxt_handle->info->solve_uu       = (PetscScalar*)malloc(m*sizeof(PetscScalar));
  xxt_handle->info->solve_w        = (PetscScalar*)malloc(m*sizeof(PetscScalar));
  xxt_handle->info->x              = x;
  xxt_handle->info->col_vals       = col_vals;
  xxt_handle->info->col_sz         = col_sz;
  xxt_handle->info->col_indices    = col_indices;
  xxt_handle->info->stages         = stages;
  xxt_handle->info->nsolves        = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level       =xxt_handle->level;
  PetscInt     n           =xxt_handle->info->n;
  PetscInt     m           =xxt_handle->info->m;
  PetscInt     *stages     =xxt_handle->info->stages;
  PetscInt     *col_indices=xxt_handle->info->col_indices;
  PetscScalar  *x_ptr, *uu_ptr;
  PetscScalar  *solve_uu   =xxt_handle->info->solve_uu;
  PetscScalar  *solve_w    =xxt_handle->info->solve_w;
  PetscScalar  *x          =xxt_handle->info->x;
  PetscBLASInt i1 = 1,dlen;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b   */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
  {
    off=*iptr++; len=*iptr++;
    dlen = PetscBLASIntCast(len);
    *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,x_ptr,&i1);
  }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) {ssgl_radd(uu_ptr, solve_w, level, stages);}

  rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
  {
    off=*iptr++; len=*iptr++;
    dlen = PetscBLASIntCast(len);
    BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
  }
  PetscFunctionReturn(0);
}
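/*
   Why two passes suffice: xxt_generate() built X so that X^T A X = I, hence
   A^{-1} = X X^T and the solve is just uc <- X (X^T uc) -- local dots, one
   ssgl_radd for the distributed sum, then local axpys.  (Here Y = X since A
   is symmetric; the code uses the same x array for both.)
*/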
/**************************************xxt.c***********************************/
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (!xxt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xxt_handle);

  vals[0]=vals[1]=xxt_handle->id;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xxt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1], xxt_handle->id);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt    i, ct, id;
  PetscInt    mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt    sum[4], w[4];
  PetscScalar rsum[4], rw[4];
  PetscInt    op[] = {GL_ADD,0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo=0;
  gs_ADT      gs_handle    =xxt_handle->mvi->gs_handle;
  PetscInt    *local2global=xxt_handle->mvi->local2global;
  PetscInt    n            =xxt_handle->mvi->n;
  PetscInt    m            =xxt_handle->mvi->m;
  PetscInt    level        =xxt_handle->level;
  PetscInt    shared       =FALSE;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  ivec_zero(dir  ,level+1);
  ivec_zero(nsep ,level+1);
  ivec_zero(lnsep,level+1);
  ivec_set (fo   ,-1,n+1);
  ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  rvec_zero(lhs,m);
  rvec_set(lhs,1.0,n);
  gs_gop_hc(gs_handle,lhs,"+\0",level);
  rvec_zero(rsum,2);
  for (ct=i=0;i<n;i++)
  {
    if (lhs[i]!=0.0)
    {rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];}
  }
  grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  if (fabs(rsum[0]-rsum[1])>EPS)
  {shared=TRUE;}

  xxt_handle->info->n_global=xxt_handle->info->m_global=(PetscInt) rsum[0];
  xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(PetscInt) rsum[0];
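  /* How the shared test works: after the gs sum, lhs[i] holds the number of
     processors sharing dof i.  Summing 1/lhs[i] counts every global dof
     exactly once (so rsum[0] is the global dof count stored above), while
     summing lhs[i] weights each dof by its multiplicity; the two sums agree
     only when no dof is replicated across processors. */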
  /* determine separator sets top down */
  if (shared)
  {
    for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
    {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
      gs_gop_hc(gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
      gs_gop_hc(gs_handle,rhs,"+\0",edge);

      for (i=0;i<n;i++)
      {
        if (id< mask)
        {
          if (lhs[i]!=0.0)
          {lhs[i]=1.0;}
        }
        if (id>=mask)
        {
          if (rhs[i]!=0.0)
          {rhs[i]=1.0;}
        }
      }

      if (id< mask)
      {gs_gop_hc(gs_handle,lhs,"+\0",edge-1);}
      else
      {gs_gop_hc(gs_handle,rhs,"+\0",edge-1);}

      /* count number of dofs I own that have signal and not in sep set */
      rvec_zero(rsum,4);
      for (ivec_zero(sum,4),ct=i=0;i<n;i++)
      {
        if (!used[i])
        {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id< mask)
          {
            if (lhs[i]!=0.0)
            {sum[0]++; rsum[0]+=1.0/lhs[i];}
          }
          /* number of dofs to be marked on rhs hc */
          if (id>=mask)
          {
            if (rhs[i]!=0.0)
            {sum[1]++; rsum[1]+=1.0/rhs[i];}
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
      giop_hc(sum,w,4,op,edge);
      grop_hc(rsum,rw,4,op,edge);
      rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

      if (id<mask)
      {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0;i<n;i++)
        {
          if ((!used[i])&&(lhs[i]!=0.0))
          {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i]=edge;
          }
        }
        if (ct>1) {ivec_sort(iptr,ct);}

        lnsep[edge]=ct;
        nsep[edge]=(PetscInt) rsum[0];
        dir [edge]=LEFT;
      }

      if (id>=mask)
      {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0;i<n;i++)
        {
          if ((!used[i])&&(rhs[i]!=0.0))
          {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i]=edge;
          }
        }
        if (ct>1) {ivec_sort(iptr,ct);}

        lnsep[edge]=ct;
        nsep[edge]= (PetscInt) rsum[1];
        dir [edge]=RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id>=mask)
      {id-=mask;}
    }
  }
  else
  {
    for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
    {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
      gs_gop_hc(gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
      gs_gop_hc(gs_handle,rhs,"+\0",edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (ivec_zero(sum,4),ct=i=0;i<n;i++)
      {
        if (!used[i])
        {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id< mask)&&(lhs[i]!=0.0)) {sum[0]++;}
          /* number of dofs to be marked on rhs hc */
          if ((id>=mask)&&(rhs[i]!=0.0)) {sum[1]++;}
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      giop_hc(sum,w,4,op,edge);

      /* lhs hc wins */
      if (sum[2]>=sum[3])
      {
        if (id<mask)
        {
          /* mark dofs I own that have signal and not in sep set */
          for (ct=i=0;i<n;i++)
          {
            if ((!used[i])&&(lhs[i]!=0.0))
            {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i]=edge;
            }
          }
          if (ct>1) {ivec_sort(iptr,ct);}
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[0];
        dir [edge]=LEFT;
      }
      /* rhs hc wins */
      else
      {
        if (id>=mask)
        {
          /* mark dofs I own that have signal and not in sep set */
          for (ct=i=0;i<n;i++)
          {
            if ((!used[i])&&(rhs[i]!=0.0))
            {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i]=edge;
            }
          }
          if (ct>1) {ivec_sort(iptr,ct);}
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[1];
        dir [edge]=RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id>=mask)
      {id-=mask;}
    }
  }
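  /* At each pass above the active hypercube is cut in half on bit `mask`:
     one half fires 1's through the gs handle, so dofs that receive signal on
     the other side lie on the interface.  The winning half's interface dofs
     (chosen for load balance, biased toward the LHS) are appended to the
     firing order fo from the back via *--iptr, the upper half folds onto the
     lower (id -= mask), and the process repeats one edge down. */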
  /* level 0 is on processor case - so mark the remainder */
  for (ct=i=0;i<n;i++)
  {
    if (!used[i])
    {
      ct++; nfo++;
      *--iptr = local2global[i];
      used[i]=edge;
    }
  }
  if (ct>1) {ivec_sort(iptr,ct);}
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=LEFT;

  xxt_handle->info->nsep =nsep;
  xxt_handle->info->lnsep=lnsep;
  xxt_handle->info->fo   =fo;
  xxt_handle->info->nfo  =nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data)
{
  mv_info *mvi;

  mvi = (mv_info*)malloc(sizeof(mv_info));
  mvi->n=n;
  mvi->m=m;
  mvi->n_global=-1;
  mvi->m_global=-1;
  mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt));
  ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
  mvi->grid_data=grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->gs_handle = gs_init(local2global, m, num_nodes);

  return(mvi);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  /* grid_data, not the mv_info itself, is handed to the user matvec;
     the cast merely satisfies the stored function-pointer type */
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}