1 /* 2 GAMG geometric-algebric multiogrid PC - Mark Adams 2011 3 */ 4 5 #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/ 6 #include <petsc-private/kspimpl.h> 7 8 #include <assert.h> 9 #include <petscblaslapack.h> 10 11 typedef struct { 12 PetscInt nsmooths; 13 PetscBool sym_graph; 14 PetscBool square_graph; 15 }PC_GAMG_AGG; 16 17 #undef __FUNCT__ 18 #define __FUNCT__ "PCGAMGSetNSmooths" 19 /*@ 20 PCGAMGSetNSmooths - Set number of smoothing steps (1 is typical) 21 22 Not Collective on PC 23 24 Input Parameters: 25 . pc - the preconditioner context 26 27 Options Database Key: 28 . -pc_gamg_agg_nsmooths 29 30 Level: intermediate 31 32 Concepts: Aggregation AMG preconditioner 33 34 .seealso: () 35 @*/ 36 PetscErrorCode PCGAMGSetNSmooths(PC pc, PetscInt n) 37 { 38 PetscErrorCode ierr; 39 40 PetscFunctionBegin; 41 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 42 ierr = PetscTryMethod(pc,"PCGAMGSetNSmooths_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 43 PetscFunctionReturn(0); 44 } 45 46 EXTERN_C_BEGIN 47 #undef __FUNCT__ 48 #define __FUNCT__ "PCGAMGSetNSmooths_GAMG" 49 PetscErrorCode PCGAMGSetNSmooths_GAMG(PC pc, PetscInt n) 50 { 51 PC_MG *mg = (PC_MG*)pc->data; 52 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 53 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 54 55 PetscFunctionBegin; 56 pc_gamg_agg->nsmooths = n; 57 PetscFunctionReturn(0); 58 } 59 EXTERN_C_END 60 61 #undef __FUNCT__ 62 #define __FUNCT__ "PCGAMGSetSymGraph" 63 /*@ 64 PCGAMGSetSymGraph - 65 66 Not Collective on PC 67 68 Input Parameters: 69 . pc - the preconditioner context 70 71 Options Database Key: 72 . 
-pc_gamg_sym_graph 73 74 Level: intermediate 75 76 Concepts: Aggregation AMG preconditioner 77 78 .seealso: () 79 @*/ 80 PetscErrorCode PCGAMGSetSymGraph(PC pc, PetscBool n) 81 { 82 PetscErrorCode ierr; 83 84 PetscFunctionBegin; 85 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 86 ierr = PetscTryMethod(pc,"PCGAMGSetSymGraph_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 87 PetscFunctionReturn(0); 88 } 89 90 EXTERN_C_BEGIN 91 #undef __FUNCT__ 92 #define __FUNCT__ "PCGAMGSetSymGraph_GAMG" 93 PetscErrorCode PCGAMGSetSymGraph_GAMG(PC pc, PetscBool n) 94 { 95 PC_MG *mg = (PC_MG*)pc->data; 96 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 97 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 98 99 PetscFunctionBegin; 100 pc_gamg_agg->sym_graph = n; 101 PetscFunctionReturn(0); 102 } 103 EXTERN_C_END 104 105 #undef __FUNCT__ 106 #define __FUNCT__ "PCGAMGSetSquareGraph" 107 /*@ 108 PCGAMGSetSquareGraph - 109 110 Not Collective on PC 111 112 Input Parameters: 113 . pc - the preconditioner context 114 115 Options Database Key: 116 . 
-pc_gamg_square_graph 117 118 Level: intermediate 119 120 Concepts: Aggregation AMG preconditioner 121 122 .seealso: () 123 @*/ 124 PetscErrorCode PCGAMGSetSquareGraph(PC pc, PetscBool n) 125 { 126 PetscErrorCode ierr; 127 128 PetscFunctionBegin; 129 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 130 ierr = PetscTryMethod(pc,"PCGAMGSetSquareGraph_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 131 PetscFunctionReturn(0); 132 } 133 134 EXTERN_C_BEGIN 135 #undef __FUNCT__ 136 #define __FUNCT__ "PCGAMGSetSquareGraph_GAMG" 137 PetscErrorCode PCGAMGSetSquareGraph_GAMG(PC pc, PetscBool n) 138 { 139 PC_MG *mg = (PC_MG*)pc->data; 140 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 141 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 142 143 PetscFunctionBegin; 144 pc_gamg_agg->square_graph = n; 145 PetscFunctionReturn(0); 146 } 147 EXTERN_C_END 148 149 /* -------------------------------------------------------------------------- */ 150 /* 151 PCSetFromOptions_GAMG_AGG 152 153 Input Parameter: 154 . pc - 155 */ 156 #undef __FUNCT__ 157 #define __FUNCT__ "PCSetFromOptions_GAMG_AGG" 158 PetscErrorCode PCSetFromOptions_GAMG_AGG( PC pc ) 159 { 160 PetscErrorCode ierr; 161 PC_MG *mg = (PC_MG*)pc->data; 162 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 163 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 164 PetscBool flag; 165 166 PetscFunctionBegin; 167 /* call base class */ 168 ierr = PCSetFromOptions_GAMG( pc ); CHKERRQ(ierr); 169 170 ierr = PetscOptionsHead("GAMG-AGG options"); CHKERRQ(ierr); 171 { 172 /* -pc_gamg_agg_nsmooths */ 173 pc_gamg_agg->nsmooths = 0; 174 ierr = PetscOptionsInt("-pc_gamg_agg_nsmooths", 175 "smoothing steps for smoothed aggregation, usually 1 (0)", 176 "PCGAMGSetNSmooths", 177 pc_gamg_agg->nsmooths, 178 &pc_gamg_agg->nsmooths, 179 &flag); 180 CHKERRQ(ierr); 181 182 /* -pc_gamg_sym_graph */ 183 pc_gamg_agg->sym_graph = PETSC_FALSE; 184 ierr = PetscOptionsBool("-pc_gamg_sym_graph", 185 "Set for asymmetric matrices", 186 "PCGAMGSetSymGraph", 187 
pc_gamg_agg->sym_graph, 188 &pc_gamg_agg->sym_graph, 189 &flag); 190 CHKERRQ(ierr); 191 192 /* -pc_gamg_square_graph */ 193 pc_gamg_agg->square_graph = PETSC_TRUE; 194 ierr = PetscOptionsBool("-pc_gamg_square_graph", 195 "For faster coarsening and lower coarse grid complexity", 196 "PCGAMGSetSquareGraph", 197 pc_gamg_agg->square_graph, 198 &pc_gamg_agg->square_graph, 199 &flag); 200 CHKERRQ(ierr); 201 } 202 ierr = PetscOptionsTail();CHKERRQ(ierr); 203 204 PetscFunctionReturn(0); 205 } 206 207 /* -------------------------------------------------------------------------- */ 208 /* 209 PCDestroy_AGG 210 211 Input Parameter: 212 . pc - 213 */ 214 #undef __FUNCT__ 215 #define __FUNCT__ "PCDestroy_AGG" 216 PetscErrorCode PCDestroy_AGG( PC pc ) 217 { 218 PetscErrorCode ierr; 219 PC_MG *mg = (PC_MG*)pc->data; 220 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 221 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 222 223 PetscFunctionBegin; 224 if( pc_gamg_agg ) { 225 ierr = PetscFree(pc_gamg_agg);CHKERRQ(ierr); 226 pc_gamg_agg = 0; 227 } 228 229 /* call base class */ 230 ierr = PCDestroy_GAMG( pc );CHKERRQ(ierr); 231 232 PetscFunctionReturn(0); 233 } 234 235 /* -------------------------------------------------------------------------- */ 236 /* 237 PCSetCoordinates_AGG 238 - collective 239 240 Input Parameter: 241 . pc - the preconditioner context 242 . ndm - dimesion of data (used for dof/vertex for Stokes) 243 . a_nloc - number of vertices local 244 . 
coords - [a_nloc][ndm] - interleaved coordinate data: {x_0, y_0, z_0, x_1, y_1, ...}
*/
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCSetCoordinates_AGG"
PetscErrorCode PCSetCoordinates_AGG( PC pc, PetscInt ndm, PetscInt a_nloc, PetscReal *coords )
{
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscErrorCode ierr;
  PetscInt       arrsz,kk,ii,jj,nloc,ndatarows,ndf;
  Mat            mat = pc->pmat;
  /* MPI_Comm wcomm = ((PetscObject)pc)->comm; */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  nloc = a_nloc;

  /* SA: null space vectors.  Decide how many null-space columns to build:
     1 for scalar problems, 3 (2D) or 6 (3D) rigid-body modes for elasticity,
     or ndf constant vectors when no coordinates are provided. */
  ierr = MatGetBlockSize( mat, &ndf ); CHKERRQ( ierr ); /* this does not work for Stokes */
  if( coords && ndf==1 ) pc_gamg->data_cell_cols = 1; /* scalar w/ coords and SA (not needed) */
  else if( coords ) {
    if(ndm > ndf) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"degrees of motion %d > block size %d",ndm,ndf);
    pc_gamg->data_cell_cols = (ndm==2 ? 3 : 6); /* displacement elasticity */
    if(ndm != ndf) {
      /* e.g. displacement + pressure blocks: no rigid-body construction known here */
      if(pc_gamg->data_cell_cols != ndf) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Don't know how to create null space for ndm=%d, ndf=%d. Use MatSetNearNullSpace.",ndm,ndf);
    }
  }
  else pc_gamg->data_cell_cols = ndf; /* no data, force SA with constant null space vectors */
  pc_gamg->data_cell_rows = ndatarows = ndf; assert(pc_gamg->data_cell_cols>0);
  arrsz = nloc*pc_gamg->data_cell_rows*pc_gamg->data_cell_cols;

  /* create data - syntactic sugar that should be refactored at some point.
     Reallocate only when the size changed (PetscFree of a NULL pointer is a no-op). */
  if (pc_gamg->data==0 || (pc_gamg->data_sz != arrsz)) {
    ierr = PetscFree( pc_gamg->data );  CHKERRQ(ierr);
    ierr = PetscMalloc((arrsz+1)*sizeof(PetscReal), &pc_gamg->data ); CHKERRQ(ierr);
  }
  /* copy data in - column oriented: column jj of the null-space block for
     local vertex kk lives at pc_gamg->data[jj*M + kk*ndatarows .. +ndatarows-1] */
  for(kk=0;kk<nloc;kk++){
    const PetscInt M = nloc*pc_gamg->data_cell_rows; /* stride into data */
    PetscReal *data = &pc_gamg->data[kk*ndatarows]; /* start of cell */
    if( pc_gamg->data_cell_cols==1 ) *data = 1.0;   /* scalar: constant vector */
    else {
      /* translational modes: identity block (one unit vector per dof) */
      for(ii=0;ii<ndatarows;ii++)
        for(jj=0;jj<ndatarows;jj++)
          if(ii==jj)data[ii*M + jj] = 1.0;
          else data[ii*M + jj] = 0.0;

      /* rotational modes built from the vertex coordinates */
      if( coords ) {
        if( ndm == 2 ){
          /* one in-plane rotation: (-y, x) */
          data += 2*M;
          data[0] = -coords[2*kk+1];
          data[1] = coords[2*kk];
        }
        else {
          /* three rotations about x, y, z axes (cross-product columns) */
          data += 3*M;
          data[0] = 0.0;             data[M+0] = coords[3*kk+2];  data[2*M+0] = -coords[3*kk+1];
          data[1] = -coords[3*kk+2]; data[M+1] = 0.0;             data[2*M+1] = coords[3*kk];
          data[2] = coords[3*kk+1];  data[M+2] = -coords[3*kk];   data[2*M+2] = 0.0;
        }
      }
    }
  }

  pc_gamg->data_sz = arrsz;

  PetscFunctionReturn(0);
}
EXTERN_C_END

/* Vertex state encoding for the aggregation algorithm: non-negative values
   are "selected" and hold the owning gid; negatives are sentinel states. */
typedef PetscInt NState;
static const NState NOT_DONE=-2;
static const NState DELETED=-1;
static const NState REMOVED=-3;
#define IS_SELECTED(s) (s!=DELETED && s!=NOT_DONE && s!=REMOVED)

/* -------------------------------------------------------------------------- */
/*
   smoothAggs - greedy grab of with G1 (unsquared graph) -- AIJ specific
     - AGG-MG specific: clears singletons out of 'selected_2'

   Input Parameter:
   . Gmat_2 - global matrix of graph (data not defined)
   . Gmat_1 - base graph to grab with
   Input/Output Parameter:
   . aggs_2 - linked list of aggs with gids )
*/
#undef __FUNCT__
#define __FUNCT__ "smoothAggs"
static PetscErrorCode smoothAggs( const Mat Gmat_2, /* base (squared) graph */
                                  const Mat Gmat_1, /* base graph */
                                  /* const IS selected_2, [nselected local] selected vertices */
                                  PetscCoarsenData *aggs_2 /* [nselected local] global ID of aggregate */
                                  )
{
  PetscErrorCode ierr;
  PetscBool      isMPI;
  Mat_SeqAIJ    *matA_1, *matB_1=0, *matA_2, *matB_2=0;
  MPI_Comm       wcomm = ((PetscObject)Gmat_2)->comm;
  PetscMPIInt    mype,npe;
  PetscInt       lid,*ii,*idx,ix,Iend,my0,kk,n,j;
  Mat_MPIAIJ    *mpimat_2 = 0, *mpimat_1=0;
  const PetscInt nloc = Gmat_2->rmap->n;
  PetscScalar   *cpcol_1_state,*cpcol_2_state,*cpcol_2_par_orig,*lid_parent_gid;
  PetscInt      *lid_cprowID_1;
  NState        *lid_state;
  Vec            ghost_par_orig2;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe ); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Gmat_1,&my0,&Iend); CHKERRQ(ierr);

  /* dead debugging branch: dump Gmat_2 in MATLAB format (never taken) */
  if( PETSC_FALSE ) {
    PetscViewer viewer; char fname[32]; static int llev=0;
    sprintf(fname,"Gmat2_%d.m",llev++);
    PetscViewerASCIIOpen(wcomm,fname,&viewer);
    ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
    ierr = MatView(Gmat_2, viewer ); CHKERRQ(ierr);
    ierr = PetscViewerDestroy( &viewer );
  }

  /* get submatrices: for MPIAIJ split into diagonal (A) and off-diagonal (B) parts */
  ierr = PetscObjectTypeCompare( (PetscObject)Gmat_1, MATMPIAIJ, &isMPI ); CHKERRQ(ierr);
  if(isMPI) {
    /* grab matrix objects */
    mpimat_2 = (Mat_MPIAIJ*)Gmat_2->data;
    mpimat_1 = (Mat_MPIAIJ*)Gmat_1->data;
    matA_1 = (Mat_SeqAIJ*)mpimat_1->A->data;
    matB_1 = (Mat_SeqAIJ*)mpimat_1->B->data;
    matA_2 = (Mat_SeqAIJ*)mpimat_2->A->data;
    matB_2 = (Mat_SeqAIJ*)mpimat_2->B->data;

    /* force compressed row storage for B matrix in AuxMat */
    matB_1->compressedrow.check = PETSC_TRUE;
    ierr = MatCheckCompressedRow(mpimat_1->B,&matB_1->compressedrow,matB_1->i,Gmat_1->rmap->n,-1.0);
    CHKERRQ(ierr);

    /* lid_cprowID_1[lid] = index of lid in B's compressed-row index, or -1 if
       that local row has no off-processor (ghost) neighbors */
    ierr = PetscMalloc( nloc*sizeof(PetscInt), &lid_cprowID_1 ); CHKERRQ(ierr);
    for( lid = 0 ; lid < nloc ; lid++ ) lid_cprowID_1[lid] = -1;
    for (ix=0; ix<matB_1->compressedrow.nrows; ix++) {
      PetscInt lid = matB_1->compressedrow.rindex[ix];
      lid_cprowID_1[lid] = ix;
    }
  }
  else {
    matA_1 = (Mat_SeqAIJ*)Gmat_1->data;
    matA_2 = (Mat_SeqAIJ*)Gmat_2->data;
    lid_cprowID_1 = PETSC_NULL;
  }
  assert( matA_1 && !matA_1->compressedrow.use );
  assert( matB_1==0 || matB_1->compressedrow.use );
  assert( matA_2 && !matA_2->compressedrow.use );
  assert( matB_2==0 || matB_2->compressedrow.use );

  /* get state of locals and selected gid for deleted */
  ierr = PetscMalloc( nloc*sizeof(NState), &lid_state ); CHKERRQ(ierr);
  ierr = PetscMalloc( nloc*sizeof(PetscScalar), &lid_parent_gid ); CHKERRQ(ierr);
  for( lid = 0 ; lid < nloc ; lid++ ) {
    lid_parent_gid[lid] = -1.0;
    lid_state[lid] = DELETED;
  }

  /* set lid_state: a local vertex that heads a (non-empty) aggregate list is
     "selected" and its state is its own gid */
  for( lid = 0 ; lid < nloc ; lid++ ) {
    PetscCDPos pos;
    ierr = PetscCDGetHeadPos(aggs_2,lid,&pos); CHKERRQ(ierr);
    if( pos ) {
      PetscInt gid1;
      ierr = PetscLLNGetID( pos, &gid1 ); CHKERRQ(ierr); assert(gid1==lid+my0);
      lid_state[lid] = gid1;
    }
  }

  /* map local to selected local, DELETED means a ghost owns it */
  for(lid=kk=0;lid<nloc;lid++){
    NState state = lid_state[lid];
    if( IS_SELECTED(state) ){
      PetscCDPos pos;
      ierr = PetscCDGetHeadPos(aggs_2,lid,&pos); CHKERRQ(ierr);
      while(pos){
        PetscInt gid1;
        ierr = PetscLLNGetID( pos, &gid1 ); CHKERRQ(ierr);
        ierr = PetscCDGetNextPos(aggs_2,lid,&pos); CHKERRQ(ierr);

        if( gid1 >= my0 && gid1 < Iend ){
          lid_parent_gid[gid1-my0] = (PetscScalar)(lid + my0);
        }
      }
    }
  }
  /* get 'cpcol_1/2_state' & cpcol_2_par_orig - uses mpimat_1/2->lvec for temp space */
  if (isMPI) {
    Vec tempVec;
    /* get 'cpcol_1_state': scatter each local vertex's state to the processes
       that see it as a ghost */
    ierr = MatGetVecs( Gmat_1, &tempVec, 0 ); CHKERRQ(ierr);
    for(kk=0,j=my0;kk<nloc;kk++,j++){
      PetscScalar v = (PetscScalar)lid_state[kk];
      ierr = VecSetValues( tempVec, 1, &j, &v, INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tempVec ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tempVec ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat_1->Mvctx,tempVec, mpimat_1->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_1->Mvctx,tempVec, mpimat_1->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( mpimat_1->lvec, &cpcol_1_state ); CHKERRQ(ierr);
    /* get 'cpcol_2_state' (same states, scattered with Gmat_2's ghost map) */
    ierr = VecScatterBegin(mpimat_2->Mvctx,tempVec, mpimat_2->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_2->Mvctx,tempVec, mpimat_2->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( mpimat_2->lvec, &cpcol_2_state ); CHKERRQ(ierr);
    /* get 'cpcol_2_par_orig': the pre-smoothing parent gid of each ghost */
    for(kk=0,j=my0;kk<nloc;kk++,j++){
      PetscScalar v = (PetscScalar)lid_parent_gid[kk];
      ierr = VecSetValues( tempVec, 1, &j, &v, INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tempVec ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tempVec ); CHKERRQ(ierr);
    ierr = VecDuplicate( mpimat_2->lvec, &ghost_par_orig2 ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat_2->Mvctx,tempVec, ghost_par_orig2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_2->Mvctx,tempVec, ghost_par_orig2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( ghost_par_orig2, &cpcol_2_par_orig ); CHKERRQ(ierr);

    ierr = VecDestroy( &tempVec ); CHKERRQ(ierr);
  } /* ismpi */

  /* doit: pass over all local vertices; selected vertices steal their
     1-ring (Gmat_1) deleted neighbors from other aggregates */
  for(lid=0;lid<nloc;lid++){
    NState state = lid_state[lid];
    if( IS_SELECTED(state) ) {
      /* steal locals */
      ii = matA_1->i; n = ii[lid+1] - ii[lid];
      idx = matA_1->j + ii[lid];
      for (j=0; j<n; j++) {
        PetscInt lidj = idx[j], sgid;
        NState statej = lid_state[lidj];
        if (statej==DELETED && (sgid=(PetscInt)PetscRealPart(lid_parent_gid[lidj])) != lid+my0) { /* steal local */
          lid_parent_gid[lidj] = (PetscScalar)(lid+my0); /* send this if sgid is not local */
          if( sgid >= my0 && sgid < Iend ){ /* I'm stealing this local from a local sgid */
            PetscInt hav=0,slid=sgid-my0,gidj=lidj+my0;
            PetscCDPos pos,last=PETSC_NULL;
            /* looking for local from local so id_llist_2 works */
            ierr = PetscCDGetHeadPos(aggs_2,slid,&pos); CHKERRQ(ierr);
            while(pos){
              PetscInt gid;
              ierr = PetscLLNGetID( pos, &gid ); CHKERRQ(ierr);
              if( gid == gidj ) {
                /* 'last' must exist: the list head is the selected vertex itself */
                assert(last);
                ierr = PetscCDRemoveNextNode( aggs_2, slid, last ); CHKERRQ(ierr);
                ierr = PetscCDAppendNode( aggs_2, lid, pos ); CHKERRQ(ierr);
                hav = 1;
                break;
              }
              else last = pos;

              ierr = PetscCDGetNextPos(aggs_2,slid,&pos); CHKERRQ(ierr);
            }
            if(hav!=1){
              if(hav==0)SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"failed to find adj in 'selected' lists - structurally unsymmetric matrix");
              SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"found node %d times???",hav);
            }
          }
          else{ /* I'm stealing this local, owned by a ghost */
            if(sgid != -1 ) {
              SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Have un-symmetric graph (apparently). Use '-pc_gamg_sym_graph true' to symetrize the graph or '-pc_gamg_threshold 0.0' if the matrix is structurally symmetric.");
            }
            ierr = PetscCDAppendID( aggs_2, lid, lidj+my0 ); CHKERRQ(ierr);
          }
        }
      } /* local neighbors */
    }
    else if( state == DELETED && lid_cprowID_1 ) {
      PetscInt sgidold = (PetscInt)PetscRealPart(lid_parent_gid[lid]);
      /* see if I have a selected ghost neighbor that will steal me */
      if( (ix=lid_cprowID_1[lid]) != -1 ){
        ii = matB_1->compressedrow.i; n = ii[ix+1] - ii[ix];
        idx = matB_1->j + ii[ix];
        for( j=0 ; j<n ; j++ ) {
          PetscInt cpid = idx[j];
          NState statej = (NState)PetscRealPart(cpcol_1_state[cpid]);
          if( IS_SELECTED(statej) && sgidold != (PetscInt)statej ) { /* ghost will steal this, remove from my list */
            lid_parent_gid[lid] = (PetscScalar)statej; /* send who selected */
            if( sgidold>=my0 && sgidold<Iend ) { /* this was mine */
              PetscInt hav=0,oldslidj=sgidold-my0;
              PetscCDPos pos,last=PETSC_NULL;
              /* remove from 'oldslidj' list */
              ierr = PetscCDGetHeadPos(aggs_2,oldslidj,&pos); CHKERRQ(ierr);
              while( pos ) {
                PetscInt gid;
                ierr = PetscLLNGetID( pos, &gid ); CHKERRQ(ierr);
                if( lid+my0 == gid ) {
                  /* id_llist_2[lastid] = id_llist_2[flid];   /\* remove lid from oldslidj list *\/ */
                  assert(last);
                  ierr = PetscCDRemoveNextNode( aggs_2, oldslidj, last ); CHKERRQ(ierr);
                  /* ghost (PetscScalar)statej will add this later */
                  hav = 1;
                  break;
                }
                else last = pos;

                ierr = PetscCDGetNextPos(aggs_2,oldslidj,&pos); CHKERRQ(ierr);
              }
              if(hav!=1){
                if(hav==0)SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"failed to find adj in 'selected' lists - structurally unsymmetric matrix");
                SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"found node %d times???",hav);
              }
            }
            else {
              /* ghosts remove this later */
            }
          }
        }
      }
    } /* selected/deleted */
  } /* node loop */

  /* parallel fix-up: exchange new parent gids and reconcile ghost moves */
  if( isMPI ) {
    PetscScalar *cpcol_2_parent,*cpcol_2_gid;
    Vec tempVec,ghostgids2,ghostparents2;
    PetscInt cpid,nghost_2;
    GAMGHashTable gid_cpid;

    ierr = VecGetSize( mpimat_2->lvec, &nghost_2 ); CHKERRQ(ierr);
    ierr = MatGetVecs( Gmat_2, &tempVec, 0 ); CHKERRQ(ierr);

    /* get 'cpcol_2_parent': post-stealing parent gid of each ghost */
    for(kk=0,j=my0;kk<nloc;kk++,j++){
      ierr = VecSetValues( tempVec, 1, &j, &lid_parent_gid[kk], INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tempVec ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tempVec ); CHKERRQ(ierr);
    ierr = VecDuplicate( mpimat_2->lvec, &ghostparents2 ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat_2->Mvctx,tempVec, ghostparents2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_2->Mvctx,tempVec, ghostparents2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( ghostparents2, &cpcol_2_parent ); CHKERRQ(ierr);

    /* get 'cpcol_2_gid': global index of each ghost */
    for(kk=0,j=my0;kk<nloc;kk++,j++){
      PetscScalar v = (PetscScalar)j;
      ierr = VecSetValues( tempVec, 1, &j, &v, INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tempVec ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tempVec ); CHKERRQ(ierr);
    ierr = VecDuplicate( mpimat_2->lvec, &ghostgids2 ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat_2->Mvctx,tempVec, ghostgids2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_2->Mvctx,tempVec, ghostgids2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( ghostgids2, &cpcol_2_gid ); CHKERRQ(ierr);

    ierr = VecDestroy( &tempVec ); CHKERRQ(ierr);

    /* look for deleted ghosts and add to table: ghosts that acquired a parent
       during this pass (old parent -1, new parent set) */
    ierr = GAMGTableCreate( 2*nghost_2, &gid_cpid ); CHKERRQ(ierr);
    for( cpid = 0 ; cpid < nghost_2 ; cpid++ ) {
      NState state = (NState)PetscRealPart(cpcol_2_state[cpid]);
      if( state==DELETED ) {
        PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]);
        PetscInt sgid_old = (PetscInt)PetscRealPart(cpcol_2_par_orig[cpid]);
        if( sgid_old == -1 && sgid_new != -1 ) {
          PetscInt gid = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]);
          ierr = GAMGTableAdd( &gid_cpid, gid, cpid ); CHKERRQ(ierr);
        }
      }
    }

    /* look for deleted ghosts and see if they moved - remove it */
    for(lid=0;lid<nloc;lid++){
      NState state = lid_state[lid];
      if( IS_SELECTED(state) ){
        PetscCDPos pos,last=PETSC_NULL;
        /* look for deleted ghosts and see if they moved */
        ierr = PetscCDGetHeadPos(aggs_2,lid,&pos); CHKERRQ(ierr);
        while(pos){
          PetscInt gid;
          ierr = PetscLLNGetID( pos, &gid ); CHKERRQ(ierr);

          if( gid < my0 || gid >= Iend ) {
            ierr = GAMGTableFind( &gid_cpid, gid, &cpid ); CHKERRQ(ierr);
            if( cpid != -1 ) {
              /* a moved ghost - */
              /* id_llist_2[lastid] = id_llist_2[flid];    /\* remove 'flid' from list *\/ */
              ierr = PetscCDRemoveNextNode( aggs_2, lid, last ); CHKERRQ(ierr);
            }
            else last = pos;
          }
          else last = pos;

          ierr = PetscCDGetNextPos(aggs_2,lid,&pos); CHKERRQ(ierr);
        } /* loop over list of deleted */
      } /* selected */
    }
    ierr = GAMGTableDestroy( &gid_cpid ); CHKERRQ(ierr);

    /* look at ghosts, see if they changed - and it */
    for( cpid = 0 ; cpid < nghost_2 ; cpid++ ){
      PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]);
      if( sgid_new >= my0 && sgid_new < Iend ) { /* this is mine */
        PetscInt gid = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]);
        PetscInt slid_new=sgid_new-my0,hav=0;
        PetscCDPos pos;
        /* search for this gid to see if I have it */
        ierr = PetscCDGetHeadPos(aggs_2,slid_new,&pos); CHKERRQ(ierr);
        while(pos){
          PetscInt gidj;
          ierr = PetscLLNGetID( pos, &gidj ); CHKERRQ(ierr);
          ierr = PetscCDGetNextPos(aggs_2,slid_new,&pos); CHKERRQ(ierr);

          if( gidj == gid ) { hav = 1; break; }
        }
        if( hav != 1 ){
          /* insert 'flidj' into head of llist */
          ierr = PetscCDAppendID( aggs_2, slid_new, gid ); CHKERRQ(ierr);
        }
      }
    }

    ierr = VecRestoreArray( mpimat_1->lvec, &cpcol_1_state ); CHKERRQ(ierr);
    ierr = VecRestoreArray( mpimat_2->lvec, &cpcol_2_state ); CHKERRQ(ierr);
    ierr = VecRestoreArray( ghostparents2, &cpcol_2_parent ); CHKERRQ(ierr);
    ierr = VecRestoreArray( ghostgids2, &cpcol_2_gid ); CHKERRQ(ierr);
    ierr = PetscFree( lid_cprowID_1 ); CHKERRQ(ierr);
    ierr = VecDestroy( &ghostgids2 ); CHKERRQ(ierr);
    ierr = VecDestroy( &ghostparents2 ); CHKERRQ(ierr);
    ierr = VecDestroy( &ghost_par_orig2 ); CHKERRQ(ierr);
  }

  ierr = PetscFree( lid_parent_gid ); CHKERRQ(ierr);
  ierr = PetscFree( lid_state ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetData_AGG - called if data is not set with PCSetCoordinates.
           Looks in Mat for near null space.
           Does not work for Stokes

   Input Parameter:
   . pc -
   . a_A - matrix to get (near) null space out of.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetData_AGG"
PetscErrorCode PCSetData_AGG( PC pc, Mat a_A )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  MatNullSpace   mnull;

  PetscFunctionBegin;
  ierr = MatGetNearNullSpace( a_A, &mnull ); CHKERRQ(ierr);
  if( !mnull ) {
    /* no near null space attached: fall back to constant vectors per block
       (PCSetCoordinates_AGG with NULL coords) */
    PetscInt bs,NN,MM;
    ierr = MatGetBlockSize( a_A, &bs ); CHKERRQ( ierr );
    ierr = MatGetLocalSize( a_A, &MM, &NN ); CHKERRQ( ierr ); assert(MM%bs==0);
    ierr = PCSetCoordinates_AGG( pc, bs, MM/bs, PETSC_NULL ); CHKERRQ(ierr);
  }
  else {
    /* copy the null-space vectors, column oriented, into pc_gamg->data
       (ownership of 'nullvec' transfers to pc_gamg) */
    PetscReal *nullvec;
    PetscBool has_const;
    PetscInt i,j,mlocal,nvec,bs;
    const Vec *vecs; const PetscScalar *v;
    ierr = MatGetLocalSize(a_A,&mlocal,PETSC_NULL);CHKERRQ(ierr);
    ierr = MatNullSpaceGetVecs( mnull, &has_const, &nvec, &vecs ); CHKERRQ(ierr);
    ierr = PetscMalloc((nvec+!!has_const)*mlocal*sizeof *nullvec,&nullvec);CHKERRQ(ierr);
    /* the (optional) constant vector becomes the first column */
    if (has_const) for (i=0; i<mlocal; i++) nullvec[i] = 1.0;
    for (i=0; i<nvec; i++) {
      ierr = VecGetArrayRead(vecs[i],&v);CHKERRQ(ierr);
      for (j=0; j<mlocal; j++) nullvec[(i+!!has_const)*mlocal + j] = PetscRealPart(v[j]);
      ierr = VecRestoreArrayRead(vecs[i],&v);CHKERRQ(ierr);
    }
    pc_gamg->data = nullvec;
    pc_gamg->data_cell_cols = (nvec+!!has_const);
    ierr = MatGetBlockSize( a_A, &bs ); CHKERRQ( ierr ); /* this does not work for Stokes */
    pc_gamg->data_cell_rows = bs;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   formProl0

   Input Parameter:
   . agg_llists - list of arrays with aggregates
   . bs - block size
   . nSAvec - column bs of new P
   . my0crs - global index of start of locals
   . data_stride - bs*(nloc nodes + ghost nodes)
   . data_in[data_stride*nSAvec] - local data on fine grid
   .
flid_fgid[data_stride/bs] - make local to global IDs, includes ghosts in 'locals_llist'
   Output Parameter:
   . a_data_out - in with fine grid data (w/ghosts), out with coarse grid data
   . a_Prol - prolongation operator
*/
#undef __FUNCT__
#define __FUNCT__ "formProl0"
static PetscErrorCode formProl0(const PetscCoarsenData *agg_llists, /* list from selected vertices of aggregate unselected vertices */
                                const PetscInt bs,          /* (row) block size */
                                const PetscInt nSAvec,      /* column bs */
                                const PetscInt my0crs,      /* global index of start of locals */
                                const PetscInt data_stride, /* (nloc+nghost)*bs */
                                PetscReal data_in[],        /* [data_stride][nSAvec] */
                                const PetscInt flid_fgid[], /* [data_stride/bs] */
                                PetscReal **a_data_out,
                                Mat a_Prol /* prolongation operator (output)*/
                                )
{
  PetscErrorCode ierr;
  PetscInt  Istart,my0,Iend,nloc,clid,flid,aggID,kk,jj,ii,mm,ndone,nSelected,minsz,nghosts,out_data_stride;
  MPI_Comm       wcomm = ((PetscObject)a_Prol)->comm;
  PetscMPIInt    mype, npe;
  PetscReal      *out_data;
  PetscCDPos     pos;
  GAMGHashTable  fgid_flid;

/* #define OUT_AGGS */
#ifdef OUT_AGGS
  static PetscInt llev = 0; char fname[32]; FILE *file = PETSC_NULL; PetscInt pM;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( a_Prol, &Istart, &Iend ); CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs; my0 = Istart/bs; assert((Iend-Istart)%bs==0);
  Iend /= bs;
  nghosts = data_stride/bs - nloc;

  /* hash table: ghost global ID -> local index (locals resolve by offset) */
  ierr = GAMGTableCreate( 2*nghosts, &fgid_flid ); CHKERRQ(ierr);
  for(kk=0;kk<nghosts;kk++) {
    ierr = GAMGTableAdd( &fgid_flid, flid_fgid[nloc+kk], nloc+kk ); CHKERRQ(ierr);
  }

#ifdef OUT_AGGS
  sprintf(fname,"aggs_%d_%d.m",llev++,mype);
  if(llev==1) {
    file = fopen(fname,"w");
  }
  MatGetSize( a_Prol, &pM, &jj );
#endif

  /* count selected -- same as number of cols of P */
  for(nSelected=mm=0;mm<nloc;mm++) {
    PetscBool ise;
    ierr = PetscCDEmptyAt( agg_llists, mm, &ise ); CHKERRQ(ierr);
    if( !ise ) nSelected++;
  }
  ierr = MatGetOwnershipRangeColumn( a_Prol, &ii, &jj ); CHKERRQ(ierr);
  assert((ii/nSAvec)==my0crs); assert(nSelected==(jj-ii)/nSAvec);

  /* aloc space for coarse point data (output); 1.e300 is a poison value
     asserted against below to catch unwritten entries */
  out_data_stride = nSelected*nSAvec;
  ierr = PetscMalloc( out_data_stride*nSAvec*sizeof(PetscReal), &out_data ); CHKERRQ(ierr);
  for(ii=0;ii<out_data_stride*nSAvec;ii++) {
    out_data[ii]=1.e300;
  }
  *a_data_out = out_data; /* output - stride nSelected*nSAvec */

  /* find points and set prolongation: one QR factorization per aggregate;
     Q becomes the P0 block, R becomes the coarse-grid data B_{i+1} */
  minsz = 100;
  ndone = 0;
  for( mm = clid = 0 ; mm < nloc ; mm++ ){
    ierr = PetscCDSizeAt( agg_llists, mm, &jj ); CHKERRQ(ierr);
    if( jj > 0 ) {
      const PetscInt lid = mm, cgid = my0crs + clid;
      PetscInt cids[100]; /* max bs */
      PetscBLASInt asz=jj,M=asz*bs,N=nSAvec,INFO;
      /* pad up to at least N rows so the QR is well defined for tiny aggregates */
      PetscBLASInt Mdata=M+((N-M>0)?N-M:0),LDA=Mdata,LWORK=N*bs;
      PetscScalar  *qqc,*qqr,*TAU,*WORK;
      PetscInt     *fids;
      PetscReal    *data;
      /* count agg */
      if( asz<minsz ) minsz = asz;

      /* get block */
      ierr = PetscMalloc( (Mdata*N)*sizeof(PetscScalar), &qqc ); CHKERRQ(ierr);
      ierr = PetscMalloc( (M*N)*sizeof(PetscScalar), &qqr ); CHKERRQ(ierr);
      ierr = PetscMalloc( N*sizeof(PetscScalar), &TAU ); CHKERRQ(ierr);
      ierr = PetscMalloc( LWORK*sizeof(PetscScalar), &WORK ); CHKERRQ(ierr);
      ierr = PetscMalloc( M*sizeof(PetscInt), &fids ); CHKERRQ(ierr);

      aggID = 0;
      ierr = PetscCDGetHeadPos(agg_llists,lid,&pos); CHKERRQ(ierr);
      while(pos){
        PetscInt gid1;
        ierr = PetscLLNGetID( pos, &gid1 ); CHKERRQ(ierr);
        ierr = PetscCDGetNextPos(agg_llists,lid,&pos); CHKERRQ(ierr);

        /* resolve gid to a local (or ghost-local) index */
        if( gid1 >= my0 && gid1 < Iend ) flid = gid1 - my0;
        else {
          ierr = GAMGTableFind( &fgid_flid, gid1, &flid ); CHKERRQ(ierr);
          assert(flid>=0);
        }
        /* copy in B_i matrix - column oriented */
        data = &data_in[flid*bs];
        for( kk = ii = 0; ii < bs ; ii++ ) {
          for( jj = 0; jj < N ; jj++ ) {
            PetscReal d = data[jj*data_stride + ii];
            qqc[jj*Mdata + aggID*bs + ii] = d;
          }
        }
#ifdef OUT_AGGS
        if(llev==1) {
          char str[] = "plot(%e,%e,'r*'), hold on,\n", col[] = "rgbkmc", sim[] = "*os+h>d<vx^";
          PetscInt MM,pi,pj;
          str[12] = col[(clid+17*mype)%6]; str[13] = sim[(clid+17*mype)%11];
          MM = (PetscInt)(PetscSqrtReal((PetscReal)pM));
          pj = gid1/MM; pi = gid1%MM;
          fprintf(file,str,(double)pi,(double)pj);
          /* fprintf(file,str,data[2*data_stride+1],-data[2*data_stride]); */
        }
#endif
        /* set fine IDs */
        for(kk=0;kk<bs;kk++) fids[aggID*bs + kk] = flid_fgid[flid]*bs + kk;

        aggID++;
      }

      /* pad with zeros */
      for( ii = asz*bs; ii < Mdata ; ii++ ) {
        for( jj = 0; jj < N ; jj++, kk++ ) {
          qqc[jj*Mdata + ii] = .0;
        }
      }

      ndone += aggID;
      /* QR */
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      LAPACKgeqrf_( &Mdata, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO );
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if( INFO != 0 ) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"xGEQRS error");
      /* get R - column oriented - output B_{i+1} */
      {
        PetscReal *data = &out_data[clid*nSAvec];
        for( jj = 0; jj < nSAvec ; jj++ ) {
          for( ii = 0; ii < nSAvec ; ii++ ) {
            assert(data[jj*out_data_stride + ii] == 1.e300);
            if( ii <= jj ) data[jj*out_data_stride + ii] = PetscRealPart(qqc[jj*Mdata + ii]);
            else data[jj*out_data_stride + ii] = 0.;
          }
        }
      }

      /* get Q - row oriented */
      LAPACKungqr_( &Mdata, &N, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO );
      if( INFO != 0 ) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"xORGQR error arg %d",-INFO);

      /* transpose Q (column-major LAPACK output) into row-major qqr for MatSetValues */
      for( ii = 0 ; ii < M ; ii++ ){
        for( jj = 0 ; jj < N ; jj++ ) {
          qqr[N*ii + jj] = qqc[jj*Mdata + ii];
        }
      }

      /* add diagonal block of P0 */
      for(kk=0;kk<N;kk++) {
        cids[kk] = N*cgid + kk; /* global col IDs in P0 */
      }
      ierr = MatSetValues(a_Prol,M,fids,N,cids,qqr,INSERT_VALUES); CHKERRQ(ierr);

      ierr = PetscFree( qqc ); CHKERRQ(ierr);
      ierr = PetscFree( qqr ); CHKERRQ(ierr);
      ierr = PetscFree( TAU ); CHKERRQ(ierr);
      ierr = PetscFree( WORK ); CHKERRQ(ierr);
      ierr = PetscFree( fids ); CHKERRQ(ierr);
      clid++;
    } /* coarse agg */
  } /* for all fine nodes */
  ierr = MatAssemblyBegin(a_Prol,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(a_Prol,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* ierr = MPI_Allreduce( &ndone, &ii, 1, MPIU_INT, MPIU_SUM, wcomm ); */
  /* MatGetSize( a_Prol, &kk, &jj ); */
  /* ierr = MPI_Allreduce( &minsz, &jj, 1, MPIU_INT, MPIU_MIN, wcomm ); */
  /* PetscPrintf(wcomm," **** [%d]%s %d total done, %d nodes (%d local done), min agg. size = %d\n",mype,__FUNCT__,ii,kk/bs,ndone,jj); */

#ifdef OUT_AGGS
  if(llev==1) fclose(file);
#endif
  ierr = GAMGTableDestroy( &fgid_flid ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCGAMGgraph_AGG

   Input Parameter:
   . pc - this
   . Amat - matrix on this fine level
   Output Parameter:
   .
a_Gmat - 949 */ 950 #undef __FUNCT__ 951 #define __FUNCT__ "PCGAMGgraph_AGG" 952 PetscErrorCode PCGAMGgraph_AGG( PC pc, 953 const Mat Amat, 954 Mat *a_Gmat 955 ) 956 { 957 PetscErrorCode ierr; 958 PC_MG *mg = (PC_MG*)pc->data; 959 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 960 const PetscInt verbose = pc_gamg->verbose; 961 const PetscReal vfilter = pc_gamg->threshold; 962 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 963 PetscMPIInt mype,npe; 964 Mat Gmat; 965 MPI_Comm wcomm = ((PetscObject)Amat)->comm; 966 PetscBool /* set,flg , */symm; 967 968 PetscFunctionBegin; 969 #if defined PETSC_USE_LOG 970 ierr = PetscLogEventBegin(PC_GAMGGgraph_AGG,0,0,0,0);CHKERRQ(ierr); 971 #endif 972 ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr); 973 ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr); 974 975 /* ierr = MatIsSymmetricKnown(Amat, &set, &flg); CHKERRQ(ierr); || !(set && flg) -- this causes lot of symm calls */ 976 symm = (PetscBool)(pc_gamg_agg->sym_graph); /* && !pc_gamg_agg->square_graph; */ 977 978 ierr = PCGAMGCreateGraph( Amat, &Gmat ); CHKERRQ( ierr ); 979 ierr = PCGAMGFilterGraph( &Gmat, vfilter, symm, verbose ); CHKERRQ( ierr ); 980 981 *a_Gmat = Gmat; 982 983 #if defined PETSC_USE_LOG 984 ierr = PetscLogEventEnd(PC_GAMGGgraph_AGG,0,0,0,0);CHKERRQ(ierr); 985 #endif 986 PetscFunctionReturn(0); 987 } 988 989 /* -------------------------------------------------------------------------- */ 990 /* 991 PCGAMGCoarsen_AGG 992 993 Input Parameter: 994 . a_pc - this 995 Input/Output Parameter: 996 . a_Gmat1 - graph on this fine level - coarsening can change this (squares it) 997 Output Parameter: 998 . 
agg_lists - list of aggregates 999 */ 1000 #undef __FUNCT__ 1001 #define __FUNCT__ "PCGAMGCoarsen_AGG" 1002 PetscErrorCode PCGAMGCoarsen_AGG( PC a_pc, 1003 Mat *a_Gmat1, 1004 PetscCoarsenData **agg_lists 1005 ) 1006 { 1007 PetscErrorCode ierr; 1008 PC_MG *mg = (PC_MG*)a_pc->data; 1009 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1010 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 1011 Mat mat,Gmat2, Gmat1 = *a_Gmat1; /* squared graph */ 1012 IS perm; 1013 PetscInt Ii,nloc,bs,n,m; 1014 PetscInt *permute; 1015 PetscBool *bIndexSet; 1016 MatCoarsen crs; 1017 MPI_Comm wcomm = ((PetscObject)Gmat1)->comm; 1018 PetscMPIInt mype,npe; 1019 1020 PetscFunctionBegin; 1021 #if defined PETSC_USE_LOG 1022 ierr = PetscLogEventBegin(PC_GAMGCoarsen_AGG,0,0,0,0);CHKERRQ(ierr); 1023 #endif 1024 ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr); 1025 ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr); 1026 ierr = MatGetLocalSize( Gmat1, &n, &m ); CHKERRQ(ierr); 1027 ierr = MatGetBlockSize( Gmat1, &bs ); CHKERRQ(ierr); assert(bs==1); 1028 nloc = n/bs; 1029 1030 if( pc_gamg_agg->square_graph ) { 1031 /* ierr = MatMatTransposeMult( Gmat1, Gmat1, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Gmat2 ); */ 1032 ierr = MatTransposeMatMult( Gmat1, Gmat1, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Gmat2 ); 1033 CHKERRQ(ierr); 1034 } 1035 else Gmat2 = Gmat1; 1036 1037 /* get MIS aggs */ 1038 /* randomize */ 1039 ierr = PetscMalloc( nloc*sizeof(PetscInt), &permute ); CHKERRQ(ierr); 1040 ierr = PetscMalloc( nloc*sizeof(PetscBool), &bIndexSet ); CHKERRQ(ierr); 1041 for ( Ii = 0; Ii < nloc ; Ii++ ){ 1042 bIndexSet[Ii] = PETSC_FALSE; 1043 permute[Ii] = Ii; 1044 } 1045 srand(1); /* make deterministic */ 1046 for ( Ii = 0; Ii < nloc ; Ii++ ) { 1047 PetscInt iSwapIndex = rand()%nloc; 1048 if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii) { 1049 PetscInt iTemp = permute[iSwapIndex]; 1050 permute[iSwapIndex] = permute[Ii]; 1051 permute[Ii] = iTemp; 1052 bIndexSet[iSwapIndex] = PETSC_TRUE; 1053 } 1054 } 1055 ierr = 
PetscFree( bIndexSet ); CHKERRQ(ierr); 1056 1057 ierr = ISCreateGeneral(PETSC_COMM_SELF, nloc, permute, PETSC_USE_POINTER, &perm); 1058 CHKERRQ(ierr); 1059 #if defined PETSC_GAMG_USE_LOG 1060 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET4],0,0,0,0);CHKERRQ(ierr); 1061 #endif 1062 ierr = MatCoarsenCreate( wcomm, &crs ); CHKERRQ(ierr); 1063 /* ierr = MatCoarsenSetType( crs, MATCOARSENMIS ); CHKERRQ(ierr); */ 1064 ierr = MatCoarsenSetFromOptions( crs ); CHKERRQ(ierr); 1065 ierr = MatCoarsenSetGreedyOrdering( crs, perm ); CHKERRQ(ierr); 1066 ierr = MatCoarsenSetAdjacency( crs, Gmat2 ); CHKERRQ(ierr); 1067 ierr = MatCoarsenSetVerbose( crs, pc_gamg->verbose ); CHKERRQ(ierr); 1068 ierr = MatCoarsenSetStrictAggs( crs, PETSC_TRUE ); CHKERRQ(ierr); 1069 ierr = MatCoarsenApply( crs ); CHKERRQ(ierr); 1070 ierr = MatCoarsenGetData( crs, agg_lists ); CHKERRQ(ierr); /* output */ 1071 ierr = MatCoarsenDestroy( &crs ); CHKERRQ(ierr); 1072 1073 ierr = ISDestroy( &perm ); CHKERRQ(ierr); 1074 ierr = PetscFree( permute ); CHKERRQ(ierr); 1075 #if defined PETSC_GAMG_USE_LOG 1076 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET4],0,0,0,0);CHKERRQ(ierr); 1077 #endif 1078 /* smooth aggs */ 1079 if( Gmat2 != Gmat1 ) { 1080 const PetscCoarsenData *llist = *agg_lists; 1081 ierr = smoothAggs( Gmat2, Gmat1, *agg_lists ); CHKERRQ(ierr); 1082 ierr = MatDestroy( &Gmat1 ); CHKERRQ(ierr); 1083 *a_Gmat1 = Gmat2; /* output */ 1084 ierr = PetscCDGetMat( llist, &mat ); CHKERRQ(ierr); 1085 if(mat) SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "Auxilary matrix with squared graph????"); 1086 } 1087 else { 1088 const PetscCoarsenData *llist = *agg_lists; 1089 /* see if we have a matrix that takes pecedence (returned from MatCoarsenAppply) */ 1090 ierr = PetscCDGetMat( llist, &mat ); CHKERRQ(ierr); 1091 if( mat ) { 1092 ierr = MatDestroy( &Gmat1 ); CHKERRQ(ierr); 1093 *a_Gmat1 = mat; /* output */ 1094 } 1095 } 1096 #if defined PETSC_USE_LOG 1097 ierr = 
PetscLogEventEnd(PC_GAMGCoarsen_AGG,0,0,0,0);CHKERRQ(ierr); 1098 #endif 1099 PetscFunctionReturn(0); 1100 } 1101 1102 /* -------------------------------------------------------------------------- */ 1103 /* 1104 PCGAMGProlongator_AGG 1105 1106 Input Parameter: 1107 . pc - this 1108 . Amat - matrix on this fine level 1109 . Graph - used to get ghost data for nodes in 1110 . agg_lists - list of aggregates 1111 Output Parameter: 1112 . a_P_out - prolongation operator to the next level 1113 */ 1114 #undef __FUNCT__ 1115 #define __FUNCT__ "PCGAMGProlongator_AGG" 1116 PetscErrorCode PCGAMGProlongator_AGG( PC pc, 1117 const Mat Amat, 1118 const Mat Gmat, 1119 PetscCoarsenData *agg_lists, 1120 Mat *a_P_out 1121 ) 1122 { 1123 PC_MG *mg = (PC_MG*)pc->data; 1124 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1125 const PetscInt verbose = pc_gamg->verbose; 1126 const PetscInt data_cols = pc_gamg->data_cell_cols; 1127 PetscErrorCode ierr; 1128 PetscInt Istart,Iend,nloc,ii,jj,kk,my0,nLocalSelected,bs; 1129 Mat Prol; 1130 PetscMPIInt mype, npe; 1131 MPI_Comm wcomm = ((PetscObject)Amat)->comm; 1132 const PetscInt col_bs = data_cols; 1133 PetscReal *data_w_ghost; 1134 PetscInt myCrs0, nbnodes=0, *flid_fgid; 1135 1136 PetscFunctionBegin; 1137 #if defined PETSC_USE_LOG 1138 ierr = PetscLogEventBegin(PC_GAMGProlongator_AGG,0,0,0,0);CHKERRQ(ierr); 1139 #endif 1140 ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr); 1141 ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr); 1142 ierr = MatGetOwnershipRange( Amat, &Istart, &Iend ); CHKERRQ(ierr); 1143 ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ( ierr ); 1144 nloc = (Iend-Istart)/bs; my0 = Istart/bs; assert((Iend-Istart)%bs==0); 1145 1146 /* get 'nLocalSelected' */ 1147 for( ii=0, nLocalSelected = 0 ; ii < nloc ; ii++ ){ 1148 PetscBool ise; 1149 /* filter out singletons 0 or 1? 
*/ 1150 ierr = PetscCDEmptyAt( agg_lists, ii, &ise ); CHKERRQ(ierr); 1151 if( !ise ) nLocalSelected++; 1152 } 1153 1154 /* create prolongator, create P matrix */ 1155 ierr = MatCreate( wcomm, &Prol ); CHKERRQ(ierr); 1156 ierr = MatSetSizes(Prol,nloc*bs,nLocalSelected*col_bs,PETSC_DETERMINE,PETSC_DETERMINE); 1157 CHKERRQ(ierr); 1158 ierr = MatSetBlockSizes( Prol, bs, col_bs ); CHKERRQ(ierr); 1159 ierr = MatSetType( Prol, MATAIJ ); CHKERRQ(ierr); 1160 ierr = MatSeqAIJSetPreallocation( Prol, data_cols, PETSC_NULL); CHKERRQ(ierr); 1161 ierr = MatMPIAIJSetPreallocation(Prol,data_cols, PETSC_NULL,data_cols, PETSC_NULL);CHKERRQ(ierr); 1162 /* nloc*bs, nLocalSelected*col_bs, */ 1163 /* PETSC_DETERMINE, PETSC_DETERMINE, */ 1164 /* data_cols, PETSC_NULL, data_cols, PETSC_NULL, */ 1165 /* &Prol ); */ 1166 1167 /* can get all points "removed" */ 1168 ierr = MatGetSize( Prol, &kk, &ii ); CHKERRQ(ierr); 1169 if( ii==0 ) { 1170 if( verbose ) { 1171 PetscPrintf(wcomm,"[%d]%s no selected points on coarse grid\n",mype,__FUNCT__); 1172 } 1173 ierr = MatDestroy( &Prol ); CHKERRQ(ierr); 1174 *a_P_out = PETSC_NULL; /* out */ 1175 PetscFunctionReturn(0); 1176 } 1177 if( verbose ) { 1178 PetscPrintf(wcomm,"\t\t[%d]%s New grid %d nodes\n",mype,__FUNCT__,ii/col_bs); 1179 } 1180 ierr = MatGetOwnershipRangeColumn( Prol, &myCrs0, &kk ); CHKERRQ(ierr); 1181 1182 assert((kk-myCrs0)%col_bs==0); 1183 myCrs0 = myCrs0/col_bs; 1184 assert((kk/col_bs-myCrs0)==nLocalSelected); 1185 1186 /* create global vector of data in 'data_w_ghost' */ 1187 #if defined PETSC_GAMG_USE_LOG 1188 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET7],0,0,0,0);CHKERRQ(ierr); 1189 #endif 1190 if (npe > 1) { /* */ 1191 PetscReal *tmp_gdata,*tmp_ldata,*tp2; 1192 ierr = PetscMalloc( nloc*sizeof(PetscReal), &tmp_ldata ); CHKERRQ(ierr); 1193 for( jj = 0 ; jj < data_cols ; jj++ ){ 1194 for( kk = 0 ; kk < bs ; kk++) { 1195 PetscInt ii,stride; 1196 const PetscReal *tp = pc_gamg->data + jj*bs*nloc + kk; 1197 for( ii = 0 ; ii < 
nloc ; ii++, tp += bs ){ 1198 tmp_ldata[ii] = *tp; 1199 } 1200 ierr = PCGAMGGetDataWithGhosts( Gmat, 1, tmp_ldata, &stride, &tmp_gdata ); 1201 CHKERRQ(ierr); 1202 1203 if(jj==0 && kk==0) { /* now I know how many todal nodes - allocate */ 1204 ierr = PetscMalloc( stride*bs*data_cols*sizeof(PetscReal), &data_w_ghost ); CHKERRQ(ierr); 1205 nbnodes = bs*stride; 1206 } 1207 tp2 = data_w_ghost + jj*bs*stride + kk; 1208 for( ii = 0 ; ii < stride ; ii++, tp2 += bs ){ 1209 *tp2 = tmp_gdata[ii]; 1210 } 1211 ierr = PetscFree( tmp_gdata ); CHKERRQ(ierr); 1212 } 1213 } 1214 ierr = PetscFree( tmp_ldata ); CHKERRQ(ierr); 1215 } 1216 else { 1217 nbnodes = bs*nloc; 1218 data_w_ghost = (PetscReal*)pc_gamg->data; 1219 } 1220 1221 /* get P0 */ 1222 if( npe > 1 ){ 1223 PetscReal *fid_glid_loc,*fiddata; 1224 PetscInt stride; 1225 1226 ierr = PetscMalloc( nloc*sizeof(PetscReal), &fid_glid_loc ); CHKERRQ(ierr); 1227 for(kk=0;kk<nloc;kk++) fid_glid_loc[kk] = (PetscReal)(my0+kk); 1228 ierr = PCGAMGGetDataWithGhosts( Gmat, 1, fid_glid_loc, &stride, &fiddata ); 1229 CHKERRQ(ierr); 1230 ierr = PetscMalloc( stride*sizeof(PetscInt), &flid_fgid ); CHKERRQ(ierr); 1231 for(kk=0;kk<stride;kk++) flid_fgid[kk] = (PetscInt)fiddata[kk]; 1232 ierr = PetscFree( fiddata ); CHKERRQ(ierr); 1233 1234 assert(stride==nbnodes/bs); 1235 ierr = PetscFree( fid_glid_loc ); CHKERRQ(ierr); 1236 } 1237 else { 1238 ierr = PetscMalloc( nloc*sizeof(PetscInt), &flid_fgid ); CHKERRQ(ierr); 1239 for(kk=0;kk<nloc;kk++) flid_fgid[kk] = my0 + kk; 1240 } 1241 #if defined PETSC_GAMG_USE_LOG 1242 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET7],0,0,0,0);CHKERRQ(ierr); 1243 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET8],0,0,0,0);CHKERRQ(ierr); 1244 #endif 1245 { 1246 PetscReal *data_out = PETSC_NULL; 1247 ierr = formProl0( agg_lists, bs, data_cols, myCrs0, nbnodes, 1248 data_w_ghost, flid_fgid, &data_out, Prol ); 1249 CHKERRQ(ierr); 1250 ierr = PetscFree( pc_gamg->data ); CHKERRQ( ierr ); 1251 pc_gamg->data = 
data_out; 1252 pc_gamg->data_cell_rows = data_cols; 1253 pc_gamg->data_sz = data_cols*data_cols*nLocalSelected; 1254 } 1255 #if defined PETSC_GAMG_USE_LOG 1256 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET8],0,0,0,0);CHKERRQ(ierr); 1257 #endif 1258 if (npe > 1) ierr = PetscFree( data_w_ghost ); CHKERRQ(ierr); 1259 ierr = PetscFree( flid_fgid ); CHKERRQ(ierr); 1260 1261 *a_P_out = Prol; /* out */ 1262 #if defined PETSC_USE_LOG 1263 ierr = PetscLogEventEnd(PC_GAMGProlongator_AGG,0,0,0,0);CHKERRQ(ierr); 1264 #endif 1265 PetscFunctionReturn(0); 1266 } 1267 1268 /* -------------------------------------------------------------------------- */ 1269 /* 1270 PCGAMGOptprol_AGG 1271 1272 Input Parameter: 1273 . pc - this 1274 . Amat - matrix on this fine level 1275 In/Output Parameter: 1276 . a_P_out - prolongation operator to the next level 1277 */ 1278 #undef __FUNCT__ 1279 #define __FUNCT__ "PCGAMGOptprol_AGG" 1280 PetscErrorCode PCGAMGOptprol_AGG( PC pc, 1281 const Mat Amat, 1282 Mat *a_P 1283 ) 1284 { 1285 PetscErrorCode ierr; 1286 PC_MG *mg = (PC_MG*)pc->data; 1287 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1288 const PetscInt verbose = pc_gamg->verbose; 1289 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 1290 PetscInt jj; 1291 PetscMPIInt mype,npe; 1292 Mat Prol = *a_P; 1293 MPI_Comm wcomm = ((PetscObject)Amat)->comm; 1294 1295 PetscFunctionBegin; 1296 #if defined PETSC_USE_LOG 1297 ierr = PetscLogEventBegin(PC_GAMGOptprol_AGG,0,0,0,0);CHKERRQ(ierr); 1298 #endif 1299 ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr); 1300 ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr); 1301 1302 /* smooth P0 */ 1303 for( jj = 0 ; jj < pc_gamg_agg->nsmooths ; jj++ ){ 1304 Mat tMat; 1305 Vec diag; 1306 PetscReal alpha, emax, emin; 1307 #if defined PETSC_GAMG_USE_LOG 1308 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET9],0,0,0,0);CHKERRQ(ierr); 1309 #endif 1310 if( jj == 0 ) { 1311 KSP eksp; 1312 Vec bb, xx; 1313 PC pc; 1314 ierr = MatGetVecs( Amat, &bb, 0 ); 
CHKERRQ(ierr); 1315 ierr = MatGetVecs( Amat, &xx, 0 ); CHKERRQ(ierr); 1316 { 1317 PetscRandom rctx; 1318 ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr); 1319 ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr); 1320 ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr); 1321 ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr); 1322 } 1323 ierr = KSPCreate(wcomm,&eksp); CHKERRQ(ierr); 1324 ierr = KSPAppendOptionsPrefix( eksp, "gamg_est_"); CHKERRQ(ierr); 1325 ierr = KSPSetFromOptions( eksp ); CHKERRQ(ierr); 1326 ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr); 1327 ierr = KSPSetOperators( eksp, Amat, Amat, SAME_NONZERO_PATTERN ); 1328 CHKERRQ( ierr ); 1329 ierr = KSPGetPC( eksp, &pc ); CHKERRQ( ierr ); 1330 ierr = PCSetType( pc, PCJACOBI ); CHKERRQ(ierr); /* smoother */ 1331 ierr = KSPSetTolerances(eksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,10); 1332 CHKERRQ(ierr); 1333 ierr = KSPSetNormType( eksp, KSP_NORM_NONE ); CHKERRQ(ierr); 1334 ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr); 1335 1336 /* solve - keep stuff out of logging */ 1337 ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr); 1338 ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr); 1339 ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr); 1340 ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr); 1341 ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr); 1342 1343 ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr); 1344 if( verbose ) { 1345 PetscPrintf(wcomm,"\t\t\t%s smooth P0: max eigen=%e min=%e PC=%s\n", 1346 __FUNCT__,emax,emin,PCJACOBI); 1347 } 1348 ierr = VecDestroy( &xx ); CHKERRQ(ierr); 1349 ierr = VecDestroy( &bb ); CHKERRQ(ierr); 1350 ierr = KSPDestroy( &eksp ); CHKERRQ(ierr); 1351 1352 if( pc_gamg->emax_id == -1 ) { 1353 ierr = PetscObjectComposedDataRegister( &pc_gamg->emax_id ); 1354 assert(pc_gamg->emax_id != -1 ); 1355 } 1356 ierr = PetscObjectComposedDataSetScalar( (PetscObject)Amat, pc_gamg->emax_id, emax ); 
CHKERRQ(ierr); 1357 } 1358 1359 /* smooth P1 := (I - omega/lam D^{-1}A)P0 */ 1360 ierr = MatMatMult( Amat, Prol, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tMat ); CHKERRQ(ierr); 1361 ierr = MatGetVecs( Amat, &diag, 0 ); CHKERRQ(ierr); 1362 ierr = MatGetDiagonal( Amat, diag ); CHKERRQ(ierr); /* effectively PCJACOBI */ 1363 ierr = VecReciprocal( diag ); CHKERRQ(ierr); 1364 ierr = MatDiagonalScale( tMat, diag, 0 ); CHKERRQ(ierr); 1365 ierr = VecDestroy( &diag ); CHKERRQ(ierr); 1366 alpha = -1.4/emax; 1367 ierr = MatAYPX( tMat, alpha, Prol, SUBSET_NONZERO_PATTERN ); CHKERRQ(ierr); 1368 ierr = MatDestroy( &Prol ); CHKERRQ(ierr); 1369 Prol = tMat; 1370 #if defined PETSC_GAMG_USE_LOG 1371 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET9],0,0,0,0);CHKERRQ(ierr); 1372 #endif 1373 } 1374 #if defined PETSC_USE_LOG 1375 ierr = PetscLogEventEnd(PC_GAMGOptprol_AGG,0,0,0,0);CHKERRQ(ierr); 1376 #endif 1377 *a_P = Prol; 1378 1379 PetscFunctionReturn(0); 1380 } 1381 1382 /* -------------------------------------------------------------------------- */ 1383 /* 1384 PCGAMGKKTProl_AGG 1385 1386 Input Parameter: 1387 . pc - this 1388 . Prol11 - matrix on this fine level 1389 . A21 - matrix on this fine level 1390 In/Output Parameter: 1391 . 
a_P22 - prolongation operator to the next level 1392 */ 1393 #undef __FUNCT__ 1394 #define __FUNCT__ "PCGAMGKKTProl_AGG" 1395 PetscErrorCode PCGAMGKKTProl_AGG( PC pc, 1396 const Mat Prol11, 1397 const Mat A21, 1398 Mat *a_P22 1399 ) 1400 { 1401 PetscErrorCode ierr; 1402 PC_MG *mg = (PC_MG*)pc->data; 1403 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1404 const PetscInt verbose = pc_gamg->verbose; 1405 /* PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; */ 1406 PetscMPIInt mype,npe; 1407 Mat Prol22,Tmat,Gmat; 1408 MPI_Comm wcomm = ((PetscObject)pc)->comm; 1409 PetscCoarsenData *agg_lists; 1410 1411 PetscFunctionBegin; 1412 #if defined PETSC_USE_LOG 1413 ierr = PetscLogEventBegin(PC_GAMGKKTProl_AGG,0,0,0,0); CHKERRQ(ierr); 1414 #endif 1415 ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr); 1416 ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr); 1417 1418 /* form C graph */ 1419 ierr = MatMatMult( A21, Prol11, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Tmat); CHKERRQ(ierr); 1420 ierr = MatMatTransposeMult(Tmat, Tmat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Gmat ); CHKERRQ(ierr); 1421 ierr = MatDestroy(&Tmat); CHKERRQ(ierr); 1422 ierr = PCGAMGFilterGraph(&Gmat, 0.0, PETSC_FALSE, verbose); CHKERRQ(ierr); 1423 1424 /* coarsen constraints */ 1425 { 1426 MatCoarsen crs; 1427 ierr = MatCoarsenCreate( wcomm, &crs ); CHKERRQ(ierr); 1428 ierr = MatCoarsenSetType( crs, MATCOARSENMIS ); CHKERRQ(ierr); 1429 ierr = MatCoarsenSetAdjacency( crs, Gmat ); CHKERRQ(ierr); 1430 ierr = MatCoarsenSetVerbose( crs, verbose ); CHKERRQ(ierr); 1431 ierr = MatCoarsenSetStrictAggs( crs, PETSC_TRUE ); CHKERRQ(ierr); 1432 ierr = MatCoarsenApply( crs ); CHKERRQ(ierr); 1433 ierr = MatCoarsenGetData( crs, &agg_lists ); CHKERRQ(ierr); 1434 ierr = MatCoarsenDestroy( &crs ); CHKERRQ(ierr); 1435 } 1436 1437 /* form simple prolongation 'Prol22' */ 1438 { 1439 PetscInt ii,mm,clid,my0,nloc,nLocalSelected; 1440 PetscScalar val = 1.0; 1441 /* get 'nLocalSelected' */ 1442 ierr = MatGetLocalSize( Gmat, &nloc, &ii ); 
CHKERRQ(ierr); 1443 for( ii=0, nLocalSelected = 0 ; ii < nloc ; ii++ ){ 1444 PetscBool ise; 1445 /* filter out singletons 0 or 1? */ 1446 ierr = PetscCDEmptyAt( agg_lists, ii, &ise ); CHKERRQ(ierr); 1447 if( !ise ) nLocalSelected++; 1448 } 1449 1450 ierr = MatCreate(wcomm,&Prol22);CHKERRQ(ierr); 1451 ierr = MatSetSizes( Prol22,nloc, nLocalSelected, 1452 PETSC_DETERMINE, PETSC_DETERMINE); 1453 CHKERRQ(ierr); 1454 ierr = MatSetType( Prol22, MATAIJ ); CHKERRQ(ierr); 1455 ierr = MatSeqAIJSetPreallocation(Prol22,1,PETSC_NULL); CHKERRQ(ierr); 1456 ierr = MatMPIAIJSetPreallocation(Prol22,1,PETSC_NULL,1,PETSC_NULL); 1457 CHKERRQ(ierr); 1458 /* ierr = MatCreateAIJ( wcomm, */ 1459 /* nloc, nLocalSelected, */ 1460 /* PETSC_DETERMINE, PETSC_DETERMINE, */ 1461 /* 1, PETSC_NULL, 1, PETSC_NULL, */ 1462 /* &Prol22 ); */ 1463 1464 ierr = MatGetOwnershipRange( Prol22, &my0, &ii ); CHKERRQ(ierr); 1465 nloc = ii - my0; 1466 1467 /* make aggregates */ 1468 for( mm = clid = 0 ; mm < nloc ; mm++ ){ 1469 ierr = PetscCDSizeAt( agg_lists, mm, &ii ); CHKERRQ(ierr); 1470 if( ii > 0 ) { 1471 PetscInt asz=ii,cgid=my0+clid,rids[1000]; 1472 PetscCDPos pos; 1473 if(asz>1000)SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Very large aggregate: %d",asz); 1474 ii = 0; 1475 ierr = PetscCDGetHeadPos(agg_lists,mm,&pos); CHKERRQ(ierr); 1476 while(pos){ 1477 PetscInt gid1; 1478 ierr = PetscLLNGetID( pos, &gid1 ); CHKERRQ(ierr); 1479 ierr = PetscCDGetNextPos(agg_lists,mm,&pos); CHKERRQ(ierr); 1480 1481 rids[ii++] = gid1; 1482 } 1483 assert(ii==asz); 1484 /* add diagonal block of P0 */ 1485 ierr = MatSetValues(Prol22,asz,rids,1,&cgid,&val,INSERT_VALUES); CHKERRQ(ierr); 1486 1487 clid++; 1488 } /* coarse agg */ 1489 } /* for all fine nodes */ 1490 ierr = MatAssemblyBegin(Prol22,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr); 1491 ierr = MatAssemblyEnd(Prol22,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr); 1492 } 1493 1494 /* clean up */ 1495 ierr = MatDestroy( &Gmat ); CHKERRQ(ierr); 1496 ierr = PetscCDDestroy( agg_lists ); CHKERRQ(ierr); 
1497 #if defined PETSC_USE_LOG 1498 ierr = PetscLogEventEnd(PC_GAMGKKTProl_AGG,0,0,0,0);CHKERRQ(ierr); 1499 #endif 1500 *a_P22 = Prol22; 1501 1502 PetscFunctionReturn(0); 1503 } 1504 1505 /* -------------------------------------------------------------------------- */ 1506 /* 1507 PCCreateGAMG_AGG 1508 1509 Input Parameter: 1510 . pc - 1511 */ 1512 #undef __FUNCT__ 1513 #define __FUNCT__ "PCCreateGAMG_AGG" 1514 PetscErrorCode PCCreateGAMG_AGG( PC pc ) 1515 { 1516 PetscErrorCode ierr; 1517 PC_MG *mg = (PC_MG*)pc->data; 1518 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1519 PC_GAMG_AGG *pc_gamg_agg; 1520 1521 PetscFunctionBegin; 1522 /* create sub context for SA */ 1523 ierr = PetscNewLog( pc, PC_GAMG_AGG, &pc_gamg_agg ); CHKERRQ(ierr); 1524 assert(!pc_gamg->subctx); 1525 pc_gamg->subctx = pc_gamg_agg; 1526 1527 pc->ops->setfromoptions = PCSetFromOptions_GAMG_AGG; 1528 pc->ops->destroy = PCDestroy_AGG; 1529 /* reset does not do anything; setup not virtual */ 1530 1531 /* set internal function pointers */ 1532 pc_gamg->graph = PCGAMGgraph_AGG; 1533 pc_gamg->coarsen = PCGAMGCoarsen_AGG; 1534 pc_gamg->prolongator = PCGAMGProlongator_AGG; 1535 pc_gamg->optprol = PCGAMGOptprol_AGG; 1536 pc_gamg->formkktprol = PCGAMGKKTProl_AGG; 1537 1538 pc_gamg->createdefaultdata = PCSetData_AGG; 1539 1540 ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc, 1541 "PCSetCoordinates_C", 1542 "PCSetCoordinates_AGG", 1543 PCSetCoordinates_AGG); 1544 PetscFunctionReturn(0); 1545 } 1546