1 /* 2 GAMG geometric-algebric multigrid PC - Mark Adams 2011 3 */ 4 #include <petsc/private/matimpl.h> 5 #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/ 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "MatCollapseRow" 9 /* 10 Produces a set of block column indices of the matrix row, one for each block represented in the original row 11 12 n - the number of block indices in cc[] 13 cc - the block indices (must be large enough to contain the indices) 14 */ 15 PETSC_STATIC_INLINE PetscErrorCode MatCollapseRow(Mat Amat,PetscInt row,PetscInt bs,PetscInt *n,PetscInt *cc) 16 { 17 PetscInt cnt = -1,nidx,j; 18 const PetscInt *idx; 19 PetscErrorCode ierr; 20 21 PetscFunctionBegin; 22 ierr = MatGetRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr); 23 if (nidx) { 24 cnt = 0; 25 cc[cnt] = idx[0]/bs; 26 for (j=1; j<nidx; j++) { 27 if (cc[cnt] < idx[j]/bs) cc[++cnt] = idx[j]/bs; 28 } 29 } 30 ierr = MatRestoreRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr); 31 *n = cnt+1; 32 PetscFunctionReturn(0); 33 } 34 35 #undef __FUNCT__ 36 #define __FUNCT__ "MatCollapseRows" 37 /* 38 Produces a set of block column indices of the matrix block row, one for each block represented in the original set of rows 39 40 ncollapsed - the number of block indices 41 collapsed - the block indices (must be large enough to contain the indices) 42 */ 43 PETSC_STATIC_INLINE PetscErrorCode MatCollapseRows(Mat Amat,PetscInt start,PetscInt bs,PetscInt *w0,PetscInt *w1,PetscInt *w2,PetscInt *ncollapsed,PetscInt **collapsed) 44 { 45 PetscInt i,nprev,*cprev = w0,ncur = 0,*ccur = w1,*merged = w2,*cprevtmp; 46 PetscErrorCode ierr; 47 48 PetscFunctionBegin; 49 ierr = MatCollapseRow(Amat,start,bs,&nprev,cprev);CHKERRQ(ierr); 50 for (i=start+1; i<start+bs; i++) { 51 ierr = MatCollapseRow(Amat,i,bs,&ncur,ccur);CHKERRQ(ierr); 52 ierr = PetscMergeIntArray(nprev,cprev,ncur,ccur,&nprev,&merged);CHKERRQ(ierr); 53 cprevtmp = cprev; cprev = merged; merged = cprevtmp; 54 } 55 *ncollapsed = nprev; 56 if (collapsed) *collapsed = cprev; 57 PetscFunctionReturn(0); 58 } 59 60 61 /* -------------------------------------------------------------------------- */ 62 /* 63 PCGAMGCreateGraph - create simple scaled scalar graph from matrix 64 65 Input Parameter: 66 . Amat - matrix 67 Output Parameter: 68 . a_Gmaat - eoutput scalar graph (symmetric?) 69 */ 70 #undef __FUNCT__ 71 #define __FUNCT__ "PCGAMGCreateGraph" 72 PetscErrorCode PCGAMGCreateGraph(Mat Amat, Mat *a_Gmat) 73 { 74 PetscErrorCode ierr; 75 PetscInt Istart,Iend,Ii,jj,kk,ncols,nloc,NN,MM,bs; 76 MPI_Comm comm; 77 Mat Gmat; 78 MatType mtype; 79 80 PetscFunctionBegin; 81 ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr); 82 ierr = MatGetOwnershipRange(Amat, &Istart, &Iend);CHKERRQ(ierr); 83 ierr = MatGetSize(Amat, &MM, &NN);CHKERRQ(ierr); 84 ierr = MatGetBlockSize(Amat, &bs);CHKERRQ(ierr); 85 nloc = (Iend-Istart)/bs; 86 87 #if defined PETSC_GAMG_USE_LOG 88 ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr); 89 #endif 90 91 if (bs > 1) { 92 const PetscScalar *vals; 93 const PetscInt *idx; 94 PetscInt *d_nnz, *o_nnz,*w0,*w1,*w2; 95 PetscBool ismpiaij,isseqaij; 96 97 /* 98 Determine the preallocation needed for the scalar matrix derived from the vector matrix. 99 */ 100 101 ierr = PetscObjectTypeCompare((PetscObject)Amat,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 102 ierr = PetscObjectTypeCompare((PetscObject)Amat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 103 104 ierr = PetscMalloc2(nloc, &d_nnz,isseqaij ? 0 : nloc, &o_nnz);CHKERRQ(ierr); 105 106 if (isseqaij) { 107 PetscInt max_d_nnz; 108 109 /* 110 Determine exact preallocation count for (sequential) scalar matrix 111 */ 112 ierr = MatSeqAIJGetMaxRowNonzeros(Amat,&max_d_nnz);CHKERRQ(ierr); 113 max_d_nnz = PetscMin(nloc,bs*max_d_nnz);CHKERRQ(ierr); 114 ierr = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr); 115 for (Ii = 0, jj = 0; Ii < Iend; Ii += bs, jj++) { 116 ierr = MatCollapseRows(Amat,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr); 117 } 118 ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr); 119 120 } else if (ismpiaij) { 121 Mat Daij,Oaij; 122 const PetscInt *garray; 123 PetscInt max_d_nnz; 124 125 ierr = MatMPIAIJGetSeqAIJ(Amat,&Daij,&Oaij,&garray);CHKERRQ(ierr); 126 127 /* 128 Determine exact preallocation count for diagonal block portion of scalar matrix 129 */ 130 ierr = MatSeqAIJGetMaxRowNonzeros(Daij,&max_d_nnz);CHKERRQ(ierr); 131 max_d_nnz = PetscMin(nloc,bs*max_d_nnz);CHKERRQ(ierr); 132 ierr = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr); 133 for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) { 134 ierr = MatCollapseRows(Daij,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr); 135 } 136 ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr); 137 138 /* 139 Over estimate (usually grossly over), preallocation count for off-diagonal portion of scalar matrix 140 */ 141 for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) { 142 o_nnz[jj] = 0; 143 for (kk=0; kk<bs; kk++) { /* rows that get collapsed to a single row */ 144 ierr = MatGetRow(Oaij,Ii+kk,&ncols,0,0);CHKERRQ(ierr); 145 o_nnz[jj] += ncols; 146 ierr = MatRestoreRow(Oaij,Ii+kk,&ncols,0,0);CHKERRQ(ierr); 147 } 148 if (o_nnz[jj] > (NN/bs-nloc)) o_nnz[jj] = NN/bs-nloc; 149 } 150 151 } else { 152 SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require AIJ matrix type"); 153 } 154 155 /* get scalar copy (norms) of matrix */ 156 ierr = MatGetType(Amat,&mtype);CHKERRQ(ierr); 157 ierr = MatCreate(comm, &Gmat);CHKERRQ(ierr); 158 ierr = MatSetSizes(Gmat,nloc,nloc,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 159 ierr = MatSetBlockSizes(Gmat, 1, 1);CHKERRQ(ierr); 160 ierr = MatSetType(Gmat, mtype);CHKERRQ(ierr); 161 ierr = MatSeqAIJSetPreallocation(Gmat,0,d_nnz);CHKERRQ(ierr); 162 ierr = MatMPIAIJSetPreallocation(Gmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 163 ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr); 164 165 for (Ii = Istart; Ii < Iend; Ii++) { 166 PetscInt dest_row = Ii/bs; 167 ierr = MatGetRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr); 168 for (jj=0; jj<ncols; jj++) { 169 PetscInt dest_col = idx[jj]/bs; 170 PetscScalar sv = PetscAbs(PetscRealPart(vals[jj])); 171 ierr = MatSetValues(Gmat,1,&dest_row,1,&dest_col,&sv,ADD_VALUES);CHKERRQ(ierr); 172 } 173 ierr = MatRestoreRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr); 174 } 175 ierr = MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 176 ierr = MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 177 } else { 178 /* just copy scalar matrix - abs() not taken here but scaled later */ 179 ierr = MatDuplicate(Amat, MAT_COPY_VALUES, &Gmat);CHKERRQ(ierr); 180 } 181 182 #if defined PETSC_GAMG_USE_LOG 183 ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr); 184 #endif 185 186 *a_Gmat = Gmat; 187 PetscFunctionReturn(0); 188 } 189 190 /* -------------------------------------------------------------------------- */ 191 /* 192 PCGAMGFilterGraph - filter (remove zero and possibly small values from the) graph and symetrize if needed 193 194 Input Parameter: 195 . vfilter - threshold paramter [0,1) 196 . symm - symetrize? 197 In/Output Parameter: 198 . a_Gmat - original graph 199 */ 200 #undef __FUNCT__ 201 #define __FUNCT__ "PCGAMGFilterGraph" 202 PetscErrorCode PCGAMGFilterGraph(Mat *a_Gmat,PetscReal vfilter,PetscBool symm) 203 { 204 PetscErrorCode ierr; 205 PetscInt Istart,Iend,Ii,jj,ncols,nnz0,nnz1, NN, MM, nloc; 206 PetscMPIInt rank; 207 Mat Gmat = *a_Gmat, tGmat, matTrans; 208 MPI_Comm comm; 209 const PetscScalar *vals; 210 const PetscInt *idx; 211 PetscInt *d_nnz, *o_nnz; 212 Vec diag; 213 MatType mtype; 214 215 PetscFunctionBegin; 216 #if defined PETSC_GAMG_USE_LOG 217 ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr); 218 #endif 219 /* scale Gmat for all values between -1 and 1 */ 220 ierr = MatCreateVecs(Gmat, &diag, 0);CHKERRQ(ierr); 221 ierr = MatGetDiagonal(Gmat, diag);CHKERRQ(ierr); 222 ierr = VecReciprocal(diag);CHKERRQ(ierr); 223 ierr = VecSqrtAbs(diag);CHKERRQ(ierr); 224 ierr = MatDiagonalScale(Gmat, diag, diag);CHKERRQ(ierr); 225 ierr = VecDestroy(&diag);CHKERRQ(ierr); 226 227 if (vfilter < 0.0 && !symm) { 228 /* Just use the provided matrix as the graph but make all values positive */ 229 MatInfo info; 230 PetscScalar *avals; 231 PetscBool isaij,ismpiaij; 232 ierr = PetscObjectTypeCompare((PetscObject)Gmat,MATSEQAIJ,&isaij);CHKERRQ(ierr); 233 ierr = PetscObjectTypeCompare((PetscObject)Gmat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 234 if (!isaij && !ismpiaij) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require (MPI)AIJ matrix type"); 235 if (isaij) { 236 ierr = MatGetInfo(Gmat,MAT_LOCAL,&info);CHKERRQ(ierr); 237 ierr = MatSeqAIJGetArray(Gmat,&avals);CHKERRQ(ierr); 238 for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]); 239 ierr = MatSeqAIJRestoreArray(Gmat,&avals);CHKERRQ(ierr); 240 } else { 241 Mat_MPIAIJ *aij = (Mat_MPIAIJ*)Gmat->data; 242 ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr); 243 ierr = MatSeqAIJGetArray(aij->A,&avals);CHKERRQ(ierr); 244 for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]); 245 ierr = MatSeqAIJRestoreArray(aij->A,&avals);CHKERRQ(ierr); 246 ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr); 247 ierr = MatSeqAIJGetArray(aij->B,&avals);CHKERRQ(ierr); 248 for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]); 249 ierr = MatSeqAIJRestoreArray(aij->B,&avals);CHKERRQ(ierr); 250 } 251 #if defined PETSC_GAMG_USE_LOG 252 ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr); 253 #endif 254 PetscFunctionReturn(0); 255 } 256 257 ierr = PetscObjectGetComm((PetscObject)Gmat,&comm);CHKERRQ(ierr); 258 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 259 ierr = MatGetOwnershipRange(Gmat, &Istart, &Iend);CHKERRQ(ierr); 260 nloc = Iend - Istart; 261 ierr = MatGetSize(Gmat, &MM, &NN);CHKERRQ(ierr); 262 263 if (symm) { 264 ierr = MatTranspose(Gmat, MAT_INITIAL_MATRIX, &matTrans);CHKERRQ(ierr); 265 } 266 267 /* Determine upper bound on nonzeros needed in new filtered matrix */ 268 ierr = PetscMalloc2(nloc, &d_nnz,nloc, &o_nnz);CHKERRQ(ierr); 269 for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) { 270 ierr = MatGetRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr); 271 d_nnz[jj] = ncols; 272 o_nnz[jj] = ncols; 273 ierr = MatRestoreRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr); 274 if (symm) { 275 ierr = MatGetRow(matTrans,Ii,&ncols,NULL,NULL);CHKERRQ(ierr); 276 d_nnz[jj] += ncols; 277 o_nnz[jj] += ncols; 278 ierr = MatRestoreRow(matTrans,Ii,&ncols,NULL,NULL);CHKERRQ(ierr); 279 } 280 if (d_nnz[jj] > nloc) d_nnz[jj] = nloc; 281 if (o_nnz[jj] > (MM-nloc)) o_nnz[jj] = MM - nloc; 282 } 283 ierr = MatGetType(Gmat,&mtype);CHKERRQ(ierr); 284 ierr = MatCreate(comm, &tGmat);CHKERRQ(ierr); 285 ierr = MatSetSizes(tGmat,nloc,nloc,MM,MM);CHKERRQ(ierr); 286 ierr = MatSetBlockSizes(tGmat, 1, 1);CHKERRQ(ierr); 287 ierr = MatSetType(tGmat, mtype);CHKERRQ(ierr); 288 ierr = MatSeqAIJSetPreallocation(tGmat,0,d_nnz);CHKERRQ(ierr); 289 ierr = MatMPIAIJSetPreallocation(tGmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 290 ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr); 291 if (symm) { 292 ierr = MatDestroy(&matTrans);CHKERRQ(ierr); 293 } else { 294 /* all entries are generated locally so MatAssembly will be slightly faster for large process counts */ 295 ierr = MatSetOption(tGmat,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr); 296 } 297 298 for (Ii = Istart, nnz0 = nnz1 = 0; Ii < Iend; Ii++) { 299 ierr = MatGetRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr); 300 for (jj=0; jj<ncols; jj++,nnz0++) { 301 PetscScalar sv = PetscAbs(PetscRealPart(vals[jj])); 302 if (PetscRealPart(sv) > vfilter) { 303 nnz1++; 304 if (symm) { 305 sv *= 0.5; 306 ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,ADD_VALUES);CHKERRQ(ierr); 307 ierr = MatSetValues(tGmat,1,&idx[jj],1,&Ii,&sv,ADD_VALUES);CHKERRQ(ierr); 308 } else { 309 ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,ADD_VALUES);CHKERRQ(ierr); 310 } 311 } 312 } 313 ierr = MatRestoreRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr); 314 } 315 ierr = MatAssemblyBegin(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 316 ierr = MatAssemblyEnd(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 317 318 #if defined PETSC_GAMG_USE_LOG 319 ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr); 320 #endif 321 322 #if defined(PETSC_USE_INFO) 323 { 324 double t1 = (!nnz0) ? 1. : 100.*(double)nnz1/(double)nnz0, t2 = (!nloc) ? 1. : (double)nnz0/(double)nloc; 325 ierr = PetscInfo4(*a_Gmat,"\t %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%D)\n",t1,vfilter,t2,MM);CHKERRQ(ierr); 326 } 327 #endif 328 ierr = MatDestroy(&Gmat);CHKERRQ(ierr); 329 *a_Gmat = tGmat; 330 PetscFunctionReturn(0); 331 } 332 333 /* -------------------------------------------------------------------------- */ 334 /* 335 PCGAMGGetDataWithGhosts - hacks into Mat MPIAIJ so this must have npe > 1 336 337 Input Parameter: 338 . Gmat - MPIAIJ matrix for scattters 339 . data_sz - number of data terms per node (# cols in output) 340 . data_in[nloc*data_sz] - column oriented data 341 Output Parameter: 342 . a_stride - numbrt of rows of output 343 . a_data_out[stride*data_sz] - output data with ghosts 344 */ 345 #undef __FUNCT__ 346 #define __FUNCT__ "PCGAMGGetDataWithGhosts" 347 PetscErrorCode PCGAMGGetDataWithGhosts(Mat Gmat,PetscInt data_sz,PetscReal data_in[],PetscInt *a_stride,PetscReal **a_data_out) 348 { 349 PetscErrorCode ierr; 350 MPI_Comm comm; 351 Vec tmp_crds; 352 Mat_MPIAIJ *mpimat = (Mat_MPIAIJ*)Gmat->data; 353 PetscInt nnodes,num_ghosts,dir,kk,jj,my0,Iend,nloc; 354 PetscScalar *data_arr; 355 PetscReal *datas; 356 PetscBool isMPIAIJ; 357 358 PetscFunctionBegin; 359 ierr = PetscObjectGetComm((PetscObject)Gmat,&comm);CHKERRQ(ierr); 360 ierr = PetscObjectTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPIAIJ);CHKERRQ(ierr); 361 ierr = MatGetOwnershipRange(Gmat, &my0, &Iend);CHKERRQ(ierr); 362 nloc = Iend - my0; 363 ierr = VecGetLocalSize(mpimat->lvec, &num_ghosts);CHKERRQ(ierr); 364 nnodes = num_ghosts + nloc; 365 *a_stride = nnodes; 366 ierr = MatCreateVecs(Gmat, &tmp_crds, 0);CHKERRQ(ierr); 367 368 ierr = PetscMalloc1(data_sz*nnodes, &datas);CHKERRQ(ierr); 369 for (dir=0; dir<data_sz; dir++) { 370 /* set local, and global */ 371 for (kk=0; kk<nloc; kk++) { 372 PetscInt gid = my0 + kk; 373 PetscScalar crd = (PetscScalar)data_in[dir*nloc + kk]; /* col oriented */ 374 datas[dir*nnodes + kk] = PetscRealPart(crd); 375 376 ierr = VecSetValues(tmp_crds, 1, &gid, &crd, INSERT_VALUES);CHKERRQ(ierr); 377 } 378 ierr = VecAssemblyBegin(tmp_crds);CHKERRQ(ierr); 379 ierr = VecAssemblyEnd(tmp_crds);CHKERRQ(ierr); 380 /* get ghost datas */ 381 ierr = VecScatterBegin(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 382 ierr = VecScatterEnd(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 383 ierr = VecGetArray(mpimat->lvec, &data_arr);CHKERRQ(ierr); 384 for (kk=nloc,jj=0;jj<num_ghosts;kk++,jj++) datas[dir*nnodes + kk] = PetscRealPart(data_arr[jj]); 385 ierr = VecRestoreArray(mpimat->lvec, &data_arr);CHKERRQ(ierr); 386 } 387 ierr = VecDestroy(&tmp_crds);CHKERRQ(ierr); 388 *a_data_out = datas; 389 PetscFunctionReturn(0); 390 } 391 392 393 /* 394 * 395 * PCGAMGHashTableCreate 396 */ 397 398 #undef __FUNCT__ 399 #define __FUNCT__ "PCGAMGHashTableCreate" 400 PetscErrorCode PCGAMGHashTableCreate(PetscInt a_size, PCGAMGHashTable *a_tab) 401 { 402 PetscErrorCode ierr; 403 PetscInt kk; 404 405 PetscFunctionBegin; 406 a_tab->size = a_size; 407 ierr = PetscMalloc1(a_size, &a_tab->table);CHKERRQ(ierr); 408 ierr = PetscMalloc1(a_size, &a_tab->data);CHKERRQ(ierr); 409 for (kk=0; kk<a_size; kk++) a_tab->table[kk] = -1; 410 PetscFunctionReturn(0); 411 } 412 413 #undef __FUNCT__ 414 #define __FUNCT__ "PCGAMGHashTableDestroy" 415 PetscErrorCode PCGAMGHashTableDestroy(PCGAMGHashTable *a_tab) 416 { 417 PetscErrorCode ierr; 418 419 PetscFunctionBegin; 420 ierr = PetscFree(a_tab->table);CHKERRQ(ierr); 421 ierr = PetscFree(a_tab->data);CHKERRQ(ierr); 422 PetscFunctionReturn(0); 423 } 424 425 #undef __FUNCT__ 426 #define __FUNCT__ "PCGAMGHashTableAdd" 427 PetscErrorCode PCGAMGHashTableAdd(PCGAMGHashTable *a_tab, PetscInt a_key, PetscInt a_data) 428 { 429 PetscInt kk,idx; 430 431 PetscFunctionBegin; 432 if (a_key<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Negative key %d.",a_key); 433 for (kk = 0, idx = GAMG_HASH(a_key); 434 kk < a_tab->size; 435 kk++, idx = (idx==(a_tab->size-1)) ? 0 : idx + 1) { 436 437 if (a_tab->table[idx] == a_key) { 438 /* exists */ 439 a_tab->data[idx] = a_data; 440 break; 441 } else if (a_tab->table[idx] == -1) { 442 /* add */ 443 a_tab->table[idx] = a_key; 444 a_tab->data[idx] = a_data; 445 break; 446 } 447 } 448 if (kk==a_tab->size) { 449 /* this is not to efficient, waiting until completely full */ 450 PetscInt oldsize = a_tab->size, new_size = 2*a_tab->size + 5, *oldtable = a_tab->table, *olddata = a_tab->data; 451 PetscErrorCode ierr; 452 453 a_tab->size = new_size; 454 455 ierr = PetscMalloc1(a_tab->size, &a_tab->table);CHKERRQ(ierr); 456 ierr = PetscMalloc1(a_tab->size, &a_tab->data);CHKERRQ(ierr); 457 458 for (kk=0;kk<a_tab->size;kk++) a_tab->table[kk] = -1; 459 for (kk=0;kk<oldsize;kk++) { 460 if (oldtable[kk] != -1) { 461 ierr = PCGAMGHashTableAdd(a_tab, oldtable[kk], olddata[kk]);CHKERRQ(ierr); 462 } 463 } 464 ierr = PetscFree(oldtable);CHKERRQ(ierr); 465 ierr = PetscFree(olddata);CHKERRQ(ierr); 466 ierr = PCGAMGHashTableAdd(a_tab, a_key, a_data);CHKERRQ(ierr); 467 } 468 PetscFunctionReturn(0); 469 } 470 471