/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/

/*
   Produces a set of block column indices of the matrix row, one for each block represented in the original row

   n - the number of block indices in cc[]
   cc - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRow(Mat Amat,PetscInt row,PetscInt bs,PetscInt *n,PetscInt *cc)
{
  PetscInt       cnt = -1,nidx,j;
  const PetscInt *idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr);
  if (nidx) {
    cnt = 0;
    cc[cnt] = idx[0]/bs;
    for (j=1; j<nidx; j++) {
      if (cc[cnt] < idx[j]/bs) cc[++cnt] = idx[j]/bs;
    }
  }
  ierr = MatRestoreRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr);
  *n = cnt+1;
  PetscFunctionReturn(0);
}

/*
   Produces a set of block column indices of the matrix block row, one for each block represented in the original set of rows

   ncollapsed - the number of block indices
   collapsed - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRows(Mat Amat,PetscInt start,PetscInt bs,PetscInt *w0,PetscInt *w1,PetscInt *w2,PetscInt *ncollapsed,PetscInt **collapsed)
{
  PetscInt       i,nprev,*cprev = w0,ncur = 0,*ccur = w1,*merged = w2,*cprevtmp;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCollapseRow(Amat,start,bs,&nprev,cprev);CHKERRQ(ierr);
  for (i=start+1; i<start+bs; i++) {
    ierr = MatCollapseRow(Amat,i,bs,&ncur,ccur);CHKERRQ(ierr);
    ierr = PetscMergeIntArray(nprev,cprev,ncur,ccur,&nprev,&merged);CHKERRQ(ierr);
    cprevtmp = cprev; cprev = merged; merged = cprevtmp;
  }
  *ncollapsed = nprev;
  if (collapsed) *collapsed = cprev;
  PetscFunctionReturn(0);
}
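/*
   A minimal usage sketch of the two helpers above, not part of the original
   source (the PETSC_GAMG_UTIL_EXAMPLES guard is a hypothetical macro that
   keeps it out of any build): count the block columns of each local block
   row. For bs = 2, a scalar row with columns {0,1,4,5,7} collapses to block
   columns {0,2,3}; MatCollapseRows() merges the bs collapsed scalar rows of
   a block row into the union of their block columns.
*/
#if defined(PETSC_GAMG_UTIL_EXAMPLES)
static PetscErrorCode CountBlockColsSketch(Mat Amat,PetscInt bs,PetscInt nblockrows,PetscInt maxcols,PetscInt bncols[])
{
  PetscInt       *w0,*w1,*w2,jj;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* three scratch arrays, each large enough to hold one merged index set */
  ierr = PetscMalloc3(maxcols,&w0,maxcols,&w1,maxcols,&w2);CHKERRQ(ierr);
  for (jj=0; jj<nblockrows; jj++) {
    ierr = MatCollapseRows(Amat,jj*bs,bs,w0,w1,w2,&bncols[jj],NULL);CHKERRQ(ierr);
  }
  ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif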
/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateGraph - create simple scaled scalar graph from matrix

   Input Parameter:
   . Amat - matrix
   Output Parameter:
   . a_Gmat - output scalar graph (symmetric?)
*/
PetscErrorCode PCGAMGCreateGraph(Mat Amat, Mat *a_Gmat)
{
  PetscErrorCode ierr;
  PetscInt       Istart,Iend,Ii,jj,kk,ncols,nloc,NN,MM,bs;
  MPI_Comm       comm;
  Mat            Gmat;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Amat, &Istart, &Iend);CHKERRQ(ierr);
  ierr = MatGetSize(Amat, &MM, &NN);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat, &bs);CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs;

  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);

  /* TODO GPU: these calls are potentially expensive if matrices are large and we want to use the GPU */
  /* A solution consists in providing a new API, MatAIJGetCollapsedAIJ, and each class can provide a fast
     implementation */
  if (bs > 1) {
    const PetscScalar *vals;
    const PetscInt    *idx;
    PetscInt          *d_nnz,*o_nnz,*w0,*w1,*w2;
    PetscBool         ismpiaij,isseqaij;

    /*
       Determine the preallocation needed for the scalar matrix derived from the vector matrix.
    */
    ierr = PetscObjectBaseTypeCompare((PetscObject)Amat,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)Amat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscMalloc2(nloc, &d_nnz,isseqaij ? 0 : nloc, &o_nnz);CHKERRQ(ierr);

    if (isseqaij) {
      PetscInt max_d_nnz;

      /*
         Determine exact preallocation count for (sequential) scalar matrix
      */
      ierr = MatSeqAIJGetMaxRowNonzeros(Amat,&max_d_nnz);CHKERRQ(ierr);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      ierr = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr);
      for (Ii = 0, jj = 0; Ii < Iend; Ii += bs, jj++) {
        ierr = MatCollapseRows(Amat,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);

    } else if (ismpiaij) {
      Mat            Daij,Oaij;
      const PetscInt *garray;
      PetscInt       max_d_nnz;

      ierr = MatMPIAIJGetSeqAIJ(Amat,&Daij,&Oaij,&garray);CHKERRQ(ierr);

      /*
         Determine exact preallocation count for diagonal block portion of scalar matrix
      */
      ierr = MatSeqAIJGetMaxRowNonzeros(Daij,&max_d_nnz);CHKERRQ(ierr);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      ierr = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr);
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        ierr = MatCollapseRows(Daij,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);

      /*
         Overestimate (usually grossly over) the preallocation count for the off-diagonal portion of the scalar matrix
      */
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        o_nnz[jj] = 0;
        for (kk=0; kk<bs; kk++) { /* rows that get collapsed to a single row */
          ierr = MatGetRow(Oaij,Ii+kk,&ncols,NULL,NULL);CHKERRQ(ierr);
          o_nnz[jj] += ncols;
          ierr = MatRestoreRow(Oaij,Ii+kk,&ncols,NULL,NULL);CHKERRQ(ierr);
        }
        if (o_nnz[jj] > (NN/bs-nloc)) o_nnz[jj] = NN/bs-nloc;
      }

    } else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require AIJ matrix type");

    /* get scalar copy (norms) of matrix */
    ierr = MatCreate(comm, &Gmat);CHKERRQ(ierr);
    ierr = MatSetSizes(Gmat,nloc,nloc,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(Gmat, 1, 1);CHKERRQ(ierr);
    ierr = MatSetType(Gmat, MATAIJ);CHKERRQ(ierr);
    ierr = MatSeqAIJSetPreallocation(Gmat,0,d_nnz);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(Gmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
    ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

    for (Ii = Istart; Ii < Iend; Ii++) {
      PetscInt dest_row = Ii/bs;
      ierr = MatGetRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
      for (jj=0; jj<ncols; jj++) {
        PetscInt    dest_col = idx[jj]/bs;
        PetscScalar sv       = PetscAbs(PetscRealPart(vals[jj]));
        ierr = MatSetValues(Gmat,1,&dest_row,1,&dest_col,&sv,ADD_VALUES);CHKERRQ(ierr);
      }
      ierr = MatRestoreRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    /* just copy scalar matrix - abs() not taken here but scaled later */
    ierr = MatDuplicate(Amat, MAT_COPY_VALUES, &Gmat);CHKERRQ(ierr);
  }
  ierr = MatPropagateSymmetryOptions(Amat, Gmat);CHKERRQ(ierr);

  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);

  *a_Gmat = Gmat;
  PetscFunctionReturn(0);
}
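/*
   A minimal usage sketch, not part of the original source (hypothetical
   PETSC_GAMG_UTIL_EXAMPLES guard): build the scalar graph of an assembled
   (MPI)AIJ matrix. For bs > 1 each graph entry is the sum of |real part|
   over the corresponding bs x bs block, so Gmat has one vertex per block
   row of Amat.
*/
#if defined(PETSC_GAMG_UTIL_EXAMPLES)
static PetscErrorCode CreateGraphSketch(Mat Amat)
{
  Mat            Gmat;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGAMGCreateGraph(Amat,&Gmat);CHKERRQ(ierr); /* scalar graph with block size 1 */
  /* ... filter with PCGAMGFilterGraph() and hand off to a coarsener ... */
  ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif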
/* -------------------------------------------------------------------------- */
/*@C
   PCGAMGFilterGraph - filter (remove zero and possibly small values from the) graph and make it symmetric if requested

   Collective on Mat

   Input Parameters:
+   a_Gmat - the graph
.   vfilter - threshold parameter [0,1)
-   symm - make the result symmetric

   Level: developer

   Notes:
    This is called before the graph coarseners are called.

.seealso: PCGAMGSetThreshold()
@*/
PetscErrorCode PCGAMGFilterGraph(Mat *a_Gmat,PetscReal vfilter,PetscBool symm)
{
  PetscErrorCode    ierr;
  PetscInt          Istart,Iend,Ii,jj,ncols,nnz0,nnz1,NN,MM,nloc;
  PetscMPIInt       rank;
  Mat               Gmat = *a_Gmat,tGmat;
  MPI_Comm          comm;
  const PetscScalar *vals;
  const PetscInt    *idx;
  PetscInt          *d_nnz,*o_nnz;
  Vec               diag;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);

  /* TODO GPU: optimization proposal, each class provides fast implementation of this
     procedure via MatAbs API */
  if (vfilter < 0.0 && !symm) {
    /* Just use the provided matrix as the graph but make all values positive */
    MatInfo     info;
    PetscScalar *avals;
    PetscBool   isaij,ismpiaij;
    ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat,MATSEQAIJ,&isaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    if (!isaij && !ismpiaij) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require (MPI)AIJ matrix type");
    if (isaij) {
      ierr = MatGetInfo(Gmat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(Gmat,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(Gmat,&avals);CHKERRQ(ierr);
    } else {
      Mat_MPIAIJ *aij = (Mat_MPIAIJ*)Gmat->data;
      ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(aij->A,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(aij->A,&avals);CHKERRQ(ierr);
      ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(aij->B,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(aij->B,&avals);CHKERRQ(ierr);
    }
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* TODO GPU: this can be called when filter = 0 -> Probably provide MatAIJThresholdCompress that compresses the entries below a threshold?
                Also, if the matrix is symmetric, can we skip this operation? It can be very expensive on large matrices. */
  ierr = PetscObjectGetComm((PetscObject)Gmat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  ierr = MatGetOwnershipRange(Gmat, &Istart, &Iend);CHKERRQ(ierr);
  nloc = Iend - Istart;
  ierr = MatGetSize(Gmat, &MM, &NN);CHKERRQ(ierr);

  if (symm) {
    Mat matTrans;
    ierr = MatTranspose(Gmat, MAT_INITIAL_MATRIX, &matTrans);CHKERRQ(ierr);
    ierr = MatAXPY(Gmat, 1.0, matTrans, Gmat->structurally_symmetric ? SAME_NONZERO_PATTERN : DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
    ierr = MatDestroy(&matTrans);CHKERRQ(ierr);
  }

  /* scale Gmat for all values between -1 and 1 */
  ierr = MatCreateVecs(Gmat, &diag, NULL);CHKERRQ(ierr);
  ierr = MatGetDiagonal(Gmat, diag);CHKERRQ(ierr);
  ierr = VecReciprocal(diag);CHKERRQ(ierr);
  ierr = VecSqrtAbs(diag);CHKERRQ(ierr);
  ierr = MatDiagonalScale(Gmat, diag, diag);CHKERRQ(ierr);
  ierr = VecDestroy(&diag);CHKERRQ(ierr);

  /* Determine upper bound on nonzeros needed in new filtered matrix */
  ierr = PetscMalloc2(nloc, &d_nnz,nloc, &o_nnz);CHKERRQ(ierr);
  for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
    ierr = MatGetRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    d_nnz[jj] = ncols;
    o_nnz[jj] = ncols;
    ierr = MatRestoreRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    if (d_nnz[jj] > nloc) d_nnz[jj] = nloc;
    if (o_nnz[jj] > (MM-nloc)) o_nnz[jj] = MM - nloc;
  }
  ierr = MatCreate(comm, &tGmat);CHKERRQ(ierr);
  ierr = MatSetSizes(tGmat,nloc,nloc,MM,MM);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(tGmat, 1, 1);CHKERRQ(ierr);
  ierr = MatSetType(tGmat, MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(tGmat,0,d_nnz);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(tGmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
  ierr = MatSetOption(tGmat,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

  for (Ii = Istart, nnz0 = nnz1 = 0; Ii < Iend; Ii++) {
    ierr = MatGetRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    for (jj=0; jj<ncols; jj++,nnz0++) {
      PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
      if (PetscRealPart(sv) > vfilter) {
        nnz1++;
        ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
    ierr = MatRestoreRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (symm) {
    ierr = MatSetOption(tGmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
  } else {
    ierr = MatPropagateSymmetryOptions(Gmat,tGmat);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  {
    double t1 = (!nnz0) ? 1. : 100.*(double)nnz1/(double)nnz0, t2 = (!nloc) ? 1. : (double)nnz0/(double)nloc;
    ierr = PetscInfo4(*a_Gmat,"\t %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%D)\n",t1,vfilter,t2,MM);CHKERRQ(ierr);
  }
#endif
  ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
  *a_Gmat = tGmat;
  PetscFunctionReturn(0);
}
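/*
   A minimal usage sketch, not part of the original source (hypothetical
   PETSC_GAMG_UTIL_EXAMPLES guard), showing the two code paths above: a
   negative threshold with symm == PETSC_FALSE takes the cheap in-place path
   that only replaces values with their absolute values, while a threshold in
   [0,1) symmetrizes (if requested), scales the graph symmetrically by the
   inverse square root of its diagonal, and rebuilds it keeping only entries
   above the threshold.
*/
#if defined(PETSC_GAMG_UTIL_EXAMPLES)
static PetscErrorCode FilterGraphSketch(Mat *Gmat,PetscBool cheap)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (cheap) {
    ierr = PCGAMGFilterGraph(Gmat,-1.0,PETSC_FALSE);CHKERRQ(ierr); /* |values| only, structure untouched */
  } else {
    ierr = PCGAMGFilterGraph(Gmat,0.05,PETSC_TRUE);CHKERRQ(ierr);  /* 0.05 is an arbitrary example threshold */
  }
  PetscFunctionReturn(0);
}
#endif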
/* -------------------------------------------------------------------------- */
/*
   PCGAMGGetDataWithGhosts - hacks into Mat MPIAIJ internals, so the communicator must have size > 1

   Input Parameters:
   . Gmat - MPIAIJ matrix for scatters
   . data_sz - number of data terms per node (# cols in output)
   . data_in[nloc*data_sz] - column-oriented data
   Output Parameters:
   . a_stride - number of rows of output
   . a_data_out[stride*data_sz] - output data with ghosts
*/
PetscErrorCode PCGAMGGetDataWithGhosts(Mat Gmat,PetscInt data_sz,PetscReal data_in[],PetscInt *a_stride,PetscReal **a_data_out)
{
  PetscErrorCode ierr;
  Vec            tmp_crds;
  Mat_MPIAIJ     *mpimat = (Mat_MPIAIJ*)Gmat->data;
  PetscInt       nnodes,num_ghosts,dir,kk,jj,my0,Iend,nloc;
  PetscScalar    *data_arr;
  PetscReal      *datas;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPIAIJ);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Gmat, &my0, &Iend);CHKERRQ(ierr);
  nloc = Iend - my0;
  ierr = VecGetLocalSize(mpimat->lvec, &num_ghosts);CHKERRQ(ierr);
  nnodes    = num_ghosts + nloc;
  *a_stride = nnodes;
  ierr = MatCreateVecs(Gmat, &tmp_crds, NULL);CHKERRQ(ierr);

  ierr = PetscMalloc1(data_sz*nnodes, &datas);CHKERRQ(ierr);
  for (dir=0; dir<data_sz; dir++) {
    /* set local, and global */
    for (kk=0; kk<nloc; kk++) {
      PetscInt    gid = my0 + kk;
      PetscScalar crd = (PetscScalar)data_in[dir*nloc + kk]; /* col oriented */
      datas[dir*nnodes + kk] = PetscRealPart(crd);

      ierr = VecSetValues(tmp_crds, 1, &gid, &crd, INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin(tmp_crds);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(tmp_crds);CHKERRQ(ierr);
    /* get ghost data */
    ierr = VecScatterBegin(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(mpimat->lvec, &data_arr);CHKERRQ(ierr);
    for (kk=nloc,jj=0;jj<num_ghosts;kk++,jj++) datas[dir*nnodes + kk] = PetscRealPart(data_arr[jj]);
    ierr = VecRestoreArray(mpimat->lvec, &data_arr);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&tmp_crds);CHKERRQ(ierr);
  *a_data_out = datas;
  PetscFunctionReturn(0);
}
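/*
   A minimal usage sketch, not part of the original source (hypothetical
   PETSC_GAMG_UTIL_EXAMPLES guard): gather dim coordinates per local node,
   with the ghost-node rows appended after the nloc local rows. The caller
   owns (and must free) the returned array.
*/
#if defined(PETSC_GAMG_UTIL_EXAMPLES)
static PetscErrorCode GhostCoordsSketch(Mat Gmat,PetscInt dim,PetscReal coords[])
{
  PetscInt       stride;
  PetscReal      *ghosted;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGAMGGetDataWithGhosts(Gmat,dim,coords,&stride,&ghosted);CHKERRQ(ierr);
  /* ghosted[dir*stride + kk] is coordinate dir of node kk; kk >= nloc are ghosts */
  ierr = PetscFree(ghosted);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif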
PetscErrorCode PCGAMGHashTableCreate(PetscInt a_size, PCGAMGHashTable *a_tab)
{
  PetscErrorCode ierr;
  PetscInt       kk;

  PetscFunctionBegin;
  a_tab->size = a_size;
  ierr = PetscMalloc2(a_size, &a_tab->table,a_size, &a_tab->data);CHKERRQ(ierr);
  for (kk=0; kk<a_size; kk++) a_tab->table[kk] = -1;
  PetscFunctionReturn(0);
}

PetscErrorCode PCGAMGHashTableDestroy(PCGAMGHashTable *a_tab)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree2(a_tab->table,a_tab->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
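/*
   Note on the scheme used by PCGAMGHashTableAdd() below (added commentary,
   not part of the original source): the table is open-addressed with linear
   probing. GAMG_HASH() (from gamg.h) picks the starting slot; the probe then
   walks forward, wrapping at the end, until it finds the key (overwrite) or
   an empty slot marked -1 (insert). Only when the probe has visited every
   slot is the table full, and it is then rehashed into a table of size
   2*size + 5. For example, with size = 7 and a starting slot of 5, the
   probe sequence is 5, 6, 0, 1, ...
*/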
PetscErrorCode PCGAMGHashTableAdd(PCGAMGHashTable *a_tab, PetscInt a_key, PetscInt a_data)
{
  PetscInt kk,idx;

  PetscFunctionBegin;
  if (a_key<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Negative key %D.",a_key);
  for (kk = 0, idx = GAMG_HASH(a_key); kk < a_tab->size; kk++, idx = (idx==(a_tab->size-1)) ? 0 : idx + 1) {
    if (a_tab->table[idx] == a_key) {
      /* exists */
      a_tab->data[idx] = a_data;
      break;
    } else if (a_tab->table[idx] == -1) {
      /* add */
      a_tab->table[idx] = a_key;
      a_tab->data[idx]  = a_data;
      break;
    }
  }
  if (kk==a_tab->size) {
    /* this is not too efficient, waiting until completely full before rehashing */
    PetscInt       oldsize = a_tab->size, new_size = 2*a_tab->size + 5, *oldtable = a_tab->table, *olddata = a_tab->data;
    PetscErrorCode ierr;

    a_tab->size = new_size;
    ierr = PetscMalloc2(a_tab->size, &a_tab->table,a_tab->size, &a_tab->data);CHKERRQ(ierr);
    for (kk=0;kk<a_tab->size;kk++) a_tab->table[kk] = -1;
    for (kk=0;kk<oldsize;kk++) {
      if (oldtable[kk] != -1) {
        ierr = PCGAMGHashTableAdd(a_tab, oldtable[kk], olddata[kk]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(oldtable,olddata);CHKERRQ(ierr);
    ierr = PCGAMGHashTableAdd(a_tab, a_key, a_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
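/*
   A minimal usage sketch, not part of the original source (hypothetical
   PETSC_GAMG_UTIL_EXAMPLES guard): create a table, add and overwrite a key,
   then look it up. PCGAMGHashTableFind() is assumed here to be the lookup
   companion declared alongside these routines in gamg.h.
*/
#if defined(PETSC_GAMG_UTIL_EXAMPLES)
static PetscErrorCode HashTableSketch(void)
{
  PCGAMGHashTable tab;
  PetscInt        data;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PCGAMGHashTableCreate(32,&tab);CHKERRQ(ierr);       /* initial size; grows by rehashing when full */
  ierr = PCGAMGHashTableAdd(&tab,1001,7);CHKERRQ(ierr);      /* key 1001 -> data 7 */
  ierr = PCGAMGHashTableAdd(&tab,1001,8);CHKERRQ(ierr);      /* existing key: data overwritten with 8 */
  ierr = PCGAMGHashTableFind(&tab,1001,&data);CHKERRQ(ierr); /* data == 8 */
  ierr = PCGAMGHashTableDestroy(&tab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif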