/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>        /*I "petscpc.h" I*/

/*
   Produces a set of block column indices of the matrix row, one for each block represented in the original row

   n - the number of block indices in cc[]
   cc - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRow(Mat Amat,PetscInt row,PetscInt bs,PetscInt *n,PetscInt *cc)
{
  PetscInt       cnt = -1,nidx,j;
  const PetscInt *idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr);
  if (nidx) {
    cnt = 0;
    cc[cnt] = idx[0]/bs;
    for (j=1; j<nidx; j++) {
      if (cc[cnt] < idx[j]/bs) cc[++cnt] = idx[j]/bs;
    }
  }
  ierr = MatRestoreRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr);
  *n = cnt+1;
  PetscFunctionReturn(0);
}

/*
   Produces a set of block column indices of the matrix block row, one for each block represented in the original set of rows

   ncollapsed - the number of block indices
   collapsed - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRows(Mat Amat,PetscInt start,PetscInt bs,PetscInt *w0,PetscInt *w1,PetscInt *w2,PetscInt *ncollapsed,PetscInt **collapsed)
{
  PetscInt       i,nprev,*cprev = w0,ncur = 0,*ccur = w1,*merged = w2,*cprevtmp;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCollapseRow(Amat,start,bs,&nprev,cprev);CHKERRQ(ierr);
  for (i=start+1; i<start+bs; i++) {
    ierr = MatCollapseRow(Amat,i,bs,&ncur,ccur);CHKERRQ(ierr);
    ierr = PetscMergeIntArray(nprev,cprev,ncur,ccur,&nprev,&merged);CHKERRQ(ierr);
    cprevtmp = cprev; cprev = merged; merged = cprevtmp;
  }
  *ncollapsed = nprev;
  if (collapsed) *collapsed = cprev;
  PetscFunctionReturn(0);
}
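/*
   Usage sketch (illustrative only, not compiled): collapse the first block row of a matrix
   with block size bs into the set of block-column indices it touches.  The three integer
   workspaces must each be large enough to hold the collapsed indices; bs times the maximum
   scalar row length is a safe bound, and "maxw" below is assumed to be that bound.

     PetscInt nc,*cols,*w0,*w1,*w2;
     ierr = PetscMalloc3(maxw,&w0,maxw,&w1,maxw,&w2);CHKERRQ(ierr);
     ierr = MatCollapseRows(Amat,0,bs,w0,w1,w2,&nc,&cols);CHKERRQ(ierr);   (cols aliases one of the workspaces)
     ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);
*/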

/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateGraph - create simple scaled scalar graph from matrix

 Input Parameter:
 . Amat - matrix
 Output Parameter:
 . a_Gmat - output scalar graph (symmetric?)
*/
PetscErrorCode PCGAMGCreateGraph(Mat Amat, Mat *a_Gmat)
{
  PetscErrorCode ierr;
  PetscInt       Istart,Iend,Ii,jj,kk,ncols,nloc,NN,MM,bs;
  MPI_Comm       comm;
  Mat            Gmat;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Amat, &Istart, &Iend);CHKERRQ(ierr);
  ierr = MatGetSize(Amat, &MM, &NN);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat, &bs);CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs;

  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);

  /* TODO GPU: these calls are potentially expensive if matrices are large and we want to use the GPU */
  /* A solution consists in providing a new API, MatAIJGetCollapsedAIJ, and each class can provide a fast
     implementation */
  if (bs > 1) {
    const PetscScalar *vals;
    const PetscInt    *idx;
    PetscInt          *d_nnz, *o_nnz,*w0,*w1,*w2;
    PetscBool         ismpiaij,isseqaij;

    /*
       Determine the preallocation needed for the scalar matrix derived from the vector matrix.
    */
    ierr = PetscObjectBaseTypeCompare((PetscObject)Amat,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)Amat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscMalloc2(nloc, &d_nnz,isseqaij ? 0 : nloc, &o_nnz);CHKERRQ(ierr);

    if (isseqaij) {
      PetscInt max_d_nnz;

      /*
         Determine exact preallocation count for (sequential) scalar matrix
      */
      ierr      = MatSeqAIJGetMaxRowNonzeros(Amat,&max_d_nnz);CHKERRQ(ierr);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      ierr      = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr);
      for (Ii = 0, jj = 0; Ii < Iend; Ii += bs, jj++) {
        ierr = MatCollapseRows(Amat,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);

    } else if (ismpiaij) {
      Mat            Daij,Oaij;
      const PetscInt *garray;
      PetscInt       max_d_nnz;

      ierr = MatMPIAIJGetSeqAIJ(Amat,&Daij,&Oaij,&garray);CHKERRQ(ierr);

      /*
         Determine exact preallocation count for diagonal block portion of scalar matrix
      */
      ierr      = MatSeqAIJGetMaxRowNonzeros(Daij,&max_d_nnz);CHKERRQ(ierr);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      ierr      = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr);
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        ierr = MatCollapseRows(Daij,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);

      /*
         Overestimate (usually grossly over) the preallocation count for the off-diagonal portion of the scalar matrix
      */
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        o_nnz[jj] = 0;
        for (kk=0; kk<bs; kk++) { /* rows that get collapsed to a single row */
          ierr = MatGetRow(Oaij,Ii+kk,&ncols,NULL,NULL);CHKERRQ(ierr);
          o_nnz[jj] += ncols;
          ierr = MatRestoreRow(Oaij,Ii+kk,&ncols,NULL,NULL);CHKERRQ(ierr);
        }
        if (o_nnz[jj] > (NN/bs-nloc)) o_nnz[jj] = NN/bs-nloc;
      }

    } else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require AIJ matrix type");

    /* get scalar copy (norms) of matrix */
    ierr = MatCreate(comm, &Gmat);CHKERRQ(ierr);
    ierr = MatSetSizes(Gmat,nloc,nloc,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(Gmat, 1, 1);CHKERRQ(ierr);
    ierr = MatSetType(Gmat, MATAIJ);CHKERRQ(ierr);
    ierr = MatSeqAIJSetPreallocation(Gmat,0,d_nnz);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(Gmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
    ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

    for (Ii = Istart; Ii < Iend; Ii++) {
      PetscInt dest_row = Ii/bs;
      ierr = MatGetRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
      for (jj=0; jj<ncols; jj++) {
        PetscInt    dest_col = idx[jj]/bs;
        PetscScalar sv       = PetscAbs(PetscRealPart(vals[jj]));
        ierr = MatSetValues(Gmat,1,&dest_row,1,&dest_col,&sv,ADD_VALUES);CHKERRQ(ierr);
      }
      ierr = MatRestoreRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    /* just copy scalar matrix - abs() not taken here but scaled later */
    ierr = MatDuplicate(Amat, MAT_COPY_VALUES, &Gmat);CHKERRQ(ierr);
  }
  ierr = MatPropagateSymmetryOptions(Amat, Gmat);CHKERRQ(ierr);

  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);

  *a_Gmat = Gmat;
  PetscFunctionReturn(0);
}
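/*
   Worked example of the collapse above (illustrative): for bs = 2, the scalar rows

     row 0: a00 a01 a02 a03
     row 1: a10 a11 a12 a13

   are accumulated (via ADD_VALUES) into row 0 of the scalar graph Gmat as

     G(0,0) = |a00|+|a01|+|a10|+|a11|,   G(0,1) = |a02|+|a03|+|a12|+|a13|,

   i.e. each bs x bs block contributes the sum of the absolute values of the real parts of
   its entries to a single scalar graph entry.
*/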

/* -------------------------------------------------------------------------- */
/*@C
   PCGAMGFilterGraph - filter the graph (remove zero and possibly small values) and make it symmetric if requested

   Collective on Mat

   Input Parameters:
+  a_Gmat - the graph
.  vfilter - threshold parameter [0,1)
-  symm - make the result symmetric

   Level: developer

   Notes:
    This is called before the graph coarseners are called.

.seealso: PCGAMGSetThreshold()
@*/
PetscErrorCode PCGAMGFilterGraph(Mat *a_Gmat,PetscReal vfilter,PetscBool symm)
{
  PetscErrorCode    ierr;
  PetscInt          Istart,Iend,Ii,jj,ncols,nnz0,nnz1, NN, MM, nloc;
  PetscMPIInt       rank;
  Mat               Gmat = *a_Gmat, tGmat;
  MPI_Comm          comm;
  const PetscScalar *vals;
  const PetscInt    *idx;
  PetscInt          *d_nnz, *o_nnz;
  Vec               diag;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);

  /* TODO GPU: optimization proposal, each class provides fast implementation of this
     procedure via MatAbs API */
  if (vfilter < 0.0 && !symm) {
    /* Just use the provided matrix as the graph but make all values positive */
    MatInfo     info;
    PetscScalar *avals;
    PetscBool   isaij,ismpiaij;
    ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat,MATSEQAIJ,&isaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    if (!isaij && !ismpiaij) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require (MPI)AIJ matrix type");
    if (isaij) {
      ierr = MatGetInfo(Gmat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(Gmat,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(Gmat,&avals);CHKERRQ(ierr);
    } else {
      Mat_MPIAIJ *aij = (Mat_MPIAIJ*)Gmat->data;
      ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(aij->A,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(aij->A,&avals);CHKERRQ(ierr);
      ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(aij->B,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(aij->B,&avals);CHKERRQ(ierr);
    }
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* TODO GPU: this can be called when filter = 0 -> Probably provide MatAIJThresholdCompress that compresses the entries below a threshold?
     Also, if the matrix is symmetric, can we skip this operation? It can be very expensive on large matrices. */
  ierr = PetscObjectGetComm((PetscObject)Gmat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  ierr = MatGetOwnershipRange(Gmat, &Istart, &Iend);CHKERRQ(ierr);
  nloc = Iend - Istart;
  ierr = MatGetSize(Gmat, &MM, &NN);CHKERRQ(ierr);

  if (symm) {
    Mat matTrans;
    ierr = MatTranspose(Gmat, MAT_INITIAL_MATRIX, &matTrans);CHKERRQ(ierr);
    ierr = MatAXPY(Gmat, 1.0, matTrans, Gmat->structurally_symmetric ? SAME_NONZERO_PATTERN : DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
    ierr = MatDestroy(&matTrans);CHKERRQ(ierr);
  }

  /* scale Gmat for all values between -1 and 1 */
  ierr = MatCreateVecs(Gmat, &diag, NULL);CHKERRQ(ierr);
  ierr = MatGetDiagonal(Gmat, diag);CHKERRQ(ierr);
  ierr = VecReciprocal(diag);CHKERRQ(ierr);
  ierr = VecSqrtAbs(diag);CHKERRQ(ierr);
  ierr = MatDiagonalScale(Gmat, diag, diag);CHKERRQ(ierr);
  ierr = VecDestroy(&diag);CHKERRQ(ierr);

  /* Determine upper bound on nonzeros needed in new filtered matrix */
  ierr = PetscMalloc2(nloc, &d_nnz,nloc, &o_nnz);CHKERRQ(ierr);
  for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
    ierr = MatGetRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    d_nnz[jj] = ncols;
    o_nnz[jj] = ncols;
    ierr = MatRestoreRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    if (d_nnz[jj] > nloc) d_nnz[jj] = nloc;
    if (o_nnz[jj] > (MM-nloc)) o_nnz[jj] = MM - nloc;
  }
  ierr = MatCreate(comm, &tGmat);CHKERRQ(ierr);
  ierr = MatSetSizes(tGmat,nloc,nloc,MM,MM);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(tGmat, 1, 1);CHKERRQ(ierr);
  ierr = MatSetType(tGmat, MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(tGmat,0,d_nnz);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(tGmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
  ierr = MatSetOption(tGmat,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

  for (Ii = Istart, nnz0 = nnz1 = 0; Ii < Iend; Ii++) {
    ierr = MatGetRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    for (jj=0; jj<ncols; jj++,nnz0++) {
      PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
      if (PetscRealPart(sv) > vfilter) {
        nnz1++;
        ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
    ierr = MatRestoreRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (symm) {
    ierr = MatSetOption(tGmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
  } else {
    ierr = MatPropagateSymmetryOptions(Gmat,tGmat);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  {
    double t1 = (!nnz0) ? 1. : 100.*(double)nnz1/(double)nnz0, t2 = (!nloc) ? 1. : (double)nnz0/(double)nloc;
    ierr = PetscInfo4(*a_Gmat,"\t %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%D)\n",t1,vfilter,t2,MM);CHKERRQ(ierr);
  }
#endif
  ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
  *a_Gmat = tGmat;
  PetscFunctionReturn(0);
}
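/*
   Usage sketch (illustrative only; error handling abbreviated): build the scalar strength
   graph from a blocked operator and drop entries below a threshold of 0.01, symmetrizing
   the result.  "Amat" is assumed to be an assembled (MPI)AIJ matrix.

     Mat Gmat;
     ierr = PCGAMGCreateGraph(Amat,&Gmat);CHKERRQ(ierr);
     ierr = PCGAMGFilterGraph(&Gmat,0.01,PETSC_TRUE);CHKERRQ(ierr);   (Gmat is replaced by the filtered graph)
     ...
     ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
*/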

/* -------------------------------------------------------------------------- */
/*
   PCGAMGGetDataWithGhosts - hacks into the MPIAIJ data structures, so the communicator must have size > 1

   Input Parameters:
.  Gmat - MPIAIJ matrix used for the scatters
.  data_sz - number of data terms per node (# cols in output)
.  data_in[nloc*data_sz] - column-oriented data
   Output Parameters:
.  a_stride - number of rows of output
.  a_data_out[stride*data_sz] - output data with ghosts
*/
PetscErrorCode PCGAMGGetDataWithGhosts(Mat Gmat,PetscInt data_sz,PetscReal data_in[],PetscInt *a_stride,PetscReal **a_data_out)
{
  PetscErrorCode ierr;
  Vec            tmp_crds;
  Mat_MPIAIJ     *mpimat = (Mat_MPIAIJ*)Gmat->data;
  PetscInt       nnodes,num_ghosts,dir,kk,jj,my0,Iend,nloc;
  PetscScalar    *data_arr;
  PetscReal      *datas;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPIAIJ);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Gmat, &my0, &Iend);CHKERRQ(ierr);
  nloc = Iend - my0;
  ierr = VecGetLocalSize(mpimat->lvec, &num_ghosts);CHKERRQ(ierr);
  nnodes    = num_ghosts + nloc;
  *a_stride = nnodes;
  ierr = MatCreateVecs(Gmat, &tmp_crds, NULL);CHKERRQ(ierr);

  ierr = PetscMalloc1(data_sz*nnodes, &datas);CHKERRQ(ierr);
  for (dir=0; dir<data_sz; dir++) {
    /* set local, and global */
    for (kk=0; kk<nloc; kk++) {
      PetscInt    gid = my0 + kk;
      PetscScalar crd = (PetscScalar)data_in[dir*nloc + kk]; /* col oriented */
      datas[dir*nnodes + kk] = PetscRealPart(crd);

      ierr = VecSetValues(tmp_crds, 1, &gid, &crd, INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin(tmp_crds);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(tmp_crds);CHKERRQ(ierr);
    /* get ghost data */
    ierr = VecScatterBegin(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(mpimat->lvec, &data_arr);CHKERRQ(ierr);
    for (kk=nloc,jj=0;jj<num_ghosts;kk++,jj++) datas[dir*nnodes + kk] = PetscRealPart(data_arr[jj]);
    ierr = VecRestoreArray(mpimat->lvec, &data_arr);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&tmp_crds);CHKERRQ(ierr);
  *a_data_out = datas;
  PetscFunctionReturn(0);
}
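/*
   Usage sketch (illustrative only): gather 3D nodal coordinates, including ghost nodes, from
   a parallel graph.  "coords" is assumed to hold nloc*3 reals in column-major order (all x's,
   then all y's, then all z's), matching the data_in layout above; the output array is
   allocated by the routine and must be freed by the caller.

     PetscInt  nrows_with_ghosts;
     PetscReal *coords_with_ghosts;
     ierr = PCGAMGGetDataWithGhosts(Gmat,3,coords,&nrows_with_ghosts,&coords_with_ghosts);CHKERRQ(ierr);
     ...
     ierr = PetscFree(coords_with_ghosts);CHKERRQ(ierr);
*/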

PetscErrorCode PCGAMGHashTableCreate(PetscInt a_size, PCGAMGHashTable *a_tab)
{
  PetscErrorCode ierr;
  PetscInt       kk;

  PetscFunctionBegin;
  a_tab->size = a_size;
  ierr = PetscMalloc2(a_size, &a_tab->table,a_size, &a_tab->data);CHKERRQ(ierr);
  for (kk=0; kk<a_size; kk++) a_tab->table[kk] = -1;
  PetscFunctionReturn(0);
}

PetscErrorCode PCGAMGHashTableDestroy(PCGAMGHashTable *a_tab)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree2(a_tab->table,a_tab->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
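/*
   PCGAMGHashTableAdd uses open addressing with linear probing: the key is hashed with
   GAMG_HASH and probing wraps around the table until either the key is found (its data is
   overwritten) or an empty slot (-1) is found (the pair is inserted).  Only when the table
   is completely full is it reallocated at 2*size+5, all old entries rehashed, and the
   insert retried.
*/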
PetscErrorCode PCGAMGHashTableAdd(PCGAMGHashTable *a_tab, PetscInt a_key, PetscInt a_data)
{
  PetscInt kk,idx;

  PetscFunctionBegin;
  if (a_key<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Negative key %D.",a_key);
  for (kk = 0, idx = GAMG_HASH(a_key); kk < a_tab->size; kk++, idx = (idx==(a_tab->size-1)) ? 0 : idx + 1) {
    if (a_tab->table[idx] == a_key) {
      /* exists */
      a_tab->data[idx] = a_data;
      break;
    } else if (a_tab->table[idx] == -1) {
      /* add */
      a_tab->table[idx] = a_key;
      a_tab->data[idx]  = a_data;
      break;
    }
  }
  if (kk==a_tab->size) {
    /* this is not too efficient; we wait until the table is completely full before resizing */
    PetscInt       oldsize = a_tab->size, new_size = 2*a_tab->size + 5, *oldtable = a_tab->table, *olddata = a_tab->data;
    PetscErrorCode ierr;

    a_tab->size = new_size;
    ierr = PetscMalloc2(a_tab->size, &a_tab->table,a_tab->size, &a_tab->data);CHKERRQ(ierr);
    for (kk=0;kk<a_tab->size;kk++) a_tab->table[kk] = -1;
    for (kk=0;kk<oldsize;kk++) {
      if (oldtable[kk] != -1) {
        ierr = PCGAMGHashTableAdd(a_tab, oldtable[kk], olddata[kk]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(oldtable,olddata);CHKERRQ(ierr);
    ierr = PCGAMGHashTableAdd(a_tab, a_key, a_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
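/*
   Usage sketch (illustrative only): map a few global ids to local indices with the table
   above.  The "global_id" and "local_index" names are placeholders; lookups are performed
   elsewhere in the GAMG code via the inline find routine declared in gamg.h.

     PCGAMGHashTable tab;
     ierr = PCGAMGHashTableCreate(32,&tab);CHKERRQ(ierr);
     ierr = PCGAMGHashTableAdd(&tab,global_id,local_index);CHKERRQ(ierr);
     ierr = PCGAMGHashTableDestroy(&tab);CHKERRQ(ierr);
*/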