/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/

/*
   Produces a set of block column indices of the matrix row, one for each block represented in the original row

   n - the number of block indices in cc[]
   cc - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRow(Mat Amat,PetscInt row,PetscInt bs,PetscInt *n,PetscInt *cc)
{
  PetscInt       cnt = -1,nidx,j;
  const PetscInt *idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr);
  if (nidx) {
    cnt = 0;
    cc[cnt] = idx[0]/bs;
    for (j=1; j<nidx; j++) {
      if (cc[cnt] < idx[j]/bs) cc[++cnt] = idx[j]/bs;
    }
  }
  ierr = MatRestoreRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr);
  *n = cnt+1;
  PetscFunctionReturn(0);
}
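
/*
   Example (illustrative): with bs=2, a row whose scalar column indices are
   {0,1,4,5,9} collapses to the block column indices {0,2,4}. Note that the
   scan above relies on MatGetRow() returning column indices in ascending order.
*/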

/*
    Produces a set of block column indices of the matrix block row, one for each block represented in the original set of rows

    ncollapsed - the number of block indices
    collapsed - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRows(Mat Amat,PetscInt start,PetscInt bs,PetscInt *w0,PetscInt *w1,PetscInt *w2,PetscInt *ncollapsed,PetscInt **collapsed)
{
  PetscInt       i,nprev,*cprev = w0,ncur = 0,*ccur = w1,*merged = w2,*cprevtmp;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCollapseRow(Amat,start,bs,&nprev,cprev);CHKERRQ(ierr);
  for (i=start+1; i<start+bs; i++) {
    ierr  = MatCollapseRow(Amat,i,bs,&ncur,ccur);CHKERRQ(ierr);
    ierr  = PetscMergeIntArray(nprev,cprev,ncur,ccur,&nprev,&merged);CHKERRQ(ierr);
    cprevtmp = cprev; cprev = merged; merged = cprevtmp;
  }
  *ncollapsed = nprev;
  if (collapsed) *collapsed  = cprev;
  PetscFunctionReturn(0);
}
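
/*
   A minimal usage sketch (hypothetical helper, not part of PETSc): count the
   distinct block columns in the first owned block row of Amat. The three
   workspace arrays passed to MatCollapseRows() must be large enough for any
   collapsed row; the global number of block columns is a safe (loose) bound.
*/
PETSC_STATIC_INLINE PetscErrorCode ExampleCountBlockCols(Mat Amat,PetscInt bs,PetscInt *nblockcols)
{
  PetscErrorCode ierr;
  PetscInt       N,cap,Istart,Iend,*w0,*w1,*w2;

  PetscFunctionBegin;
  ierr = MatGetSize(Amat,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Amat,&Istart,&Iend);CHKERRQ(ierr);
  cap  = N/bs; /* loose upper bound on the number of distinct block columns */
  ierr = PetscMalloc3(cap,&w0,cap,&w1,cap,&w2);CHKERRQ(ierr);
  ierr = MatCollapseRows(Amat,Istart,bs,w0,w1,w2,nblockcols,NULL);CHKERRQ(ierr);
  ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}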


/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateGraph - create simple scaled scalar graph from matrix

 Input Parameter:
 . Amat - matrix
 Output Parameter:
 . a_Gmat - output scalar graph (symmetric?)
 */
PetscErrorCode PCGAMGCreateGraph(Mat Amat, Mat *a_Gmat)
{
  PetscErrorCode ierr;
  PetscInt       Istart,Iend,Ii,jj,kk,ncols,nloc,NN,MM,bs;
  MPI_Comm       comm;
  Mat            Gmat;
  MatType        mtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Amat, &Istart, &Iend);CHKERRQ(ierr);
  ierr = MatGetSize(Amat, &MM, &NN);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat, &bs);CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs;

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif

  if (bs > 1) {
    const PetscScalar *vals;
    const PetscInt    *idx;
    PetscInt          *d_nnz, *o_nnz,*w0,*w1,*w2;
    PetscBool         ismpiaij,isseqaij;

    /*
       Determine the preallocation needed for the scalar matrix derived from the vector matrix.
    */

    ierr = PetscObjectBaseTypeCompare((PetscObject)Amat,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)Amat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);

    ierr = PetscMalloc2(nloc, &d_nnz,isseqaij ? 0 : nloc, &o_nnz);CHKERRQ(ierr);

    if (isseqaij) {
      PetscInt       max_d_nnz;

      /*
          Determine exact preallocation count for (sequential) scalar matrix
      */
      ierr = MatSeqAIJGetMaxRowNonzeros(Amat,&max_d_nnz);CHKERRQ(ierr);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      ierr = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr);
      for (Ii = 0, jj = 0; Ii < Iend; Ii += bs, jj++) {
        ierr = MatCollapseRows(Amat,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);

    } else if (ismpiaij) {
      Mat            Daij,Oaij;
      const PetscInt *garray;
      PetscInt       max_d_nnz;

      ierr = MatMPIAIJGetSeqAIJ(Amat,&Daij,&Oaij,&garray);CHKERRQ(ierr);

      /*
          Determine exact preallocation count for diagonal block portion of scalar matrix
      */
      ierr = MatSeqAIJGetMaxRowNonzeros(Daij,&max_d_nnz);CHKERRQ(ierr);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      ierr = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr);
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        ierr = MatCollapseRows(Daij,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);

      /*
         Overestimate (usually grossly) the preallocation count for the off-diagonal portion of the scalar matrix
      */
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        o_nnz[jj] = 0;
        for (kk=0; kk<bs; kk++) { /* rows that get collapsed to a single row */
          ierr = MatGetRow(Oaij,Ii+kk,&ncols,0,0);CHKERRQ(ierr);
          o_nnz[jj] += ncols;
          ierr = MatRestoreRow(Oaij,Ii+kk,&ncols,0,0);CHKERRQ(ierr);
        }
        if (o_nnz[jj] > (NN/bs-nloc)) o_nnz[jj] = NN/bs-nloc;
      }

    } else {
      SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require AIJ matrix type");
    }

    /* get scalar copy (norms) of matrix */
    ierr = MatGetType(Amat,&mtype);CHKERRQ(ierr);
    ierr = MatCreate(comm, &Gmat);CHKERRQ(ierr);
    ierr = MatSetSizes(Gmat,nloc,nloc,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(Gmat, 1, 1);CHKERRQ(ierr);
    ierr = MatSetType(Gmat, mtype);CHKERRQ(ierr);
    ierr = MatSeqAIJSetPreallocation(Gmat,0,d_nnz);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(Gmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
    ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

    for (Ii = Istart; Ii < Iend; Ii++) {
      PetscInt dest_row = Ii/bs;
      ierr = MatGetRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
      for (jj=0; jj<ncols; jj++) {
        PetscInt    dest_col = idx[jj]/bs;
        PetscScalar sv       = PetscAbs(PetscRealPart(vals[jj]));
        ierr = MatSetValues(Gmat,1,&dest_row,1,&dest_col,&sv,ADD_VALUES);CHKERRQ(ierr);
      }
      ierr = MatRestoreRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    /* just copy scalar matrix - abs() not taken here but scaled later */
    ierr = MatDuplicate(Amat, MAT_COPY_VALUES, &Gmat);CHKERRQ(ierr);
  }

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif

  *a_Gmat = Gmat;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*@C
   PCGAMGFilterGraph - filter the graph (remove zero and possibly small values) and make it symmetric if requested

   Collective on Mat

   Input Parameters:
+   a_Gmat - the graph
.   vfilter - threshold parameter in [0,1)
-   symm - make the result symmetric

   Level: developer

   Notes:
    This is called before the graph coarseners are called.

.seealso: PCGAMGSetThreshold()
@*/
PetscErrorCode PCGAMGFilterGraph(Mat *a_Gmat,PetscReal vfilter,PetscBool symm)
{
  PetscErrorCode    ierr;
  PetscInt          Istart,Iend,Ii,jj,ncols,nnz0,nnz1, NN, MM, nloc;
  PetscMPIInt       rank;
  Mat               Gmat  = *a_Gmat, tGmat, matTrans;
  MPI_Comm          comm;
  const PetscScalar *vals;
  const PetscInt    *idx;
  PetscInt          *d_nnz, *o_nnz;
  Vec               diag;
  MatType           mtype;

  PetscFunctionBegin;
#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif
  /* scale Gmat so all values lie between -1 and 1: G_ij <- G_ij/sqrt(|G_ii| |G_jj|) */
  ierr = MatCreateVecs(Gmat, &diag, 0);CHKERRQ(ierr);
  ierr = MatGetDiagonal(Gmat, diag);CHKERRQ(ierr);
  ierr = VecReciprocal(diag);CHKERRQ(ierr);
  ierr = VecSqrtAbs(diag);CHKERRQ(ierr);
  ierr = MatDiagonalScale(Gmat, diag, diag);CHKERRQ(ierr);
  ierr = VecDestroy(&diag);CHKERRQ(ierr);

  if (vfilter < 0.0 && !symm) {
    /* Just use the provided matrix as the graph but make all values positive */
    MatInfo     info;
    PetscScalar *avals;
    PetscBool isaij,ismpiaij;
    ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat,MATSEQAIJ,&isaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    if (!isaij && !ismpiaij) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require (MPI)AIJ matrix type");
    if (isaij) {
      ierr = MatGetInfo(Gmat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(Gmat,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(Gmat,&avals);CHKERRQ(ierr);
    } else {
      Mat_MPIAIJ  *aij = (Mat_MPIAIJ*)Gmat->data;
      ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(aij->A,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(aij->A,&avals);CHKERRQ(ierr);
      ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(aij->B,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(aij->B,&avals);CHKERRQ(ierr);
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif
    PetscFunctionReturn(0);
  }

  ierr = PetscObjectGetComm((PetscObject)Gmat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Gmat, &Istart, &Iend);CHKERRQ(ierr);
  nloc = Iend - Istart;
  ierr = MatGetSize(Gmat, &MM, &NN);CHKERRQ(ierr);

  if (symm) {
    ierr = MatTranspose(Gmat, MAT_INITIAL_MATRIX, &matTrans);CHKERRQ(ierr);
  }

  /* Determine upper bound on nonzeros needed in new filtered matrix */
  ierr = PetscMalloc2(nloc, &d_nnz,nloc, &o_nnz);CHKERRQ(ierr);
  for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
    ierr      = MatGetRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    d_nnz[jj] = ncols;
    o_nnz[jj] = ncols;
    ierr      = MatRestoreRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    if (symm) {
      ierr       = MatGetRow(matTrans,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
      d_nnz[jj] += ncols;
      o_nnz[jj] += ncols;
      ierr       = MatRestoreRow(matTrans,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    }
    if (d_nnz[jj] > nloc) d_nnz[jj] = nloc;
    if (o_nnz[jj] > (MM-nloc)) o_nnz[jj] = MM - nloc;
  }
  ierr = MatGetType(Gmat,&mtype);CHKERRQ(ierr);
  ierr = MatCreate(comm, &tGmat);CHKERRQ(ierr);
  ierr = MatSetSizes(tGmat,nloc,nloc,MM,MM);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(tGmat, 1, 1);CHKERRQ(ierr);
  ierr = MatSetType(tGmat, mtype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(tGmat,0,d_nnz);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(tGmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
  ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
  if (symm) {
    ierr = MatDestroy(&matTrans);CHKERRQ(ierr);
  } else {
    /* all entries are generated locally so MatAssembly will be slightly faster for large process counts */
    ierr = MatSetOption(tGmat,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  }

  for (Ii = Istart, nnz0 = nnz1 = 0; Ii < Iend; Ii++) {
    ierr = MatGetRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    for (jj=0; jj<ncols; jj++,nnz0++) {
      PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
      if (PetscRealPart(sv) > vfilter) {
        nnz1++;
        if (symm) {
          sv  *= 0.5; /* add half to (i,j) and half to (j,i) so the result is symmetric */
          ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,ADD_VALUES);CHKERRQ(ierr);
          ierr = MatSetValues(tGmat,1,&idx[jj],1,&Ii,&sv,ADD_VALUES);CHKERRQ(ierr);
        } else {
          ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,ADD_VALUES);CHKERRQ(ierr);
        }
      }
    }
    ierr = MatRestoreRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif

#if defined(PETSC_USE_INFO)
  {
    double t1 = (!nnz0) ? 1. : 100.*(double)nnz1/(double)nnz0, t2 = (!nloc) ? 1. : (double)nnz0/(double)nloc;
    ierr = PetscInfo4(*a_Gmat,"\t %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%D)\n",t1,vfilter,t2,MM);CHKERRQ(ierr);
  }
#endif
  ierr    = MatDestroy(&Gmat);CHKERRQ(ierr);
  *a_Gmat = tGmat;
  PetscFunctionReturn(0);
}
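
/*
   A minimal usage sketch (hypothetical caller; the threshold is assumed to be
   in [0,1) as documented above): build the scalar graph from a blocked
   operator and filter it in place, the same sequence the GAMG setup performs
   before coarsening.
*/
PETSC_STATIC_INLINE PetscErrorCode ExampleBuildFilteredGraph(Mat Amat,PetscReal threshold,PetscBool symmetrize,Mat *Gmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGAMGCreateGraph(Amat,Gmat);CHKERRQ(ierr);                  /* collapse blocks, take |values| */
  ierr = PCGAMGFilterGraph(Gmat,threshold,symmetrize);CHKERRQ(ierr);  /* scale, drop small entries; replaces *Gmat */
  PetscFunctionReturn(0);
}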

/* -------------------------------------------------------------------------- */
/*
   PCGAMGGetDataWithGhosts - reaches directly into the MPIAIJ data structures, so Gmat must be MPIAIJ (npe > 1)

   Input Parameters:
   . Gmat - MPIAIJ matrix used for the scatters
   . data_sz - number of data terms per node (# cols in output)
   . data_in[nloc*data_sz] - column-oriented data
   Output Parameters:
   . a_stride - number of rows of output
   . a_data_out[stride*data_sz] - output data with ghosts
*/
PetscErrorCode PCGAMGGetDataWithGhosts(Mat Gmat,PetscInt data_sz,PetscReal data_in[],PetscInt *a_stride,PetscReal **a_data_out)
{
  PetscErrorCode ierr;
  MPI_Comm       comm;
  Vec            tmp_crds;
  Mat_MPIAIJ     *mpimat = (Mat_MPIAIJ*)Gmat->data;
  PetscInt       nnodes,num_ghosts,dir,kk,jj,my0,Iend,nloc;
  PetscScalar    *data_arr;
  PetscReal      *datas;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Gmat,&comm);CHKERRQ(ierr);
  ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPIAIJ);CHKERRQ(ierr);
  if (!isMPIAIJ) SETERRQ(comm,PETSC_ERR_USER,"Require MPIAIJ matrix type");
  ierr      = MatGetOwnershipRange(Gmat, &my0, &Iend);CHKERRQ(ierr);
  nloc      = Iend - my0;
  ierr      = VecGetLocalSize(mpimat->lvec, &num_ghosts);CHKERRQ(ierr);
  nnodes    = num_ghosts + nloc;
  *a_stride = nnodes;
  ierr      = MatCreateVecs(Gmat, &tmp_crds, 0);CHKERRQ(ierr);

  ierr = PetscMalloc1(data_sz*nnodes, &datas);CHKERRQ(ierr);
  for (dir=0; dir<data_sz; dir++) {
    /* set local and global values */
    for (kk=0; kk<nloc; kk++) {
      PetscInt    gid = my0 + kk;
      PetscScalar crd = (PetscScalar)data_in[dir*nloc + kk]; /* col oriented */
      datas[dir*nnodes + kk] = PetscRealPart(crd);

      ierr = VecSetValues(tmp_crds, 1, &gid, &crd, INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin(tmp_crds);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(tmp_crds);CHKERRQ(ierr);
    /* get ghost data */
    ierr = VecScatterBegin(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(mpimat->lvec, &data_arr);CHKERRQ(ierr);
    for (kk=nloc,jj=0;jj<num_ghosts;kk++,jj++) datas[dir*nnodes + kk] = PetscRealPart(data_arr[jj]);
    ierr = VecRestoreArray(mpimat->lvec, &data_arr);CHKERRQ(ierr);
  }
  ierr        = VecDestroy(&tmp_crds);CHKERRQ(ierr);
  *a_data_out = datas;
  PetscFunctionReturn(0);
}
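
/*
   A minimal usage sketch (hypothetical; coords[] is assumed to hold dim*nloc
   local coordinates stored column oriented, i.e. all x, then all y, then all z):
   fetch coordinates for local plus ghost nodes, then free the array returned
   by PCGAMGGetDataWithGhosts().
*/
PETSC_STATIC_INLINE PetscErrorCode ExampleGhostCoords(Mat Gmat,PetscInt dim,PetscReal coords[])
{
  PetscErrorCode ierr;
  PetscInt       nnodes;
  PetscReal      *gcoords;

  PetscFunctionBegin;
  ierr = PCGAMGGetDataWithGhosts(Gmat,dim,coords,&nnodes,&gcoords);CHKERRQ(ierr);
  /* gcoords[dir*nnodes + kk]: kk < nloc are local nodes, the rest are ghosts */
  ierr = PetscFree(gcoords);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}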


/*
   PCGAMGHashTableCreate
*/

PetscErrorCode PCGAMGHashTableCreate(PetscInt a_size, PCGAMGHashTable *a_tab)
{
  PetscErrorCode ierr;
  PetscInt       kk;

  PetscFunctionBegin;
  a_tab->size = a_size;
  ierr = PetscMalloc1(a_size, &a_tab->table);CHKERRQ(ierr);
  ierr = PetscMalloc1(a_size, &a_tab->data);CHKERRQ(ierr);
  for (kk=0; kk<a_size; kk++) a_tab->table[kk] = -1; /* -1 marks an empty slot */
  PetscFunctionReturn(0);
}

PetscErrorCode PCGAMGHashTableDestroy(PCGAMGHashTable *a_tab)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(a_tab->table);CHKERRQ(ierr);
  ierr = PetscFree(a_tab->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode PCGAMGHashTableAdd(PCGAMGHashTable *a_tab, PetscInt a_key, PetscInt a_data)
{
  PetscInt kk,idx;

  PetscFunctionBegin;
  if (a_key<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Negative key %D.",a_key);
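  /* open addressing with linear probing: scan from the hashed slot, wrapping
     around the end of the table, until the key or an empty (-1) slot is found */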
  for (kk = 0, idx = GAMG_HASH(a_key);
       kk < a_tab->size;
       kk++, idx = (idx==(a_tab->size-1)) ? 0 : idx + 1) {

    if (a_tab->table[idx] == a_key) {
      /* exists */
      a_tab->data[idx] = a_data;
      break;
    } else if (a_tab->table[idx] == -1) {
      /* add */
      a_tab->table[idx] = a_key;
      a_tab->data[idx]  = a_data;
      break;
    }
  }
  if (kk==a_tab->size) {
    /* this is not too efficient: we wait until the table is completely full before resizing and rehashing */
    PetscInt       oldsize = a_tab->size, new_size = 2*a_tab->size + 5, *oldtable = a_tab->table, *olddata = a_tab->data;
    PetscErrorCode ierr;

    a_tab->size = new_size;

    ierr = PetscMalloc1(a_tab->size, &a_tab->table);CHKERRQ(ierr);
    ierr = PetscMalloc1(a_tab->size, &a_tab->data);CHKERRQ(ierr);

    for (kk=0;kk<a_tab->size;kk++) a_tab->table[kk] = -1;
    for (kk=0;kk<oldsize;kk++) {
      if (oldtable[kk] != -1) {
        ierr = PCGAMGHashTableAdd(a_tab, oldtable[kk], olddata[kk]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree(oldtable);CHKERRQ(ierr);
    ierr = PetscFree(olddata);CHKERRQ(ierr);
    ierr = PCGAMGHashTableAdd(a_tab, a_key, a_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
463