xref: /petsc/src/vec/is/utils/isltog.c (revision e6e75211d226c622f451867f53ce5d558649ff4f)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
8 
9 
10 #undef __FUNCT__
11 #define __FUNCT__ "ISG2LMapApply"
12 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
13 {
14   PetscErrorCode ierr;
15   PetscInt       i,start,end;
16 
17   PetscFunctionBegin;
18   if (!mapping->globals) {
19     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
20   }
21   start = mapping->globalstart;
22   end = mapping->globalend;
23   for (i=0; i<n; i++) {
24     if (in[i] < 0)          out[i] = in[i];
25     else if (in[i] < start) out[i] = -1;
26     else if (in[i] > end)   out[i] = -1;
27     else                    out[i] = mapping->globals[in[i] - start];
28   }
29   PetscFunctionReturn(0);
30 }
31 
32 
33 #undef __FUNCT__
34 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
35 /*@
36     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
37 
38     Not Collective
39 
40     Input Parameter:
41 .   ltog - local to global mapping
42 
43     Output Parameter:
44 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
45 
46     Level: advanced
47 
48     Concepts: mapping^local to global
49 
50 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
51 @*/
52 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
53 {
54   PetscFunctionBegin;
55   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
56   PetscValidIntPointer(n,2);
57   *n = mapping->bs*mapping->n;
58   PetscFunctionReturn(0);
59 }
60 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* default to the standard-output ASCII viewer on the mapping's communicator */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized printing so each rank's "[rank] local global" lines come out in rank order */
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
105 
106 #undef __FUNCT__
107 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
108 /*@
109     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
110     ordering and a global parallel ordering.
111 
112     Not collective
113 
114     Input Parameter:
115 .   is - index set containing the global numbers for each local number
116 
117     Output Parameter:
118 .   mapping - new mapping data structure
119 
120     Notes: the block size of the IS determines the block size of the mapping
121     Level: advanced
122 
123     Concepts: mapping^local to global
124 
125 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
126 @*/
127 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
128 {
129   PetscErrorCode ierr;
130   PetscInt       n,bs;
131   const PetscInt *indices;
132   MPI_Comm       comm;
133   PetscBool      isblock;
134 
135   PetscFunctionBegin;
136   PetscValidHeaderSpecific(is,IS_CLASSID,1);
137   PetscValidPointer(mapping,2);
138 
139   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
140   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
141   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
142   if (!isblock) {
143     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
144     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
145     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
146   } else {
147     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
148     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
149     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
150     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
151   }
152   PetscFunctionReturn(0);
153 }
154 
155 #undef __FUNCT__
156 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
157 /*@C
158     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
159     ordering and a global parallel ordering.
160 
161     Collective
162 
163     Input Parameter:
164 +   sf - star forest mapping contiguous local indices to (rank, offset)
165 -   start - first global index on this process
166 
167     Output Parameter:
168 .   mapping - new mapping data structure
169 
170     Level: advanced
171 
172     Concepts: mapping^local to global
173 
174 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
175 @*/
176 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
177 {
178   PetscErrorCode ierr;
179   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
180   const PetscInt *ilocal;
181   MPI_Comm       comm;
182 
183   PetscFunctionBegin;
184   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
185   PetscValidPointer(mapping,3);
186 
187   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
188   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
189   if (ilocal) {
190     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
191   }
192   else maxlocal = nleaves;
193   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
194   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
195   for (i=0; i<nroots; i++) globals[i] = start + i;
196   for (i=0; i<maxlocal; i++) ltog[i] = -1;
197   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
198   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
199   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
200   ierr = PetscFree(globals);CHKERRQ(ierr);
201   PetscFunctionReturn(0);
202 }
203 
204 #undef __FUNCT__
205 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
206 /*@
207     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
208     ordering and a global parallel ordering.
209 
210     Not Collective
211 
212     Input Parameters:
213 .   mapping - mapping data structure
214 
215     Output Parameter:
216 .   bs - the blocksize
217 
218     Level: advanced
219 
220     Concepts: mapping^local to global
221 
222 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
223 @*/
224 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
225 {
226   PetscFunctionBegin;
227   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
228   *bs = mapping->bs;
229   PetscFunctionReturn(0);
230 }
231 
232 #undef __FUNCT__
233 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
234 /*@
235     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
236     ordering and a global parallel ordering.
237 
238     Not Collective, but communicator may have more than one process
239 
240     Input Parameters:
241 +   comm - MPI communicator
242 .   bs - the block size
243 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
244 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
245 -   mode - see PetscCopyMode
246 
247     Output Parameter:
248 .   mapping - new mapping data structure
249 
250     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
251     Level: advanced
252 
253     Concepts: mapping^local to global
254 
255 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
256 @*/
257 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
258 {
259   PetscErrorCode ierr;
260   PetscInt       *in;
261 
262   PetscFunctionBegin;
263   if (n) PetscValidIntPointer(indices,3);
264   PetscValidPointer(mapping,4);
265 
266   *mapping = NULL;
267   ierr = ISInitializePackage();CHKERRQ(ierr);
268 
269   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
270                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
271   (*mapping)->n             = n;
272   (*mapping)->bs            = bs;
273   (*mapping)->info_cached   = PETSC_FALSE;
274   (*mapping)->info_free     = PETSC_FALSE;
275   (*mapping)->info_procs    = NULL;
276   (*mapping)->info_numprocs = NULL;
277   (*mapping)->info_indices  = NULL;
278   /*
279     Do not create the global to local mapping. This is only created if
280     ISGlobalToLocalMapping() is called
281   */
282   (*mapping)->globals = 0;
283   if (mode == PETSC_COPY_VALUES) {
284     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
285     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
286     (*mapping)->indices = in;
287     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
288   } else if (mode == PETSC_OWN_POINTER) {
289     (*mapping)->indices = (PetscInt*)indices;
290     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
291   }
292   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
293   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
294   PetscFunctionReturn(0);
295 }
296 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry [0] is freed unconditionally: in the sequential cached case info_nproc is 0
       yet a one-element array with a NULL entry exists (see GetBlockInfo_Private);
       NOTE(review): presumably entries [1..info_nproc) are separate allocations — confirm
       against the parallel branch of GetBlockInfo_Private */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
337 
338 #undef __FUNCT__
339 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
340 /*@
341     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
342     a new index set using the global numbering defined in an ISLocalToGlobalMapping
343     context.
344 
345     Not collective
346 
347     Input Parameters:
348 +   mapping - mapping between local and global numbering
349 -   is - index set in local numbering
350 
351     Output Parameters:
352 .   newis - index set in global numbering
353 
354     Level: advanced
355 
356     Concepts: mapping^local to global
357 
358 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
359           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
360 @*/
361 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
362 {
363   PetscErrorCode ierr;
364   PetscInt       n,*idxout;
365   const PetscInt *idxin;
366 
367   PetscFunctionBegin;
368   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
369   PetscValidHeaderSpecific(is,IS_CLASSID,2);
370   PetscValidPointer(newis,3);
371 
372   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
373   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
374   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
375   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
376   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
377   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
378   PetscFunctionReturn(0);
379 }
380 
381 #undef __FUNCT__
382 #define __FUNCT__ "ISLocalToGlobalMappingApply"
383 /*@
384    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
385    and converts them to the global numbering.
386 
387    Not collective
388 
389    Input Parameters:
390 +  mapping - the local to global mapping context
391 .  N - number of integers
392 -  in - input indices in local numbering
393 
394    Output Parameter:
395 .  out - indices in global numbering
396 
397    Notes:
398    The in and out array parameters may be identical.
399 
400    Level: advanced
401 
402 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
403           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
404           AOPetscToApplication(), ISGlobalToLocalMappingApply()
405 
406     Concepts: mapping^local to global
407 @*/
408 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
409 {
410   PetscInt i,bs,Nmax;
411 
412   PetscFunctionBegin;
413   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
414   bs   = mapping->bs;
415   Nmax = bs*mapping->n;
416   if (bs == 1) {
417     const PetscInt *idx = mapping->indices;
418     for (i=0; i<N; i++) {
419       if (in[i] < 0) {
420         out[i] = in[i];
421         continue;
422       }
423       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
424       out[i] = idx[in[i]];
425     }
426   } else {
427     const PetscInt *idx = mapping->indices;
428     for (i=0; i<N; i++) {
429       if (in[i] < 0) {
430         out[i] = in[i];
431         continue;
432       }
433       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
434       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
435     }
436   }
437   PetscFunctionReturn(0);
438 }
439 
440 #undef __FUNCT__
441 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
442 /*@
443    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
444 
445    Not collective
446 
447    Input Parameters:
448 +  mapping - the local to global mapping context
449 .  N - number of integers
450 -  in - input indices in local block numbering
451 
452    Output Parameter:
453 .  out - indices in global block numbering
454 
455    Notes:
456    The in and out array parameters may be identical.
457 
458    Example:
459      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
460      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
461 
462    Level: advanced
463 
464 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
465           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
466           AOPetscToApplication(), ISGlobalToLocalMappingApply()
467 
468     Concepts: mapping^local to global
469 @*/
470 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
471 {
472 
473   PetscFunctionBegin;
474   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
475   {
476     PetscInt i,Nmax = mapping->n;
477     const PetscInt *idx = mapping->indices;
478 
479     for (i=0; i<N; i++) {
480       if (in[i] < 0) {
481         out[i] = in[i];
482         continue;
483       }
484       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
485       out[i] = idx[in[i]];
486     }
487   }
488   PetscFunctionReturn(0);
489 }
490 
491 /* -----------------------------------------------------------------------------------------*/
492 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    globalstart/globalend bracket the nonnegative stored indices, and
    globals[] is a dense lookup table over that range mapping each global
    (block) index back to its local position, with -1 for gaps.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the min and max nonnegative global index present locally */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative indices at all: make the range empty (start 0, end -1) */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* size end-start+2 rather than +1 so an empty range still allocates one entry
     instead of requesting a zero-length buffer */
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  /* later entries win when the same global index appears more than once */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
527 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                            PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global-to-local lookup table */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  /* start/end and globals[] are in block units; inputs here are unscaled indices,
     so bounds and lookups are scaled by the block size bs below */
  start   = mapping->globalstart;
  end     = mapping->globalend;
  bs      = mapping->bs;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0)                   idxout[i] = idx[i];              /* negative indices pass through */
        else if (idx[i] < bs*start)       idxout[i] = -1;                  /* below the local range */
        else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;                  /* above the local range */
        else                              idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
      }
    }
    if (nout) *nout = n;
  } else {
    /* IS_GTOLM_DROP: count (and optionally store) only indices with a local value */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;                                             /* gap in the table: no local value */
        idxout[nf++] = tmp;
      }
    } else {
      /* idxout == NULL: first pass that only counts the required output length */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
617 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApplyIS"
/*@
    ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
    a new index set using the local numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
-   is - index set in global numbering

    Output Parameters:
.   newis - index set in local numbering

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,nout,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,3);
  PetscValidPointer(newis,4);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  if (type == IS_GTOLM_MASK) {
    /* masking keeps the length: n output entries */
    ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  } else {
    /* dropping shrinks the output: a counting pass (idxout == NULL) sizes the allocation */
    ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
  }
  ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* the new IS takes ownership of idxout */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
665 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
/*@
    ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
    specified with a block global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global-to-local lookup table */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  /* unlike ISGlobalToLocalMappingApply(), everything here is already in block
     units, so no scaling by the block size is needed */
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];                  /* negative indices pass through */
        else if (idx[i] < start) idxout[i] = -1;             /* below the local range */
        else if (idx[i] > end)   idxout[i] = -1;             /* above the local range */
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* IS_GTOLM_DROP: count (and optionally store) only indices with a local value */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;                               /* gap in the table: no local value */
        idxout[nf++] = tmp;
      }
    } else {
      /* idxout == NULL: first pass that only counts the required output length */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
754 
755 #undef __FUNCT__
756 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
757 /*@C
758     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
759      each index shared by more than one processor
760 
761     Collective on ISLocalToGlobalMapping
762 
763     Input Parameters:
764 .   mapping - the mapping from local to global indexing
765 
766     Output Parameter:
767 +   nproc - number of processors that are connected to this one
768 .   proc - neighboring processors
769 .   numproc - number of indices for each subdomain (processor)
770 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
771 
772     Level: advanced
773 
774     Concepts: mapping^local to global
775 
776     Fortran Usage:
777 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
778 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
779           PetscInt indices[nproc][numprocmax],ierr)
780         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
781         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
782 
783 
784 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
785           ISLocalToGlobalMappingRestoreInfo()
786 @*/
787 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
788 {
789   PetscErrorCode ierr;
790 
791   PetscFunctionBegin;
792   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
793   if (mapping->info_cached) {
794     *nproc = mapping->info_nproc;
795     *procs = mapping->info_procs;
796     *numprocs = mapping->info_numprocs;
797     *indices = mapping->info_indices;
798   } else {
799     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
800   }
801   PetscFunctionReturn(0);
802 }
803 
804 #undef __FUNCT__
805 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo_Private"
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - computes, via a rendezvous-style MPI
   exchange, which other processes share (block) indices with this process.

   Output (all arrays allocated here; pointers are cached on the mapping at the end,
   so the caller must not free them individually):
     nproc    - number of subdomains (processes) that share at least one nontrivial node with this one
     procs    - ranks of those processes (this rank is swapped into slot 0 when present)
     numprocs - number of shared local nodes per listed process
     indices  - for each listed process, the local node numbers shared with it,
                sorted by global index

   Scheme: global indices are partitioned into contiguous "ownership" ranges of width
   scale; each process sends its (global index, local index) pairs to the range owners
   (tag1); owners count how many subdomains touch each index and send the sharer lists
   back (lengths on tag2, payload on tag3).
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; still allocate one-entry numprocs/indices arrays
       so the Restore/caching conventions work uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = (largest global index over all processes) + 1 */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* each rank "globally owns" the contiguous index range [rstart, rstart+ng);
     the last rank gets the (possibly smaller) remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs has two slots per rank: [2*p] = how many of our indices rank p owns,
     [2*p+1] = 0/1 flag that rank p owns at least one (used by PetscMaxSum) */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* recvs is laid out in fixed-size slots of 2*nmax entries per incoming message;
     each message is a sequence of (global index, sender-local index) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* packing advanced the starts[] cursors; rebuild them for the send loop below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  /* starts[i] is the offset into ownedsenders for owned index i; only nontrivial
     (shared by >1 subdomain) indices consume space */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* rebuild starts[] again (the listing loop above consumed the cursors) */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is nprocs[i] entries long (the leading count entry included), and consists of
       (0) the number of nodes being sent back
     then, repeated for each nontrivial node,
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* starts3[i] = offset of message i in recvs2; nt = total incoming length */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number of subdomains for each local node */
  /* nprocs is reused here: nprocs[p] = number of local nodes shared with rank p */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[p] = compressed slot (position within *procs) of rank p */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1215 
1216 #undef __FUNCT__
1217 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1218 /*@C
1219     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1220 
1221     Collective on ISLocalToGlobalMapping
1222 
1223     Input Parameters:
1224 .   mapping - the mapping from local to global indexing
1225 
1226     Output Parameter:
1227 +   nproc - number of processors that are connected to this one
1228 .   proc - neighboring processors
1229 .   numproc - number of indices for each processor
1230 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1231 
1232     Level: advanced
1233 
1234 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1235           ISLocalToGlobalMappingGetInfo()
1236 @*/
1237 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1238 {
1239   PetscErrorCode ierr;
1240 
1241   PetscFunctionBegin;
1242   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1243   if (mapping->info_free) {
1244     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1245     if (*indices) {
1246       PetscInt i;
1247 
1248       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1249       for (i=1; i<*nproc; i++) {
1250         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1251       }
1252       ierr = PetscFree(*indices);CHKERRQ(ierr);
1253     }
1254   }
1255   *nproc = 0;
1256   *procs = NULL;
1257   *numprocs = NULL;
1258   *indices = NULL;
1259   PetscFunctionReturn(0);
1260 }
1261 
1262 #undef __FUNCT__
1263 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1264 /*@C
1265     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1266      each index shared by more than one processor
1267 
1268     Collective on ISLocalToGlobalMapping
1269 
1270     Input Parameters:
1271 .   mapping - the mapping from local to global indexing
1272 
1273     Output Parameter:
1274 +   nproc - number of processors that are connected to this one
1275 .   proc - neighboring processors
1276 .   numproc - number of indices for each subdomain (processor)
1277 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1278 
1279     Level: advanced
1280 
1281     Concepts: mapping^local to global
1282 
1283     Fortran Usage:
1284 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1285 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1286           PetscInt indices[nproc][numprocmax],ierr)
1287         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1288         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1289 
1290 
1291 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1292           ISLocalToGlobalMappingRestoreInfo()
1293 @*/
1294 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1295 {
1296   PetscErrorCode ierr;
1297   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1298 
1299   PetscFunctionBegin;
1300   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1301   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1302   if (bs > 1) { /* we need to expand the cached info */
1303     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1304     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1305     for (i=0; i<*nproc; i++) {
1306       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1307       for (j=0; j<bnumprocs[i]; j++) {
1308         for (k=0; k<bs; k++) {
1309           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1310         }
1311       }
1312       (*numprocs)[i] = bnumprocs[i]*bs;
1313     }
1314     mapping->info_free = PETSC_TRUE;
1315   } else {
1316     *numprocs = bnumprocs;
1317     *indices  = bindices;
1318   }
1319   PetscFunctionReturn(0);
1320 }
1321 
1322 #undef __FUNCT__
1323 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1324 /*@C
1325     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1326 
1327     Collective on ISLocalToGlobalMapping
1328 
1329     Input Parameters:
1330 .   mapping - the mapping from local to global indexing
1331 
1332     Output Parameter:
1333 +   nproc - number of processors that are connected to this one
1334 .   proc - neighboring processors
1335 .   numproc - number of indices for each processor
1336 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1337 
1338     Level: advanced
1339 
1340 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1341           ISLocalToGlobalMappingGetInfo()
1342 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* point info and block info follow the same allocation conventions, so the
     block restore routine performs all the (conditional) freeing and zeroing */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1351 
1352 #undef __FUNCT__
1353 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1354 /*@C
1355    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1356 
1357    Not Collective
1358 
1359    Input Arguments:
1360 . ltog - local to global mapping
1361 
1362    Output Arguments:
1363 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1364 
1365    Level: advanced
1366 
   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array
1368 
1369 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1370 @*/
1371 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1372 {
1373   PetscFunctionBegin;
1374   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1375   PetscValidPointer(array,2);
1376   if (ltog->bs == 1) {
1377     *array = ltog->indices;
1378   } else {
1379     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1380     const PetscInt *ii;
1381     PetscErrorCode ierr;
1382 
1383     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1384     *array = jj;
1385     k    = 0;
1386     ii   = ltog->indices;
1387     for (i=0; i<n; i++)
1388       for (j=0; j<bs; j++)
1389         jj[k++] = bs*ii[i] + j;
1390   }
1391   PetscFunctionReturn(0);
1392 }
1393 
1394 #undef __FUNCT__
1395 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1396 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1398 
1399    Not Collective
1400 
1401    Input Arguments:
1402 + ltog - local to global mapping
1403 - array - array of indices
1404 
1405    Level: advanced
1406 
1407 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1408 @*/
1409 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1410 {
1411   PetscFunctionBegin;
1412   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1413   PetscValidPointer(array,2);
1414   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1415 
1416   if (ltog->bs > 1) {
1417     PetscErrorCode ierr;
1418     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1419   }
1420   PetscFunctionReturn(0);
1421 }
1422 
1423 #undef __FUNCT__
1424 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1425 /*@C
1426    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1427 
1428    Not Collective
1429 
1430    Input Arguments:
1431 . ltog - local to global mapping
1432 
1433    Output Arguments:
1434 . array - array of indices
1435 
1436    Level: advanced
1437 
1438 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1439 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* no copy: expose the mapping's internal block index array; the caller must
     hand the same pointer back via ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1448 
1449 #undef __FUNCT__
1450 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1451 /*@C
1452    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1453 
1454    Not Collective
1455 
1456    Input Arguments:
1457 + ltog - local to global mapping
1458 - array - array of indices
1459 
1460    Level: advanced
1461 
1462 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1463 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* GetBlockIndices() hands out the internal array (no copy), so the exact same
     pointer must be returned; anything else indicates a caller error */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL;
  PetscFunctionReturn(0);
}
1473 
1474 #undef __FUNCT__
1475 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1476 /*@C
1477    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1478 
1479    Not Collective
1480 
1481    Input Arguments:
1482 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1483 . n - number of mappings to concatenate
1484 - ltogs - local to global mappings
1485 
1486    Output Arguments:
1487 . ltogcat - new mapping
1488 
1489    Note: this currently always returns a mapping with block size of 1
1490 
1491    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1492 
1493    Level: advanced
1494 
1495 .seealso: ISLocalToGlobalMappingCreate()
1496 @*/
1497 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1498 {
1499   PetscInt       i,cnt,m,*idx;
1500   PetscErrorCode ierr;
1501 
1502   PetscFunctionBegin;
1503   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1504   if (n > 0) PetscValidPointer(ltogs,3);
1505   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1506   PetscValidPointer(ltogcat,4);
1507   for (cnt=0,i=0; i<n; i++) {
1508     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1509     cnt += m;
1510   }
1511   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1512   for (cnt=0,i=0; i<n; i++) {
1513     const PetscInt *subidx;
1514     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1515     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1516     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1517     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1518     cnt += m;
1519   }
1520   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1521   PetscFunctionReturn(0);
1522 }
1523 
1524 
1525