xref: /petsc/src/vec/is/utils/isltog.c (revision 09573ac72a50d3e7ecd55a2b7f0ef28450cd0a8b)
1 #define PETSCVEC_DLL
2 
3 #include "petscvec.h"   /*I "petscvec.h" I*/
4 #include "private/isimpl.h"    /*I "petscis.h"  I*/
5 
6 PetscClassId PETSCVEC_DLLEXPORT IS_LTOGM_CLASSID;
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
10 /*@C
11     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
12 
13     Not Collective
14 
15     Input Parameter:
16 .   ltog - local to global mapping
17 
18     Output Parameter:
19 .   n - the number of entries in the local mapping
20 
21     Level: advanced
22 
23     Concepts: mapping^local to global
24 
25 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
26 @*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  /* Validate arguments: mapping must be a live IS_LTOGM object, n a writable int pointer */
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;  /* number of local entries stored in the mapping */
  PetscFunctionReturn(0);
}
35 
36 #undef __FUNCT__
37 #define __FUNCT__ "ISLocalToGlobalMappingView"
38 /*@C
39     ISLocalToGlobalMappingView - View a local to global mapping
40 
41     Not Collective
42 
43     Input Parameters:
44 +   ltog - local to global mapping
45 -   viewer - viewer used to display the mapping; only ASCII viewers are currently supported
46 
47     Level: advanced
48 
49     Concepts: mapping^local to global
50 
51 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
52 @*/
53 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
54 {
55   PetscInt        i;
56   PetscMPIInt     rank;
57   PetscBool       iascii;
58   PetscErrorCode  ierr;
59 
60   PetscFunctionBegin;
61   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
62   if (!viewer) {
63     ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
64   }
65   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
66 
67   ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
68   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
69   if (iascii) {
70     for (i=0; i<mapping->n; i++) {
71       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
72     }
73     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
74   } else {
75     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
76   }
77 
78   PetscFunctionReturn(0);
79 }
80 
81 #undef __FUNCT__
82 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
83 /*@
84     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
85     ordering and a global parallel ordering.
86 
87     Not collective
88 
89     Input Parameter:
90 .   is - index set containing the global numbers for each local number
91 
92     Output Parameter:
93 .   mapping - new mapping data structure
94 
95     Level: advanced
96 
97     Concepts: mapping^local to global
98 
99 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
100 @*/
101 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
102 {
103   PetscErrorCode ierr;
104   PetscInt       n;
105   const PetscInt *indices;
106   MPI_Comm       comm;
107 
108   PetscFunctionBegin;
109   PetscValidHeaderSpecific(is,IS_CLASSID,1);
110   PetscValidPointer(mapping,2);
111 
112   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
113   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
114   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
115   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
116   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
117   PetscFunctionReturn(0);
118 }
119 
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
123 /*@
124     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
125     ordering and a global parallel ordering.
126 
127     Not Collective, but communicator may have more than one process
128 
129     Input Parameters:
130 +   comm - MPI communicator
131 .   n - the number of local elements
132 .   indices - the global index for each local element
133 -   mode - see PetscCopyMode
134 
135     Output Parameter:
136 .   mapping - new mapping data structure
137 
138     Level: advanced
139 
140     Concepts: mapping^local to global
141 
142 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
143 @*/
144 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
145 {
146   PetscErrorCode ierr;
147   PetscInt       *in;
148 
149   PetscFunctionBegin;
150   if (n) PetscValidIntPointer(indices,3);
151   PetscValidPointer(mapping,4);
152 
153   *mapping = PETSC_NULL;
154 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
155   ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
156 #endif
157 
158   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping",
159 			   cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
160   (*mapping)->n       = n;
161   /*
162     Do not create the global to local mapping. This is only created if
163     ISGlobalToLocalMapping() is called
164   */
165   (*mapping)->globals = 0;
166   if (mode == PETSC_COPY_VALUES) {
167     ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
168     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
169     ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
170     (*mapping)->indices = in;
171   } else if (mode == PETSC_OWN_POINTER) {
172     (*mapping)->indices = (PetscInt*)indices;
173   } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
174   PetscFunctionReturn(0);
175 }
176 
177 #undef __FUNCT__
178 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
179 /*@
180     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
181        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
182        and VecSetLocalToGlobalMappingBlock().
183 
184     Not Collective, but communicator may have more than one process
185 
186     Input Parameters:
187 +    inmap - original point-wise mapping
188 -    bs - block size
189 
190     Output Parameter:
191 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
192 
193     Level: advanced
194 
195     Concepts: mapping^local to global
196 
197 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
198 @*/
199 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
200 {
201   PetscErrorCode ierr;
202   PetscInt       *ii,i,n;
203 
204   PetscFunctionBegin;
205   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
206   PetscValidPointer(outmap,3);
207   if (bs > 1) {
208     n    = inmap->n/bs;
209     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
210     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
211     for (i=0; i<n; i++) {
212       ii[i] = inmap->indices[bs*i]/bs;
213     }
214     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
215   } else {
216     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
217     *outmap = inmap;
218   }
219   PetscFunctionReturn(0);
220 }
221 
222 #undef __FUNCT__
223 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
224 /*@
225     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
226        ISLocalToGlobalMapping
227 
228     Not Collective, but communicator may have more than one process
229 
230     Input Parameter:
231 +   inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
232 -   bs - block size
233 
234     Output Parameter:
235 .   outmap - pointwise mapping
236 
237     Level: advanced
238 
239     Concepts: mapping^local to global
240 
241 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
242 @*/
243 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
244 {
245   PetscErrorCode ierr;
246   PetscInt       *ii,i,n;
247 
248   PetscFunctionBegin;
249   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
250   PetscValidPointer(outmap,2);
251   if (bs > 1) {
252     n    = inmap->n*bs;
253     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
254     for (i=0; i<n; i++) {
255       ii[i] = inmap->indices[i/bs]*bs + (i%bs);
256     }
257     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
258   } else {
259     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
260     *outmap = inmap;
261   }
262   PetscFunctionReturn(0);
263 }
264 
265 #undef __FUNCT__
266 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
267 /*@
268    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
269    ordering and a global parallel ordering.
270 
271    Not Collective
272 
273    Input Parameters:
274 .  mapping - mapping data structure
275 
276    Level: advanced
277 
278 .seealso: ISLocalToGlobalMappingCreate()
279 @*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Drop one reference; storage is released only when the last reference goes away */
  if (--((PetscObject)mapping)->refct > 0) PetscFunctionReturn(0);
  ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
  /* globals may never have been built (lazy setup); PetscFree of 0 is a no-op */
  ierr = PetscFree(mapping->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
291 
292 #undef __FUNCT__
293 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
294 /*@
295     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
296     a new index set using the global numbering defined in an ISLocalToGlobalMapping
297     context.
298 
299     Not collective
300 
301     Input Parameters:
302 +   mapping - mapping between local and global numbering
303 -   is - index set in local numbering
304 
305     Output Parameters:
306 .   newis - index set in global numbering
307 
308     Level: advanced
309 
310     Concepts: mapping^local to global
311 
312 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
313           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
314 @*/
315 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
316 {
317   PetscErrorCode ierr;
318   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
319   const PetscInt *idxin;
320 
321   PetscFunctionBegin;
322   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
323   PetscValidHeaderSpecific(is,IS_CLASSID,2);
324   PetscValidPointer(newis,3);
325 
326   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
327   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
328   idxmap = mapping->indices;
329 
330   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
331   for (i=0; i<n; i++) {
332     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
333     idxout[i] = idxmap[idxin[i]];
334   }
335   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
336   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
337   PetscFunctionReturn(0);
338 }
339 
340 /*MC
341    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
342    and converts them to the global numbering.
343 
344    Synopsis:
345    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
346 
347    Not collective
348 
349    Input Parameters:
350 +  mapping - the local to global mapping context
351 .  N - number of integers
352 -  in - input indices in local numbering
353 
354    Output Parameter:
355 .  out - indices in global numbering
356 
357    Notes:
358    The in and out array parameters may be identical.
359 
360    Level: advanced
361 
362 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
363           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
364           AOPetscToApplication(), ISGlobalToLocalMappingApply()
365 
366     Concepts: mapping^local to global
367 
368 M*/
369 
370 /* -----------------------------------------------------------------------------------------*/
371 
372 #undef __FUNCT__
373 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
374 /*
375     Creates the global fields in the ISLocalToGlobalMapping structure
376 */
377 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
378 {
379   PetscErrorCode ierr;
380   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
381 
382   PetscFunctionBegin;
383   end   = 0;
384   start = 100000000;
385 
386   for (i=0; i<n; i++) {
387     if (idx[i] < 0) continue;
388     if (idx[i] < start) start = idx[i];
389     if (idx[i] > end)   end   = idx[i];
390   }
391   if (start > end) {start = 0; end = -1;}
392   mapping->globalstart = start;
393   mapping->globalend   = end;
394 
395   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
396   mapping->globals = globals;
397   for (i=0; i<end-start+1; i++) {
398     globals[i] = -1;
399   }
400   for (i=0; i<n; i++) {
401     if (idx[i] < 0) continue;
402     globals[idx[i] - start] = i;
403   }
404 
405   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
406   PetscFunctionReturn(0);
407 }
408 
409 #undef __FUNCT__
410 #define __FUNCT__ "ISGlobalToLocalMappingApply"
411 /*@
412     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
413     specified with a global numbering.
414 
415     Not collective
416 
417     Input Parameters:
418 +   mapping - mapping between local and global numbering
419 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
420            IS_GTOLM_DROP - drops the indices with no local value from the output list
421 .   n - number of global indices to map
422 -   idx - global indices to map
423 
424     Output Parameters:
425 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
426 -   idxout - local index of each global index, one must pass in an array long enough
427              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
428              idxout == PETSC_NULL to determine the required length (returned in nout)
429              and then allocate the required space and call ISGlobalToLocalMappingApply()
430              a second time to set the values.
431 
432     Notes:
433     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
434 
435     This is not scalable in memory usage. Each processor requires O(Nglobal) size
436     array to compute these.
437 
438     Level: advanced
439 
440     Concepts: mapping^global to local
441 
442 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
443           ISLocalToGlobalMappingDestroy()
444 @*/
PetscErrorCode PETSCVEC_DLLEXPORT ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Lazily build the dense global->local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  /* globals[g-start] holds the local index for global g in [start,end], or -1 */
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK mode: output has the same length as the input; globals with no
       local value are replaced by -1 (negative inputs pass through unchanged) */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;  /* MASK always produces n entries */
  } else {
    /* DROP mode: indices with no local value are omitted from the output */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;  /* in range but not locally owned */
        idxout[nf++] = tmp;
      }
    } else {
      /* idxout == NULL: counting pass only, so callers can size the output
         array before a second call that fills it */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;  /* number of surviving (local) indices */
  }

  PetscFunctionReturn(0);
}
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
498 /*@C
499     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
500      each index shared by more than one processor
501 
502     Collective on ISLocalToGlobalMapping
503 
504     Input Parameters:
505 .   mapping - the mapping from local to global indexing
506 
507     Output Parameter:
508 +   nproc - number of processors that are connected to this one
509 .   proc - neighboring processors
510 .   numproc - number of indices for each subdomain (processor)
511 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
512 
513     Level: advanced
514 
515     Concepts: mapping^local to global
516 
517     Fortran Usage:
518 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
519 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
520           PetscInt indices[nproc][numprocmax],ierr)
521         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
522         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
523 
524 
525 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
526           ISLocalToGlobalMappingRestoreInfo()
527 @*/
528 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
529 {
530   PetscErrorCode ierr;
531   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
532   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
533   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
534   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
535   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
536   PetscInt       first_procs,first_numprocs,*first_indices;
537   MPI_Request    *recv_waits,*send_waits;
538   MPI_Status     recv_status,*send_status,*recv_statuses;
539   MPI_Comm       comm = ((PetscObject)mapping)->comm;
540   PetscBool      debug = PETSC_FALSE;
541 
542   PetscFunctionBegin;
543   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
544   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
545   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
546   if (size == 1) {
547     *nproc         = 0;
548     *procs         = PETSC_NULL;
549     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
550     (*numprocs)[0] = 0;
551     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
552     (*indices)[0]  = PETSC_NULL;
553     PetscFunctionReturn(0);
554   }
555 
556   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
557 
558   /*
559     Notes on ISLocalToGlobalMappingGetInfo
560 
561     globally owned node - the nodes that have been assigned to this processor in global
562            numbering, just for this routine.
563 
564     nontrivial globally owned node - node assigned to this processor that is on a subdomain
565            boundary (i.e. is has more than one local owner)
566 
567     locally owned node - node that exists on this processors subdomain
568 
569     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
570            local subdomain
571   */
572   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
573   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
574   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
575 
576   for (i=0; i<n; i++) {
577     if (lindices[i] > max) max = lindices[i];
578   }
579   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
580   Ng++;
581   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
582   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
583   scale  = Ng/size + 1;
584   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
585   rstart = scale*rank;
586 
587   /* determine ownership ranges of global indices */
588   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
589   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
590 
591   /* determine owners of each local node  */
592   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
593   for (i=0; i<n; i++) {
594     proc             = lindices[i]/scale; /* processor that globally owns this index */
595     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
596     owner[i]         = proc;
597     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
598   }
599   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
600   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
601 
602   /* inform other processors of number of messages and max length*/
603   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
604   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
605 
606   /* post receives for owned rows */
607   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
608   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
609   for (i=0; i<nrecvs; i++) {
610     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
611   }
612 
613   /* pack messages containing lists of local nodes to owners */
614   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
615   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
616   starts[0]  = 0;
617   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
618   for (i=0; i<n; i++) {
619     sends[starts[owner[i]]++] = lindices[i];
620     sends[starts[owner[i]]++] = i;
621   }
622   ierr = PetscFree(owner);CHKERRQ(ierr);
623   starts[0]  = 0;
624   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
625 
626   /* send the messages */
627   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
628   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
629   cnt = 0;
630   for (i=0; i<size; i++) {
631     if (nprocs[2*i]) {
632       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
633       dest[cnt] = i;
634       cnt++;
635     }
636   }
637   ierr = PetscFree(starts);CHKERRQ(ierr);
638 
639   /* wait on receives */
640   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
641   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
642   cnt  = nrecvs;
643   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
644   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
645   while (cnt) {
646     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
647     /* unpack receives into our local space */
648     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
649     source[imdex]  = recv_status.MPI_SOURCE;
650     len[imdex]     = len[imdex]/2;
651     /* count how many local owners for each of my global owned indices */
652     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
653     cnt--;
654   }
655   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
656 
657   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
658   nowned  = 0;
659   nownedm = 0;
660   for (i=0; i<ng; i++) {
661     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
662   }
663 
664   /* create single array to contain rank of all local owners of each globally owned index */
665   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
666   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
667   starts[0] = 0;
668   for (i=1; i<ng; i++) {
669     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
670     else starts[i] = starts[i-1];
671   }
672 
673   /* for each nontrival globally owned node list all arriving processors */
674   for (i=0; i<nrecvs; i++) {
675     for (j=0; j<len[i]; j++) {
676       node = recvs[2*i*nmax+2*j]-rstart;
677       if (nownedsenders[node] > 1) {
678         ownedsenders[starts[node]++] = source[i];
679       }
680     }
681   }
682 
683   if (debug) { /* -----------------------------------  */
684     starts[0]    = 0;
685     for (i=1; i<ng; i++) {
686       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
687       else starts[i] = starts[i-1];
688     }
689     for (i=0; i<ng; i++) {
690       if (nownedsenders[i] > 1) {
691         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
692         for (j=0; j<nownedsenders[i]; j++) {
693           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
694         }
695         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
696       }
697     }
698     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
699   }/* -----------------------------------  */
700 
701   /* wait on original sends */
702   if (nsends) {
703     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
704     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
705     ierr = PetscFree(send_status);CHKERRQ(ierr);
706   }
707   ierr = PetscFree(send_waits);CHKERRQ(ierr);
708   ierr = PetscFree(sends);CHKERRQ(ierr);
709   ierr = PetscFree(nprocs);CHKERRQ(ierr);
710 
711   /* pack messages to send back to local owners */
712   starts[0]    = 0;
713   for (i=1; i<ng; i++) {
714     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
715     else starts[i] = starts[i-1];
716   }
717   nsends2 = nrecvs;
718   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
719   for (i=0; i<nrecvs; i++) {
720     nprocs[i] = 1;
721     for (j=0; j<len[i]; j++) {
722       node = recvs[2*i*nmax+2*j]-rstart;
723       if (nownedsenders[node] > 1) {
724         nprocs[i] += 2 + nownedsenders[node];
725       }
726     }
727   }
728   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
729   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
730   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
731   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
732   /*
733      Each message is 1 + nprocs[i] long, and consists of
734        (0) the number of nodes being sent back
735        (1) the local node number,
736        (2) the number of processors sharing it,
737        (3) the processors sharing it
738   */
739   for (i=0; i<nsends2; i++) {
740     cnt = 1;
741     sends2[starts2[i]] = 0;
742     for (j=0; j<len[i]; j++) {
743       node = recvs[2*i*nmax+2*j]-rstart;
744       if (nownedsenders[node] > 1) {
745         sends2[starts2[i]]++;
746         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
747         sends2[starts2[i]+cnt++] = nownedsenders[node];
748         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
749         cnt += nownedsenders[node];
750       }
751     }
752   }
753 
754   /* receive the message lengths */
755   nrecvs2 = nsends;
756   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
757   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
758   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
759   for (i=0; i<nrecvs2; i++) {
760     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
761   }
762 
763   /* send the message lengths */
764   for (i=0; i<nsends2; i++) {
765     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
766   }
767 
768   /* wait on receives of lens */
769   if (nrecvs2) {
770     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
771     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
772     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
773   }
774   ierr = PetscFree(recv_waits);
775 
776   starts3[0] = 0;
777   nt         = 0;
778   for (i=0; i<nrecvs2-1; i++) {
779     starts3[i+1] = starts3[i] + lens2[i];
780     nt          += lens2[i];
781   }
782   nt += lens2[nrecvs2-1];
783 
784   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
785   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
786   for (i=0; i<nrecvs2; i++) {
787     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
788   }
789 
790   /* send the messages */
791   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
792   for (i=0; i<nsends2; i++) {
793     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
794   }
795 
796   /* wait on receives */
797   if (nrecvs2) {
798     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
799     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
800     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
801   }
802   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
803   ierr = PetscFree(nprocs);CHKERRQ(ierr);
804 
805   if (debug) { /* -----------------------------------  */
806     cnt = 0;
807     for (i=0; i<nrecvs2; i++) {
808       nt = recvs2[cnt++];
809       for (j=0; j<nt; j++) {
810         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
811         for (k=0; k<recvs2[cnt+1]; k++) {
812           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
813         }
814         cnt += 2 + recvs2[cnt+1];
815         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
816       }
817     }
818     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
819   } /* -----------------------------------  */
820 
821   /* count number subdomains for each local node */
822   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
823   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
824   cnt  = 0;
825   for (i=0; i<nrecvs2; i++) {
826     nt = recvs2[cnt++];
827     for (j=0; j<nt; j++) {
828       for (k=0; k<recvs2[cnt+1]; k++) {
829         nprocs[recvs2[cnt+2+k]]++;
830       }
831       cnt += 2 + recvs2[cnt+1];
832     }
833   }
834   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
835   *nproc    = nt;
836   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
837   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
838   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
839   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
840   cnt       = 0;
841   for (i=0; i<size; i++) {
842     if (nprocs[i] > 0) {
843       bprocs[i]        = cnt;
844       (*procs)[cnt]    = i;
845       (*numprocs)[cnt] = nprocs[i];
846       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
847       cnt++;
848     }
849   }
850 
851   /* make the list of subdomains for each nontrivial local node */
852   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
853   cnt  = 0;
854   for (i=0; i<nrecvs2; i++) {
855     nt = recvs2[cnt++];
856     for (j=0; j<nt; j++) {
857       for (k=0; k<recvs2[cnt+1]; k++) {
858         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
859       }
860       cnt += 2 + recvs2[cnt+1];
861     }
862   }
863   ierr = PetscFree(bprocs);CHKERRQ(ierr);
864   ierr = PetscFree(recvs2);CHKERRQ(ierr);
865 
866   /* sort the node indexing by their global numbers */
867   nt = *nproc;
868   for (i=0; i<nt; i++) {
869     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
870     for (j=0; j<(*numprocs)[i]; j++) {
871       tmp[j] = lindices[(*indices)[i][j]];
872     }
873     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
874     ierr = PetscFree(tmp);CHKERRQ(ierr);
875   }
876 
877   if (debug) { /* -----------------------------------  */
878     nt = *nproc;
879     for (i=0; i<nt; i++) {
880       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
881       for (j=0; j<(*numprocs)[i]; j++) {
882         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
883       }
884       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
885     }
886     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
887   } /* -----------------------------------  */
888 
889   /* wait on sends */
890   if (nsends2) {
891     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
892     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
893     ierr = PetscFree(send_status);CHKERRQ(ierr);
894   }
895 
896   ierr = PetscFree(starts3);CHKERRQ(ierr);
897   ierr = PetscFree(dest);CHKERRQ(ierr);
898   ierr = PetscFree(send_waits);CHKERRQ(ierr);
899 
900   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
901   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
902   ierr = PetscFree(starts);CHKERRQ(ierr);
903   ierr = PetscFree(starts2);CHKERRQ(ierr);
904   ierr = PetscFree(lens2);CHKERRQ(ierr);
905 
906   ierr = PetscFree(source);CHKERRQ(ierr);
907   ierr = PetscFree(len);CHKERRQ(ierr);
908   ierr = PetscFree(recvs);CHKERRQ(ierr);
909   ierr = PetscFree(nprocs);CHKERRQ(ierr);
910   ierr = PetscFree(sends2);CHKERRQ(ierr);
911 
912   /* put the information about myself as the first entry in the list */
913   first_procs    = (*procs)[0];
914   first_numprocs = (*numprocs)[0];
915   first_indices  = (*indices)[0];
916   for (i=0; i<*nproc; i++) {
917     if ((*procs)[i] == rank) {
918       (*procs)[0]    = (*procs)[i];
919       (*numprocs)[0] = (*numprocs)[i];
920       (*indices)[0]  = (*indices)[i];
921       (*procs)[i]    = first_procs;
922       (*numprocs)[i] = first_numprocs;
923       (*indices)[i]  = first_indices;
924       break;
925     }
926   }
927   PetscFunctionReturn(0);
928 }
929 
930 #undef __FUNCT__
931 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
932 /*@C
933     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
934 
935     Collective on ISLocalToGlobalMapping
936 
937     Input Parameters:
938 .   mapping - the mapping from local to global indexing
939 
940     Output Parameter:
941 +   nproc - number of processors that are connected to this one
942 .   proc - neighboring processors
943 .   numproc - number of indices for each processor
944 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
945 
946     Level: advanced
947 
948 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
949           ISLocalToGlobalMappingGetInfo()
950 @*/
951 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
952 {
953   PetscErrorCode ierr;
954   PetscInt       i;
955 
956   PetscFunctionBegin;
957   ierr = PetscFree(*procs);CHKERRQ(ierr);
958   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
959   if (*indices) {
960     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
961     for (i=1; i<*nproc; i++) {
962       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
963     }
964     ierr = PetscFree(*indices);CHKERRQ(ierr);
965   }
966   PetscFunctionReturn(0);
967 }
968 
969 #undef __FUNCT__
970 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
971 /*@C
972    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
973 
974    Not Collective
975 
976    Input Arguments:
977 . ltog - local to global mapping
978 
979    Output Arguments:
980 . array - array of indices
981 
982    Level: advanced
983 
984 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
985 @*/
986 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
987 {
988 
989   PetscFunctionBegin;
990   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
991   PetscValidPointer(array,2);
992   *array = ltog->indices;
993   PetscFunctionReturn(0);
994 }
995 
996 #undef __FUNCT__
997 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
998 /*@C
999    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
1000 
1001    Not Collective
1002 
1003    Input Arguments:
1004 + ltog - local to global mapping
1005 - array - array of indices
1006 
1007    Level: advanced
1008 
1009 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1010 @*/
1011 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1012 {
1013 
1014   PetscFunctionBegin;
1015   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1016   PetscValidPointer(array,2);
1017   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1018   *array = PETSC_NULL;
1019   PetscFunctionReturn(0);
1020 }
1021