/*$Id: isltog.c,v 1.57 2000/08/08 21:51:55 bsmith Exp bsmith $*/
#include "petscsys.h" /*I "petscsys.h" I*/
#include "src/vec/is/isimpl.h" /*I "petscis.h" I*/
#undef __FUNC__
#define __FUNC__ /**/"ISLocalToGlobalMappingGetSize"
/*@C
   ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

   Not Collective

   Input Parameter:
.  ltog - local to global mapping

   Output Parameter:
.  n - the number of entries in the local mapping

   Level: advanced

.keywords: IS, local-to-global mapping, create

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
int ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,int *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE);
  /* validate the output pointer as well, consistent with the other routines here */
  PetscValidIntPointer(n);
  *n = mapping->n;
  PetscFunctionReturn(0);
}
#undef __FUNC__
#define __FUNC__ /**/"ISLocalToGlobalMappingView"
/*@C
   ISLocalToGlobalMappingView - View a local to global mapping

   Not Collective

   Input Parameters:
+  ltog - local to global mapping
-  viewer - viewer

   Level: advanced

.keywords: IS, local-to-global mapping, create

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
int ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,Viewer viewer)
{
  int        i,ierr,rank;
  PetscTruth isascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE);
  if (!viewer) viewer = VIEWER_STDOUT_(mapping->comm);
  PetscValidHeaderSpecific(viewer,VIEWER_COOKIE);
  PetscCheckSameComm(mapping,viewer);

  ierr = MPI_Comm_rank(mapping->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
  if (isascii) {
    /* print each local index and the global index it maps to
       (loop bound restored; it had been corrupted to "for (i=0; in; i++)") */
    for (i=0; i<mapping->n; i++) {
      ierr = ViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = ViewerFlush(viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
#undef __FUNC__
#define __FUNC__ /**/"ISLocalToGlobalMappingCreateIS"
/*@C
   ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not collective

   Input Parameter:
.  is - index set containing the global numbers for each local

   Output Parameter:
.  mapping - new mapping data structure

   Level: advanced

.keywords: IS, local-to-global mapping, create

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
int ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  int      nlocal,*idx,ierr;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_COOKIE);

  /* the new mapping lives on the index set's communicator */
  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);

  /* hand the index set's entries straight to the fundamental constructor,
     which makes its own copy of them */
  ierr = ISGetLocalSize(is,&nlocal);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idx);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm,nlocal,idx,mapping);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idx);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
#undef __FUNC__
#define __FUNC__ /**/"ISLocalToGlobalMappingCreate"
/*@C
ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
ordering and a global parallel ordering.
Not Collective, but communicator may have more than one process
Input Parameters:
+ comm - MPI communicator
. n - the number of local elements
- indices - the global index for each local element
Output Parameter:
. mapping - new mapping data structure
Level: advanced
.keywords: IS, local-to-global mapping, create
.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
int ISLocalToGlobalMappingCreate(MPI_Comm cm,int n,const int indices[],ISLocalToGlobalMapping *mapping)
{
int ierr;
PetscFunctionBegin;
PetscValidIntPointer(indices);
PetscValidPointer(mapping);
/* allocate the PETSc object header; Destroy and View are registered so the
   generic object machinery can dispatch to them */
PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_COOKIE,0,"ISLocalToGlobalMapping",
cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
PLogObjectCreate(*mapping);
PLogObjectMemory(*mapping,sizeof(struct _p_ISLocalToGlobalMapping)+n*sizeof(int));
(*mapping)->n = n;
/* copy the caller's indices so the mapping owns its own array
   (n+1 presumably to avoid a zero-size allocation when n == 0) */
(*mapping)->indices = (int*)PetscMalloc((n+1)*sizeof(int));CHKPTRQ((*mapping)->indices);
ierr = PetscMemcpy((*mapping)->indices,indices,n*sizeof(int));CHKERRQ(ierr);
/*
Do not create the global to local mapping. This is only created if
ISGlobalToLocalMapping() is called
*/
(*mapping)->globals = 0;
PetscFunctionReturn(0);
}
#undef __FUNC__
#define __FUNC__ /**/"ISLocalToGlobalMappingDestroy"
/*@
ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
ordering and a global parallel ordering.
Not Collective
Input Parameters:
. mapping - mapping data structure
Level: advanced
.keywords: IS, local-to-global mapping, destroy
.seealso: ISLocalToGlobalMappingCreate()
@*/
int ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
{
int ierr;
PetscFunctionBegin;
PetscValidPointer(mapping);
/* drop one reference; only the final reference actually frees the object */
if (--mapping->refct > 0) PetscFunctionReturn(0);
/* refct below zero means Destroy was called more times than the object was referenced */
if (mapping->refct < 0) {
SETERRQ(1,"Mapping already destroyed");
}
ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
/* globals is allocated lazily by ISGlobalToLocalMappingSetUp_Private(), so it may be 0 */
if (mapping->globals) {ierr = PetscFree(mapping->globals);CHKERRQ(ierr);}
PLogObjectDestroy(mapping);
PetscHeaderDestroy(mapping);
PetscFunctionReturn(0);
}
#undef __FUNC__
#define __FUNC__ /**/"ISLocalToGlobalMappingApplyIS"
/*@
   ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
   a new index set using the global numbering defined in an ISLocalToGlobalMapping
   context.

   Not collective

   Input Parameters:
+  mapping - mapping between local and global numbering
-  is - index set in local numbering

   Output Parameters:
.  newis - index set in global numbering

   Level: advanced

.keywords: IS, local-to-global mapping, apply

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
@*/
int ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
{
  int ierr,n,i,*idxin,*idxmap,*idxout,Nmax = mapping->n;

  PetscFunctionBegin;
  PetscValidPointer(mapping);
  PetscValidHeaderSpecific(is,IS_COOKIE);
  PetscValidPointer(newis);

  ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  idxmap = mapping->indices;

  idxout = (int*)PetscMalloc((n+1)*sizeof(int));CHKPTRQ(idxout);
  /* translate each local index into a global one, checking it lies inside
     the mapping (loop header restored; it had been corrupted by extraction) */
  for (i=0; i<n; i++) {
    if (idxin[i] >= Nmax) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax,i);
    idxout[i] = idxmap[idxin[i]];
  }
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,newis);CHKERRQ(ierr);
  ierr = PetscFree(idxout);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*MC
ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
and converts them to the global numbering.
Not collective
Input Parameters:
+ mapping - the local to global mapping context
. N - number of integers
- in - input indices in local numbering
Output Parameter:
. out - indices in global numbering
Synopsis:
ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
Notes:
The in and out array parameters may be identical.
Level: advanced
.seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
AOPetscToApplication(), ISGlobalToLocalMappingApply()
.keywords: local-to-global, mapping, apply
M*/
/* -----------------------------------------------------------------------------------------*/
#undef __FUNC__
#define __FUNC__ /**/"ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    a lookup table globals[] such that globals[g - globalstart] is the local
    index of global index g, or -1 if g is not referenced locally.

    NOTE(review): the body of this routine was corrupted during text
    extraction (spans between '<' and '>' were stripped); it has been
    reconstructed to match the surviving fragments and the fields it must
    fill -- confirm against the PETSc repository.
*/
static int ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  int i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = 100000000;

  /* find the smallest and largest non-negative global index referenced locally */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}  /* no non-negative indices at all */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* build the dense global-to-local lookup table; -1 marks "not local" */
  globals = mapping->globals = (int*)PetscMalloc((end-start+2)*sizeof(int));CHKPTRQ(mapping->globals);
  for (i=0; i<end-start+1; i++) {
    globals[i] = -1;
  }
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  PLogObjectMemory(mapping,(end-start+1)*sizeof(int));
  PetscFunctionReturn(0);
}
#undef __FUNC__
#define __FUNC__ /**/"ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == PETSC_NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

.keywords: IS, global-to-local mapping, apply

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
int ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                int n,const int idx[],int *nout,int idxout[])
{
  int i,ierr,*globals,nf = 0,tmp,start,end;

  PetscFunctionBegin;
  /* lazily build the dense global-to-local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  /* NOTE(review): the three loops below were corrupted by text extraction and
     have been reconstructed from the surviving "... end) ..." fragments */
  if (type == IS_GTOLM_MASK) {
    /* masked: out-of-range entries become -1; output length equals input length */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < start || idx[i] > end) idxout[i] = -1;
        else                                idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* drop: out-of-range and non-local entries are skipped entirely */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < start || idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass used to size the output array */
      for (i=0; i<n; i++) {
        if (idx[i] < start || idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
#undef __FUNC__
#define __FUNC__ /**/"ISLocalToGlobalMappingGetInfo"
/* NOTE(review): the body of this routine was badly corrupted during text
   extraction -- every span between a '<' and the next '>' was stripped, in
   places deleting many whole statements (the MPI send/receive plumbing) and
   fusing unrelated lines together.  As shown it cannot compile.  It should be
   restored from the PETSc source repository rather than repaired by guesswork;
   inline NOTE(review) comments below mark the obviously damaged lines. */
/*@C
ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
each index shared by more than one processor
Collective on ISLocalToGlobalMapping
Input Parameters:
. mapping - the mapping from local to global indexing
Output Parameter:
+ nproc - number of processors that are connected to this one
. proc - neighboring processors
. numproc - number of indices for each subdomain (processor)
- indices - indices of local nodes shared with neighbor (sorted by global numbering)
Level: advanced
.keywords: IS, local-to-global mapping, neighbors
.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
ISLocalToGlobalMappingRestoreInfo()
@*/
int ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,int *nproc,int **procs,int **numprocs,int ***indices)
{
int i,n = mapping->n,ierr,Ng,ng = PETSC_DECIDE,max = 0,*lindices = mapping->indices;
int size,rank,*nprocs,*owner,nsends,*sends,j,*starts,*work,nmax,nrecvs,*recvs,proc;
int tag1,tag2,tag3,cnt,*len,*source,imdex,scale,*ownedsenders,*nownedsenders,rstart,nowned;
int node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
int first_procs,first_numprocs,*first_indices;
MPI_Request *recv_waits,*send_waits;
MPI_Status recv_status,*send_status,*recv_statuses;
MPI_Comm comm = mapping->comm;
PetscTruth debug = PETSC_FALSE;
PetscFunctionBegin;
ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
/* uniprocessor case: no neighbors; return empty (but freeable) arrays so
   ISLocalToGlobalMappingRestoreInfo() works uniformly */
if (size == 1) {
*nproc = 0;
*procs = PETSC_NULL;
*numprocs = (int*)PetscMalloc(sizeof(int));CHKPTRQ(*numprocs);
(*numprocs)[0] = 0;
*indices = (int**)PetscMalloc(sizeof(int*));CHKPTRQ(*indices);
(*indices)[0] = PETSC_NULL;
PetscFunctionReturn(0);
}
ierr = OptionsHasName(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug);CHKERRQ(ierr);
/*
Notes on ISLocalToGlobalMappingGetInfo
globally owned node - the nodes that have been assigned to this processor in global
numbering, just for this routine.
nontrivial globally owned node - node assigned to this processor that is on a subdomain
boundary (i.e. is has more than one local owner)
locally owned node - node that exists on this processors subdomain
nontrivial locally owned node - node that is not in the interior (i.e. has more than one
local subdomain
*/
ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
/* NOTE(review): loop header corrupted -- presumably
   "for (i=0; i<n; i++) { if (lindices[i] > max) max = lindices[i]; }" */
for (i=0; i max) max = lindices[i];
}
ierr = MPI_Allreduce(&max,&Ng,1,MPI_INT,MPI_MAX,comm);CHKERRQ(ierr);
Ng++;
ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
scale = Ng/size + 1;
ng = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
rstart = scale*rank;
/* determine ownership ranges of global indices */
nprocs = (int*)PetscMalloc((2*size+1)*sizeof(int));CHKPTRQ(nprocs);
ierr = PetscMemzero(nprocs,2*size*sizeof(int));CHKERRQ(ierr);
/* determine owners of each local node */
owner = (int*)PetscMalloc((n+1)*sizeof(int));CHKPTRQ(owner);
/* NOTE(review): a large span of code is missing here -- the line below fuses
   the owner-computation loop with a much later nownedm/nowned counting loop */
for (i=0; i 1) {nownedm += nownedsenders[i]; nowned++;}
}
/* create single array to contain rank of all local owners of each globally owned index */
ownedsenders = (int*)PetscMalloc((nownedm+1)*sizeof(int));CHKERRQ(ierr);
starts = (int*)PetscMalloc((ng+1)*sizeof(int));CHKPTRQ(starts);
starts[0] = 0;
/* NOTE(review): corrupted -- presumably "for (i=1; i<ng; i++) { if (nownedsenders[i-1] > 1) ..." */
for (i=1; i 1) starts[i] = starts[i-1] + nownedsenders[i-1];
else starts[i] = starts[i-1];
}
/* for each nontrival globally owned node list all arriving processors */
/* NOTE(review): corrupted -- the receive-unpacking loop headers are missing here */
for (i=0; i 1) {
ownedsenders[starts[node]++] = source[i];
}
}
}
if (debug) { /* ----------------------------------- */
starts[0] = 0;
for (i=1; i 1) starts[i] = starts[i-1] + nownedsenders[i-1];
else starts[i] = starts[i-1];
}
/* NOTE(review): the debug printing loops below are fused fragments */
for (i=0; i 1) {
ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
for (j=0; j 1) starts[i] = starts[i-1] + nownedsenders[i-1];
else starts[i] = starts[i-1];
}
nsends2 = nrecvs;
nprocs = (int*)PetscMalloc((nsends2+1)*sizeof(int));CHKPTRQ(nprocs); /* length of each message */
cnt = 0;
/* NOTE(review): corrupted -- the message-length computation loops are fused here */
for (i=0; i 1) {
nprocs[i] += 2 + nownedsenders[node];
}
}
}
/* NOTE(review): corrupted -- allocation of sends2/starts2 and the packing loop
   headers are missing between "nt = 0; ..." and the statements below */
nt = 0; for (i=0; i 1) {
sends2[starts2[i]]++;
sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
sends2[starts2[i]+cnt++] = nownedsenders[node];
ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(int));CHKERRQ(ierr);
cnt += nownedsenders[node];
}
}
}
/* send the message lengths */
/* NOTE(review): corrupted -- the MPI send/receive of message lengths and data
   is missing; the line below fuses into a later counting loop */
for (i=0; i 0);
*nproc = nt;
*procs = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(procs);
*numprocs = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(numprocs);
*indices = (int**)PetscMalloc((nt+1)*sizeof(int*));CHKPTRQ(procs);
bprocs = (int*)PetscMalloc(size*sizeof(int));CHKERRQ(ierr);
cnt = 0;
/* NOTE(review): corrupted -- presumably "for (i=0; i<size; i++) { if (nprocs[i] > 0) {" */
for (i=0; i 0) {
bprocs[i] = cnt;
(*procs)[cnt] = i;
(*numprocs)[cnt] = nprocs[i];
(*indices)[cnt] = (int*)PetscMalloc(nprocs[i]*sizeof(int));CHKPTRQ((*indices)[cnt]);
cnt++;
}
}
/* make the list of subdomains for each nontrivial local node */
ierr = PetscMemzero(*numprocs,nt*sizeof(int));CHKERRQ(ierr);
cnt = 0;
/* NOTE(review): corrupted -- the remainder of this function (filling *indices,
   sorting, cleanup, PetscFunctionReturn, closing brace) was stripped; the line
   below fuses into the #define for the next routine */
for (i=0; i*/"ISLocalToGlobalMappingRestoreInfo"
/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

   Input Parameters:
.  mapping - the mapping from local to global indexing

   Output Parameter:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each processor
-  indices - indices of local nodes shared with neighbor (sorted by global numbering)

   Level: advanced

.keywords: IS, local-to-global mapping, neighbors

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
int ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,int *nproc,int **procs,int **numprocs,int ***indices)
{
  int ierr,i;

  PetscFunctionBegin;
  if (*procs) {
    ierr   = PetscFree(*procs);CHKERRQ(ierr);
    *procs = PETSC_NULL;  /* NULL out so a second Restore call cannot double-free */
  }
  if (*numprocs) {
    ierr      = PetscFree(*numprocs);CHKERRQ(ierr);
    *numprocs = PETSC_NULL;
  }
  if (*indices) {
    /* (*indices)[0] may be set (and NULL) even when *nproc == 0 (uniprocessor
       case in ISLocalToGlobalMappingGetInfo()), hence the separate check */
    if ((*indices)[0]) {ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);}
    for (i=1; i<*nproc; i++) {
      if ((*indices)[i]) {ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);}
    }
    ierr     = PetscFree(*indices);CHKERRQ(ierr);
    *indices = PETSC_NULL;
  }
  PetscFunctionReturn(0);
}