#include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/

PetscClassId IS_LTOGM_CLASSID;

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
/*@
    ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not collective

    Input Parameter:
.   is - index set containing the global numbers for each local number

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n;
  const PetscInt *indices;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
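/*
   Illustrative usage sketch (editorial addition, not part of the PETSc sources): the
   typical create/query/destroy cycle built from an index set.  The communicator and
   the global index values below are made-up examples.

      IS                     is;
      ISLocalToGlobalMapping ltog;
      PetscInt               globals[3] = {10,11,25},nlocal;
      PetscErrorCode         ierr;

      ierr = ISCreateGeneral(PETSC_COMM_SELF,3,globals,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&ltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingGetSize(ltog,&nlocal);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);

   Afterwards nlocal is 3; the IS may be destroyed immediately because the mapping keeps
   its own copy of the indices (PETSC_COPY_VALUES).
*/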
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,PETSC_NULL);CHKERRQ(ierr);
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  } else maxlocal = nleaves;
  ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i;
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
.   indices - the global index for each local element
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingApply()
@*/
PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,5);

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,-1,"ISLocalToGlobalMapping","Local to global mapping","IS",cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);

  (*mapping)->n = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;
  else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingBlock"
/*@
    ISLocalToGlobalMappingBlock - Creates a blocked index version of an
       ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
       and VecSetLocalToGlobalMappingBlock().

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   inmap - original point-wise mapping
-   bs - block size

    Output Parameter:
.   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
{
  PetscErrorCode ierr;
  PetscInt       *ii,i,n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
  PetscValidPointer(outmap,3);
  if (bs > 1) {
    n = inmap->n/bs;
    if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
    ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
    for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
    ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
  } else {
    ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
    *outmap = inmap;
  }
  PetscFunctionReturn(0);
}
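/*
   Illustrative sketch (editorial addition, not part of the PETSc sources): deriving a
   block mapping from a point-wise mapping with ISLocalToGlobalMappingBlock().  The
   index values are made-up examples; each block of bs = 2 consecutive point indices is
   assumed to map to one block of consecutive global indices.

      ISLocalToGlobalMapping ltog,bltog;
      PetscInt               globals[4] = {10,11,20,21};
      PetscErrorCode         ierr;

      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,4,globals,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingBlock(ltog,2,&bltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&bltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);

   Here bltog maps the two local blocks to global block indices 5 and 10 (10/2 and 20/2),
   which is the form expected by the *SetLocalToGlobalMappingBlock() routines.
*/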
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
/*@
    ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
       ISLocalToGlobalMapping

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
-   bs - block size

    Output Parameter:
.   outmap - pointwise mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
@*/
PetscErrorCode ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
{
  PetscErrorCode ierr;
  PetscInt       *ii,i,n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
  PetscValidPointer(outmap,2);
  if (bs > 1) {
    n    = inmap->n*bs;
    ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
    for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
    ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
  } else {
    ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
    *outmap = inmap;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective

    Input Parameter:
.   mapping - mapping data structure

    Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
/*@
    ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
    a new index set using the global numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
-   is - index set in local numbering

    Output Parameter:
.   newis - index set in global numbering

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
@*/
PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,2);
  PetscValidPointer(newis,3);

  ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  idxmap = mapping->indices;

  ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
    idxout[i] = idxmap[idxin[i]];
  }
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
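/*
   Illustrative sketch (editorial addition, not part of the PETSc sources): translating
   local indices to global indices with ISLocalToGlobalMappingApply() and
   ISLocalToGlobalMappingApplyIS().  The mapping and index values are made-up examples.

      ISLocalToGlobalMapping ltog;
      IS                     locis,globis;
      PetscInt               globals[3] = {10,11,25};
      PetscInt               lidx[2]    = {0,2},gidx[2];
      PetscErrorCode         ierr;

      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,3,globals,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApply(ltog,2,lidx,gidx);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,2,lidx,PETSC_COPY_VALUES,&locis);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(ltog,locis,&globis);CHKERRQ(ierr);
      ierr = ISDestroy(&locis);CHKERRQ(ierr);
      ierr = ISDestroy(&globis);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);

   Afterwards gidx holds {10,25} and globis contains the same two global indices.
*/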
/*MC
    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
    and converts them to the global numbering.

    Synopsis:
    #include "petscis.h"
    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])

    Not collective

    Input Parameters:
+   mapping - the local to global mapping context
.   N - number of integers
-   in - input indices in local numbering

    Output Parameter:
.   out - indices in global numbering

    Notes:
    The in and out array parameters may be identical.

    Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(), AOCreateBasic(), AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

    Concepts: mapping^local to global

M*/

/* -----------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index; pass idxout == PETSC_NULL to determine
             the required length (returned in nout), then allocate the space and call
             ISGlobalToLocalMappingApply() a second time to set the values

    Notes:
    Either nout or idxout may be PETSC_NULL.

    Level: advanced

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0)          idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
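/*
   Illustrative sketch (editorial addition, not part of the PETSc sources): going the
   other way with ISGlobalToLocalMappingApply().  With the made-up mapping {10,11,25},
   the global indices {11,12,25} translate to local indices {1,-1,2} under IS_GTOLM_MASK
   (the unknown index 12 is masked with -1) and to {1,2} with nout == 2 under
   IS_GTOLM_DROP.

      ISLocalToGlobalMapping ltog;
      PetscInt               globals[3] = {10,11,25};
      PetscInt               gidx[3]    = {11,12,25},lidx[3],nout;
      PetscErrorCode         ierr;

      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,3,globals,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_MASK,3,gidx,&nout,lidx);CHKERRQ(ierr);
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,gidx,&nout,lidx);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
*/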
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
      each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
$          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[]
        and indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm = ((PetscObject)mapping)->comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    *nproc         = 0;
    *procs         = PETSC_NULL;
    ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
    (*indices)[0]  = PETSC_NULL;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetInfo

    globally owned node - the nodes that have been assigned to this processor in global
          numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
          boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processor's subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
          local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  scale  = Ng/size + 1;
  ng     = scale;
  if (rank == size-1) ng = Ng - scale*(size-1);
  ng     = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node */
  ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
  for (i=0; i 1) {nownedm += nownedsenders[i]; nowned++;} }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i 1) ownedsenders[starts[node]++] = source[i]; } }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i 1) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
      for (j=0; j 1) starts[i] = starts[i-1] + nownedsenders[i-1]; else starts[i] = starts[i-1]; }
  nsends2 = nrecvs;
  ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i 1) nprocs[i] += 2 + nownedsenders[node]; } }
  nt = 0;
  for (i=0; i 1) {
      sends2[starts2[i]]++;
      sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
      sends2[starts2[i]+cnt++] = nownedsenders[node];
      ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += nownedsenders[node];
    }
  }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i 0);
  *nproc = nt;
  ierr   = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
  ierr   = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
  ierr   = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point

   Not Collective

   Input Argument:
.  ltog - local to global mapping

   Output Argument:
.  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
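/*
   Illustrative sketch (editorial addition, not part of the PETSc sources): read-only
   access to the raw index array of a mapping, assuming ltog is a mapping created
   earlier.  The array must be handed back with ISLocalToGlobalMappingRestoreIndices()
   and must not be freed or modified by the caller.

      const PetscInt *idx;
      PetscInt       i,n;
      PetscErrorCode ierr;

      ierr = ISLocalToGlobalMappingGetSize(ltog,&n);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingGetIndices(ltog,&idx);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        ierr = PetscPrintf(PETSC_COMM_SELF,"local %D -> global %D\n",i,idx[i]);CHKERRQ(ierr);
      }
      ierr = ISLocalToGlobalMappingRestoreIndices(ltog,&idx);CHKERRQ(ierr);
*/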
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
/*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

   Not Collective

   Input Arguments:
+  ltog - local to global mapping
-  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = PETSC_NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
/*@C
   ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

   Not Collective

   Input Arguments:
+  comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
.  n - number of mappings to concatenate
-  ltogs - local to global mappings

   Output Arguments:
.  ltogcat - new mapping

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
{
  PetscInt       i,cnt,m,*idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
  if (n > 0) PetscValidPointer(ltogs,3);
  for (i=0; i