/*
 * PETSc test: exercises VecScatterRemap() on several vecscatter kinds
 * (parallel-to-parallel, seq-general-to-seq-general, seq-general-to-
 * seq-stride1), per the help[] text below. The visible portion of main():
 * initializes PETSc, requires exactly 2 MPI ranks (SETERRQ otherwise),
 * creates two MPI vectors x,y of local length n=64 (global N=128 on 2
 * ranks), then begins filling x with its global indices {0..127} using
 * the ownership ranges.
 *
 * NOTE(review): this chunk is extraction-garbled — the whole file is
 * collapsed onto one physical line, and text between '<' and '>' was
 * stripped by the extractor:
 *   - the '#include' directive lost its header name (presumably
 *     <petscvec.h> given the Vec/VecScatter/IS usage — TODO confirm
 *     against the upstream PETSc test);
 *   - the final for-loop is cut mid-condition at "i" (presumably
 *     "i<ranges[rank+1]; i++" filling x via VecSetValue — TODO restore
 *     from upstream), and the remainder of main() (the actual
 *     VecScatterRemap() tests and cleanup) is not visible here;
 *   - the help string's "\n\" line continuations were collapsed to
 *     "\n\ " (backslash-space), which is not a valid escape sequence;
 *   - the help text itself contains typos ("paralle", "optimizaiton")
 *     that should be fixed when the file is restored.
 * The code bytes below are intentionally left exactly as found,
 * including the truncation, so a later chunk (if any) still joins.
 */
static char help[]= " Test VecScatterRemap() on various vecscatter. \n\ We may do optimization based on index patterns. After index remapping by VecScatterRemap(), we need to \n\ make sure the vecscatter works as expected with the optimizaiton. \n\ VecScatterRemap() does not support all kinds of vecscatters. In addition, it only supports remapping \n\ entries where we read the data (i.e., todata in paralle scatter, fromdata in sequential scatter). This test \n\ tests VecScatterRemap on parallel to paralle (PtoP) vecscatter, sequential general to sequential \n\ general (SGToSG) vecscatter and sequential general to sequential stride 1 (SGToSS_Stride1) vecscatter.\n\n"; #include int main(int argc,char **argv) { PetscErrorCode ierr; PetscInt i,n,*ix,*iy,*tomap,start; Vec x,y; PetscMPIInt nproc,rank; IS isx,isy; const PetscInt *ranges; VecScatter vscat; PetscFunctionBegin; ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr; ierr = MPI_Comm_size(PETSC_COMM_WORLD,&nproc);CHKERRMPI(ierr); ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr); if (nproc != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"This test must run with exactly two MPI ranks\n"); /* ==================================================================== (1) test VecScatterRemap on a parallel to parallel (PtoP) vecscatter ==================================================================== */ n = 64; /* long enough to trigger memcpy optimizations both in local scatter and remote scatter */ /* create two MPI vectors x, y of length n=64, N=128 */ ierr = VecCreateMPI(PETSC_COMM_WORLD,n,PETSC_DECIDE,&x);CHKERRQ(ierr); ierr = VecDuplicate(x,&y);CHKERRQ(ierr); /* Initialize x as {0~127} */ ierr = VecGetOwnershipRanges(x,&ranges);CHKERRQ(ierr); for (i=ranges[rank]; i