static char help[] = "Tests MatCreateSubMatrix() in parallel.";

#include <petscmat.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>

PetscErrorCode ISGetSeqIS_SameColDist_Private(Mat mat,IS isrow,IS iscol,IS *isrow_d,IS *iscol_d,IS *iscol_o,const PetscInt *garray[])
{
  PetscErrorCode ierr;
  Vec            x,cmap;
  const PetscInt *is_idx;
  PetscScalar    *xarray,*cmaparray;
  PetscInt       ncols,isstart,*idx,m,rstart,count;
  PetscInt       *cmap1;
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)mat->data;
  Mat            B=a->B;
  Vec            lvec=a->lvec,lcmap;
  PetscInt       i,cstart,cend,Bn=B->cmap->N;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  VecScatter     Mvctx;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  ierr = ISGetLocalSize(iscol,&ncols);CHKERRQ(ierr);

  /* (1) iscol is a sub-column vector of mat; pad it with '-1.' to form a full vector x */
  ierr = MatCreateVecs(mat,&x,NULL);CHKERRQ(ierr);
  ierr = VecDuplicate(x,&cmap);CHKERRQ(ierr);
  ierr = VecSet(x,-1.0);CHKERRQ(ierr);
  ierr = VecSet(cmap,-1.0);CHKERRQ(ierr);

  ierr = VecDuplicate(lvec,&lcmap);CHKERRQ(ierr);

  /* Get start indices */
  ierr = MPI_Scan(&ncols,&isstart,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
  isstart -= ncols;
  ierr = MatGetOwnershipRangeColumn(mat,&cstart,&cend);CHKERRQ(ierr);

  ierr = ISGetIndices(iscol,&is_idx);CHKERRQ(ierr);
  ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecGetArray(cmap,&cmaparray);CHKERRQ(ierr);
  ierr = PetscMalloc1(ncols,&idx);CHKERRQ(ierr);
  for (i=0; i<ncols; i++) {
    xarray[is_idx[i]-cstart]    = (PetscScalar)is_idx[i];
    cmaparray[is_idx[i]-cstart] = (PetscScalar)(i + isstart); /* global index of iscol[i] in the submatrix */
    idx[i]                      = is_idx[i]-cstart;           /* local index of iscol[i] */
  }
  ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(cmap,&cmaparray);CHKERRQ(ierr);
  ierr = ISRestoreIndices(iscol,&is_idx);CHKERRQ(ierr);

  /* Get iscol_d */
  ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,iscol_d);CHKERRQ(ierr);
  ierr = ISGetBlockSize(iscol,&i);CHKERRQ(ierr);
  ierr = ISSetBlockSize(*iscol_d,i);CHKERRQ(ierr);

  /* Get isrow_d */
  ierr = ISGetLocalSize(isrow,&m);CHKERRQ(ierr);
  rstart = mat->rmap->rstart;
  ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
  ierr = ISGetIndices(isrow,&is_idx);CHKERRQ(ierr);
  for (i=0; i<m; i++) idx[i] = is_idx[i]-rstart;
  ierr = ISRestoreIndices(isrow,&is_idx);CHKERRQ(ierr);

  ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,isrow_d);CHKERRQ(ierr);
  ierr = ISGetBlockSize(isrow,&i);CHKERRQ(ierr);
  ierr = ISSetBlockSize(*isrow_d,i);CHKERRQ(ierr);

  /* (2) Scatter x and cmap using aij->Mvctx to get their off-process portions (see MatMult_MPIAIJ) */
#if 0
  if (!a->Mvctx_mpi1) {
    /* a->Mvctx causes a random 'count' in optimized builds?
       See src/mat/tests/runex59_2 */
    a->Mvctx_mpi1_flg = PETSC_TRUE;
    ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr);
  }
  Mvctx = a->Mvctx_mpi1;
#endif
  Mvctx = a->Mvctx;
  ierr = VecScatterBegin(Mvctx,x,lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(Mvctx,x,lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  ierr = VecScatterBegin(Mvctx,cmap,lcmap,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(Mvctx,cmap,lcmap,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* (3) Create sequential iscol_o (a subset of iscol) and garray */
  /* Off-process column indices */
  count = 0;
  ierr  = PetscMalloc1(Bn,&idx);CHKERRQ(ierr);
  ierr  = PetscMalloc1(Bn,&cmap1);CHKERRQ(ierr);

  ierr = VecGetArray(lvec,&xarray);CHKERRQ(ierr);
  ierr = VecGetArray(lcmap,&cmaparray);CHKERRQ(ierr);
  for (i=0; i<Bn; i++) {
    if (PetscRealPart(xarray[i]) > -1.0) {
      idx[count]   = i;                                     /* local column index in off-diagonal part B */
      cmap1[count] = (PetscInt)PetscRealPart(cmaparray[i]); /* column index in submat */
      count++;
    }
  }
  printf("[%d] Bn %d, count %d\n",rank,(int)Bn,(int)count);
  ierr = VecRestoreArray(lvec,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(lcmap,&cmaparray);CHKERRQ(ierr);
  if (count != 6) {
    printf("[%d] count %d != 6 lvec:\n",rank,(int)count);
    ierr = VecView(lvec,0);CHKERRQ(ierr);

    printf("[%d] count %d != 6 lcmap:\n",rank,(int)count);
    ierr = VecView(lcmap,0);CHKERRQ(ierr);
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"count %D != 6",count);
  }

  ierr = ISCreateGeneral(PETSC_COMM_SELF,count,idx,PETSC_COPY_VALUES,iscol_o);CHKERRQ(ierr);
  /* Cannot ensure iscol_o has the same blocksize as iscol! */

  ierr = PetscFree(idx);CHKERRQ(ierr);

  *garray = cmap1;

  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&cmap);CHKERRQ(ierr);
  ierr = VecDestroy(&lcmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

int main(int argc,char **args)
{
  Mat            C,A;
  PetscInt       i,j,m = 3,n = 2,rstart,rend;
  PetscMPIInt    size,rank;
  PetscErrorCode ierr;
  PetscScalar    v;
  IS             isrow,iscol;
  IS             isrow_d,iscol_d,iscol_o;
  const PetscInt *garray;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr);
  n    = 2*size;

  ierr = MatCreate(PETSC_COMM_WORLD,&C);CHKERRQ(ierr);
  ierr = MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);CHKERRQ(ierr);
  ierr = MatSetFromOptions(C);CHKERRQ(ierr);
  ierr = MatSetUp(C);CHKERRQ(ierr);

  /*
     This is JUST to generate a nice test matrix: each process fills every
     column of its locally owned rows, producing a dense matrix in AIJ format.
     This is not something one would ever do in practice.
  */
  ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    for (j=0; j<m*n; j++) {
      v    = i + j + 1;
      ierr = MatSetValues(C,1,&i,1,&j,&v,INSERT_VALUES);CHKERRQ(ierr);
    }
  }

  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /*
     Generate a new matrix consisting of every second row and column of
     the original matrix
  */
  ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
  /* Create parallel IS with the rows we want on THIS processor */
  ierr = ISCreateStride(PETSC_COMM_WORLD,(rend-rstart)/2,rstart,2,&isrow);CHKERRQ(ierr);
  /* Create parallel IS with the columns we want on THIS processor (same as the rows for now) */
  ierr = ISCreateStride(PETSC_COMM_WORLD,(rend-rstart)/2,rstart,2,&iscol);CHKERRQ(ierr);

  ierr = ISGetSeqIS_SameColDist_Private(C,isrow,iscol,&isrow_d,&iscol_d,&iscol_o,&garray);CHKERRQ(ierr);

  ierr = ISDestroy(&isrow_d);CHKERRQ(ierr);
  ierr = ISDestroy(&iscol_d);CHKERRQ(ierr);
  ierr = ISDestroy(&iscol_o);CHKERRQ(ierr);
  ierr = PetscFree(garray);CHKERRQ(ierr);

  ierr = MatCreateSubMatrix(C,isrow,iscol,MAT_INITIAL_MATRIX,&A);CHKERRQ(ierr);
  ierr = MatCreateSubMatrix(C,isrow,iscol,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);

  ierr = ISDestroy(&isrow);CHKERRQ(ierr);
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&C);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
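
/*
   A minimal sketch of how this test is meant to be driven; the executable
   name below is an assumption for illustration, not part of this file:

       mpiexec -n 3 ./ex_submatrix

   With 3 MPI ranks the matrix is 18x18, every second row/column is selected
   (9 columns in total, 3 owned per rank), so each rank should find exactly
   6 off-process selected columns, which is what the hard-coded 'count != 6'
   check in ISGetSeqIS_SameColDist_Private() verifies. Other process counts
   would trip that check.
*/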