#include "../src/mat/impls/aij/mpi/mpiaij.h"

/*

   This routine creates multiple [bjacobi] 'parallel submatrices' from
   a given 'mat' object. Each submatrix can span multiple procs.

   The submatrix partition across processors is dictated by 'subComm', a
   communicator obtained by comm_split(comm). Note: the comm_split
   is not restricted to be grouped with consecutive original ranks.

   Due to the comm_split() usage, the parallel layout of the submatrices
   map directly to the layout of the original matrix [wrt the local
   row,col partitioning]. So the original 'DiagonalMat' naturally maps
   into the 'DiagonalMat' of the subMat, hence it is used directly from
   the subMat. However the offDiagMat loses some columns - and this is
   reconstructed with MatSetValues()

*/

#undef __FUNCT__
#define __FUNCT__ "MatGetMultiProcBlock_MPIAIJ"
/*
   MatGetMultiProcBlock_MPIAIJ - builds on 'subComm' the submatrix of 'mat'
   owned by the processes of 'subComm'.

   Input:
     mat     - original MPIAIJ matrix, distributed over the parent comm
     subComm - sub-communicator (from a comm split of mat's comm) whose
               processes will share the new submatrix

   Output:
     subMat  - newly created MPIAIJ matrix on subComm; caller is responsible
               for destroying it

   Collective on subComm (performs an MPI_Allgather on it), so every rank of
   subComm must call this routine.
*/
PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat mat, MPI_Comm subComm, Mat* subMat)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *aij  = (Mat_MPIAIJ*)mat->data;    /* A = diagonal block, B = off-diagonal block, garray = global cols of B */
  Mat_SeqAIJ*    aijB  = (Mat_SeqAIJ*)aij->B->data; /* CSR arrays (i,j,a) of the off-diagonal block */
  PetscMPIInt    commRank,subCommSize,subCommRank;
  PetscMPIInt    *commRankMap,subRank,rank;         /* commRankMap[r] = parent-comm rank of subComm rank r */
  PetscInt       *garrayCMap,col,i,j,*nnz,newRow,newCol;

  PetscFunctionBegin;

  /* create subMat object with the relevant layout: local row/col sizes are
     inherited from 'mat', so each process keeps exactly its original rows */
  ierr = MatCreate(subComm,subMat);CHKERRQ(ierr);
  ierr = MatSetType(*subMat,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(*subMat,mat->rmap->n,mat->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  /* need to setup rmap and cmap before Preallocation */
  ierr = PetscLayoutSetBlockSize((*subMat)->rmap,mat->rmap->bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize((*subMat)->cmap,mat->cmap->bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp((*subMat)->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp((*subMat)->cmap);CHKERRQ(ierr);

  /* create a map of comm_rank from subComm to comm */
  ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&commRank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(subComm,&subCommSize);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(subComm,&subCommRank);CHKERRQ(ierr);
  ierr = PetscMalloc(subCommSize*sizeof(PetscMPIInt),&commRankMap);CHKERRQ(ierr);
  ierr = MPI_Allgather(&commRank,1,MPI_INT,commRankMap,1,MPI_INT,subComm);CHKERRQ(ierr);

  /* Traverse garray and identify column indices [of offdiag mat] that
     should be discarded (i.e. columns owned by ranks not present in subComm).
     For the ones not discarded, store the newCol+1 value in garrayCMap; the
     +1 lets a stored 0 (from the memzero below) mean "discarded". */
  ierr = PetscMalloc(aij->B->cmap->n*sizeof(PetscInt),&garrayCMap);CHKERRQ(ierr);
  ierr = PetscMemzero(garrayCMap,aij->B->cmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<aij->B->cmap->n; i++) {
    col = aij->garray[i]; /* global column index (old 'mat' numbering) of local off-diag column i */
    for (subRank=0; subRank<subCommSize; subRank++) {
      rank = commRankMap[subRank];
      if ((col >= mat->cmap->range[rank]) && (col < mat->cmap->range[rank+1])) {
        /* translate: old global col -> owner-local offset -> new global col (subMat numbering), plus sentinel +1 */
        garrayCMap[i] = (*subMat)->cmap->range[subRank] + col - mat->cmap->range[rank]+1;
        break;
      }
    }
  }

  /* Now compute preallocation for the offdiag mat: count the surviving
     (non-discarded) entries in each local row of B */
  ierr = PetscMalloc(aij->B->rmap->n*sizeof(PetscInt),&nnz);CHKERRQ(ierr);
  ierr = PetscMemzero(nnz,aij->B->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<aij->B->rmap->n; i++) {
    for (j=aijB->i[i]; j<aijB->i[i+1]; j++) {
      if (garrayCMap[aijB->j[j]]) nnz[i]++;
    }
  }
  /* diag-block preallocation arguments are NULL/0: the diag block created
     here is destroyed and replaced by a reference to aij->A just below */
  ierr = MatMPIAIJSetPreallocation(*(subMat),PETSC_NULL,PETSC_NULL,PETSC_NULL,nnz);CHKERRQ(ierr);

  /* reuse diag block with the new submat: the local row/col partitioning
     matches 'mat' (see header comment), so aij->A is shared via reference
     counting instead of being copied */
  ierr = MatDestroy(((Mat_MPIAIJ*)((*subMat)->data))->A);CHKERRQ(ierr);
  ((Mat_MPIAIJ*)((*subMat)->data))->A = aij->A;
  ierr = PetscObjectReference((PetscObject)aij->A);CHKERRQ(ierr);

  /* Now traverse aij->B and insert the surviving values into subMat */
  for (i=0; i<aij->B->rmap->n; i++) {
    newRow = (*subMat)->rmap->range[subCommRank] + i; /* global row index in subMat numbering */
    for (j=aijB->i[i]; j<aijB->i[i+1]; j++) {
      newCol = garrayCMap[aijB->j[j]];
      if (newCol) {
        newCol--; /* remove the increment (the +1 sentinel added above) */
        ierr = MatSetValues(*subMat,1,&newRow,1,&newCol,(aijB->a+j),INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }

  /* assemble the submat */
  ierr = MatAssemblyBegin(*subMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*subMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* deallocate temporary data */
  ierr = PetscFree(commRankMap);CHKERRQ(ierr);
  ierr = PetscFree(garrayCMap);CHKERRQ(ierr);
  ierr = PetscFree(nnz);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}