#include <../src/mat/impls/aij/mpi/mpiaij.h>

/*
   MatGetMultiProcBlock_MPIAIJ - Builds (or refills) the MPIAIJ matrix that
   represents this process-group's block of `mat` on the smaller communicator
   `subComm`.

   Input:
     mat     - parallel MPIAIJ matrix on the original (larger) communicator
     subComm - sub-communicator (a subset of mat's communicator ranks)
     scall   - MAT_INITIAL_MATRIX to create *subMat, MAT_REUSE_MATRIX to refill it

   Output:
     subMat  - the sub-communicator matrix

   Strategy visible in the code below:
     - The diagonal block (aij->A) is shared with the submatrix by reference
       counting, not copied.
     - Off-diagonal (aij->B) entries whose global column lives on a rank that is
       also a member of subComm are re-inserted into *subMat with remapped
       column indices; entries whose column owner is outside subComm are dropped.
*/
PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat mat, MPI_Comm subComm, MatReuse scall,Mat *subMat)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *aij  = (Mat_MPIAIJ*)mat->data;       /* parallel AIJ internals of mat */
  Mat_SeqAIJ     *aijB = (Mat_SeqAIJ*)aij->B->data;    /* CSR arrays (i,j,a) of the off-diagonal block */
  PetscMPIInt    subCommSize,subCommRank;
  PetscMPIInt    *commRankMap,subRank,rank,commRank;
  PetscInt       *garrayCMap,col,i,j,*nnz,newRow,newCol;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(subComm,&subCommSize);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(subComm,&subCommRank);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&commRank);CHKERRMPI(ierr);

  /* create subMat object with the relevant layout */
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = MatCreate(subComm,subMat);CHKERRQ(ierr);
    ierr = MatSetType(*subMat,MATMPIAIJ);CHKERRQ(ierr);
    /* local sizes match mat's; global sizes are summed over subComm only */
    ierr = MatSetSizes(*subMat,mat->rmap->n,mat->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = MatSetBlockSizesFromMats(*subMat,mat,mat);CHKERRQ(ierr);

    /* need to setup rmap and cmap before Preallocation
       (the cmap->range of *subMat is read below when remapping columns) */
    ierr = PetscLayoutSetUp((*subMat)->rmap);CHKERRQ(ierr);
    ierr = PetscLayoutSetUp((*subMat)->cmap);CHKERRQ(ierr);
  }

  /* create a map of comm_rank from subComm to comm - should commRankMap and garrayCMap be kept for reused? */
  ierr = PetscMalloc1(subCommSize,&commRankMap);CHKERRQ(ierr);
  ierr = MPI_Allgather(&commRank,1,MPI_INT,commRankMap,1,MPI_INT,subComm);CHKERRMPI(ierr);

  /* Traverse garray and identify column indices [of offdiag mat] that
     should be discarded. For the ones not discarded, store the newCol+1
     value in garrayCMap (the +1 lets 0 act as the "discard" sentinel,
     since PetscCalloc1 zero-initializes the array). */
  ierr = PetscCalloc1(aij->B->cmap->n,&garrayCMap);CHKERRQ(ierr);
  for (i=0; i<aij->B->cmap->n; i++) {
    col = aij->garray[i];  /* global column (in mat's numbering) of local column i of B */
    for (subRank=0; subRank<subCommSize; subRank++) {
      rank = commRankMap[subRank];  /* subComm rank -> original-comm rank */
      /* keep the column only if its owning rank participates in subComm */
      if ((col >= mat->cmap->range[rank]) && (col < mat->cmap->range[rank+1])) {
        /* translate: offset within owner's range, rebased to subMat's column layout */
        garrayCMap[i] = (*subMat)->cmap->range[subRank] + col - mat->cmap->range[rank]+1;
        break;
      }
    }
  }

  if (scall == MAT_INITIAL_MATRIX) {
    /* Compute preallocation for the offdiag mat: count only the B entries
       that survive the column filter above */
    ierr = PetscCalloc1(aij->B->rmap->n,&nnz);CHKERRQ(ierr);
    for (i=0; i<aij->B->rmap->n; i++) {
      for (j=aijB->i[i]; j<aijB->i[i+1]; j++) {
        if (garrayCMap[aijB->j[j]]) nnz[i]++;
      }
    }
    /* diag block preallocation is 0: the shared aij->A below replaces it anyway */
    ierr = MatMPIAIJSetPreallocation(*(subMat),0,NULL,0,nnz);CHKERRQ(ierr);

    /* reuse diag block with the new submat: drop the freshly created A and
       share mat's A by bumping its reference count */
    ierr = MatDestroy(&((Mat_MPIAIJ*)((*subMat)->data))->A);CHKERRQ(ierr);
    ((Mat_MPIAIJ*)((*subMat)->data))->A = aij->A;
    ierr = PetscObjectReference((PetscObject)aij->A);CHKERRQ(ierr);
  } else if (((Mat_MPIAIJ*)(*subMat)->data)->A != aij->A) {
    /* MAT_REUSE_MATRIX and the submatrix's diag block is no longer mat's A:
       swap mat's A back in.
       NOTE(review): this branch *references* the old A (obj) and then
       overwrites the pointer to it — that looks like it should be a
       MatDestroy of the old A instead; as written the old diag block's
       refcount is incremented while the last visible pointer to it is
       dropped, which appears to leak it. Left unchanged — confirm intent
       against PETSc's reference-counting conventions before altering. */
    PetscObject obj = (PetscObject)((Mat_MPIAIJ*)((*subMat)->data))->A;
    ierr = PetscObjectReference((PetscObject)obj);CHKERRQ(ierr);
    ((Mat_MPIAIJ*)((*subMat)->data))->A = aij->A;
    ierr = PetscObjectReference((PetscObject)aij->A);CHKERRQ(ierr);
  }

  /* Traverse aij->B and insert values into subMat */
  if ((*subMat)->assembled) {
    /* re-open an already-assembled matrix for insertion; was_assembled
       preserves the fact that it had been assembled before */
    (*subMat)->was_assembled = PETSC_TRUE;
    (*subMat)->assembled     = PETSC_FALSE;
  }
  for (i=0; i<aij->B->rmap->n; i++) {
    /* global row in subMat's numbering for local row i */
    newRow = (*subMat)->rmap->range[subCommRank] + i;
    for (j=aijB->i[i]; j<aijB->i[i+1]; j++) {
      newCol = garrayCMap[aijB->j[j]];
      if (newCol) {              /* 0 means the column was discarded */
        newCol--; /* remove the increment */
        ierr = MatSetValues_MPIAIJ(*subMat,1,&newRow,1,&newCol,(aijB->a+j),INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = MatAssemblyBegin(*subMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*subMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* deallocate temporary data */
  ierr = PetscFree(commRankMap);CHKERRQ(ierr);
  ierr = PetscFree(garrayCMap);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    /* nnz is only allocated in the MAT_INITIAL_MATRIX path above */
    ierr = PetscFree(nnz);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}