#include <../src/mat/impls/aij/mpi/mpiaij.h>

PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat) {
  Mat_MPIAIJ  *aij  = (Mat_MPIAIJ *)mat->data;
  Mat_SeqAIJ  *aijB = (Mat_SeqAIJ *)aij->B->data;
  PetscMPIInt  subCommSize, subCommRank;
  PetscMPIInt *commRankMap, subRank, rank, commRank;
  PetscInt    *garrayCMap, col, i, j, *nnz, newRow, newCol;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
  PetscCallMPI(MPI_Comm_rank(subComm, &subCommRank));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &commRank));

  /* create subMat object with the relevant layout */
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreate(subComm, subMat));
    PetscCall(MatSetType(*subMat, MATMPIAIJ));
    PetscCall(MatSetSizes(*subMat, mat->rmap->n, mat->cmap->n, PETSC_DECIDE, PETSC_DECIDE));
    PetscCall(MatSetBlockSizesFromMats(*subMat, mat, mat));

    /* need to set up rmap and cmap before preallocation */
    PetscCall(PetscLayoutSetUp((*subMat)->rmap));
    PetscCall(PetscLayoutSetUp((*subMat)->cmap));
  }

  /* create a map of comm_rank from subComm to comm - should commRankMap and garrayCMap be kept for reuse? */
  PetscCall(PetscMalloc1(subCommSize, &commRankMap));
  PetscCallMPI(MPI_Allgather(&commRank, 1, MPI_INT, commRankMap, 1, MPI_INT, subComm));

  /* Traverse garray and identify column indices [of offdiag mat] that
     should be discarded. For the ones not discarded, store the newCol+1
     value in garrayCMap */
  PetscCall(PetscCalloc1(aij->B->cmap->n, &garrayCMap));
  for (i = 0; i < aij->B->cmap->n; i++) {
    col = aij->garray[i];
    for (subRank = 0; subRank < subCommSize; subRank++) {
      rank = commRankMap[subRank];
      if ((col >= mat->cmap->range[rank]) && (col < mat->cmap->range[rank + 1])) {
        garrayCMap[i] = (*subMat)->cmap->range[subRank] + col - mat->cmap->range[rank] + 1;
        break;
      }
    }
  }

  if (scall == MAT_INITIAL_MATRIX) {
    /* Compute preallocation for the offdiag mat */
    PetscCall(PetscCalloc1(aij->B->rmap->n, &nnz));
    for (i = 0; i < aij->B->rmap->n; i++) {
      for (j = aijB->i[i]; j < aijB->i[i + 1]; j++) {
        if (garrayCMap[aijB->j[j]]) nnz[i]++;
      }
    }
    PetscCall(MatMPIAIJSetPreallocation(*(subMat), 0, NULL, 0, nnz));

    /* reuse diag block with the new submat */
    PetscCall(MatDestroy(&((Mat_MPIAIJ *)((*subMat)->data))->A));
    ((Mat_MPIAIJ *)((*subMat)->data))->A = aij->A;
    PetscCall(PetscObjectReference((PetscObject)aij->A));
  } else if (((Mat_MPIAIJ *)(*subMat)->data)->A != aij->A) {
    PetscObject obj = (PetscObject)((Mat_MPIAIJ *)((*subMat)->data))->A;

    PetscCall(PetscObjectReference((PetscObject)obj));
    ((Mat_MPIAIJ *)((*subMat)->data))->A = aij->A;
    PetscCall(PetscObjectReference((PetscObject)aij->A));
  }

  /* Traverse aij->B and insert values into subMat */
  if ((*subMat)->assembled) {
    (*subMat)->was_assembled = PETSC_TRUE;
    (*subMat)->assembled     = PETSC_FALSE;
  }
  for (i = 0; i < aij->B->rmap->n; i++) {
    newRow = (*subMat)->rmap->range[subCommRank] + i;
    for (j = aijB->i[i]; j < aijB->i[i + 1]; j++) {
      newCol = garrayCMap[aijB->j[j]];
      if (newCol) {
        newCol--; /* remove the +1 shift used to mark kept columns */
        PetscCall(MatSetValues_MPIAIJ(*subMat, 1, &newRow, 1, &newCol, (aijB->a + j), INSERT_VALUES));
      }
    }
  }
  PetscCall(MatAssemblyBegin(*subMat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*subMat, MAT_FINAL_ASSEMBLY));

  /* deallocate temporary data */
  PetscCall(PetscFree(commRankMap));
  PetscCall(PetscFree(garrayCMap));
  if (scall == MAT_INITIAL_MATRIX) { PetscCall(PetscFree(nnz)); }
  PetscFunctionReturn(0);
}
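
/* A minimal usage sketch (hypothetical driver, not part of this file): callers
   normally reach the routine above through the public MatGetMultiProcBlock()
   interface, which dispatches on the matrix type. Here the parent communicator
   is split into two halves of contiguous ranks and each half extracts its block
   of A; the second call exercises the MAT_REUSE_MATRIX path handled above. The
   function name and the half/half split are illustrative assumptions only. */
PetscErrorCode MatGetMultiProcBlockExample(Mat A) {
  MPI_Comm    comm, subComm;
  PetscMPIInt rank, size;
  Mat         subA;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* color the lower and upper halves of the ranks into two subcommunicators */
  PetscCallMPI(MPI_Comm_split(comm, rank < size / 2 ? 0 : 1, rank, &subComm));
  PetscCall(MatGetMultiProcBlock(A, subComm, MAT_INITIAL_MATRIX, &subA));
  /* ... after changing numerical values of A (same nonzero pattern),
     refresh subA in place ... */
  PetscCall(MatGetMultiProcBlock(A, subComm, MAT_REUSE_MATRIX, &subA));
  PetscCall(MatDestroy(&subA));
  PetscCallMPI(MPI_Comm_free(&subComm));
  PetscFunctionReturn(0);
}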