xref: /petsc/src/mat/impls/aij/mpi/mpb_aij.c (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
#include <../src/mat/impls/aij/mpi/mpiaij.h>

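/*
   MatGetMultiProcBlock_MPIAIJ - Collects the locally owned rows of an MPIAIJ
   matrix into a new (or reused) MPIAIJ matrix that lives on subComm, a
   sub-communicator of mat's communicator. Each rank keeps its local row and
   column sizes; the diagonal block is shared by reference, and only the
   off-diagonal entries whose columns are owned by ranks inside subComm are
   copied over.
*/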
PetscErrorCode  MatGetMultiProcBlock_MPIAIJ(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
{
  Mat_MPIAIJ     *aij  = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *aijB = (Mat_SeqAIJ*)aij->B->data;
  PetscMPIInt    subCommSize,subCommRank;
  PetscMPIInt    *commRankMap,subRank,rank,commRank;
  PetscInt       *garrayCMap,col,i,j,*nnz,newRow,newCol;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(subComm,&subCommSize));
  PetscCallMPI(MPI_Comm_rank(subComm,&subCommRank));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&commRank));

  /* create subMat object with the relevant layout */
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreate(subComm,subMat));
    PetscCall(MatSetType(*subMat,MATMPIAIJ));
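    /* keep each rank's local row/column sizes; the global sizes are
       recomputed over subComm via PETSC_DECIDE */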
    PetscCall(MatSetSizes(*subMat,mat->rmap->n,mat->cmap->n,PETSC_DECIDE,PETSC_DECIDE));
    PetscCall(MatSetBlockSizesFromMats(*subMat,mat,mat));

    /* need to set up rmap and cmap before preallocation */
    PetscCall(PetscLayoutSetUp((*subMat)->rmap));
    PetscCall(PetscLayoutSetUp((*subMat)->cmap));
  }

  /* create a map of comm ranks from subComm to comm - should commRankMap and garrayCMap be kept for reuse? */
  PetscCall(PetscMalloc1(subCommSize,&commRankMap));
  PetscCallMPI(MPI_Allgather(&commRank,1,MPI_INT,commRankMap,1,MPI_INT,subComm));

  /* Traverse garray (the map from local column indices of the offdiag mat to
     global column indices) and identify the columns owned by ranks outside
     subComm, which must be discarded. For the columns that are kept, store
     newCol+1 in garrayCMap so that a value of 0 marks a discarded column */
  PetscCall(PetscCalloc1(aij->B->cmap->n,&garrayCMap));
  for (i=0; i<aij->B->cmap->n; i++) {
    col = aij->garray[i];
    for (subRank=0; subRank<subCommSize; subRank++) {
      rank = commRankMap[subRank];
      if ((col >= mat->cmap->range[rank]) && (col < mat->cmap->range[rank+1])) {
        garrayCMap[i] = (*subMat)->cmap->range[subRank] + col - mat->cmap->range[rank]+1;
        break;
      }
    }
  }

  if (scall == MAT_INITIAL_MATRIX) {
    /* Compute preallocation for the offdiag mat */
    PetscCall(PetscCalloc1(aij->B->rmap->n,&nnz));
    for (i=0; i<aij->B->rmap->n; i++) {
      for (j=aijB->i[i]; j<aijB->i[i+1]; j++) {
        if (garrayCMap[aijB->j[j]]) nnz[i]++;
      }
    }
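    /* only the off-diagonal block needs entries preallocated (o_nnz = nnz);
       the diagonal block created here is replaced by a reference to aij->A below */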
    PetscCall(MatMPIAIJSetPreallocation(*(subMat),0,NULL,0,nnz));

    /* reuse the diag block of mat as the diag block of subMat: the local
       row/column ownership is unchanged, so the two blocks are identical */
    PetscCall(MatDestroy(&((Mat_MPIAIJ*)((*subMat)->data))->A));
    ((Mat_MPIAIJ*)((*subMat)->data))->A = aij->A;
    PetscCall(PetscObjectReference((PetscObject)aij->A));
  } else if (((Mat_MPIAIJ*)(*subMat)->data)->A != aij->A) {
    /* subMat is reused but holds a stale diag block: release it before
       taking a reference to the current diag block of mat */
    PetscCall(MatDestroy(&((Mat_MPIAIJ*)((*subMat)->data))->A));
    ((Mat_MPIAIJ*)((*subMat)->data))->A = aij->A;
    PetscCall(PetscObjectReference((PetscObject)aij->A));
  }

  /* Traverse aij->B and insert values into subMat */
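  /* a previously assembled subMat must be flagged as unassembled so that
     MatSetValues() below can insert into the existing nonzero pattern */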
  if ((*subMat)->assembled) {
    (*subMat)->was_assembled = PETSC_TRUE;
    (*subMat)->assembled     = PETSC_FALSE;
  }
  for (i=0; i<aij->B->rmap->n; i++) {
    newRow = (*subMat)->rmap->range[subCommRank] + i;
    for (j=aijB->i[i]; j<aijB->i[i+1]; j++) {
      newCol = garrayCMap[aijB->j[j]];
      if (newCol) {
        newCol--; /* undo the +1 stored in garrayCMap */
        PetscCall(MatSetValues_MPIAIJ(*subMat,1,&newRow,1,&newCol,(aijB->a+j),INSERT_VALUES));
      }
    }
  }
  PetscCall(MatAssemblyBegin(*subMat,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*subMat,MAT_FINAL_ASSEMBLY));

  /* deallocate temporary data */
  PetscCall(PetscFree(commRankMap));
  PetscCall(PetscFree(garrayCMap));
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscFree(nnz));
  }
  PetscFunctionReturn(0);
}
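
/*
   Usage sketch (illustrative, not part of this file): this routine is
   normally reached through the public interface MatGetMultiProcBlock().
   The PetscSubcomm setup below is one typical way a caller obtains a
   sub-communicator; A is assumed to be an assembled MPIAIJ matrix.

     Mat          subA;
     PetscSubcomm psub;

     PetscCall(PetscSubcommCreate(PETSC_COMM_WORLD,&psub));
     PetscCall(PetscSubcommSetNumber(psub,2));
     PetscCall(PetscSubcommSetType(psub,PETSC_SUBCOMM_CONTIGUOUS));
     PetscCall(MatGetMultiProcBlock(A,PetscSubcommChild(psub),MAT_INITIAL_MATRIX,&subA));
     ...
     PetscCall(MatDestroy(&subA));
     PetscCall(PetscSubcommDestroy(&psub));
*/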