xref: /petsc/src/mat/impls/dense/mpi/mpidense.h (revision 8bc8193efbc389280f83b3d41dffa9e2d23e2ace)
1 
2 #include "src/mat/impls/dense/seq/dense.h"
3 
/*  Data structures for basic parallel dense matrix  */
5 
/* Structure to hold the information for factorization of a dense matrix */
/* Most of this info is used in the pipe send/recv routines: rows flow     */
/* through a pipeline of processes, each handling its owned row block.     */
typedef struct {
  PetscInt    nlnr;        /* number of local rows downstream */
  PetscInt    nrend;       /* rend (ending row) for downstream processor */
  PetscInt    nbr,pnbr;   /* downstream and upstream neighbor ranks */
  PetscInt    *tag;        /* message tags */
  PetscInt    currow;      /* current row number */
  PetscInt    phase;       /* phase (used to indicate tag); tested by PIPEPHASE */
  PetscInt    up;          /* are we moving up or down in row number? */
  PetscInt    use_bcast;   /* are we broadcasting max length? */
  PetscInt    nsend;       /* number of sends */
  PetscInt    nrecv;       /* number of receives */

  /* data initially in matrix context */
  PetscInt    k;           /* blocking factor (unused as yet) */
  PetscInt    k2;          /* blocking factor for solves */
  PetscScalar *temp;       /* scratch workspace -- presumably row-sized; TODO confirm at allocation site */
  PetscInt    nlptr;       /* NOTE(review): meaning not visible here -- likely index into nlrows[]; verify */
  PetscInt    *lrows;      /* NOTE(review): local row indices -- inferred from name; verify against factor code */
  PetscInt    *nlrows;     /* NOTE(review): per-process local row counts -- inferred from name; verify */
  PetscInt    *pivots;     /* pivot indices -- presumably from LU factorization; verify against use */
} FactorCtx;
29 
/* True when the pipeline is in phase 0. NOTE: expands the identifier `ctx`,
   so a `FactorCtx *ctx` must be in scope wherever this macro is used. */
#define PIPEPHASE (ctx->phase == 0)
31 
/* Parallel (MPI) dense matrix: each process owns a contiguous block of rows,
   stored locally in A as a sequential dense matrix (see dense.h). */
typedef struct {
  PetscInt           *rowners,*cowners;     /* row/column ownership ranges, one entry per processor */
                                        /* note n == N (each process stores all columns) */
  PetscInt           nvec;                   /* this is the n size for the vector one multiplies with */
  PetscInt           rstart,rend;           /* starting and ending owned rows */
  Mat           A;                      /* local submatrix (sequential dense) */
  PetscMPIInt   size;                   /* size of communicator */
  PetscMPIInt   rank;                   /* rank of proc in communicator */
  /* The following variables are used for matrix assembly */
  PetscTruth    donotstash;             /* if true, off-process values are not stashed during assembly */
  MPI_Request   *send_waits;            /* array of send requests */
  MPI_Request   *recv_waits;            /* array of receive requests */
  PetscInt           nsends,nrecvs;         /* numbers of sends and receives */
  PetscScalar   *svalues,*rvalues;     /* sending and receiving data */
  PetscInt           rmax;                   /* maximum message length */

  /* The following variables are used for matrix-vector products */

  Vec           lvec;                   /* local vector */
  VecScatter    Mvctx;                  /* scatter context for vector */

  PetscTruth    roworiented;            /* if true, row oriented input (default) */
  FactorCtx     *factor;                /* factorization context (see FactorCtx above) */
} Mat_MPIDense;
56 
/* Shared internal routines of the MPI dense implementation (defined in the
   companion .c files; declared here for cross-file use). */
EXTERN PetscErrorCode MatLoad_MPIDense(PetscViewer,const MatType,Mat*);              /* load matrix from a viewer */
EXTERN PetscErrorCode MatSetUpMultiply_MPIDense(Mat);                                /* build lvec/Mvctx for mat-vec products -- presumably; verify in impl */
EXTERN PetscErrorCode MatGetSubMatrices_MPIDense(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]); /* extract submatrices given row/column index sets */
60