xref: /petsc/src/mat/impls/aij/mpi/mpiaijpc.c (revision 08480c60afa5ef1d2e4e27b9ebdf48b02c6a2186)
1 #ifndef lint
2 static char vcid[] = "$Id: mpiaijpc.c,v 1.4 1995/10/17 03:24:46 bsmith Exp bsmith $";
3 #endif
/*
   Defines a block Jacobi preconditioner for the MPIAIJ format.
   At the moment this supports only a single block per processor.
   This file knows about the storage format for MPIAIJ matrices.
   This code is nearly identical to that for the MPIROW format.
*/
10 #include "mpiaij.h"
11 #include "src/pc/pcimpl.h"
12 #include "src/pc/impls/bjacobi/bjacobi.h"
13 #include "sles.h"
14 
15 typedef struct {
16   Vec  x;
17 } PC_BJacobiMPIAIJ;
18 
19 int PCDestroy_BJacobiMPIAIJ(PetscObject obj)
20 {
21   PC               pc = (PC) obj;
22   PC_BJacobi       *jac = (PC_BJacobi *) pc->data;
23   PC_BJacobiMPIAIJ *bjac = (PC_BJacobiMPIAIJ *) jac->data;
24   int              i,ierr;
25 
26   for ( i=0; i<jac->n_local; i++ ) {
27     ierr = SLESDestroy(jac->sles[i]); CHKERRQ(ierr);
28   }
29   PETSCFREE(jac->sles);
30   ierr = VecDestroy(bjac->x); CHKERRQ(ierr);
31   PETSCFREE(bjac); PETSCFREE(jac);
32   return 0;
33 }
34 
35 int PCApply_BJacobiMPIAIJ(PC pc,Vec x, Vec y)
36 {
37   int              ierr,its;
38   PC_BJacobi       *jac = (PC_BJacobi *) pc->data;
39   PC_BJacobiMPIAIJ *bjac = (PC_BJacobiMPIAIJ *) jac->data;
40 
41   ierr = SLESSolve(jac->sles[0],x,bjac->x,&its); CHKERRQ(ierr);
42   ierr = VecCopy(bjac->x,y); CHKERRQ(ierr);
43   return 0;
44 }
45 
46 int PCSetUp_BJacobiMPIAIJ(PC pc)
47 {
48   PC_BJacobi       *jac = (PC_BJacobi *) pc->data;
49   Mat              mat = pc->mat, pmat = pc->pmat;
50   Mat_MPIAIJ       *pmatin = (Mat_MPIAIJ *) pmat->data;
51   Mat_MPIAIJ       *matin = 0;
52   int              ierr, m;
53   SLES             sles;
54   Vec              x;
55   PC_BJacobiMPIAIJ *bjac;
56   KSP              subksp;
57   PC               subpc;
58   MatType          type;
59 
60   if (jac->use_true_local) {
61     MatGetType(pc->mat,&type);
62     if (type != MATMPIAIJ) SETERRQ(1,"PCSetUp_BJacobiMPIAIJ:Incompatible matrix type.");
63     matin = (Mat_MPIAIJ *) mat->data;
64   }
65 
66   /* set default direct solver with no Krylov method */
67   if (!pc->setupcalled) {
68     ierr = SLESCreate(MPI_COMM_SELF,&sles); CHKERRQ(ierr);
69     PLogObjectParent(pc,sles);
70     ierr = SLESGetKSP(sles,&subksp); CHKERRQ(ierr);
71     ierr = KSPSetMethod(subksp,KSPPREONLY); CHKERRQ(ierr);
72     ierr = SLESGetPC(sles,&subpc); CHKERRQ(ierr);
73     ierr = PCSetMethod(subpc,PCLU); CHKERRQ(ierr);
74     ierr = SLESSetOptionsPrefix(sles,"-sub_"); CHKERRQ(ierr);
75     ierr = SLESSetFromOptions(sles); CHKERRQ(ierr);
76 /*
77    This is not so good. The only reason we need to generate this vector
78   is so KSP may generate seq vectors for the local solves
79 */
80     ierr = MatGetSize(pmatin->A,&m,&m); CHKERRQ(ierr);
81     ierr = VecCreateSeq(MPI_COMM_SELF,m,&x); CHKERRQ(ierr);
82     PLogObjectParent(pmat,x);
83 
84     pc->destroy  = PCDestroy_BJacobiMPIAIJ;
85     pc->apply    = PCApply_BJacobiMPIAIJ;
86 
87     bjac         = (PC_BJacobiMPIAIJ *) PETSCMALLOC(sizeof(PC_BJacobiMPIAIJ));
88     CHKPTRQ(bjac);
89     PLogObjectMemory(pc,sizeof(PC_BJacobiMPIAIJ));
90     bjac->x      = x;
91 
92     jac->sles    = (SLES*) PETSCMALLOC( sizeof(SLES) ); CHKPTRQ(jac->sles);
93     jac->sles[0] = sles;
94     jac->data    = (void *) bjac;
95   }
96   else {
97     sles = jac->sles[0];
98   }
99   if (jac->use_true_local)
100     ierr = SLESSetOperators(sles,matin->A,pmatin->A,pc->flag);
101   else
102     ierr = SLESSetOperators(sles,pmatin->A,pmatin->A,pc->flag);
103   CHKERRQ(ierr);
104 
105   return 0;
106 }
107 
108 
109