xref: /petsc/src/mat/impls/aij/mpi/mpiaijpc.c (revision 8ba3a721c9105e9240dbd90dceddbf8833cabcd1)
1 #ifndef lint
2 static char vcid[] = "$Id: mpiaijpc.c,v 1.26 1997/01/06 20:24:32 balay Exp balay $";
3 #endif
4 /*
5    Defines a block Jacobi preconditioner for the MPIAIJ format.
   Handles the special case of a single block per processor.
7    This file knows about storage formats for MPIAIJ matrices.
8    The general case is handled in aijpc.c
9 */
10 #include "src/mat/impls/aij/mpi/mpiaij.h"
11 #include "src/pc/pcimpl.h"
12 #include "src/vec/vecimpl.h"
13 #include "src/pc/impls/bjacobi/bjacobi.h"
14 #include "sles.h"
15 
/*
   Private context for the one-block-per-processor case: two sequential
   work vectors sized to the local diagonal block.  They exist so the
   sequential block solver has Seq vectors to work with (see the note
   in PCApply_BJacobi_MPIAIJ about avoiding copies via VecPlaceArray).
*/
typedef struct {
  Vec  x, y;   /* sequential work vectors for the local block solve */
} PC_BJacobi_MPIAIJ;
19 
20 #undef __FUNC__
21 #define __FUNC__ "PCDestroy_BJacobi_MPIAIJ"
22 int PCDestroy_BJacobi_MPIAIJ(PetscObject obj)
23 {
24   PC                pc = (PC) obj;
25   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
26   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
27   int               ierr;
28 
29   ierr = SLESDestroy(jac->sles[0]); CHKERRQ(ierr);
30   PetscFree(jac->sles);
31   ierr = VecDestroy(bjac->x); CHKERRQ(ierr);
32   ierr = VecDestroy(bjac->y); CHKERRQ(ierr);
33   if (jac->l_lens) PetscFree(jac->l_lens);
34   if (jac->g_lens) PetscFree(jac->g_lens);
35   PetscFree(bjac); PetscFree(jac);
36   return 0;
37 }
38 
39 
40 #undef __FUNC__
41 #define __FUNC__ "PCSetUpOnBlocks_BJacobi_MPIAIJ"
42 int PCSetUpOnBlocks_BJacobi_MPIAIJ(PC pc)
43 {
44   int               ierr;
45   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
46   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
47 
48   ierr = SLESSetUp(jac->sles[0],bjac->x,bjac->y); CHKERRQ(ierr);
49   return 0;
50 }
51 
52 #undef __FUNC__
53 #define __FUNC__ "PCApply_BJacobi_MPIAIJ"
54 int PCApply_BJacobi_MPIAIJ(PC pc,Vec x, Vec y)
55 {
56   int               ierr,its;
57   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
58   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
59   Scalar            *x_array,*x_true_array, *y_array,*y_true_array;
60 
61   /*
62       The VecPlaceArray() is to avoid having to copy the
63     y vector into the bjac->x vector. The reason for
64     the bjac->x vector is that we need a sequential vector
65     for the sequential solve.
66   */
67   VecGetArray_Fast(x,x_array);
68   VecGetArray_Fast(y,y_array);
69   VecGetArray_Fast(bjac->x,x_true_array);
70   VecGetArray_Fast(bjac->y,y_true_array);
71   VecPlaceArray_Fast(bjac->x,x_array);
72   VecPlaceArray_Fast(bjac->y,y_array);
73   ierr = SLESSolve(jac->sles[0],bjac->x,bjac->y,&its);
74   VecPlaceArray_Fast(bjac->x,x_true_array);
75   VecPlaceArray_Fast(bjac->y,y_true_array);
76   return 0;
77 }
78 
#undef __FUNC__
#define __FUNC__ "PCSetUp_BJacobi_MPIAIJ"
/*
   Sets up block Jacobi for the special case of exactly one block per
   processor with an MPIAIJ matrix.  On the first call it creates a
   sequential SLES solver (default: preonly + ILU, overridable via the
   "-sub_" options prefix) plus two sequential work vectors; on every
   call it (re)sets the solver's operators to the local diagonal block.
*/
int PCSetUp_BJacobi_MPIAIJ(PC pc)
{
  PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
  Mat               mat = pc->mat, pmat = pc->pmat;
  Mat_MPIAIJ        *pmatin = (Mat_MPIAIJ *) pmat->data;
  Mat_MPIAIJ        *matin = 0;
  int               ierr, m;
  SLES              sles;
  Vec               x,y;
  PC_BJacobi_MPIAIJ *bjac;
  KSP               subksp;
  PC                subpc;
  MatType           type;

  /* Using the true matrix's local block only works if it too is MPIAIJ */
  if (jac->use_true_local) {
    MatGetType(pc->mat,&type,PETSC_NULL);
    if (type != MATMPIAIJ) SETERRQ(1,0,"Incompatible matrix type.");
    matin = (Mat_MPIAIJ *) mat->data;
  }

  /* set default direct solver with no Krylov method */
  if (!pc->setupcalled) {
    char *prefix;
    ierr = SLESCreate(MPI_COMM_SELF,&sles); CHKERRQ(ierr);
    PLogObjectParent(pc,sles);
    ierr = SLESGetKSP(sles,&subksp); CHKERRQ(ierr);
    ierr = KSPSetType(subksp,KSPPREONLY); CHKERRQ(ierr);
    ierr = SLESGetPC(sles,&subpc); CHKERRQ(ierr);
    ierr = PCSetType(subpc,PCILU); CHKERRQ(ierr);
    /* options for the block solver live under "<pc prefix>sub_" */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = SLESSetOptionsPrefix(sles,prefix); CHKERRQ(ierr);
    ierr = SLESAppendOptionsPrefix(sles,"sub_"); CHKERRQ(ierr);
    ierr = SLESSetFromOptions(sles); CHKERRQ(ierr);
/*
   This is not so good. The only reason we need to generate this vector
  is so KSP may generate seq vectors for the local solves
*/
    /* pmatin->A is the square local diagonal block, so rows == cols == m */
    ierr = MatGetSize(pmatin->A,&m,&m); CHKERRQ(ierr);
    ierr = VecCreateSeq(MPI_COMM_SELF,m,&x); CHKERRQ(ierr);
    ierr = VecCreateSeq(MPI_COMM_SELF,m,&y); CHKERRQ(ierr);
    PLogObjectParent(pc,x);
    PLogObjectParent(pc,y);

    pc->destroy       = PCDestroy_BJacobi_MPIAIJ;
    pc->apply         = PCApply_BJacobi_MPIAIJ;
    pc->setuponblocks = PCSetUpOnBlocks_BJacobi_MPIAIJ;

    bjac         = (PC_BJacobi_MPIAIJ *) PetscMalloc(sizeof(PC_BJacobi_MPIAIJ));CHKPTRQ(bjac);
    PLogObjectMemory(pc,sizeof(PC_BJacobi_MPIAIJ));
    bjac->x      = x;
    bjac->y      = y;

    jac->sles    = (SLES*) PetscMalloc( sizeof(SLES) ); CHKPTRQ(jac->sles);
    jac->sles[0] = sles;
    jac->data    = (void *) bjac;
  }
  else {
    /* re-setup: reuse the solver and work vectors created above */
    sles = jac->sles[0];
    bjac = (PC_BJacobi_MPIAIJ *)jac->data;
  }
  /* Choose the operators for the local solve: the true matrix's diagonal
     block as operator when use_true_local is set, otherwise the
     preconditioner matrix's block for both roles. */
  /* if (jac->l_true[0] == USE_TRUE_MATRIX) {
    ierr = SLESSetOperators(sles,matin->A,matin->A,pc->flag);
  }
  else */ if (jac->use_true_local)
    ierr = SLESSetOperators(sles,matin->A,pmatin->A,pc->flag);
  else
    ierr = SLESSetOperators(sles,pmatin->A,pmatin->A,pc->flag);
  CHKERRQ(ierr);
  return 0;
}
151 
152 
153