xref: /petsc/src/mat/impls/shell/shell.c (revision ff2fd23678e82342cbd04bb0e286b55a1de873a7)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: shell.c,v 1.65 1999/03/01 04:53:54 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6    This provides a simple shell for Fortran (and C programmers) to
7   create a very simple matrix class for use with KSP without coding
8   much of anything.
9 */
10 
11 #include "petsc.h"
12 #include "src/mat/matimpl.h"        /*I "mat.h" I*/
13 #include "src/vec/vecimpl.h"
14 
typedef struct {
  int  M, N;                  /* number of global rows, columns */
  int  m, n;                  /* number of local rows, columns */
  int  (*destroy)(Mat);       /* optional user destroy routine, installed via MatShellSetOperation(MATOP_DESTROY) */
  void *ctx;                  /* user-provided context, retrieved with MatShellGetContext() */
} Mat_Shell;
21 
22 #undef __FUNC__
23 #define __FUNC__ "MatShellGetContext"
24 /*@
25     MatShellGetContext - Returns the user-provided context associated with a shell matrix.
26 
27     Not Collective
28 
29     Input Parameter:
30 .   mat - the matrix, should have been created with MatCreateShell()
31 
32     Output Parameter:
33 .   ctx - the user provided context
34 
35     Notes:
36     This routine is intended for use within various shell matrix routines,
37     as set with MatShellSetOperation().
38 
39 .keywords: matrix, shell, get, context
40 
41 .seealso: MatCreateShell(), MatShellSetOperation()
42 @*/
43 int MatShellGetContext(Mat mat,void **ctx)
44 {
45   PetscFunctionBegin;
46   PetscValidHeaderSpecific(mat,MAT_COOKIE);
47   if (mat->type != MATSHELL) *ctx = 0;
48   else                       *ctx = ((Mat_Shell *) (mat->data))->ctx;
49   PetscFunctionReturn(0);
50 }
51 
52 #undef __FUNC__
53 #define __FUNC__ "MatGetSize_Shell"
54 int MatGetSize_Shell(Mat mat,int *M,int *N)
55 {
56   Mat_Shell *shell = (Mat_Shell *) mat->data;
57 
58   PetscFunctionBegin;
59   if (M) *M = shell->M;
60   if (N) *N = shell->N;
61   PetscFunctionReturn(0);
62 }
63 
64 #undef __FUNC__
65 #define __FUNC__ "MatGetLocalSize_Shell"
66 int MatGetLocalSize_Shell(Mat mat,int *m,int *n)
67 {
68   Mat_Shell *shell = (Mat_Shell *) mat->data;
69 
70   PetscFunctionBegin;
71   if (m) *m = shell->m;
72   if (n) *n = shell->n;
73   PetscFunctionReturn(0);
74 }
75 
76 #undef __FUNC__
77 #define __FUNC__ "MatDestroy_Shell"
/*
   MatDestroy_Shell - Tears down a shell matrix: releases the mappings and
   maps hanging off the Mat header, invokes the user-supplied destroy
   routine (if one was set with MatShellSetOperation(MATOP_DESTROY)) so it
   can free its context, then frees the Mat_Shell data and the header.
*/
int MatDestroy_Shell(Mat mat)
{
  int       ierr;
  Mat_Shell *shell;

  PetscFunctionBegin;
  /* reference counted: only the final destroy actually frees the object */
  if (--mat->refct > 0) PetscFunctionReturn(0);

  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping); CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping); CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }
  shell = (Mat_Shell *) mat->data;
  /* user destroy runs before PetscFree/PetscHeaderDestroy so it may still
     inspect the matrix */
  if (shell->destroy) {ierr = (*shell->destroy)(mat);CHKERRQ(ierr);}
  PetscFree(shell);
  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
105 
106 int MatGetOwnershipRange_Shell(Mat mat, int *rstart,int *rend)
107 {
108   int ierr;
109 
110   PetscFunctionBegin;
111   ierr = MPI_Scan(&mat->m,rend,1,MPI_INT,MPI_SUM,mat->comm);CHKERRQ(ierr);
112   *rstart = *rend - mat->m;
113   PetscFunctionReturn(0);
114 }
115 
/*
   Default function table for shell matrices.  Every operation starts out 0
   (unsupported) until the user installs it with MatShellSetOperation();
   only the size/ownership queries and MatGetMaps are provided here.
   NOTE: the initializer is positional -- do not reorder entries.
*/
static struct _MatOps MatOps_Values = {0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_Shell,            /* MatGetSize */
       MatGetLocalSize_Shell,       /* MatGetLocalSize */
       MatGetOwnershipRange_Shell,  /* MatGetOwnershipRange */
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};           /* MatGetMaps */
182 
183 #undef __FUNC__
184 #define __FUNC__ "MatCreateShell"
185 /*@C
186    MatCreateShell - Creates a new matrix class for use with a user-defined
187    private data storage format.
188 
189   Collective on MPI_Comm
190 
191    Input Parameters:
192 +  comm - MPI communicator
193 .  m - number of local rows (must be given)
194 .  n - number of local columns (must be given)
195 .  M - number of global rows (may be PETSC_DETERMINE)
196 .  N - number of global columns (may be PETSC_DETERMINE)
197 -  ctx - pointer to data needed by the shell matrix routines
198 
199    Output Parameter:
200 .  A - the matrix
201 
202    Level: advanced
203 
204   Usage:
205 $    extern int mult(Mat,Vec,Vec);
206 $    MatCreateShell(comm,m,n,M,N,ctx,&mat);
207 $    MatShellSetOperation(mat,MATOP_MULT,(void *)mult);
208 $    [ Use matrix for operations that have been set ]
209 $    MatDestroy(mat);
210 
211    Notes:
212    The shell matrix type is intended to provide a simple class to use
213    with KSP (such as, for use with matrix-free methods). You should not
214    use the shell type if you plan to define a complete matrix class.
215 
216    PETSc requires that matrices and vectors being used for certain
217    operations are partitioned accordingly.  For example, when
218    creating a shell matrix, A, that supports parallel matrix-vector
219    products using MatMult(A,x,y) the user should set the number
220    of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
222    required for use of the matrix interface routines, even though
223    the shell matrix may not actually be physically partitioned.
224    For example,
225 
226 $
227 $     Vec x, y
228 $     extern int mult(Mat,Vec,Vec);
229 $     Mat A
230 $
231 $     VecCreateMPI(comm,PETSC_DECIDE,M,&y);
232 $     VecCreateMPI(comm,PETSC_DECIDE,N,&x);
233 $     VecGetLocalSize(y,&m);
234 $     VecGetLocalSize(x,&n);
235 $     MatCreateShell(comm,m,n,M,N,ctx,&A);
$     MatShellSetOperation(A,MATOP_MULT,(void *)mult);
237 $     MatMult(A,x,y);
238 $     MatDestroy(A);
239 $     VecDestroy(y); VecDestroy(x);
240 $
241 
242 .keywords: matrix, shell, create
243 
244 .seealso: MatShellSetOperation(), MatHasOperation(), MatShellGetContext()
245 @*/
246 int MatCreateShell(MPI_Comm comm,int m,int n,int M,int N,void *ctx,Mat *A)
247 {
248   Mat       B;
249   Mat_Shell *b;
250   int       ierr;
251 
252   PetscFunctionBegin;
253   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATSHELL,"Mat",comm,MatDestroy,MatView);
254   PLogObjectCreate(B);
255   B->factor    = 0;
256   B->assembled = PETSC_TRUE;
257   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
258   B->ops->destroy   = MatDestroy_Shell;
259 
260   b          = PetscNew(Mat_Shell); CHKPTRQ(b);
261   PLogObjectMemory(B,sizeof(struct _p_Mat)+sizeof(Mat_Shell));
262   PetscMemzero(b,sizeof(Mat_Shell));
263   B->data   = (void *) b;
264 
265   if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
266     SETERRQ(1,1,"Must give local row and column count for matrix");
267   }
268 
269   ierr = PetscSplitOwnership(comm,&m,&M);CHKERRQ(ierr);
270   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
271   b->M = M; B->M = M;
272   b->N = N; B->N = N;
273   b->m = m; B->m = m;
274   b->n = n; B->n = n;
275 
276   ierr = MapCreateMPI(comm,m,M,&B->rmap);CHKERRQ(ierr);
277   ierr = MapCreateMPI(comm,n,N,&B->cmap);CHKERRQ(ierr);
278 
279   b->ctx = ctx;
280   *A     = B;
281   PetscFunctionReturn(0);
282 }
283 
284 #undef __FUNC__
285 #define __FUNC__ "MatShellSetOperation"
286 /*@C
287     MatShellSetOperation - Allows user to set a matrix operation for
288                            a shell matrix.
289 
290    Collective on Mat
291 
292     Input Parameters:
293 +   mat - the shell matrix
294 .   op - the name of the operation
295 -   f - the function that provides the operation.
296 
297     Usage:
298 $      extern int usermult(Mat,Vec,Vec);
299 $      ierr = MatCreateShell(comm,m,n,M,N,ctx,&A);
300 $      ierr = MatShellSetOperation(A,MATOP_MULT,(void*) usermult);
301 
302     Notes:
303     See the file petsc/include/mat.h for a complete list of matrix
304     operations, which all have the form MATOP_<OPERATION>, where
305     <OPERATION> is the name (in all capital letters) of the
306     user interface routine (e.g., MatMult() -> MATOP_MULT).
307 
308     All user-provided functions should have the same calling
309     sequence as the usual matrix interface routines, since they
310     are intended to be accessed via the usual matrix interface
311     routines, e.g.,
312 $       MatMult(Mat,Vec,Vec) -> usermult(Mat,Vec,Vec)
313 
314     Within each user-defined routine, the user should call
315     MatShellGetContext() to obtain the user-defined context that was
316     set by MatCreateShell().
317 
318 .keywords: matrix, shell, set, operation
319 
320 .seealso: MatCreateShell(), MatShellGetContext(), MatShellGetOperation()
321 @*/
322 int MatShellSetOperation(Mat mat,MatOperation op, void *f)
323 {
324   PetscFunctionBegin;
325   PetscValidHeaderSpecific(mat,MAT_COOKIE);
326 
327   if (op == MATOP_DESTROY) {
328     if (mat->type == MATSHELL) {
329        Mat_Shell *shell = (Mat_Shell *) mat->data;
330        shell->destroy                 = (int (*)(Mat)) f;
331     }
332     else mat->ops->destroy            = (int (*)(Mat)) f;
333   }
334   else if (op == MATOP_VIEW) mat->ops->view  = (int (*)(Mat,Viewer)) f;
335   else      (((void**)mat->ops)[op]) = f;
336 
337   PetscFunctionReturn(0);
338 }
339 
340 #undef __FUNC__
341 #define __FUNC__ "MatShellGetOperation"
342 /*@C
343     MatShellGetOperation - Gets a matrix function for a shell matrix.
344 
345     Not Collective
346 
347     Input Parameters:
348 +   mat - the shell matrix
349 -   op - the name of the operation
350 
351     Output Parameter:
352 .   f - the function that provides the operation.
353 
354     Notes:
355     See the file petsc/include/mat.h for a complete list of matrix
356     operations, which all have the form MATOP_<OPERATION>, where
357     <OPERATION> is the name (in all capital letters) of the
358     user interface routine (e.g., MatMult() -> MATOP_MULT).
359 
360     All user-provided functions have the same calling
361     sequence as the usual matrix interface routines, since they
362     are intended to be accessed via the usual matrix interface
363     routines, e.g.,
364 $       MatMult(Mat,Vec,Vec) -> usermult(Mat,Vec,Vec)
365 
366     Within each user-defined routine, the user should call
367     MatShellGetContext() to obtain the user-defined context that was
368     set by MatCreateShell().
369 
370 .keywords: matrix, shell, set, operation
371 
372 .seealso: MatCreateShell(), MatShellGetContext(), MatShellSetOperation()
373 @*/
374 int MatShellGetOperation(Mat mat,MatOperation op, void **f)
375 {
376   PetscFunctionBegin;
377   PetscValidHeaderSpecific(mat,MAT_COOKIE);
378 
379   if (op == MATOP_DESTROY) {
380     if (mat->type == MATSHELL) {
381       Mat_Shell *shell = (Mat_Shell *) mat->data;
382       *f = (void *) shell->destroy;
383     } else {
384       *f = (void *) mat->ops->destroy;
385     }
386   } else if (op == MATOP_VIEW) {
387     *f = (void *) mat->ops->view;
388   } else {
389     *f = (((void**)&mat->ops)[op]);
390   }
391 
392   PetscFunctionReturn(0);
393 }
394 
395