xref: /petsc/src/mat/impls/shell/shell.c (revision 0462333d488596d100b51ddd590820783ab7b437)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: shell.c,v 1.64 1999/01/12 23:15:26 bsmith Exp bsmith $";
3 #endif
4 
/*
   This provides a simple shell that enables Fortran (and C) programmers
   to create a very simple matrix class, for use with KSP, without coding
   much of anything.
*/
10 
11 #include "petsc.h"
12 #include "src/mat/matimpl.h"        /*I "mat.h" I*/
13 #include "src/vec/vecimpl.h"
14 
15 typedef struct {
16   int  M, N;                  /* number of global rows, columns */
17   int  m, n;                  /* number of local rows, columns */
18   int  (*destroy)(Mat);
19   void *ctx;
20 } Mat_Shell;
21 
22 #undef __FUNC__
23 #define __FUNC__ "MatShellGetContext"
24 /*@
25     MatShellGetContext - Returns the user-provided context associated with a shell matrix.
26 
27     Not Collective
28 
29     Input Parameter:
30 .   mat - the matrix, should have been created with MatCreateShell()
31 
32     Output Parameter:
33 .   ctx - the user provided context
34 
35     Notes:
36     This routine is intended for use within various shell matrix routines,
37     as set with MatShellSetOperation().
38 
39 .keywords: matrix, shell, get, context
40 
41 .seealso: MatCreateShell(), MatShellSetOperation()
42 @*/
43 int MatShellGetContext(Mat mat,void **ctx)
44 {
45   PetscFunctionBegin;
46   PetscValidHeaderSpecific(mat,MAT_COOKIE);
47   if (mat->type != MATSHELL) *ctx = 0;
48   else                       *ctx = ((Mat_Shell *) (mat->data))->ctx;
49   PetscFunctionReturn(0);
50 }
51 
52 #undef __FUNC__
53 #define __FUNC__ "MatGetSize_Shell"
54 int MatGetSize_Shell(Mat mat,int *M,int *N)
55 {
56   Mat_Shell *shell = (Mat_Shell *) mat->data;
57 
58   PetscFunctionBegin;
59   if (M) *M = shell->M;
60   if (N) *N = shell->N;
61   PetscFunctionReturn(0);
62 }
63 
64 #undef __FUNC__
65 #define __FUNC__ "MatGetLocalSize_Shell"
66 int MatGetLocalSize_Shell(Mat mat,int *m,int *n)
67 {
68   Mat_Shell *shell = (Mat_Shell *) mat->data;
69 
70   PetscFunctionBegin;
71   if (m) *m = shell->m;
72   if (n) *n = shell->n;
73   PetscFunctionReturn(0);
74 }
75 
76 #undef __FUNC__
77 #define __FUNC__ "MatDestroy_Shell"
78 int MatDestroy_Shell(Mat mat)
79 {
80   int       ierr;
81   Mat_Shell *shell;
82 
83   PetscFunctionBegin;
84   if (--mat->refct > 0) PetscFunctionReturn(0);
85 
86   if (mat->mapping) {
87     ierr = ISLocalToGlobalMappingDestroy(mat->mapping); CHKERRQ(ierr);
88   }
89   if (mat->bmapping) {
90     ierr = ISLocalToGlobalMappingDestroy(mat->bmapping); CHKERRQ(ierr);
91   }
92   if (mat->rmap) {
93     ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
94   }
95   if (mat->cmap) {
96     ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
97   }
98   shell = (Mat_Shell *) mat->data;
99   if (shell->destroy) {ierr = (*shell->destroy)(mat);CHKERRQ(ierr);}
100   PetscFree(shell);
101   PLogObjectDestroy(mat);
102   PetscHeaderDestroy(mat);
103   PetscFunctionReturn(0);
104 }
105 
106 int MatGetOwnershipRange_Shell(Mat mat, int *rstart,int *rend)
107 {
108   int ierr;
109 
110   PetscFunctionBegin;
111   ierr = MPI_Scan(&mat->m,rend,1,MPI_INT,MPI_SUM,mat->comm);CHKERRQ(ierr);
112   *rstart = *rend - mat->m;
113   PetscFunctionReturn(0);
114 }
115 
116 static struct _MatOps MatOps_Values = {0,
117        0,
118        0,
119        0,
120        0,
121        0,
122        0,
123        0,
124        0,
125        0,
126        0,
127        0,
128        0,
129        0,
130        0,
131        0,
132        0,
133        0,
134        0,
135        0,
136        0,
137        0,
138        0,
139        0,
140        0,
141        0,
142        0,
143        0,
144        0,
145        0,
146        MatGetSize_Shell,
147        MatGetLocalSize_Shell,
148        MatGetOwnershipRange_Shell,
149        0,
150        0,
151        0,
152        0,
153        0,
154        0,
155        0,
156        0,
157        0,
158        0,
159        0,
160        0,
161        0,
162        0,
163        0,
164        0,
165        0,
166        0,
167        0,
168        0,
169        0,
170        0,
171        0,
172        0,
173        0,
174        0,
175        0,
176        0,
177        0,
178        0,
179        0,
180        0,
181        MatGetMaps_Petsc};
182 
183 #undef __FUNC__
184 #define __FUNC__ "MatCreateShell"
185 /*@C
186    MatCreateShell - Creates a new matrix class for use with a user-defined
187    private data storage format.
188 
189   Collective on MPI_Comm
190 
191    Input Parameters:
192 +  comm - MPI communicator
193 .  m - number of local rows (must be given)
194 .  n - number of local columns (must be given)
195 .  M - number of global rows (may be PETSC_DETERMINE)
196 .  N - number of global columns (may be PETSC_DETERMINE)
197 -  ctx - pointer to data needed by the shell matrix routines
198 
199    Output Parameter:
200 .  A - the matrix
201 
202   Usage:
203 $    extern int mult(Mat,Vec,Vec);
204 $    MatCreateShell(comm,m,n,M,N,ctx,&mat);
205 $    MatShellSetOperation(mat,MATOP_MULT,(void *)mult);
206 $    [ Use matrix for operations that have been set ]
207 $    MatDestroy(mat);
208 
209    Notes:
210    The shell matrix type is intended to provide a simple class to use
211    with KSP (such as, for use with matrix-free methods). You should not
212    use the shell type if you plan to define a complete matrix class.
213 
214    PETSc requires that matrices and vectors being used for certain
215    operations are partitioned accordingly.  For example, when
216    creating a shell matrix, A, that supports parallel matrix-vector
217    products using MatMult(A,x,y) the user should set the number
218    of local matrix rows to be the number of local elements of the
219    corresponding result vector, y. Note that this is information is
220    required for use of the matrix interface routines, even though
221    the shell matrix may not actually be physically partitioned.
222    For example,
223 
224 $
225 $     Vec x, y
226 $     extern int mult(Mat,Vec,Vec);
227 $     Mat A
228 $
229 $     VecCreateMPI(comm,PETSC_DECIDE,M,&y);
230 $     VecCreateMPI(comm,PETSC_DECIDE,N,&x);
231 $     VecGetLocalSize(y,&m);
232 $     VecGetLocalSize(x,&n);
233 $     MatCreateShell(comm,m,n,M,N,ctx,&A);
234 $     MatShellSetOperation(mat,MATOP_MULT,(void *)mult);
235 $     MatMult(A,x,y);
236 $     MatDestroy(A);
237 $     VecDestroy(y); VecDestroy(x);
238 $
239 
240 .keywords: matrix, shell, create
241 
242 .seealso: MatShellSetOperation(), MatHasOperation(), MatShellGetContext()
243 @*/
244 int MatCreateShell(MPI_Comm comm,int m,int n,int M,int N,void *ctx,Mat *A)
245 {
246   Mat       B;
247   Mat_Shell *b;
248   int       ierr;
249 
250   PetscFunctionBegin;
251   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATSHELL,"Mat",comm,MatDestroy,MatView);
252   PLogObjectCreate(B);
253   B->factor    = 0;
254   B->assembled = PETSC_TRUE;
255   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
256   B->ops->destroy   = MatDestroy_Shell;
257 
258   b          = PetscNew(Mat_Shell); CHKPTRQ(b);
259   PLogObjectMemory(B,sizeof(struct _p_Mat)+sizeof(Mat_Shell));
260   PetscMemzero(b,sizeof(Mat_Shell));
261   B->data   = (void *) b;
262 
263   if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
264     SETERRQ(1,1,"Must give local row and column count for matrix");
265   }
266 
267   ierr = PetscSplitOwnership(comm,&m,&M);CHKERRQ(ierr);
268   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
269   b->M = M; B->M = M;
270   b->N = N; B->N = N;
271   b->m = m; B->m = m;
272   b->n = n; B->n = n;
273 
274   ierr = MapCreateMPI(comm,m,M,&B->rmap);CHKERRQ(ierr);
275   ierr = MapCreateMPI(comm,n,N,&B->cmap);CHKERRQ(ierr);
276 
277   b->ctx = ctx;
278   *A     = B;
279   PetscFunctionReturn(0);
280 }
281 
282 #undef __FUNC__
283 #define __FUNC__ "MatShellSetOperation"
284 /*@C
285     MatShellSetOperation - Allows user to set a matrix operation for
286                            a shell matrix.
287 
288    Collective on Mat
289 
290     Input Parameters:
291 +   mat - the shell matrix
292 .   op - the name of the operation
293 -   f - the function that provides the operation.
294 
295     Usage:
296 $      extern int usermult(Mat,Vec,Vec);
297 $      ierr = MatCreateShell(comm,m,n,M,N,ctx,&A);
298 $      ierr = MatShellSetOperation(A,MATOP_MULT,(void*) usermult);
299 
300     Notes:
301     See the file petsc/include/mat.h for a complete list of matrix
302     operations, which all have the form MATOP_<OPERATION>, where
303     <OPERATION> is the name (in all capital letters) of the
304     user interface routine (e.g., MatMult() -> MATOP_MULT).
305 
306     All user-provided functions should have the same calling
307     sequence as the usual matrix interface routines, since they
308     are intended to be accessed via the usual matrix interface
309     routines, e.g.,
310 $       MatMult(Mat,Vec,Vec) -> usermult(Mat,Vec,Vec)
311 
312     Within each user-defined routine, the user should call
313     MatShellGetContext() to obtain the user-defined context that was
314     set by MatCreateShell().
315 
316 .keywords: matrix, shell, set, operation
317 
318 .seealso: MatCreateShell(), MatShellGetContext(), MatShellGetOperation()
319 @*/
320 int MatShellSetOperation(Mat mat,MatOperation op, void *f)
321 {
322   PetscFunctionBegin;
323   PetscValidHeaderSpecific(mat,MAT_COOKIE);
324 
325   if (op == MATOP_DESTROY) {
326     if (mat->type == MATSHELL) {
327        Mat_Shell *shell = (Mat_Shell *) mat->data;
328        shell->destroy                 = (int (*)(Mat)) f;
329     }
330     else mat->ops->destroy            = (int (*)(Mat)) f;
331   }
332   else if (op == MATOP_VIEW) mat->ops->view  = (int (*)(Mat,Viewer)) f;
333   else      (((void**)mat->ops)[op]) = f;
334 
335   PetscFunctionReturn(0);
336 }
337 
338 #undef __FUNC__
339 #define __FUNC__ "MatShellGetOperation"
340 /*@C
341     MatShellGetOperation - Gets a matrix function for a shell matrix.
342 
343     Not Collective
344 
345     Input Parameters:
346 +   mat - the shell matrix
347 -   op - the name of the operation
348 
349     Output Parameter:
350 .   f - the function that provides the operation.
351 
352     Notes:
353     See the file petsc/include/mat.h for a complete list of matrix
354     operations, which all have the form MATOP_<OPERATION>, where
355     <OPERATION> is the name (in all capital letters) of the
356     user interface routine (e.g., MatMult() -> MATOP_MULT).
357 
358     All user-provided functions have the same calling
359     sequence as the usual matrix interface routines, since they
360     are intended to be accessed via the usual matrix interface
361     routines, e.g.,
362 $       MatMult(Mat,Vec,Vec) -> usermult(Mat,Vec,Vec)
363 
364     Within each user-defined routine, the user should call
365     MatShellGetContext() to obtain the user-defined context that was
366     set by MatCreateShell().
367 
368 .keywords: matrix, shell, set, operation
369 
370 .seealso: MatCreateShell(), MatShellGetContext(), MatShellSetOperation()
371 @*/
372 int MatShellGetOperation(Mat mat,MatOperation op, void **f)
373 {
374   PetscFunctionBegin;
375   PetscValidHeaderSpecific(mat,MAT_COOKIE);
376 
377   if (op == MATOP_DESTROY) {
378     if (mat->type == MATSHELL) {
379       Mat_Shell *shell = (Mat_Shell *) mat->data;
380       *f = (void *) shell->destroy;
381     } else {
382       *f = (void *) mat->ops->destroy;
383     }
384   } else if (op == MATOP_VIEW) {
385     *f = (void *) mat->ops->view;
386   } else {
387     *f = (((void**)&mat->ops)[op]);
388   }
389 
390   PetscFunctionReturn(0);
391 }
392 
393