xref: /petsc/src/mat/impls/shell/shell.c (revision 7b2a1423c364accaa7ca5cca0f9cdfb1ad6de450)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: shell.c,v 1.63 1999/01/12 20:28:22 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6    This provides a simple shell for Fortran (and C programmers) to
7   create a very simple matrix class for use with KSP without coding
8   much of anything.
9 */
10 
11 #include "petsc.h"
12 #include "src/mat/matimpl.h"        /*I "mat.h" I*/
13 #include "src/vec/vecimpl.h"
14 
15 typedef struct {
16   int  M, N;                  /* number of global rows, columns */
17   int  m, n;                  /* number of local rows, columns */
18   int  (*destroy)(Mat);
19   void *ctx;
20 } Mat_Shell;
21 
22 #undef __FUNC__
23 #define __FUNC__ "MatShellGetContext"
24 /*@
25     MatShellGetContext - Returns the user-provided context associated with a shell matrix.
26 
27     Not Collective
28 
29     Input Parameter:
30 .   mat - the matrix, should have been created with MatCreateShell()
31 
32     Output Parameter:
33 .   ctx - the user provided context
34 
35     Notes:
36     This routine is intended for use within various shell matrix routines,
37     as set with MatShellSetOperation().
38 
39 .keywords: matrix, shell, get, context
40 
41 .seealso: MatCreateShell(), MatShellSetOperation()
42 @*/
43 int MatShellGetContext(Mat mat,void **ctx)
44 {
45   PetscFunctionBegin;
46   PetscValidHeaderSpecific(mat,MAT_COOKIE);
47   if (mat->type != MATSHELL) *ctx = 0;
48   else                       *ctx = ((Mat_Shell *) (mat->data))->ctx;
49   PetscFunctionReturn(0);
50 }
51 
52 #undef __FUNC__
53 #define __FUNC__ "MatGetSize_Shell"
54 int MatGetSize_Shell(Mat mat,int *M,int *N)
55 {
56   Mat_Shell *shell = (Mat_Shell *) mat->data;
57 
58   PetscFunctionBegin;
59   if (M) *M = shell->M;
60   if (N) *N = shell->N;
61   PetscFunctionReturn(0);
62 }
63 
64 #undef __FUNC__
65 #define __FUNC__ "MatGetLocalSize_Shell"
66 int MatGetLocalSize_Shell(Mat mat,int *m,int *n)
67 {
68   Mat_Shell *shell = (Mat_Shell *) mat->data;
69 
70   PetscFunctionBegin;
71   if (m) *m = shell->m;
72   if (n) *n = shell->n;
73   PetscFunctionReturn(0);
74 }
75 
76 #undef __FUNC__
77 #define __FUNC__ "MatDestroy_Shell"
/* Destroys a shell matrix: releases the PETSc bookkeeping objects, invokes
   the user's destroy callback (if one was installed via MATOP_DESTROY),
   and frees the shell context and the matrix header. */
int MatDestroy_Shell(Mat mat)
{
  int       ierr;
  Mat_Shell *shell;

  PetscFunctionBegin;
  /* Reference counted: do nothing until the last reference is dropped. */
  if (--mat->refct > 0) PetscFunctionReturn(0);

  /* Release optional local-to-global mappings and row/column maps. */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping); CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping); CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }
  /* The user's destroy runs before the shell context is freed so it can
     still call MatShellGetContext() on the matrix. */
  shell = (Mat_Shell *) mat->data;
  if (shell->destroy) {ierr = (*shell->destroy)(mat);CHKERRQ(ierr);}
  PetscFree(shell);
  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
105 
106 int MatGetOwnershipRange_Shell(Mat mat, int *rstart,int *rend)
107 {
108   int ierr;
109 
110   PetscFunctionBegin;
111   ierr = MPI_Scan(&mat->m,rend,1,MPI_INT,MPI_SUM,mat->comm);CHKERRQ(ierr);
112   *rstart = *rend - mat->m;
113   PetscFunctionReturn(0);
114 }
115 
/* Dispatch table for MATSHELL.  The slots are positional, so the zeros must
   not be reordered or collapsed.  Every operation defaults to 0 (meaning
   "not supported") until the user installs a function with
   MatShellSetOperation(); only the built-in size/ownership queries are
   pre-filled here. */
static struct _MatOps MatOps_Values = {0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_Shell,           /* global size query */
       MatGetLocalSize_Shell,      /* local size query */
       MatGetOwnershipRange_Shell, /* row-ownership query */
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};
182 
183 #undef __FUNC__
184 #define __FUNC__ "MatCreateShell"
185 /*@C
186    MatCreateShell - Creates a new matrix class for use with a user-defined
187    private data storage format.
188 
189   Collective on MPI_Comm
190 
191    Input Parameters:
192 +  comm - MPI communicator
193 .  m - number of local rows (must be given)
194 .  n - number of local columns (must be given)
195 .  M - number of global rows (may be PETSC_DETERMINE)
196 .  N - number of global columns (may be PETSC_DETERMINE)
197 -  ctx - pointer to data needed by the shell matrix routines
198 
199    Output Parameter:
200 .  A - the matrix
201 
202   Usage:
203 $    extern int mult(Mat,Vec,Vec);
204 $    MatCreateShell(comm,m,n,M,N,ctx,&mat);
205 $    MatShellSetOperation(mat,MATOP_MULT,(void *)mult);
206 $    [ Use matrix for operations that have been set ]
207 $    MatDestroy(mat);
208 
209    Notes:
210    The shell matrix type is intended to provide a simple class to use
211    with KSP (such as, for use with matrix-free methods). You should not
212    use the shell type if you plan to define a complete matrix class.
213 
214    PETSc requires that matrices and vectors being used for certain
215    operations are partitioned accordingly.  For example, when
216    creating a shell matrix, A, that supports parallel matrix-vector
217    products using MatMult(A,x,y) the user should set the number
218    of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
220    required for use of the matrix interface routines, even though
221    the shell matrix may not actually be physically partitioned.
222    For example,
223 
224 $
225 $     Vec x, y
226 $     extern int mult(Mat,Vec,Vec);
227 $     Mat A
228 $
229 $     VecCreateMPI(comm,PETSC_DECIDE,M,&y);
230 $     VecCreateMPI(comm,PETSC_DECIDE,N,&x);
231 $     VecGetLocalSize(y,&m);
232 $     VecGetLocalSize(x,&n);
233 $     MatCreateShell(comm,m,n,M,N,ctx,&A);
$     MatShellSetOperation(A,MATOP_MULT,(void *)mult);
235 $     MatMult(A,x,y);
236 $     MatDestroy(A);
237 $     VecDestroy(y); VecDestroy(x);
238 $
239 
240 .keywords: matrix, shell, create
241 
242 .seealso: MatShellSetOperation(), MatHasOperation(), MatShellGetContext()
243 @*/
244 int MatCreateShell(MPI_Comm comm,int m,int n,int M,int N,void *ctx,Mat *A)
245 {
246   Mat       B;
247   Mat_Shell *b;
248   int       ierr;
249 
250   PetscFunctionBegin;
251   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATSHELL,"Mat",comm,MatDestroy,MatView);
252   PLogObjectCreate(B);
253   B->factor    = 0;
254   B->assembled = PETSC_TRUE;
255   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
256   B->ops->destroy   = MatDestroy_Shell;
257 
258   b          = PetscNew(Mat_Shell); CHKPTRQ(b);
259   PLogObjectMemory(B,sizeof(struct _p_Mat)+sizeof(Mat_Shell));
260   PetscMemzero(b,sizeof(Mat_Shell));
261   B->data   = (void *) b;
262 
263   if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
264     SETERRQ(1,1,"Must give local row and column count for matrix");
265   }
266 
267   if (M == PETSC_DETERMINE || N == PETSC_DETERMINE) {
268     int work[2], sum[2];
269 
270     work[0] = m; work[1] = n;
271     ierr = MPI_Allreduce( work, sum,2,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
272     if (M == PETSC_DECIDE) M = sum[0];
273     if (N == PETSC_DECIDE) N = sum[1];
274   }
275   b->M = M; B->M = M;
276   b->N = N; B->N = N;
277   b->m = m; B->m = m;
278   b->n = n; B->n = n;
279 
280   ierr = MapCreateMPI(comm,m,M,&B->rmap);CHKERRQ(ierr);
281   ierr = MapCreateMPI(comm,n,N,&B->cmap);CHKERRQ(ierr);
282 
283   b->ctx = ctx;
284   *A     = B;
285   PetscFunctionReturn(0);
286 }
287 
288 #undef __FUNC__
289 #define __FUNC__ "MatShellSetOperation"
290 /*@C
291     MatShellSetOperation - Allows user to set a matrix operation for
292                            a shell matrix.
293 
294    Collective on Mat
295 
296     Input Parameters:
297 +   mat - the shell matrix
298 .   op - the name of the operation
299 -   f - the function that provides the operation.
300 
301     Usage:
302 $      extern int usermult(Mat,Vec,Vec);
303 $      ierr = MatCreateShell(comm,m,n,M,N,ctx,&A);
304 $      ierr = MatShellSetOperation(A,MATOP_MULT,(void*) usermult);
305 
306     Notes:
307     See the file petsc/include/mat.h for a complete list of matrix
308     operations, which all have the form MATOP_<OPERATION>, where
309     <OPERATION> is the name (in all capital letters) of the
310     user interface routine (e.g., MatMult() -> MATOP_MULT).
311 
312     All user-provided functions should have the same calling
313     sequence as the usual matrix interface routines, since they
314     are intended to be accessed via the usual matrix interface
315     routines, e.g.,
316 $       MatMult(Mat,Vec,Vec) -> usermult(Mat,Vec,Vec)
317 
318     Within each user-defined routine, the user should call
319     MatShellGetContext() to obtain the user-defined context that was
320     set by MatCreateShell().
321 
322 .keywords: matrix, shell, set, operation
323 
324 .seealso: MatCreateShell(), MatShellGetContext(), MatShellGetOperation()
325 @*/
326 int MatShellSetOperation(Mat mat,MatOperation op, void *f)
327 {
328   PetscFunctionBegin;
329   PetscValidHeaderSpecific(mat,MAT_COOKIE);
330 
331   if (op == MATOP_DESTROY) {
332     if (mat->type == MATSHELL) {
333        Mat_Shell *shell = (Mat_Shell *) mat->data;
334        shell->destroy                 = (int (*)(Mat)) f;
335     }
336     else mat->ops->destroy            = (int (*)(Mat)) f;
337   }
338   else if (op == MATOP_VIEW) mat->ops->view  = (int (*)(Mat,Viewer)) f;
339   else      (((void**)mat->ops)[op]) = f;
340 
341   PetscFunctionReturn(0);
342 }
343 
344 #undef __FUNC__
345 #define __FUNC__ "MatShellGetOperation"
346 /*@C
347     MatShellGetOperation - Gets a matrix function for a shell matrix.
348 
349     Not Collective
350 
351     Input Parameters:
352 +   mat - the shell matrix
353 -   op - the name of the operation
354 
355     Output Parameter:
356 .   f - the function that provides the operation.
357 
358     Notes:
359     See the file petsc/include/mat.h for a complete list of matrix
360     operations, which all have the form MATOP_<OPERATION>, where
361     <OPERATION> is the name (in all capital letters) of the
362     user interface routine (e.g., MatMult() -> MATOP_MULT).
363 
364     All user-provided functions have the same calling
365     sequence as the usual matrix interface routines, since they
366     are intended to be accessed via the usual matrix interface
367     routines, e.g.,
368 $       MatMult(Mat,Vec,Vec) -> usermult(Mat,Vec,Vec)
369 
370     Within each user-defined routine, the user should call
371     MatShellGetContext() to obtain the user-defined context that was
372     set by MatCreateShell().
373 
374 .keywords: matrix, shell, set, operation
375 
376 .seealso: MatCreateShell(), MatShellGetContext(), MatShellSetOperation()
377 @*/
378 int MatShellGetOperation(Mat mat,MatOperation op, void **f)
379 {
380   PetscFunctionBegin;
381   PetscValidHeaderSpecific(mat,MAT_COOKIE);
382 
383   if (op == MATOP_DESTROY) {
384     if (mat->type == MATSHELL) {
385       Mat_Shell *shell = (Mat_Shell *) mat->data;
386       *f = (void *) shell->destroy;
387     } else {
388       *f = (void *) mat->ops->destroy;
389     }
390   } else if (op == MATOP_VIEW) {
391     *f = (void *) mat->ops->view;
392   } else {
393     *f = (((void**)&mat->ops)[op]);
394   }
395 
396   PetscFunctionReturn(0);
397 }
398 
399