xref: /petsc/src/mat/utils/axpy.c (revision be7c243fa330abc10ff5da07cb1acea58678985d)
1 
2 #include <private/matimpl.h>  /*I   "petscmat.h"  I*/
3 
#undef __FUNCT__
#define __FUNCT__ "MatAXPY"
/*@
   MatAXPY - Computes Y = a*X + Y.

   Logically Collective on Mat

   Input Parameters:
+  Y - the matrix that is updated in place
.  a - the scalar multiplier
.  X - the matrix added to Y
-  str - either SAME_NONZERO_PATTERN, DIFFERENT_NONZERO_PATTERN
         or SUBSET_NONZERO_PATTERN (nonzeros of X is a subset of Y's)

   Level: intermediate

.keywords: matrix, add

.seealso: MatAYPX()
 @*/
PetscErrorCode  MatAXPY(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;
  PetscInt       m1,m2,n1,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(X,MAT_CLASSID,3);
  PetscValidHeaderSpecific(Y,MAT_CLASSID,1);
  PetscValidLogicalCollectiveScalar(Y,a,2);
  /* X and Y must have identical global dimensions */
  ierr = MatGetSize(X,&m1,&n1);CHKERRQ(ierr);
  ierr = MatGetSize(Y,&m2,&n2);CHKERRQ(ierr);
  if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Non conforming matrix add: %D %D %D %D",m1,m2,n1,n2);

  ierr = PetscLogEventBegin(MAT_AXPY,Y,0,0,0);CHKERRQ(ierr);
  /* dispatch to the matrix type's specialized axpy when available,
     otherwise fall back to the generic row-by-row implementation */
  if (Y->ops->axpy) {
    ierr = (*Y->ops->axpy)(Y,a,X,str);CHKERRQ(ierr);
  } else {
    ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(MAT_AXPY,Y,0,0,0);CHKERRQ(ierr);
#if defined(PETSC_HAVE_CUSP)
  /* the CPU copy of Y changed; mark any GPU copy as stale */
  if (Y->valid_GPU_matrix != PETSC_CUSP_UNALLOCATED) {
    Y->valid_GPU_matrix = PETSC_CUSP_CPU;
  }
#endif
  PetscFunctionReturn(0);
}
51 
52 #undef __FUNCT__
53 #define __FUNCT__ "MatAXPY_Basic"
54 PetscErrorCode MatAXPY_Basic(Mat Y,PetscScalar a,Mat X,MatStructure str)
55 {
56   PetscInt          i,start,end,j,ncols,m,n;
57   PetscErrorCode    ierr;
58   const PetscInt    *row;
59   PetscScalar       *val;
60   const PetscScalar *vals;
61 
62   PetscFunctionBegin;
63   ierr = MatGetSize(X,&m,&n);CHKERRQ(ierr);
64   ierr = MatGetOwnershipRange(X,&start,&end);CHKERRQ(ierr);
65   if (a == 1.0) {
66     for (i = start; i < end; i++) {
67       ierr = MatGetRow(X,i,&ncols,&row,&vals);CHKERRQ(ierr);
68       ierr = MatSetValues(Y,1,&i,ncols,row,vals,ADD_VALUES);CHKERRQ(ierr);
69       ierr = MatRestoreRow(X,i,&ncols,&row,&vals);CHKERRQ(ierr);
70     }
71   } else {
72     ierr = PetscMalloc((n+1)*sizeof(PetscScalar),&val);CHKERRQ(ierr);
73     for (i=start; i<end; i++) {
74       ierr = MatGetRow(X,i,&ncols,&row,&vals);CHKERRQ(ierr);
75       for (j=0; j<ncols; j++) {
76 	val[j] = a*vals[j];
77       }
78       ierr = MatSetValues(Y,1,&i,ncols,row,val,ADD_VALUES);CHKERRQ(ierr);
79       ierr = MatRestoreRow(X,i,&ncols,&row,&vals);CHKERRQ(ierr);
80     }
81     ierr = PetscFree(val);CHKERRQ(ierr);
82   }
83   ierr = MatAssemblyBegin(Y,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
84   ierr = MatAssemblyEnd(Y,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
85   PetscFunctionReturn(0);
86 }
87 
88 #undef __FUNCT__
89 #define __FUNCT__ "MatAXPY_BasicWithPreallocation"
90 PetscErrorCode MatAXPY_BasicWithPreallocation(Mat B,Mat Y,PetscScalar a,Mat X,MatStructure str)
91 {
92   PetscInt          i,start,end,j,ncols,m,n;
93   PetscErrorCode    ierr;
94   const PetscInt    *row;
95   PetscScalar       *val;
96   const PetscScalar *vals;
97 
98   PetscFunctionBegin;
99   ierr = MatGetSize(X,&m,&n);CHKERRQ(ierr);
100   ierr = MatGetOwnershipRange(X,&start,&end);CHKERRQ(ierr);
101   if (a == 1.0) {
102     for (i = start; i < end; i++) {
103       ierr = MatGetRow(Y,i,&ncols,&row,&vals);CHKERRQ(ierr);
104       ierr = MatSetValues(B,1,&i,ncols,row,vals,ADD_VALUES);CHKERRQ(ierr);
105       ierr = MatRestoreRow(Y,i,&ncols,&row,&vals);CHKERRQ(ierr);
106 
107       ierr = MatGetRow(X,i,&ncols,&row,&vals);CHKERRQ(ierr);
108       ierr = MatSetValues(B,1,&i,ncols,row,vals,ADD_VALUES);CHKERRQ(ierr);
109       ierr = MatRestoreRow(X,i,&ncols,&row,&vals);CHKERRQ(ierr);
110     }
111   } else {
112     ierr = PetscMalloc((n+1)*sizeof(PetscScalar),&val);CHKERRQ(ierr);
113     for (i=start; i<end; i++) {
114       ierr = MatGetRow(Y,i,&ncols,&row,&vals);CHKERRQ(ierr);
115       ierr = MatSetValues(B,1,&i,ncols,row,vals,ADD_VALUES);CHKERRQ(ierr);
116       ierr = MatRestoreRow(Y,i,&ncols,&row,&vals);CHKERRQ(ierr);
117 
118       ierr = MatGetRow(X,i,&ncols,&row,&vals);CHKERRQ(ierr);
119       for (j=0; j<ncols; j++) {
120 	val[j] = a*vals[j];
121       }
122       ierr = MatSetValues(B,1,&i,ncols,row,val,ADD_VALUES);CHKERRQ(ierr);
123       ierr = MatRestoreRow(X,i,&ncols,&row,&vals);CHKERRQ(ierr);
124     }
125     ierr = PetscFree(val);CHKERRQ(ierr);
126   }
127   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
128   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
129   PetscFunctionReturn(0);
130 }
131 
#undef __FUNCT__
#define __FUNCT__ "MatShift"
/*@
   MatShift - Computes Y =  Y + a I, where a is a PetscScalar and I is the identity matrix.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  Y - the matrix
-  a - the PetscScalar

   Level: intermediate

.keywords: matrix, add, shift

.seealso: MatDiagonalSet()
 @*/
PetscErrorCode  MatShift(Mat Y,PetscScalar a)
{
  PetscErrorCode ierr;
  PetscInt       i,start,end;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(Y,MAT_CLASSID,1);
  if (!Y->assembled) SETERRQ(((PetscObject)Y)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (Y->factortype) SETERRQ(((PetscObject)Y)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(Y,1);

  /* use the matrix type's specialized shift when available; otherwise add
     a to each locally owned diagonal entry one at a time */
  if (Y->ops->shift) {
    ierr = (*Y->ops->shift)(Y,a);CHKERRQ(ierr);
  } else {
    PetscScalar alpha = a;
    ierr = MatGetOwnershipRange(Y,&start,&end);CHKERRQ(ierr);
    for (i=start; i<end; i++) {
      ierr = MatSetValues(Y,1,&i,1,&i,&alpha,ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(Y,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(Y,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
#if defined(PETSC_HAVE_CUSP)
  /* the CPU copy of Y changed; mark any GPU copy as stale */
  if (Y->valid_GPU_matrix != PETSC_CUSP_UNALLOCATED) {
    Y->valid_GPU_matrix = PETSC_CUSP_CPU;
  }
#endif
  PetscFunctionReturn(0);
}
178 
179 #undef __FUNCT__
180 #define __FUNCT__ "MatDiagonalSet_Default"
181 PetscErrorCode  MatDiagonalSet_Default(Mat Y,Vec D,InsertMode is)
182 {
183   PetscErrorCode ierr;
184   PetscInt       i,start,end,vstart,vend;
185   PetscScalar    *v;
186 
187   PetscFunctionBegin;
188   ierr = VecGetOwnershipRange(D,&vstart,&vend);CHKERRQ(ierr);
189   ierr = MatGetOwnershipRange(Y,&start,&end);CHKERRQ(ierr);
190   if (vstart != start || vend != end) {
191     SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Vector ownership range not compatible with matrix: %D %D vec %D %D mat",vstart,vend,start,end);
192   }
193   ierr = VecGetArray(D,&v);CHKERRQ(ierr);
194   for (i=start; i<end; i++) {
195     ierr = MatSetValues(Y,1,&i,1,&i,v+i-start,is);CHKERRQ(ierr);
196   }
197   ierr = VecRestoreArray(D,&v);CHKERRQ(ierr);
198   ierr = MatAssemblyBegin(Y,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
199   ierr = MatAssemblyEnd(Y,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
#undef __FUNCT__
#define __FUNCT__ "MatDiagonalSet"
/*@
   MatDiagonalSet - Computes Y = Y + D, where D is a diagonal matrix
   that is represented as a vector. Or Y[i,i] = D[i] if InsertMode is
   INSERT_VALUES.

   Neighbor-wise Collective on Mat and Vec

   Input Parameters:
+  Y - the input matrix
.  D - the diagonal matrix, represented as a vector
-  is - INSERT_VALUES or ADD_VALUES

   Level: intermediate

.keywords: matrix, add, shift, diagonal

.seealso: MatShift()
@*/
PetscErrorCode  MatDiagonalSet(Mat Y,Vec D,InsertMode is)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(Y,MAT_CLASSID,1);
  PetscValidHeaderSpecific(D,VEC_CLASSID,2);
  /* dispatch to the matrix type's specialized routine when available */
  if (Y->ops->diagonalset) {
    ierr = (*Y->ops->diagonalset)(Y,D,is);CHKERRQ(ierr);
  } else {
    ierr = MatDiagonalSet_Default(Y,D,is);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
237 
238 #undef __FUNCT__
239 #define __FUNCT__ "MatAYPX"
240 /*@
241    MatAYPX - Computes Y = a*Y + X.
242 
243    Logically on Mat
244 
245    Input Parameters:
246 +  a - the PetscScalar multiplier
247 .  Y - the first matrix
248 .  X - the second matrix
249 -  str - either SAME_NONZERO_PATTERN, DIFFERENT_NONZERO_PATTERN or SUBSET_NONZERO_PATTERN
250 
251    Level: intermediate
252 
253 .keywords: matrix, add
254 
255 .seealso: MatAXPY()
256  @*/
257 PetscErrorCode  MatAYPX(Mat Y,PetscScalar a,Mat X,MatStructure str)
258 {
259   PetscScalar    one = 1.0;
260   PetscErrorCode ierr;
261   PetscInt       mX,mY,nX,nY;
262 
263   PetscFunctionBegin;
264   PetscValidHeaderSpecific(X,MAT_CLASSID,3);
265   PetscValidHeaderSpecific(Y,MAT_CLASSID,1);
266   PetscValidLogicalCollectiveScalar(Y,a,2);
267   ierr = MatGetSize(X,&mX,&nX);CHKERRQ(ierr);
268   ierr = MatGetSize(X,&mY,&nY);CHKERRQ(ierr);
269   if (mX != mY || nX != nY) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Non conforming matrices: %D %D first %D %D second",mX,mY,nX,nY);
270 
271   ierr = MatScale(Y,a);CHKERRQ(ierr);
272   ierr = MatAXPY(Y,one,X,str);CHKERRQ(ierr);
273   PetscFunctionReturn(0);
274 }
275 
#undef __FUNCT__
#define __FUNCT__ "MatComputeExplicitOperator"
/*@
    MatComputeExplicitOperator - Computes the explicit matrix

    Collective on Mat

    Input Parameter:
.   inmat - the matrix

    Output Parameter:
.   mat - the explict preconditioned operator

    Notes:
    This computation is done by applying the operators to columns of the
    identity matrix.

    Currently, this routine uses a dense matrix format when 1 processor
    is used and a sparse format otherwise.  This routine is costly in general,
    and is recommended for use only with relatively small systems.

    Level: advanced

.keywords: Mat, compute, explicit, operator

@*/
PetscErrorCode  MatComputeExplicitOperator(Mat inmat,Mat *mat)
{
  Vec            in,out;
  PetscErrorCode ierr;
  PetscInt       i,m,n,M,N,*rows,start,end;
  MPI_Comm       comm;
  PetscScalar    *array,zero = 0.0,one = 1.0;
  PetscMPIInt    size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(inmat,MAT_CLASSID,1);
  PetscValidPointer(mat,2);

  comm = ((PetscObject)inmat)->comm;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  ierr = MatGetLocalSize(inmat,&m,&n);CHKERRQ(ierr);
  ierr = MatGetSize(inmat,&M,&N);CHKERRQ(ierr);
  /* work vectors compatible with inmat: 'in' holds a column of the identity,
     'out' receives inmat applied to it */
  ierr = MatGetVecs(inmat,&in,&out);CHKERRQ(ierr);
  ierr = VecSetOption(in,VEC_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(out,&start,&end);CHKERRQ(ierr);
  /* global row indices owned by this process, used when scattering each
     result column into *mat */
  ierr = PetscMalloc(m*sizeof(PetscInt),&rows);CHKERRQ(ierr);
  for (i=0; i<m; i++) {rows[i] = start + i;}

  /* dense storage on one process, AIJ otherwise (preallocated generously:
     n diagonal-block and N-n off-diagonal-block entries per row) */
  ierr = MatCreate(comm,mat);CHKERRQ(ierr);
  ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
  if (size == 1) {
    ierr = MatSetType(*mat,MATSEQDENSE);CHKERRQ(ierr);
    ierr = MatSeqDenseSetPreallocation(*mat,PETSC_NULL);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(*mat,n,PETSC_NULL,N-n,PETSC_NULL);CHKERRQ(ierr);
  }

  /* column i of the result is inmat applied to e_i */
  for (i=0; i<N; i++) {

    ierr = VecSet(in,zero);CHKERRQ(ierr);
    ierr = VecSetValues(in,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr);
    ierr = VecAssemblyBegin(in);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(in);CHKERRQ(ierr);

    ierr = MatMult(inmat,in,out);CHKERRQ(ierr);

    ierr = VecGetArray(out,&array);CHKERRQ(ierr);
    ierr = MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);CHKERRQ(ierr);
    ierr = VecRestoreArray(out,&array);CHKERRQ(ierr);

  }
  ierr = PetscFree(rows);CHKERRQ(ierr);
  ierr = VecDestroy(&out);CHKERRQ(ierr);
  ierr = VecDestroy(&in);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
357 
/* Get the map xtoy which is used by MatAXPY() in the case of SUBSET_NONZERO_PATTERN */
#undef __FUNCT__
#define __FUNCT__ "MatAXPYGetxtoy_Private"
/* For each nonzero of X, find the position of the matching nonzero in Y's
   value array and record it in *xtoy (length xi[m], caller frees).

   Parameters (CSR data for the m local rows of X and Y):
   .  xi,yi - row-pointer arrays; advanced one slot per row below, so *xi / *yi
              always point at the current row's start offset
   .  xj,yj - column-index arrays
   .  xgarray,ygarray - local-to-global column maps; NULL for sequential
              matrices, where xj/yj already hold usable column indices

   NOTE(review): the merge below assumes column indices are sorted ascending
   within each row of both matrices, and errors out if a column of X has no
   matching column in Y (i.e. X is not a nonzero subset of Y). */
PetscErrorCode MatAXPYGetxtoy_Private(PetscInt m,PetscInt *xi,PetscInt *xj,PetscInt *xgarray, PetscInt *yi,PetscInt *yj,PetscInt *ygarray, PetscInt **xtoy)
{
  PetscErrorCode ierr;
  PetscInt       row,i,nz,xcol,ycol,jx,jy,*x2y;

  PetscFunctionBegin;
  /* xi[m] is the total number of nonzeros in X */
  ierr = PetscMalloc(xi[m]*sizeof(PetscInt),&x2y);CHKERRQ(ierr);
  i = 0;
  for (row=0; row<m; row++){
    nz = xi[1] - xi[0];  /* nonzeros in the current row of X */
    jy = 0;
    for (jx=0; jx<nz; jx++,jy++){
      if (xgarray && ygarray){
        xcol = xgarray[xj[*xi + jx]];
        ycol = ygarray[yj[*yi + jy]];
      } else {
        xcol = xj[*xi + jx];
        ycol = yj[*yi + jy];  /* col index for y */
      }
      /* advance through Y's (denser) row until the columns line up */
      while ( ycol < xcol ) {
        jy++;
        if (ygarray){
          ycol = ygarray[yj[*yi + jy]];
        } else {
          ycol = yj[*yi + jy];
        }
      }
      if (xcol != ycol) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"X matrix entry (%D,%D) is not in Y matrix",row,ycol);
      x2y[i++] = *yi + jy;  /* offset into Y's value array */
    }
    xi++; yi++;  /* step both row pointers to the next row */
  }
  *xtoy = x2y;
  PetscFunctionReturn(0);
}
396