xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision c877dd1b0ef103f8a5e1c6faaa97a532fd33b12d)
1 /*
2     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3 */
4 #include "src/mat/impls/adj/mpi/mpiadj.h"
5 #include "petscsys.h"
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->m;
14   char              *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+a->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatView_MPIAdj"
41 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
42 {
43   PetscErrorCode ierr;
44   PetscTruth     iascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
48   if (iascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "MatDestroy_MPIAdj"
58 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->m,mat->n,a->nz);
66 #endif
67   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
68   if (a->freeaij) {
69     ierr = PetscFree(a->i);CHKERRQ(ierr);
70     ierr = PetscFree(a->j);CHKERRQ(ierr);
71     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
72   }
73   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
74   ierr = PetscFree(a);CHKERRQ(ierr);
75 
76   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
77   PetscFunctionReturn(0);
78 }
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "MatSetOption_MPIAdj"
82 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
83 {
84   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
85 
86   PetscFunctionBegin;
87   switch (op) {
88   case MAT_SYMMETRIC:
89   case MAT_STRUCTURALLY_SYMMETRIC:
90   case MAT_HERMITIAN:
91     a->symmetric = PETSC_TRUE;
92     break;
93   case MAT_NOT_SYMMETRIC:
94   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
95   case MAT_NOT_HERMITIAN:
96     a->symmetric = PETSC_FALSE;
97     break;
98   case MAT_SYMMETRY_ETERNAL:
99   case MAT_NOT_SYMMETRY_ETERNAL:
100     break;
101   default:
102     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
103     break;
104   }
105   PetscFunctionReturn(0);
106 }
107 
108 
109 /*
110      Adds diagonal pointers to sparse matrix structure.
111 */
112 
113 #undef __FUNCT__
114 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
115 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
116 {
117   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
118   PetscErrorCode ierr;
119   PetscInt       i,j,*diag,m = A->m;
120 
121   PetscFunctionBegin;
122   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&diag);CHKERRQ(ierr);
123   PetscLogObjectMemory(A,(m+1)*sizeof(PetscInt));
124   for (i=0; i<A->m; i++) {
125     for (j=a->i[i]; j<a->i[i+1]; j++) {
126       if (a->j[j] == i) {
127         diag[i] = j;
128         break;
129       }
130     }
131   }
132   a->diag = diag;
133   PetscFunctionReturn(0);
134 }
135 
136 #undef __FUNCT__
137 #define __FUNCT__ "MatGetRow_MPIAdj"
138 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
139 {
140   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
141   PetscInt   *itmp;
142 
143   PetscFunctionBegin;
144   row -= a->rstart;
145 
146   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
147 
148   *nz = a->i[row+1] - a->i[row];
149   if (v) *v = PETSC_NULL;
150   if (idx) {
151     itmp = a->j + a->i[row];
152     if (*nz) {
153       *idx = itmp;
154     }
155     else *idx = 0;
156   }
157   PetscFunctionReturn(0);
158 }
159 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
   Intentionally empty: MatGetRow_MPIAdj() hands out pointers directly
   into the stored i/j arrays, so there is nothing to free or restore.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
167 
#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
/* Adjacency matrices have no block structure; the block size is always 1. */
PetscErrorCode MatGetBlockSize_MPIAdj(Mat A,PetscInt *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
176 
177 
178 #undef __FUNCT__
179 #define __FUNCT__ "MatEqual_MPIAdj"
180 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
181 {
182   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
183   PetscErrorCode ierr;
184   PetscTruth     flag;
185 
186   PetscFunctionBegin;
187   /* If the  matrix dimensions are not equal,or no of nonzeros */
188   if ((A->m != B->m) ||(a->nz != b->nz)) {
189     flag = PETSC_FALSE;
190   }
191 
192   /* if the a->i are the same */
193   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
194 
195   /* if a->j are the same */
196   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
197 
198   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
199   PetscFunctionReturn(0);
200 }
201 
202 #undef __FUNCT__
203 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
204 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
205 {
206   PetscErrorCode ierr;
207   PetscMPIInt    size;
208   PetscInt       i;
209   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
210 
211   PetscFunctionBegin;
212   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
213   if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
214   *m    = A->m;
215   *ia   = a->i;
216   *ja   = a->j;
217   *done = PETSC_TRUE;
218   if (oshift) {
219     for (i=0; i<(*ia)[*m]; i++) {
220       (*ja)[i]++;
221     }
222     for (i=0; i<=(*m); i++) (*ia)[i]++;
223   }
224   PetscFunctionReturn(0);
225 }
226 
227 #undef __FUNCT__
228 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
229 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
230 {
231   PetscInt   i;
232   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
233 
234   PetscFunctionBegin;
235   if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
236   if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
237   if (oshift) {
238     for (i=0; i<=(*m); i++) (*ia)[i]--;
239     for (i=0; i<(*ia)[*m]; i++) {
240       (*ja)[i]--;
241     }
242   }
243   PetscFunctionReturn(0);
244 }
245 
/* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  Most slots are 0 (unsupported): the
   type stores only graph structure for ordering/partitioning, not a
   numerical matrix.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
346 
347 EXTERN_C_BEGIN
348 #undef __FUNCT__
349 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
350 PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
351 {
352   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
353   PetscErrorCode ierr;
354 #if defined(PETSC_USE_BOPT_g)
355   PetscInt       ii;
356 #endif
357 
358   PetscFunctionBegin;
359   B->preallocated = PETSC_TRUE;
360 #if defined(PETSC_USE_BOPT_g)
361   if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
362   for (ii=1; ii<B->m; ii++) {
363     if (i[ii] < 0 || i[ii] < i[ii-1]) {
364       SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
365     }
366   }
367   for (ii=0; ii<i[B->m]; ii++) {
368     if (j[ii] < 0 || j[ii] >= B->N) {
369       SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
370     }
371   }
372 #endif
373 
374   b->j      = j;
375   b->i      = i;
376   b->values = values;
377 
378   b->nz               = i[B->m];
379   b->diag             = 0;
380   b->symmetric        = PETSC_FALSE;
381   b->freeaij          = PETSC_TRUE;
382 
383   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
384   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
385   PetscFunctionReturn(0);
386 }
387 EXTERN_C_END
388 
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/
397 
398 EXTERN_C_BEGIN
399 #undef __FUNCT__
400 #define __FUNCT__ "MatCreate_MPIAdj"
401 PetscErrorCode MatCreate_MPIAdj(Mat B)
402 {
403   Mat_MPIAdj     *b;
404   PetscErrorCode ierr;
405   PetscInt       ii;
406   PetscMPIInt    size,rank;
407 
408   PetscFunctionBegin;
409   ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
410   ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);
411 
412   ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
413   B->data             = (void*)b;
414   ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
415   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
416   B->factor           = 0;
417   B->lupivotthreshold = 1.0;
418   B->mapping          = 0;
419   B->assembled        = PETSC_FALSE;
420 
421   ierr = PetscSplitOwnership(B->comm,&B->m,&B->M);CHKERRQ(ierr);
422   B->n = B->N = PetscMax(B->N,B->n);CHKERRQ(ierr);
423 
424   /* the information in the maps duplicates the information computed below, eventually
425      we should remove the duplicate information that is not contained in the maps */
426   ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
427   /* we don't know the "local columns" so just use the row information :-(*/
428   ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);
429 
430   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&b->rowners);CHKERRQ(ierr);
431   PetscLogObjectMemory(B,(size+2)*sizeof(PetscInt)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
432   ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
433   b->rowners[0] = 0;
434   for (ii=2; ii<=size; ii++) {
435     b->rowners[ii] += b->rowners[ii-1];
436   }
437   b->rstart = b->rowners[rank];
438   b->rend   = b->rowners[rank+1];
439   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
440                                     "MatMPIAdjSetPreallocation_MPIAdj",
441                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
442   PetscFunctionReturn(0);
443 }
444 EXTERN_C_END
445 
446 #undef __FUNCT__
447 #define __FUNCT__ "MatMPIAdjSetPreallocation"
448 /*@C
449    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
450 
451    Collective on MPI_Comm
452 
453    Input Parameters:
454 +  A - the matrix
455 .  i - the indices into j for the start of each row
456 .  j - the column indices for each row (sorted for each row).
457        The indices in i and j start with zero (NOT with one).
458 -  values - [optional] edge weights
459 
460    Level: intermediate
461 
462 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
463 @*/
464 PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
465 {
466   PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);
467 
468   PetscFunctionBegin;
469   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
470   if (f) {
471     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
472   }
473   PetscFunctionReturn(0);
474 }
475 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
523 
524 EXTERN_C_BEGIN
525 #undef __FUNCT__
526 #define __FUNCT__ "MatConvertTo_MPIAdj"
527 PetscErrorCode MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat)
528 {
529   Mat               B;
530   PetscErrorCode    ierr;
531   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
532   const PetscInt    *rj;
533   const PetscScalar *ra;
534   MPI_Comm          comm;
535 
536   PetscFunctionBegin;
537   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
538   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
539   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
540 
541   /* count the number of nonzeros per row */
542   for (i=0; i<m; i++) {
543     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
544     for (j=0; j<len; j++) {
545       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
546     }
547     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
548     nzeros += len;
549   }
550 
551   /* malloc space for nonzeros */
552   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
553   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
554   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
555 
556   nzeros = 0;
557   ia[0]  = 0;
558   for (i=0; i<m; i++) {
559     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
560     cnt     = 0;
561     for (j=0; j<len; j++) {
562       if (rj[j] != i+rstart) { /* if not diagonal */
563         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
564         ja[nzeros+cnt++] = rj[j];
565       }
566     }
567     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
568     nzeros += cnt;
569     ia[i+1] = nzeros;
570   }
571 
572   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
573   ierr = MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
574   ierr = MatSetType(B,type);CHKERRQ(ierr);
575   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
576 
577   /* Fake support for "inplace" convert. */
578   if (*newmat == A) {
579     ierr = MatDestroy(A);CHKERRQ(ierr);
580   }
581   *newmat = B;
582   PetscFunctionReturn(0);
583 }
584 EXTERN_C_END
585 
586 
587 
588 
589 
590