xref: /petsc/src/sys/error/err.c (revision a16fd2c93c02146fccd68469496ac02ca99b9ebe)
1 
2 /*
3       Code that allows one to set the error handlers
4 */
5 #include <petsc/private/petscimpl.h> /*I "petscsys.h" I*/
6 #include <petscviewer.h>
7 
/* Node in the LIFO stack of installed error handlers (see PetscPushErrorHandler()) */
typedef struct _EH *EH;
struct _EH {
  PetscErrorCode (*handler)(MPI_Comm, int, const char *, const char *, PetscErrorCode, PetscErrorType, const char *, void *); /* user callback invoked by PetscError() */
  void *ctx;      /* opaque context handed back to handler on each invocation */
  EH    previous; /* next-older handler; restored by PetscPopErrorHandler() */
};

/* Top of the handler stack; NULL means fall back to PetscTraceBackErrorHandler() */
static EH eh = NULL;
16 
17 /*@C
18    PetscEmacsClientErrorHandler - Error handler that uses the emacsclient program to
19     load the file where the error occurred. Then calls the "previous" error handler.
20 
21    Not Collective
22 
23    Input Parameters:
24 +  comm - communicator over which error occurred
25 .  line - the line number of the error (indicated by __LINE__)
26 .  file - the file in which the error was detected (indicated by __FILE__)
27 .  mess - an error text string, usually just printed to the screen
28 .  n - the generic error number
29 .  p - specific error number
30 -  ctx - error handler context
31 
32    Options Database Key:
33 .   -on_error_emacs <machinename> - will contact machinename to open the Emacs client there
34 
35    Level: developer
36 
37    Notes:
38    You must put (server-start) in your .emacs file for the emacsclient software to work
39 
40    Developer Note:
41    Since this is an error handler it cannot call PetscCall(); thus we just return if an error is detected.
42 
43 .seealso: `PetscError()`, `PetscPushErrorHandler()`, `PetscPopErrorHandler()`, `PetscAttachDebuggerErrorHandler()`,
44           `PetscAbortErrorHandler()`, `PetscMPIAbortErrorHandler()`, `PetscTraceBackErrorHandler()`, `PetscReturnErrorHandler()`
45  @*/
46 PetscErrorCode PetscEmacsClientErrorHandler(MPI_Comm comm, int line, const char *fun, const char *file, PetscErrorCode n, PetscErrorType p, const char *mess, void *ctx) {
47   PetscErrorCode ierr;
48   char           command[PETSC_MAX_PATH_LEN];
49   const char    *pdir;
50   FILE          *fp;
51 
52   PetscFunctionBegin;
53   ierr = PetscGetPetscDir(&pdir);
54   if (ierr) PetscFunctionReturn(ierr);
55   sprintf(command, "cd %s; emacsclient --no-wait +%d %s\n", pdir, line, file);
56 #if defined(PETSC_HAVE_POPEN)
57   ierr = PetscPOpen(MPI_COMM_WORLD, (char *)ctx, command, "r", &fp);
58   if (ierr) PetscFunctionReturn(ierr);
59   ierr = PetscPClose(MPI_COMM_WORLD, fp);
60   if (ierr) PetscFunctionReturn(ierr);
61 #else
62   SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP_SYS, "Cannot run external programs on this machine");
63 #endif
64   ierr = PetscPopErrorHandler();
65   if (ierr) PetscFunctionReturn(ierr); /* remove this handler from the stack of handlers */
66   if (!eh) {
67     ierr = PetscTraceBackErrorHandler(comm, line, fun, file, n, p, mess, NULL);
68     if (ierr) PetscFunctionReturn(ierr);
69   } else {
70     ierr = (*eh->handler)(comm, line, fun, file, n, p, mess, eh->ctx);
71     if (ierr) PetscFunctionReturn(ierr);
72   }
73   PetscFunctionReturn(ierr);
74 }
75 
76 /*@C
77    PetscPushErrorHandler - Sets a routine to be called on detection of errors.
78 
79    Not Collective
80 
81    Input Parameters:
82 +  handler - error handler routine
83 -  ctx - optional handler context that contains information needed by the handler (for
84          example file pointers for error messages etc.)
85 
86    Calling sequence of handler:
87 $    int handler(MPI_Comm comm,int line,char *func,char *file,PetscErrorCode n,int p,char *mess,void *ctx);
88 
89 +  comm - communicator over which error occurred
90 .  line - the line number of the error (indicated by __LINE__)
91 .  file - the file in which the error was detected (indicated by __FILE__)
92 .  n - the generic error number (see list defined in include/petscerror.h)
93 .  p - PETSC_ERROR_INITIAL if error just detected, otherwise PETSC_ERROR_REPEAT
94 .  mess - an error text string, usually just printed to the screen
95 -  ctx - the error handler context
96 
97    Options Database Keys:
98 +   -on_error_attach_debugger <noxterm,gdb or dbx> - starts up the debugger if an error occurs
99 -   -on_error_abort - aborts the program if an error occurs
100 
101    Level: intermediate
102 
103    Notes:
104    The currently available PETSc error handlers include PetscTraceBackErrorHandler(),
105    PetscAttachDebuggerErrorHandler(), PetscAbortErrorHandler(), and PetscMPIAbortErrorHandler(), PetscReturnErrorHandler().
106 
107    Fortran Notes:
108     You can only push one error handler from Fortran before poping it.
109 
110 .seealso: `PetscPopErrorHandler()`, `PetscAttachDebuggerErrorHandler()`, `PetscAbortErrorHandler()`, `PetscTraceBackErrorHandler()`, `PetscPushSignalHandler()`
111 
112 @*/
113 PetscErrorCode PetscPushErrorHandler(PetscErrorCode (*handler)(MPI_Comm comm, int, const char *, const char *, PetscErrorCode, PetscErrorType, const char *, void *), void *ctx) {
114   EH neweh;
115 
116   PetscFunctionBegin;
117   PetscCall(PetscNew(&neweh));
118   if (eh) neweh->previous = eh;
119   else neweh->previous = NULL;
120   neweh->handler = handler;
121   neweh->ctx     = ctx;
122   eh             = neweh;
123   PetscFunctionReturn(0);
124 }
125 
126 /*@
127    PetscPopErrorHandler - Removes the latest error handler that was
128    pushed with PetscPushErrorHandler().
129 
130    Not Collective
131 
132    Level: intermediate
133 
134 .seealso: `PetscPushErrorHandler()`
135 @*/
136 PetscErrorCode PetscPopErrorHandler(void) {
137   EH tmp;
138 
139   PetscFunctionBegin;
140   if (!eh) PetscFunctionReturn(0);
141   tmp = eh;
142   eh  = eh->previous;
143   PetscCall(PetscFree(tmp));
144   PetscFunctionReturn(0);
145 }
146 
147 /*@C
148   PetscReturnErrorHandler - Error handler that causes a return without printing an error message.
149 
150    Not Collective
151 
152    Input Parameters:
153 +  comm - communicator over which error occurred
154 .  line - the line number of the error (indicated by __LINE__)
155 .  file - the file in which the error was detected (indicated by __FILE__)
156 .  mess - an error text string, usually just printed to the screen
157 .  n - the generic error number
158 .  p - specific error number
159 -  ctx - error handler context
160 
161    Level: developer
162 
163    Notes:
164    Most users need not directly employ this routine and the other error
165    handlers, but can instead use the simplified interface SETERRQ, which has
166    the calling sequence
167 $     SETERRQ(comm,number,mess)
168 
169    PetscIgnoreErrorHandler() does the same thing as this function, but is deprecated, you should use this function.
170 
171    Use PetscPushErrorHandler() to set the desired error handler.
172 
173 .seealso: `PetscPushErrorHandler()`, `PetscPopErrorHandler()`, `PetscError()`, `PetscAbortErrorHandler()`, `PetscMPIAbortErrorHandler()`, `PetscTraceBackErrorHandler()`,
174           `PetscAttachDebuggerErrorHandler()`, `PetscEmacsClientErrorHandler()`
175  @*/
176 PetscErrorCode PetscReturnErrorHandler(MPI_Comm comm, int line, const char *fun, const char *file, PetscErrorCode n, PetscErrorType p, const char *mess, void *ctx) {
177   return n;
178 }
179 
/* Most recent "specific" error message, captured by PetscError() when p == PETSC_ERROR_INITIAL
   and handed back to callers of PetscErrorMessage() */
static char        PetscErrorBaseMessage[1024];
/*
       The numerical values for these are defined in include/petscerror.h; any changes
   there must also be made here

   Indexed in PetscErrorMessage() as errnum - PETSC_ERR_MIN_VALUE - 1; empty strings
   are placeholders for codes with no message (PetscErrorMessage() maps them to NULL).
*/
static const char *PetscErrorStrings[] = {
  /*55 */ "Out of memory",
  "No support for this operation for this object type",
  "No support for this operation on this system",
  /*58 */ "Operation done in wrong order",
  /*59 */ "Signal received",
  /*60 */ "Nonconforming object sizes",
  "Argument aliasing not permitted",
  "Invalid argument",
  /*63 */ "Argument out of range",
  "Corrupt argument: https://petsc.org/release/faq/#valgrind",
  "Unable to open file",
  "Read from file failed",
  "Write to file failed",
  "Invalid pointer",
  /*69 */ "Arguments must have same type",
  /*70 */ "Attempt to use a pointer that does not point to a valid accessible location",
  /*71 */ "Zero pivot in LU factorization: https://petsc.org/release/faq/#zeropivot",
  /*72 */ "Floating point exception",
  /*73 */ "Object is in wrong state",
  "Corrupted Petsc object",
  "Arguments are incompatible",
  "Error in external library",
  /*77 */ "Petsc has generated inconsistent data",
  "Memory corruption: https://petsc.org/release/faq/#valgrind",
  "Unexpected data in file",
  /*80 */ "Arguments must have same communicators",
  /*81 */ "Zero pivot in Cholesky factorization: https://petsc.org/release/faq/#zeropivot",
  "",
  "",
  "Overflow in integer operation: https://petsc.org/release/faq/#64-bit-indices",
  /*85 */ "Null argument, when expecting valid pointer",
  /*86 */ "Unknown type. Check for miss-spelling or missing package: https://petsc.org/release/install/install/#external-packages",
  /*87 */ "MPI library at runtime is not compatible with MPI used at compile time",
  /*88 */ "Error in system call",
  /*89 */ "Object Type not set: https://petsc.org/release/faq/#object-type-not-set",
  /*90 */ "",
  /*   */ "",
  /*92 */ "See https://petsc.org/release/overview/linear_solve_table/ for possible LU and Cholesky solvers",
  /*93 */ "You cannot overwrite this option since that will conflict with other previously set options",
  /*94 */ "Example/application run with number of MPI ranks it does not support",
  /*95 */ "Missing or incorrect user input",
  /*96 */ "GPU resources unavailable",
  /*97 */ "GPU error",
  /*98 */ "General MPI error"};
230 
231 /*@C
232    PetscErrorMessage - returns the text string associated with a PETSc error code.
233 
234    Not Collective
235 
236    Input Parameter:
237 .   errnum - the error code
238 
239    Output Parameters:
240 +  text - the error message (NULL if not desired)
241 -  specific - the specific error message that was set with SETERRxxx() or PetscError().  (NULL if not desired)
242 
243    Level: developer
244 
245 .seealso: `PetscPushErrorHandler()`, `PetscAttachDebuggerErrorHandler()`, `PetscError()`, `SETERRQ()`, `PetscCall()`
246           `PetscAbortErrorHandler()`, `PetscTraceBackErrorHandler()`
247  @*/
248 PetscErrorCode PetscErrorMessage(int errnum, const char *text[], char **specific) {
249   size_t len;
250 
251   PetscFunctionBegin;
252   if (text && errnum > PETSC_ERR_MIN_VALUE && errnum < PETSC_ERR_MAX_VALUE) {
253     *text = PetscErrorStrings[errnum - PETSC_ERR_MIN_VALUE - 1];
254     PetscCall(PetscStrlen(*text, &len));
255     if (!len) *text = NULL;
256   } else if (text) *text = NULL;
257 
258   if (specific) *specific = PetscErrorBaseMessage;
259   PetscFunctionReturn(0);
260 }
261 
#if defined(PETSC_CLANGUAGE_CXX)
/* C++ exceptions are formally not allowed to propagate through extern "C" code. In practice, far too much software
 * would be broken if implementations did not handle it it some common cases. However, keep in mind
 *
 *   Rule 62. Don't allow exceptions to propagate across module boundaries
 *
 * in "C++ Coding Standards" by Sutter and Alexandrescu. (This accounts for part of the ongoing C++ binary interface
 * instability.) Having PETSc raise errors as C++ exceptions was probably misguided and should eventually be removed.
 *
 * Here is the problem: You have a C++ function call a PETSc function, and you would like to maintain the error message
 * and stack information from the PETSc error. You could make everyone write exactly this code in their C++, but that
 * seems crazy to me.
 */
#include <sstream>
#include <stdexcept>
/* Raise the pending PETSc error as a std::runtime_error. If the active handler carries an
 * ostringstream context (as the C++ support code installs) its contents become the message.
 *
 * Note: the previous implementation saved msg->str().c_str() in a const char *; the
 * std::string returned by str() is a temporary destroyed at the end of that statement,
 * so the pointer dangled before the throw. Construct the exception from the string directly. */
static void PetscCxxErrorThrow() {
  if (eh && eh->ctx) {
    std::ostringstream *msg = (std::ostringstream *)eh->ctx;
    throw std::runtime_error(msg->str());
  }
  throw std::runtime_error("Error detected in C PETSc");
}
#endif
288 
/*@C
   PetscError - Routine that is called when an error has been detected, usually called through the macro SETERRQ(PETSC_COMM_SELF,).

  Collective on comm

   Input Parameters:
+  comm - communicator over which error occurred.  ALL ranks of this communicator MUST call this routine
.  line - the line number of the error (indicated by __LINE__)
.  func - the function name in which the error was detected
.  file - the file in which the error was detected (indicated by __FILE__)
.  n - the generic error number
.  p - PETSC_ERROR_INITIAL indicates the error was initially detected, PETSC_ERROR_REPEAT indicates this is a traceback from a previously detected error
-  mess - formatted message string - aka printf

  Options Database:
+  -error_output_stdout - output the error messages to stdout instead of the default stderr
-  -error_output_none - do not output the error messages

  Level: intermediate

   Notes:
   PETSc error handling is done with error return codes. A non-zero return indicates an error was detected. Errors are generally not something that the code
   can recover from. Note that numerical errors (potential divide by zero, for example) are not managed by the error return codes; they are managed via, for example,
   KSPGetConvergedReason() that indicates if the solve was successful or not. The option -ksp_error_if_not_converged, for example, turns numerical failures into
   hard errors managed via PetscError().

   PETSc provides a rich supply of error handlers, see the list below, and users can also provide their own error handlers.

   Most users need not directly use this routine and the error handlers, but
   can instead use the simplified interface SETERRQ, which has the calling
   sequence
$     SETERRQ(comm,n,mess)

   Fortran Note:
   This routine is used differently from Fortran
$    PetscError(MPI_Comm comm,PetscErrorCode n,PetscErrorType p,char *message)

   Set the error handler with PetscPushErrorHandler().

   Developer Note: Since this is called after an error condition it should not be calling any error handlers (currently it ignores any error codes)
   BUT this routine does call regular PETSc functions that may call error handlers, this is problematic and could be fixed by never calling other PETSc routines
   but this annoying.

.seealso: `PetscErrorCode`, `PetscPushErrorHandler()`, `PetscPopErrorHandler()`, `PetscTraceBackErrorHandler()`, `PetscAbortErrorHandler()`, `PetscMPIAbortErrorHandler()`,
          `PetscReturnErrorHandler()`, `PetscAttachDebuggerErrorHandler()`, `PetscEmacsClientErrorHandler()`,
          `SETERRQ()`, `PetscCall()`, `CHKMEMQ`, `SETERRQ()`, `SETERRQ()`, `PetscErrorMessage()`, `PETSCABORT()`
@*/
PetscErrorCode PetscError(MPI_Comm comm, int line, const char *func, const char *file, PetscErrorCode n, PetscErrorType p, const char *mess, ...) {
  va_list        Argp;
  size_t         fullLength;
  char           buf[2048], *lbuf = NULL;
  PetscBool      ismain;
  PetscErrorCode ierr;

  /* deliberately no PetscFunctionBegin: we are already unwinding an error */
  if (!PetscErrorHandlingInitialized) return n;
  if (comm == MPI_COMM_NULL) comm = PETSC_COMM_SELF;

  /* Compose the message evaluating the print format */
  if (mess) {
    va_start(Argp, mess);
    PetscVSNPrintf(buf, 2048, mess, &fullLength, Argp);
    va_end(Argp);
    lbuf = buf;
    /* record the first (initial) message so PetscErrorMessage() can report it later */
    if (p == PETSC_ERROR_INITIAL) PetscStrncpy(PetscErrorBaseMessage, lbuf, 1023);
  }

  /* on the initial detection, audit the heap (skipped for PETSC_ERR_MEMC, where the heap is already known bad) */
  if (p == PETSC_ERROR_INITIAL && n != PETSC_ERR_MEMC) PetscMallocValidate(__LINE__, PETSC_FUNCTION_NAME, __FILE__);

  /* dispatch to the installed handler, or the default traceback handler when none is pushed */
  if (!eh) ierr = PetscTraceBackErrorHandler(comm, line, func, file, n, p, lbuf, NULL);
  else ierr = (*eh->handler)(comm, line, func, file, n, p, lbuf, eh->ctx);
  PetscStackClearTop;

  /*
      If this is called from the main() routine we call MPI_Abort() instead of
    return to allow the parallel program to be properly shutdown.

    Does not call PETSCABORT() since that would provide the wrong source file and line number information
  */
  if (func) {
    /* NOTE(review): a 4-character prefix compare also matches names like "mainloop" — confirm intended */
    PetscStrncmp(func, "main", 4, &ismain);
    if (ismain) {
      if (petscwaitonerrorflg) PetscSleep(1000);
      PETSCABORT(comm, ierr);
    }
  }
#if defined(PETSC_CLANGUAGE_CXX)
  if (p == PETSC_ERROR_IN_CXX) PetscCxxErrorThrow();
#endif
  return ierr;
}
379 
380 /* -------------------------------------------------------------------------*/
381 
/*@C
    PetscIntView - Prints an array of integers; useful for debugging.

    Collective on PetscViewer

    Input Parameters:
+   N - number of integers in array
.   idx - array of integers
-   viewer - location to print array,  PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_STDOUT_SELF or 0

  Level: intermediate

    Developer Notes:
    idx cannot be const because may be passed to binary viewer where byte swapping is done

.seealso: `PetscRealView()`
@*/
PetscErrorCode PetscIntView(PetscInt N, const PetscInt idx[], PetscViewer viewer) {
  PetscMPIInt rank, size;
  PetscInt    j, i, n = N / 20, p = N % 20; /* n full rows of 20 entries, p leftover entries */
  PetscBool   iascii, isbinary;
  MPI_Comm    comm;

  PetscFunctionBegin;
  if (!viewer) viewer = PETSC_VIEWER_STDOUT_SELF;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 3);
  if (N) PetscValidIntPointer(idx, 2); /* idx may be NULL when there is nothing to print */
  PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  if (iascii) {
    /* 20 values per output line, each line prefixed with its starting index (and rank when parallel) */
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < n; i++) {
      if (size > 1) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT ":", rank, 20 * i));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%" PetscInt_FMT ":", 20 * i));
      }
      for (j = 0; j < 20; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT, idx[i * 20 + j]));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
    }
    if (p) { /* final partial row */
      if (size > 1) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT ":", rank, 20 * n));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%" PetscInt_FMT ":", 20 * n));
      }
      for (i = 0; i < p; i++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT, idx[20 * n + i]));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  } else if (isbinary) {
    PetscMPIInt *sizes, Ntotal, *displs, NN;
    PetscInt    *array;

    PetscCall(PetscMPIIntCast(N, &NN));

    if (size > 1) {
      /* gather all ranks' data onto rank 0, which writes the concatenated array */
      if (rank) {
        PetscCallMPI(MPI_Gather(&NN, 1, MPI_INT, NULL, 0, MPI_INT, 0, comm));
        PetscCallMPI(MPI_Gatherv((void *)idx, NN, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, comm));
      } else {
        PetscCall(PetscMalloc1(size, &sizes));
        PetscCallMPI(MPI_Gather(&NN, 1, MPI_INT, sizes, 1, MPI_INT, 0, comm));
        Ntotal = sizes[0];
        PetscCall(PetscMalloc1(size, &displs));
        displs[0] = 0;
        for (i = 1; i < size; i++) {
          Ntotal += sizes[i];
          displs[i] = displs[i - 1] + sizes[i - 1];
        }
        PetscCall(PetscMalloc1(Ntotal, &array));
        PetscCallMPI(MPI_Gatherv((void *)idx, NN, MPIU_INT, array, sizes, displs, MPIU_INT, 0, comm));
        PetscCall(PetscViewerBinaryWrite(viewer, array, Ntotal, PETSC_INT));
        PetscCall(PetscFree(sizes));
        PetscCall(PetscFree(displs));
        PetscCall(PetscFree(array));
      }
    } else {
      PetscCall(PetscViewerBinaryWrite(viewer, idx, N, PETSC_INT));
    }
  } else {
    const char *tname;
    PetscCall(PetscObjectGetName((PetscObject)viewer, &tname));
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot handle that PetscViewer of type %s", tname);
  }
  PetscFunctionReturn(0);
}
474 
475 /*@C
476     PetscRealView - Prints an array of doubles; useful for debugging.
477 
478     Collective on PetscViewer
479 
480     Input Parameters:
481 +   N - number of PetscReal in array
482 .   idx - array of PetscReal
483 -   viewer - location to print array,  PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_STDOUT_SELF or 0
484 
485   Level: intermediate
486 
487     Developer Notes:
488     idx cannot be const because may be passed to binary viewer where byte swapping is done
489 
490 .seealso: `PetscIntView()`
491 @*/
492 PetscErrorCode PetscRealView(PetscInt N, const PetscReal idx[], PetscViewer viewer) {
493   PetscMPIInt rank, size;
494   PetscInt    j, i, n = N / 5, p = N % 5;
495   PetscBool   iascii, isbinary;
496   MPI_Comm    comm;
497 
498   PetscFunctionBegin;
499   if (!viewer) viewer = PETSC_VIEWER_STDOUT_SELF;
500   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 3);
501   PetscValidRealPointer(idx, 2);
502   PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
503   PetscCallMPI(MPI_Comm_size(comm, &size));
504   PetscCallMPI(MPI_Comm_rank(comm, &rank));
505 
506   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
507   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
508   if (iascii) {
509     PetscInt tab;
510 
511     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
512     PetscCall(PetscViewerASCIIGetTab(viewer, &tab));
513     for (i = 0; i < n; i++) {
514       PetscCall(PetscViewerASCIISetTab(viewer, tab));
515       if (size > 1) {
516         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %2" PetscInt_FMT ":", rank, 5 * i));
517       } else {
518         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%2" PetscInt_FMT ":", 5 * i));
519       }
520       PetscCall(PetscViewerASCIISetTab(viewer, 0));
521       for (j = 0; j < 5; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %12.4e", (double)idx[i * 5 + j]));
522       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
523     }
524     if (p) {
525       PetscCall(PetscViewerASCIISetTab(viewer, tab));
526       if (size > 1) {
527         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %2" PetscInt_FMT ":", rank, 5 * n));
528       } else {
529         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%2" PetscInt_FMT ":", 5 * n));
530       }
531       PetscCall(PetscViewerASCIISetTab(viewer, 0));
532       for (i = 0; i < p; i++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %12.4e", (double)idx[5 * n + i]));
533       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
534     }
535     PetscCall(PetscViewerFlush(viewer));
536     PetscCall(PetscViewerASCIISetTab(viewer, tab));
537     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
538   } else if (isbinary) {
539     PetscMPIInt *sizes, *displs, Ntotal, NN;
540     PetscReal   *array;
541 
542     PetscCall(PetscMPIIntCast(N, &NN));
543 
544     if (size > 1) {
545       if (rank) {
546         PetscCallMPI(MPI_Gather(&NN, 1, MPI_INT, NULL, 0, MPI_INT, 0, comm));
547         PetscCallMPI(MPI_Gatherv((PetscReal *)idx, NN, MPIU_REAL, NULL, NULL, NULL, MPIU_REAL, 0, comm));
548       } else {
549         PetscCall(PetscMalloc1(size, &sizes));
550         PetscCallMPI(MPI_Gather(&NN, 1, MPI_INT, sizes, 1, MPI_INT, 0, comm));
551         Ntotal = sizes[0];
552         PetscCall(PetscMalloc1(size, &displs));
553         displs[0] = 0;
554         for (i = 1; i < size; i++) {
555           Ntotal += sizes[i];
556           displs[i] = displs[i - 1] + sizes[i - 1];
557         }
558         PetscCall(PetscMalloc1(Ntotal, &array));
559         PetscCallMPI(MPI_Gatherv((PetscReal *)idx, NN, MPIU_REAL, array, sizes, displs, MPIU_REAL, 0, comm));
560         PetscCall(PetscViewerBinaryWrite(viewer, array, Ntotal, PETSC_REAL));
561         PetscCall(PetscFree(sizes));
562         PetscCall(PetscFree(displs));
563         PetscCall(PetscFree(array));
564       }
565     } else {
566       PetscCall(PetscViewerBinaryWrite(viewer, (void *)idx, N, PETSC_REAL));
567     }
568   } else {
569     const char *tname;
570     PetscCall(PetscObjectGetName((PetscObject)viewer, &tname));
571     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot handle that PetscViewer of type %s", tname);
572   }
573   PetscFunctionReturn(0);
574 }
575 
576 /*@C
577     PetscScalarView - Prints an array of scalars; useful for debugging.
578 
579     Collective on PetscViewer
580 
581     Input Parameters:
582 +   N - number of scalars in array
583 .   idx - array of scalars
584 -   viewer - location to print array,  PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_STDOUT_SELF or 0
585 
586   Level: intermediate
587 
588     Developer Notes:
589     idx cannot be const because may be passed to binary viewer where byte swapping is done
590 
591 .seealso: `PetscIntView()`, `PetscRealView()`
592 @*/
593 PetscErrorCode PetscScalarView(PetscInt N, const PetscScalar idx[], PetscViewer viewer) {
594   PetscMPIInt rank, size;
595   PetscInt    j, i, n = N / 3, p = N % 3;
596   PetscBool   iascii, isbinary;
597   MPI_Comm    comm;
598 
599   PetscFunctionBegin;
600   if (!viewer) viewer = PETSC_VIEWER_STDOUT_SELF;
601   PetscValidHeader(viewer, 3);
602   if (N) PetscValidScalarPointer(idx, 2);
603   PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
604   PetscCallMPI(MPI_Comm_size(comm, &size));
605   PetscCallMPI(MPI_Comm_rank(comm, &rank));
606 
607   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
608   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
609   if (iascii) {
610     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
611     for (i = 0; i < n; i++) {
612       if (size > 1) {
613         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %2" PetscInt_FMT ":", rank, 3 * i));
614       } else {
615         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%2" PetscInt_FMT ":", 3 * i));
616       }
617       for (j = 0; j < 3; j++) {
618 #if defined(PETSC_USE_COMPLEX)
619         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%12.4e,%12.4e)", (double)PetscRealPart(idx[i * 3 + j]), (double)PetscImaginaryPart(idx[i * 3 + j])));
620 #else
621         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %12.4e", (double)idx[i * 3 + j]));
622 #endif
623       }
624       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
625     }
626     if (p) {
627       if (size > 1) {
628         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %2" PetscInt_FMT ":", rank, 3 * n));
629       } else {
630         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%2" PetscInt_FMT ":", 3 * n));
631       }
632       for (i = 0; i < p; i++) {
633 #if defined(PETSC_USE_COMPLEX)
634         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%12.4e,%12.4e)", (double)PetscRealPart(idx[n * 3 + i]), (double)PetscImaginaryPart(idx[n * 3 + i])));
635 #else
636         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %12.4e", (double)idx[3 * n + i]));
637 #endif
638       }
639       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
640     }
641     PetscCall(PetscViewerFlush(viewer));
642     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
643   } else if (isbinary) {
644     PetscMPIInt *sizes, Ntotal, *displs, NN;
645     PetscScalar *array;
646 
647     PetscCall(PetscMPIIntCast(N, &NN));
648 
649     if (size > 1) {
650       if (rank) {
651         PetscCallMPI(MPI_Gather(&NN, 1, MPI_INT, NULL, 0, MPI_INT, 0, comm));
652         PetscCallMPI(MPI_Gatherv((void *)idx, NN, MPIU_SCALAR, NULL, NULL, NULL, MPIU_SCALAR, 0, comm));
653       } else {
654         PetscCall(PetscMalloc1(size, &sizes));
655         PetscCallMPI(MPI_Gather(&NN, 1, MPI_INT, sizes, 1, MPI_INT, 0, comm));
656         Ntotal = sizes[0];
657         PetscCall(PetscMalloc1(size, &displs));
658         displs[0] = 0;
659         for (i = 1; i < size; i++) {
660           Ntotal += sizes[i];
661           displs[i] = displs[i - 1] + sizes[i - 1];
662         }
663         PetscCall(PetscMalloc1(Ntotal, &array));
664         PetscCallMPI(MPI_Gatherv((void *)idx, NN, MPIU_SCALAR, array, sizes, displs, MPIU_SCALAR, 0, comm));
665         PetscCall(PetscViewerBinaryWrite(viewer, array, Ntotal, PETSC_SCALAR));
666         PetscCall(PetscFree(sizes));
667         PetscCall(PetscFree(displs));
668         PetscCall(PetscFree(array));
669       }
670     } else {
671       PetscCall(PetscViewerBinaryWrite(viewer, (void *)idx, N, PETSC_SCALAR));
672     }
673   } else {
674     const char *tname;
675     PetscCall(PetscObjectGetName((PetscObject)viewer, &tname));
676     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot handle that PetscViewer of type %s", tname);
677   }
678   PetscFunctionReturn(0);
679 }
680 
#if defined(PETSC_HAVE_CUDA)
#include <petscdevice.h>
/* Map a cublasStatus_t to its symbolic name for error reporting; never fails */
PETSC_EXTERN const char *PetscCUBLASGetErrorName(cublasStatus_t status) {
  switch (status) {
#if (CUDART_VERSION >= 8000) /* At least CUDA 8.0 of Sep. 2016 had these */
  case CUBLAS_STATUS_SUCCESS: return "CUBLAS_STATUS_SUCCESS";
  case CUBLAS_STATUS_NOT_INITIALIZED: return "CUBLAS_STATUS_NOT_INITIALIZED";
  case CUBLAS_STATUS_ALLOC_FAILED: return "CUBLAS_STATUS_ALLOC_FAILED";
  case CUBLAS_STATUS_INVALID_VALUE: return "CUBLAS_STATUS_INVALID_VALUE";
  case CUBLAS_STATUS_ARCH_MISMATCH: return "CUBLAS_STATUS_ARCH_MISMATCH";
  case CUBLAS_STATUS_MAPPING_ERROR: return "CUBLAS_STATUS_MAPPING_ERROR";
  case CUBLAS_STATUS_EXECUTION_FAILED: return "CUBLAS_STATUS_EXECUTION_FAILED";
  case CUBLAS_STATUS_INTERNAL_ERROR: return "CUBLAS_STATUS_INTERNAL_ERROR";
  case CUBLAS_STATUS_NOT_SUPPORTED: return "CUBLAS_STATUS_NOT_SUPPORTED";
  case CUBLAS_STATUS_LICENSE_ERROR: return "CUBLAS_STATUS_LICENSE_ERROR";
#endif
  default: return "unknown error";
  }
}
/* Map a cusolverStatus_t to its symbolic name for error reporting; never fails */
PETSC_EXTERN const char *PetscCUSolverGetErrorName(cusolverStatus_t status) {
  switch (status) {
#if (CUDART_VERSION >= 8000) /* At least CUDA 8.0 of Sep. 2016 had these */
  case CUSOLVER_STATUS_SUCCESS: return "CUSOLVER_STATUS_SUCCESS";
  case CUSOLVER_STATUS_NOT_INITIALIZED: return "CUSOLVER_STATUS_NOT_INITIALIZED";
  case CUSOLVER_STATUS_INVALID_VALUE: return "CUSOLVER_STATUS_INVALID_VALUE";
  case CUSOLVER_STATUS_ARCH_MISMATCH: return "CUSOLVER_STATUS_ARCH_MISMATCH";
  case CUSOLVER_STATUS_INTERNAL_ERROR: return "CUSOLVER_STATUS_INTERNAL_ERROR";
#if (CUDART_VERSION >= 9000) /* CUDA 9.0 had these defined on June 2021 */
  case CUSOLVER_STATUS_ALLOC_FAILED: return "CUSOLVER_STATUS_ALLOC_FAILED";
  case CUSOLVER_STATUS_MAPPING_ERROR: return "CUSOLVER_STATUS_MAPPING_ERROR";
  case CUSOLVER_STATUS_EXECUTION_FAILED: return "CUSOLVER_STATUS_EXECUTION_FAILED";
  case CUSOLVER_STATUS_MATRIX_TYPE_NOT_SUPPORTED: return "CUSOLVER_STATUS_MATRIX_TYPE_NOT_SUPPORTED";
  case CUSOLVER_STATUS_NOT_SUPPORTED: return "CUSOLVER_STATUS_NOT_SUPPORTED"; /* fixed: string had a stray trailing space */
  case CUSOLVER_STATUS_ZERO_PIVOT: return "CUSOLVER_STATUS_ZERO_PIVOT";
  case CUSOLVER_STATUS_INVALID_LICENSE: return "CUSOLVER_STATUS_INVALID_LICENSE";
#endif
#endif
  default: return "unknown error";
  }
}
/* Map a cufftResult to its symbolic name for error reporting; never fails */
PETSC_EXTERN const char *PetscCUFFTGetErrorName(cufftResult result) {
  switch (result) {
  case CUFFT_SUCCESS: return "CUFFT_SUCCESS";
  case CUFFT_INVALID_PLAN: return "CUFFT_INVALID_PLAN";
  case CUFFT_ALLOC_FAILED: return "CUFFT_ALLOC_FAILED";
  case CUFFT_INVALID_TYPE: return "CUFFT_INVALID_TYPE";
  case CUFFT_INVALID_VALUE: return "CUFFT_INVALID_VALUE";
  case CUFFT_INTERNAL_ERROR: return "CUFFT_INTERNAL_ERROR";
  case CUFFT_EXEC_FAILED: return "CUFFT_EXEC_FAILED";
  case CUFFT_SETUP_FAILED: return "CUFFT_SETUP_FAILED";
  case CUFFT_INVALID_SIZE: return "CUFFT_INVALID_SIZE";
  case CUFFT_UNALIGNED_DATA: return "CUFFT_UNALIGNED_DATA";
  case CUFFT_INCOMPLETE_PARAMETER_LIST: return "CUFFT_INCOMPLETE_PARAMETER_LIST";
  case CUFFT_INVALID_DEVICE: return "CUFFT_INVALID_DEVICE";
  case CUFFT_PARSE_ERROR: return "CUFFT_PARSE_ERROR";
  case CUFFT_NO_WORKSPACE: return "CUFFT_NO_WORKSPACE";
  case CUFFT_NOT_IMPLEMENTED: return "CUFFT_NOT_IMPLEMENTED";
  case CUFFT_LICENSE_ERROR: return "CUFFT_LICENSE_ERROR";
  case CUFFT_NOT_SUPPORTED: return "CUFFT_NOT_SUPPORTED";
  default: return "unknown error";
  }
}
#endif
744 
#if defined(PETSC_HAVE_HIP)
#include <petscdevice.h>
/* Map a hipblasStatus_t to its symbolic name for error reporting; never fails */
PETSC_EXTERN const char *PetscHIPBLASGetErrorName(hipblasStatus_t status) {
  switch (status) {
  case HIPBLAS_STATUS_SUCCESS: return "HIPBLAS_STATUS_SUCCESS";
  case HIPBLAS_STATUS_NOT_INITIALIZED: return "HIPBLAS_STATUS_NOT_INITIALIZED";
  case HIPBLAS_STATUS_ALLOC_FAILED: return "HIPBLAS_STATUS_ALLOC_FAILED";
  case HIPBLAS_STATUS_INVALID_VALUE: return "HIPBLAS_STATUS_INVALID_VALUE";
  case HIPBLAS_STATUS_ARCH_MISMATCH: return "HIPBLAS_STATUS_ARCH_MISMATCH";
  case HIPBLAS_STATUS_MAPPING_ERROR: return "HIPBLAS_STATUS_MAPPING_ERROR";
  case HIPBLAS_STATUS_EXECUTION_FAILED: return "HIPBLAS_STATUS_EXECUTION_FAILED";
  case HIPBLAS_STATUS_INTERNAL_ERROR: return "HIPBLAS_STATUS_INTERNAL_ERROR";
  case HIPBLAS_STATUS_NOT_SUPPORTED: return "HIPBLAS_STATUS_NOT_SUPPORTED";
  default: return "unknown error";
  }
}
#endif
762 
763 /*@
764       PetscMPIErrorString - Given an MPI error code returns the MPI_Error_string() appropriately
765            formatted for displaying with the PETSc error handlers.
766 
767  Input Parameter:
768 .  err - the MPI error code
769 
770  Output Parameter:
771 .  string - the MPI error message, should declare its length to be larger than MPI_MAX_ERROR_STRING
772 
773    Level: developer
774 
775  Notes:
776     Does not return an error code or do error handling because it may be called from inside an error handler
777 
778 @*/
779 void PetscMPIErrorString(PetscMPIInt err, char *string) {
780   char        errorstring[MPI_MAX_ERROR_STRING];
781   PetscMPIInt len, j = 0;
782 
783   MPI_Error_string(err, (char *)errorstring, &len);
784   for (PetscMPIInt i = 0; i < len; i++) {
785     string[j++] = errorstring[i];
786     if (errorstring[i] == '\n') {
787       for (PetscMPIInt k = 0; k < 16; k++) string[j++] = ' ';
788     }
789   }
790   string[j] = 0;
791 }
792