xref: /petsc/src/sys/objects/ftn-custom/zstart.c (revision 00d931fe9835bef04c3bcd2a9a1bf118d64cc4c2)
1 /*
2   This file contains Fortran stubs for PetscInitialize and Finalize.
3 */
4 
5 /*
6     This is to prevent the Cray T3D version of MPI (University of Edinburgh)
7   from stupidly redefining MPI_INIT(). They put this in to detect errors
  in C code, but here I do want to be calling the Fortran version from a
9   C subroutine.
10 */
11 #define T3DMPI_FORTRAN
12 #define T3EMPI_FORTRAN
13 
14 #include <petsc/private/fortranimpl.h>
15 
16 #if defined(PETSC_HAVE_CUDA)
17 #include <cublas.h>
18 #endif
19 
20 #if defined(PETSC_HAVE_FORTRAN_CAPS)
21 #define petscinitialize_              PETSCINITIALIZE
22 #define petscfinalize_                PETSCFINALIZE
23 #define petscend_                     PETSCEND
24 #define iargc_                        IARGC
25 #define getarg_                       GETARG
26 #define mpi_init_                     MPI_INIT
27 #define petscgetcommoncomm_           PETSCGETCOMMONCOMM
28 #define petsccommandargumentcount_    PETSCCOMMANDARGUMENTCOUNT
29 #define petscgetcommandargument_      PETSCGETCOMMANDARGUMENT
30 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
31 #define petscinitialize_              petscinitialize
32 #define petscfinalize_                petscfinalize
33 #define petscend_                     petscend
34 #define mpi_init_                     mpi_init
35 #define iargc_                        iargc
36 #define getarg_                       getarg
37 #define petscgetcommoncomm_           petscgetcommoncomm
38 #define petsccommandargumentcount_    petsccommandargumentcount
39 #define petscgetcommandargument_      petscgetcommandargument
40 #endif
41 
42 #if defined(PETSC_HAVE_NAGF90)
43 #undef iargc_
44 #undef getarg_
45 #define iargc_  f90_unix_MP_iargc
46 #define getarg_ f90_unix_MP_getarg
47 #endif
48 #if defined(PETSC_USE_NARGS) /* Digital Fortran */
49 #undef iargc_
50 #undef getarg_
51 #define iargc_  NARGS
52 #define getarg_ GETARG
53 #elif defined(PETSC_HAVE_PXFGETARG_NEW)  /* cray x1 */
54 #undef iargc_
55 #undef getarg_
56 #define iargc_  ipxfargc_
57 #define getarg_ pxfgetarg_
58 #endif
59 #if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HPUX + no underscore */
60 #undef iargc_
61 #undef getarg_
62 #define iargc_   iargc_
63 #define getarg_  getarg_
64 #endif
65 
66 #if defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* Fortran 2003 */
67 #undef iargc_
68 #undef getarg_
69 #define iargc_ petsccommandargumentcount_
70 #define getarg_ petscgetcommandargument_
71 #elif defined(PETSC_HAVE_GFORTRAN_IARGC) /* gfortran from gcc4 */
72 #undef iargc_
73 #undef getarg_
74 #define iargc_  _gfortran_iargc
75 #define getarg_ _gfortran_getarg_i4
76 #elif defined(PETSC_HAVE_BGL_IARGC) /* bgl g77 has different external & internal name mangling */
77 #undef iargc_
78 #undef getarg_
79 #define iargc  iargc_
80 #define getarg getarg_
81 #endif
82 
83 /*
84     The extra _ is because the f2c compiler puts an
85   extra _ at the end if the original routine name
86   contained any _.
87 */
88 #if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
89 #undef mpi_init_
90 #define mpi_init_             mpi_init__
91 #endif
92 
93 PETSC_EXTERN void PETSC_STDCALL mpi_init_(int*);
94 PETSC_EXTERN void PETSC_STDCALL petscgetcommoncomm_(PetscMPIInt*);
95 
96 /*
97      Different Fortran compilers handle command lines in different ways
98 */
99 #if defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* Fortran 2003  - same as 'else' case */
100 PETSC_EXTERN int iargc_();
101 PETSC_EXTERN void getarg_(int*,char*,int);
102 #elif defined(PETSC_USE_NARGS)
103 PETSC_EXTERN short __stdcall NARGS();
104 PETSC_EXTERN void __stdcall GETARG(short*,char*,int,short *);
105 
106 #elif defined(PETSC_HAVE_FORTRAN_STDCALL)
107 PETSC_EXTERN int PETSC_STDCALL IARGC();
108 PETSC_EXTERN void PETSC_STDCALL GETARG(int *,char *,int);
109 
110 #elif defined(PETSC_HAVE_PXFGETARG_NEW)
111 PETSC_EXTERN int iargc_();
112 PETSC_EXTERN void getarg_(int*,char*,int*,int*,int);
113 
114 #else
115 PETSC_EXTERN int iargc_();
116 PETSC_EXTERN void getarg_(int*,char*,int);
117 /*
118       The Cray T3D/T3E use the PXFGETARG() function
119 */
120 #if defined(PETSC_HAVE_PXFGETARG)
121 PETSC_EXTERN void PXFGETARG(int*,_fcd,int*,int*);
122 #endif
123 #endif
124 
125 #if (defined(PETSC_HAVE_COMPLEX) && !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)) || defined(PETSC_USE_REAL___FLOAT128)
126 extern MPI_Op MPIU_SUM;
127 
128 PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
129 
130 #endif
131 #if defined(PETSC_USE_REAL___FLOAT128)
132 
133 PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
134 PETSC_EXTERN void MPIAPI PetscMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
135 PETSC_EXTERN void MPIAPI PetscMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
136 #endif
137 
138 extern MPI_Op PetscMaxSum_Op;
139 
140 PETSC_EXTERN void MPIAPI PetscMaxSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
141 PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelCounter(MPI_Comm,PetscMPIInt,void*,void*);
142 PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Inner(MPI_Comm,PetscMPIInt,void*,void*);
143 PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Outer(MPI_Comm,PetscMPIInt,void*,void*);
144 
145 extern PetscErrorCode  PetscOptionsCheckInitial_Private(void);
146 extern PetscErrorCode  PetscOptionsCheckInitial_Components(void);
147 extern PetscErrorCode  PetscInitialize_DynamicLibraries(void);
148 #if defined(PETSC_USE_LOG)
149 extern PetscErrorCode  PetscLogInitialize(void);
150 #endif
151 extern PetscErrorCode  PetscMallocAlign(size_t,int,const char[],const char[],void**);
152 extern PetscErrorCode  PetscFreeAlign(void*,int,const char[],const char[]);
153 extern int  PetscGlobalArgc;
154 extern char **PetscGlobalArgs;
155 
156 /*
    Reads in Fortran command line arguments and sends them to
158   all processors and adds them to Options database.
159 */
160 
PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv)
{
#if defined(PETSC_USE_NARGS)
  short          i,flg;        /* Digital Fortran GETARG() uses short arguments */
#else
  int            i;
#endif
  PetscErrorCode ierr;
  int            warg = 256;   /* fixed number of bytes reserved per argument string */
  PetscMPIInt    rank;
  char           *p;

  /* only rank 0 queries the Fortran runtime for the argument count; it is then broadcast */
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (!rank) {
#if defined(PETSC_HAVE_IARG_COUNT_PROGNAME)
    *argc = iargc_();
#else
    /* most compilers do not count the program name for argv[0] */
    *argc = 1 + iargc_();
#endif
  }
  ierr = MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);CHKERRQ(ierr);

  /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */
  /* one allocation holds *argc+1 char* pointers followed by the packed argument text */
  ierr = PetscMallocAlign((*argc+1)*(warg*sizeof(char)+sizeof(char*)),0,0,0,(void**)argv);CHKERRQ(ierr);
  (*argv)[0] = (char*)(*argv + *argc + 1);  /* text area begins right after the pointer table */

  if (!rank) {
    ierr = PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));CHKERRQ(ierr);
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;     /* each argument occupies a fixed warg-byte slot */
#if defined (PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* same as 'else' case */
      getarg_(&i,(*argv)[i],warg);
#elif defined(PETSC_HAVE_PXFGETARG_NEW)
      /* Cray-style getarg_() returns the argument length explicitly; NUL-terminate by hand */
      {char *tmp = (*argv)[i];
      int ilen;
      getarg_(&i,tmp,&ilen,&ierr,warg);CHKERRQ(ierr);
      tmp[ilen] = 0;}
#elif defined(PETSC_USE_NARGS)
      GETARG(&i,(*argv)[i],warg,&flg);
#else
      /*
      Because the stupid #defines above define all kinds of things to getarg_ we cannot do this test
      #elif defined(PETSC_HAVE_GETARG)
      getarg_(&i,(*argv)[i],warg);
      #else
         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot get Fortran command line arguments");
      */
      getarg_(&i,(*argv)[i],warg);
#endif
      /* zero out garbage at end of each argument */
      /* Fortran pads with trailing blanks; scan back from the end turning spaces into NULs */
      p = (*argv)[i] + warg-1;
      while (p > (*argv)[i]) {
        if (*p == ' ') *p = 0;
        p--;
      }
    }
  }
  /* ship the packed argument text to all ranks, then rebuild the pointer table locally */
  ierr = MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr);
  if (rank) {
    for (i=0; i<*argc; i++) (*argv)[i+1] = (*argv)[i] + warg;
  }
  return 0;
}
225 
226 #if defined(PETSC_SERIALIZE_FUNCTIONS)
227 extern PetscFPT PetscFPTData;
228 #endif
229 
230 #if defined(PETSC_HAVE_THREADSAFETY)
231 PetscSpinlock PetscViewerASCIISpinLockOpen;
232 PetscSpinlock PetscViewerASCIISpinLockStdout;
233 PetscSpinlock PetscViewerASCIISpinLockStderr;
234 PetscSpinlock PetscCommSpinLock;
235 #endif
236 
237 /* -----------------------------------------------------------------------------------------------*/
238 
239 #if defined(PETSC_HAVE_SAWS)
240 #include <petscviewersaws.h>
241 extern PetscErrorCode  PetscInitializeSAWs(const char[]);
242 #endif
243 
244 /*
245     petscinitialize - Version called from Fortran.
246 
247     Notes:
248       Since this is called from Fortran it does not return error codes
249 
250 */
251 PETSC_EXTERN void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
252 {
253 #if defined (PETSC_USE_NARGS)
254   short       flg,i;
255 #else
256   int         i;
257 #if !defined(PETSC_HAVE_PXFGETARG_NEW) && !defined (PETSC_HAVE_PXFGETARG_NEW) && !defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT)
258   int         j;
259 #endif
260 #endif
261 #if defined(PETSC_HAVE_CUDA)
262   PetscBool   flg2;
263 #endif
264   int         flag;
265   PetscMPIInt size;
266   char        *t1,name[256],hostname[64];
267   PetscMPIInt f_petsc_comm_world;
268 
269   *ierr = PetscMemzero(name,256); if (*ierr) return;
270   if (PetscInitializeCalled) {*ierr = 0; return;}
271 
272   /* this must be initialized in a routine, not as a constant declaration*/
273   PETSC_STDOUT = stdout;
274   PETSC_STDERR = stderr;
275 
276   /* on Windows - set printf to default to printing 2 digit exponents */
277 #if defined(PETSC_HAVE__SET_OUTPUT_FORMAT)
278   _set_output_format(_TWO_DIGIT_EXPONENT);
279 #endif
280 
281   *ierr = PetscOptionsCreateDefault();
282   if (*ierr) return;
283   i = 0;
284 #if defined (PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* same as 'else' case */
285   getarg_(&i,name,256);
286 #elif defined (PETSC_HAVE_PXFGETARG_NEW)
287   { int ilen,sierr;
288     getarg_(&i,name,&ilen,&sierr,256);
289     if (sierr) PetscStrncpy(name,"Unknown Name",256);
290     else name[ilen] = 0;
291   }
292 #elif defined(PETSC_USE_NARGS)
293   GETARG(&i,name,256,&flg);
294 #else
295   getarg_(&i,name,256);
296   /* Eliminate spaces at the end of the string */
297   for (j=254; j>=0; j--) {
298     if (name[j] != ' ') {
299       name[j+1] = 0;
300       break;
301     }
302   }
303   if (j<0) PetscStrncpy(name,"Unknown Name",256);
304 #endif
305   *ierr = PetscSetProgramName(name);
306   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()\n");return;}
307 
308   /* check if PETSC_COMM_WORLD is initialized by the user in fortran */
309   petscgetcommoncomm_(&f_petsc_comm_world);
310   MPI_Initialized(&flag);
311   if (!flag) {
312     PetscMPIInt mierr;
313 
314     if (f_petsc_comm_world) {(*PetscErrorPrintf)("You cannot set PETSC_COMM_WORLD if you have not initialized MPI first\n");return;}
315     /* MPI requires calling Fortran mpi_init() if main program is Fortran */
316 #if defined(PETSC_HAVE_MPIUNI) && defined(MPIUNI_AVOID_MPI_NAMESPACE)
317     mierr = MPI_Init((int*)0, (char***)0);
318 #else
319     mpi_init_(&mierr);
320 #endif
321     if (mierr) {
322       *ierr = mierr;
323       (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()\n");
324       return;
325     }
326     PetscBeganMPI = PETSC_TRUE;
327   }
328   if (f_petsc_comm_world) PETSC_COMM_WORLD = MPI_Comm_f2c(*(MPI_Fint*)&f_petsc_comm_world); /* User called MPI_INITIALIZE() and changed PETSC_COMM_WORLD */
329   else PETSC_COMM_WORLD = MPI_COMM_WORLD;
330   PetscInitializeCalled = PETSC_TRUE;
331 
332   *ierr = PetscSpinlockCreate(&PetscViewerASCIISpinLockOpen);
333   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;}
334   *ierr = PetscSpinlockCreate(&PetscViewerASCIISpinLockStdout);
335   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;}
336   *ierr = PetscSpinlockCreate(&PetscViewerASCIISpinLockStderr);
337   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;}
338   *ierr = PetscSpinlockCreate(&PetscCommSpinLock);
339   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;}
340 
341   *ierr = PetscErrorPrintfInitialize();
342   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()\n");return;}
343   *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
344   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank\n");return;}
345   *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);
346   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize\n");return;}
347 
348   MPIU_BOOL = MPI_INT;
349   MPIU_ENUM = MPI_INT;
350 
351 #if defined(PETSC_HAVE_COMPLEX)
352   /*
353      Initialized the global variable; this is because with
354      shared libraries the constructors for global variables
355      are not called; at least on IRIX.
356   */
357   {
358 #if defined(PETSC_CLANGUAGE_CXX)
359     PetscComplex ic(0.0,1.0);
360     PETSC_i = ic;
361 #else
362     PETSC_i = _Complex_I;
363 #endif
364   }
365 
366 #if !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)
367   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU_C_DOUBLE_COMPLEX);
368   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
369   *ierr = MPI_Type_commit(&MPIU_C_DOUBLE_COMPLEX);
370   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
371   *ierr = MPI_Type_contiguous(2,MPI_FLOAT,&MPIU_C_COMPLEX);
372   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
373   *ierr = MPI_Type_commit(&MPIU_C_COMPLEX);
374   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
375   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
376   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
377 #endif
378 
379 #endif
380 
381 #if defined(PETSC_USE_REAL___FLOAT128)
382   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___FLOAT128);
383   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
384   *ierr = MPI_Type_commit(&MPIU___FLOAT128);
385   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
386 #if defined(PETSC_HAVE_COMPLEX)
387   *ierr = MPI_Type_contiguous(4,MPI_DOUBLE,&MPIU___COMPLEX128);
388   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
389   *ierr = MPI_Type_commit(&MPIU___COMPLEX128);
390   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
391 #endif
392   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
393   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
394   *ierr = MPI_Op_create(PetscMax_Local,1,&MPIU_MAX);
395   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
396   *ierr = MPI_Op_create(PetscMin_Local,1,&MPIU_MIN);
397   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
398 #endif
399 
400   /*
401        Create the PETSc MPI reduction operator that sums of the first
402      half of the entries and maxes the second half.
403   */
404   *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);
405   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
406 
407   *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR);
408   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
409   *ierr = MPI_Type_commit(&MPIU_2SCALAR);
410   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
411 #if defined(PETSC_USE_64BIT_INDICES) || !defined(MPI_2INT)
412   *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT);
413   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
414   *ierr = MPI_Type_commit(&MPIU_2INT);
415   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
416 #endif
417   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelCounter,&Petsc_Counter_keyval,(void*)0);
418   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
419   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Outer,&Petsc_InnerComm_keyval,(void*)0);
420   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
421   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Inner,&Petsc_OuterComm_keyval,(void*)0);
422   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
423 
424   /*
425      PetscInitializeFortran() is called twice. Here it initializes
426      PETSC_NULL_CHARACTER_Fortran. Below it initializes the PETSC_VIEWERs.
427      The PETSC_VIEWERs have not been created yet, so they must be initialized
428      below.
429   */
430   PetscInitializeFortran();
431   PETScParseFortranArgs_Private(&PetscGlobalArgc,&PetscGlobalArgs);
432   FIXCHAR(filename,len,t1);
433   *ierr = PetscOptionsInsert(NULL,&PetscGlobalArgc,&PetscGlobalArgs,t1);
434   FREECHAR(filename,t1);
435   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database\n");return;}
436   *ierr = PetscOptionsCheckInitial_Private();
437   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
438   *ierr = PetscCitationsInitialize();
439   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscCitationsInitialize()\n");return;}
440 #if defined(PETSC_HAVE_SAWS)
441   *ierr = PetscInitializeSAWs(NULL);
442   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing SAWs\n");return;}
443 #endif
444 #if defined(PETSC_USE_LOG)
445   *ierr = PetscLogInitialize();
446   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: intializing logging\n");return;}
447 #endif
448   *ierr = PetscInitialize_DynamicLibraries();
449   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries\n");return;}
450 
451   *ierr = PetscInitializeFortran();
452   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Setting up common block\n");return;}
453 
454   *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);
455   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()\n");return;}
456   *ierr = PetscInfo1(0,"(Fortran):PETSc successfully started: procs %d\n",size);
457   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
458   *ierr = PetscGetHostName(hostname,64);
459   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting hostname\n");return;}
460   *ierr = PetscInfo1(0,"Running on machine: %s\n",hostname);
461   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
462   *ierr = PetscOptionsCheckInitial_Components();
463   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
464 
465 #if defined(PETSC_USE_DEBUG) && !defined(PETSC_HAVE_THREADSAFETY)
466   *ierr = PetscStackCreate();
467   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscStackCreate()\n");return;}
468 #endif
469 
470 #if defined(PETSC_SERIALIZE_FUNCTIONS)
471   *ierr = PetscFPTCreate(10000);
472   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscFPTCreate()\n");return;}
473 #endif
474 
475 #if defined(PETSC_HAVE_CUDA)
476   flg2  = PETSC_TRUE;
477   *ierr = PetscOptionsGetBool(NULL,NULL,"-cublas",&flg2,NULL);
478   if (flg2) cublasInit();
479 #endif
480 }
481 
482 PETSC_EXTERN void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr)
483 {
484 #if defined(PETSC_HAVE_SUNMATHPRO)
485   extern void standard_arithmetic();
486   standard_arithmetic();
487 #endif
488   /* was malloced with PetscMallocAlign() so free the same way */
489   *ierr = PetscFreeAlign(PetscGlobalArgs,0,0,0);if (*ierr) {(*PetscErrorPrintf)("PetscFinalize:Freeing args\n");return;}
490 
491   *ierr = PetscFinalize();
492 }
493 
494 void PETSC_STDCALL petscend_(PetscErrorCode *ierr)
495 {
496 #if defined(PETSC_HAVE_SUNMATHPRO)
497   extern void standard_arithmetic();
498   standard_arithmetic();
499 #endif
500 
501   *ierr = PetscEnd();
502 }
503 
504