xref: /petsc/src/sys/objects/ftn-custom/zstart.c (revision 785e854f82a3c614b452fca2cf5ad4f2afe8bdde)
1 /*
2   This file contains Fortran stubs for PetscInitialize and Finalize.
3 */
4 
5 /*
6     This is to prevent the Cray T3D version of MPI (University of Edinburgh)
7   from stupidly redefining MPI_INIT(). They put this in to detect errors
  in C code, but here I do want to be calling the Fortran version from a
9   C subroutine.
10 */
11 #define T3DMPI_FORTRAN
12 #define T3EMPI_FORTRAN
13 
14 #define PETSC_DESIRE_COMPLEX
15 #include <petsc-private/fortranimpl.h>
16 
17 #if defined(PETSC_HAVE_CUSP)
18 #include <cublas.h>
19 #endif
20 #include <petscthreadcomm.h>
21 
22 extern PetscBool PetscHMPIWorker;
23 
24 #if defined(PETSC_HAVE_FORTRAN_CAPS)
25 #define petscinitialize_              PETSCINITIALIZE
26 #define petscfinalize_                PETSCFINALIZE
27 #define petscend_                     PETSCEND
28 #define iargc_                        IARGC
29 #define getarg_                       GETARG
30 #define mpi_init_                     MPI_INIT
31 #define petscgetcommoncomm_           PETSCGETCOMMONCOMM
32 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
33 #define petscinitialize_              petscinitialize
34 #define petscfinalize_                petscfinalize
35 #define petscend_                     petscend
36 #define mpi_init_                     mpi_init
37 #define iargc_                        iargc
38 #define getarg_                       getarg
39 #define petscgetcommoncomm_           petscgetcommoncomm
40 #endif
41 
42 #if defined(PETSC_HAVE_NAGF90)
43 #undef iargc_
44 #undef getarg_
45 #define iargc_  f90_unix_MP_iargc
46 #define getarg_ f90_unix_MP_getarg
47 #endif
48 #if defined(PETSC_USE_NARGS) /* Digital Fortran */
49 #undef iargc_
50 #undef getarg_
51 #define iargc_  NARGS
52 #define getarg_ GETARG
53 #elif defined(PETSC_HAVE_PXFGETARG_NEW)  /* cray x1 */
54 #undef iargc_
55 #undef getarg_
56 #define iargc_  ipxfargc_
57 #define getarg_ pxfgetarg_
58 #endif
59 #if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HPUX + no underscore */
60 #undef iargc_
61 #undef getarg_
62 #define iargc_   iargc_
63 #define getarg_  getarg_
64 #endif
65 #if defined(PETSC_HAVE_GFORTRAN_IARGC) /* gfortran from gcc4 */
66 #undef iargc_
67 #undef getarg_
68 #define iargc_  _gfortran_iargc
69 #define getarg_ _gfortran_getarg_i4
70 #elif defined(PETSC_HAVE_BGL_IARGC) /* bgl g77 has different external & internal name mangling */
71 #undef iargc_
72 #undef getarg_
73 #define iargc  iargc_
74 #define getarg getarg_
75 #endif
76 
77 /*
78     The extra _ is because the f2c compiler puts an
79   extra _ at the end if the original routine name
80   contained any _.
81 */
82 #if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
83 #undef mpi_init_
84 #define mpi_init_             mpi_init__
85 #endif
86 
87 PETSC_EXTERN void PETSC_STDCALL mpi_init_(int*);
88 PETSC_EXTERN void PETSC_STDCALL petscgetcommoncomm_(PetscMPIInt*);
89 
90 /*
91      Different Fortran compilers handle command lines in different ways
92 */
93 #if defined(PETSC_USE_NARGS)
94 PETSC_EXTERN short __stdcall NARGS();
95 PETSC_EXTERN void __stdcall GETARG(short*,char*,int,short *);
96 
97 #elif defined(PETSC_HAVE_FORTRAN_STDCALL)
98 PETSC_EXTERN int PETSC_STDCALL IARGC();
99 PETSC_EXTERN void PETSC_STDCALL GETARG(int *,char *,int);
100 
101 #elif defined(PETSC_HAVE_PXFGETARG_NEW)
102 PETSC_EXTERN int iargc_();
103 PETSC_EXTERN void getarg_(int*,char*,int*,int*,int);
104 
105 #else
106 PETSC_EXTERN int iargc_();
107 PETSC_EXTERN void getarg_(int*,char*,int);
108 /*
109       The Cray T3D/T3E use the PXFGETARG() function
110 */
111 #if defined(PETSC_HAVE_PXFGETARG)
112 PETSC_EXTERN void PXFGETARG(int*,_fcd,int*,int*);
113 #endif
114 #endif
115 
116 #if (defined(PETSC_HAVE_COMPLEX) && !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)) || defined(PETSC_USE_REAL___FLOAT128)
117 extern MPI_Op MPIU_SUM;
118 
119 PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
120 
121 #endif
122 #if defined(PETSC_USE_REAL___FLOAT128)
123 
124 PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
125 PETSC_EXTERN void MPIAPI PetscMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
126 PETSC_EXTERN void MPIAPI PetscMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
127 #endif
128 
129 extern MPI_Op PetscMaxSum_Op;
130 
131 PETSC_EXTERN void MPIAPI PetscMaxSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
132 PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelCounter(MPI_Comm,PetscMPIInt,void*,void*);
133 PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Inner(MPI_Comm,PetscMPIInt,void*,void*);
134 PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Outer(MPI_Comm,PetscMPIInt,void*,void*);
135 
136 extern PetscErrorCode  PetscOptionsCheckInitial_Private(void);
137 extern PetscErrorCode  PetscOptionsCheckInitial_Components(void);
138 extern PetscErrorCode  PetscInitialize_DynamicLibraries(void);
139 #if defined(PETSC_USE_LOG)
140 extern PetscErrorCode  PetscLogBegin_Private(void);
141 #endif
142 extern PetscErrorCode  PetscMallocAlign(size_t,int,const char[],const char[],void**);
143 extern PetscErrorCode  PetscFreeAlign(void*,int,const char[],const char[]);
144 extern int  PetscGlobalArgc;
145 extern char **PetscGlobalArgs;
146 
147 /*
    Reads in Fortran command line arguments and sends them to
149   all processors and adds them to Options database.
150 */
151 
/*
    PETScParseFortranArgs_Private - Gathers the Fortran command line arguments
  on rank 0 of PETSC_COMM_WORLD and broadcasts them to all ranks.

    Output Parameters:
+   argc - number of arguments (the program name is counted as argv[0])
-   argv - the arguments, stored in one PetscMallocAlign()ed slab that must be
           freed with PetscFreeAlign() (see petscfinalize_())

    Notes:
      Runs before the PETSc malloc machinery is configured, hence the direct
      PetscMallocAlign() call. Every argument slot is a fixed warg (256)
      characters wide.
*/
PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv)
{
#if defined(PETSC_USE_NARGS)
  short          i,flg;
#else
  int            i;
#endif
  PetscErrorCode ierr;
  int            warg = 256;   /* fixed character width reserved per argument */
  PetscMPIInt    rank;
  char           *p;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (!rank) {
#if defined(PETSC_HAVE_IARG_COUNT_PROGNAME)
    *argc = iargc_();
#else
    /* most compilers do not count the program name for argv[0] */
    *argc = 1 + iargc_();
#endif
  }
  ierr = MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);CHKERRQ(ierr);

  /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */
  /* one slab: (*argc+1) char* slots followed by (*argc+1) buffers of warg chars */
  ierr = PetscMallocAlign((*argc+1)*(warg*sizeof(char)+sizeof(char*)),0,0,0,(void**)argv);CHKERRQ(ierr);
  /* the first argument buffer starts immediately after the pointer table */
  (*argv)[0] = (char*)(*argv + *argc + 1);

  if (!rank) {
    ierr = PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));CHKERRQ(ierr);
    for (i=0; i<*argc; i++) {
      /* each slot is a fixed warg characters past the previous one */
      (*argv)[i+1] = (*argv)[i] + warg;
#if defined(PETSC_HAVE_PXFGETARG_NEW)
      {char *tmp = (*argv)[i];
      int ilen;
      getarg_(&i,tmp,&ilen,&ierr,warg);CHKERRQ(ierr);
      tmp[ilen] = 0;}
#elif defined(PETSC_USE_NARGS)
      GETARG(&i,(*argv)[i],warg,&flg);
#else
      /*
      Because the stupid #defines above define all kinds of things to getarg_ we cannot do this test
      #elif defined(PETSC_HAVE_GETARG)
      getarg_(&i,(*argv)[i],warg);
      #else
         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot get Fortran command line arguments");
      */
      getarg_(&i,(*argv)[i],warg);
#endif
      /* zero out garbage at end of each argument */
      p = (*argv)[i] + warg-1;
      while (p > (*argv)[i]) {
        if (*p == ' ') *p = 0;
        p--;
      }
    }
  }
  /* ship the contiguous buffer area to the other ranks ... */
  ierr = MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr);
  if (rank) {
    /* ... which then rebuild their local pointer tables into it */
    for (i=0; i<*argc; i++) (*argv)[i+1] = (*argv)[i] + warg;
  }
  return 0;
}
214 
215 /* -----------------------------------------------------------------------------------------------*/
216 
217 extern MPI_Op PetscADMax_Op;
218 extern MPI_Op PetscADMin_Op;
219 PETSC_EXTERN void MPIAPI PetscADMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
220 PETSC_EXTERN void MPIAPI PetscADMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
221 
222 #if defined(PETSC_HAVE_SAWS)
223 #include <petscviewersaws.h>
224 extern PetscErrorCode  PetscInitializeSAWs(const char[]);
225 #endif
226 
227 /*
228     petscinitialize - Version called from Fortran.
229 
230     Notes:
231       Since this is called from Fortran it does not return error codes
232 
233 */
234 PETSC_EXTERN void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
235 {
236 #if defined (PETSC_USE_NARGS)
237   short       flg,i;
238 #else
239   int         i;
240 #if !defined(PETSC_HAVE_PXFGETARG_NEW) && !defined (PETSC_HAVE_PXFGETARG_NEW)
241   int         j;
242 #endif
243 #endif
244   int         flag;
245   PetscMPIInt size;
246   char        *t1,name[256],hostname[64];
247   PetscMPIInt f_petsc_comm_world;
248   PetscInt    nodesize;
249   PetscBool   flg;
250 
251   *ierr = PetscMemzero(name,256); if (*ierr) return;
252   if (PetscInitializeCalled) {*ierr = 0; return;}
253 
254   /* this must be initialized in a routine, not as a constant declaration*/
255   PETSC_STDOUT = stdout;
256   PETSC_STDERR = stderr;
257 
258   *ierr = PetscOptionsCreate();
259   if (*ierr) return;
260   i = 0;
261 #if defined (PETSC_HAVE_PXFGETARG_NEW)
262   { int ilen,sierr;
263     getarg_(&i,name,&ilen,&sierr,256);
264     if (sierr) PetscStrncpy(name,"Unknown Name",256);
265     else name[ilen] = 0;
266   }
267 #elif defined(PETSC_USE_NARGS)
268   GETARG(&i,name,256,&flg);
269 #else
270   getarg_(&i,name,256);
271   /* Eliminate spaces at the end of the string */
272   for (j=254; j>=0; j--) {
273     if (name[j] != ' ') {
274       name[j+1] = 0;
275       break;
276     }
277   }
278   if (j<0) PetscStrncpy(name,"Unknown Name",256);
279 #endif
280   *ierr = PetscSetProgramName(name);
281   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()\n");return;}
282 
283   /* check if PETSC_COMM_WORLD is initialized by the user in fortran */
284   petscgetcommoncomm_(&f_petsc_comm_world);
285   MPI_Initialized(&flag);
286   if (!flag) {
287     PetscMPIInt mierr;
288 
289     if (f_petsc_comm_world) {(*PetscErrorPrintf)("You cannot set PETSC_COMM_WORLD if you have not initialized MPI first\n");return;}
290     /* MPI requires calling Fortran mpi_init() if main program is Fortran */
291 #if defined(PETSC_HAVE_MPIUNI) && defined(MPIUNI_AVOID_MPI_NAMESPACE)
292     mierr = MPI_Init((int*)0, (char***)0);
293 #else
294     mpi_init_(&mierr);
295 #endif
296     if (mierr) {
297       *ierr = mierr;
298       (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()\n");
299       return;
300     }
301     PetscBeganMPI = PETSC_TRUE;
302   }
303   if (f_petsc_comm_world) PETSC_COMM_WORLD = MPI_Comm_f2c(*(MPI_Fint*)&f_petsc_comm_world); /* User called MPI_INITIALIZE() and changed PETSC_COMM_WORLD */
304   else PETSC_COMM_WORLD = MPI_COMM_WORLD;
305   PetscInitializeCalled = PETSC_TRUE;
306 
307   *ierr = PetscErrorPrintfInitialize();
308   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()\n");return;}
309   *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
310   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank\n");return;}
311   *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);
312   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize\n");return;}
313 
314   MPIU_BOOL = MPI_INT;
315   MPIU_ENUM = MPI_INT;
316 
317 #if defined(PETSC_HAVE_COMPLEX)
318   /*
319      Initialized the global variable; this is because with
320      shared libraries the constructors for global variables
321      are not called; at least on IRIX.
322   */
323   {
324 #if defined(PETSC_CLANGUAGE_CXX)
325     PetscComplex ic(0.0,1.0);
326     PETSC_i = ic;
327 #else
328     PETSC_i = _Complex_I;
329 #endif
330   }
331 
332 #if !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)
333   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU_C_DOUBLE_COMPLEX);
334   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
335   *ierr = MPI_Type_commit(&MPIU_C_DOUBLE_COMPLEX);
336   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
337   *ierr = MPI_Type_contiguous(2,MPI_FLOAT,&MPIU_C_COMPLEX);
338   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
339   *ierr = MPI_Type_commit(&MPIU_C_COMPLEX);
340   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
341   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
342   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
343 #endif
344 
345 #endif
346 
347 #if defined(PETSC_USE_REAL___FLOAT128)
348   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___FLOAT128);
349   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
350   *ierr = MPI_Type_commit(&MPIU___FLOAT128);
351   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
352 #if defined(PETSC_HAVE_COMPLEX)
353   *ierr = MPI_Type_contiguous(4,MPI_DOUBLE,&MPIU___COMPLEX128);
354   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
355   *ierr = MPI_Type_commit(&MPIU___COMPLEX128);
356   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
357 #endif
358   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
359   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
360   *ierr = MPI_Op_create(PetscMax_Local,1,&MPIU_MAX);
361   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
362   *ierr = MPI_Op_create(PetscMin_Local,1,&MPIU_MIN);
363   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
364 #endif
365 
366   /*
367        Create the PETSc MPI reduction operator that sums of the first
368      half of the entries and maxes the second half.
369   */
370   *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);
371   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
372 
373   *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR);
374   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
375   *ierr = MPI_Type_commit(&MPIU_2SCALAR);
376   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
377 #if defined(PETSC_USE_64BIT_INDICES) || !defined(MPI_2INT)
378   *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT);
379   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
380   *ierr = MPI_Type_commit(&MPIU_2INT);
381   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
382 #endif
383   *ierr = MPI_Op_create(PetscADMax_Local,1,&PetscADMax_Op);
384   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
385   *ierr = MPI_Op_create(PetscADMin_Local,1,&PetscADMin_Op);
386   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
387   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelCounter,&Petsc_Counter_keyval,(void*)0);
388   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
389   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Outer,&Petsc_InnerComm_keyval,(void*)0);
390   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
391   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Inner,&Petsc_OuterComm_keyval,(void*)0);
392   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
393 
394   /*
395      PetscInitializeFortran() is called twice. Here it initializes
396      PETSC_NULL_CHARACTER_Fortran. Below it initializes the PETSC_VIEWERs.
397      The PETSC_VIEWERs have not been created yet, so they must be initialized
398      below.
399   */
400   PetscInitializeFortran();
401   PETScParseFortranArgs_Private(&PetscGlobalArgc,&PetscGlobalArgs);
402   FIXCHAR(filename,len,t1);
403   *ierr = PetscOptionsInsert(&PetscGlobalArgc,&PetscGlobalArgs,t1);
404   FREECHAR(filename,t1);
405   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database\n");return;}
406   *ierr = PetscOptionsCheckInitial_Private();
407   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
408   *ierr = PetscCitationsInitialize();
409   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscCitationsInitialize()\n");return;}
410 #if defined(PETSC_HAVE_SAWS)
411   *ierr = PetscInitializeSAWs(NULL);
412   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing SAWs\n");return;}
413 #endif
414 #if defined(PETSC_USE_LOG)
415   *ierr = PetscLogBegin_Private();
416   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: intializing logging\n");return;}
417 #endif
418   *ierr = PetscInitialize_DynamicLibraries();
419   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries\n");return;}
420 
421   *ierr = PetscInitializeFortran();
422   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Setting up common block\n");return;}
423 
424   *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);
425   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()\n");return;}
426   *ierr = PetscInfo1(0,"(Fortran):PETSc successfully started: procs %d\n",size);
427   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
428   *ierr = PetscGetHostName(hostname,64);
429   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting hostname\n");return;}
430   *ierr = PetscInfo1(0,"Running on machine: %s\n",hostname);
431   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
432   *ierr = PetscOptionsCheckInitial_Components();
433   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
434 
435   *ierr = PetscThreadCommInitializePackage();
436   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscThreadCommInitialize()\n");return;}
437 
438   PetscThreadLocalRegister((PetscThreadKey*)&petscstack); /* Creates pthread_key */
439 #if defined(PETSC_USE_DEBUG)
440   *ierr = PetscStackCreate();
441   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscStackCreate()\n");return;}
442 #endif
443 
444   *ierr = PetscOptionsGetInt(NULL,"-hmpi_spawn_size",&nodesize,&flg);
445   if (flg) {
446 #if defined(PETSC_HAVE_MPI_COMM_SPAWN)
447     *ierr = PetscHMPISpawn((PetscMPIInt) nodesize); /* worker nodes never return from here; they go directly to PetscEnd() */
448     if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPIS-pawn()\n");return;}
449 #else
450     *ierr = PETSC_ERR_SUP;
451     (*PetscErrorPrintf)("PetscInitialize: PETSc built without MPI 2 (MPI_Comm_spawn) support, use -hmpi_merge_size instead");
452     return;
453 #endif
454   } else {
455     *ierr = PetscOptionsGetInt(NULL,"-hmpi_merge_size",&nodesize,&flg);
456     if (flg) {
457       *ierr = PetscHMPIMerge((PetscMPIInt) nodesize,NULL,NULL);
458       if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPIMerge()\n");return;}
459       if (PetscHMPIWorker) { /* if worker then never enter user code */
460         PetscInitializeCalled = PETSC_TRUE;
461         *ierr = PetscEnd();
462       }
463     }
464   }
465 
466 #if defined(PETSC_HAVE_CUDA)
467   flg  = PETSC_TRUE;
468   *ierr = PetscOptionsGetBool(NULL,"-cublas",&flg,NULL);
469   if (flg) cublasInit();
470 #endif
471 }
472 
473 PETSC_EXTERN void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr)
474 {
475 #if defined(PETSC_HAVE_SUNMATHPRO)
476   extern void standard_arithmetic();
477   standard_arithmetic();
478 #endif
479   /* was malloced with PetscMallocAlign() so free the same way */
480   *ierr = PetscFreeAlign(PetscGlobalArgs,0,0,0);if (*ierr) {(*PetscErrorPrintf)("PetscFinalize:Freeing args\n");return;}
481 
482   *ierr = PetscFinalize();
483 }
484 
485 void PETSC_STDCALL petscend_(PetscErrorCode *ierr)
486 {
487 #if defined(PETSC_HAVE_SUNMATHPRO)
488   extern void standard_arithmetic();
489   standard_arithmetic();
490 #endif
491 
492   *ierr = PetscEnd();
493 }
494 
495