xref: /petsc/src/sys/objects/ftn-custom/zstart.c (revision fce0c873789145caee477924bfa4ad26b4cd6ea4)
1 /*
2   This file contains Fortran stubs for PetscInitialize and Finalize.
3 */
4 
5 /*
6     This is to prevent the Cray T3D version of MPI (University of Edinburgh)
7   from stupidly redefining MPI_INIT(). They put this in to detect errors
8   in C code, but here I do want to be calling the Fortran version from a
9   C subroutine.
10 */
11 #define T3DMPI_FORTRAN
12 #define T3EMPI_FORTRAN
13 
14 #define PETSC_DESIRE_COMPLEX
15 #include <petsc-private/fortranimpl.h>
16 
17 #if defined(PETSC_HAVE_CUDA)
18 #include <cublas.h>
19 #endif
20 #include <petscthreadcomm.h>
21 
22 #if defined(PETSC_HAVE_FORTRAN_CAPS)
23 #define petscinitialize_              PETSCINITIALIZE
24 #define petscfinalize_                PETSCFINALIZE
25 #define petscend_                     PETSCEND
26 #define iargc_                        IARGC
27 #define getarg_                       GETARG
28 #define mpi_init_                     MPI_INIT
29 #define petscgetcommoncomm_           PETSCGETCOMMONCOMM
30 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
31 #define petscinitialize_              petscinitialize
32 #define petscfinalize_                petscfinalize
33 #define petscend_                     petscend
34 #define mpi_init_                     mpi_init
35 #define iargc_                        iargc
36 #define getarg_                       getarg
37 #define petscgetcommoncomm_           petscgetcommoncomm
38 #endif
39 
40 #if defined(PETSC_HAVE_NAGF90)
41 #undef iargc_
42 #undef getarg_
43 #define iargc_  f90_unix_MP_iargc
44 #define getarg_ f90_unix_MP_getarg
45 #endif
46 #if defined(PETSC_USE_NARGS) /* Digital Fortran */
47 #undef iargc_
48 #undef getarg_
49 #define iargc_  NARGS
50 #define getarg_ GETARG
51 #elif defined(PETSC_HAVE_PXFGETARG_NEW)  /* cray x1 */
52 #undef iargc_
53 #undef getarg_
54 #define iargc_  ipxfargc_
55 #define getarg_ pxfgetarg_
56 #endif
57 #if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HPUX + no underscore */
58 #undef iargc_
59 #undef getarg_
60 #define iargc_   iargc_
61 #define getarg_  getarg_
62 #endif
63 #if defined(PETSC_HAVE_GFORTRAN_IARGC) /* gfortran from gcc4 */
64 #undef iargc_
65 #undef getarg_
66 #define iargc_  _gfortran_iargc
67 #define getarg_ _gfortran_getarg_i4
68 #elif defined(PETSC_HAVE_BGL_IARGC) /* bgl g77 has different external & internal name mangling */
69 #undef iargc_
70 #undef getarg_
71 #define iargc  iargc_
72 #define getarg getarg_
73 #endif
74 
75 /*
76     The extra _ is because the f2c compiler puts an
77   extra _ at the end if the original routine name
78   contained any _.
79 */
80 #if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
81 #undef mpi_init_
82 #define mpi_init_             mpi_init__
83 #endif
84 
85 PETSC_EXTERN void PETSC_STDCALL mpi_init_(int*);
86 PETSC_EXTERN void PETSC_STDCALL petscgetcommoncomm_(PetscMPIInt*);
87 
88 /*
89      Different Fortran compilers handle command lines in different ways
90 */
91 #if defined(PETSC_USE_NARGS)
92 PETSC_EXTERN short __stdcall NARGS();
93 PETSC_EXTERN void __stdcall GETARG(short*,char*,int,short *);
94 
95 #elif defined(PETSC_HAVE_FORTRAN_STDCALL)
96 PETSC_EXTERN int PETSC_STDCALL IARGC();
97 PETSC_EXTERN void PETSC_STDCALL GETARG(int *,char *,int);
98 
99 #elif defined(PETSC_HAVE_PXFGETARG_NEW)
100 PETSC_EXTERN int iargc_();
101 PETSC_EXTERN void getarg_(int*,char*,int*,int*,int);
102 
103 #else
104 PETSC_EXTERN int iargc_();
105 PETSC_EXTERN void getarg_(int*,char*,int);
106 /*
107       The Cray T3D/T3E use the PXFGETARG() function
108 */
109 #if defined(PETSC_HAVE_PXFGETARG)
110 PETSC_EXTERN void PXFGETARG(int*,_fcd,int*,int*);
111 #endif
112 #endif
113 
114 #if (defined(PETSC_HAVE_COMPLEX) && !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)) || defined(PETSC_USE_REAL___FLOAT128)
115 extern MPI_Op MPIU_SUM;
116 
117 PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
118 
119 #endif
120 #if defined(PETSC_USE_REAL___FLOAT128)
121 
122 PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
123 PETSC_EXTERN void MPIAPI PetscMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
124 PETSC_EXTERN void MPIAPI PetscMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
125 #endif
126 
127 extern MPI_Op PetscMaxSum_Op;
128 
129 PETSC_EXTERN void MPIAPI PetscMaxSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
130 PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelCounter(MPI_Comm,PetscMPIInt,void*,void*);
131 PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Inner(MPI_Comm,PetscMPIInt,void*,void*);
132 PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Outer(MPI_Comm,PetscMPIInt,void*,void*);
133 
134 extern PetscErrorCode  PetscOptionsCheckInitial_Private(void);
135 extern PetscErrorCode  PetscOptionsCheckInitial_Components(void);
136 extern PetscErrorCode  PetscInitialize_DynamicLibraries(void);
137 #if defined(PETSC_USE_LOG)
138 extern PetscErrorCode  PetscLogBegin_Private(void);
139 #endif
140 extern PetscErrorCode  PetscMallocAlign(size_t,int,const char[],const char[],void**);
141 extern PetscErrorCode  PetscFreeAlign(void*,int,const char[],const char[]);
142 extern int  PetscGlobalArgc;
143 extern char **PetscGlobalArgs;
144 
145 /*
146     Reads in Fortran command line arguments and sends them to
147   all processors and adds them to Options database.
148 */
149 
/*
    Collects the Fortran command line arguments on rank 0 of PETSC_COMM_WORLD,
  broadcasts them to all ranks, and returns them as a C-style (argc,argv) pair.

    The argv storage is a single PetscMallocAlign() allocation laid out as
  (*argc+1) char* slots followed by (*argc+1) fixed-width (warg-byte) text
  buffers; the caller releases it with PetscFreeAlign().
*/
PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv)
{
#if defined(PETSC_USE_NARGS)
  short          i,flg;
#else
  int            i;
#endif
  PetscErrorCode ierr;
  int            warg = 256;   /* fixed number of bytes reserved per argument */
  PetscMPIInt    rank;
  char           *p;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (!rank) {
#if defined(PETSC_HAVE_IARG_COUNT_PROGNAME)
    *argc = iargc_();
#else
    /* most compilers do not count the program name for argv[0] */
    *argc = 1 + iargc_();
#endif
  }
  ierr = MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);CHKERRQ(ierr);

  /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */
  ierr = PetscMallocAlign((*argc+1)*(warg*sizeof(char)+sizeof(char*)),0,0,0,(void**)argv);CHKERRQ(ierr);
  /* argument text area begins immediately after the pointer table */
  (*argv)[0] = (char*)(*argv + *argc + 1);

  if (!rank) {
    ierr = PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));CHKERRQ(ierr);
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;
#if defined(PETSC_HAVE_PXFGETARG_NEW)
      /* Cray X1 variant reports the actual argument length; NUL-terminate at it */
      {char *tmp = (*argv)[i];
      int ilen;
      getarg_(&i,tmp,&ilen,&ierr,warg);CHKERRQ(ierr);
      tmp[ilen] = 0;}
#elif defined(PETSC_USE_NARGS)
      GETARG(&i,(*argv)[i],warg,&flg);
#else
      /*
      Because the stupid #defines above define all kinds of things to getarg_ we cannot do this test
      #elif defined(PETSC_HAVE_GETARG)
      getarg_(&i,(*argv)[i],warg);
      #else
         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot get Fortran command line arguments");
      */
      getarg_(&i,(*argv)[i],warg);
#endif
      /* zero out garbage at end of each argument */
      p = (*argv)[i] + warg-1;
      while (p > (*argv)[i]) {
        if (*p == ' ') *p = 0;
        p--;
      }
    }
  }
  ierr = MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr);
  if (rank) {
    /* non-root ranks rebuild the pointer table over the broadcast text */
    for (i=0; i<*argc; i++) (*argv)[i+1] = (*argv)[i] + warg;
  }
  return 0;
}
212 
213 /* -----------------------------------------------------------------------------------------------*/
214 
215 extern MPI_Op PetscADMax_Op;
216 extern MPI_Op PetscADMin_Op;
217 PETSC_EXTERN void MPIAPI PetscADMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
218 PETSC_EXTERN void MPIAPI PetscADMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
219 
220 #if defined(PETSC_HAVE_SAWS)
221 #include <petscviewersaws.h>
222 extern PetscErrorCode  PetscInitializeSAWs(const char[]);
223 #endif
224 
225 /*
226     petscinitialize - Version called from Fortran.
227 
228     Notes:
229       Since this is called from Fortran it does not return error codes
230 
231 */
232 PETSC_EXTERN void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
233 {
234 #if defined (PETSC_USE_NARGS)
235   short       flg,i;
236 #else
237   int         i;
238 #if !defined(PETSC_HAVE_PXFGETARG_NEW) && !defined (PETSC_HAVE_PXFGETARG_NEW)
239   int         j;
240 #endif
241 #endif
242 #if defined(PETSC_HAVE_CUDA)
243   PetscBool   flg2;
244 #endif
245   int         flag;
246   PetscMPIInt size;
247   char        *t1,name[256],hostname[64];
248   PetscMPIInt f_petsc_comm_world;
249 
250   *ierr = PetscMemzero(name,256); if (*ierr) return;
251   if (PetscInitializeCalled) {*ierr = 0; return;}
252 
253   /* this must be initialized in a routine, not as a constant declaration*/
254   PETSC_STDOUT = stdout;
255   PETSC_STDERR = stderr;
256 
257   /* on Windows - set printf to default to printing 2 digit exponents */
258 #if defined(PETSC_HAVE__SET_OUTPUT_FORMAT)
259   _set_output_format(_TWO_DIGIT_EXPONENT);
260 #endif
261 
262   *ierr = PetscOptionsCreate();
263   if (*ierr) return;
264   i = 0;
265 #if defined (PETSC_HAVE_PXFGETARG_NEW)
266   { int ilen,sierr;
267     getarg_(&i,name,&ilen,&sierr,256);
268     if (sierr) PetscStrncpy(name,"Unknown Name",256);
269     else name[ilen] = 0;
270   }
271 #elif defined(PETSC_USE_NARGS)
272   GETARG(&i,name,256,&flg);
273 #else
274   getarg_(&i,name,256);
275   /* Eliminate spaces at the end of the string */
276   for (j=254; j>=0; j--) {
277     if (name[j] != ' ') {
278       name[j+1] = 0;
279       break;
280     }
281   }
282   if (j<0) PetscStrncpy(name,"Unknown Name",256);
283 #endif
284   *ierr = PetscSetProgramName(name);
285   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()\n");return;}
286 
287   /* check if PETSC_COMM_WORLD is initialized by the user in fortran */
288   petscgetcommoncomm_(&f_petsc_comm_world);
289   MPI_Initialized(&flag);
290   if (!flag) {
291     PetscMPIInt mierr;
292 
293     if (f_petsc_comm_world) {(*PetscErrorPrintf)("You cannot set PETSC_COMM_WORLD if you have not initialized MPI first\n");return;}
294     /* MPI requires calling Fortran mpi_init() if main program is Fortran */
295 #if defined(PETSC_HAVE_MPIUNI) && defined(MPIUNI_AVOID_MPI_NAMESPACE)
296     mierr = MPI_Init((int*)0, (char***)0);
297 #else
298     mpi_init_(&mierr);
299 #endif
300     if (mierr) {
301       *ierr = mierr;
302       (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()\n");
303       return;
304     }
305     PetscBeganMPI = PETSC_TRUE;
306   }
307   if (f_petsc_comm_world) PETSC_COMM_WORLD = MPI_Comm_f2c(*(MPI_Fint*)&f_petsc_comm_world); /* User called MPI_INITIALIZE() and changed PETSC_COMM_WORLD */
308   else PETSC_COMM_WORLD = MPI_COMM_WORLD;
309   PetscInitializeCalled = PETSC_TRUE;
310 
311   *ierr = PetscErrorPrintfInitialize();
312   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()\n");return;}
313   *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
314   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank\n");return;}
315   *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);
316   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize\n");return;}
317 
318   MPIU_BOOL = MPI_INT;
319   MPIU_ENUM = MPI_INT;
320 
321 #if defined(PETSC_HAVE_COMPLEX)
322   /*
323      Initialized the global variable; this is because with
324      shared libraries the constructors for global variables
325      are not called; at least on IRIX.
326   */
327   {
328 #if defined(PETSC_CLANGUAGE_CXX)
329     PetscComplex ic(0.0,1.0);
330     PETSC_i = ic;
331 #else
332     PETSC_i = _Complex_I;
333 #endif
334   }
335 
336 #if !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)
337   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU_C_DOUBLE_COMPLEX);
338   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
339   *ierr = MPI_Type_commit(&MPIU_C_DOUBLE_COMPLEX);
340   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
341   *ierr = MPI_Type_contiguous(2,MPI_FLOAT,&MPIU_C_COMPLEX);
342   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
343   *ierr = MPI_Type_commit(&MPIU_C_COMPLEX);
344   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
345   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
346   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
347 #endif
348 
349 #endif
350 
351 #if defined(PETSC_USE_REAL___FLOAT128)
352   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___FLOAT128);
353   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
354   *ierr = MPI_Type_commit(&MPIU___FLOAT128);
355   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
356 #if defined(PETSC_HAVE_COMPLEX)
357   *ierr = MPI_Type_contiguous(4,MPI_DOUBLE,&MPIU___COMPLEX128);
358   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
359   *ierr = MPI_Type_commit(&MPIU___COMPLEX128);
360   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
361 #endif
362   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
363   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
364   *ierr = MPI_Op_create(PetscMax_Local,1,&MPIU_MAX);
365   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
366   *ierr = MPI_Op_create(PetscMin_Local,1,&MPIU_MIN);
367   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
368 #endif
369 
370   /*
371        Create the PETSc MPI reduction operator that sums of the first
372      half of the entries and maxes the second half.
373   */
374   *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);
375   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
376 
377   *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR);
378   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
379   *ierr = MPI_Type_commit(&MPIU_2SCALAR);
380   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
381 #if defined(PETSC_USE_64BIT_INDICES) || !defined(MPI_2INT)
382   *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT);
383   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
384   *ierr = MPI_Type_commit(&MPIU_2INT);
385   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
386 #endif
387   *ierr = MPI_Op_create(PetscADMax_Local,1,&PetscADMax_Op);
388   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
389   *ierr = MPI_Op_create(PetscADMin_Local,1,&PetscADMin_Op);
390   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
391   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelCounter,&Petsc_Counter_keyval,(void*)0);
392   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
393   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Outer,&Petsc_InnerComm_keyval,(void*)0);
394   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
395   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Inner,&Petsc_OuterComm_keyval,(void*)0);
396   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
397 
398   /*
399      PetscInitializeFortran() is called twice. Here it initializes
400      PETSC_NULL_CHARACTER_Fortran. Below it initializes the PETSC_VIEWERs.
401      The PETSC_VIEWERs have not been created yet, so they must be initialized
402      below.
403   */
404   PetscInitializeFortran();
405   PETScParseFortranArgs_Private(&PetscGlobalArgc,&PetscGlobalArgs);
406   FIXCHAR(filename,len,t1);
407   *ierr = PetscOptionsInsert(&PetscGlobalArgc,&PetscGlobalArgs,t1);
408   FREECHAR(filename,t1);
409   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database\n");return;}
410   *ierr = PetscOptionsCheckInitial_Private();
411   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
412   *ierr = PetscCitationsInitialize();
413   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscCitationsInitialize()\n");return;}
414 #if defined(PETSC_HAVE_SAWS)
415   *ierr = PetscInitializeSAWs(NULL);
416   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing SAWs\n");return;}
417 #endif
418 #if defined(PETSC_USE_LOG)
419   *ierr = PetscLogBegin_Private();
420   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: intializing logging\n");return;}
421 #endif
422   *ierr = PetscInitialize_DynamicLibraries();
423   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries\n");return;}
424 
425   *ierr = PetscInitializeFortran();
426   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Setting up common block\n");return;}
427 
428   *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);
429   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()\n");return;}
430   *ierr = PetscInfo1(0,"(Fortran):PETSc successfully started: procs %d\n",size);
431   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
432   *ierr = PetscGetHostName(hostname,64);
433   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting hostname\n");return;}
434   *ierr = PetscInfo1(0,"Running on machine: %s\n",hostname);
435   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
436   *ierr = PetscOptionsCheckInitial_Components();
437   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
438 
439   *ierr = PetscThreadCommInitializePackage();
440   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscThreadCommInitialize()\n");return;}
441 
442   PetscThreadLocalRegister((PetscThreadKey*)&petscstack); /* Creates pthread_key */
443 #if defined(PETSC_USE_DEBUG)
444   *ierr = PetscStackCreate();
445   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscStackCreate()\n");return;}
446 #endif
447 
448 #if defined(PETSC_HAVE_CUDA)
449   flg2  = PETSC_TRUE;
450   *ierr = PetscOptionsGetBool(NULL,"-cublas",&flg2,NULL);
451   if (flg2) cublasInit();
452 #endif
453 }
454 
455 PETSC_EXTERN void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr)
456 {
457 #if defined(PETSC_HAVE_SUNMATHPRO)
458   extern void standard_arithmetic();
459   standard_arithmetic();
460 #endif
461   /* was malloced with PetscMallocAlign() so free the same way */
462   *ierr = PetscFreeAlign(PetscGlobalArgs,0,0,0);if (*ierr) {(*PetscErrorPrintf)("PetscFinalize:Freeing args\n");return;}
463 
464   *ierr = PetscFinalize();
465 }
466 
467 void PETSC_STDCALL petscend_(PetscErrorCode *ierr)
468 {
469 #if defined(PETSC_HAVE_SUNMATHPRO)
470   extern void standard_arithmetic();
471   standard_arithmetic();
472 #endif
473 
474   *ierr = PetscEnd();
475 }
476 
477