xref: /petsc/src/sys/objects/ftn-custom/zstart.c (revision 6e111a19f6677190c8cb13236301fcb65e0e3d3b)
1 /*
2   This file contains Fortran stubs for PetscInitialize and Finalize.
3 */
4 
5 /*
6     This is to prevent the Cray T3D version of MPI (University of Edinburgh)
7   from stupidly redefining MPI_INIT(). They put this in to detect errors
8   in C code, but here I do want to be calling the Fortran version from a
9   C subroutine.
10 */
11 #define T3DMPI_FORTRAN
12 #define T3EMPI_FORTRAN
13 
14 #define PETSC_DESIRE_COMPLEX
15 #include <petsc-private/fortranimpl.h>
16 
17 #if defined(PETSC_HAVE_CUSP)
18 #include <cublas.h>
19 #endif
20 #include <petscthreadcomm.h>
21 
22 extern  PetscBool  PetscBeganMPI;
23 
24 extern PetscBool  PetscHMPIWorker;
25 
26 #if defined(PETSC_HAVE_FORTRAN_CAPS)
27 #define petscinitialize_              PETSCINITIALIZE
28 #define petscfinalize_                PETSCFINALIZE
29 #define petscend_                     PETSCEND
30 #define iargc_                        IARGC
31 #define getarg_                       GETARG
32 #define mpi_init_                     MPI_INIT
33 #define petscgetcommoncomm_           PETSCGETCOMMONCOMM
34 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
35 #define petscinitialize_              petscinitialize
36 #define petscfinalize_                petscfinalize
37 #define petscend_                     petscend
38 #define mpi_init_                     mpi_init
39 #define iargc_                        iargc
40 #define getarg_                       getarg
41 #define petscgetcommoncomm_           petscgetcommoncomm
42 #endif
43 
44 #if defined(PETSC_HAVE_NAGF90)
45 #undef iargc_
46 #undef getarg_
47 #define iargc_  f90_unix_MP_iargc
48 #define getarg_ f90_unix_MP_getarg
49 #endif
50 #if defined(PETSC_USE_NARGS) /* Digital Fortran */
51 #undef iargc_
52 #undef getarg_
53 #define iargc_  NARGS
54 #define getarg_ GETARG
55 #elif defined (PETSC_HAVE_PXFGETARG_NEW) /* cray x1 */
56 #undef iargc_
57 #undef getarg_
58 #define iargc_  ipxfargc_
59 #define getarg_ pxfgetarg_
60 #endif
61 #if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HPUX + no underscore */
62 #undef iargc_
63 #undef getarg_
64 #define iargc_   iargc_
65 #define getarg_  getarg_
66 #endif
67 #if defined(PETSC_HAVE_GFORTRAN_IARGC) /* gfortran from gcc4 */
68 #undef iargc_
69 #undef getarg_
70 #define iargc_  _gfortran_iargc
71 #define getarg_ _gfortran_getarg_i4
72 #elif defined(PETSC_HAVE_BGL_IARGC) /* bgl g77 has different external & internal name mangling */
73 #undef iargc_
74 #undef getarg_
75 #define iargc  iargc_
76 #define getarg getarg_
77 #endif
78 
79 /*
80     The extra _ is because the f2c compiler puts an
81   extra _ at the end if the original routine name
82   contained any _.
83 */
84 #if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
85 #undef mpi_init_
86 #define mpi_init_             mpi_init__
87 #endif
88 
89 EXTERN_C_BEGIN
90 extern void PETSC_STDCALL mpi_init_(int*);
91 extern void PETSC_STDCALL petscgetcommoncomm_(PetscMPIInt*);
92 
93 /*
94      Different Fortran compilers handle command lines in different ways
95 */
96 #if defined(PETSC_USE_NARGS)
97 extern short __stdcall NARGS();
98 extern void  __stdcall GETARG(short*,char*,int,short *);
99 
100 #elif defined(PETSC_HAVE_FORTRAN_STDCALL)
101 extern int  PETSC_STDCALL IARGC();
102 extern void PETSC_STDCALL GETARG(int *,char *,int);
103 
104 #elif defined (PETSC_HAVE_PXFGETARG_NEW)
105 extern int  iargc_();
106 extern void getarg_(int*,char*,int*,int*,int);
107 
108 #else
109 extern int  iargc_();
110 extern void getarg_(int*,char*,int);
111 /*
112       The Cray T3D/T3E use the PXFGETARG() function
113 */
114 #if defined(PETSC_HAVE_PXFGETARG)
115 extern void PXFGETARG(int*,_fcd,int*,int*);
116 #endif
117 #endif
118 EXTERN_C_END
119 
120 #if (defined(PETSC_HAVE_COMPLEX) && !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)) || defined(PETSC_USE_REAL___FLOAT128)
121 extern MPI_Op MPIU_SUM;
122 EXTERN_C_BEGIN
123 extern void  MPIAPI PetscSum_Local(void*,void *,PetscMPIInt *,MPI_Datatype *);
124 EXTERN_C_END
125 #endif
126 #if defined(PETSC_USE_REAL___FLOAT128)
127 EXTERN_C_BEGIN
128 void  MPIAPI PetscSum_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
129 void  MPIAPI PetscMax_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
130 void  MPIAPI PetscMin_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
131 EXTERN_C_END
132 #endif
133 
134 extern  MPI_Op PetscMaxSum_Op;
135 
136 EXTERN_C_BEGIN
137 extern void  MPIAPI PetscMaxSum_Local(void*,void *,PetscMPIInt *,MPI_Datatype *);
138 extern PetscMPIInt  MPIAPI Petsc_DelCounter(MPI_Comm,PetscMPIInt,void*,void*);
139 extern PetscMPIInt  MPIAPI Petsc_DelComm(MPI_Comm,PetscMPIInt,void*,void*);
140 EXTERN_C_END
141 
142 extern PetscErrorCode  PetscOptionsCheckInitial_Private(void);
143 extern PetscErrorCode  PetscOptionsCheckInitial_Components(void);
144 extern PetscErrorCode  PetscInitialize_DynamicLibraries(void);
145 #if defined(PETSC_USE_LOG)
146 extern PetscErrorCode  PetscLogBegin_Private(void);
147 #endif
148 extern PetscErrorCode  PetscMallocAlign(size_t,int,const char[],const char[],const char[],void**);
149 extern PetscErrorCode  PetscFreeAlign(void*,int,const char[],const char[],const char[]);
150 extern int PetscGlobalArgc;
151 extern char **PetscGlobalArgs;
152 
153 /*
154     Reads in Fortran command line arguments and sends them to
155   all processors and adds them to Options database.
156 */
157 
PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv)
{
#if defined (PETSC_USE_NARGS)
  short          i,flg;
#else
  int            i;
#endif
  PetscErrorCode ierr;
  int            warg = 256;     /* fixed width (in chars) reserved for each argument string */
  PetscMPIInt    rank;
  char           *p;

  /* Only rank 0 can see the Fortran command line; count the arguments there
     and broadcast the count to everyone else */
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (!rank) {
#if defined (PETSC_HAVE_IARG_COUNT_PROGNAME)
    *argc = iargc_();
#else
    /* most compilers do not count the program name for argv[0] */
    *argc = 1 + iargc_();
#endif
  }
  ierr = MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);CHKERRQ(ierr);

  /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */
  /* One allocation holds both the (*argc+1)-entry pointer table and the
     packed argument strings (warg chars each) that follow it */
  ierr = PetscMallocAlign((*argc+1)*(warg*sizeof(char)+sizeof(char*)),0,0,0,0,(void**)argv);CHKERRQ(ierr);
  (*argv)[0] = (char*)(*argv + *argc + 1);   /* strings start right after the pointer table */

  if (!rank) {
    ierr = PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));CHKERRQ(ierr);
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;      /* lay out the next slot before filling this one */
#if defined (PETSC_HAVE_PXFGETARG_NEW)
      {char *tmp = (*argv)[i];
      int ilen;
      getarg_(&i,tmp,&ilen,&ierr,warg);CHKERRQ(ierr);
      tmp[ilen] = 0;                         /* PXF variant returns a length, not a NUL-terminated string */
      }
#elif defined (PETSC_USE_NARGS)
      GETARG(&i,(*argv)[i],warg,&flg);
#else
      /*
      Because the stupid #defines above define all kinds of things to getarg_ we cannot do this test
      #elif defined(PETSC_HAVE_GETARG)
      getarg_(&i,(*argv)[i],warg);
      #else
         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot get Fortran command line arguments");
      */
      getarg_(&i,(*argv)[i],warg);
#endif
      /* zero out garbage at end of each argument (Fortran pads with blanks) */
      p = (*argv)[i] + warg-1;
      while (p > (*argv)[i]) {
        if (*p == ' ') *p = 0;
        p--;
      }
    }
  }
  /* ship the packed strings to the other ranks in one broadcast */
  ierr = MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr);
  if (rank) {
    /* non-root ranks rebuild the pointer table over the received contiguous buffer */
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;
    }
  }
  return 0;
}
223 
224 /* -----------------------------------------------------------------------------------------------*/
225 
226 extern  MPI_Op PetscADMax_Op;
227 extern  MPI_Op PetscADMin_Op;
228 EXTERN_C_BEGIN
229 extern void  MPIAPI PetscADMax_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
230 extern void  MPIAPI PetscADMin_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
231 EXTERN_C_END
232 
233 
234 EXTERN_C_BEGIN
235 /*
236     petscinitialize - Version called from Fortran.
237 
238     Notes:
239       Since this is called from Fortran it does not return error codes
240 
241 */
242 void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
243 {
244 #if defined (PETSC_USE_NARGS)
245   short       flg,i;
246 #else
247   int         i;
248 #if !defined(PETSC_HAVE_PXFGETARG_NEW) && !defined (PETSC_HAVE_PXFGETARG_NEW)
249   int         j;
250 #endif
251 #endif
252   int         flag;
253   PetscMPIInt size;
254   char        *t1,name[256],hostname[64];
255   PetscMPIInt f_petsc_comm_world;
256   PetscInt    nodesize;
257   PetscBool   flg;
258 
259   *ierr = PetscMemzero(name,256); if (*ierr) return;
260   if (PetscInitializeCalled) {*ierr = 0; return;}
261 
262   /* this must be initialized in a routine, not as a constant declaration*/
263   PETSC_STDOUT = stdout;
264   PETSC_STDERR = stderr;
265 
266   *ierr = PetscOptionsCreate();
267   if (*ierr) return;
268   i = 0;
269 #if defined (PETSC_HAVE_PXFGETARG_NEW)
270   { int ilen,sierr;
271     getarg_(&i,name,&ilen,&sierr,256);
272     if (sierr) {
273       PetscStrncpy(name,"Unknown Name",256);
274     } else {
275       name[ilen] = 0;
276     }
277   }
278 #elif defined (PETSC_USE_NARGS)
279   GETARG(&i,name,256,&flg);
280 #else
281   getarg_(&i,name,256);
282   /* Eliminate spaces at the end of the string */
283   for (j=254; j>=0; j--) {
284     if (name[j] != ' ') {
285       name[j+1] = 0;
286       break;
287     }
288   }
289   if (j<0) {
290     PetscStrncpy(name,"Unknown Name",256);
291   }
292 #endif
293   *ierr = PetscSetProgramName(name);
294   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()\n");return;}
295 
296   /* check if PETSC_COMM_WORLD is initialized by the user in fortran */
297   petscgetcommoncomm_(&f_petsc_comm_world);
298   MPI_Initialized(&flag);
299   if (!flag) {
300     PetscMPIInt mierr;
301 
302     if (f_petsc_comm_world) {(*PetscErrorPrintf)("You cannot set PETSC_COMM_WORLD if you have not initialized MPI first\n");return;}
303     /* MPI requires calling Fortran mpi_init() if main program is Fortran */
304     mpi_init_(&mierr);
305     if (mierr) {
306       *ierr = mierr;
307       (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()\n");
308       return;
309     }
310     PetscBeganMPI    = PETSC_TRUE;
311   }
312   if (f_petsc_comm_world) { /* User called MPI_INITIALIZE() and changed PETSC_COMM_WORLD */
313     PETSC_COMM_WORLD = MPI_Comm_f2c(*(MPI_Fint *)&f_petsc_comm_world);
314   } else {
315     PETSC_COMM_WORLD = MPI_COMM_WORLD;
316   }
317   PetscInitializeCalled = PETSC_TRUE;
318 
319   *ierr = PetscErrorPrintfInitialize();
320   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()\n");return;}
321   *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
322   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank\n");return;}
323   *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);
324   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize\n");return;}
325 
326   MPIU_BOOL = MPI_INT;
327   MPIU_ENUM = MPI_INT;
328 
329 #if defined(PETSC_HAVE_COMPLEX)
330   /*
331      Initialized the global variable; this is because with
332      shared libraries the constructors for global variables
333      are not called; at least on IRIX.
334   */
335   {
336 #if defined(PETSC_CLANGUAGE_CXX)
337     PetscComplex ic(0.0,1.0);
338     PETSC_i = ic;
339 #else
340     PETSC_i = _Complex_I;
341 #endif
342   }
343 
344 #if !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)
345   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU_C_DOUBLE_COMPLEX);
346   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
347   *ierr = MPI_Type_commit(&MPIU_C_DOUBLE_COMPLEX);
348   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
349   *ierr = MPI_Type_contiguous(2,MPI_FLOAT,&MPIU_C_COMPLEX);
350   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
351   *ierr = MPI_Type_commit(&MPIU_C_COMPLEX);
352   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
353   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
354   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
355 #endif
356 
357 #endif
358 
359 #if defined(PETSC_USE_REAL___FLOAT128)
360   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___FLOAT128);
361   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
362   *ierr = MPI_Type_commit(&MPIU___FLOAT128);
363   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
364 #if defined(PETSC_HAVE_COMPLEX)
365   *ierr = MPI_Type_contiguous(4,MPI_DOUBLE,&MPIU___COMPLEX128);
366   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
367   *ierr = MPI_Type_commit(&MPIU___COMPLEX128);
368   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
369 #endif
370   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
371   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
372   *ierr = MPI_Op_create(PetscMax_Local,1,&MPIU_MAX);
373   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
374   *ierr = MPI_Op_create(PetscMin_Local,1,&MPIU_MIN);
375   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
376 #endif
377 
378   /*
379        Create the PETSc MPI reduction operator that sums of the first
380      half of the entries and maxes the second half.
381   */
382   *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);
383   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
384 
385   *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR);
386   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
387   *ierr = MPI_Type_commit(&MPIU_2SCALAR);
388   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
389 #if defined(PETSC_USE_64BIT_INDICES) || !defined(MPI_2INT)
390   *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT);
391   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
392   *ierr = MPI_Type_commit(&MPIU_2INT);
393   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
394 #endif
395   *ierr = MPI_Op_create(PetscADMax_Local,1,&PetscADMax_Op);
396   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
397   *ierr = MPI_Op_create(PetscADMin_Local,1,&PetscADMin_Op);
398   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
399   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelCounter,&Petsc_Counter_keyval,(void*)0);
400   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
401   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm,&Petsc_InnerComm_keyval,(void*)0);
402   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
403   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm,&Petsc_OuterComm_keyval,(void*)0);
404   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
405 
406   /*
407      PetscInitializeFortran() is called twice. Here it initializes
408      PETSC_NULL_CHARACTER_Fortran. Below it initializes the PETSC_VIEWERs.
409      The PETSC_VIEWERs have not been created yet, so they must be initialized
410      below.
411   */
412   PetscInitializeFortran();
413   PETScParseFortranArgs_Private(&PetscGlobalArgc,&PetscGlobalArgs);
414   FIXCHAR(filename,len,t1);
415   *ierr = PetscOptionsInsert(&PetscGlobalArgc,&PetscGlobalArgs,t1);
416   FREECHAR(filename,t1);
417   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database\n");return;}
418   *ierr = PetscOptionsCheckInitial_Private();
419   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
420 #if defined (PETSC_USE_LOG)
421   *ierr = PetscLogBegin_Private();
422   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: intializing logging\n");return;}
423 #endif
424   *ierr = PetscInitialize_DynamicLibraries();
425   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries\n");return;}
426 
427   *ierr = PetscInitializeFortran();
428   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Setting up common block\n");return;}
429 
430   *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);
431   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()\n");return;}
432   *ierr = PetscInfo1(0,"(Fortran):PETSc successfully started: procs %d\n",size);
433   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
434   *ierr = PetscGetHostName(hostname,64);
435   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting hostname\n");return;}
436   *ierr = PetscInfo1(0,"Running on machine: %s\n",hostname);
437   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
438   *ierr = PetscOptionsCheckInitial_Components();
439   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
440 
441   *ierr = PetscThreadCommInitializePackage(PETSC_NULL);
442   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscThreadCommInitialize()\n");return;}
443 
444 #if defined(PETSC_USE_DEBUG)
445   PetscThreadLocalRegister(&petscstack); /* Creates petscstack_key if needed */
446   *ierr = PetscStackCreate();
447   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscStackCreate()\n");return;}
448 #endif
449 
450   *ierr = PetscOptionsGetInt(PETSC_NULL,"-hmpi_spawn_size",&nodesize,&flg);
451   if (flg) {
452 #if defined(PETSC_HAVE_MPI_COMM_SPAWN)
453     *ierr = PetscHMPISpawn((PetscMPIInt) nodesize);/* worker nodes never return from here; they go directly to PetscEnd() */
454     if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPIS-pawn()\n");return;}
455 #else
456     *ierr = PETSC_ERR_SUP;
457     (*PetscErrorPrintf)("PetscInitialize: PETSc built without MPI 2 (MPI_Comm_spawn) support, use -hmpi_merge_size instead");
458     return;
459 #endif
460   } else {
461     *ierr = PetscOptionsGetInt(PETSC_NULL,"-hmpi_merge_size",&nodesize,&flg);
462     if (flg) {
463       *ierr = PetscHMPIMerge((PetscMPIInt) nodesize,PETSC_NULL,PETSC_NULL);
464       if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPIMerge()\n");return;}
465       if (PetscHMPIWorker) { /* if worker then never enter user code */
466         PetscInitializeCalled = PETSC_TRUE;
467         *ierr = PetscEnd();
468       }
469     }
470   }
471 
472 #if defined(PETSC_HAVE_CUDA)
473   cublasInit();
474 #endif
475 }
476 
477 void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr)
478 {
479 #if defined(PETSC_HAVE_SUNMATHPRO)
480   extern void standard_arithmetic();
481   standard_arithmetic();
482 #endif
483   /* was malloced with PetscMallocAlign() so free the same way */
484   *ierr = PetscFreeAlign(PetscGlobalArgs,0,0,0,0);if (*ierr) {(*PetscErrorPrintf)("PetscFinalize:Freeing args\n");return;}
485 
486   *ierr = PetscFinalize();
487 }
488 
489 void PETSC_STDCALL petscend_(PetscErrorCode *ierr)
490 {
491 #if defined(PETSC_HAVE_SUNMATHPRO)
492   extern void standard_arithmetic();
493   standard_arithmetic();
494 #endif
495 
496   *ierr = PetscEnd();
497 }
498 
499 
500 EXTERN_C_END
501