xref: /petsc/src/sys/objects/ftn-custom/zstart.c (revision 74b43855c84902f9769613ba74e50feff6557f3a)
1 /*
2   This file contains Fortran stubs for PetscInitialize and Finalize.
3 */
4 
5 /*
6     This is to prevent the Cray T3D version of MPI (University of Edinburgh)
7   from stupidly redefining MPI_INIT(). They put this in to detect errors
8   in C code, but here I do want to be calling the Fortran version from a
9   C subroutine.
10 */
11 #define T3DMPI_FORTRAN
12 #define T3EMPI_FORTRAN
13 
14 #include <petsc-private/fortranimpl.h>
15 
16 #if defined(PETSC_HAVE_CUSP)
17 #include <cublas.h>
18 #endif
19 #include <petscthreadcomm.h>
20 
21 extern  PetscBool  PetscBeganMPI;
22 
23 extern PetscBool  PetscHMPIWorker;
24 
25 #ifdef PETSC_HAVE_FORTRAN_CAPS
26 #define petscinitialize_              PETSCINITIALIZE
27 #define petscfinalize_                PETSCFINALIZE
28 #define petscend_                     PETSCEND
29 #define iargc_                        IARGC
30 #define getarg_                       GETARG
31 #define mpi_init_                     MPI_INIT
32 #define petscgetcommoncomm_           PETSCGETCOMMONCOMM
33 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
34 #define petscinitialize_              petscinitialize
35 #define petscfinalize_                petscfinalize
36 #define petscend_                     petscend
37 #define mpi_init_                     mpi_init
38 #define iargc_                        iargc
39 #define getarg_                       getarg
40 #define petscgetcommoncomm_           petscgetcommoncomm
41 #endif
42 
43 #if defined(PETSC_HAVE_NAGF90)
44 #undef iargc_
45 #undef getarg_
46 #define iargc_  f90_unix_MP_iargc
47 #define getarg_ f90_unix_MP_getarg
48 #endif
49 #if defined(PETSC_USE_NARGS) /* Digital Fortran */
50 #undef iargc_
51 #undef getarg_
52 #define iargc_  NARGS
53 #define getarg_ GETARG
54 #elif defined (PETSC_HAVE_PXFGETARG_NEW) /* cray x1 */
55 #undef iargc_
56 #undef getarg_
57 #define iargc_  ipxfargc_
58 #define getarg_ pxfgetarg_
59 #endif
60 #if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HPUX + no underscore */
61 #undef iargc_
62 #undef getarg_
63 #define iargc_   iargc_
64 #define getarg_  getarg_
65 #endif
66 #if defined(PETSC_HAVE_GFORTRAN_IARGC) /* gfortran from gcc4 */
67 #undef iargc_
68 #undef getarg_
69 #define iargc_  _gfortran_iargc
70 #define getarg_ _gfortran_getarg_i4
71 #elif defined(PETSC_HAVE_BGL_IARGC) /* bgl g77 has different external & internal name mangling */
72 #undef iargc_
73 #undef getarg_
74 #define iargc  iargc_
75 #define getarg getarg_
76 #endif
77 
78 /*
79     The extra _ is because the f2c compiler puts an
80   extra _ at the end if the original routine name
81   contained any _.
82 */
83 #if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
84 #undef mpi_init_
85 #define mpi_init_             mpi_init__
86 #endif
87 
88 EXTERN_C_BEGIN
89 extern void PETSC_STDCALL mpi_init_(int*);
90 extern void PETSC_STDCALL petscgetcommoncomm_(PetscMPIInt*);
91 
92 /*
93      Different Fortran compilers handle command lines in different ways
94 */
95 #if defined(PETSC_USE_NARGS)
96 extern short __stdcall NARGS();
97 extern void  __stdcall GETARG(short*,char*,int,short *);
98 
99 #elif defined(PETSC_HAVE_FORTRAN_STDCALL)
100 extern int  PETSC_STDCALL IARGC();
101 extern void PETSC_STDCALL GETARG(int *,char *,int);
102 
103 #elif defined (PETSC_HAVE_PXFGETARG_NEW)
104 extern int  iargc_();
105 extern void getarg_(int*,char*,int*,int*,int);
106 
107 #else
108 extern int  iargc_();
109 extern void getarg_(int*,char*,int);
110 /*
111       The Cray T3D/T3E use the PXFGETARG() function
112 */
113 #if defined(PETSC_HAVE_PXFGETARG)
114 extern void PXFGETARG(int*,_fcd,int*,int*);
115 #endif
116 #endif
117 EXTERN_C_END
118 
119 #if (defined(PETSC_USE_COMPLEX) && !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)) || defined(PETSC_USE_REAL___FLOAT128)
120 extern MPI_Op MPIU_SUM;
121 EXTERN_C_BEGIN
122 extern void  MPIAPI PetscSum_Local(void*,void *,PetscMPIInt *,MPI_Datatype *);
123 EXTERN_C_END
124 #endif
125 #if defined(PETSC_USE_REAL___FLOAT128)
126 EXTERN_C_BEGIN
127 void  MPIAPI PetscSum_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
128 void  MPIAPI PetscMax_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
129 void  MPIAPI PetscMin_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
130 EXTERN_C_END
131 #endif
132 
133 extern  MPI_Op PetscMaxSum_Op;
134 
135 EXTERN_C_BEGIN
136 extern void  MPIAPI PetscMaxSum_Local(void*,void *,PetscMPIInt *,MPI_Datatype *);
137 extern PetscMPIInt  MPIAPI Petsc_DelCounter(MPI_Comm,PetscMPIInt,void*,void*);
138 extern PetscMPIInt  MPIAPI Petsc_DelComm(MPI_Comm,PetscMPIInt,void*,void*);
139 EXTERN_C_END
140 
141 extern PetscErrorCode  PetscOptionsCheckInitial_Private(void);
142 extern PetscErrorCode  PetscOptionsCheckInitial_Components(void);
143 extern PetscErrorCode  PetscInitialize_DynamicLibraries(void);
144 #if defined(PETSC_USE_LOG)
145 extern PetscErrorCode  PetscLogBegin_Private(void);
146 #endif
147 extern PetscErrorCode  PetscMallocAlign(size_t,int,const char[],const char[],const char[],void**);
148 extern PetscErrorCode  PetscFreeAlign(void*,int,const char[],const char[],const char[]);
149 extern int PetscGlobalArgc;
150 extern char **PetscGlobalArgs;
151 
152 /*
153     Reads in Fortran command line arguments and sends them to
154   all processors and adds them to Options database.
155 */
156 
157 PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv)
158 {
159 #if defined (PETSC_USE_NARGS)
160   short          i,flg;
161 #else
162   int            i;
163 #endif
164   PetscErrorCode ierr;
165   int            warg = 256;
166   PetscMPIInt    rank;
167   char           *p;
168 
169   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
170   if (!rank) {
171 #if defined (PETSC_HAVE_IARG_COUNT_PROGNAME)
172     *argc = iargc_();
173 #else
174     /* most compilers do not count the program name for argv[0] */
175     *argc = 1 + iargc_();
176 #endif
177   }
178   ierr = MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);CHKERRQ(ierr);
179 
180   /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */
181   ierr = PetscMallocAlign((*argc+1)*(warg*sizeof(char)+sizeof(char*)),0,0,0,0,(void**)argv);CHKERRQ(ierr);
182   (*argv)[0] = (char*)(*argv + *argc + 1);
183 
184   if (!rank) {
185     ierr = PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));CHKERRQ(ierr);
186     for (i=0; i<*argc; i++) {
187       (*argv)[i+1] = (*argv)[i] + warg;
188 #if defined (PETSC_HAVE_PXFGETARG_NEW)
189       {char *tmp = (*argv)[i];
190       int ilen;
191       getarg_(&i,tmp,&ilen,&ierr,warg);CHKERRQ(ierr);
192       tmp[ilen] = 0;
193       }
194 #elif defined (PETSC_USE_NARGS)
195       GETARG(&i,(*argv)[i],warg,&flg);
196 #else
197       /*
198       Because the stupid #defines above define all kinds of things to getarg_ we cannot do this test
199       #elif defined(PETSC_HAVE_GETARG)
200       getarg_(&i,(*argv)[i],warg);
201       #else
202          SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot get Fortran command line arguments");
203       */
204       getarg_(&i,(*argv)[i],warg);
205 #endif
206       /* zero out garbage at end of each argument */
207       p = (*argv)[i] + warg-1;
208       while (p > (*argv)[i]) {
209         if (*p == ' ') *p = 0;
210         p--;
211       }
212     }
213   }
214   ierr = MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr);
215   if (rank) {
216     for (i=0; i<*argc; i++) {
217       (*argv)[i+1] = (*argv)[i] + warg;
218     }
219   }
220   return 0;
221 }
222 
223 /* -----------------------------------------------------------------------------------------------*/
224 
225 extern  MPI_Op PetscADMax_Op;
226 extern  MPI_Op PetscADMin_Op;
227 EXTERN_C_BEGIN
228 extern void  MPIAPI PetscADMax_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
229 extern void  MPIAPI PetscADMin_Local(void *,void *,PetscMPIInt *,MPI_Datatype *);
230 EXTERN_C_END
231 
232 
233 EXTERN_C_BEGIN
234 /*
235     petscinitialize - Version called from Fortran.
236 
237     Notes:
238       Since this is called from Fortran it does not return error codes
239 
240 */
241 void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
242 {
243 #if defined (PETSC_USE_NARGS)
244   short       flg,i;
245 #else
246   int         i;
247 #if !defined(PETSC_HAVE_PXFGETARG_NEW) && !defined (PETSC_HAVE_PXFGETARG_NEW)
248   int         j;
249 #endif
250 #endif
251   int         flag;
252   PetscMPIInt size;
253   char        *t1,name[256],hostname[64];
254   PetscMPIInt f_petsc_comm_world;
255   PetscInt    nodesize;
256   PetscBool   flg;
257 
258   *ierr = PetscMemzero(name,256); if (*ierr) return;
259   if (PetscInitializeCalled) {*ierr = 0; return;}
260 
261   /* this must be initialized in a routine, not as a constant declaration*/
262   PETSC_STDOUT = stdout;
263   PETSC_STDERR = stderr;
264 
265   *ierr = PetscOptionsCreate();
266   if (*ierr) return;
267   i = 0;
268 #if defined (PETSC_HAVE_PXFGETARG_NEW)
269   { int ilen,sierr;
270     getarg_(&i,name,&ilen,&sierr,256);
271     if (sierr) {
272       PetscStrncpy(name,"Unknown Name",256);
273     } else {
274       name[ilen] = 0;
275     }
276   }
277 #elif defined (PETSC_USE_NARGS)
278   GETARG(&i,name,256,&flg);
279 #else
280   getarg_(&i,name,256);
281   /* Eliminate spaces at the end of the string */
282   for (j=254; j>=0; j--) {
283     if (name[j] != ' ') {
284       name[j+1] = 0;
285       break;
286     }
287   }
288   if (j<0) {
289     PetscStrncpy(name,"Unknown Name",256);
290   }
291 #endif
292   *ierr = PetscSetProgramName(name);
293   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()\n");return;}
294 
295   /* check if PETSC_COMM_WORLD is initialized by the user in fortran */
296   petscgetcommoncomm_(&f_petsc_comm_world);
297   MPI_Initialized(&flag);
298   if (!flag) {
299     PetscMPIInt mierr;
300 
301     if (f_petsc_comm_world) {(*PetscErrorPrintf)("You cannot set PETSC_COMM_WORLD if you have not initialized MPI first\n");return;}
302     /* MPI requires calling Fortran mpi_init() if main program is Fortran */
303     mpi_init_(&mierr);
304     if (mierr) {
305       *ierr = mierr;
306       (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()\n");
307       return;
308     }
309     PetscBeganMPI    = PETSC_TRUE;
310   }
311   if (f_petsc_comm_world) { /* User called MPI_INITIALIZE() and changed PETSC_COMM_WORLD */
312     PETSC_COMM_WORLD = MPI_Comm_f2c(*(MPI_Fint *)&f_petsc_comm_world);
313   } else {
314     PETSC_COMM_WORLD = MPI_COMM_WORLD;
315   }
316   PetscInitializeCalled = PETSC_TRUE;
317 
318   *ierr = PetscErrorPrintfInitialize();
319   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()\n");return;}
320   *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
321   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank\n");return;}
322   *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);
323   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize\n");return;}
324 #if defined(PETSC_USE_COMPLEX)
325   /*
326      Initialized the global variable; this is because with
327      shared libraries the constructors for global variables
328      are not called; at least on IRIX.
329   */
330   {
331 #if defined(PETSC_CLANGUAGE_CXX)
332     PetscScalar ic(0.0,1.0);
333     PETSC_i = ic;
334 #else
335     PetscScalar ic;
336     ic = 1.0*I;
337     PETSC_i = ic;
338 #endif
339   }
340 
341 #if !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)
342   *ierr = MPI_Type_contiguous(2,MPIU_REAL,&MPIU_C_DOUBLE_COMPLEX);
343   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
344   *ierr = MPI_Type_commit(&MPIU_C_DOUBLE_COMPLEX);
345   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
346   *ierr = MPI_Type_contiguous(2,MPI_FLOAT,&MPIU_C_COMPLEX);
347   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
348   *ierr = MPI_Type_commit(&MPIU_C_COMPLEX);
349   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
350   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
351   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
352 #endif
353 
354 #endif
355 
356 #if defined(PETSC_USE_REAL___FLOAT128)
357   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___FLOAT128);
358   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
359   *ierr = MPI_Type_commit(&MPIU___FLOAT128);
360   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
361 #if defined(PETSC_USE_COMPLEX)
362   *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___COMPLEX128);
363   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
364   *ierr = MPI_Type_commit(&MPIU___COMPLEX128);
365   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
366 #endif
367   *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
368   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
369   *ierr = MPI_Op_create(PetscMax_Local,1,&MPIU_MAX);
370   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
371   *ierr = MPI_Op_create(PetscMin_Local,1,&MPIU_MIN);
372   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
373 #endif
374 
375   /*
376        Create the PETSc MPI reduction operator that sums of the first
377      half of the entries and maxes the second half.
378   */
379   *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);
380   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
381 
382   *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR);
383   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
384   *ierr = MPI_Type_commit(&MPIU_2SCALAR);
385   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
386   *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT);
387   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
388   *ierr = MPI_Type_commit(&MPIU_2INT);
389   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
390   *ierr = MPI_Op_create(PetscADMax_Local,1,&PetscADMax_Op);
391   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
392   *ierr = MPI_Op_create(PetscADMin_Local,1,&PetscADMin_Op);
393   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
394   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelCounter,&Petsc_Counter_keyval,(void*)0);
395   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
396   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm,&Petsc_InnerComm_keyval,(void*)0);
397   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
398   *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm,&Petsc_OuterComm_keyval,(void*)0);
399   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
400 
401   /*
402      PetscInitializeFortran() is called twice. Here it initializes
403      PETSC_NULL_CHARACTER_Fortran. Below it initializes the PETSC_VIEWERs.
404      The PETSC_VIEWERs have not been created yet, so they must be initialized
405      below.
406   */
407   PetscInitializeFortran();
408   PETScParseFortranArgs_Private(&PetscGlobalArgc,&PetscGlobalArgs);
409   FIXCHAR(filename,len,t1);
410   *ierr = PetscOptionsInsert(&PetscGlobalArgc,&PetscGlobalArgs,t1);
411   FREECHAR(filename,t1);
412   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database\n");return;}
413   *ierr = PetscOptionsCheckInitial_Private();
414   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
415 #if defined (PETSC_USE_LOG)
416   *ierr = PetscLogBegin_Private();
417   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: intializing logging\n");return;}
418 #endif
419   *ierr = PetscInitialize_DynamicLibraries();
420   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries\n");return;}
421 
422   *ierr = PetscInitializeFortran();
423   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Setting up common block\n");return;}
424 
425   *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);
426   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()\n");return;}
427   *ierr = PetscInfo1(0,"(Fortran):PETSc successfully started: procs %d\n",size);
428   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
429   *ierr = PetscGetHostName(hostname,64);
430   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting hostname\n");return;}
431   *ierr = PetscInfo1(0,"Running on machine: %s\n",hostname);
432   if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
433   *ierr = PetscOptionsCheckInitial_Components();
434   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
435 
436   *ierr = PetscThreadCommInitializePackage(PETSC_NULL);
437   if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscThreadCommInitialize()\n");return;}
438 
439   *ierr = PetscOptionsGetInt(PETSC_NULL,"-hmpi_spawn_size",&nodesize,&flg);
440   if (flg) {
441 #if defined(PETSC_HAVE_MPI_COMM_SPAWN)
442     *ierr = PetscHMPISpawn((PetscMPIInt) nodesize);/* worker nodes never return from here; they go directly to PetscEnd() */
443     if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPIS-pawn()\n");return;}
444 #else
445     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"PETSc built without MPI 2 (MPI_Comm_spawn) support, use -hmpi_merge_size instead");
446 #endif
447   } else {
448     *ierr = PetscOptionsGetInt(PETSC_NULL,"-hmpi_merge_size",&nodesize,&flg);
449     if (flg) {
450       *ierr = PetscHMPIMerge((PetscMPIInt) nodesize,PETSC_NULL,PETSC_NULL);
451       if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPIMerge()\n");return;}
452       if (PetscHMPIWorker) { /* if worker then never enter user code */
453         PetscInitializeCalled = PETSC_TRUE;
454         *ierr = PetscEnd();
455       }
456     }
457   }
458 
459 #if defined(PETSC_HAVE_CUDA)
460   cublasInit();
461 #endif
462 }
463 
464 void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr)
465 {
466 #if defined(PETSC_HAVE_SUNMATHPRO)
467   extern void standard_arithmetic();
468   standard_arithmetic();
469 #endif
470   /* was malloced with PetscMallocAlign() so free the same way */
471   *ierr = PetscFreeAlign(PetscGlobalArgs,0,0,0,0);if (*ierr) {(*PetscErrorPrintf)("PetscFinalize:Freeing args\n");return;}
472 
473   *ierr = PetscFinalize();
474 }
475 
/*
    petscend - Version of PetscEnd() called from Fortran.

    Terminates the program; the error status of PetscEnd() is returned
    through ierr (though PetscEnd() normally does not return).
*/
void PETSC_STDCALL petscend_(PetscErrorCode *ierr)
{
#if defined(PETSC_HAVE_SUNMATHPRO)
  /* restore standard IEEE arithmetic modes changed by the Sun math library */
  extern void standard_arithmetic();
  standard_arithmetic();
#endif

  *ierr = PetscEnd();
}
485 
486 
487 EXTERN_C_END
488