/*
  This file contains Fortran stubs for PetscInitialize and Finalize.
*/

/*
  This is to prevent the Cray T3D version of MPI (University of Edinburgh)
  from stupidly redefining MPI_INIT(). They put this in to detect errors
  in C code, but here I do want to be calling the Fortran version from a
  C subroutine.
*/
#define T3DMPI_FORTRAN
#define T3EMPI_FORTRAN

#define PETSC_DESIRE_COMPLEX
#include <petsc-private/fortranimpl.h>

#if defined(PETSC_HAVE_CUSP)
#include <cublas.h>
#endif
#include <petscthreadcomm.h>

extern PetscBool PetscBeganMPI;
extern PetscBool PetscHMPIWorker;

#if defined(PETSC_HAVE_FORTRAN_CAPS)
#define petscinitialize_    PETSCINITIALIZE
#define petscfinalize_      PETSCFINALIZE
#define petscend_           PETSCEND
#define iargc_              IARGC
#define getarg_             GETARG
#define mpi_init_           MPI_INIT
#define petscgetcommoncomm_ PETSCGETCOMMONCOMM
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define petscinitialize_    petscinitialize
#define petscfinalize_      petscfinalize
#define petscend_           petscend
#define mpi_init_           mpi_init
#define iargc_              iargc
#define getarg_             getarg
#define petscgetcommoncomm_ petscgetcommoncomm
#endif

#if defined(PETSC_HAVE_NAGF90)
#undef iargc_
#undef getarg_
#define iargc_  f90_unix_MP_iargc
#define getarg_ f90_unix_MP_getarg
#endif
#if defined(PETSC_USE_NARGS) /* Digital Fortran */
#undef iargc_
#undef getarg_
#define iargc_  NARGS
#define getarg_ GETARG
#elif defined(PETSC_HAVE_PXFGETARG_NEW) /* Cray X1 */
#undef iargc_
#undef getarg_
#define iargc_  ipxfargc_
#define getarg_ pxfgetarg_
#endif
#if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HP-UX + no underscore */
#undef iargc_
#undef getarg_
#define iargc_  iargc_
#define getarg_ getarg_
#endif
#if defined(PETSC_HAVE_GFORTRAN_IARGC) /* gfortran from gcc4 */
#undef iargc_
#undef getarg_
#define iargc_  _gfortran_iargc
#define getarg_ _gfortran_getarg_i4
#elif defined(PETSC_HAVE_BGL_IARGC) /* BG/L g77 has different external & internal name mangling */
#undef iargc_
#undef getarg_
#define iargc  iargc_
#define getarg getarg_
#endif

/*
  The extra _ is because the f2c compiler puts an
  extra _ at the end if the original routine name
  contained any _.
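  For example, under this convention a Fortran routine named mpi_init
  (which contains an underscore) is referenced externally as mpi_init__;
  that is exactly the remapping the block below performs.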
*/
#if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
#undef mpi_init_
#define mpi_init_ mpi_init__
#endif

PETSC_EXTERN_C void PETSC_STDCALL mpi_init_(int*);
PETSC_EXTERN_C void PETSC_STDCALL petscgetcommoncomm_(PetscMPIInt*);

/*
  Different Fortran compilers handle command lines in different ways
*/
#if defined(PETSC_USE_NARGS)
PETSC_EXTERN_C short __stdcall NARGS();
PETSC_EXTERN_C void  __stdcall GETARG(short*,char*,int,short*);

#elif defined(PETSC_HAVE_FORTRAN_STDCALL)
PETSC_EXTERN_C int  PETSC_STDCALL IARGC();
PETSC_EXTERN_C void PETSC_STDCALL GETARG(int*,char*,int);

#elif defined(PETSC_HAVE_PXFGETARG_NEW)
PETSC_EXTERN_C int  iargc_();
PETSC_EXTERN_C void getarg_(int*,char*,int*,int*,int);

#else
PETSC_EXTERN_C int  iargc_();
PETSC_EXTERN_C void getarg_(int*,char*,int);
/*
  The Cray T3D/T3E use the PXFGETARG() function
*/
#if defined(PETSC_HAVE_PXFGETARG)
PETSC_EXTERN_C void PXFGETARG(int*,_fcd,int*,int*);
#endif
#endif

#if (defined(PETSC_HAVE_COMPLEX) && !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)) || defined(PETSC_USE_REAL___FLOAT128)
extern MPI_Op MPIU_SUM;

PETSC_EXTERN_C void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
#endif
#if defined(PETSC_USE_REAL___FLOAT128)
PETSC_EXTERN_C void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN_C void MPIAPI PetscMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN_C void MPIAPI PetscMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
#endif

extern MPI_Op PetscMaxSum_Op;

PETSC_EXTERN_C void MPIAPI PetscMaxSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN_C PetscMPIInt MPIAPI Petsc_DelCounter(MPI_Comm,PetscMPIInt,void*,void*);
PETSC_EXTERN_C PetscMPIInt MPIAPI Petsc_DelComm(MPI_Comm,PetscMPIInt,void*,void*);

extern PetscErrorCode PetscOptionsCheckInitial_Private(void);
extern PetscErrorCode PetscOptionsCheckInitial_Components(void);
extern PetscErrorCode PetscInitialize_DynamicLibraries(void);
#if defined(PETSC_USE_LOG)
extern PetscErrorCode PetscLogBegin_Private(void);
#endif
extern PetscErrorCode PetscMallocAlign(size_t,int,const char[],const char[],const char[],void**);
extern PetscErrorCode PetscFreeAlign(void*,int,const char[],const char[],const char[]);
extern int PetscGlobalArgc;
extern char **PetscGlobalArgs;

/*
  Reads in the Fortran command line arguments, broadcasts them to
  all processes, and adds them to the options database.
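  All storage is carved from a single PetscMallocAlign() allocation; a
  sketch of the layout built below (with warg = 256 bytes per argument):

      [ argc+1 char* pointers | arg 0 (warg bytes) | arg 1 (warg bytes) | ... ]

  so the fixed-width strings can be shipped to the other ranks with one
  MPI_Bcast() and the whole object freed later as one block by
  PetscFreeAlign().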
*/
PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv)
{
#if defined(PETSC_USE_NARGS)
  short          i,flg;
#else
  int            i;
#endif
  PetscErrorCode ierr;
  int            warg = 256;
  PetscMPIInt    rank;
  char           *p;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (!rank) {
#if defined(PETSC_HAVE_IARG_COUNT_PROGNAME)
    *argc = iargc_();
#else
    /* most compilers do not count the program name for argv[0] */
    *argc = 1 + iargc_();
#endif
  }
  ierr = MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);CHKERRQ(ierr);

  /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */
  ierr = PetscMallocAlign((*argc+1)*(warg*sizeof(char)+sizeof(char*)),0,0,0,0,(void**)argv);CHKERRQ(ierr);
  (*argv)[0] = (char*)(*argv + *argc + 1);

  if (!rank) {
    ierr = PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));CHKERRQ(ierr);
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;
#if defined(PETSC_HAVE_PXFGETARG_NEW)
      {char *tmp = (*argv)[i];
      int  ilen;
      getarg_(&i,tmp,&ilen,&ierr,warg);CHKERRQ(ierr);
      tmp[ilen] = 0;}
#elif defined(PETSC_USE_NARGS)
      GETARG(&i,(*argv)[i],warg,&flg);
#else
      /*
        Because the stupid #defines above define all kinds of things to getarg_ we cannot do this test
          #elif defined(PETSC_HAVE_GETARG)
            getarg_(&i,(*argv)[i],warg);
          #else
            SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot get Fortran command line arguments");
      */
      getarg_(&i,(*argv)[i],warg);
#endif
      /* zero out garbage at end of each argument */
      p = (*argv)[i] + warg-1;
      while (p > (*argv)[i]) {
        if (*p == ' ') *p = 0;
        p--;
      }
    }
  }
  ierr = MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr);
  if (rank) {
    for (i=0; i<*argc; i++) (*argv)[i+1] = (*argv)[i] + warg;
  }
  return 0;
}

/* -----------------------------------------------------------------------------------------------*/

extern MPI_Op PetscADMax_Op;
extern MPI_Op PetscADMin_Op;
PETSC_EXTERN_C void MPIAPI PetscADMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN_C void MPIAPI PetscADMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);

/*
  petscinitialize - Version called from Fortran.
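  A typical call from a Fortran main program (a minimal sketch; the
  user-visible name is PetscInitialize, which the name-mangling macros
  above map onto this stub):

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)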
  Notes:
  Since this is called from Fortran it does not return error codes.
*/
PETSC_EXTERN_C void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
{
#if defined(PETSC_USE_NARGS)
  short sflg,i; /* named sflg to avoid clashing with the PetscBool flg below */
#else
  int i;
#if !defined(PETSC_HAVE_PXFGETARG_NEW)
  int j;
#endif
#endif
  int         flag;
  PetscMPIInt size;
  char        *t1,name[256],hostname[64];
  PetscMPIInt f_petsc_comm_world;
  PetscInt    nodesize;
  PetscBool   flg;

  *ierr = PetscMemzero(name,256); if (*ierr) return;
  if (PetscInitializeCalled) {*ierr = 0; return;}

  /* this must be initialized in a routine, not as a constant declaration */
  PETSC_STDOUT = stdout;
  PETSC_STDERR = stderr;

  *ierr = PetscOptionsCreate();
  if (*ierr) return;
  i = 0;
#if defined(PETSC_HAVE_PXFGETARG_NEW)
  { int ilen,sierr;
    getarg_(&i,name,&ilen,&sierr,256);
    if (sierr) PetscStrncpy(name,"Unknown Name",256);
    else name[ilen] = 0;
  }
#elif defined(PETSC_USE_NARGS)
  GETARG(&i,name,256,&sflg);
#else
  getarg_(&i,name,256);
  /* eliminate spaces at the end of the string */
  for (j=254; j>=0; j--) {
    if (name[j] != ' ') {
      name[j+1] = 0;
      break;
    }
  }
  if (j<0) PetscStrncpy(name,"Unknown Name",256);
#endif
  *ierr = PetscSetProgramName(name);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()\n");return;}

  /* check if PETSC_COMM_WORLD is initialized by the user in Fortran */
  petscgetcommoncomm_(&f_petsc_comm_world);
  MPI_Initialized(&flag);
  if (!flag) {
    PetscMPIInt mierr;

    if (f_petsc_comm_world) {(*PetscErrorPrintf)("You cannot set PETSC_COMM_WORLD if you have not initialized MPI first\n");return;}
    /* MPI requires calling Fortran mpi_init() if the main program is Fortran */
    mpi_init_(&mierr);
    if (mierr) {
      *ierr = mierr;
      (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()\n");
      return;
    }
    PetscBeganMPI = PETSC_TRUE;
  }
  if (f_petsc_comm_world) PETSC_COMM_WORLD = MPI_Comm_f2c(*(MPI_Fint*)&f_petsc_comm_world); /* user called MPI_INITIALIZE() and changed PETSC_COMM_WORLD */
  else PETSC_COMM_WORLD = MPI_COMM_WORLD;
  PetscInitializeCalled = PETSC_TRUE;

  *ierr = PetscErrorPrintfInitialize();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()\n");return;}
  *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank\n");return;}
  *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize\n");return;}

  MPIU_BOOL = MPI_INT;
  MPIU_ENUM = MPI_INT;

#if defined(PETSC_HAVE_COMPLEX)
  /*
    Initialize the global variable; this is needed because with
    shared libraries the constructors for global variables
    are not called, at least on IRIX.
  */
  {
#if defined(PETSC_CLANGUAGE_CXX)
    PetscComplex ic(0.0,1.0);
    PETSC_i = ic;
#else
    PETSC_i = _Complex_I;
#endif
  }

#if !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)
  *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU_C_DOUBLE_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU_C_DOUBLE_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_contiguous(2,MPI_FLOAT,&MPIU_C_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU_C_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
#endif

#endif

#if defined(PETSC_USE_REAL___FLOAT128)
  *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___FLOAT128);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU___FLOAT128);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
#if defined(PETSC_HAVE_COMPLEX)
  *ierr = MPI_Type_contiguous(4,MPI_DOUBLE,&MPIU___COMPLEX128);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU___COMPLEX128);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
#endif
  *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
  *ierr = MPI_Op_create(PetscMax_Local,1,&MPIU_MAX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
  *ierr = MPI_Op_create(PetscMin_Local,1,&MPIU_MIN);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
#endif

  /*
    Create the PETSc MPI reduction operator that sums the first
    half of the entries and maxes the second half.
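    For example, combining two buffers [s0 s1 | m0 m1] and [t0 t1 | n0 n1]
    with this operator yields [s0+t0 s1+t1 | max(m0,n0) max(m1,n1)].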
  */
  *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}

  *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU_2SCALAR);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
#if defined(PETSC_USE_64BIT_INDICES) || !defined(MPI_2INT)
  *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU_2INT);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
#endif
  *ierr = MPI_Op_create(PetscADMax_Local,1,&PetscADMax_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
  *ierr = MPI_Op_create(PetscADMin_Local,1,&PetscADMin_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
  *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelCounter,&Petsc_Counter_keyval,(void*)0);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
  *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm,&Petsc_InnerComm_keyval,(void*)0);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
  *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm,&Petsc_OuterComm_keyval,(void*)0);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}

  /*
    PetscInitializeFortran() is called twice. Here it initializes
    PETSC_NULL_CHARACTER_Fortran; the second call below initializes the
    PETSC_VIEWERs, which have not been created yet at this point.
  */
  PetscInitializeFortran();
  PETScParseFortranArgs_Private(&PetscGlobalArgc,&PetscGlobalArgs);
  FIXCHAR(filename,len,t1);
  *ierr = PetscOptionsInsert(&PetscGlobalArgc,&PetscGlobalArgs,t1);
  FREECHAR(filename,t1);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database\n");return;}
  *ierr = PetscOptionsCheckInitial_Private();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
#if defined(PETSC_USE_LOG)
  *ierr = PetscLogBegin_Private();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: initializing logging\n");return;}
#endif
  *ierr = PetscInitialize_DynamicLibraries();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries\n");return;}

  *ierr = PetscInitializeFortran();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Setting up common block\n");return;}

  *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()\n");return;}
  *ierr = PetscInfo1(0,"(Fortran):PETSc successfully started: procs %d\n",size);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
  *ierr = PetscGetHostName(hostname,64);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Getting hostname\n");return;}
  *ierr = PetscInfo1(0,"Running on machine: %s\n",hostname);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
  *ierr = PetscOptionsCheckInitial_Components();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}

  *ierr = PetscThreadCommInitializePackage(NULL);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscThreadCommInitialize()\n");return;}

#if defined(PETSC_USE_DEBUG)
  PetscThreadLocalRegister((PetscThreadKey*)&petscstack); /* creates petscstack_key if needed */
  *ierr = PetscStackCreate();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscStackCreate()\n");return;}
#endif

  *ierr = PetscOptionsGetInt(NULL,"-hmpi_spawn_size",&nodesize,&flg);
  if (flg) {
#if defined(PETSC_HAVE_MPI_COMM_SPAWN)
    *ierr = PetscHMPISpawn((PetscMPIInt)nodesize); /* worker nodes never return from here; they go directly to PetscEnd() */
    if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPISpawn()\n");return;}
#else
    *ierr = PETSC_ERR_SUP;
    (*PetscErrorPrintf)("PetscInitialize: PETSc built without MPI 2 (MPI_Comm_spawn) support, use -hmpi_merge_size instead");
    return;
#endif
  } else {
    *ierr = PetscOptionsGetInt(NULL,"-hmpi_merge_size",&nodesize,&flg);
    if (flg) {
      *ierr = PetscHMPIMerge((PetscMPIInt)nodesize,NULL,NULL);
      if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPIMerge()\n");return;}
      if (PetscHMPIWorker) { /* if worker then never enter user code */
        PetscInitializeCalled = PETSC_TRUE;
        *ierr = PetscEnd();
      }
    }
  }

#if defined(PETSC_HAVE_CUDA)
  cublasInit();
#endif
}

PETSC_EXTERN_C void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr)
{
#if defined(PETSC_HAVE_SUNMATHPRO)
  extern void standard_arithmetic();
  standard_arithmetic();
#endif
  /* was malloced with PetscMallocAlign() so free the same way */
  *ierr = PetscFreeAlign(PetscGlobalArgs,0,0,0,0);
  if (*ierr) {(*PetscErrorPrintf)("PetscFinalize:Freeing args\n");return;}

  *ierr = PetscFinalize();
}

PETSC_EXTERN_C void PETSC_STDCALL petscend_(PetscErrorCode *ierr)
{
#if defined(PETSC_HAVE_SUNMATHPRO)
  extern void standard_arithmetic();
  standard_arithmetic();
#endif

  *ierr = PetscEnd();
}
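/*
  The matching Fortran-side teardown (a minimal sketch; the user-visible
  names PetscFinalize and PetscEnd map onto the stubs above):

      call PetscFinalize(ierr)

  or, from a process that must terminate immediately:

      call PetscEnd(ierr)
*/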