1 /* 2 This file contains Fortran stubs for PetscInitialize and Finalize. 3 */ 4 5 /* 6 This is to prevent the Cray T3D version of MPI (University of Edinburgh) 7 from stupidly redefining MPI_INIT(). They put this in to detect errors 8 in C code,but here I do want to be calling the Fortran version from a 9 C subroutine. 10 */ 11 #define T3DMPI_FORTRAN 12 #define T3EMPI_FORTRAN 13 14 #include <petsc/private/fortranimpl.h> 15 16 #if defined(PETSC_HAVE_CUDA) 17 #include <cublas.h> 18 #endif 19 20 #if defined(PETSC_HAVE_FORTRAN_CAPS) 21 #define petscinitialize_ PETSCINITIALIZE 22 #define petscfinalize_ PETSCFINALIZE 23 #define petscend_ PETSCEND 24 #define iargc_ IARGC 25 #define getarg_ GETARG 26 #define mpi_init_ MPI_INIT 27 #define petscgetcommoncomm_ PETSCGETCOMMONCOMM 28 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) 29 #define petscinitialize_ petscinitialize 30 #define petscfinalize_ petscfinalize 31 #define petscend_ petscend 32 #define mpi_init_ mpi_init 33 #define iargc_ iargc 34 #define getarg_ getarg 35 #define petscgetcommoncomm_ petscgetcommoncomm 36 #endif 37 38 #if defined(PETSC_HAVE_NAGF90) 39 #undef iargc_ 40 #undef getarg_ 41 #define iargc_ f90_unix_MP_iargc 42 #define getarg_ f90_unix_MP_getarg 43 #endif 44 #if defined(PETSC_USE_NARGS) /* Digital Fortran */ 45 #undef iargc_ 46 #undef getarg_ 47 #define iargc_ NARGS 48 #define getarg_ GETARG 49 #elif defined(PETSC_HAVE_PXFGETARG_NEW) /* cray x1 */ 50 #undef iargc_ 51 #undef getarg_ 52 #define iargc_ ipxfargc_ 53 #define getarg_ pxfgetarg_ 54 #endif 55 #if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HPUX + no underscore */ 56 #undef iargc_ 57 #undef getarg_ 58 #define iargc_ iargc_ 59 #define getarg_ getarg_ 60 #endif 61 #if defined(PETSC_HAVE_GFORTRAN_IARGC) /* gfortran from gcc4 */ 62 #undef iargc_ 63 #undef getarg_ 64 #define iargc_ _gfortran_iargc 65 #define getarg_ _gfortran_getarg_i4 66 #elif defined(PETSC_HAVE_BGL_IARGC) /* bgl g77 has different external & internal name mangling */ 67 #undef 
iargc_ 68 #undef getarg_ 69 #define iargc iargc_ 70 #define getarg getarg_ 71 #endif 72 73 /* 74 The extra _ is because the f2c compiler puts an 75 extra _ at the end if the original routine name 76 contained any _. 77 */ 78 #if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE) 79 #undef mpi_init_ 80 #define mpi_init_ mpi_init__ 81 #endif 82 83 PETSC_EXTERN void PETSC_STDCALL mpi_init_(int*); 84 PETSC_EXTERN void PETSC_STDCALL petscgetcommoncomm_(PetscMPIInt*); 85 86 /* 87 Different Fortran compilers handle command lines in different ways 88 */ 89 #if defined(PETSC_USE_NARGS) 90 PETSC_EXTERN short __stdcall NARGS(); 91 PETSC_EXTERN void __stdcall GETARG(short*,char*,int,short *); 92 93 #elif defined(PETSC_HAVE_FORTRAN_STDCALL) 94 PETSC_EXTERN int PETSC_STDCALL IARGC(); 95 PETSC_EXTERN void PETSC_STDCALL GETARG(int *,char *,int); 96 97 #elif defined(PETSC_HAVE_PXFGETARG_NEW) 98 PETSC_EXTERN int iargc_(); 99 PETSC_EXTERN void getarg_(int*,char*,int*,int*,int); 100 101 #else 102 PETSC_EXTERN int iargc_(); 103 PETSC_EXTERN void getarg_(int*,char*,int); 104 /* 105 The Cray T3D/T3E use the PXFGETARG() function 106 */ 107 #if defined(PETSC_HAVE_PXFGETARG) 108 PETSC_EXTERN void PXFGETARG(int*,_fcd,int*,int*); 109 #endif 110 #endif 111 112 #if (defined(PETSC_HAVE_COMPLEX) && !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)) || defined(PETSC_USE_REAL___FLOAT128) 113 extern MPI_Op MPIU_SUM; 114 115 PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*); 116 117 #endif 118 #if defined(PETSC_USE_REAL___FLOAT128) 119 120 PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*); 121 PETSC_EXTERN void MPIAPI PetscMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*); 122 PETSC_EXTERN void MPIAPI PetscMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*); 123 #endif 124 125 extern MPI_Op PetscMaxSum_Op; 126 127 PETSC_EXTERN void MPIAPI PetscMaxSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*); 128 PETSC_EXTERN PetscMPIInt MPIAPI 
Petsc_DelCounter(MPI_Comm,PetscMPIInt,void*,void*);
PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Inner(MPI_Comm,PetscMPIInt,void*,void*);
PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Outer(MPI_Comm,PetscMPIInt,void*,void*);

extern PetscErrorCode PetscOptionsCheckInitial_Private(void);
extern PetscErrorCode PetscOptionsCheckInitial_Components(void);
extern PetscErrorCode PetscInitialize_DynamicLibraries(void);
#if defined(PETSC_USE_LOG)
extern PetscErrorCode PetscLogInitialize(void);
#endif
extern PetscErrorCode PetscMallocAlign(size_t,int,const char[],const char[],void**);
extern PetscErrorCode PetscFreeAlign(void*,int,const char[],const char[]);
extern int PetscGlobalArgc;
extern char **PetscGlobalArgs;

/*
   PETScParseFortranArgs_Private - Reads in the Fortran command line arguments on
   process 0 (via whichever iargc_()/getarg_() variant the compiler provides),
   broadcasts them to all processes, and stores them in PetscGlobalArgc/PetscGlobalArgs
   so they can later be inserted into the options database.

   Output Parameters:
+  argc - number of arguments (including argv[0], the program name)
-  argv - the argument strings; allocated here as ONE PetscMallocAlign() block
          (pointer table followed by the character data), so the caller frees
          with a single PetscFreeAlign() on *argv

   Notes:
   Each argument is limited to warg (256) characters.
   Collective on PETSC_COMM_WORLD; every rank must call this.
*/

PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv)
{
#if defined(PETSC_USE_NARGS)
  short          i,flg;
#else
  int            i;
#endif
  PetscErrorCode ierr;
  int            warg = 256;   /* fixed maximum length of each argument string */
  PetscMPIInt    rank;
  char           *p;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (!rank) {
#if defined(PETSC_HAVE_IARG_COUNT_PROGNAME)
    *argc = iargc_();
#else
    /* most compilers do not count the program name for argv[0] */
    *argc = 1 + iargc_();
#endif
  }
  ierr = MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);CHKERRQ(ierr);

  /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */
  /* single allocation: (*argc+1) char* slots followed by (*argc+1) buffers of warg chars */
  ierr = PetscMallocAlign((*argc+1)*(warg*sizeof(char)+sizeof(char*)),0,0,0,(void**)argv);CHKERRQ(ierr);
  /* character data starts right after the pointer table */
  (*argv)[0] = (char*)(*argv + *argc + 1);

  if (!rank) {
    ierr = PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));CHKERRQ(ierr);
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;
#if defined(PETSC_HAVE_PXFGETARG_NEW)
      {char *tmp = (*argv)[i];
      int  ilen;
      getarg_(&i,tmp,&ilen,&ierr,warg);CHKERRQ(ierr);
      tmp[ilen] = 0;}
#elif defined(PETSC_USE_NARGS)
      GETARG(&i,(*argv)[i],warg,&flg);
#else
      /*
          Because the #defines above map all kinds of symbols onto getarg_ we cannot do this test
      #elif defined(PETSC_HAVE_GETARG)
        getarg_(&i,(*argv)[i],warg);
      #else
          SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot get Fortran command line arguments");
      */
      getarg_(&i,(*argv)[i],warg);
#endif
      /* zero out garbage at end of each argument; Fortran getarg() blank-pads
         the buffer, so every trailing blank is turned into a NUL terminator
         (NOTE(review): this also zeroes embedded blanks, which getarg()-supplied
         arguments cannot contain in practice) */
      p = (*argv)[i] + warg-1;
      while (p > (*argv)[i]) {
        if (*p == ' ') *p = 0;
        p--;
      }
    }
  }
  /* the pointer table is rank-local; only the flat character data is broadcast */
  ierr = MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr);
  if (rank) {
    /* rebuild the pointer table on the receiving ranks */
    for (i=0; i<*argc; i++) (*argv)[i+1] = (*argv)[i] + warg;
  }
  return 0;
}

#if defined(PETSC_SERIALIZE_FUNCTIONS)
extern PetscFPT PetscFPTData;
#endif

#if defined(PETSC_HAVE_THREADSAFETY)
PetscSpinlock PetscViewerASCIISpinLock;
PetscSpinlock PetscCommSpinLock;
#endif

/* -----------------------------------------------------------------------------------------------*/

#if defined(PETSC_HAVE_SAWS)
#include <petscviewersaws.h>
extern PetscErrorCode PetscInitializeSAWs(const char[]);
#endif

/*
    petscinitialize - Version called from Fortran.
229 230 Notes: 231 Since this is called from Fortran it does not return error codes 232 233 */ 234 PETSC_EXTERN void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len)) 235 { 236 #if defined (PETSC_USE_NARGS) 237 short flg,i; 238 #else 239 int i; 240 #if !defined(PETSC_HAVE_PXFGETARG_NEW) && !defined (PETSC_HAVE_PXFGETARG_NEW) 241 int j; 242 #endif 243 #endif 244 #if defined(PETSC_HAVE_CUDA) 245 PetscBool flg2; 246 #endif 247 int flag; 248 PetscMPIInt size; 249 char *t1,name[256],hostname[64]; 250 PetscMPIInt f_petsc_comm_world; 251 252 *ierr = PetscMemzero(name,256); if (*ierr) return; 253 if (PetscInitializeCalled) {*ierr = 0; return;} 254 255 /* this must be initialized in a routine, not as a constant declaration*/ 256 PETSC_STDOUT = stdout; 257 PETSC_STDERR = stderr; 258 259 /* on Windows - set printf to default to printing 2 digit exponents */ 260 #if defined(PETSC_HAVE__SET_OUTPUT_FORMAT) 261 _set_output_format(_TWO_DIGIT_EXPONENT); 262 #endif 263 264 *ierr = PetscOptionsCreate(); 265 if (*ierr) return; 266 i = 0; 267 #if defined (PETSC_HAVE_PXFGETARG_NEW) 268 { int ilen,sierr; 269 getarg_(&i,name,&ilen,&sierr,256); 270 if (sierr) PetscStrncpy(name,"Unknown Name",256); 271 else name[ilen] = 0; 272 } 273 #elif defined(PETSC_USE_NARGS) 274 GETARG(&i,name,256,&flg); 275 #else 276 getarg_(&i,name,256); 277 /* Eliminate spaces at the end of the string */ 278 for (j=254; j>=0; j--) { 279 if (name[j] != ' ') { 280 name[j+1] = 0; 281 break; 282 } 283 } 284 if (j<0) PetscStrncpy(name,"Unknown Name",256); 285 #endif 286 *ierr = PetscSetProgramName(name); 287 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()\n");return;} 288 289 /* check if PETSC_COMM_WORLD is initialized by the user in fortran */ 290 petscgetcommoncomm_(&f_petsc_comm_world); 291 MPI_Initialized(&flag); 292 if (!flag) { 293 PetscMPIInt mierr; 294 295 if (f_petsc_comm_world) {(*PetscErrorPrintf)("You cannot set 
PETSC_COMM_WORLD if you have not initialized MPI first\n");return;} 296 /* MPI requires calling Fortran mpi_init() if main program is Fortran */ 297 #if defined(PETSC_HAVE_MPIUNI) && defined(MPIUNI_AVOID_MPI_NAMESPACE) 298 mierr = MPI_Init((int*)0, (char***)0); 299 #else 300 mpi_init_(&mierr); 301 #endif 302 if (mierr) { 303 *ierr = mierr; 304 (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()\n"); 305 return; 306 } 307 PetscBeganMPI = PETSC_TRUE; 308 } 309 if (f_petsc_comm_world) PETSC_COMM_WORLD = MPI_Comm_f2c(*(MPI_Fint*)&f_petsc_comm_world); /* User called MPI_INITIALIZE() and changed PETSC_COMM_WORLD */ 310 else PETSC_COMM_WORLD = MPI_COMM_WORLD; 311 PetscInitializeCalled = PETSC_TRUE; 312 313 *ierr = PetscSpinlockCreate(&PetscViewerASCIISpinLock); 314 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;} 315 *ierr = PetscSpinlockCreate(&PetscCommSpinLock); 316 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;} 317 318 *ierr = PetscErrorPrintfInitialize(); 319 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()\n");return;} 320 *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank); 321 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank\n");return;} 322 *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize); 323 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize\n");return;} 324 325 MPIU_BOOL = MPI_INT; 326 MPIU_ENUM = MPI_INT; 327 328 #if defined(PETSC_HAVE_COMPLEX) 329 /* 330 Initialized the global variable; this is because with 331 shared libraries the constructors for global variables 332 are not called; at least on IRIX. 
333 */ 334 { 335 #if defined(PETSC_CLANGUAGE_CXX) 336 PetscComplex ic(0.0,1.0); 337 PETSC_i = ic; 338 #else 339 PETSC_i = _Complex_I; 340 #endif 341 } 342 343 #if !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX) 344 *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU_C_DOUBLE_COMPLEX); 345 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 346 *ierr = MPI_Type_commit(&MPIU_C_DOUBLE_COMPLEX); 347 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 348 *ierr = MPI_Type_contiguous(2,MPI_FLOAT,&MPIU_C_COMPLEX); 349 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 350 *ierr = MPI_Type_commit(&MPIU_C_COMPLEX); 351 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 352 *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM); 353 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;} 354 #endif 355 356 #endif 357 358 #if defined(PETSC_USE_REAL___FLOAT128) 359 *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___FLOAT128); 360 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 361 *ierr = MPI_Type_commit(&MPIU___FLOAT128); 362 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 363 #if defined(PETSC_HAVE_COMPLEX) 364 *ierr = MPI_Type_contiguous(4,MPI_DOUBLE,&MPIU___COMPLEX128); 365 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 366 *ierr = MPI_Type_commit(&MPIU___COMPLEX128); 367 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 368 #endif 369 *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM); 370 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;} 371 *ierr = MPI_Op_create(PetscMax_Local,1,&MPIU_MAX); 372 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;} 373 *ierr = MPI_Op_create(PetscMin_Local,1,&MPIU_MIN); 374 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI 
ops\n");return;} 375 #endif 376 377 /* 378 Create the PETSc MPI reduction operator that sums of the first 379 half of the entries and maxes the second half. 380 */ 381 *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op); 382 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;} 383 384 *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR); 385 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 386 *ierr = MPI_Type_commit(&MPIU_2SCALAR); 387 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 388 #if defined(PETSC_USE_64BIT_INDICES) || !defined(MPI_2INT) 389 *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT); 390 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 391 *ierr = MPI_Type_commit(&MPIU_2INT); 392 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 393 #endif 394 *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelCounter,&Petsc_Counter_keyval,(void*)0); 395 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;} 396 *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Outer,&Petsc_InnerComm_keyval,(void*)0); 397 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;} 398 *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Inner,&Petsc_OuterComm_keyval,(void*)0); 399 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;} 400 401 /* 402 PetscInitializeFortran() is called twice. Here it initializes 403 PETSC_NULL_CHARACTER_Fortran. Below it initializes the PETSC_VIEWERs. 404 The PETSC_VIEWERs have not been created yet, so they must be initialized 405 below. 
406 */ 407 PetscInitializeFortran(); 408 PETScParseFortranArgs_Private(&PetscGlobalArgc,&PetscGlobalArgs); 409 FIXCHAR(filename,len,t1); 410 *ierr = PetscOptionsInsert(&PetscGlobalArgc,&PetscGlobalArgs,t1); 411 FREECHAR(filename,t1); 412 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database\n");return;} 413 *ierr = PetscOptionsCheckInitial_Private(); 414 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;} 415 *ierr = PetscCitationsInitialize(); 416 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscCitationsInitialize()\n");return;} 417 #if defined(PETSC_HAVE_SAWS) 418 *ierr = PetscInitializeSAWs(NULL); 419 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing SAWs\n");return;} 420 #endif 421 #if defined(PETSC_USE_LOG) 422 *ierr = PetscLogInitialize(); 423 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: intializing logging\n");return;} 424 #endif 425 *ierr = PetscInitialize_DynamicLibraries(); 426 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries\n");return;} 427 428 *ierr = PetscInitializeFortran(); 429 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Setting up common block\n");return;} 430 431 *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size); 432 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()\n");return;} 433 *ierr = PetscInfo1(0,"(Fortran):PETSc successfully started: procs %d\n",size); 434 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;} 435 *ierr = PetscGetHostName(hostname,64); 436 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting hostname\n");return;} 437 *ierr = PetscInfo1(0,"Running on machine: %s\n",hostname); 438 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;} 439 *ierr = PetscOptionsCheckInitial_Components(); 440 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;} 441 442 #if defined(PETSC_USE_DEBUG) && 
!defined(PETSC_HAVE_THREADSAFETY) 443 *ierr = PetscStackCreate(); 444 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscStackCreate()\n");return;} 445 #endif 446 447 #if defined(PETSC_SERIALIZE_FUNCTIONS) 448 *ierr = PetscFPTCreate(10000); 449 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscFPTCreate()\n");return;} 450 #endif 451 452 #if defined(PETSC_HAVE_CUDA) 453 flg2 = PETSC_TRUE; 454 *ierr = PetscOptionsGetBool(NULL,"-cublas",&flg2,NULL); 455 if (flg2) cublasInit(); 456 #endif 457 } 458 459 PETSC_EXTERN void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr) 460 { 461 #if defined(PETSC_HAVE_SUNMATHPRO) 462 extern void standard_arithmetic(); 463 standard_arithmetic(); 464 #endif 465 /* was malloced with PetscMallocAlign() so free the same way */ 466 *ierr = PetscFreeAlign(PetscGlobalArgs,0,0,0);if (*ierr) {(*PetscErrorPrintf)("PetscFinalize:Freeing args\n");return;} 467 468 *ierr = PetscFinalize(); 469 } 470 471 void PETSC_STDCALL petscend_(PetscErrorCode *ierr) 472 { 473 #if defined(PETSC_HAVE_SUNMATHPRO) 474 extern void standard_arithmetic(); 475 standard_arithmetic(); 476 #endif 477 478 *ierr = PetscEnd(); 479 } 480 481