/*
  This file contains Fortran stubs for PetscInitialize and Finalize.
*/

/*
  This is to prevent the Cray T3D version of MPI (University of Edinburgh)
  from redefining MPI_INIT(). They put this in to detect errors in C code,
  but here we do want to be calling the Fortran version from a C subroutine.
*/
#define T3DMPI_FORTRAN
#define T3EMPI_FORTRAN

#define PETSC_DESIRE_COMPLEX
#include <petsc-private/fortranimpl.h>

#if defined(PETSC_HAVE_CUSP)
#include <cublas.h>
#endif
#include <petscthreadcomm.h>

extern PetscBool PetscHMPIWorker;

#if defined(PETSC_HAVE_FORTRAN_CAPS)
#define petscinitialize_    PETSCINITIALIZE
#define petscfinalize_      PETSCFINALIZE
#define petscend_           PETSCEND
#define iargc_              IARGC
#define getarg_             GETARG
#define mpi_init_           MPI_INIT
#define petscgetcommoncomm_ PETSCGETCOMMONCOMM
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define petscinitialize_    petscinitialize
#define petscfinalize_      petscfinalize
#define petscend_           petscend
#define mpi_init_           mpi_init
#define iargc_              iargc
#define getarg_             getarg
#define petscgetcommoncomm_ petscgetcommoncomm
#endif

#if defined(PETSC_HAVE_NAGF90)
#undef iargc_
#undef getarg_
#define iargc_  f90_unix_MP_iargc
#define getarg_ f90_unix_MP_getarg
#endif
#if defined(PETSC_USE_NARGS) /* Digital Fortran */
#undef iargc_
#undef getarg_
#define iargc_  NARGS
#define getarg_ GETARG
#elif defined(PETSC_HAVE_PXFGETARG_NEW) /* Cray X1 */
#undef iargc_
#undef getarg_
#define iargc_  ipxfargc_
#define getarg_ pxfgetarg_
#endif
#if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HPUX + no underscore */
#undef iargc_
#undef getarg_
#define iargc_  iargc_
#define getarg_ getarg_
#endif
#if defined(PETSC_HAVE_GFORTRAN_IARGC) /* gfortran from gcc4 */
#undef iargc_
#undef getarg_
#define iargc_  _gfortran_iargc
#define getarg_ _gfortran_getarg_i4
#elif defined(PETSC_HAVE_BGL_IARGC) /* BG/L g77 has different external & internal name mangling */
#undef iargc_
#undef getarg_
#define iargc  iargc_
#define getarg getarg_
#endif

/*
  The extra _ is because the f2c compiler appends an
  extra _ at the end if the original routine name
  contained any _.
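  For example, such a compiler refers to the Fortran routine mpi_init
  as mpi_init__, with two trailing underscores, which is why mpi_init_
  is remapped below.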
*/
#if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
#undef mpi_init_
#define mpi_init_ mpi_init__
#endif

PETSC_EXTERN void PETSC_STDCALL mpi_init_(int*);
PETSC_EXTERN void PETSC_STDCALL petscgetcommoncomm_(PetscMPIInt*);

/*
  Different Fortran compilers handle command lines in different ways
*/
#if defined(PETSC_USE_NARGS)
PETSC_EXTERN short __stdcall NARGS();
PETSC_EXTERN void __stdcall GETARG(short*,char*,int,short*);

#elif defined(PETSC_HAVE_FORTRAN_STDCALL)
PETSC_EXTERN int PETSC_STDCALL IARGC();
PETSC_EXTERN void PETSC_STDCALL GETARG(int*,char*,int);

#elif defined(PETSC_HAVE_PXFGETARG_NEW)
PETSC_EXTERN int iargc_();
PETSC_EXTERN void getarg_(int*,char*,int*,int*,int);

#else
PETSC_EXTERN int iargc_();
PETSC_EXTERN void getarg_(int*,char*,int);
/*
  The Cray T3D/T3E use the PXFGETARG() function
*/
#if defined(PETSC_HAVE_PXFGETARG)
PETSC_EXTERN void PXFGETARG(int*,_fcd,int*,int*);
#endif
#endif

#if (defined(PETSC_HAVE_COMPLEX) && !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)) || defined(PETSC_USE_REAL___FLOAT128)
extern MPI_Op MPIU_SUM;

PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
#endif
#if defined(PETSC_USE_REAL___FLOAT128)
PETSC_EXTERN void MPIAPI PetscMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN void MPIAPI PetscMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
#endif

extern MPI_Op PetscMaxSum_Op;

PETSC_EXTERN void MPIAPI PetscMaxSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelCounter(MPI_Comm,PetscMPIInt,void*,void*);
PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Inner(MPI_Comm,PetscMPIInt,void*,void*);
PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Outer(MPI_Comm,PetscMPIInt,void*,void*);

extern PetscErrorCode PetscOptionsCheckInitial_Private(void);
extern PetscErrorCode PetscOptionsCheckInitial_Components(void);
extern PetscErrorCode PetscInitialize_DynamicLibraries(void);
#if defined(PETSC_USE_LOG)
extern PetscErrorCode PetscLogBegin_Private(void);
#endif
extern PetscErrorCode PetscMallocAlign(size_t,int,const char[],const char[],const char[],void**);
extern PetscErrorCode PetscFreeAlign(void*,int,const char[],const char[],const char[]);
extern int PetscGlobalArgc;
extern char **PetscGlobalArgs;

/*
  Reads in the Fortran command line arguments, broadcasts them to
  all processes, and adds them to the options database.
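  The arguments are returned in a single allocation: *argc+1 char* pointers
  followed by fixed-width (warg = 256 character) argument slots; the pointer
  block is wired to point into the slot block, so everything is released
  later by the single PetscFreeAlign() in petscfinalize_().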
*/

PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv)
{
#if defined(PETSC_USE_NARGS)
  short          i,flg;
#else
  int            i;
#endif
  PetscErrorCode ierr;
  int            warg = 256;
  PetscMPIInt    rank;
  char           *p;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (!rank) {
#if defined(PETSC_HAVE_IARG_COUNT_PROGNAME)
    *argc = iargc_();
#else
    /* most compilers do not count the program name for argv[0] */
    *argc = 1 + iargc_();
#endif
  }
  ierr = MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);CHKERRQ(ierr);

  /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */
  ierr = PetscMallocAlign((*argc+1)*(warg*sizeof(char)+sizeof(char*)),0,0,0,0,(void**)argv);CHKERRQ(ierr);
  (*argv)[0] = (char*)(*argv + *argc + 1);

  if (!rank) {
    ierr = PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));CHKERRQ(ierr);
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;
#if defined(PETSC_HAVE_PXFGETARG_NEW)
      {char *tmp = (*argv)[i];
       int  ilen;
       getarg_(&i,tmp,&ilen,&ierr,warg);CHKERRQ(ierr);
       tmp[ilen] = 0;}
#elif defined(PETSC_USE_NARGS)
      GETARG(&i,(*argv)[i],warg,&flg);
#else
      /*
        Because the #defines above map all kinds of things to getarg_ we cannot do this test
          #elif defined(PETSC_HAVE_GETARG)
            getarg_(&i,(*argv)[i],warg);
          #else
            SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot get Fortran command line arguments");
      */
      getarg_(&i,(*argv)[i],warg);
#endif
      /* zero out garbage at end of each argument */
      p = (*argv)[i] + warg-1;
      while (p > (*argv)[i]) {
        if (*p == ' ') *p = 0;
        p--;
      }
    }
  }
  ierr = MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr);
  if (rank) {
    for (i=0; i<*argc; i++) (*argv)[i+1] = (*argv)[i] + warg;
  }
  return 0;
}

/* -----------------------------------------------------------------------------------------------*/

extern MPI_Op PetscADMax_Op;
extern MPI_Op PetscADMin_Op;
PETSC_EXTERN void MPIAPI PetscADMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN void MPIAPI PetscADMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);

/*
  petscinitialize - Version called from Fortran.
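
  A minimal Fortran caller, shown for illustration only (it assumes the
  standard PETSc Fortran include file of this release):

      program main
      implicit none
#include <finclude/petscsys.h>
      PetscErrorCode ierr
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      ! ... use PETSc here ...
      call PetscFinalize(ierr)
      end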

  Notes:
  Since this is called from Fortran it does not return error codes

*/
PETSC_EXTERN void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
{
#if defined(PETSC_USE_NARGS)
  short       sflg,i;
#else
  int         i;
#if !defined(PETSC_HAVE_PXFGETARG_NEW)
  int         j;
#endif
#endif
  int         flag;
  PetscMPIInt size;
  char        *t1,name[256],hostname[64];
  PetscMPIInt f_petsc_comm_world;
  PetscInt    nodesize;
  PetscBool   flg;

  *ierr = PetscMemzero(name,256); if (*ierr) return;
  if (PetscInitializeCalled) {*ierr = 0; return;}

  /* this must be initialized in a routine, not as a constant declaration */
  PETSC_STDOUT = stdout;
  PETSC_STDERR = stderr;

  *ierr = PetscOptionsCreate();
  if (*ierr) return;
  i = 0;
#if defined(PETSC_HAVE_PXFGETARG_NEW)
  { int ilen,sierr;
    getarg_(&i,name,&ilen,&sierr,256);
    if (sierr) PetscStrncpy(name,"Unknown Name",256);
    else name[ilen] = 0;
  }
#elif defined(PETSC_USE_NARGS)
  GETARG(&i,name,256,&sflg);
#else
  getarg_(&i,name,256);
  /* eliminate spaces at the end of the string */
  for (j=254; j>=0; j--) {
    if (name[j] != ' ') {
      name[j+1] = 0;
      break;
    }
  }
  if (j<0) PetscStrncpy(name,"Unknown Name",256);
#endif
  *ierr = PetscSetProgramName(name);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()\n");return;}

  /* check if PETSC_COMM_WORLD is initialized by the user in Fortran */
  petscgetcommoncomm_(&f_petsc_comm_world);
  MPI_Initialized(&flag);
  if (!flag) {
    PetscMPIInt mierr;

    if (f_petsc_comm_world) {(*PetscErrorPrintf)("You cannot set PETSC_COMM_WORLD if you have not initialized MPI first\n");return;}
    /* MPI requires calling Fortran mpi_init() if the main program is Fortran */
#if defined(PETSC_HAVE_MPIUNI) && defined(MPIUNI_AVOID_MPI_NAMESPACE)
    mierr = MPI_Init((int*)0,(char***)0);
#else
    mpi_init_(&mierr);
#endif
    if (mierr) {
      *ierr = mierr;
      (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()\n");
      return;
    }
    PetscBeganMPI = PETSC_TRUE;
  }
  if (f_petsc_comm_world) PETSC_COMM_WORLD = MPI_Comm_f2c(*(MPI_Fint*)&f_petsc_comm_world); /* user called MPI_INITIALIZE() and changed PETSC_COMM_WORLD */
  else PETSC_COMM_WORLD = MPI_COMM_WORLD;
  PetscInitializeCalled = PETSC_TRUE;

  *ierr = PetscErrorPrintfInitialize();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()\n");return;}
  *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank\n");return;}
  *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize\n");return;}

  MPIU_BOOL = MPI_INT;
  MPIU_ENUM = MPI_INT;

#if defined(PETSC_HAVE_COMPLEX)
  /*
    Initialize the global variable; this is needed because with
    shared libraries the constructors for global variables
    are not called; at least on IRIX.
  */
  {
#if defined(PETSC_CLANGUAGE_CXX)
    PetscComplex ic(0.0,1.0);
    PETSC_i = ic;
#else
    PETSC_i = _Complex_I;
#endif
  }

#if !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)
  *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU_C_DOUBLE_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU_C_DOUBLE_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_contiguous(2,MPI_FLOAT,&MPIU_C_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU_C_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
#endif

#endif

#if defined(PETSC_USE_REAL___FLOAT128)
  *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___FLOAT128);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU___FLOAT128);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
#if defined(PETSC_HAVE_COMPLEX)
  *ierr = MPI_Type_contiguous(4,MPI_DOUBLE,&MPIU___COMPLEX128);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU___COMPLEX128);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
#endif
  *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
  *ierr = MPI_Op_create(PetscMax_Local,1,&MPIU_MAX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
  *ierr = MPI_Op_create(PetscMin_Local,1,&MPIU_MIN);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
#endif

  /*
    Create the PETSc MPI reduction operator that sums the first
    half of the entries and maxes the second half.
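
    For illustration (values made up): with two entries per half, combining
    the local array {1,4,2,3} into {3,2,5,1} yields {4,6,5,3}; the first
    half is summed elementwise, the second half is maxed elementwise.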
  */
  *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}

  *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU_2SCALAR);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
#if defined(PETSC_USE_64BIT_INDICES) || !defined(MPI_2INT)
  *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
  *ierr = MPI_Type_commit(&MPIU_2INT);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;}
#endif
  *ierr = MPI_Op_create(PetscADMax_Local,1,&PetscADMax_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
  *ierr = MPI_Op_create(PetscADMin_Local,1,&PetscADMin_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;}
  *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelCounter,&Petsc_Counter_keyval,(void*)0);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
  *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Outer,&Petsc_InnerComm_keyval,(void*)0);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}
  *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Inner,&Petsc_OuterComm_keyval,(void*)0);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;}

  /*
    PetscInitializeFortran() is called twice. Here it initializes
    PETSC_NULL_CHARACTER_Fortran. Below it initializes the PETSC_VIEWERs.
    The PETSC_VIEWERs have not been created yet, so they must be initialized
    below.
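    (That second call is the *ierr = PetscInitializeFortran() further down,
    made after PetscInitialize_DynamicLibraries() has created the viewers.)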
  */
  PetscInitializeFortran();
  PETScParseFortranArgs_Private(&PetscGlobalArgc,&PetscGlobalArgs);
  FIXCHAR(filename,len,t1);
  *ierr = PetscOptionsInsert(&PetscGlobalArgc,&PetscGlobalArgs,t1);
  FREECHAR(filename,t1);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database\n");return;}
  *ierr = PetscOptionsCheckInitial_Private();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}
#if defined(PETSC_USE_LOG)
  *ierr = PetscLogBegin_Private();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: initializing logging\n");return;}
#endif
  *ierr = PetscInitialize_DynamicLibraries();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries\n");return;}

  *ierr = PetscInitializeFortran();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Setting up common block\n");return;}

  *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()\n");return;}
  *ierr = PetscInfo1(0,"(Fortran):PETSc successfully started: procs %d\n",size);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
  *ierr = PetscGetHostName(hostname,64);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Getting hostname\n");return;}
  *ierr = PetscInfo1(0,"Running on machine: %s\n",hostname);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;}
  *ierr = PetscOptionsCheckInitial_Components();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;}

  *ierr = PetscThreadCommInitializePackage();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Calling PetscThreadCommInitialize()\n");return;}

#if defined(PETSC_USE_DEBUG)
  PetscThreadLocalRegister((PetscThreadKey*)&petscstack); /* creates petscstack_key if needed */
  *ierr = PetscStackCreate();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscStackCreate()\n");return;}
#endif

  *ierr = PetscCitationsInitialize();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscCitationsInitialize()\n");return;}

  *ierr = PetscOptionsGetInt(NULL,"-hmpi_spawn_size",&nodesize,&flg);
  if (flg) {
#if defined(PETSC_HAVE_MPI_COMM_SPAWN)
    *ierr = PetscHMPISpawn((PetscMPIInt)nodesize); /* worker nodes never return from here; they go directly to PetscEnd() */
    if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPISpawn()\n");return;}
#else
    *ierr = PETSC_ERR_SUP;
    (*PetscErrorPrintf)("PetscInitialize: PETSc built without MPI 2 (MPI_Comm_spawn) support, use -hmpi_merge_size instead");
    return;
#endif
  } else {
    *ierr = PetscOptionsGetInt(NULL,"-hmpi_merge_size",&nodesize,&flg);
    if (flg) {
      *ierr = PetscHMPIMerge((PetscMPIInt)nodesize,NULL,NULL);
      if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscHMPIMerge()\n");return;}
      if (PetscHMPIWorker) { /* if worker then never enter user code */
        PetscInitializeCalled = PETSC_TRUE;
        *ierr = PetscEnd();
      }
    }
  }

#if defined(PETSC_HAVE_CUSP)
  cublasInit();
#endif
}

PETSC_EXTERN void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr)
{
#if defined(PETSC_HAVE_SUNMATHPRO)
  extern void standard_arithmetic();
  standard_arithmetic();
#endif
  /* was malloced with PetscMallocAlign() so free the same way */
  *ierr = PetscFreeAlign(PetscGlobalArgs,0,0,0,0);
  if (*ierr) {(*PetscErrorPrintf)("PetscFinalize:Freeing args\n");return;}

  *ierr = PetscFinalize();
}

PETSC_EXTERN void PETSC_STDCALL petscend_(PetscErrorCode *ierr)
{
#if defined(PETSC_HAVE_SUNMATHPRO)
  extern void standard_arithmetic();
  standard_arithmetic();
#endif

  *ierr = PetscEnd();
}