/*
  This file contains Fortran stubs for PetscInitialize and Finalize.
*/

/*
  This is to prevent the Cray T3D version of MPI (University of Edinburgh)
  from stupidly redefining MPI_INIT(). They put this in to detect errors
  in C code, but here I do want to be calling the Fortran version from a
  C subroutine.
*/
#define T3DMPI_FORTRAN
#define T3EMPI_FORTRAN

#include <petsc/private/fortranimpl.h>

#if defined(PETSC_HAVE_CUDA)
#include <cublas_v2.h>
#endif

/* Map the C stub names to whatever symbol names this Fortran compiler expects */
#if defined(PETSC_HAVE_FORTRAN_CAPS)
#define petscinitialize_           PETSCINITIALIZE
#define petscfinalize_             PETSCFINALIZE
#define petscend_                  PETSCEND
#define iargc_                     IARGC
#define getarg_                    GETARG
#define mpi_init_                  MPI_INIT
#define petscgetcommoncomm_        PETSCGETCOMMONCOMM
#define petsccommandargumentcount_ PETSCCOMMANDARGUMENTCOUNT
#define petscgetcommandargument_   PETSCGETCOMMANDARGUMENT
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define petscinitialize_           petscinitialize
#define petscfinalize_             petscfinalize
#define petscend_                  petscend
#define mpi_init_                  mpi_init
#define iargc_                     iargc
#define getarg_                    getarg
#define petscgetcommoncomm_        petscgetcommoncomm
#define petsccommandargumentcount_ petsccommandargumentcount
#define petscgetcommandargument_   petscgetcommandargument
#endif

/* Compiler/runtime-specific names for the Fortran argument-count/argument-fetch routines */
#if defined(PETSC_HAVE_NAGF90)
#undef iargc_
#undef getarg_
#define iargc_  f90_unix_MP_iargc
#define getarg_ f90_unix_MP_getarg
#endif
#if defined(PETSC_USE_NARGS) /* Digital Fortran */
#undef iargc_
#undef getarg_
#define iargc_  NARGS
#define getarg_ GETARG
#elif defined(PETSC_HAVE_PXFGETARG_NEW) /* Cray X1 */
#undef iargc_
#undef getarg_
#define iargc_  ipxfargc_
#define getarg_ pxfgetarg_
#endif
#if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HP-UX + no underscore */
#undef iargc_
#undef getarg_
#define iargc_  iargc_
#define getarg_ getarg_
#endif

#if defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* Fortran 2003 */
#undef iargc_
#undef getarg_
#define iargc_  petsccommandargumentcount_
#define getarg_ petscgetcommandargument_
#elif defined(PETSC_HAVE_GFORTRAN_IARGC) /* gfortran from gcc4 */
#undef iargc_
#undef getarg_
#define iargc_  _gfortran_iargc
#define getarg_ _gfortran_getarg_i4
#elif defined(PETSC_HAVE_BGL_IARGC) /* bgl g77 has different external & internal name mangling */
#undef iargc_
#undef getarg_
#define iargc  iargc_
#define getarg getarg_
#endif

/*
  The extra _ is because the f2c compiler puts an
  extra _ at the end if the original routine name
  contained any _.
*/
#if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
#undef mpi_init_
#define mpi_init_ mpi_init__
#endif

PETSC_EXTERN void PETSC_STDCALL mpi_init_(int*);
PETSC_EXTERN void PETSC_STDCALL petscgetcommoncomm_(PetscMPIInt*);

/*
  Different Fortran compilers handle command lines in different ways
*/
#if defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* Fortran 2003 - same as 'else' case */
PETSC_EXTERN int iargc_();
PETSC_EXTERN void getarg_(int*,char*,int);
#elif defined(PETSC_USE_NARGS)
PETSC_EXTERN short __stdcall NARGS();
PETSC_EXTERN void __stdcall GETARG(short*,char*,int,short *);

#elif defined(PETSC_HAVE_FORTRAN_STDCALL)
PETSC_EXTERN int PETSC_STDCALL IARGC();
PETSC_EXTERN void PETSC_STDCALL GETARG(int *,char *,int);

#elif defined(PETSC_HAVE_PXFGETARG_NEW)
PETSC_EXTERN int iargc_();
PETSC_EXTERN void getarg_(int*,char*,int*,int*,int);

#else
PETSC_EXTERN int iargc_();
PETSC_EXTERN void getarg_(int*,char*,int);
/*
  The Cray T3D/T3E use the PXFGETARG() function
*/
#if defined(PETSC_HAVE_PXFGETARG)
PETSC_EXTERN void PXFGETARG(int*,_fcd,int*,int*);
#endif
#endif

#if (defined(PETSC_HAVE_COMPLEX) && !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX)) || defined(PETSC_USE_REAL___FLOAT128)
extern MPI_Op MPIU_SUM;

PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);

#endif
#if defined(PETSC_USE_REAL___FLOAT128)

PETSC_EXTERN void MPIAPI PetscSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN void MPIAPI PetscMax_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN void MPIAPI PetscMin_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
#endif

extern MPI_Op PetscMaxSum_Op;

PETSC_EXTERN void MPIAPI PetscMaxSum_Local(void*,void*,PetscMPIInt*,MPI_Datatype*);
PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelCounter(MPI_Comm,PetscMPIInt,void*,void*);
PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Inner(MPI_Comm,PetscMPIInt,void*,void*);
PETSC_EXTERN PetscMPIInt MPIAPI Petsc_DelComm_Outer(MPI_Comm,PetscMPIInt,void*,void*);

/* Private PETSc routines used during initialization; declared in the corresponding .c files */
extern PetscErrorCode PetscOptionsCheckInitial_Private(void);
extern PetscErrorCode PetscOptionsCheckInitial_Components(void);
extern PetscErrorCode PetscInitialize_DynamicLibraries(void);
#if defined(PETSC_USE_LOG)
extern PetscErrorCode PetscLogInitialize(void);
#endif
extern PetscErrorCode PetscMallocAlign(size_t,int,const char[],const char[],void**);
extern PetscErrorCode PetscFreeAlign(void*,int,const char[],const char[]);
extern int  PetscGlobalArgc;
extern char **PetscGlobalArgs;

/*
  Reads in the Fortran command line arguments on rank 0, broadcasts them to
  all processes, and adds them to the options database.
159 */ 160 161 PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv) 162 { 163 #if defined(PETSC_USE_NARGS) 164 short i,flg; 165 #else 166 int i; 167 #endif 168 PetscErrorCode ierr; 169 int warg = 256; 170 PetscMPIInt rank; 171 char *p; 172 173 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr); 174 if (!rank) { 175 #if defined(PETSC_HAVE_IARG_COUNT_PROGNAME) 176 *argc = iargc_(); 177 #else 178 /* most compilers do not count the program name for argv[0] */ 179 *argc = 1 + iargc_(); 180 #endif 181 } 182 ierr = MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);CHKERRQ(ierr); 183 184 /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */ 185 ierr = PetscMallocAlign((*argc+1)*(warg*sizeof(char)+sizeof(char*)),0,0,0,(void**)argv);CHKERRQ(ierr); 186 (*argv)[0] = (char*)(*argv + *argc + 1); 187 188 if (!rank) { 189 ierr = PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));CHKERRQ(ierr); 190 for (i=0; i<*argc; i++) { 191 (*argv)[i+1] = (*argv)[i] + warg; 192 #if defined (PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* same as 'else' case */ 193 getarg_(&i,(*argv)[i],warg); 194 #elif defined(PETSC_HAVE_PXFGETARG_NEW) 195 {char *tmp = (*argv)[i]; 196 int ilen; 197 getarg_(&i,tmp,&ilen,&ierr,warg);CHKERRQ(ierr); 198 tmp[ilen] = 0;} 199 #elif defined(PETSC_USE_NARGS) 200 GETARG(&i,(*argv)[i],warg,&flg); 201 #else 202 /* 203 Because the stupid #defines above define all kinds of things to getarg_ we cannot do this test 204 #elif defined(PETSC_HAVE_GETARG) 205 getarg_(&i,(*argv)[i],warg); 206 #else 207 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot get Fortran command line arguments"); 208 */ 209 getarg_(&i,(*argv)[i],warg); 210 #endif 211 /* zero out garbage at end of each argument */ 212 p = (*argv)[i] + warg-1; 213 while (p > (*argv)[i]) { 214 if (*p == ' ') *p = 0; 215 p--; 216 } 217 } 218 } 219 ierr = MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr); 220 if (rank) { 221 for (i=0; i<*argc; i++) (*argv)[i+1] = (*argv)[i] + warg; 
222 } 223 return 0; 224 } 225 226 #if defined(PETSC_SERIALIZE_FUNCTIONS) 227 extern PetscFPT PetscFPTData; 228 #endif 229 230 #if defined(PETSC_HAVE_THREADSAFETY) 231 extern PetscSpinlock PetscViewerASCIISpinLockOpen; 232 extern PetscSpinlock PetscViewerASCIISpinLockStdout; 233 extern PetscSpinlock PetscViewerASCIISpinLockStderr; 234 extern PetscSpinlock PetscCommSpinLock; 235 #endif 236 237 /* -----------------------------------------------------------------------------------------------*/ 238 239 #if defined(PETSC_HAVE_SAWS) 240 #include <petscviewersaws.h> 241 extern PetscErrorCode PetscInitializeSAWs(const char[]); 242 #endif 243 244 /* 245 petscinitialize - Version called from Fortran. 246 247 Notes: 248 Since this is called from Fortran it does not return error codes 249 250 */ 251 PETSC_EXTERN void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len)) 252 { 253 #if defined (PETSC_USE_NARGS) 254 short flg,i; 255 #else 256 int i; 257 #if !defined(PETSC_HAVE_PXFGETARG_NEW) && !defined (PETSC_HAVE_PXFGETARG_NEW) && !defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) 258 int j; 259 #endif 260 #endif 261 #if defined(PETSC_HAVE_CUDA) 262 PetscBool flg2; 263 cublasStatus_t cberr; 264 #endif 265 int flag; 266 PetscMPIInt size; 267 char *t1,name[256],hostname[64]; 268 PetscMPIInt f_petsc_comm_world; 269 270 *ierr = PetscMemzero(name,256); if (*ierr) return; 271 if (PetscInitializeCalled) {*ierr = 0; return;} 272 273 /* this must be initialized in a routine, not as a constant declaration*/ 274 PETSC_STDOUT = stdout; 275 PETSC_STDERR = stderr; 276 277 /* on Windows - set printf to default to printing 2 digit exponents */ 278 #if defined(PETSC_HAVE__SET_OUTPUT_FORMAT) 279 _set_output_format(_TWO_DIGIT_EXPONENT); 280 #endif 281 282 *ierr = PetscOptionsCreateDefault(); 283 if (*ierr) return; 284 i = 0; 285 #if defined (PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* same as 'else' case */ 286 getarg_(&i,name,256); 287 #elif 
defined (PETSC_HAVE_PXFGETARG_NEW) 288 { int ilen,sierr; 289 getarg_(&i,name,&ilen,&sierr,256); 290 if (sierr) PetscStrncpy(name,"Unknown Name",256); 291 else name[ilen] = 0; 292 } 293 #elif defined(PETSC_USE_NARGS) 294 GETARG(&i,name,256,&flg); 295 #else 296 getarg_(&i,name,256); 297 /* Eliminate spaces at the end of the string */ 298 for (j=254; j>=0; j--) { 299 if (name[j] != ' ') { 300 name[j+1] = 0; 301 break; 302 } 303 } 304 if (j<0) PetscStrncpy(name,"Unknown Name",256); 305 #endif 306 *ierr = PetscSetProgramName(name); 307 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()\n");return;} 308 309 /* check if PETSC_COMM_WORLD is initialized by the user in fortran */ 310 petscgetcommoncomm_(&f_petsc_comm_world); 311 MPI_Initialized(&flag); 312 if (!flag) { 313 PetscMPIInt mierr; 314 315 if (f_petsc_comm_world) {(*PetscErrorPrintf)("You cannot set PETSC_COMM_WORLD if you have not initialized MPI first\n");return;} 316 /* MPI requires calling Fortran mpi_init() if main program is Fortran */ 317 #if defined(PETSC_HAVE_MPIUNI) && defined(MPIUNI_AVOID_MPI_NAMESPACE) 318 mierr = MPI_Init((int*)0, (char***)0); 319 #else 320 mpi_init_(&mierr); 321 #endif 322 if (mierr) { 323 *ierr = mierr; 324 (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()\n"); 325 return; 326 } 327 PetscBeganMPI = PETSC_TRUE; 328 } 329 if (f_petsc_comm_world) PETSC_COMM_WORLD = MPI_Comm_f2c(*(MPI_Fint*)&f_petsc_comm_world); /* User called MPI_INITIALIZE() and changed PETSC_COMM_WORLD */ 330 else PETSC_COMM_WORLD = MPI_COMM_WORLD; 331 PetscInitializeCalled = PETSC_TRUE; 332 333 *ierr = PetscSpinlockCreate(&PetscViewerASCIISpinLockOpen); 334 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;} 335 *ierr = PetscSpinlockCreate(&PetscViewerASCIISpinLockStdout); 336 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;} 337 *ierr = PetscSpinlockCreate(&PetscViewerASCIISpinLockStderr); 338 if 
(*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;} 339 *ierr = PetscSpinlockCreate(&PetscCommSpinLock); 340 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Creating global spin lock\n");return;} 341 342 *ierr = PetscErrorPrintfInitialize(); 343 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()\n");return;} 344 *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank); 345 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank\n");return;} 346 *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize); 347 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize\n");return;} 348 349 MPIU_BOOL = MPI_INT; 350 MPIU_ENUM = MPI_INT; 351 352 #if defined(PETSC_HAVE_COMPLEX) 353 /* 354 Initialized the global variable; this is because with 355 shared libraries the constructors for global variables 356 are not called; at least on IRIX. 357 */ 358 { 359 #if defined(PETSC_CLANGUAGE_CXX) 360 PetscComplex ic(0.0,1.0); 361 PETSC_i = ic; 362 #else 363 PETSC_i = _Complex_I; 364 #endif 365 } 366 367 #if !defined(PETSC_HAVE_MPI_C_DOUBLE_COMPLEX) 368 *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU_C_DOUBLE_COMPLEX); 369 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 370 *ierr = MPI_Type_commit(&MPIU_C_DOUBLE_COMPLEX); 371 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 372 *ierr = MPI_Type_contiguous(2,MPI_FLOAT,&MPIU_C_COMPLEX); 373 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 374 *ierr = MPI_Type_commit(&MPIU_C_COMPLEX); 375 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 376 *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM); 377 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;} 378 #endif 379 380 #endif 381 382 #if defined(PETSC_USE_REAL___FLOAT128) 383 *ierr = MPI_Type_contiguous(2,MPI_DOUBLE,&MPIU___FLOAT128); 384 if 
(*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 385 *ierr = MPI_Type_commit(&MPIU___FLOAT128); 386 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 387 #if defined(PETSC_HAVE_COMPLEX) 388 *ierr = MPI_Type_contiguous(4,MPI_DOUBLE,&MPIU___COMPLEX128); 389 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 390 *ierr = MPI_Type_commit(&MPIU___COMPLEX128); 391 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 392 #endif 393 *ierr = MPI_Op_create(PetscSum_Local,1,&MPIU_SUM); 394 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;} 395 *ierr = MPI_Op_create(PetscMax_Local,1,&MPIU_MAX); 396 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;} 397 *ierr = MPI_Op_create(PetscMin_Local,1,&MPIU_MIN); 398 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;} 399 #endif 400 401 /* 402 Create the PETSc MPI reduction operator that sums of the first 403 half of the entries and maxes the second half. 
404 */ 405 *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op); 406 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops\n");return;} 407 408 *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR); 409 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 410 *ierr = MPI_Type_commit(&MPIU_2SCALAR); 411 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 412 #if defined(PETSC_USE_64BIT_INDICES) || !defined(MPI_2INT) 413 *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT); 414 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 415 *ierr = MPI_Type_commit(&MPIU_2INT); 416 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types\n");return;} 417 #endif 418 *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelCounter,&Petsc_Counter_keyval,(void*)0); 419 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;} 420 *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Outer,&Petsc_InnerComm_keyval,(void*)0); 421 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;} 422 *ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DelComm_Inner,&Petsc_OuterComm_keyval,(void*)0); 423 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI keyvals\n");return;} 424 425 /* 426 PetscInitializeFortran() is called twice. Here it initializes 427 PETSC_NULL_CHARACTER_Fortran. Below it initializes the PETSC_VIEWERs. 428 The PETSC_VIEWERs have not been created yet, so they must be initialized 429 below. 
430 */ 431 PetscInitializeFortran(); 432 PETScParseFortranArgs_Private(&PetscGlobalArgc,&PetscGlobalArgs); 433 FIXCHAR(filename,len,t1); 434 *ierr = PetscOptionsInsert(NULL,&PetscGlobalArgc,&PetscGlobalArgs,t1); 435 FREECHAR(filename,t1); 436 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database\n");return;} 437 *ierr = PetscOptionsCheckInitial_Private(); 438 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;} 439 *ierr = PetscCitationsInitialize(); 440 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscCitationsInitialize()\n");return;} 441 #if defined(PETSC_HAVE_SAWS) 442 *ierr = PetscInitializeSAWs(NULL); 443 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing SAWs\n");return;} 444 #endif 445 #if defined(PETSC_USE_LOG) 446 *ierr = PetscLogInitialize(); 447 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: intializing logging\n");return;} 448 #endif 449 *ierr = PetscInitialize_DynamicLibraries(); 450 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries\n");return;} 451 452 *ierr = PetscInitializeFortran(); 453 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Setting up common block\n");return;} 454 455 *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size); 456 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()\n");return;} 457 *ierr = PetscInfo1(0,"(Fortran):PETSc successfully started: procs %d\n",size); 458 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;} 459 *ierr = PetscGetHostName(hostname,64); 460 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Getting hostname\n");return;} 461 *ierr = PetscInfo1(0,"Running on machine: %s\n",hostname); 462 if (*ierr) { (*PetscErrorPrintf)("PetscInitialize:Calling PetscInfo()\n");return;} 463 *ierr = PetscOptionsCheckInitial_Components(); 464 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options\n");return;} 465 466 #if defined(PETSC_USE_DEBUG) && 
!defined(PETSC_HAVE_THREADSAFETY) 467 *ierr = PetscStackCreate(); 468 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscStackCreate()\n");return;} 469 #endif 470 471 #if defined(PETSC_SERIALIZE_FUNCTIONS) 472 *ierr = PetscFPTCreate(10000); 473 if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:PetscFPTCreate()\n");return;} 474 #endif 475 476 #if defined(PETSC_HAVE_CUDA) 477 flg2 = PETSC_TRUE; 478 *ierr = PetscOptionsGetBool(NULL,NULL,"-cublas",&flg2,NULL); 479 if (flg2) { 480 cberr = cublasCreate(&cublasv2handle); 481 if (((int)cberr) != (int)CUBLAS_STATUS_SUCCESS) {(*PetscErrorPrintf)("PetscInitialize:CUBLAS error %d\n",cberr);return;} 482 } 483 #endif 484 } 485 486 PETSC_EXTERN void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr) 487 { 488 #if defined(PETSC_HAVE_SUNMATHPRO) 489 extern void standard_arithmetic(); 490 standard_arithmetic(); 491 #endif 492 /* was malloced with PetscMallocAlign() so free the same way */ 493 *ierr = PetscFreeAlign(PetscGlobalArgs,0,0,0);if (*ierr) {(*PetscErrorPrintf)("PetscFinalize:Freeing args\n");return;} 494 495 *ierr = PetscFinalize(); 496 } 497 498 PETSC_EXTERN void PETSC_STDCALL petscend_(PetscErrorCode *ierr) 499 { 500 #if defined(PETSC_HAVE_SUNMATHPRO) 501 extern void standard_arithmetic(); 502 standard_arithmetic(); 503 #endif 504 505 *ierr = PetscEnd(); 506 } 507 508