'''
  The Framework object serves as the central control for a configure run. It
maintains a graph of all the configure modules involved, which is also used to
track dependencies between them. It initiates the run, compiles the results, and
handles the final output. It maintains the help list for all options available
in the run.

  The setup() method performs generic Script setup and then is called recursively
on all the child modules. The cleanup() method performs the final output and
logging actions:
  - Substitute files
  - Output configure header
  - Log filesystem actions

  Children may be added to the Framework using addChild() or getChild(), but the
far more frequent method is to use require(). Here a module is requested, as in
getChild(), but it is also required to run before another module, usually the one
executing the require(). This provides a simple local interface to establish
dependencies between the child modules, and provides a partial order on the
children to the Framework.

  A backwards compatibility mode is provided for which the user specifies a
configure header and set of files to experience substitution, mirroring the
common usage of Autoconf. Slight improvements have been made in that all
defines are now guarded, various prefixes are allowed for defines and
substitutions, and C specific constructs such as function prototypes and
typedefs are removed to a separate header. However, this is not the intended
future usage.

  The use of configure modules by other modules in the same run provides a model
for the suggested interaction of a new build system with the Framework. If a
module requires another, it merely executes a require().
For instance, the PETSc 33configure module for HYPRE requires information about MPI, and thus contains 34 35 self.mpi = self.framework.require("config.packages.MPI", self) 36 37Notice that passing self for the last arguments means that the MPI module will 38run before the HYPRE module. Furthermore, we save the resulting object as 39self.mpi so that we may interogate it later. HYPRE can initially test whether 40MPI was indeed found using self.mpi.found. When HYPRE requires the list of 41MPI libraries in order to link a test object, the module can use self.mpi.lib. 42''' 43from __future__ import print_function 44import script 45import config.base 46import time 47import tempfile 48import graph 49 50import os 51import re 52import sys 53import platform 54import pickle 55from hashlib import sha256 as checksum_algo 56 57class Framework(config.base.Configure, script.LanguageProcessor): 58 '''This needs to manage configure information in itself just as Builder manages it for configurations''' 59 def __init__(self, clArgs = None, argDB = None, loadArgDB = 1, tmpDir = None): 60 import nargs 61 62 if argDB is None: 63 import RDict 64 65 argDB = RDict.RDict(load = loadArgDB) 66 67 # Storage for intermediate test results 68 self.tmpDir = tmpDir 69 script.LanguageProcessor.__init__(self, clArgs, argDB) 70 config.base.Configure.__init__(self, self) 71 self.childGraph = graph.DirectedGraph() 72 self.substRE = re.compile(r'@(?P<name>[^@]+)@') 73 self.substFiles = {} 74 self.logName = 'configure.log' 75 self.header = 'matt_config.h' 76 self.makeMacroHeader = '' 77 self.makeRuleHeader = '' 78 self.cHeader = 'matt_fix.h' 79 self.enablepoison = False 80 self.poisonheader = 'matt_poison.h' 81 self.headerPrefix = '' 82 self.substPrefix = '' 83 self.pkgheader = '' 84 self.warningRE = re.compile('warning', re.I) 85 if not nargs.Arg.findArgument('debugSections', self.clArgs): 86 self.argDB['debugSections'] = ['screen'] 87 # Perhaps these initializations should just be local temporary 
arguments 88 self.argDB['CPPFLAGS'] = '' 89 if not 'LDFLAGS' in self.argDB: 90 self.argDB['LDFLAGS'] = '' 91 self.batchSetup = [] 92 self.batchIncludes = [] 93 self.batchBodies = [] 94 self.batchCleanup = [] 95 self.batchIncludeDirs = [] 96 self.batchLibs = [] 97 self.dependencies = {} 98 self.configureParent = None 99 # List of packages actually found 100 self.packages = [] 101 self.postbuilds = [] # list of package builds needed to be made after PETSc is built 102 self.postinstalls = [] # list of package builds/installs needed to be made after PETSc is installed 103 self.postchecks = [] # list of package checks needed to be made after PETSc is make check 104 self.createChildren() 105 # Create argDB for user specified options only 106 self.clArgDB = dict([(nargs.Arg.parseArgument(arg)[0], arg) for arg in self.clArgs]) 107 return 108 109 def __getstate__(self): 110 '''We do not want to pickle the default log stream''' 111 d = config.base.Configure.__getstate__(self) 112 d = script.LanguageProcessor.__getstate__(self, d) 113 if 'configureParent' in d: 114 del d['configureParent'] 115 return d 116 117 def __setstate__(self, d): 118 '''We must create the default log stream''' 119 config.base.Configure.__setstate__(self, d) 120 script.LanguageProcessor.__setstate__(self, d) 121 self.__dict__.update(d) 122 return 123 124 def listDirs(self, base, variable): 125 '''Returns a list of all directories of the form base/variable where variable can be regular expression syntax''' 126 if not variable: return [base] 127 dirs = [] 128 nextDirs = variable.split(os.sep) 129 if os.path.isdir(base): 130 try: 131 files = os.listdir(base) 132 files.sort() 133 except: pass 134 for dir in files: 135 if re.match(nextDirs[0], dir): 136 if nextDirs[1:]: 137 rest = os.path.join(*nextDirs[1:]) 138 else: 139 rest = None 140 dirs.extend(self.listDirs(os.path.join(base, dir),rest )) 141 return dirs 142 143 def getTmpDir(self): 144 if not hasattr(self, '_tmpDir'): 145 self._tmpDir = 
tempfile.mkdtemp(prefix = 'petsc-') 146 if not os.access(self._tmpDir, os.X_OK): 147 raise RuntimeError('Cannot execute things in tmp directory '+self._tmpDir+'. Consider setting TMPDIR to something else.') 148 return self._tmpDir 149 def setTmpDir(self, temp): 150 if hasattr(self, '_tmpDir'): 151 if os.path.isdir(self._tmpDir): 152 import shutil 153 shutil.rmtree(self._tmpDir) 154 if temp is None: 155 delattr(self, '_tmpDir') 156 if not temp is None: 157 self._tmpDir = temp 158 return 159 tmpDir = property(getTmpDir, setTmpDir, doc = 'Temporary directory for test byproducts') 160 def getFileCreatePause(self): 161 if not hasattr(self, '_file_create_pause'): 162 return self.argDB['with-file-create-pause'] 163 return self._file_create_pause 164 def setFileCreatePause(self, file_create_pause): 165 self.file_create_pause = file_create_pause 166 file_create_pause = property(getFileCreatePause, setFileCreatePause, doc = 'Add 1 sec pause between config temp file delete/recreate') 167 168 def setupHelp(self, help): 169 import nargs 170 171 help = config.base.Configure.setupHelp(self, help) 172 searchdirs = [] 173 packagedirs = [] 174 175 help.addArgument('Framework', '-configModules', nargs.Arg(None, None, 'A list of Python modules with a Configure class')) 176 help.addArgument('Framework', '-ignoreCompileOutput=<bool>', nargs.ArgBool(None, 1, 'Ignore compiler terminal output when checking if compiles succeed')) 177 help.addArgument('Framework', '-ignoreLinkOutput=<bool>', nargs.ArgBool(None, 1, 'Ignore linker terminal output when checking if links succeed')) 178 help.addArgument('Framework', '-ignoreWarnings=<bool>', nargs.ArgBool(None, 0, 'Ignore compiler and linker warnings in terminal output when checking if it succeeded')) 179 help.addArgument('Framework', '-ignoreCxxBoundCheck=<bool>', nargs.ArgBool(None, 0, 'Ignore Cxx dialect bound check')) 180 help.addArgument('Framework', '-doCleanup=<bool>', nargs.ArgBool(None, 1, 'Delete any configure generated files (turn off 
for debugging)')) 181 help.addArgument('Framework', '-with-executables-search-path', nargs.Arg(None, searchdirs, 'A list of directories used to search for executables')) 182 help.addArgument('Framework', '-with-packages-search-path', nargs.Arg(None, packagedirs, 'A list of directories used to search for packages')) 183 help.addArgument('Framework', '-with-packages-build-dir=<dir>', nargs.Arg(None, None, 'Location to unpack and run the build process for downloaded packages')) 184 help.addArgument('Framework', '-with-batch=<bool>', nargs.ArgBool(None, 0, 'Machine using cross-compilers or a batch system to submit jobs')) 185 help.addArgument('Framework', '-with-file-create-pause=<bool>', nargs.ArgBool(None, 0, 'Add 1 sec pause between config temp file delete/recreate')) 186 return help 187 188 def getCleanup(self): 189 if not hasattr(self, '_doCleanup'): 190 return self.argDB['doCleanup'] 191 return self._doCleanup 192 193 def setCleanup(self, doCleanup): 194 self._doCleanup = doCleanup 195 return 196 doCleanup = property(getCleanup, setCleanup, doc = 'Flag for deleting generated files') 197 198 def setupArguments(self, argDB): 199 '''Change titles and setup all children''' 200 argDB = script.Script.setupArguments(self, argDB) 201 202 self.help.title = 'Configure Help\n Comma separated lists should be given between [] (use \\[ \\] in tcsh/csh)\n For example: --with-mpi-lib=\\[/usr/local/lib/libmpich.a,/usr/local/lib/libpmpich.a\\]\n Options beginning with --known- are to provide values you already know\n Options beginning with --with- indicate that you are requesting something\n For example: --with-clanguage=c++\n <prog> means a program name or a full path to a program\n For example:--with-cmake-exec=/Users/bsmith/bin/cmake\n <bool> means a boolean, use either 0 or 1\n <dir> means a directory\n For example: --with-packages-download-dir=/Users/bsmith/Downloads\n For packages use --with-PACKAGE-dir=<dir> OR\n --with-PACKAGE-include=<dir> --with-PACKAGE-lib=<lib> OR 
--download-PACKAGE' 203 self.actions.title = 'Configure Actions\n These are the actions performed by configure on the filesystem' 204 205 for child in self.childGraph.vertices: 206 if hasattr(child, 'setupHelp'): child.setupHelp(self.help) 207 return argDB 208 209 def outputBasics(self): 210 if 'CONDA_PREFIX' in os.environ and os.environ['CONDA_PREFIX'] is not None: 211 self.conda_active = True 212 self.addMakeMacro('CONDA_ACTIVE',1) 213 214 buf = 'Environmental variables' 215 for key,val in os.environ.items(): 216 if key.find('KEY') > -1: continue 217 buf += '\n'+str(key)+'='+str(val) 218 self.logPrint(buf) 219 def logPrintFilesInPath(path): 220 for d in path: 221 try: 222 self.logWrite(' '+d+': '+' '.join(os.listdir(d))+'\n') 223 except Exception as e: 224 self.logWrite(' Warning accessing '+d+' gives errors: '+str(e)+'\n') 225 return 226 if os.environ['PATH'].split(os.path.pathsep): 227 self.logWrite(' Files in path provided by default path\n') 228 logPrintFilesInPath(os.environ['PATH'].split(os.path.pathsep)) 229 dirs = self.argDB['with-executables-search-path'] 230 if not isinstance(dirs, list): dirs = dirs.split(os.path.pathsep) 231 if dirs: 232 self.logWrite(' Files in path provided by --with-executables-search-path\n') 233 logPrintFilesInPath(dirs) 234 235 def dumpConfFiles(self): 236 '''Performs: 237 - Substitute files 238 - Output configure header 239 - Log actions''' 240 self.substitute() 241 if self.makeMacroHeader: 242 self.outputMakeMacroHeader(self.makeMacroHeader) 243 self.log.write('**** ' + self.makeMacroHeader + ' ****\n') 244 self.outputMakeMacroHeader(self.log) 245 self.actions.addArgument('Framework', 'File creation', 'Created makefile configure header '+self.makeMacroHeader) 246 if self.makeRuleHeader: 247 self.outputMakeRuleHeader(self.makeRuleHeader) 248 self.log.write('**** ' + self.makeRuleHeader + ' ****\n') 249 self.outputMakeRuleHeader(self.log) 250 self.actions.addArgument('Framework', 'File creation', 'Created makefile configure 
header '+self.makeRuleHeader) 251 if self.header: 252 self.outputHeader(self.header, petscconf=True) 253 self.log.write('**** ' + self.header + ' ****\n') 254 self.outputHeader(self.log) 255 self.actions.addArgument('Framework', 'File creation', 'Created configure header '+self.header) 256 if self.cHeader: 257 self.outputCHeader(self.cHeader) 258 self.log.write('**** ' + self.cHeader + ' ****\n') 259 self.outputCHeader(self.log) 260 self.actions.addArgument('Framework', 'File creation', 'Created C specific configure header '+self.cHeader) 261 if self.pkgheader: 262 self.outputPkgHeader(self.pkgheader) 263 self.log.write('**** ' + self.pkgheader + ' ****\n') 264 self.outputPkgHeader(self.log) 265 self.actions.addArgument('Framework', 'File creation', 'Created configure pkg header '+self.pkgheader) 266 self.log.write('\n') 267 return 268 269 def saveHash(self): 270 '''Saves the hash for configure (created in arch.py)''' 271 for hf in ['hashfile','hashfilepackages']: 272 if hasattr(self,'hash') and hasattr(self,hf): 273 self.logPrint('Attempting to save configure hash file: '+getattr(self,hf)) 274 try: 275 with open(getattr(self,hf), 'w') as f: 276 f.write(self.hash) 277 self.logPrint('Saved configure hash file: '+getattr(self,hf)) 278 except: 279 self.logPrint('Unable to save configure hash file: '+getattr(self,hf)) 280 281 def cleanup(self): 282 self.actions.output(self.log) 283 self.tmpDir = None 284 return 285 286 def printSummary(self): 287 # __str__(), __str1__(), __str2__() are used to create 3 different groups of summary outputs. 
288 for child in self.childGraph.vertices: 289 self.logWrite(str(child), debugSection = 'screen', forceScroll = 1, rmDir = 0) 290 for child in self.childGraph.vertices: 291 if hasattr(child,'__str1__'): 292 self.logWrite(child.__str1__(), debugSection = 'screen', forceScroll = 1) 293 for child in self.childGraph.vertices: 294 if hasattr(child,'__str2__'): 295 self.logWrite(child.__str2__(), debugSection = 'screen', forceScroll = 1) 296 return 297 298 def addChild(self, config): 299 '''Add a configure module to the framework''' 300 self.childGraph.addVertex(config) 301 return 302 303 def getChild(self, moduleName, keywordArgs = {}): 304 '''Returns the child matching the given module if present, and otherwise creates and appends it''' 305 type = __import__(moduleName, globals(), locals(), ['Configure']).Configure 306 config = None 307 for child in self.childGraph.vertices: 308 if isinstance(child, type): 309 config = child 310 break 311 if config is None and hasattr(self, 'configureParent') and not self.configureParent is None: 312 for child in self.configureParent.childGraph.vertices: 313 if isinstance(child, type): 314 config = child 315 self.addChild(config) 316 config.showHelp = 0 317 config.logName = self.logName 318 # If it was pickled with a nonstandard RDict, supply one 319 if config.argDB is None: 320 config.argDB = self.argDB 321 config.setup() 322 config.setupPackageDependencies(self) 323 config.setupDependencies(self) 324 break 325 if config is None: 326 config = type(self, *keywordArgs) 327 self.addChild(config) 328 config.showHelp = 0 329 config.logName = self.logName 330 config.setup() 331 config.setupPackageDependencies(self) 332 config.setupDependencies(self) 333 return config 334 335 def createChildren(self): 336 '''Create all children specified by --configModules''' 337 import nargs 338 339 modules = nargs.Arg.findArgument('configModules', self.clArgs) 340 if modules is None: 341 self.argDB['configModules'] = [] 342 elif not isinstance(modules, 
list): 343 self.argDB['configModules'] = [modules] 344 else: 345 self.argDB['configModules'] = modules 346 for moduleName in self.argDB['configModules']: 347 self.getChild(moduleName) 348 return 349 350 def requireModule(self, mod, depChild): 351 '''Return the input module, making sure it runs before depChild''' 352 if not mod is depChild: 353 self.childGraph.addEdges(depChild, [mod]) 354 return mod 355 356 def require(self, moduleName, depChild = None, keywordArgs = {}): 357 '''Return a child from moduleName, creating it if necessary and making sure it runs before depChild''' 358 return self.requireModule(self.getChild(moduleName, keywordArgs), depChild) 359 360 @staticmethod 361 def findModule(obj, module): 362 """ 363 Search OBJ's attributes for an attribute of type MODULE_TYPE. 364 365 Return the module if found, otherwise return None. 366 """ 367 import inspect 368 369 if not inspect.ismodule(module): 370 raise NotImplementedError 371 372 if isinstance(module, str): 373 module_name = module 374 else: 375 module_name = module.__name__ 376 377 for attr in dir(obj): 378 obj_attr = getattr(obj, attr) 379 if inspect.ismodule(obj_attr) and obj_attr.__name__ == module_name: 380 return module 381 return 382 383 ############################################### 384 # Dependency Mechanisms 385 def loadFramework(self, path): 386 import RDict 387 oldDir = os.getcwd() 388 os.chdir(path) 389 argDB = RDict.RDict() 390 os.chdir(oldDir) 391 framework = self.loadConfigure(argDB) 392 if framework is None: 393 self.logPrint('Failed to load cached configuration from '+path) 394 else: 395 self.logPrint('Loaded cached configuration from '+path) 396 return framework 397 398 def addPackageDependency(self, dependency, depPath = None): 399 if not dependency: 400 return 401 if isinstance(dependency, str): 402 framework = self.loadFramework(dependency) 403 if not framework: 404 return 405 else: 406 framework = dependency 407 if depPath: 408 dependency = depPath 409 else: 410 dependency = 
os.path.dirname(dependency.__file__) 411 self.dependencies[dependency] = checksum_algo( pickle.dumps(framework) ).hexdigest() 412 self.logPrint('Added configure dependency from '+dependency+'('+str(self.dependencies[dependency])+')') 413 for child in framework.childGraph.vertices: 414 child.argDB = self.argDB 415 child.showHelp = 0 416 child.logName = self.logName 417 child.setup() 418 self.childGraph.replaceVertex(self.require(child.__module__, None), child) 419 return 420 421 def updatePackageDependencies(self): 422 for dependency, digest in self.dependencies.items(): 423 framework = self.loadFramework(dependency) 424 if digest == checksum_algo( pickle.dumps(framework) ).hexdigest(): 425 continue 426 self.logPrint('Configure dependency from '+dependency+' has changed. Reloading...') 427 for child in framework.childGraph.vertices: 428 self.childGraph.replaceVertex(self.require(child.__module__, None), child) 429 self.logPrint(' Reloaded '+child.__module__) 430 self.updateDependencies() 431 for child in framework.childGraph.vertices: 432 for depChild in self.childGraph.depthFirstVisit(child, outEdges = 0): 433 if hasattr(depChild, '_configured'): 434 del depChild._configured 435 self.logPrint(' Will reconfigure subtree for '+child.__module__) 436 return 437 438 def updateDependencies(self): 439 for child in self.childGraph.vertices: 440 child.setupDependencies(self) 441 return 442 443 def setConfigureParent(self, parent): 444 self.configureParent = parent 445 return 446 447 ############################################### 448 # Filtering Mechanisms 449 450 def filterPreprocessOutput(self,output, log = None): 451 output = output.strip() 452 if log is None: log = self.log 453 log.write("Preprocess output before filtering:\n"+(output if not output else output+'\n')) 454 # Another PGI license warning, multiline so have to discard all 455 if output.find('your evaluation license will expire') > -1 and output.lower().find('error') == -1: 456 output = '' 457 lines = 
output.splitlines() 458 # Intel 459 lines = [s for s in lines if s.find("icc: command line remark #10148: option '-i-dynamic' not supported") < 0] 460 lines = [s for s in lines if s.find("[: unexpected operator") < 0] # Deals with error in mpiicc and mpiicpc wrappers from some versions of Intel MPI. 461 lines = [s for s in lines if s.find(': remark #10441:') < 0] 462 lines = [s for s in lines if s.find("'linker' input unused") < 0] 463 # IBM: 464 lines = [s for s in lines if not s.startswith('cc_r:')] 465 # PGI: Ignore warning about temporary license 466 lines = [s for s in lines if s.find('license.dat') < 0] 467 # Cray XT3 468 lines = [s for s in lines if s.find('INFO: catamount target') < 0] 469 lines = [s for s in lines if s.find('INFO: linux target') < 0] 470 # Lahey/Fujitsu 471 lines = [s for s in lines if s.find('Encountered 0 errors') < 0] 472 # Cray GPU system at Nersc 473 lines = [s for s in lines if s.find('No supported cpu target is set, CRAY_CPU_TARGET=x86-64 will be used.') < 0] 474 lines = [s for s in lines if s.find('Load a valid targeting module or set CRAY_CPU_TARGET') < 0] 475 lines = [s for s in lines if s.find('The -gpu option has no effect unless a language-specific option to enable GPU code generation is used') < 0] 476 # pgi dumps filename on stderr - but returns 0 errorcode' 477 lines = [s for s in lines if lines != 'conftest.c:'] 478 # nvcc 479 lines = [s for s in lines if s.find('incompatible redefinition for option \'compiler-bindir\', the last value of this option was used') < 0] 480 lines = [s for s in lines if s.find('Support for offline compilation for architectures prior to \'<compute/sm/lto>_75\' will be removed in a future release') < 0] 481 482 lines = [s for s in lines if len(s)] 483 if lines: output = '\n'.join(lines) 484 else: output = '' 485 log.write("Preprocess output after filtering:\n"+(output if not output else output+'\n')) 486 return output 487 488 def filterCompileOutput(self, output,flag = '', filterAlways = 0): 489 
''' 490 With --ignoreCompileOutput=1 (default), it filters all compiler messages 491 With --ignoreCompileOutput=0 it filters only compiler messages known to be harmless 492 ''' 493 output = output.strip() 494 if flag and output.find("ignoring unknown option '"+flag+"'"): return output 495 if flag and output.find("invalid value"): return output 496 if output.find('warning: attribute "deprecated" is unknown, ignored') >= 0: return output 497 if output.find('PGC-W-0129-Floating point overflow') >= 0: return output 498 if output.find('warning #264: floating-point value does not fit in required floating-point type') >= 0: return output 499 if output.find('warning: ISO C90 does not support') >= 0: return output 500 if output.find('warning: ISO C does not support') >= 0: return output 501 if output.find('warning #2650: attributes ignored here') >= 0: return output 502 if output.find('warning #3175: unrecognized gcc optimization level') >= 0: return output 503 if output.find('warning: unknown attribute') >= 0: return output 504 if output.find('Warning: attribute visibility is unsupported and will be skipped') >= 0: return output 505 if output.find('(E) Invalid statement found within an interface block. 
Executable statement, statement function or syntax error encountered.') >= 0: return output 506 elif self.argDB['ignoreCompileOutput'] and not filterAlways: 507 output = '' 508 elif output: 509 self.log.write("Compiler output before filtering:\n"+(output if not output or output.endswith('\n') else output+'\n')) 510 lines = output.splitlines() 511 if self.argDB['ignoreWarnings']: 512 # ACCEPT compiler warnings 513 extraLines = [s for s in lines if s.find('implicit declaration of function') >= 0] 514 lines = [s for s in lines if not self.warningRE.search(s)] 515 lines = [s for s in lines if s.find('In file included from') < 0] 516 lines = [s for s in lines if s.find('from ') < 0] 517 lines += extraLines 518 # GCC: Ignore headers to toplevel 519 lines = [s for s in lines if s.find('At the top level') < 0] 520 # GCC: Ignore headers to functions 521 lines = [s for s in lines if s.find(': In function') < 0] 522 # GCC: Ignore stupid warning about builtins 523 lines = [s for s in lines if s.find('warning: conflicting types for built-in function') < 0] 524 # GCC: Ignore stupid warning about unused variables 525 lines = [s for s in lines if s.find('warning: unused variable') < 0] 526 # Intel 527 lines = [s for s in lines if s.find("icc: command line remark #10148: option '-i-dynamic' not supported") < 0] 528 lines = [s for s in lines if s.find(': remark #10441:') < 0] 529 lines = [s for s in lines if s.find("'linker' input unused") < 0] 530 # PGI: Ignore warning about temporary license 531 lines = [s for s in lines if s.find('license.dat') < 0] 532 # Cray XT3 533 lines = [s for s in lines if s.find('INFO: catamount target') < 0] 534 lines = [s for s in lines if s.find('INFO: linux target') < 0] 535 lines = [s for s in lines if s.find('Successful compile:') < 0] 536 # Lahey/Fujitsu 537 lines = [s for s in lines if s.find('Encountered 0 errors') < 0] 538 # Cray GPU system at Nersc 539 lines = [s for s in lines if s.find('No supported cpu target is set, CRAY_CPU_TARGET=x86-64 
will be used.') < 0] 540 lines = [s for s in lines if s.find('Load a valid targeting module or set CRAY_CPU_TARGET') < 0] 541 lines = [s for s in lines if s.find('The -gpu option has no effect unless a language-specific option to enable GPU code generation is used') < 0] 542 # pgi dumps filename on stderr - but returns 0 errorcode' 543 lines = [s for s in lines if lines != 'conftest.c:'] 544 lines = [s for s in lines if len(s)] 545 # nvcc 546 lines = [s for s in lines if s.find('incompatible redefinition for option \'compiler-bindir\', the last value of this option was used') < 0] 547 lines = [s for s in lines if s.find('Support for offline compilation for architectures prior to \'<compute/sm/lto>_75\' will be removed in a future release') < 0] 548 if lines: output = '\n'.join(lines) 549 else: output = '' 550 self.log.write("Compiler output after filtering:\n"+(output if not output else output+'\n')) 551 return output 552 553 def filterLinkOutput(self, output, filterAlways = 0): 554 ''' 555 With --ignoreLinkOutput=1 (default), it filters all linker messages 556 With --ignoreLinkOutput=0 it filters only linker messages known to be harmless 557 ''' 558 output = output.strip() 559 if output.find('relocation R_AARCH64_ADR_PREL_PG_HI21 against symbol') >= 0: return output 560 elif self.argDB['ignoreLinkOutput'] and not filterAlways: 561 output = '' 562 elif output: 563 self.log.write("Linker output before filtering:\n"+(output if not output else output+'\n')) 564 lines = output.splitlines() 565 if self.argDB['ignoreWarnings']: 566 lines = [s for s in lines if not self.warningRE.search(s)] 567 #Intel 568 lines = [s for s in lines if s.find(": command line warning #10121: overriding") < 0] 569 lines = [s for s in lines if s.find(': remark #10441:') < 0] 570 #Intel icpx 571 lines = [s for s in lines if s.find("warning: Note that use of '-g' without any optimization-level option will turn off most compiler optimizations similar to use") < 0] 572 # PGI: Ignore warning about 
temporary license 573 lines = [s for s in lines if s.find('license.dat') < 0] 574 # Cray XT3 575 lines = [s for s in lines if s.find('INFO: catamount target') < 0] 576 lines = [s for s in lines if s.find('INFO: linux target') < 0] 577 # Lahey/Fujitsu 578 lines = [s for s in lines if s.find('Encountered 0 errors') < 0] 579 # Cray GPU system at Nersc 580 lines = [s for s in lines if s.find('No supported cpu target is set, CRAY_CPU_TARGET=x86-64 will be used.') < 0] 581 lines = [s for s in lines if s.find('Load a valid targeting module or set CRAY_CPU_TARGET') < 0] 582 lines = [s for s in lines if s.find('The -gpu option has no effect unless a language-specific option to enable GPU code generation is used') < 0] 583 # Cray link warnings 584 rmidx = [] 585 for i in range(len(lines)-1): 586 if ((lines[i].find('in function') >=0) and (lines[i+1].find('in statically linked applications requires at runtime the shared libraries') >=0)) \ 587 or ((lines[i].find('Warning:') >=0) and (lines[i+1].find('-dynamic was already seen on command line, overriding with -shared.') >=0)): 588 rmidx.extend([i,i+1]) 589 lines = [lines[i] for i in range(len(lines)) if i not in rmidx] 590 591 # pgi dumps filename on stderr - but returns 0 errorcode' 592 lines = [s for s in lines if lines != 'conftest.c:'] 593 # in case -pie is always being passed to linker 594 lines = [s for s in lines if s.find('-pie being ignored. 
It is only used when linking a main executable') < 0] 595 # Microsoft outputs these strings when linking 596 lines = [s for s in lines if s.find('Creating library ') < 0] 597 lines = [s for s in lines if s.find('performing full link') < 0] 598 lines = [s for s in lines if s.find('linking object as if no debug info') < 0] 599 lines = [s for s in lines if s.find('skipping incompatible') < 0] 600 # Multiple gfortran libraries present 601 lines = [s for s in lines if s.find('may conflict with libgfortran') < 0] 602 # macOS libraries built for different macOS versions 603 lines = [s for s in lines if s.find(' was built for newer macOS version') < 0] 604 lines = [s for s in lines if s.find(' was built for newer OSX version') < 0] 605 lines = [s for s in lines if s.find(' stack subq instruction is too different from dwarf stack size') < 0] 606 lines = [s for s in lines if s.find('could not create compact unwind') < 0] 607 lines = [s for s in lines if s.find('ld: warning: -undefined dynamic_lookup may not work with chained fixups') < 0] 608 # Nvidia linker 609 lines = [s for s in lines if s.find('nvhpc.ld contains output sections') < 0] 610 lines = [s for s in lines if s.find('incompatible redefinition for option \'compiler-bindir\', the last value of this option was used') < 0] 611 # Intel dpcpp linker 612 # Ex. 
clang-offload-bundler: error: '/home/jczhang/mpich/lib': Is a directory 613 lines = [s for s in lines if s.find('clang-offload-bundler: error:') < 0] 614 lines = [s for s in lines if s.find('Compilation from IR - skipping loading of FCL') < 0] 615 lines = [s for s in lines if s.find('Build succeeded') < 0] 616 # emcc complaints incompatible linking 617 lines = [s for s in lines if s.find('wasm-ld: warning: function signature mismatch') < 0] 618 lines = [s for s in lines if s.find('>>> defined as') < 0] 619 620 lines = [s for s in lines if len(s)] 621 # a line with a single : can be created on macOS when the linker jumbles the output from warning messages with was "built for newer" warnings 622 lines = [s for s in lines if s != ':'] 623 624 if lines: output = '\n'.join(lines) 625 else: output = '' 626 self.log.write("Linker output after filtering:\n"+(output if not output else output+'\n')) 627 return output 628 629 ############################################### 630 # Output Mechanisms 631 def addSubstitutionFile(self, inName, outName = ''): 632 '''Designate that file should experience substitution 633 - If outName is given, inName --> outName 634 - If inName == foo.in, foo.in --> foo 635 - If inName == foo, foo.in --> foo 636 ''' 637 if outName: 638 if inName == outName: 639 raise RuntimeError('Input and output substitution files identical: '+inName) 640 else: 641 if inName[-3:] == '.in': 642 root = inName[-3:] 643 else: 644 root = inName 645 inName = root+'.in' 646 outName = root 647 if not os.path.exists(inName): 648 raise RuntimeError('Nonexistent substitution file: '+inName) 649 self.substFiles[inName] = outName 650 return 651 652 def getPrefix(self, child): 653 '''Get the default prefix for a given child Configure''' 654 mod = child.__class__.__module__ 655 if not mod == '__main__': 656 prefix = mod.replace('.', '_') 657 else: 658 prefix = '' 659 return prefix 660 661 def getHeaderPrefix(self, child): 662 '''Get the prefix for variables in the configuration 
header for a given child''' 663 if hasattr(child, 'headerPrefix'): 664 prefix = child.headerPrefix 665 else: 666 prefix = self.getPrefix(child) 667 return prefix 668 669 def getSubstitutionPrefix(self, child): 670 '''Get the prefix for variables during substitution for a given child''' 671 if hasattr(child, 'substPrefix'): 672 prefix = child.substPrefix 673 else: 674 prefix = self.getPrefix(child) 675 return prefix 676 677 def substituteName(self, match, prefix = None): 678 '''Return the substitution value for a given name, or return "@name_UNKNOWN@"''' 679 name = match.group('name') 680 if name in self.subst: 681 return self.subst[name] 682 elif name in self.argSubst: 683 return self.argDB[self.argSubst[name]] 684 else: 685 for child in self.childGraph.vertices: 686 if not hasattr(child, 'subst') or not isinstance(child.subst, dict): 687 continue 688 if prefix is None: 689 substPrefix = self.getSubstitutionPrefix(child) 690 else: 691 substPrefix = prefix 692 if substPrefix: 693 substPrefix = substPrefix+'_' 694 if name.startswith(substPrefix): 695 childName = name.replace(substPrefix, '', 1) 696 else: 697 continue 698 else: 699 childName = name 700 if childName in child.subst: 701 return child.subst[childName] 702 elif childName in child.argSubst: 703 return self.argDB[child.argSubst[childName]] 704 return '@'+name+'_UNKNOWN@' 705 706 def substituteFile(self, inName, outName): 707 '''Carry out substitution on the file "inName", creating "outName"''' 708 inFile = open(inName) 709 if os.path.dirname(outName): 710 if not os.path.exists(os.path.dirname(outName)): 711 os.makedirs(os.path.dirname(outName)) 712 if self.file_create_pause: time.sleep(1) 713 outFile = open(outName, 'w') 714 for line in inFile: 715 outFile.write(self.substRE.sub(self.substituteName, line)) 716 outFile.close() 717 inFile.close() 718 self.actions.addArgument('Framework', 'Substitution', inName+' was substituted to produce '+outName) 719 return 720 721 def substitute(self): 722 '''Preform all 
def substitute(self):
  '''Perform substitution on every registered input/output file pair'''
  for inName, outName in self.substFiles.items():
    self.substituteFile(inName, outName)
  return

def dumpSubstitutions(self):
  '''Print every substitution (framework-level and per-child) to stdout'''
  for name, value in self.subst.items():
    print(name+' ---> '+value)
  for name, key in self.argSubst.items():
    print(name+' ---> '+self.argDB[key])
  for child in self.childGraph.vertices:
    if not hasattr(child, 'subst') or not isinstance(child.subst, dict): continue
    substPrefix = self.getSubstitutionPrefix(child)
    for name, value in child.subst.items():
      label = substPrefix+'_'+name if substPrefix else name
      print(label+' ---> '+str(value))
    for name, key in child.argSubst.items():
      label = substPrefix+'_'+name if substPrefix else name
      print(label+' ---> '+str(self.argDB[key]))
  return

def storeSubstitutions(self, argDB):
  '''Store all the substitutions (framework-level and per-child, with the
  child substitution prefix applied) in the argument database "argDB"'''
  argDB.update(self.subst)
  argDB.update(dict((k, self.argDB[self.argSubst[k]]) for k in self.argSubst))
  for child in self.childGraph.vertices:
    if not hasattr(child, 'subst') or not isinstance(child.subst, dict): continue
    substPrefix = self.getSubstitutionPrefix(child)
    if substPrefix:
      argDB.update(dict((substPrefix+'_'+k, child.subst[k]) for k in child.subst))
      argDB.update(dict((substPrefix+'_'+k, self.argDB[child.argSubst[k]]) for k in child.argSubst))
    else:
      argDB.update(child.subst)
      argDB.update(dict((k, self.argDB[child.argSubst[k]]) for k in child.argSubst))
  self.actions.addArgument('Framework', 'RDict update', 'Substitutions were stored in RDict with parent '+str(argDB.parentDirectory))
  return
def outputDefine(self, f, name, value = None, condition = None):
  '''Write "#define name value" to the configuration header f, optionally
  wrapped in "#if (condition)" ... "#endif".  Nothing is written for a falsy
  value.  Library defines are suppressed: they must remain in the define list
  so libraries.havelib() can find them, but they are not wanted in
  petscconf.h; a few packages whose names merely contain LIB are allowed
  through.'''
  suppressed = name.startswith('PETSC_HAVE_LIB') and name not in ['PETSC_HAVE_LIBPNG','PETSC_HAVE_LIBJPEG','PETSC_HAVE_LIBCEED']
  suppressed = suppressed or (name.startswith('PETSC_HAVE_') and name.endswith('LIB'))
  if suppressed:
    return
  if not value:
    return
  if condition:
    f.write('#if (%s)\n' % condition)
  f.write('#define %s %s\n' % (name,value))
  if condition:
    f.write('#endif\n')
  return

def outputPoison(self, f, name):
  '''Output a poison version of name to prevent accidental usage, see outputHeader.
  The same library-define suppression as outputDefine applies.
  NOTE(review): this prepends "PETSC_" to a name that already starts with
  PETSC_ (producing e.g. PETSC_PETSC_HAVE_X) -- confirm the doubled prefix
  is intended.'''
  if (name.startswith('PETSC_HAVE_LIB') and name not in {'PETSC_HAVE_LIBPNG','PETSC_HAVE_LIBJPEG','PETSC_HAVE_LIBCEED'}) or (name.startswith('PETSC_HAVE_') and name.endswith('LIB')):
    return
  if name.startswith(('PETSC_USE_','PETSC_HAVE_','PETSC_SKIP_')):
    f.write('#pragma GCC poison PETSC_%s\n' % name)

def outputMakeMacro(self, f, name, value):
  '''Write a single make macro assignment "name = value" to f'''
  f.write(name+' = '+str(value)+'\n')
  return

def outputMakeRule(self, f, name, dependencies,rule):
  '''Write a make rule "name: dependencies"; each entry of rule becomes a
  tab-indented command line.  A rule with no commands is written as a bare
  dependency line.'''
  if not rule:
    f.write(name+': '+dependencies+'\n\n')
    return
  f.write(name+': '+dependencies+'\n')
  for command in rule:
    f.write('\t'+command+'\n')
  f.write('\n')
  return

def outputMakeRules(self, f, child, prefix = None):
  '''If the child contains a dictionary named "makeRules", the non-empty
  entries are output in the makefile config header.  No prefix is used.'''
  if not hasattr(child, 'makeRules') or not isinstance(child.makeRules, dict): return
  for target, spec in child.makeRules.items():
    if not spec: continue
    self.outputMakeRule(f, target, spec[0], spec[1])
  return
def outputMakeMacros(self, f, child, prefix = None):
  '''If the child contains a dictionary named "makeMacros", the entries are
  output in the makefile config header; falsy values become an empty
  assignment.  No prefix is used.'''
  if not hasattr(child, 'makeMacros') or not isinstance(child.makeMacros, dict): return
  for macro, value in child.makeMacros.items():
    self.outputMakeMacro(f, macro, value if value else '')
  return

def getFullDefineName(self, child, name, prefix = None):
  '''Return name with its define prefix prepended; when prefix is None the
  child's header prefix is used (see processDefines for the prefix rules)'''
  if prefix is None:
    prefix = self.getHeaderPrefix(child)
  return prefix+'_'+name if prefix else name

def processDefines(self, defineDict, child, prefix = None):
  '''If the child contains a dictionary named "defines", the non-empty entries
  are collected into defineDict keyed by their full (prefixed) name.
  The prefix to each define is calculated as follows:
  - If the prefix argument is given, this is used, otherwise
  - If the child contains "headerPrefix", this is used, otherwise
  - If the module containing the child class is not "__main__", this is used, otherwise
  - No prefix is used'''
  if not hasattr(child, 'defines') or not isinstance(child.defines, dict): return
  for name, value in child.defines.items():
    if not value: continue
    fullName = self.getFullDefineName(child, name, prefix)
    defineDict[fullName] = (fullName, value)
  return

def outputDefines(self, defineDict, f, petscconf=False):
  '''Write every collected define to f in sorted-name order; in petscconf mode
  HIP_PLATFORM defines are guarded by !defined(__HIP__)'''
  for key in sorted(defineDict):
    guard = '!defined(__HIP__)' if (petscconf and 'HIP_PLATFORM' in key) else None
    self.outputDefine(f, *defineDict[key], condition=guard)

def outputPoisons(self, defineDict, f):
  '''Write a poison pragma for every collected define, see outputPoison'''
  for key in sorted(defineDict):
    self.outputPoison(f, defineDict[key][0])
def outputPkgVersion(self, f, child):
  '''If the child contains a tuple named "version_tuple", write its
  PETSC_PKG_<NAME>_VERSION_{MAJOR,MINOR,SUBMINOR} defines (missing components
  are padded with 0) followed by petscversion.h-style comparison macros.'''
  if not hasattr(child, 'version_tuple') or not isinstance(child.version_tuple, tuple): return
  if not child.version_tuple: return
  vt = child.version_tuple
  # Ex. convert KOKKOS-KERNELS to KOKKOS_KERNELS
  prefix = 'PETSC_PKG_'+child.name.upper().replace('-','_')+'_'
  labels = ('VERSION_MAJOR','VERSION_MINOR','VERSION_SUBMINOR')
  # output versioning tuple, padded with zeros out to the subminor component
  values = list(vt[:3]) + [0]*(3 - len(vt[:3]))
  for label, value in zip(labels, values):
    f.write('#define '+prefix+label+' '+str(value)+'\n')

  # output macros following petscversion.h style
  p = prefix
  f.writelines([
    '#define '+p+'VERSION_ '+p+'VERSION_EQ\n\n',
    '#define '+p+'VERSION_EQ(MAJOR,MINOR,SUBMINOR) \\\n',
    ' (('+p+'VERSION_MAJOR == (MAJOR)) && \\\n',
    ' ('+p+'VERSION_MINOR == (MINOR)) && \\\n',
    ' ('+p+'VERSION_SUBMINOR == (SUBMINOR)))\n\n',
    '#define '+p+'VERSION_LT(MAJOR,MINOR,SUBMINOR) \\\n',
    ' ('+p+'VERSION_MAJOR < (MAJOR) || \\\n',
    ' ('+p+'VERSION_MAJOR == (MAJOR) && \\\n',
    ' ('+p+'VERSION_MINOR < (MINOR) || \\\n',
    ' ('+p+'VERSION_MINOR == (MINOR) && \\\n',
    ' ('+p+'VERSION_SUBMINOR < (SUBMINOR))))))\n\n',
    '#define '+p+'VERSION_LE(MAJOR,MINOR,SUBMINOR) \\\n',
    ' ('+p+'VERSION_LT(MAJOR,MINOR,SUBMINOR) || \\\n',
    ' '+p+'VERSION_EQ(MAJOR,MINOR,SUBMINOR))\n\n',
    '#define '+p+'VERSION_GT(MAJOR,MINOR,SUBMINOR) \\\n',
    ' ( 0 == '+p+'VERSION_LE(MAJOR,MINOR,SUBMINOR))\n\n',
    '#define '+p+'VERSION_GE(MAJOR,MINOR,SUBMINOR) \\\n',
    ' ( 0 == '+p+'VERSION_LT(MAJOR,MINOR,SUBMINOR))\n\n',
  ])
def outputTypedefs(self, f, child):
  '''If the child contains a dictionary named "typedefs" (new type -> old
  type), the entries are output as typedefs in the config header.'''
  if not hasattr(child, 'typedefs') or not isinstance(child.typedefs, dict): return
  for newType, oldType in child.typedefs.items():
    f.write('typedef ')
    f.write(oldType)
    f.write(' ')
    f.write(newType)
    f.write(';\n')
  return

def outputPrototypes(self, f, child, language = 'All'):
  '''If the child contains a dictionary named "prototypes", the entries for
  the given language are output as function prototypes in the C config
  header.  Languages without an entry are silently skipped.'''
  if not hasattr(child, 'prototypes') or not isinstance(child.prototypes, dict): return
  if language in child.prototypes:
    for prototype in child.prototypes[language]:
      f.write(prototype)
      f.write('\n')
  return

def outputMakeMacroHeader(self, name):
  '''Write the make configuration header (bmake file).
  "name" may be an already-open stream (anything with a "close" attribute) or
  a path; in the path case missing parent directories are created and the
  file is closed at the end.'''
  if hasattr(name, 'close'):
    f = name
    # NOTE(review): filename is assigned here but never used in this method
    filename = 'Unknown'
  else:
    dir = os.path.dirname(name)
    if dir and not os.path.exists(dir):
      os.makedirs(dir)
    # NOTE(review): presumably a pause to dodge filesystem timestamp races -- confirm
    if self.file_create_pause: time.sleep(1)
    f = open(name, 'w')
    filename = os.path.basename(name)
  # macros from the framework itself, then from every child
  self.outputMakeMacros(f, self)
  for child in self.childGraph.vertices:
    self.outputMakeMacros(f, child)
  # These options are used in all runs of the test harness
  testoptions = '-checkfunctionlist'
  # Additional testoptions are provided in packages/
  for child in self.childGraph.vertices:
    if hasattr(child,'found') and child.found and hasattr(child,'testoptions') and child.testoptions:
      testoptions += ' '+child.testoptions
    if (not hasattr(child,'found') or not child.found) and hasattr(child,'testoptions_whennotfound'):
      testoptions += ' '+child.testoptions_whennotfound
  f.write('PETSC_TEST_OPTIONS = '+testoptions+'\n')
  f.write('PETSC_POST_BUILDS = '+' '.join(self.framework.postbuilds)+'\n')
  f.write('PETSC_POST_INSTALLS = '+' '.join(self.framework.postinstalls)+'\n')
  f.write('PETSC_POST_CHECKS = '+' '.join(self.framework.postchecks)+'\n')
  if not hasattr(name, 'close'):
    f.close()
def outputMakeRuleHeader(self, name):
  '''Write the make rules configuration header (bmake file).
  "name" may be an already-open stream (anything with a "close" attribute) or
  a path; in the path case missing parent directories are created and the
  file is closed at the end.'''
  if hasattr(name, 'close'):
    f = name
    # kept for symmetry with the sibling output* methods; not used here
    filename = 'Unknown'
  else:
    dir = os.path.dirname(name)
    if dir and not os.path.exists(dir):
      os.makedirs(dir)
    # NOTE(review): presumably a pause to dodge filesystem timestamp races -- confirm
    if self.file_create_pause: time.sleep(1)
    f = open(name, 'w')
    filename = os.path.basename(name)
  # rules from the framework itself, then from every child
  self.outputMakeRules(f, self)
  for child in self.childGraph.vertices:
    self.outputMakeRules(f, child)
  if not hasattr(name, 'close'):
    f.close()
  return

def processPackageListDefine(self, defineDict):
  '''Add the PETSC_HAVE_PACKAGES define to defineDict: a sorted, lower-case,
  colon-delimited (and colon-bracketed, quoted) list of all installed
  package names.'''
  key = 'PETSC_HAVE_PACKAGES'
  pkglist = []
  for pkg in self.packages:
    pkglist.extend(pkg.pkgname.lower().split())
  pkglist.sort()
  # renamed from "str", which shadowed the builtin
  value = '":' + ':'.join(pkglist) + ':"'
  defineDict[key] = (key, value)
def outputHeader(self, name, prefix = None, petscconf = False):
  '''Write the configuration header.
  "name" may be an already-open stream (anything with a "close" attribute) or
  a path.  Defines are collected from the framework and every child (see
  processDefines), an include guard is emitted, and the companion poison
  header (self.poisonheader) is always written: with poison pragmas when
  self.enablepoison is set, otherwise as an empty guarded header.'''
  if hasattr(name, 'close'):
    f = name
    filename = 'Unknown'
  else:
    dir = os.path.dirname(name)
    if dir and not os.path.exists(dir):
      os.makedirs(dir)
    # NOTE(review): presumably a pause to dodge filesystem timestamp races -- confirm
    if self.file_create_pause: time.sleep(1)
    f = open(name, 'w')
    filename = os.path.basename(name)
  guard = 'INCLUDED_'+filename.upper().replace('.', '_')
  f.write('#if !defined('+guard+')\n')
  f.write('#define '+guard+'\n\n')
  if hasattr(self, 'headerTop'):
    f.write(str(self.headerTop)+'\n')
  defineDict = {}
  self.processDefines(defineDict, self, prefix)
  for child in self.childGraph.vertices:
    self.processDefines(defineDict, child, prefix)
  if (petscconf):
    self.processPackageListDefine(defineDict)
  # NOTE(review): this recomputes dirname(name); if "name" is a stream this
  # call would fail -- confirm streams never reach this path
  dir = os.path.dirname(name)
  if dir and not os.path.exists(dir):
    os.makedirs(dir)
  if self.file_create_pause: time.sleep(1)
  with open(self.poisonheader,'w') as fpoison:
    if self.file_create_pause: time.sleep(1)
    if self.enablepoison:
      # it is safe to write the poison file
      self.outputPoisons(defineDict,fpoison)
    else:
      # at least 1 of the languages/compilers didn't like poison;
      # emit an empty header so includes of the poison file still work
      poisonFileName = os.path.basename(self.poisonheader,)
      poisonGuard = 'INCLUDED_'+poisonFileName.upper().replace('.', '_')
      lines = [''.join(['#if !defined(',poisonGuard,')\n']),
               ''.join(['#define ',poisonGuard,'\n']),
               '#endif\n']
      fpoison.writelines(lines)
  self.outputDefines(defineDict, f,petscconf)
  if hasattr(self, 'headerBottom'):
    f.write(str(self.headerBottom)+'\n')
  f.write('#endif\n')
  if not hasattr(name, 'close'):
    f.close()
  return

def outputPkgHeader(self, name, prefix = None):
  '''Write the packages configuration header: an include guard, optional
  headerTop/headerBottom text, and the version defines/macros of the
  framework and every child (see outputPkgVersion).'''
  if hasattr(name, 'close'):
    f = name
    filename = 'Unknown'
  else:
    dir = os.path.dirname(name)
    if dir and not os.path.exists(dir):
      os.makedirs(dir)
    if self.file_create_pause: time.sleep(1)
    f = open(name, 'w')
    filename = os.path.basename(name)
  guard = 'INCLUDED_'+filename.upper().replace('.', '_')
  f.write('#if !defined('+guard+')\n')
  f.write('#define '+guard+'\n\n')
  if hasattr(self, 'headerTop'):
    f.write(str(self.headerTop)+'\n')
  self.outputPkgVersion(f, self)
  for child in self.childGraph.vertices:
    self.outputPkgVersion(f, child)
  if hasattr(self, 'headerBottom'):
    f.write(str(self.headerBottom)+'\n')
  f.write('#endif\n')
  if not hasattr(name, 'close'):
    f.close()
  return
def outputCHeader(self, name):
  '''Write the C specific configuration header: typedefs plus function
  prototypes, with the Cxx / extern-C / C sections emitted under the
  appropriate preprocessor guards.  "name" may be an already-open stream
  (anything with a "close" attribute) or a path.'''
  if hasattr(name, 'close'):
    f = name
    filename = 'Unknown'
  else:
    headerDir = os.path.dirname(name)
    if headerDir and not os.path.exists(headerDir):
      os.makedirs(headerDir)
    if self.file_create_pause: time.sleep(1)
    f = open(name, 'w')
    filename = os.path.basename(name)
  guard = 'INCLUDED_'+filename.upper().replace('.', '_')
  f.write('#if !defined('+guard+')\n')
  f.write('#define '+guard+'\n\n')
  self.outputTypedefs(f, self)
  for child in self.childGraph.vertices:
    self.outputTypedefs(f, child)
  def emitPrototypes(language):
    # prototypes for the framework itself, then for every child, in one language
    self.outputPrototypes(f, self, language)
    for child in self.childGraph.vertices:
      self.outputPrototypes(f, child, language)
  emitPrototypes('All')
  f.write('#if defined(__cplusplus)\n')
  emitPrototypes('Cxx')
  f.write('extern "C" {\n')
  emitPrototypes('extern C')
  f.write('}\n')
  f.write('#else\n')
  emitPrototypes('C')
  f.write('#endif\n')
  f.write('#endif\n')
  if not hasattr(name, 'close'):
    f.close()
  return

def getOptionsString(self, omitArgs = []):
  '''Reassemble the command-line arguments into one string, dropping any
  argument whose parsed name appears in omitArgs and double-quoting values
  that contain a space or "["'''
  import nargs
  args = self.clArgs[:]
  for omit in omitArgs:
    args = [arg for arg in args if not nargs.Arg.parseArgument(arg)[0] == omit]
  quoted = []
  for arg in args:
    key, sep, value = arg.partition('=')
    if sep and (' ' in value or '[' in value):
      quoted.append(key+'=\"'+value+'\"')
    else:
      quoted.append(arg)
  return ' '.join(quoted)

def outputBanner(self):
  '''Write a banner to the log: start time, configure options, working
  directory, machine platform, and Python version'''
  import time, sys
  bar = '='*80
  write = self.log.write
  write(bar+'\n')
  write(bar+'\n')
  write('Starting configure run at '+time.strftime('%a, %d %b %Y %H:%M:%S %z')+'\n')
  write('Configure Options: '+self.getOptionsString()+'\n')
  write('Working directory: '+os.getcwd()+'\n')
  write('Machine platform:\n' + str(platform.uname())+'\n')
  write('Python version:\n' + sys.version+'\n')
  write(bar+'\n')
  return

def configureExternalPackagesDir(self):
  '''Set alternative directory external packages are built in, taken from the
  with-packages-build-dir option when present (None otherwise)'''
  key = 'with-packages-build-dir'
  self.externalPackagesDir = self.argDB[key] if key in self.argDB else None
  return
def addBatchSetup(self, setup):
  '''Add a code string (or list of strings) to be run before batch tests execute'''
  self.batchSetup.extend(setup if isinstance(setup, list) else [setup])
  return

def addBatchInclude(self, includes):
  '''Add an include (or a list of includes) to the batch run'''
  self.batchIncludes.extend(includes if isinstance(includes, list) else [includes])
  return

def addBatchLib(self, libs):
  '''Add a library (or a list of libraries) to the batch run'''
  self.batchLibs.extend(libs if isinstance(libs, list) else [libs])
  return

def addBatchBody(self, statements):
  '''Add a statement (or a list of statements) to the batch run'''
  self.batchBodies.extend(statements if isinstance(statements, list) else [statements])
  return

def addBatchCleanup(self, cleanup):
  '''Add a code string (or list of strings) to be run after batch tests execute'''
  self.batchCleanup.extend(cleanup if isinstance(cleanup, list) else [cleanup])
  return
def configureBatch(self):
  '''When batch test bodies were registered, generate, compile, and link a
  conftest executable that -- when run on a compute node of the batch system
  (or the cross-compilation target) -- writes a reconfigure script containing
  the original options plus the batch-determined values; then instruct the
  user and exit.  Does nothing when no batch bodies exist.'''
  if self.batchBodies:
    import nargs
    import sys

    if self.arch:
      confname = 'conftest-%s' % (self.arch,)
      reconfname = 'reconfigure-%s.py' % (self.arch,)
    else:
      confname = 'conftest'
      # BUG FIX: was the bare name reconfigure.py (a NameError at runtime);
      # it must be the filename string
      reconfname = 'reconfigure.py'
    args = self.clArgs[:]
    # C source that writes the reconfigure script from the compute node
    body = ['const char reconfname[] = "' + reconfname + '";',
            'FILE *output = fopen(reconfname,"w");']
    body.append('fprintf(output, "#!'+sys.executable+'\\n");')
    body.append('fprintf(output, "\\nconfigure_options = [\\n");')
    body.extend(self.batchSetup)
    body.extend(self.batchBodies)
    body.extend(self.batchCleanup)
    # pretty print repr(args.values())
    for itm in args:
      if (itm not in ['--configModules=PETSc.Configure','--optionsModule=config.compilerOptions','--force']):
        body.append('fprintf(output," \'%s\',\\n","'+str(itm).replace('"', "'")+'");')
    body.append('fprintf(output,"]");')
    # trailing driver code of the generated reconfigure script
    driver = ['fprintf(output, "\\nif __name__ == \'__main__\':',
              '  import os',
              '  import sys',
              '  sys.path.insert(0, os.path.abspath(\'config\'))',
              '  import configure',
              '  configure.petsc_configure(configure_options)\\n");']
    body.append('\\n'.join(driver))
    body.append('\nfclose(output);\n')
    body.append('chmod(reconfname,0744);')

    oldFlags = self.compilers.CPPFLAGS
    oldLibs = self.compilers.LIBS
    self.compilers.CPPFLAGS += ' ' + ' '.join(self.batchIncludeDirs)
    self.compilers.LIBS = self.libraries.toString(self.batchLibs)+' '+self.compilers.LIBS
    self.batchIncludes.insert(0, '#include <stdio.h>\n#include <sys/types.h>\n#include <sys/stat.h>')
    if not self.checkLink('\n'.join(self.batchIncludes)+'\n', '\n'.join(body), cleanup = 0, codeBegin = '\nint main(int argc, char **argv) {\n'):
      sys.exit('Unable to generate test file for cross-compilers/batch-system\n')
    import shutil
    # Could use shutil.copy, but want an error if confname exists as a directory
    shutil.copyfile(os.path.join(self.tmpDir,'conftest'),confname)
    shutil.copymode(os.path.join(self.tmpDir,'conftest'),confname)
    self.compilers.CPPFLAGS = oldFlags
    self.compilers.LIBS = oldLibs
    self.logClear()
    print('=================================================================================\r')
    print('     Since your compute nodes require use of a batch system or mpiexec you must:\r')
    print('\r')
    print(' 1) cd '+os.getcwd()+'\r')
    print('\r')
    print(' 2) Submit ./'+confname+' to 1 processor of your batch system or system you are\r')
    print('    cross-compiling for; this will generate the file '+reconfname+'\r')
    print('\r')
    print(' 3) Run ./'+reconfname+' (to complete the configure process).\r')
    print('=================================================================================\r')
    sys.exit(0)
  return

#
# There is a great deal of refactoring to be done before this code can be made parallel.
# For example, all the information about the current compile (including language) is contained
# in self, thus each compile/link needs to be done within a lock (which would still require writing
# careful locking code) and this ruins the whole purpose of threads with Python, since
# they only allow non-blocking IO operations etc. -- they don't provide real parallelism.
# Also, changing values in LIBS is currently buggy for threads, as are possibly other variables.
def parallelQueueEvaluation(self, depGraph, numThreads = 1):
  '''Configure the children of depGraph using numThreads worker threads.
  Roots of the dependency graph seed a "todo" queue; as each child finishes,
  its results go on a "done" queue and any successor whose predecessors are
  all configured is pushed onto "todo".  Raises RuntimeError when a child
  fails.
  NOTE(review): "import Queue" is the Python 2 module name; under Python 3
  this raises ImportError, so processChildren() falls back to
  serialEvaluation -- confirm this path is py2-only.'''
  import Queue # novermin
  from threading import Thread

  if numThreads < 1: raise RuntimeError('Parallel configure must use at least one thread')
  # TODO Insert a cycle check
  todo = Queue.Queue()
  done = Queue.Queue()
  numChildren = len(depGraph.vertices)
  # seed the work queue with the dependency-free roots
  for child in graph.DirectedGraph.getRoots(depGraph):
    if not hasattr(child, '_configured'):
      #self.logPrint('PUSH %s to TODO' % child.__class__.__module__)
      todo.put(child)

  def processChildren(num, q):
    # Worker loop: pop a child, configure it, report (ret, log, error, child)
    # on the done queue.  Each exception type gets its own user-facing banner.
    emsg = ''
    while 1:
      child = q.get() # Might have to indicate blocking
      #self.logPrint('PROCESS %s' % child.__class__.__module__)
      ret = 1
      child.saveLog()
      tbo = None
      try:
        if not hasattr(child, '_configured'):
          child.configure()
        else:
          child.no_configure()
        ret = 0
      # the handling of logs, error messages, and tracebacks from errors in children
      # does not work correctly.
      except (RuntimeError, config.base.ConfigureSetupError) as e:
        tbo = sys.exc_info()[2]
        emsg = str(e)
        if not emsg.endswith('\n'): emsg = emsg+'\n'
        msg ='*******************************************************************************\n'\
            +' UNABLE to CONFIGURE with GIVEN OPTIONS (see configure.log for details):\n' \
            +'-------------------------------------------------------------------------------\n' \
            +emsg+'*******************************************************************************\n'
        se = ''
      except (TypeError, ValueError) as e:
        tbo = sys.exc_info()[2]
        emsg = str(e)
        if not emsg.endswith('\n'): emsg = emsg+'\n'
        msg ='*******************************************************************************\n'\
            +' ERROR in COMMAND LINE ARGUMENT to ./configure \n' \
            +'-------------------------------------------------------------------------------\n' \
            +emsg+'*******************************************************************************\n'
        se = ''
      except ImportError as e :
        tbo = sys.exc_info()[2]
        emsg = str(e)
        if not emsg.endswith('\n'): emsg = emsg+'\n'
        msg ='*******************************************************************************\n'\
            +' UNABLE to FIND MODULE for ./configure \n' \
            +'-------------------------------------------------------------------------------\n' \
            +emsg+'*******************************************************************************\n'
        se = ''
      except OSError as e :
        tbo = sys.exc_info()[2]
        emsg = str(e)
        if not emsg.endswith('\n'): emsg = emsg+'\n'
        msg ='*******************************************************************************\n'\
            +' UNABLE to EXECUTE BINARIES for ./configure \n' \
            +'-------------------------------------------------------------------------------\n' \
            +emsg+'*******************************************************************************\n'
        se = ''
      except SystemExit as e:
        tbo = sys.exc_info()[2]
        # a zero/None exit code is a clean shutdown of the worker
        if e.code is None or e.code == 0:
          return
        msg ='*******************************************************************************\n'\
            +' CONFIGURATION FAILURE (Please send configure.log to petsc-maint@mcs.anl.gov)\n' \
            +'*******************************************************************************\n'
        se = str(e)
      except Exception as e:
        tbo = sys.exc_info()[2]
        msg ='*******************************************************************************\n'\
            +' CONFIGURATION CRASH (Please send configure.log to petsc-maint@mcs.anl.gov)\n' \
            +'*******************************************************************************\n'
        se = str(e)
      out = child.restoreLog()
      if ret:
        # append the banner, error text, and (best-effort) the traceback
        out += '\n'+msg+'\n'+se+'\n'
        try:
          import sys,traceback,io
          tb = io.StringIO()
          if not tbo: tbo = sys.exc_info()[2]
          traceback.print_tb(tbo, file = tb)
          out += tb.getvalue()
          tb.close()
        except: pass
      # Update queue
      #self.logPrint('PUSH %s to DONE ' % child.__class__.__module__)
      done.put((ret, out, emsg, child))
      q.task_done() # novermin
      if ret: break
    return

  # Set up some threads to fetch the enclosures
  for i in range(numThreads):
    worker = Thread(target = processChildren, args = (i, todo,))
    # NOTE(review): setDaemon is deprecated in modern Python (use .daemon)
    worker.setDaemon(True)
    worker.start()

  # Main thread: drain the done queue, propagate failures, and release
  # successors whose predecessors are now all configured
  while numChildren > 0:
    ret, msg, emsg, vertex = done.get()
    vertex._configured = 1
    numChildren = numChildren - 1
    #self.logPrint('POP %s from DONE %d LEFT' % (vertex.__class__.__module__, numChildren))
    self.logWrite(msg)
    if ret:
      self.logWrite(emsg)
      raise RuntimeError(emsg)
    for child in depGraph.outEdges[vertex]:
      push = True
      for v in depGraph.inEdges[child]:
        if not hasattr(v, '_configured'):
          #self.logPrint('DENY %s since %s is not configured' % (child.__class__.__module__, v.__class__.__module__))
          push = False
          break
      if push:
        #self.logPrint('PUSH %s to TODO' % child.__class__.__module__)
        todo.put(child)
    done.task_done() # novermin
  todo.join() # novermin
  done.join() # novermin
  return
def serialEvaluation(self, depGraph):
  '''Validate the dependency graph and then configure every child in
  topological order, one at a time, timing each child.
  Validation pass (skipped per-child when ignoreCxxBoundCheck is set):
  - reject --download-<pkg> under --with-batch unless cross_cc exists or the
    package supports batch installs
  - require that every dependency of a requested package was also requested
  - accumulate the tightest C++ dialect bounds over all requested packages
    and fail when they are incompatible'''
  import graph

  def findGraphModule(dependencyGraph,moduleType):
    # Return the single vertex of the given type; error on zero or many
    moduleList = [c for c in dependencyGraph if isinstance(c,moduleType)]
    if len(moduleList) != 1:
      if len(moduleList) < 1:
        errorMessage = 'Did not find module {} in graph'.format(moduleType)
      else:
        errorMessage = 'Found multiple instances of module {} in graph'.format(moduleType)
      raise RuntimeError(errorMessage)
    return moduleList[0]

  def checkChildCxxDialectBounds(child,minCxx,maxCxx):
    # Tighten (minCxx, maxCxx) using the child's dialect limits and record
    # which packages are to blame for each bound
    def assign_blame(key, blame_list, name):
      if key not in blame_list:
        blame_list[key] = set()
      blame_list[key].add(name)
      return

    child_name = child.name
    # NOTE(review): when 'Cxx' is in buildLanguages this assigns
    # child.minCxxVersion even if it is LOWER than the current minimum --
    # confirm that lowering the bound here is intended
    if child.minCxxVersion > minCxx or 'Cxx' in child.buildLanguages:
      minCxx = child.minCxxVersion
      self.logPrint('serialEvaluation: child {child} raised minimum cxx dialect version to {minver}'.format(child=child_name,minver=minCxx))
      assign_blame(minCxx, minCxxVersionBlameList, child_name)
    if child.maxCxxVersion < maxCxx:
      maxCxx = child.maxCxxVersion
      self.logPrint('serialEvaluation: child {child} decreased maximum cxx dialect version to {maxver}'.format(child=child_name,maxver=maxCxx))
      assign_blame(maxCxx, maxCxxVersionBlameList, child_name)
    return minCxx,maxCxx

  ndepGraph = list(graph.DirectedGraph.topologicalSort(depGraph))
  setCompilers = findGraphModule(ndepGraph,config.setCompilers.Configure)
  minCxx,maxCxx = setCompilers.cxxDialectRange['Cxx']
  self.logPrint('serialEvaluation: initial cxxDialectRanges {rng}'.format(rng=setCompilers.cxxDialectRange['Cxx']))
  minCxxVersionBlameList = {}
  maxCxxVersionBlameList = {}
  for child in ndepGraph:
    if self.argDB['ignoreCxxBoundCheck']: continue
    if (self.argDB['with-batch'] and
        hasattr(child,'package') and
        'download-'+child.package in self.framework.clArgDB and
        self.argDB['download-'+child.package] and not
        (hasattr(setCompilers,'cross_cc') or child.installwithbatch)):
      errorMessage = '--download-'+child.package+' cannot be used on this batch systems'
      raise RuntimeError(errorMessage)

    # note, only classes derived from package.py have this attribute
    if hasattr(child,'deps'):
      # a package counts as "requested" when it is required, looked for by
      # default, or explicitly named on the command line in any form
      found = 0
      if child.required or child.lookforbydefault: found = 1
      elif 'download-'+child.package in self.framework.clArgDB and self.argDB['download-'+child.package]: found = 1
      elif 'with-'+child.package in self.framework.clArgDB and self.argDB['with-'+child.package]: found = 1
      elif 'with-'+child.package+'-lib' in self.framework.clArgDB and self.argDB['with-'+child.package+'-lib']: found = 1
      elif 'with-'+child.package+'-dir' in self.framework.clArgDB and self.argDB['with-'+child.package+'-dir']: found = 1
      if not found: continue
      msg = ''
      minCxx,maxCxx = checkChildCxxDialectBounds(child,minCxx,maxCxx)
      # every dependency must itself be requested in some form
      for dep in child.deps:
        if dep.required or dep.lookforbydefault:
          continue
        elif 'download-'+dep.package in self.framework.clArgDB and self.argDB['download-'+dep.package]:
          continue
        elif 'with-'+dep.package in self.framework.clArgDB and self.argDB['with-'+dep.package]:
          continue
        elif 'with-'+dep.package+'-lib' in self.framework.clArgDB and self.argDB['with-'+dep.package+'-lib']:
          continue
        elif 'with-'+dep.package+'-dir' in self.framework.clArgDB and self.argDB['with-'+dep.package+'-dir']:
          continue
        elif dep.download:
          emsg = '--download-'+dep.package+' or '
        else:
          emsg = ''
        msg += 'Package '+child.package+' requested but dependency '+dep.package+' not requested. \n Perhaps you want '+emsg+'--with-'+dep.package+'-dir=directory or --with-'+dep.package+'-lib=libraries and --with-'+dep.package+'-include=directory\n'
      if msg: raise RuntimeError(msg)
      if 'Cxx' in child.buildLanguages and ('with-cxx' in self.framework.clArgDB) and (self.argDB['with-cxx'] == '0'): raise RuntimeError('Package '+child.package+' requested requires C++ but compiler turned off.')
      if 'FC' in child.buildLanguages and ('with-fc' in self.framework.clArgDB) and (self.argDB['with-fc'] == '0'): raise RuntimeError('Package '+child.package+' requested requires Fortran but compiler turned off.')

  if maxCxx < minCxx:
    # low water mark
    loPack = ', '.join(minCxxVersionBlameList[minCxx])
    # high water mark
    hiPack = ', '.join(maxCxxVersionBlameList[maxCxx])
    raise RuntimeError('Requested package(s) have incompatible C++ requirements. Package(s) {loPack} require at least {mincxx} but package(s) {hiPack} require at most {maxcxx}'.format(loPack=loPack,mincxx=minCxx,hiPack=hiPack,maxcxx=maxCxx))
  setCompilers.cxxDialectPackageRanges = (minCxxVersionBlameList,maxCxxVersionBlameList)
  self.logPrint('serialEvaluation: new cxxDialectRanges {rng}'.format(rng=(minCxx,maxCxx)))
  depGraph = graph.DirectedGraph.topologicalSort(depGraph)
  totaltime = 0
  starttime = time.time()
  # configure each child once, timing it; already-configured children get
  # no_configure() instead
  for child in depGraph:
    start = time.time()
    if not hasattr(child, '_configured'):
      child.configure()
    else:
      child.no_configure()
    child._configured = 1
    ctime = time.time()-start
    totaltime = totaltime + ctime
    self.logPrint('child %s took %f seconds' % (child.__class__.__module__,ctime))
  self.logPrint('child sum %f' % (totaltime))
  self.logPrint('child total %f' % (time.time()-starttime))
  # use grep child configure.log | sort -k3 -g
  return
def processChildren(self):
  '''Configure all child modules: in parallel when script.useParallel is set
  and a joinable Queue is available, otherwise serially.
  NOTE(review): "import Queue" is the Python 2 module name, so under
  Python 3 the except clause fires and serialEvaluation is always used --
  confirm whether parallel mode is still reachable.'''
  import script

  useParallel = False
  if script.useParallel:
    try:
      import Queue # novermin
      from threading import Thread
      if hasattr(Queue.Queue(), 'join'): useParallel = True # novermin
    except: pass
  if useParallel:
    self.parallelQueueEvaluation(self.childGraph, script.useParallel)
  else:
    self.serialEvaluation(self.childGraph)
  return

def configure(self, out = None):
  '''Configure the system
  - Must delay database initialization until children have contributed variable types
  - Any child with the "_configured" attribute will not be configured
  Runs setup, banner output, dependency update, child configuration, the
  optional batch step, and finally dumps the configuration files, saves the
  hash, and cleans up.  Returns 1 on completion.'''
  self.setup()
  self.outputBanner()
  self.updateDependencies()
  self.outputBasics()
  self.executeTest(self.configureExternalPackagesDir)
  self.processChildren()
  # batch systems defer part of configuration to a compute-node run
  if self.argDB['with-batch']:
    self.configureBatch()
  self.dumpConfFiles()
  self.saveHash()
  self.cleanup()
  return 1