[cig-commits] commit: Something went terribly wrong when John moved
Mercurial
hg at geodynamics.org
Mon Nov 24 11:27:44 PST 2008
changeset: 2:07515a87d26b
user: LukeHodkinson
date: Tue Jun 24 02:58:26 2008 +0000
files: Configuration.py Installation.py Node.py Package.py Platform.py Project.py SConscript TerminalController.py __init__.py check_headers.py check_libraries.py checks.py packages/BlasLapack.py packages/CompilerFlags.py packages/HDF5.py packages/HGRevision.py packages/MPI.py packages/OSMesa.py packages/OpenGL.py packages/PETSc.py packages/PETScExt.py packages/PICellerator.py packages/SDL.py packages/SVNRevision.py packages/StGermain.py packages/StgDomain.py packages/StgFEM.py packages/X11.py packages/__init__.py packages/cmath.py packages/dl.py packages/libFAME.py packages/libJPEG.py packages/libPNG.py packages/libTIFF.py packages/libXML2.py packages/libavcodec.py packages/pcu.py packages/szip.py
description:
Something went terribly wrong when John moved
things to a trunk subdirectory. Clearing everything
out and starting again.
diff -r 3aba6671e2c9 -r 07515a87d26b Configuration.py
--- a/Configuration.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,110 +0,0 @@
-class Configuration:
- def __init__(self, inst):
-
- # The installation this configuration uses.
- self.inst = inst
-
- # A list of configurations that are this configuration's
- # dependencies.
- self.deps = []
-
- # The set of headers/libraries that were found to work for this
- # configuration, or 'None' if not available.
- self.hdrs = None
- self.libs = None
-
- # Flags whether this installation/configuration combo has
- # static/shared libraries available.
- self.has_static = None
- self.has_shared = None
-
- def __eq__(self, cfg):
- if not (self.inst == cfg.inst):
- return False
- if len(self.deps) != len(cfg.deps):
- return False
- for d1, d2 in zip(self.deps, cfg.deps):
- if not (d1 == d2):
- return False
- return True
-
- def add_dependency(self, cfg):
- """Add a configuration to the list of dependencies."""
-
- if cfg not in self.deps:
- self.deps += [cfg]
-
- def enable(self, scons_env, old_state=None, lib_exclude=[]):
- """Inserts this configuration's information into the provided SCons
- environment."""
-
- # First we have to enable all of our dependencies.
- self.enable_dependencies(scons_env, old_state, lib_exclude=lib_exclude)
-
- # Then call the installation's enable routine.
- self.inst.enable(scons_env, old_state,
- libs=self.libs,
- has_shared=self.has_shared,
- lib_exclude=lib_exclude)
-
- def enable_dependencies(self, scons_env, old_state={}, lib_exclude=[]):
- """Enables all available dependencies."""
-
- for d in self.deps:
- d.enable(scons_env, old_state, lib_exclude=lib_exclude)
-
- def flatten_dependencies(self, deps=[], pkgs={}):
- """Return a list of configurations representing all dependencies of this
- configuration and it's child configurations. Uniqueness is determined by
- the package each configuration belongs to."""
-
- for d in self.deps:
- pkg = d.inst.pkg
-
- # If the package alrady exists in the dictionary of packages we've
- # already got a configuration for, then the configurations must be
- # matching, or this is not a valid set.
- if pkg in pkgs and pkgs[pkg] != d:
- return None
-
- deps += [d]
- pkgs[pkg] = d
-
- # Have to iterate over every child, not just children that are
- # unique. This is because configurations for the same package can,
- # and will, be different.
- result = d.flatten_dependencies(deps, pkgs)
- if result is None:
- return None
- deps += result
-
- def __str__(self, brief=True):
- """Convert to printable string."""
-
- # Get the installation's string first.
- txt = str(self.inst)
-
- # Add information about libraries.
- if self.has_static is not None:
- txt += ' Static libraries: %s\n' % str(self.has_static)
- if self.has_shared is not None:
- txt += ' Shared libraries: %s\n' % str(self.has_shared)
-
- # Now produce the dependency text.
- dep_txt = ''
- for dep in self.deps:
-
- # If we only want brief dependency information only print the
- # dependency's name.
- if brief:
- dep_txt += ' %s\n' % dep.inst.pkg.name
- else:
- dep_txt += ' ' + str(dep)[:-1].replace('\n', '\n ') + '\n'
-
- # If there were any dependencies, add them to the installation's
- # text.
- if dep_txt:
- txt += ' Dependencies:\n'
- txt += dep_txt
-
- return txt
diff -r 3aba6671e2c9 -r 07515a87d26b Installation.py
--- a/Installation.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,231 +0,0 @@
-import os
-
-class Installation:
- def __init__(self, pkg, base_dir='', hdr_dirs=[], lib_dirs=[], fwork=''):
-
- # Keep a reference to the package this installation is of.
- self.pkg = pkg
-
- # The base directory of this installation. Can be empty.
- self.base_dir = base_dir
-
- # A list of header/library directory extensions to be appended
- # to the base directory. Can be relative and absolute paths.
- self.hdr_dirs = list(hdr_dirs)
- self.lib_dirs = list(lib_dirs)
-
- # Lists of headers and libraries to be used in addition to the
- # package's description.
- self.hdrs = []
- self.libs = []
-
- # A list of library names that, if appearing in the above list
- # of libraries, should not be considered a core part of this
- # installation, i.e. should not be checked for.
- self.extra_libs = []
-
- # The name of a framework to use instead of everything else.
- self.fwork = fwork
-
- # A list of symbols that are required by this installation and any
- # preprocessor definitions required to identify them.
- self.syms = None
- self.sym_def = ''
-
- # A list of pre-processor definitions to be set for this installation.
- self.cpp_defines = []
-
- # We need this flag to indicate whether this installation has
- # been processed by it's owning package.
- self.is_processed = False
-
- def __eq__(self, inst):
- return (self.pkg is inst.pkg and
- self.base_dir == inst.base_dir and
- self.hdr_dirs == inst.hdr_dirs and
- self.lib_dirs == inst.lib_dirs)
-
- def add_hdr_dirs(self, hdr_dirs):
- dir_list = self.pkg.env.make_list(hdr_dirs)
- self.hdr_dirs += [d for d in dir_list if d not in self.hdr_dirs]
-
- def add_lib_dirs(self, lib_dirs, prepend=False):
- dir_list = self.pkg.env.make_list(lib_dirs)
- for d in dir_list:
- d = os.path.normpath(d)
- if d not in self.lib_dirs:
- if prepend:
- self.lib_dirs = [d] + self.lib_dirs
- else:
- self.lib_dirs += [d]
-
- def add_hdrs(self, hdrs):
- hdr_list = self.pkg.env.make_list(hdrs)
- self.hdrs += [h for h in hdr_list if h not in self.hdrs]
-
- def add_libs(self, libs):
- lib_list = self.pkg.env.make_list(libs)
- self.libs += [l for l in lib_list if l not in self.libs]
-
- def add_extra_libs(self, libs):
- lib_list = self.pkg.env.make_list(libs)
- self.extra_libs += [l for l in lib_list if l not in self.extra_libs]
-
- def add_cpp_defines(self, defs):
- def_list = self.pkg.env.make_list(defs)
- self.cpp_defines += [d for d in def_list if d not in self.cpp_defines + ['']]
-
- def enable(self, scons_env, old_state=None, libs=[], has_shared=True, lib_exclude=[]):
- """Inserts this installation's information into the provided SCons
- environment. Any environment values that are modified are backed
- up into 'old_state' providing there is not already a backup there."""
-
- # Make sure we have lists.
- lib_exclude = self.pkg.env.make_list(lib_exclude)
-
- # Insert any pre-processor definitions.
- if self.cpp_defines or self.sym_def:
- cpp_def = list(self.cpp_defines)
- if self.sym_def:
- cpp_def += [self.sym_def]
- self.pkg.backup_variable(scons_env, 'CPPDEFINES', old_state)
- scons_env.AppendUnique(CPPDEFINES=cpp_def)
-
- # Insert any header file search paths we may have.
- if self.hdr_dirs:
- self.pkg.backup_variable(scons_env, 'CPPPATH', old_state)
-
- # The ordering of the header search path list is important.
- # Because we insert them into the environment one at a time we
- # need to reverse the list to make sure the order is maintained.
- rev_hdr_dirs = self.pkg.env.reverse_list(self.hdr_dirs)
-
- # Process each path in turn.
- for d in rev_hdr_dirs:
-
- # Combine sub-directories to form the complete search path.
- full_dir = os.path.join(self.base_dir, d)
-
- # If this path is in a predefined list of system specific search
- # paths, then we need to place this path at the end of the
- # list. This way we can be sure that any default installation
- # will not interfere with custom installations.
- if full_dir in self.pkg.system_header_dirs:
-
- # If the path is relative, make sure SCons knows it needs
- # to treat it as relative to the project root.
- if not os.path.isabs(full_dir): full_dir = '#' + full_dir
- scons_env.AppendUnique(CPPPATH=[full_dir])
- else:
-
- # If the path is relative, make sure SCons knows it needs
- # to treat it as relative to the project root.
- if not os.path.isabs(full_dir): full_dir = '#' + full_dir
- scons_env.PrependUnique(CPPPATH=[full_dir])
-
- # Insert any library search paths we may have, but not if this package is in
- # out list of exclusions.
- if self.lib_dirs and self.pkg not in lib_exclude and has_shared:
- self.pkg.backup_variable(scons_env, ['LIBPATH', 'RPATH'], old_state)
-
- # The ordering of the library search path list is important.
- # Because we insert them into the environment one at a time we
- # need to reverse the list to make sure the order is maintained.
- rev_lib_dirs = self.pkg.env.reverse_list(self.lib_dirs)
-
- # Process each path in turn.
- for d in rev_lib_dirs:
-
- # Combine sub-directories to form the complete search path.
- full_dir = os.path.join(self.base_dir, d)
-
- # We need the absolute path for adding rpaths.
- abs_dir = os.path.abspath(full_dir)
-
- # If this path is in a predefined list of system specific search
- # paths, then we need to place this path at the end of the
- # list. This way we can be sure that any default installation
- # will not interfere with custom installations.
- if full_dir in self.pkg.system_library_dirs:
-
- # If the path is relative, make sure SCons knows it needs
- # to treat it as relative to the project root.
- if not os.path.isabs(full_dir): full_dir = '#' + full_dir
- scons_env.AppendUnique(LIBPATH=[full_dir])
- scons_env.AppendUnique(RPATH=[abs_dir])
- else:
-
- # If the path is relative, make sure SCons knows it needs
- # to treat it as relative to the project root.
- if not os.path.isabs(full_dir): full_dir = '#' + full_dir
- scons_env.PrependUnique(LIBPATH=[full_dir])
- scons_env.PrependUnique(RPATH=[abs_dir])
-
- # Add libraries if there are any, unless this package is part of our
- # library exclusions.
- if libs and self.pkg not in lib_exclude:
-
- # If this package is configured to be using static libraries, then
- # we need to specify the path to the library itself.
- if not has_shared:
- self.pkg.backup_variable(scons_env, 'STATICLIBS', old_state)
- libs = self.find_library(libs)
- scons_env.PrependUnique(STATICLIBS=libs)
- else:
- self.pkg.backup_variable(scons_env, 'LIBS', old_state)
- scons_env.PrependUnique(LIBS=libs)
-
- # If we have a framework, add it now.
- if self.fwork:
- self.pkg.backup_variable(scons_env, 'FRAMEWORKS', old_state)
- scons_env.PrependUnique(FRAMEWORKS=[self.fwork])
-
- def find_library(self, lib, static=True, shared=False):
- """Using the search paths we know about, try and locate the files corresponding
- to the library name(s) given."""
-
- libs = self.pkg.env.make_list(lib)
- found_libs = []
- if static:
- for l in libs:
- if l in self.pkg.extra_libraries + self.extra_libs:
- continue
- for lib_dir in self.lib_dirs:
- name = self.pkg.env.subst('${LIBPREFIX}' + l + '${LIBSUFFIX}')
- path = os.path.join(self.base_dir, lib_dir, name)
- if os.path.exists(path):
- found_libs += [path]
- if shared:
- for l in libs:
- if l in self.pkg.extra_libraries + self.extra_libs:
- continue
- if self.pkg.shared_libraries is not None and l not in sef.pkg.shared_libraries:
- continue
- for lib_dir in self.lib_dirs:
- name = self.pkg.env.subst('${SHLIBPREFIX}' + l + '${SHLIBSUFFIX}')
- path = os.path.join(self.base_dir, lib_dir, name)
- if os.path.exists(path):
- found_libs += [path]
- return found_libs
-
- def __str__(self):
- """Convert to printable string."""
-
- txt = 'Package: %s\n' % self.pkg.name
- if self.base_dir:
- txt += ' Base directory: %s\n' % self.base_dir
- if self.hdr_dirs:
- txt += ' Header extensions: %s\n' % self.hdr_dirs
- if self.lib_dirs:
- txt += ' Library extensions: %s\n' % self.lib_dirs
- if self.libs:
- txt += ' Libraries: %s\n' % self.libs
- if self.fwork:
- txt += ' Framework: %s\n' % self.fwork
- if self.cpp_defines or self.sym_def:
- cpp_def = list(self.cpp_defines)
- if self.sym_def:
- cpp_def += [self.sym_def]
- txt += ' Exporting: %s\n' % (cpp_def)
-
- return txt
diff -r 3aba6671e2c9 -r 07515a87d26b Node.py
--- a/Node.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,294 +0,0 @@
-import os
-import SConfig
-
-class Node(object):
- def __init__(self, scons_env, scons_opts, required=False):
- # Override to give a different name.
- self.name = self.__module__.split('.')[-1]
-
- # Option variables.
- self.command_name = self.name.lower()
- self.environ_name = self.name.upper()
- self.option_map = {} # Maps command-line options to their environment alternatives.
- self.options_processed = False
- self.options_result = '' # String to store the results of options processing for later
- # display.
-
- # Override with a list of methods to run during configuration.
- self.checks = []
-
- # If this flag is false, we need to run the setup method before
- # continuing on to configuring.
- self.is_setup = False
-
- # Will be set after configuration.
- self.configured = False
- self.result = False
-
- # Private stuff.
- self.env = scons_env
- self.opts = scons_opts
- self.required = required
- self.deps = []
-
- # Setup our option database.
- self.setup_options()
- self.opts.Update(self.env)
-
- def setup_options(self):
- """Setup all the options for this package."""
-
- pass
-
- def dependency(self, package_module, required=True, **kw):
- """Add another package as a dependency of this package. If required is False, the
- dependent package is not required, and thus will not cause this package to fail if
- it cannot be found."""
-
- if self.configured:
- print 'Error: Cannot add a dependency during configuration.'
- self.env.Exit()
- pkg = self.env.Package(package_module, required, **kw)
- if pkg not in [d[0] for d in self.deps]:
- self.deps += [(pkg, required)]
- return pkg
-
- def flatten_dependencies(self, deps=[]):
- """Return a list of all unique dependencies this node has."""
-
- for d in self.deps:
- if d not in deps:
- deps += [d]
- d.flatten_dependencies(deps)
-
- def setup(self):
- """Anything that needs to be finalised before continuing with the configuration needs
- to go here."""
-
- # Set the ready flag.
- self.is_setup = True
-
- # Setup dependencies first.
- for dep, req in self.deps:
- dep.setup()
-
- # Process options which will print out what options were found.
- if not self.options_processed:
- self.options_processed = True
- self.process_options()
-
- def configure(self, scons_ctx):
- """Perform the configuration of this package."""
-
- # Will need to color some stuff here.
- import TerminalController
- term = TerminalController.TerminalController()
-
- # Basic setup.
- if self.configured:
- return
- self.ctx = scons_ctx
- self.configured = True
- self.process_dependencies()
-
- # Print opening message.
- self.ctx.Message('Configuring package %s ... ' % self.name)
- self.ctx.Display('\n')
-
- # Display the result of options processing.
- self.ctx.Log(self.options_result)
- print term.render('${GREEN}%s${NORMAL}\r' % self.options_result),
-
- # Check we have all dependencies.
- result = True
- for pkg, req in self.deps:
- if req and not pkg.result:
- self.ctx.Log(' Missing dependency: %s\n' % pkg.name)
- print term.render(' ${RED}Missing dependency: %s${NORMAL}\n' % pkg.name),
- result = False
-
- # Perform as many checks as we can without failing.
- if result:
- for check in self.checks:
- result = check()
- if not result:
- break
-
- # If everything succeeded, display configuration results.
- if result:
- self.display_configuration()
-
- # Handle results.
- self.result = result
- self.ctx.Display(' ')
- if not self.result:
- print term.render('${RED}'),
- self.ctx.Result(result)
- if not self.result:
- print term.render('${NORMAL}\r'),
-
- # If this was a critical fail, try and help the user.
- if self.required and not result:
- self.ctx.Display('\nThe required package ' + self.name + ' could not be found.\n')
- self.ctx.Display('To see further details, please read the ')
- self.ctx.Display(term.render('${BLUE}\'config.log\'${NORMAL}'))
- self.ctx.Display(' file.\n')
- if len(self.option_map.keys()):
- self.ctx.Display('You can directly specify search parameters for this package\n')
- self.ctx.Display(term.render('via the following ${BLUE}command line options${NORMAL}:\n\n'))
- for opt in self.option_map.iterkeys():
- self.ctx.Display(' ' + opt + '\n')
- self.ctx.Display('\nRun \'scons help\' for more details on these options.\n\n')
- self.env.Exit()
-
- def enable(self, scons_env, old_state=None):
- """Modify the SCons environment to have this package enabled. Begin by inserting
- all options on this node into the environment."""
-
- for pkg, req in self.deps: # Enable dependencies first.
- if pkg.result:
- pkg.enable(scons_env, old_state)
- for opt in self.option_map.iterkeys(): # Now add options.
- if opt in self.env._dict:
- scons_env[opt] = self.env[opt]
-
- def backup_variable(self, scons_env, var_name, old_state):
- """Make a backup of one or more SCons environment variables into 'old_state'.
- If there is already a backup of a variable it will not be overwritten."""
-
- # If we havn't been given a dictionary to backup to, just return now.
- if old_state is None:
- return
-
- # Make sure we're dealing with a list.
- var_names = scons_env.make_list(var_name)
-
- # Backup all variables.
- for vn in var_names:
- if vn not in old_state:
- if vn in scons_env._dict:
- old_state[vn] = scons_env[vn]
- else:
- old_state[vn] = None
-
- def restore_state(self, scons_env, old_state):
- for var_name, state in old_state.iteritems():
- if state is None:
- del scons_env[var_name]
- else:
- scons_env[var_name] = state
-
- def process_options(self):
- """Do any initial option processing, including importing any values from
- the environment and validating that all options are consistent."""
-
- # Search command line options.
- cmd_opts = False
- for opt in self.option_map.iterkeys():
- if opt in self.opts.args:
- if not cmd_opts:
- self.options_result += ' Found command line options:\n'
- cmd_opts = True
- self.options_result += ' %s = %s\n' % (opt, self.opts.args[opt])
- break
-
- # We don't want to mix command line and evironment options.
- if cmd_opts:
- return
-
- # Now go for environment options.
- env_opts = False
- for cmd, env in self.option_map.iteritems():
- if cmd not in self.opts.args and env in self.env['ENV']:
- if not env_opts:
- self.options_result += ' Found environment options:\n'
- env_opts = True
- self.env[cmd] = self.env['ENV'][env]
- self.options_result += ' %s = %s\n' % (env, self.env[cmd])
-
- def process_dependencies(self):
- """Ensure all dependencies have been configured before this package."""
-
-# old_state = {}
- for pkg, req in self.deps:
- pkg.configure(self.ctx)
-# if pkg.result:
-# pkg.enable(self.env, old_state)
-# return old_state
-
- def compile_source(self, source):
- """At this point we know all our construction environment has been set up,
- so we should be able to compile some source code."""
-
- result = self.run_scons_cmd(self.ctx.TryCompile, source, '.c')
- return [result[0], result[1]]
-
- def link_source(self, source):
- """At this point we know all our construction environment has been set up,
- so we should be able to build and run the application."""
-
- result = self.run_scons_cmd(self.ctx.TryLink, source, '.c')
- return [result[0], result[1]]
-
- def library_source(self, source):
- """Build a library out of some source code."""
-
- result = self.run_scons_cmd(self.ctx.TryBuild, self.env.SharedLibrary,
- source, '.c')
- return [result[0], result[1]]
-
- def run_source(self, source):
- """At this point we know all our construction environment has been set up,
- so we should be able to build and run the application."""
-
- result = self.run_scons_cmd(self.ctx.TryRun, source, '.c')
- return [result[0][0], result[0][1], result[1]]
-
- def run_scons_cmd(self, cmd, *args, **kw):
- # Put some space between my logs and SCons's logs.
- self.ctx.Log('\n')
-
- old_log = self.ctx.sconf.logstream
- self.ctx.sconf.logstream = open('sconfig.log', 'w') # Capture the log.
- res = cmd(*args, **kw) # Execute the command.
- try:
- self.ctx.sconf.logstream.close() # Make sure the file is closed.
- finally:
- pass
- self.ctx.sconf.logstream = old_log # Replace the old log.
-
- # Return results.
- log_file = open('sconfig.log', 'r')
- log = log_file.read()
- log_file.close()
- os.remove('sconfig.log')
- old_log.write(log)
-
- # Put some space between my logs and SCons's logs.
- self.ctx.Log('\n')
-
- return [res, log]
-
- def display_configuration(self):
- """Print out a brief summary of what options we found/used."""
- pass
-
- def push_state(self, state, append=False):
- old = {}
- copy = dict(state)
- for k, v in copy.iteritems():
- if not v:
- continue
- if not isinstance(v, list):
- copy[k] = [v]
- else:
- copy[k] = v
- old[k] = self.env.get(k, [])
- if append:
- self.env.AppendUnique(**copy)
- else:
- self.env.PrependUnique(**copy)
- return old
-
- def pop_state(self, old):
- self.env.Replace(**old)
diff -r 3aba6671e2c9 -r 07515a87d26b Package.py
--- a/Package.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,476 +0,0 @@
-import sys, os, platform, glob
-import SCons.Script
-import SConfig
-import checks
-
-class Package(SConfig.Node):
- """An object to describe how to search for a package
- that has been installed on a system. There are a lot of
- options that can be modified to refine how the search
- proceeds and also how results are generated."""
-
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Node.__init__(self, scons_env, scons_opts, required)
-
- # Try and gain access to a dynamic loader in order to check for
- # consistent shared libraries. Make sure we don't try this for
- # the dl package.
- if self.name != 'dl':
- self.dl = self.dependency(SConfig.packages.dl)
-
- # This will be set in the preprocessor.
- self.have_define = kw.get('have_define', '')
-
- # Search options.
- self.base_dirs = [] #['']
- self.base_patterns = [] #['']
- self.sub_dirs = [] #[[[''], ['']]]
- self.header_sub_dir = [] #['']
- self.system_header_dirs = []
- self.system_library_dirs = []
-
- # Which headers do we require?
- self.headers = [] #[['']]
-
- # Library options.
- self.libraries = [] #[['']]
- self.shared_libraries = None # Only libraries listed here will be considered
- # when checking for shared libraries.
- self.extra_libraries = [] # These libraries are not considered locatable.
- self.require_shared = kw.get('require_shared', False)
- self.frameworks = []
- self.symbols = [([], '')] #[([''], '')]
- self.symbol_setup = ''
- self.symbol_teardown = ''
- self.symbol_prototypes = [] #['']
- self.symbol_calls = [] #['']
- self.init_code = ''
- self.fina_code = ''
- self.check_code = ''
-
- # Framework options.
- self.frameworks = [] #[['']]
-
- # Used to determine whether we've executed our default method to
- # populate the set of candidate installations.
- self.candidates_built = False
-
- # We need this to flag whether we've been given one or more candidate
- # installations from options.
- self.given_candidates = False
-
- # These is our set of candidate installations and configurations.
- self.candidates = []
- self.cand_cfgs = []
-
- # Lists of all valid installations and configurations. These are
- # populated during the configuration of the package.
- self.installations = []
- self.configurations = []
-
- # This is set to the configuration that was selected to be used.
- self.selected = None
-
-
- # Need this so we can get access to information about the
- # platform we're running on.
- self.platform = self.dependency(SConfig.Platform, True)
-
- # We have one configuration check.
- self.checks = [self.check_candidates]
-
- # Setup search defaults for the platform we're on.
- self.setup_search_defaults()
-
- def setup_options(self):
- """Two things need to happen here. The first is to tell SCons about
- any options we're going to use. Do that by calling
- 'self.opts.AddOptions'. The second is to add entries to
- 'self.option_map', which maps a command-line option to it's
- envrionment equivalent."""
-
- SConfig.Node.setup_options(self)
- self.opts.AddOptions(
- SCons.Script.BoolOption('with_' + self.command_name,
- 'Turn on/off package %s' % self.name, 1),
- SCons.Script.PathOption(self.command_name + '_dir',
- '%s installation path' % self.name,
- None, SCons.Script.PathOption.PathIsDir),
- SCons.Script.PathOption(self.command_name + '_inc_dir',
- '%s header installation path' % self.name,
- None, SCons.Script.PathOption.PathIsDir),
- SCons.Script.PathOption(self.command_name + '_lib_dir',
- '%s library installation path' % self.name,
- None, SCons.Script.PathOption.PathIsDir),
- (self.command_name + '_lib',
- '%s libraries' % self.name,
- None, None),
- (self.command_name + '_framework',
- '%s framework' % self.name,
- None, None))
- self.option_map = {'with_' + self.command_name: None,
- self.command_name + '_dir': self.environ_name + '_DIR'}
-
- def setup_search_defaults(self):
- """Setup the usual search paths for packages depending on the kind of system
- we're on."""
-
- if self.platform.system in ['Darwin', '*ix']:
- self.base_dirs = ['/usr', '/usr/local']
- self.sub_dirs = [[['include'], ['lib']]]
- if self.platform.bits == 64:
- self.sub_dirs = [[['include'], ['lib64']],
- [['include'], [os.path.join('lib', '64')]]] + self.sub_dirs
-
- # Set Darwin specific defaults.
- if self.platform.system == 'Darwin':
- self.base_dirs += ['/sw']
-
- # Set Window specific defaults.
- if self.platform.system == 'Windows':
- pass # TODO
-
- # Combine these guys to build default system paths. We need these to ensure specific
- # include paths are used before generic ones.
- for base_dir in ['/usr', '/sw']:
- for hdr_dirs, lib_dirs in self.combine_sub_dirs(base_dir):
- hdr_dirs = [os.path.join(base_dir, h) for h in hdr_dirs]
- hdr_dirs = [h for h in hdr_dirs if h not in self.system_header_dirs]
- self.system_header_dirs += hdr_dirs
- lib_dirs = [os.path.join(base_dir, l) for l in lib_dirs]
- lib_dirs = [l for l in lib_dirs if l not in self.system_library_dirs]
- self.system_library_dirs += lib_dirs
-
- def setup(self):
- """Finalise everything before running the configuration checks."""
-
- # Run the parent setup method.
- SConfig.Node.setup(self)
-
- # If we havn't already done so, build the list of candidates.
- if not self.candidates_built:
- self.candidates_built = True
- self.setup_candidates()
-
- # Now we need to traverse our set of candidate installations, looking
- # for any that havn't been processed.
- for inst in self.candidates:
- if not inst.is_processed:
- self.process_installation(inst)
-
- def setup_candidates(self):
- """Using the system specific search information currently set on
- the package class, build a list of candidate installations in
- the 'candidates' member."""
-
- # If we've been given options directly specifying the location of this
- # package we need to use those in place of searching for locations.
- base_dir = self.env.get(self.command_name + '_dir', '')
- inc_dir = self.env.get(self.command_name + '_inc_dir', '')
- lib_dir = self.env.get(self.command_name + '_lib_dir', '')
- if inc_dir or lib_dir:
- if not (inc_dir and lib_dir):
- print ' Error: must specify both of'
- print ' ' + self.command_name + '_inc_dir'
- print ' ' + self.command_name + '_lib_dir'
- env.Exit()
- self.add_candidate(SConfig.Installation(self, '', [inc_dir], [lib_dir]))
- self.given_candidates = True
- return
-
- # Combine all possible base directories.
- if not base_dir:
- base_dirs = list(self.base_dirs)
- for dir in self.base_dirs:
- for ptrn in self.base_patterns:
- base_dirs += glob.glob(os.path.join(dir, ptrn))
- else:
- base_dirs = [base_dir]
-
- # Make sure there are no symbolic links in the base directories. If there are
- # any in there, expand them and make sure there are no duplicates.
- new_base_dirs = []
- for d in base_dirs:
- d = os.path.realpath(d)
- if d not in new_base_dirs:
- new_base_dirs += [d]
- base_dirs = new_base_dirs
-
- # Traverse the list of base directories and form each
- # installation.
- for dir in base_dirs:
- for hdr, lib in self.combine_base_dir(dir):
- self.add_candidate(SConfig.Installation(self, dir, hdr, lib))
-
- # If we have any frameworks to try, create candidates for them now.
- for fw in self.frameworks:
- self.add_candidate(SConfig.Installation(self, fwork=fw))
-
- # If we were given a base directory we need to set the 'given_candidates' flag
- # so that other candidate entries are ignored.
- if base_dir:
- self.given_candidates = True
-
- def combine_base_dir(self, base_dir):
- """Yields combinations of the provided base directory and possible sub-
- directories in the form of a [header sub-directory, library sub-
- directory] list. We use a list so that it can be modified in place."""
-
- # If the path doesn't exist or isn't a directory, don't yield anything.
- if not (os.path.exists(base_dir) and os.path.isdir(base_dir)):
- return
-
- # Combine the sub-directories.
- for hdr, lib in self.combine_sub_dirs(base_dir):
-
- # Yield the immediate results.
- yield [hdr, lib]
-
- # Also try all combinations of header sub-directories, if they
- # were given.
- for sub in self.combine_header_sub_dir(base_dir, hdr):
- yield [sub, lib]
-
- def combine_sub_dirs(self, base_dir):
- """Take a base directory and combine it with the set of header and library
- subdirectories. Yields (['header_dirs'], ['lib_dirs'])."""
-
- for hdr, lib in self.sub_dirs:
- loc_okay = True
- hdr_dirs = []
- lib_dirs = []
-
- # Combine header subdirectories.
- for h in hdr:
- dir = os.path.join(base_dir, h)
- if not (os.path.exists(dir) and os.path.isdir(dir)):
- loc_okay = False
- break
- hdr_dirs += [h]
- if not loc_okay:
- continue
-
- # Combine library subdirectories.
- for l in lib:
- dir = os.path.join(base_dir, l)
- if not (os.path.exists(dir) and os.path.isdir(dir)):
- loc_okay = False
- break
- lib_dirs += [l]
- if not loc_okay:
- continue
-
- yield (hdr_dirs, lib_dirs)
-
- def combine_header_sub_dir(self, base_dir, hdr_dirs):
- if not self.header_sub_dir or not hdr_dirs:
- return
- for sub_dir in self.header_sub_dir:
- cand = [os.path.join(h, sub_dir) for h in hdr_dirs if h]
- for d in cand:
- path = os.path.join(base_dir, d)
- if not (os.path.exists(path) and os.path.isdir(path)):
- return
- yield cand
-
- def add_candidate(self, inst):
- """Add a unique candidate installation. Note that if candidate(s) were
- given via options, this method will ignore additional candidates."""
-
- if not self.given_candidates and inst not in self.candidates:
- self.candidates += [inst]
- self.is_setup = False
-
- def process_installation(self, inst):
- """This method gives us a chance to modify any of the details of this
- installation before moving on to checking it's validity. Here we can
- also determine which, if any, other dependant installations are given
- by this installation's package config (or any other means)."""
-
- inst.is_processed = True
-
- def check_candidates(self):
- """Runs a sequence of tests to confirm the validity of candidate
- installations. All that pass are moved to the 'installations'
- member."""
-
- # Combine all of the available candidate installations with the
- # available dependencies to build a set of candidate configurations.
- self.setup_configurations()
-
- # If we have no candidates, report the problem.
- if not self.candidates:
- self.ctx.Display(' No candidate installations found.\n')
- return False
- if not self.cand_cfgs:
- self.ctx.Display(' No candidate configurations found.\n')
- return False
-
- # Try out all candidates.
- cur = 1 # Keep track of which one we're currently trying.
- for cfg in self.cand_cfgs:
- # Print current status.
- self.ctx.Log(' Trialing candidate %d of %d ...\n' % (cur, len(self.cand_cfgs)))
- self.ctx.Log(' ' + cfg.__str__(False)[:-1].replace('\n', '\n ') + '\n')
- print ' Trialing candidate %d of %d ...\r' % (cur, len(self.cand_cfgs)),
- cur = cur + 1
-
- # Check for the headers and libraries.
- if self.check_headers(cfg) and self.check_libraries(cfg):
-
- # If the checks passed, include the 'have_define' if there and add
- # this installation to the list.
- if self.have_define:
- cfg.inst.add_cpp_defines(self.have_define)
- self.configurations += [cfg]
-
- # Print results.
- if len(self.configurations) == 1:
- self.ctx.Display('\n Found %d valid configuration.\n' % len(self.configurations))
- else:
- self.ctx.Display('\n Found %d valid configurations.\n' % len(self.configurations))
-
- # Log the valid configurations.
- for cfg in self.configurations:
- self.ctx.Log(cfg.__str__(False))
-
- # If we couldn't find a valid installation return negative.
- if len(self.configurations) == 0:
- return False
- return True
-
- def setup_configurations(self):
- """Copies each of the candidate installations and includes all
- permutations of dependent packages."""
-
- for deps in self.combine_dependencies():
- for inst in self.candidates:
- cfg = SConfig.Configuration(inst)
- cfg.deps = list(deps)
- self.cand_cfgs += [cfg]
-
- def combine_dependencies(self, deps=[], pkgs={}, cur_index=0):
- """Each combination of dependent installations represents a
- unique installation of this package. This method generates sets
- of unique dependency combinations."""
-
- # The dictionary 'pkgs' is to map packages to installations. This is
- # needed to prevent multiple installations being used for the same
- # packages.
-
- if cur_index == len(self.deps):
- yield deps # Complete permutation.
- else:
- cur_dep, required = self.deps[cur_index]
-
- # If the dependency isn't actually required by this package, include
- # a combination that doesn't use it.
- if not required:
- for d in self.combine_dependencies(deps, pkgs, cur_index + 1):
- yield d
-
- # We can only iterate over installations if we're dealing with a package.
- if isinstance(cur_dep, Package):
-
- # Check if we already have this package selected.
- if cur_dep in pkgs:
- deps += [pkgs[cur_dep]]
- for d in self.combine_dependencies(deps, pkgs, cur_index + 1):
- yield d
- del deps[-1]
-
- elif len(cur_dep.configurations):
-
- # Try each installation.
- for dep_cfg in cur_dep.configurations:
-
- # Traverse the dependency and collect any sub-dependencies
- # into a copied package list.
- new_pkgs = dict(pkgs)
- rem = [dep_cfg]
- while len(rem):
- cur = rem.pop()
- if cur.inst.pkg not in new_pkgs:
- new_pkgs[cur.inst.pkg] = cur
- rem += cur.deps
-
- # Set dependency and recurse.
- deps += [dep_cfg]
- for d in self.combine_dependencies(deps, new_pkgs, cur_index + 1):
- yield d
- del deps[-1]
-
- elif required:
-
- # There are no installations for this dependency. If it's
- # a required dependency then something has gone very wrong.
- # Throw an error here.
- # TODO
- sys.exit()
- else:
- for d in self.combine_dependencies(deps, pkgs, cur_index + 1):
- yield d
-
- def get_check_headers_fail_reason(self, fail_logs):
- return ''
-
- def get_check_symbols_fail_reason(self, fail_logs):
- return ''
-
- def configure(self, scons_ctx):
- # If this package is deselected just return now.
- if not self.required and not self.env['with_' + self.command_name]:
- return
-
- # Run the configuration.
- SConfig.Node.configure(self, scons_ctx)
-
- def check_headers(self, cfg):
- """Determine if the required headers are available with the current construction
- environment settings."""
-
- # If there are no headers to check, automatically pass.
- if not self.headers:
- return True
-
- # Try and find a functional set of headers.
- for cfg.hdrs in self.headers:
-
- # Add any installation specific headers.
- cfg.hdrs += [h for h in cfg.inst.hdrs if h not in cfg.hdrs]
-
- # Run the check.
- if checks.check_headers(cfg):
- return True
-
- # If we failed to find anything, set the headers entry to 'None'.
- cfg.hdrs = None
- return False
-
- def check_libraries(self, cfg):
- """Check if the currently selected location is a valid installation of the
- required package. At this stage we know that the paths given in the location
- actually exist and we need to confirm that the libraries in 'libs' exist."""
-
- # If there are no libraries or frameworks to check, automatically pass.
- if not self.libraries:
- return True
-
- # Try and find a functional set of libraries.
- for cfg.libs in self.libraries:
-
- # Add any installation specific libraries.
- cfg.libs += [l for l in cfg.inst.libs if l not in cfg.libs]
-
- # Run the check.
- if checks.check_libraries(cfg):
- return True
-
- # If we failed to find anything, set the libraries entry to 'None'.
- cfg.libs = None
- return False
-
- def enable(self, scons_env, old_state=None):
- SConfig.Node.enable(self, scons_env, old_state)
- if self.selected:
- self.selected.enable(scons_env, old_state)
diff -r 3aba6671e2c9 -r 07515a87d26b Platform.py
--- a/Platform.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,67 +0,0 @@
-import os, platform
-import SCons.Script
-import SConfig
-
-class Platform(SConfig.Node):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Node.__init__(self, scons_env, scons_opts, required)
- self.checks = [self.print_results]
-
- # Will be set after successful configuration.
- self.system = ''
- self.bits = 0
-
- # We need to do these now.
- self.check_system()
- self.check_bits()
- self.check_CC()
-
- def setup_options(self):
- self.opts.AddOptions(
- SCons.Script.BoolOption('with_32bit', 'Generate 32bit code', 0),
- SCons.Script.BoolOption('with_64bit', 'Generate 64bit code', 0),
- )
-
- def check_system(self):
- self.system = platform.system()
- if not self.system or self.system in ['Linux', 'Unix']:
- self.system = '*ix'
-
- # Need to modify building shared libraries when on Mac OS X.
- if self.system == 'Darwin':
- self.env.AppendUnique(SHLINKFLAGS=['-flat_namespace',
- '-single_module',
- '-undefined', 'suppress'])
- import SCons.Util # And fix RPATHs.
- self.env['LINKFLAGS'] = SCons.Util.CLVar('')
- self.env['RPATHPREFIX'] = ''
- self.env['RPATHSUFFIX'] = ''
- self.env['_RPATH'] = ''
-
- # Use 'install_name' instead.
- self.env.Append(SHLINKFLAGS=['-install_name', '${_abspath(TARGET)}'])
-
- self.env.AppendUnique(CONFIGVARS=['SHLINKFLAGS', 'LINKFLAGS',
- 'RPATHPREFIX', 'RPATHSUFFIX', '_RPATH'])
-
- def check_bits(self):
- if (platform.platform().find('x86_64') != -1 or \
- platform.platform().find('ppc64') != -1 or \
- platform.architecture()[0].find('64') != -1 or \
- self.env['with_64bit']) and \
- not self.env['with_32bit']:
- self.bits = 64
- else:
- self.bits = 32
-
- def check_CC(self):
- if 'CC' in self.env['ENV']:
- self.env['CC'] = self.env['ENV']['CC']
- self.CC = self.env['CC']
-
- def print_results(self):
- self.ctx.Display(" Building on a %s platform\n" % self.system)
- self.ctx.Display(" Building for %d bit architecture\n" % self.bits)
- if hasattr(self, 'CC'):
- self.ctx.Display(" Using environment specified C compiler: %s\n" % self.CC)
- return True
diff -r 3aba6671e2c9 -r 07515a87d26b Project.py
--- a/Project.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,239 +0,0 @@
-import os, sys
-import SCons.Script
-import SConfig
-from Package import Package
-from Installation import Installation
-from Configuration import Configuration
-
-class Project(SConfig.Node):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Node.__init__(self, scons_env, scons_opts, required)
- self.checks += [self.check_libs, self.select_config, self.print_results]
-
- def setup_options(self):
- self.opts.AddOptions(
- SCons.Script.BoolOption('with_debug',
- 'Generate debugging symbols', 1),
- SCons.Script.BoolOption('static_libraries',
- 'Build static libraries', 1),
- SCons.Script.BoolOption('shared_libraries',
- 'Build shared libraries', 1),
- ('build_dir', 'Temporary build directory', 'build')
- )
-
- def check_libs(self):
- if not self.env['static_libraries'] and not self.env['shared_libraries']:
- self.ctx.Display(" Both static and shared libraries disabled!\n")
- return False
- return True
-
- def select_config(self):
- """Decide which set of dependencies to select as the desired
- configuration."""
-
- # Don't know why, but I need to do the following lengthy thing because
- # this doesn't work: deps = [d for d in self.reduce_dependencies()]
- deps = []
- clashes = []
- for d in self.reduce_dependencies():
- if isinstance(d, tuple):
- clashes += [d]
- else:
- cur = [s for s in d]
- if cur:
- deps += [cur]
-
- # If there were no valid configurations found, then we're pretty much
- # screwed. Display the clashes so the user knows why.
- if not len(deps):
- self.ctx.Display(' There are no valid combinations of the required packages.\n')
- self.ctx.Display(' This means that one or more packages have conflicting\n')
- self.ctx.Display(' dependencies.\n\n')
- if not len(clashes):
- raise 'Error: Should be clashes!'
-
- # Need to process the list of clashes and construct some useful information.
- clash_dict = {}
- for clash in clashes:
- if clash[2].inst.pkg != clash[3].inst.pkg:
- raise 'Error: Inconsistent clash report!'
- cause = clash[2].inst.pkg
- top_pkgs = (clash[0].inst.pkg, clash[1].inst.pkg)
- if cause not in clash_dict:
- clash_dict[cause] = {}
- top_pkg_dict = clash_dict[cause]
- for top_pkg, useable in zip(top_pkgs, clash[2:]):
- if top_pkg == useable.inst.pkg:
- continue
- if top_pkg not in top_pkg_dict:
- top_pkg_dict[top_pkg] = []
- if useable not in top_pkg_dict[top_pkg]:
- top_pkg_dict[top_pkg] += [useable]
-
- for cause_pkg, useable_dict in clash_dict.iteritems():
- self.ctx.Display(' There were conflicts with package \'%s\':\n' % cause_pkg.name)
- for useable_pkg, useable_list in useable_dict.iteritems():
- self.ctx.Display(' %s can use:\n' % useable_pkg.name)
- for useable in useable_list:
- self.ctx.Display(' %s\n' % useable.inst.base_dir)
- return False
-
- # Decide which selection of dependencies to use. For the moment base
- # it entirely on the number of packages in the set.
- selected = []
- for d in deps:
- if len(d) > len(selected):
- selected = d
-
- # Set the 'selected' member of each package with configurations.
- done = []
- rem = list(selected)
- while len(rem):
- cur = rem.pop()
- if isinstance(cur, Configuration):
- if cur.inst.pkg not in done:
- cur.inst.pkg.selected = cur
- rem += cur.deps
- done += [cur.inst.pkg]
- return True
-
- def reduce_dependencies(self, deps=[], pkgs={}, cur_index=0):
- """Each combination of dependent installations represents a
- unique installation of this package. This method generates sets
- of unique dependency combinations."""
-
- # The dictionary 'pkgs' is to map packages to installations. This is
- # needed to prevent multiple installations being used for the same
- # packages.
-
- # Once we're at the end of the list of dependencies we can
- # yield a result.
- if cur_index == len(self.deps):
- yield deps
-
- else:
- cur_dep, required = self.deps[cur_index]
-
- # If the dependency isn't actually required by this package, include
- # a combination that doesn't use it.
- if not required:
- for d in self.reduce_dependencies(deps, pkgs, cur_index + 1):
- yield d
-
- # 'Package's are the only kind of object to have installations.
- if isinstance(cur_dep, Package):
-
- # If we've already included an installation of this package
- # in this set of dependencies we must use the same one in
- # every other dependency.
- if cur_dep in pkgs:
- deps += [pkgs[cur_dep]]
- for d in self.reduce_dependencies(deps, pkgs, cur_index + 1):
- yield d
- del deps[-1]
-
- # If we haven't already found this dependency anywhere, try
- # out all of its installations, if it has any.
- elif len(cur_dep.configurations):
- for dep_cfg in cur_dep.configurations:
-
- # We have to keep the current state of 'pkgs' clean,
- # so copy it to a new one.
- new_pkgs = dict(pkgs)
-
- # Traverse the dependency tree for this installation,
- # adding each dependent installation to the 'new_pkgs'
- # mapping.
- rem = [dep_cfg]
- okay = True
- while len(rem):
- cur = rem.pop()
-
- # If the installation doesn't exist in the mapping, just
- # add it in and continue.
- if cur.inst.pkg not in new_pkgs:
- new_pkgs[cur.inst.pkg] = (cur, dep_cfg)
-
- # If we've already got a version of this dependency
- # in the mapping, it means the version we're currently
- # looking at must be the same or we can't use this
- # combination.
- else:
- entry = new_pkgs[cur.inst.pkg]
- if entry[0] != cur:
-
- # Report conflicts using a tuple of both the
- # conflicting installations.
- yield (dep_cfg, entry[1], cur, entry[0])
- okay = False
-
- # Get out of this loop, we can't use this
- # dependency combination now. Move on to the
- # next installation.
- break
-
- # Add the dependencies of the current installation to be
- # checked.
- rem += cur.deps
-
- # If we were able to match up all previous installations with
- # this one, continue recursing for every other dependency.
- if okay:
- deps += [dep_cfg]
- for d in self.reduce_dependencies(deps, new_pkgs, cur_index + 1):
- yield d
- del deps[-1]
-
- # There are no installations for this dependency. If it's
- # a required dependency then something has gone very wrong.
- elif cur_dep.required:
-
- # Throw an error here.
- # TODO
- raise 'Error: Should be a valid configuration.'
-
- else:
- # There are no installations for this package but it's also not required,
- # so we can safely ignore it and continue on our merry way.
- for d in self.reduce_dependencies(deps, pkgs, cur_index + 1):
- yield d
-
- else:
- deps += [cur_dep]
- for d in self.reduce_dependencies(deps, pkgs, cur_index + 1):
- yield d
- del deps[-1]
-
- def print_results(self):
- self.ctx.Display(" Static libraries: %s\n" % str(bool(self.env['static_libraries'])))
- self.ctx.Display(" Shared libraries: %s\n" % str(bool(self.env['shared_libraries'])))
- self.ctx.Display(" Using build directory: %s\n" % self.env['build_dir'])
- self.ctx.Display(" Debugging symbols: %s\n" % str(bool(self.env['with_debug'])))
- return True
-
- def enable(self, scons_env, old_state=None):
- SConfig.Node.enable(self, scons_env, old_state)
-
- # Setup debugging flags.
- if self.env['with_debug']:
- d = scons_env.ParseFlags('-g')
- self.backup_variable(scons_env, d.keys(), old_state)
- scons_env.MergeFlags(d)
- else:
- d = scons_env.ParseFlags('-DNDEBUG')
- self.backup_variable(scons_env, d.keys(), old_state)
- scons_env.MergeFlags(d)
-
- # Setup the include paths.
- inc_dir = self.env.get_build_path('include')
- self.backup_variable(scons_env, 'CPPPATH', old_state)
- scons_env.PrependUnique(CPPPATH=[inc_dir])
-
- # Setup LIB_DIR.
- lib_dir = self.env.get_build_path('lib')
- self.backup_variable(scons_env, 'LIBPATH', old_state)
- scons_env.PrependUnique(LIBPATH=[lib_dir])
-
- # Setup the RPATH.
- self.backup_variable(scons_env, 'RPATH', old_state)
- scons_env.PrependUnique(RPATH=[scons_env.Dir(lib_dir).abspath])
diff -r 3aba6671e2c9 -r 07515a87d26b SConscript
--- a/SConscript Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,282 +0,0 @@
-import os, sys, platform, pickle, shutil
-import glob as pyglob
-import SConfig
-from SCons.Script.SConscript import SConsEnvironment
-
-#
-# Setup the Package system.
-#
-
-def Package(env, pkg_module, required=True, **kw):
- """Create a new package to be configured."""
- if not hasattr(env, 'package_options'):
- env.package_options = Options()
- if not hasattr(env, 'packages'):
- env.packages = {}
- env.package_list = []
- if not pkg_module in env.packages:
- pkg = pkg_module(env, env.package_options, required, **kw)
- for attr, val in kw.iteritems():
- if not hasattr(pkg, attr):
- print 'Package does not have attribute!'
- sys.exit()
- setattr(pkg, attr, val)
- env.packages[pkg_module] = pkg
- env.package_list += [pkg]
- return env.packages[pkg_module]
-
-def CheckPackages(ctx, pkg_list):
- for pkg in pkg_list:
- pkg.configure(ctx)
-
-def configure_packages(env):
- # If we have 'help' given as a target, use that to generate help.
- if 'help' in COMMAND_LINE_TARGETS:
- env.Alias('help', '.')
- print env.package_options.GenerateHelpText(env)
- return
-
- # Get rid of the temporary directory to make sure we're building
- # from scratch.
- if os.path.exists('.sconsign.dblite'):
- os.remove('.sconsign.dblite')
-
- # Run the setup method for all nodes.
- is_setup = False
- while not is_setup:
- is_setup = True
- for n in env.package_list:
-
- # If any node is not ready, we need to loop through them
- # all again.
- if not n.is_setup:
- n.setup()
- is_setup = False
-
- # Update dependencies and requirements.
- pkgs_rem = list(env.package_list)
- while len(pkgs_rem):
- pkg = pkgs_rem.pop()
- if pkg.required:
- for d, r in pkg.deps:
- if r and not d.required:
- d.required = True
- pkgs_rem += [d]
-
- # Call the packages checker.
- sconf = Configure(pkg.env, custom_tests={'CheckPackages': CheckPackages})
- sconf.CheckPackages(env.package_list)
- sconf.Finish()
-
- # Print package results.
- print '\n*****************************************'
- print "* Results *"
- print '*****************************************\n'
- for pkg in env.package_list:
- if isinstance(pkg, SConfig.Package):
- if pkg.selected:
- print str(pkg.selected),
-
- # Print out build message.
- print '\n*****************************************'
- print "* Now run 'scons' to build the project. *"
- print '*****************************************\n'
-
-def save_config(env, filename='config.cfg'):
- # Put the results on this environment.
- for pkg in env.package_list:
- if pkg.result:
- pkg.enable(env)
-
- # Update config variables.
- env.AppendUnique(CONFIGVARS=['CC', 'CFLAGS', 'CCFLAGS',
- 'CPPPATH', 'CPPDEFINES',
- 'LIBPATH', 'LIBS', 'STATICLIBS',
- 'RPATH', 'INTLIBS',
- 'FRAMEWORKS'])
- env.AppendUnique(CONFIGVARS=env.package_options.keys())
-
- # Dump to file.
- d = {}
- for a in env['CONFIGVARS']:
- if a in env._dict:
- d[a] = env[a]
- f = file(filename, 'w')
- import pickle
- pickle.dump(d, f)
- f.close()
-
-def load_config(env, filename='config.cfg'):
- if not os.path.exists(filename):
- print "\nError: project hasn't been configured!"
- print '*******************************************************'
- print "* Run 'scons config' to configure the project. *"
- print "* Run 'scons help' to see what options are available. *"
- print '*******************************************************'
- env.Exit()
- f = file(filename, 'r')
- import pickle
- d = pickle.load(f)
- f.close()
- for k, v in d.iteritems():
- env[k] = v
- for script in env.get('CONFIGSCRIPTS', []):
- env.SConscript(script, 'env')
- if 'build_dir' in env._dict:
- env.Default(env['build_dir'])
-
-def write_pkgconfig(env, filename, name, desc='', version=0):
- """Write out a pkgconfig file."""
-
- # Make sure the directory structure exists.
- filename = File(filename).abspath
- dirs = os.path.dirname(filename)
- if not os.path.exists(dirs):
- os.makedirs(dirs)
-
- # Write the pkgconfig file.
- f = open(filename, 'w')
- build_path = env.get('build_dir', '')
- if build_path:
- f.write('prefix=%s\n' % build_path)
- f.write('exec_prefix=%s\n' % os.path.join(build_path, 'bin'))
- f.write('libdir=%s\n' % os.path.join(build_path, 'lib'))
- f.write('includedir=%s\n' % os.path.join(build_path, 'include'))
- f.write('\n')
- f.write('Name: %s\n' % name)
- f.write('Description: %s\n' % desc)
- f.write('Version: %s\n' % version)
- f.write('Requires:\n')
-
- # Unfortunately SCons leaves hashes in paths after calling the
- # subst command, so we'll need to expand these manually.
- old_state = {'LIBPATH': env['LIBPATH'], 'CPPPATH': env['CPPPATH']}
- env['LIBPATH'] = [Dir(p).abspath for p in env['LIBPATH']]
- env['CPPPATH'] = [Dir(p).abspath for p in env['CPPPATH']]
- f.write(env.subst('Libs: ${_LIBDIRFLAGS} ${_LIBFLAGS}') + '\n')
- f.write(env.subst('Cflags: ${_CPPINCFLAGS}') + '\n')
- env.Replace(**old_state)
- f.close()
-
-SConsEnvironment.Package = Package
-SConsEnvironment.configure_packages = configure_packages
-SConsEnvironment.save_config = save_config
-SConsEnvironment.load_config = load_config
-SConsEnvironment.write_pkgconfig = write_pkgconfig
-
-#
-# Useful utilities.
-#
-
-def copy_file(env, dst, src):
- dst = File(dst).abspath
- if os.path.exists(dst):
- return
- dst_dir = os.path.dirname(dst)
- if not os.path.exists(dst_dir):
- os.makedirs(dst_dir)
- shutil.copy(src, dst)
-
-def get_build_path(env, prefix):
- if os.path.isabs(env['build_dir']):
- bld_dir = env['build_dir']
- else:
- bld_dir = '#' + env['build_dir']
- if prefix:
- return os.path.join(bld_dir, prefix)
- else:
- return bld_dir
-
-def get_target_name(env, source, extension=''):
- """Return the destination name for a source file with suffix 'suffix'. This
- is useful for building files into the correct build path. Returns the full
- path to the built source without extension."""
- if extension:
- src = source[:-len(extension)]
- else:
- src = source
- return env.get_build_path(src)
-
-def glob(env, pattern):
- if not os.path.isabs(pattern):
- old = os.getcwd()
- os.chdir(Dir('.').srcnode().abspath)
- res = pyglob.glob(pattern)
- os.chdir(old)
- else:
- res = pyglob.glob(pattern)
- return res
-
-def path_exists(env, path):
- if not os.path.isabs(path):
- old = os.getcwd()
- os.chdir(Dir('.').srcnode().abspath)
- res = os.path.exists(path)
- os.chdir(old)
- else:
- res = os.path.exists(path)
- return res
-
-def strip_dir(env, path, subdir):
- offs = path.find(os.path.sep + subdir + os.path.sep)
- if offs != -1:
- return path[:offs] + path[offs + len(subdir) + 1:]
- offs = path.find(os.path.sep + subdir)
- if offs != -1:
- return path[:-(len(subdir) + 1)]
- return path
-
-def make_list(self, var):
- """Convert anything into a list. Handles things that are already lists,
- tuples and strings."""
-
- if isinstance(var, str):
- return [var]
- elif isinstance(var, (list, tuple)):
- if not var:
- return []
- return list(var)
- elif var is None:
- return []
- else:
- return [var]
-
-def reverse_list(self, _list):
- """Return a reversed copy of a list."""
-
- rev = list(_list)
- rev.reverse()
- return rev
-
-SConsEnvironment.strip_dir = strip_dir
-SConsEnvironment.copy_file = copy_file
-SConsEnvironment.get_build_path = get_build_path
-SConsEnvironment.get_target_name = get_target_name
-SConsEnvironment.glob = glob
-SConsEnvironment.path_exists = path_exists
-SConsEnvironment.make_list = make_list
-SConsEnvironment.reverse_list = reverse_list
-
-# Customize the created base environment.
-Import('env')
-
-def _interleave(int_libs, env):
- txt = ''
- first = True
- for paths, libs in int_libs:
- env['MYLIBPATHS'] = paths
- env['MYLIBS'] = libs
- if first:
- first = False
- else:
- txt += ' '
- txt += env.subst('$_MYLIBPATHS $_MYLIBS')
- return txt
-
-env['_abspath'] = lambda x: File(x).abspath # Needed by Darwin.
-env['_interleave'] = _interleave
-env['_MYLIBPATHS'] = '$( ${_concat(LIBDIRPREFIX, MYLIBPATHS, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
-env['_MYLIBS'] = '${_stripixes(LIBLINKPREFIX, MYLIBS, LIBLINKSUFFIX, LIBPREFIX, LIBSUFFIX, __env__)}'
-env['_INTLIBS'] = '${_interleave(INTLIBS, __env__)}'
-env['LINKCOM'] += ' $STATICLIBS $_INTLIBS' # Needed for static libs. Thanks SCons. :(
-env['SHLINKCOM'] += ' $STATICLIBS $_INTLIBS' # Needed for static libs. Thanks SCons. :(
diff -r 3aba6671e2c9 -r 07515a87d26b TerminalController.py
--- a/TerminalController.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,192 +0,0 @@
-import sys, re
-
-class TerminalController:
- """
- A class that can be used to portably generate formatted output to
- a terminal.
-
- `TerminalController` defines a set of instance variables whose
- values are initialized to the control sequence necessary to
- perform a given action. These can be simply included in normal
- output to the terminal:
-
- >>> term = TerminalController()
- >>> print 'This is '+term.GREEN+'green'+term.NORMAL
-
- Alternatively, the `render()` method can used, which replaces
- '${action}' with the string required to perform 'action':
-
- >>> term = TerminalController()
- >>> print term.render('This is ${GREEN}green${NORMAL}')
-
- If the terminal doesn't support a given action, then the value of
- the corresponding instance variable will be set to ''. As a
- result, the above code will still work on terminals that do not
- support color, except that their output will not be colored.
- Also, this means that you can test whether the terminal supports a
- given action by simply testing the truth value of the
- corresponding instance variable:
-
- >>> term = TerminalController()
- >>> if term.CLEAR_SCREEN:
- ... print 'This terminal supports clearing the screen.'
-
- Finally, if the width and height of the terminal are known, then
- they will be stored in the `COLS` and `LINES` attributes.
- """
- # Cursor movement:
- BOL = '' #: Move the cursor to the beginning of the line
- UP = '' #: Move the cursor up one line
- DOWN = '' #: Move the cursor down one line
- LEFT = '' #: Move the cursor left one char
- RIGHT = '' #: Move the cursor right one char
-
- # Deletion:
- CLEAR_SCREEN = '' #: Clear the screen and move to home position
- CLEAR_EOL = '' #: Clear to the end of the line.
- CLEAR_BOL = '' #: Clear to the beginning of the line.
- CLEAR_EOS = '' #: Clear to the end of the screen
-
- # Output modes:
- BOLD = '' #: Turn on bold mode
- BLINK = '' #: Turn on blink mode
- DIM = '' #: Turn on half-bright mode
- REVERSE = '' #: Turn on reverse-video mode
- NORMAL = '' #: Turn off all modes
-
- # Cursor display:
- HIDE_CURSOR = '' #: Make the cursor invisible
- SHOW_CURSOR = '' #: Make the cursor visible
-
- # Terminal size:
- COLS = None #: Width of the terminal (None for unknown)
- LINES = None #: Height of the terminal (None for unknown)
-
- # Foreground colors:
- BLACK = BLUE = GREEN = CYAN = RED = MAGENTA = YELLOW = WHITE = ''
-
- # Background colors:
- BG_BLACK = BG_BLUE = BG_GREEN = BG_CYAN = ''
- BG_RED = BG_MAGENTA = BG_YELLOW = BG_WHITE = ''
-
- _STRING_CAPABILITIES = """
- BOL=cr UP=cuu1 DOWN=cud1 LEFT=cub1 RIGHT=cuf1
- CLEAR_SCREEN=clear CLEAR_EOL=el CLEAR_BOL=el1 CLEAR_EOS=ed BOLD=bold
- BLINK=blink DIM=dim REVERSE=rev UNDERLINE=smul NORMAL=sgr0
- HIDE_CURSOR=cinvis SHOW_CURSOR=cnorm""".split()
- _COLORS = """BLACK BLUE GREEN CYAN RED MAGENTA YELLOW WHITE""".split()
- _ANSICOLORS = "BLACK RED GREEN YELLOW BLUE MAGENTA CYAN WHITE".split()
-
- def __init__(self, term_stream=sys.stdout):
- """
- Create a `TerminalController` and initialize its attributes
- with appropriate values for the current terminal.
- `term_stream` is the stream that will be used for terminal
- output; if this stream is not a tty, then the terminal is
- assumed to be a dumb terminal (i.e., have no capabilities).
- """
- # Curses isn't available on all platforms
- try: import curses
- except: return
-
- # If the stream isn't a tty, then assume it has no capabilities.
- if not term_stream.isatty(): return
-
- # Check the terminal type. If we fail, then assume that the
- # terminal has no capabilities.
- try: curses.setupterm()
- except: return
-
- # Look up numeric capabilities.
- self.COLS = curses.tigetnum('cols')
- self.LINES = curses.tigetnum('lines')
-
- # Look up string capabilities.
- for capability in self._STRING_CAPABILITIES:
- (attrib, cap_name) = capability.split('=')
- setattr(self, attrib, self._tigetstr(cap_name) or '')
-
- # Colors
- set_fg = self._tigetstr('setf')
- if set_fg:
- for i,color in zip(range(len(self._COLORS)), self._COLORS):
- setattr(self, color, curses.tparm(set_fg, i) or '')
- set_fg_ansi = self._tigetstr('setaf')
- if set_fg_ansi:
- for i,color in zip(range(len(self._ANSICOLORS)), self._ANSICOLORS):
- setattr(self, color, curses.tparm(set_fg_ansi, i) or '')
- set_bg = self._tigetstr('setb')
- if set_bg:
- for i,color in zip(range(len(self._COLORS)), self._COLORS):
- setattr(self, 'BG_'+color, curses.tparm(set_bg, i) or '')
- set_bg_ansi = self._tigetstr('setab')
- if set_bg_ansi:
- for i,color in zip(range(len(self._ANSICOLORS)), self._ANSICOLORS):
- setattr(self, 'BG_'+color, curses.tparm(set_bg_ansi, i) or '')
-
- def _tigetstr(self, cap_name):
- # String capabilities can include "delays" of the form "$<2>".
- # For any modern terminal, we should be able to just ignore
- # these, so strip them out.
- import curses
- cap = curses.tigetstr(cap_name) or ''
- return re.sub(r'\$<\d+>[/*]?', '', cap)
-
- def render(self, template):
- """
- Replace each $-substitutions in the given template string with
- the corresponding terminal control string (if it's defined) or
- '' (if it's not).
- """
- return re.sub(r'\$\$|\${\w+}', self._render_sub, template)
-
- def _render_sub(self, match):
- s = match.group()
- if s == '$$': return s
- else: return getattr(self, s[2:-1])
-
-#######################################################################
-# Example use case: progress bar
-#######################################################################
-
-class ProgressBar:
- """
- A 3-line progress bar, which looks like::
-
- Header
- 20% [===========----------------------------------]
- progress message
-
- The progress bar is colored, if the terminal supports color
- output; and adjusts to the width of the terminal.
- """
- BAR = '%3d%% ${GREEN}[${BOLD}%s%s${NORMAL}${GREEN}]${NORMAL}\n'
- HEADER = '${BOLD}${CYAN}%s${NORMAL}\n\n'
-
- def __init__(self, term, header):
- self.term = term
- if not (self.term.CLEAR_EOL and self.term.UP and self.term.BOL):
- raise ValueError("Terminal isn't capable enough -- you "
- "should use a simpler progress dispaly.")
- self.width = self.term.COLS or 75
- self.bar = term.render(self.BAR)
- self.header = self.term.render(self.HEADER % header.center(self.width))
- self.cleared = 1 #: true if we haven't drawn the bar yet.
- self.update(0, '')
-
- def update(self, percent, message):
- if self.cleared:
- sys.stdout.write(self.header)
- self.cleared = 0
- n = int((self.width-10)*percent)
- sys.stdout.write(
- self.term.BOL + self.term.UP + self.term.CLEAR_EOL +
- (self.bar % (100*percent, '='*n, '-'*(self.width-10-n))) +
- self.term.CLEAR_EOL + message.center(self.width))
-
- def clear(self):
- if not self.cleared:
- sys.stdout.write(self.term.BOL + self.term.CLEAR_EOL +
- self.term.UP + self.term.CLEAR_EOL +
- self.term.UP + self.term.CLEAR_EOL)
- self.cleared = 1
diff -r 3aba6671e2c9 -r 07515a87d26b __init__.py
--- a/__init__.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-from Node import Node
-from Package import Package
-from Installation import Installation
-from Configuration import Configuration
-from Platform import Platform
-from Project import Project
-import packages
-
-from TerminalController import TerminalController
diff -r 3aba6671e2c9 -r 07515a87d26b check_headers.py
--- a/check_headers.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,93 +0,0 @@
-import os
-import SConfig
-
def get_all_headers(cfg, hdrs):
    """Append to *hdrs* every header required by the configuration *cfg*,
    depth-first over its dependencies so that dependency headers come
    first, skipping names already present."""

    # Dependencies contribute their headers before this configuration's.
    for dependency in cfg.deps:
        get_all_headers(dependency, hdrs)

    fwork = cfg.inst.fwork
    for header in cfg.inst.pkg.env.make_list(cfg.hdrs):
        # Avoid duplicates (checked against the bare header name).
        if header in hdrs:
            continue
        if fwork:
            # Framework headers are included as <Framework/header>.
            hdrs.append('%s/%s' % (fwork, header))
        else:
            hdrs.append(header)
-
def get_header_source(cfg):
    """Return C source text that includes every header needed to compile
    against *cfg*: the standard ANSI headers first, then the headers of
    all dependencies, then this installation's own."""

    # Dependency headers must precede this installation's to be safe.
    required = []
    get_all_headers(cfg, required)

    lines = ['#include<stdlib.h>', '#include<stdio.h>', '#include<string.h>']
    lines.extend('#include<%s>' % header for header in required)
    return '\n'.join(lines) + '\n'
-
def check_headers(cfg):
    """Determine if the required headers are available with the current
    settings of configuration *cfg*.

    First verifies the header files physically exist under the
    installation's header directories (frameworks are exempt), then
    compiles a snippet that includes them all.  Returns True on success."""

    inst = cfg.inst
    pkg = inst.pkg

    # Nothing to check means an automatic pass.
    if not cfg.hdrs:
        return True

    # Locate the header files themselves before any compile test; this
    # guards against false positives from headers found in default
    # compiler locations.
    if not inst.fwork:
        for header in cfg.hdrs:
            candidates = [os.path.join(inst.base_dir, hdr_dir, header)
                          for hdr_dir in inst.hdr_dirs]
            if not any(os.path.exists(p) for p in candidates):
                # Missing header: the configuration is invalid.
                return False

    # All headers located; now make sure a compiler accepts them.
    source = get_header_source(cfg)

    # Enable this configuration, remembering the prior environment state.
    saved = {}
    cfg.enable(pkg.env, saved)

    # Compile the snippet, then restore the environment.
    ok = pkg.compile_source(source)
    pkg.env.Replace(**saved)

    return ok
diff -r 3aba6671e2c9 -r 07515a87d26b check_libraries.py
--- a/check_libraries.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,540 +0,0 @@
-import os, shutil
-import SConfig
-import check_headers
-
def get_symbols_source(cfg):
    """Build the source code required to check that a set of symbols is
    present in a package installation.

    The generated program declares a prototype for each symbol, then
    calls them all from main() between the package's setup and teardown
    snippets.  Returns the program text as a string."""

    inst = cfg.inst
    pkg = inst.pkg

    # Header inclusions, then one prototype per symbol.
    parts = [check_headers.get_header_source(cfg)]
    for symbol, prototype in zip(inst.syms, pkg.symbol_prototypes):
        parts.append((prototype % symbol) + '\n')

    # Open main().
    parts.append('int main(int argc, char* argv[]) {\n')

    # Optional setup code.
    if pkg.symbol_setup:
        parts.append(pkg.symbol_setup + '\n')

    # One call per symbol.
    for symbol, call in zip(inst.syms, pkg.symbol_calls):
        parts.append((call % symbol) + '\n')

    # Optional teardown code.
    if pkg.symbol_teardown:
        parts.append(pkg.symbol_teardown + '\n')

    # Close main() and return the assembled program.
    parts.append('return 0;\n}\n')
    return ''.join(parts)
-
def generate_library_paths(cfg, lib):
    """Yield candidate absolute paths of the shared-library file for *lib*
    under each of the installation's library directories; if there are no
    library directories, yield just the bare file name."""
    env = cfg.inst.pkg.env
    filename = env.subst('${SHLIBPREFIX}' + lib + '${SHLIBSUFFIX}')
    directories = cfg.inst.lib_dirs
    if not directories:
        yield filename
        return
    for directory in directories:
        full = os.path.join(cfg.inst.base_dir, directory, filename)
        yield os.path.abspath(full)
-
def check_shared_exist(cfg):
    """Run a sanity check on shared libraries to see if they exist.

    Builds and runs a small program that dlopen()s every shared library
    of configuration *cfg*; returns True when all opens succeed (or when
    there is nothing to check).  Raises RuntimeError if the configuration
    has no 'dl' dependency to provide the dynamic-linker headers."""

    inst = cfg.inst
    pkg = inst.pkg

    pkg.ctx.Log(' Checking for existence of shared libraries:\n')

    # If we're configuring the dynamic linker itself, just pass.
    if pkg.name == 'dl':
        pkg.ctx.Log(' No need for the \'dl\' package.\n')
        return True

    # If we don't have any libraries to check, pass.
    if not cfg.libs:
        pkg.ctx.Log(' No libraries to check!\n')
        return True

    # A shared package must have a configuration of the 'dl' package as
    # one of its dependencies.
    dl = None
    for dep in cfg.deps:
        if dep.inst.pkg.name == 'dl':
            dl = dep
            break
    if not dl:
        # BUG FIX: this used to be a Python 2 string exception
        # (raise '...'), which is a TypeError in modern Python.
        raise RuntimeError('Error: No dynamic linker as a dependency!')

    # Build a binary to try and dynamically open all the libraries that
    # are indicated to be shared.
    src = check_headers.get_header_source(dl)
    src += """
int main(int argc, char* argv[]) {
    void* lib[%d];
""" % len(cfg.libs)

    # Need to reverse the list of libraries to account for potential
    # global run-time loading bindings.
    libs = pkg.env.reverse_list(cfg.libs)
    for l in libs:
        # Skip libraries that are known not to be shared, and extras.
        if pkg.shared_libraries and l not in pkg.shared_libraries:
            continue
        if l in inst.extra_libs:
            continue

        # Try each candidate path in turn, nesting so that later paths
        # are only attempted if earlier ones could not be opened.
        offs = ''
        for p in generate_library_paths(cfg, l):
            offs += '  '
            src += '%slib[%d] = dlopen("%s", RTLD_LAZY | RTLD_GLOBAL);\n' % (offs, cfg.libs.index(l), p)
            src += '%sif( !lib[%d] ) {\n' % (offs, cfg.libs.index(l))
            src += offs + '  printf( "%s", dlerror() );\n'
            src += offs + '  return 1;\n'
            src += offs + '}\n'
        # Close any nesting braces that remain open.
        while len(offs) > 2:
            offs = offs[:-2]
            src += offs + '}\n'
    src += '  return 0;\n}\n'

    # Enable the 'dl' configuration, run the probe, then restore the
    # environment.  (The previous dead pre-initialisation of 'result'
    # has been removed.)
    old_state = {}
    dl.enable(pkg.env, old_state)
    result = cfg.inst.pkg.run_source(src)
    pkg.env.Replace(**old_state)

    if not result[0]:
        pkg.ctx.Log(' Failed.\n')
        return False

    pkg.ctx.Log(' Success.\n')
    return True
-
def run_dependency_check(cfg, dep_cfg, dl, use_dep=False):
    """Probe whether *cfg*'s shared libraries are already connected to the
    dependency *dep_cfg*.

    Builds an 'init' library from the main package (optionally excluding
    the dependency's libraries when *use_dep* is false), a 'check' library
    from the dependency, and a loader that dlopen()s both and reports
    'Connected.'/'Disconnected.'.  Returns the run_source result triple.
    Raises RuntimeError if either library unexpectedly fails to build."""

    # Alias some stuff.
    pkg = cfg.inst.pkg
    dep_pkg = dep_cfg.inst.pkg

    # Setup the source code for the initialisation library.
    lib1_src = check_headers.get_header_source(cfg)
    lib1_src += '\nvoid init( int argc, char* argv[] ) {\n'
    lib1_src += dep_pkg.init_code + '\n'
    lib1_src += '}\n\nvoid fina() {\n'
    lib1_src += dep_pkg.fina_code + '\n'
    lib1_src += '}\n'

    # Enable the main package for building the initialisation library.
    # We skip enabling the dependency package unless asked; the point of
    # this check is to see if the main package already has a connection
    # to a shared library of this dependency.
    old_state = {}
    if use_dep:
        cfg.enable(pkg.env, old_state)
    else:
        cfg.enable(pkg.env, old_state, lib_exclude=dep_pkg)

    # Build the initialisation library and grab its path.
    result = pkg.library_source(lib1_src)
    if not result[0]:
        # BUG FIX: was a Python 2 string exception (raise 'Broken').
        raise RuntimeError('Broken')
    init_lib = pkg.ctx.sconf.lastTarget.abspath

    # Disable the main package for building the check library.
    pkg.env.Replace(**old_state)

    # Setup the source code to check if this dependency has been
    # initialised.
    lib2_src = check_headers.get_header_source(dep_cfg)
    lib2_src += '\nint check() {\n'
    lib2_src += dep_pkg.check_code + '\n}\n'

    # Enable the dependency package for building the check library.
    old_state = {}
    dep_cfg.enable(pkg.env, old_state)

    # Build the check library and grab its path.
    result = pkg.library_source(lib2_src)
    if not result[0]:
        # BUG FIX: was a Python 2 string exception (raise 'Broken').
        raise RuntimeError('Broken')
    check_lib = pkg.ctx.sconf.lastTarget.abspath

    # Disable the secondary package for building the loader binary.
    pkg.env.Replace(**old_state)

    # Setup the code for the loader.
    ldr_src = """#include <stdlib.h>
#include <stdio.h>
#include <dlfcn.h>

int main( int argc, char* argv[] ) {
    void* lib1;
    void* lib2;
    void (*init)( int, char** );
    void (*fina)();
    int (*check)();

    lib1 = dlopen( \"""" + init_lib + """\", RTLD_NOW );
    if( !lib1 ) {
        printf( "lib1 open failed\\n" );
        printf( "Error: %s\\n", dlerror() );
        return EXIT_SUCCESS;
    }
    init = dlsym( lib1, "init" );
    if( !init ) {
        printf( "init sym failed\\n" );
        printf( "Error: %s\\n", dlerror() );
        return EXIT_SUCCESS;
    }
    fina = dlsym( lib1, "fina" );
    if( !fina ) {
        printf( "fina sym failed\\n" );
        printf( "Error: %s\\n", dlerror() );
        return EXIT_SUCCESS;
    }

    lib2 = dlopen( \"""" + check_lib + """\", RTLD_NOW );
    if( !lib2 ) {
        printf( "lib2 open failed\\n" );
        printf( "Error: %s\\n", dlerror() );
        return EXIT_SUCCESS;
    }
    check = dlsym( lib2, "check" );
    if( !check ) {
        printf( "check sym failed\\n" );
        printf( "Error: %s\\n", dlerror() );
        return EXIT_SUCCESS;
    }

    init( argc, argv );
    if( check() )
        printf( "Connected.\\n" );
    else
        printf( "Disconnected.\\n" );
    fina();

    if( lib1 )
        dlclose( lib1 );
    if( lib2 )
        dlclose( lib2 );

    return EXIT_SUCCESS;
}
"""

    # Enable the dynamic linker package for the loader.
    old_state = {}
    dl.enable(pkg.env, old_state)

    # Build and run the loader.  Unfortunately, because of the way SCons
    # works, we need to rebuild this every time.
    result = pkg.run_source(ldr_src)

    # Restore the environment.
    pkg.env.Replace(**old_state)

    return result
-
def check_shared_dependencies(cfg):
    """Check that *cfg*'s shared libraries are connected to the shared
    libraries of each of its dependencies.

    Returns True when every checkable dependency is connected (directly
    or after retrying with the dependency enabled); False when the
    combination is incompatible.  Raises RuntimeError on internal errors."""

    # Alias the instance and package.
    inst = cfg.inst
    pkg = inst.pkg

    pkg.ctx.Log(' Check the shared library dependencies:\n')

    # Don't check when configuring the dynamic linker package itself.
    if pkg.name == 'dl':
        pkg.ctx.Log(' No need for the \'dl\' package.\n')
        return True

    # A shared package must have a configuration of the 'dl' package as
    # one of its dependencies.
    dl = None
    for dep in cfg.deps:
        if dep.inst.pkg.name == 'dl':
            dl = dep
            break
    if not dl:
        # BUG FIX: was a Python 2 string exception (raise '...').
        raise RuntimeError('No dynamic linker as a dependency!')

    # Check every dependency to see if it works.
    for dep_cfg in cfg.deps:

        # Alias the dependency's package.
        dep_pkg = dep_cfg.inst.pkg

        pkg.ctx.Log(' Checking dependency \'%s\'\n' % dep_pkg.name)

        # Make sure the dependency can be checked at all.
        if not (dep_pkg.init_code and dep_pkg.fina_code and dep_pkg.check_code):
            pkg.ctx.Log(' No code for dependency check, skipping.\n')
            continue

        # Run the test without the dependent package.
        result = run_dependency_check(cfg, dep_cfg, dl)

        # If the link failed, we have a bug.
        if not result[0]:
            # BUG FIX: was a Python 2 string exception (raise '...').
            raise RuntimeError('Error: This link should not have failed.\n')

        # Check if we were able to open the initialisation library.
        if result[1].find('lib1 open failed') != -1:

            # If not, we need to know why.
            error = result[1].split('\n')[1]

            # If we can find any of the symbols we were trying to link in
            # the error output, the main library is not connected to any
            # library that satisfies the dependency.  Theoretically that
            # means any dependency installation would do, but in practice
            # we just retry with the current dependency enabled.
            if error.find('undefined') != -1: # TODO: search for symbols
                pkg.ctx.Log(' Symbols not present, trying again with dependency enabled.\n')

                # Try it again with the dependency thrown in.
                result = run_dependency_check(cfg, dep_cfg, dl, use_dep=True)

                # If it failed for any reason, we can't use this combination.
                if not result[0] or \
                        result[1].find('Error') != -1 or \
                        result[1].find('Disconnected') != -1:
                    pkg.ctx.Log(' Failed: Incompatible libraries.\n')
                    return False

        # If the libraries are disconnected, this configuration is
        # invalid; we need one with a different dependency.
        if result[1].find('Disconnected') != -1:
            pkg.ctx.Log(' Failed: Shared libraries are not connected.\n')
            return False

        pkg.ctx.Log(' Success: Shared libraries are connected.\n')

    return True
-
def _lib_file_exists(pkg, inst, name):
    """Return True if a library file called *name* exists in any of the
    installation's library directories (logging each attempt)."""
    for lib_dir in inst.lib_dirs:
        path = os.path.join(inst.base_dir, lib_dir, name)
        pkg.ctx.Log('      Trying %s ... ' % path)
        if os.path.exists(path):
            pkg.ctx.Log('found.\n')
            return True
        pkg.ctx.Log('not found.\n')
    return False

def check_files(cfg):
    """Check for the existence of static and shared library files. Return results
    as a tuple of two booleans, the first for static and the second for shared.

    Extra/auxiliary libraries are skipped (they often live only in
    compiler-known locations), as are libraries the package declares to
    be non-shared in the shared pass.  The former duplicated static and
    shared search loops now share the _lib_file_exists helper."""

    # Alias the installation and package for easy access.
    inst = cfg.inst
    pkg = inst.pkg

    pkg.ctx.Log('  Checking for existence of files:\n')

    # False by default in case none of the libraries are in our set.
    static_found = False

    # Static pass.
    for lib in cfg.libs:

        pkg.ctx.Log('    Looking for static %s:\n' % lib)

        # Extra libraries can exist in compiler-only locations; skip.
        if lib in pkg.extra_libraries + inst.extra_libs:
            pkg.ctx.Log('      Is an auxilliary library, skipping.\n')
            continue

        static_found = _lib_file_exists(
            pkg, inst, pkg.env.subst('${LIBPREFIX}' + lib + '${LIBSUFFIX}'))

        # A missing library invalidates the configuration; stop early.
        if not static_found:
            break

    # False by default in case none of the libraries are in our set.
    shared_found = False

    # Shared pass.
    for lib in cfg.libs:

        pkg.ctx.Log('    Looking for shared %s:\n' % lib)

        # Extra libraries can exist in compiler-only locations; skip.
        if lib in pkg.extra_libraries + inst.extra_libs:
            pkg.ctx.Log('      Is an auxilliary library, skipping.\n')
            continue

        # Skip libraries known not to be shared.
        if pkg.shared_libraries is not None and lib not in pkg.shared_libraries:
            pkg.ctx.Log('      Is not required to be a shared library.\n')
            continue

        shared_found = _lib_file_exists(
            pkg, inst, pkg.env.subst('${SHLIBPREFIX}' + lib + '${SHLIBSUFFIX}'))

        # A missing library invalidates the configuration; stop early.
        if not shared_found:
            break

    # Return results.
    return (static_found, shared_found)
-
def check_libraries(cfg):
    """Validate the library set of configuration *cfg*.

    Locates static/shared library files, finds a working symbol-mangling
    set by compiling and running a probe program, then (for shared
    libraries) verifies they can be dlopen()ed and are connected to their
    dependencies.  Records results on cfg.has_static / cfg.has_shared and
    on the installation's syms/sym_def; returns True on success."""
    # Alias the installation and package for easy access.
    inst = cfg.inst
    pkg = inst.pkg

    pkg.ctx.Log('Checking libraries: %s\n' % str(cfg.libs))

    # If there are no libraries to check, automatically pass, but store
    # unknown values for results.
    if not cfg.libs and not inst.fwork:
        pkg.ctx.Log('  No libraries to check!\n')
        cfg.has_static = None
        cfg.has_shared = None
        return True

    # Try and locate files that look like the library files we need.
    # (Frameworks are trusted without a file search.)
    if not inst.fwork:
        cfg.has_static, cfg.has_shared = check_files(cfg)

    # If we couldn't find either, we're broken and should return immediately.
    if not inst.fwork and not (cfg.has_static or cfg.has_shared):
        pkg.ctx.Log('  Could not find files for either static or shared libraries.\n')
        return False

    # Now we need to find a valid set of symbols by trying to build a test
    # program. The symbol type required for any configuration is dependent on
    # the installation, not on its dependencies. So, if we've already
    # found the right set of symbols for this installation, just use those,
    # don't bother searching.
    pkg.ctx.Log('  Checking for symbols:\n')
    if inst.syms is not None:
        pkg.ctx.Log('    Installation already has symbols defined.\n')
        pre_exist = True
        symbols = [(inst.syms, inst.sym_def)]
    else:
        pre_exist = False

        # If there aren't any symbols defined in the package,
        # use an empty set.
        if not pkg.symbols:
            pkg.ctx.Log('    No symbols given, using an empty set.\n')
            symbols = [([], '')]
        else:
            pkg.ctx.Log('    Searching package defined symbols.\n')
            symbols = pkg.symbols

    # Try out each set of symbols.  Note the loop deliberately assigns to
    # the installation's attributes so helpers see the candidate set.
    # ('symbols' is always non-empty, so 'result' is always bound below.)
    for inst.syms, inst.sym_def in symbols:

        pkg.ctx.Log('    Trying symbol set: %s\n' % str(inst.syms))

        # Build the source required for the symbols check.
        src = get_symbols_source(cfg)

        # We'll need to backup the existing environment state when we enable
        # the current configuration.
        old_state = {}
        cfg.enable(pkg.env, old_state)
        result = pkg.run_source(src)
        pkg.env.Replace(**old_state)

        # Check the results.
        if result[0]:

            # In addition to the results indicating success, we need to check
            # the output for any warnings that may indicate failure.
            if result[1].find('skipping incompatible') != -1 or \
                    result[2].find('skipping incompatible') != -1:

                # The library path specified wasn't actually used; the
                # libraries there were incompatible and the compiler fell
                # back to libraries in default search paths.
                pkg.ctx.Log('    Failed: compiler reported incompatible libraries.\n')
                result[0] = 0
                continue

            # We're done, break out of the symbols search.
            pkg.ctx.Log('    Success.\n')
            break

        pkg.ctx.Log('    Failed.\n')

    # If we couldn't find a valid set of symbols we need to clear the symbols
    # stored on the installation, but only if they had not been found before.
    if not result[0]:
        if not pre_exist:
            inst.syms = None
            inst.sym_def = ''

        # TODO: We need to set the correct error here.
        return False

    # Check for the existence of shared libraries if we were able to find
    # shared library-like files.
    if not inst.fwork and cfg.has_shared:
        cfg.has_shared = check_shared_exist(cfg)

    # Also check if we know of a dependency for which the package was linked
    # against for each dependency listed.
    if not inst.fwork and cfg.has_shared:
        cfg.has_shared = check_shared_dependencies(cfg)

    # If we've gotten this far and we're using a framework, we know we have
    # the right stuff.
    if inst.fwork:
        cfg.has_static = True
        cfg.has_shared = True
        cfg.libs = None

    # If we don't have all the necessary libraries, return negative.
    if pkg.require_shared and not cfg.has_shared:
        pkg.ctx.Log('  Failed: shared libraries are required.\n')
        return False

    pkg.ctx.Log('  Success.\n')
    return True
diff -r 3aba6671e2c9 -r 07515a87d26b checks.py
--- a/checks.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-from check_headers import check_headers
-from check_libraries import check_libraries
diff -r 3aba6671e2c9 -r 07515a87d26b packages/BlasLapack.py
--- a/packages/BlasLapack.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,79 +0,0 @@
-import os
-import SConfig
-
class BlasLapack(SConfig.Package):
    """Package check for BLAS/LAPACK.

    Tries several common library combinations (reference, Fortran, C and
    MKL builds, or the Mac OS X Accelerate framework) and validates each
    by calling 'dgeev' under the four usual Fortran name manglings."""

    def __init__(self, scons_env, scons_opts, required=False):
        SConfig.Package.__init__(self, scons_env, scons_opts, required)
        self.cmath = self.dependency(SConfig.packages.cmath)
        # Candidate library combinations, most common first.
        self.libraries = [['lapack', 'blas'],
                          ['flapack', 'fblas'],
                          ['flapack', 'fblas', 'gfortran'],
                          ['clapack', 'cblas'],
                          ['mkl_lapack', 'mkl']]
        self.shared_libraries = ['lapack', 'blas']
        self.extra_libraries = ['gfortran']
        self.frameworks = ['Accelerate']
        # One entry per Fortran name-mangling convention for 'dgeev'.
        self.symbols = [(['dgeev'], 'FORTRAN_NORMAL'),
                        (['dgeev_'], 'FORTRAN_SINGLE_TRAILINGBAR'),
                        (['dgeev__'], 'FORTRAN_DOUBLE_TRAILINGBAR'),
                        (['DGEEV'], 'FORTRAN_UPPERCASE')]
        # C snippets that allocate the dgeev workspace, call it, and free.
        self.symbol_setup = '''char jobVecLeft='N';
char jobVecRight='N';
int dim=1;
double* arrayA=NULL;
double* outputReal=NULL;
double* outputImag=NULL;
double* leftEigenVec=NULL;
double* rightEigenVec=NULL;
int leadDimVL=1;
int leadDimVR=1;
double* workSpace=NULL;
int dimWorkSpace;
int INFO=0;

dimWorkSpace=10*dim;
arrayA=malloc(dim*dim*sizeof(double));
memset(arrayA, 0, dim*dim*sizeof(double));
outputReal=malloc(dim*sizeof(double));
outputImag=malloc(dim*sizeof(double));
memset(outputReal, 0, dim*sizeof(double));
memset(outputImag, 0, dim*sizeof(double));
workSpace=malloc(dimWorkSpace*sizeof(double));
leftEigenVec=malloc(leadDimVL*dim*sizeof(double));
rightEigenVec=malloc(leadDimVR*dim*sizeof(double));
memset(leftEigenVec, 0, leadDimVL*dim*sizeof(double));
memset(rightEigenVec, 0, leadDimVR*dim*sizeof(double));
'''
        self.symbol_teardown = '''free(arrayA);
free(outputReal);
free(outputImag);
free(workSpace);
free(leftEigenVec);
free(rightEigenVec);
'''
        self.symbol_prototypes = ['void %s(char*,char*,int*,double*,int*,double*,double*,double*,int*,double*,int*,double*,int*,int*);']
        self.symbol_calls = ['%s(&jobVecLeft, &jobVecRight, &dim, arrayA, &dim, outputReal, outputImag, leftEigenVec, &leadDimVL, rightEigenVec, &leadDimVR, workSpace, &dimWorkSpace, &INFO );']

    # Thanks to there not being a C version of Blas/Lapack on edda, we need
    # to be able to search for that installation specifically.
    # NOTE(review): every other package defines 'process_installation'
    # (singular); confirm which name the framework actually invokes before
    # relying on this hook being called.
    def process_installations(self, inst):
        """Add the edda XLF library directories when they all exist."""
        lib_dir = ['/usr/local/IBM_compilers/xlf/9.1/lib64',
                   '/opt/ibmcmp/lib64']
        use_dir = True
        for d in lib_dir:
            if not os.path.exists(d):
                use_dir = False
                break
        if use_dir:
            inst.add_lib_dirs(lib_dir)

    def generate_libraries(self, inst):
        """Yield candidate library sets, adding the XLF runtime libraries
        when the installation uses the IBM compiler's library directory."""
        lib_dir = '/usr/local/IBM_compilers/xlf/9.1/lib64'
        if lib_dir in inst.lib_dirs:
            old_libs = list(inst.extra_libs)
            inst.extra_libs += ['xlf90', 'xlfmath', 'xl']
            try:
                yield ['blas', 'lapack', 'xlf90', 'xlfmath', 'xl']
            finally:
                # BUG FIX: restore inst.extra_libs even if the caller
                # abandons the generator after the yield; previously an
                # early close left the extra libraries permanently added.
                inst.extra_libs = old_libs
        else:
            for libs in SConfig.Package.generate_libraries(self, inst):
                yield libs
diff -r 3aba6671e2c9 -r 07515a87d26b packages/CompilerFlags.py
--- a/packages/CompilerFlags.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,61 +0,0 @@
-import os, platform
-import SCons.Script
-import SConfig
-
class CompilerFlags(SConfig.Node):
    """Configuration node that determines whether to build 32- or 64-bit
    code and which compiler flags (-m32/-q32, -m64/-q64) achieve it."""

    def __init__(self, scons_env, scons_opts, required=False):
        SConfig.Node.__init__(self, scons_env, scons_opts, required)
        # Flag discovery must run before architecture selection.
        self.checks = [self.check_bit_flags,
                       self.check_architecture]

    def setup_options(self):
        """Expose with_32bit/with_64bit overrides to the user."""
        SConfig.Node.setup_options(self)
        self.opts.AddOptions(
            SCons.Script.BoolOption('with_32bit', 'Generate 32bit code', 0),
            SCons.Script.BoolOption('with_64bit', 'Generate 64bit code', 0))

    def check_architecture(self):
        """Pick 32 or 64 bits from the platform (or user override) and
        merge the matching flag into the build environment."""
        plat = platform.platform()
        looks_64bit = ('x86_64' in plat or
                       'ppc64' in plat or
                       '64' in platform.architecture()[0] or
                       self.env['with_64bit'])
        if looks_64bit and not self.env['with_32bit']:
            self.bits = 64
            flag = self.flag_64bit
        else:
            self.bits = 32
            flag = self.flag_32bit
        if flag:
            self.env.MergeFlags(flag)
            # When the compiler also drives linking, the linker needs the
            # same bit-ness flag.
            if self.env.subst('$CC') == self.env.subst('$LINK'):
                self.env.AppendUnique(LINKFLAGS=[flag])
        return True

    def _first_working_flag(self, candidates):
        """Return the first flag in *candidates* the compiler accepts,
        or '' when none work."""
        for flag in candidates:
            if self.try_flag(flag)[0]:
                return flag
        return ''

    def check_bit_flags(self):
        """Discover the GNU-style (-m) or IBM-style (-q) bit flags."""
        self.flag_32bit = self._first_working_flag(['-m32', '-q32'])
        self.flag_64bit = self._first_working_flag(['-m64', '-q64'])
        return True

    def try_flag(self, flag):
        """Compile an empty C file with *flag*; return [ok, output, '']."""
        state = self.env.ParseFlags(flag)
        old = self.push_state(state)
        result = self.run_scons_cmd(self.ctx.TryCompile, '', '.c')
        self.pop_state(old)
        status, output = result[0], result[1]
        # Some compilers "accept" unknown flags with only a warning.
        rejected = (output.find('not recognized') != -1 or
                    output.find('not recognised') != -1 or
                    output.find('unknown option') != -1)
        if status and rejected:
            status = 0
        return [status, output, '']
diff -r 3aba6671e2c9 -r 07515a87d26b packages/HDF5.py
--- a/packages/HDF5.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,72 +0,0 @@
-import os
-import SConfig
-
class HDF5(SConfig.Package):
    """Package check for HDF5, including detection of parallel (MPI)
    builds via the installation's libhdf5.settings file."""

    def __init__(self, scons_env, scons_opts, required=False, **kw):
        SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
        self.base_patterns = ['*hdf5*', '*HDF5*']
        self.headers = [['hdf5.h']]
        # Candidate combinations; HDF5 may pull in pthread/zlib/szip.
        self.libraries = [['hdf5'],
                          ['hdf5', 'pthread'],
                          ['hdf5', 'pthread', 'z'],
                          ['hdf5', 'pthread', 'z', 'sz']]
        self.require_parallel = False

    def process_installation(self, inst):
        """Read libhdf5.settings to learn the extra link requirements and
        whether this installation was built with parallel support; add MPI
        dependencies and parallel probe code when it was."""
        SConfig.Package.process_installation(self, inst)

        inst.parallel_support = False
        extra_lib_dirs = []
        extra_libs = []
        for lib_dir in inst.lib_dirs:
            set_file = os.path.join(inst.base_dir, lib_dir, 'libhdf5.settings')
            if os.path.exists(set_file):
                # BUG FIX: the settings file was never closed.
                f = open(set_file, 'r')
                try:
                    for line in f.readlines():
                        if line.find('Extra libraries') != -1:
                            # Renamed local: previously shadowed builtin 'dict'.
                            flags = self.env.ParseFlags(line.split(':')[1])
                            extra_lib_dirs = flags.get('LIBPATH', [])
                            extra_libs = flags.get('LIBS', [])
                        if line.find('Parallel support') != -1:
                            psup = line.split(':')[1].strip()
                            inst.parallel_support = (psup == 'yes')
                finally:
                    f.close()
                # NOTE(review): deliberately disabled in the original code;
                # confirm before re-enabling:
                #   inst.add_lib_dirs(extra_lib_dirs)
                #   inst.add_extra_libs(extra_libs)

        # Do we need the math library?
        if 'm' in extra_libs:
            self.cmath = self.dependency(SConfig.packages.cmath)

        # Do we need to include the library for szip or pthread?
        if 'pthread' in extra_libs: inst.add_extra_libs('pthread')
        if 'z' in extra_libs: inst.add_extra_libs('z')
        if 'sz' in extra_libs: inst.add_extra_libs('sz')

        # If we have parallel support or we require parallel support,
        # add in MPI requirements and probe the MPI-IO property API.
        if inst.parallel_support or self.require_parallel:
            self.mpi = self.dependency(SConfig.packages.MPI)
            self.symbols = [(['H5Pset_dxpl_mpio', 'H5Pset_fapl_mpio'], '')]
            self.symbol_calls = ['%s(dxpl_props, H5FD_MPIO_COLLECTIVE);',
                                 '%s(fapl_props, MPI_COMM_WORLD, MPI_INFO_NULL);']
            self.symbol_setup = """hid_t dxpl_props, fapl_props;
MPI_Comm comm_world;

MPI_Init(&argc, &argv);
MPI_Comm_dup(MPI_COMM_WORLD, &comm_world);
dxpl_props = H5Pcreate(H5P_DATASET_XFER);
fapl_props = H5Pcreate(H5P_FILE_ACCESS);
"""
            self.symbol_teardown = """H5Pclose(dxpl_props);
H5Pclose(fapl_props);
MPI_Finalize();
"""

        return

    def get_check_symbols_fail_reason(self, fail_logs):
        """Explain a symbol-check failure: a missing *_mpio symbol means
        the installation is not a parallel HDF5 build."""
        for log in fail_logs:
            # BUG FIX: str.find returns -1 (truthy) when absent; the old
            # 'if log.find(...)' fired for almost every log line.
            if log.find('_mpio\'') != -1:
                return 'Not a parallel HDF5 implementation.'
        return ''
diff -r 3aba6671e2c9 -r 07515a87d26b packages/HGRevision.py
--- a/packages/HGRevision.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,25 +0,0 @@
-import os
-import SConfig
-
class HGRevision(SConfig.Node):
    """Configuration node that captures the working copy's Mercurial
    revision and injects it as a preprocessor define."""

    def __init__(self, scons_env, scons_opts, required=False):
        SConfig.Node.__init__(self, scons_env, scons_opts, required)
        self.checks = [self.extract_revision]
        self.define_name = 'VERSION'
        # Filled in by extract_revision during configuration.
        self.revision = ''

    def extract_revision(self):
        """Query 'hg identify'; return False when the command fails."""
        import commands
        status, output = commands.getstatusoutput('hg identify')
        if status:
            return False
        self.revision = output.split()[0].strip()
        return True

    def enable(self, scons_env, old_state=None):
        """Add -D<define_name>="<revision>" to the environment."""
        SConfig.Node.enable(self, scons_env, old_state)
        self.backup_variable(scons_env, 'CPPDEFINES', old_state)
        quoted = scons_env['ESCAPE']('"' + self.revision + '"')
        scons_env.AppendUnique(CPPDEFINES=[(self.define_name, quoted)])
diff -r 3aba6671e2c9 -r 07515a87d26b packages/MPI.py
--- a/packages/MPI.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,42 +0,0 @@
-import os
-import SConfig
-
class MPI(SConfig.Package):
    """Package configuration for an MPI implementation (MPICH or LAM).

    Validates a candidate by initialising MPI, duplicating the world
    communicator and finalising in a probe program."""

    def __init__(self, scons_env, scons_opts, required=False, **kw):
        SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
        self.dependency(SConfig.packages.CompilerFlags)

        # Where and what to look for.
        self.base_patterns = ['mpich*', 'MPICH*']
        self.header_sub_dir = ['mpi']
        self.headers = [['mpi.h']]
        self.libraries = [
            ['mpich'],
            ['mpich', 'pmpich'],
            ['mpich', 'rt'],
            ['mpich', 'pmpich', 'rt'],
            ['mpich', 'pvfs2'],
            ['mpich', 'pmpich', 'pvfs2'],
            ['mpich', 'rt', 'pvfs2'],
            ['mpich', 'pmpich', 'rt', 'pvfs2'],
            ['mpi'],
            ['lam', 'mpi'],
        ]
        self.shared_libraries = ['mpich', 'pmpich', 'mpi', 'lam']
        self.extra_libraries = ['rt', 'pvfs2']

        # Probe program pieces: init, dup the world communicator, finalise.
        self.symbols = [(['MPI_Init', 'MPI_Comm_dup', 'MPI_Finalize'], '')]
        self.symbol_setup = 'MPI_Comm comm_world;\n'
        self.symbol_calls = ['%s(&argc, &argv);',
                             '%s(MPI_COMM_WORLD, &comm_world);',
                             '%s();']

        # Code used by the shared-library connection checks.
        self.init_code = 'MPI_Init(&argc, &argv);'
        self.fina_code = 'MPI_Finalize();'
        self.check_code = """int is_ready;
MPI_Initialized(&is_ready);
return is_ready;"""

    def process_installation(self, inst):
        """Give priority to MPICH's optional shared-library directory."""
        SConfig.Package.process_installation(self, inst)

        # MPICH sometimes stores its shared libraries in prefix/lib/shared.
        for lib_dir in inst.lib_dirs:
            shared_dir = os.path.join(lib_dir, 'shared')
            if os.path.exists(os.path.join(inst.base_dir, shared_dir)):
                inst.add_lib_dirs(shared_dir, prepend=True)
diff -r 3aba6671e2c9 -r 07515a87d26b packages/OSMesa.py
--- a/packages/OSMesa.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-import os
-import SConfig
-
class OSMesa(SConfig.Package):
    """Package check for off-screen Mesa (OSMesa) rendering.

    Declarative configuration only: headers/libraries to search for, plus
    a probe that creates and destroys an off-screen context."""

    def __init__(self, scons_env, scons_opts, required=False, **kw):
        SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
        # OSMesa builds on top of OpenGL.
        self.dependency(SConfig.packages.OpenGL)
        # Header lives in a 'GL' sub-directory (GL/osmesa.h).
        self.header_sub_dir = ['GL']
        self.headers = [['osmesa.h']]
        self.libraries = [['OSMesa']]
        # Sanity-check by creating and destroying an off-screen context.
        self.symbols = [(['OSMesaCreateContext', 'OSMesaDestroyContext'], '')]
        self.symbol_setup = 'void* ctx;'
        self.symbol_calls = ['ctx = %s(OSMESA_RGBA, NULL);', '%s(ctx);']
diff -r 3aba6671e2c9 -r 07515a87d26b packages/OpenGL.py
--- a/packages/OpenGL.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-import os
-import SConfig
-
class OpenGL(SConfig.Package):
    """Package check for OpenGL (GL + GLU).

    Declarative configuration only: header/library names, plus the
    Mac OS X framework alternative."""

    def __init__(self, scons_env, scons_opts, required=False, **kw):
        SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
        # Headers live in a 'GL' sub-directory (GL/gl.h, GL/glu.h).
        self.header_sub_dir = ['GL']
        self.headers = [['gl.h', 'glu.h']]
        self.libraries = [['GL', 'GLU']]
        # On Mac OS X, OpenGL is provided as a framework instead.
        self.frameworks = ['OpenGL']
diff -r 3aba6671e2c9 -r 07515a87d26b packages/PETSc.py
--- a/packages/PETSc.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,191 +0,0 @@
-import os, re
-import SConfig
-
-class PETSc(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
- self.mpi = self.dependency(SConfig.packages.MPI)
- self.blas_lapack = self.dependency(SConfig.packages.BlasLapack)
- self.base_patterns = ['petsc*', 'PETSC*', 'PETSc*']
- self.header_sub_dir = ['petsc']
- self.headers = [['petsc.h',
- 'petscvec.h', 'petscmat.h',
- 'petscksp.h', 'petscsnes.h']]
- self.libraries = [['petscsnes', 'petscksp',
- 'petscdm',
- 'petscmat', 'petscvec',
- 'petsc',]]
- self.symbols = [(['PetscInitialize', 'MPI_Comm_dup', 'PetscFinalize'], '')]
- self.symbol_setup = 'MPI_Comm comm_world;\n'
- self.symbol_calls = ['%s(&argc, &argv, NULL, NULL);',
- '%s(PETSC_COMM_WORLD, &comm_world);',
- '%s();']
-
- def process_installation(self, inst):
- SConfig.Package.process_installation(self, inst)
-
- # Read the PETSc architecture.
- inst.arch = self.get_arch(inst.base_dir)
- if not inst.arch:
- return
-
- # Add the bmake/arch include directory.
- hdr_dir = os.path.join('bmake', inst.arch)
- if not os.path.exists(os.path.join(inst.base_dir, hdr_dir)):
- return
- inst.add_hdr_dirs(hdr_dir)
-
- # Add the lib/arch library directory.
- lib_dir = os.path.join('lib', inst.arch)
- if not os.path.exists(os.path.join(inst.base_dir, lib_dir)):
- return
- inst.add_lib_dirs(lib_dir)
-
- # Parse extra libraries.
- extra_lib_dirs, extra_libs = self.get_extra_libraries(inst)
- inst.add_lib_dirs(extra_lib_dirs)
- inst.add_extra_libs(extra_libs)
-
- # There seems to be some extra library paths we'll need that get stored
- # in SL_LINKER_LIBS.
-# extra_lib_dirs, extra_libs = self.get_sl_linker_libs(inst)
-# inst.add_lib_dirs(extra_lib_dirs)
-# inst.add_libs(extra_libs)
-
- # Everything's okay.
- return
-
- def get_arch(self, base_dir):
- petscconf = os.path.join(base_dir, 'bmake',
- 'petscconf')
- if not os.path.exists(petscconf):
- return None
- f = file(petscconf, 'r')
- arch = f.readline().split('=')[1][:-1]
- f.close()
- return arch
-
- def get_extra_libraries(self, inst):
- """Read 'petscconf' and extract any additional dependencies/extra
- libraries we may need."""
-
- # Make sure the file exists before trying anything.
- petscconf = os.path.join(inst.base_dir, 'bmake', inst.arch, 'petscconf')
- if not os.path.exists(petscconf):
- return
-
- # Read all the lines, which are of the form 'something = something else'.
- f = file(petscconf, 'r')
- line_dict = {}
- for line in f.readlines():
- sides = line.split('=')
- line_dict[sides[0].strip()] = sides[1].strip()
- f.close()
-
- # Try and locate any possible dependent installations
- # PETSc knows about.
- name_map = {'MPI': self.mpi,
- 'BLASLAPACK': self.blas_lapack}
- for name_base, pkg in name_map.iteritems():
- name = name_base + '_INCLUDE'
- if name not in line_dict: continue
- string = self.subst(line_dict[name], line_dict).strip()
- for sub in string.split(' '):
- if sub[:len(self.env['INCPREFIX'])] == self.env['INCPREFIX']:
- base_dir = os.path.normpath(sub[len(self.env['INCPREFIX']):])
-
- # Try the base directory on it's own; sometimes
- # the libraries will be placed there.
- pkg.add_candidate(SConfig.Installation(pkg, base_dir))
-
- # Try combining with sub-directories.
- base_dir = os.path.dirname(base_dir)
- for hdr, lib in pkg.combine_base_dir(base_dir):
- pkg.add_candidate(SConfig.Installation(pkg, base_dir, hdr, lib))
-
- name = name_base + '_LIB'
- if name not in line_dict: continue
- string = self.subst(line_dict[name], line_dict).strip()
- for sub in string.split(' '):
- if sub[:len(self.env['LIBDIRPREFIX'])] == self.env['LIBDIRPREFIX']:
- base_dir = os.path.normpath(sub[len(self.env['LIBDIRPREFIX']):])
-
- # Try the base directory on it's own; sometimes
- # the libraries will be placed there.
- pkg.add_candidate(SConfig.Installation(pkg, base_dir, [], ['']))
-
- # Try combining with sub-directories.
- base_dir = os.path.dirname(base_dir)
- for hdr, lib in pkg.combine_base_dir(base_dir):
- pkg.add_candidate(SConfig.Installation(pkg, base_dir, hdr, lib))
-
- # Hunt down all the libraries and library paths we may need.
- names = ['PACKAGES_LIBS', 'SL_LINKER_LIBS']
- for name in names:
- if name not in line_dict: continue
- lib_string = line_dict['PACKAGES_LIBS']
- lib_string = self.subst(lib_string, line_dict)
-
- extra_lib_dirs = []
- extra_libs = []
- for string in lib_string.split(' '):
- if string[:len(self.env['LIBLINKPREFIX'])] == self.env['LIBLINKPREFIX']:
- extra_libs += [string[len(self.env['LIBLINKPREFIX']):]]
- elif string[:len(self.env['LIBDIRPREFIX'])] == self.env['LIBDIRPREFIX']:
- extra_lib_dirs += [string[len(self.env['LIBDIRPREFIX']):]]
- return (extra_lib_dirs, extra_libs)
-
- def get_sl_linker_libs(self, inst):
- petscconf = os.path.join(inst.base_dir, 'bmake', inst.arch, 'petscconf')
- if not os.path.exists(petscconf): return ([], [])
- f = file(petscconf, 'r')
- line_dict = {}
- for line in f.readlines():
- sides = line.split('=')
- line_dict[sides[0].strip()] = sides[1].strip()
- f.close()
- if 'SL_LINKER_LIBS' not in line_dict: return ([], [])
- lib_string = line_dict['SL_LINKER_LIBS']
- lib_string = self.subst(lib_string, line_dict)
-
- extra_lib_dirs = []
- extra_libs = []
- for string in lib_string.split(' '):
- if string[:len(self.env['LIBLINKPREFIX'])] == self.env['LIBLINKPREFIX']:
- extra_libs += [string[len(self.env['LIBLINKPREFIX']):]]
- elif string[:len(self.env['LIBDIRPREFIX'])] == self.env['LIBDIRPREFIX']:
- extra_lib_dirs += [string[len(self.env['LIBDIRPREFIX']):]]
- return (extra_lib_dirs, extra_libs)
-
- def subst(self, line, line_dict):
- inp = [w.strip() for w in line.split()]
- out = []
- while len(inp):
- w = inp[0]
- inp = inp[1:]
- if self.is_macro(w):
- new_line = self.expand_macro(w, line_dict)
- new_words = [nw.strip() for nw in new_line.split()]
- inp = new_words + inp
- else:
- out += [w]
- return ' '.join(out)
-
- def expand_macro(self, macro, line_dict):
- if macro[:2] == '${' and macro[-1:] == '}':
- macro = macro[2:-1]
- elif macro[0] == '$':
- macro = macro[1:]
- if macro not in line_dict: return ''
- return line_dict[macro]
-
- def is_macro(self, word):
- if (word[:2] == '${' and word[-1:] == '}') or word[0] == '$':
- return True
- return False
-
- def get_check_headers_fail_reason(self, fail_logs):
- for log in fail_logs:
- if log.find('MPI_') != -1:
- return 'Selected MPI implementation incompatible.'
- return ''
diff -r 3aba6671e2c9 -r 07515a87d26b packages/PETScExt.py
--- a/packages/PETScExt.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,36 +0,0 @@
-import os
-import SConfig
-
-class PETScExt(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Package.__init__(self, scons_env, scons_opts, required)
- self.petsc = self.dependency(SConfig.packages.PETSc)
- self.base_patterns = ['petscext*', 'PETSCEXT*', 'PETScExt*']
- self.header_sub_dir = ['petsc']
- self.headers = [['petscext.h',
- 'petscext_vec.h', 'petscext_mat.h',
- 'petscext_ksp.h', 'petscext_snes.h']]
- self.libraries = [['petscext_snes', 'petscext_ksp', 'petscext_pc',
- 'petscext_mat', 'petscext_vec',
- 'petscext_utils']]
-
- def process_installation(self, inst):
- # Have to get the architecture using this garbage...
- archs = os.listdir(os.path.join(inst.base_dir, 'lib'))
- for arch in archs:
- if arch[0] != '.':
- inst.arch = arch
-
- # Add the bmake/arch include directory.
- hdr_dir = os.path.join('bmake', inst.arch)
- if not os.path.exists(os.path.join(inst.base_dir, hdr_dir)):
- return False # Can't continue without bmake include directory.
- inst.add_hdr_dirs(hdr_dir)
-
- # Add the lib/arch library directory.
- lib_dir = os.path.join('lib', inst.arch)
- if not os.path.exists(os.path.join(inst.base_dir, lib_dir)):
- return False # Must have correct library path.
- inst.add_lib_dirs(lib_dir)
-
- return True
diff -r 3aba6671e2c9 -r 07515a87d26b packages/PICellerator.py
--- a/packages/PICellerator.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-import os
-import SConfig
-
-class PICellerator(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Package.__init__(self, scons_env, scons_opts, required)
- self.dependency(SConfig.packages.StgFEM)
- self.base_patterns = ['PICellerator*']
- self.headers = [[os.path.join('PICellerator', 'PICellerator.h')]]
- self.libraries = [['PICellerator']]
- self.symbols = [(['PICellerator_Init', 'PICellerator_Finalise'], '')]
- self.symbol_setup = '''MPI_Init(&argc, &argv);
-StGermain_Init(&argc, &argv);
-StgDomain_Init(&argc, &argv);
-StgFEM_Init(&argc, &argv);'''
- self.symbol_teardown = '''StgFEM_Finalise();
-StgDomain_Finalise();
-StGermain_Finalise();
-MPI_Finalize();'''
- self.symbol_calls = ['%s(&argc, &argv);', '%s();']
diff -r 3aba6671e2c9 -r 07515a87d26b packages/SDL.py
--- a/packages/SDL.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-import os
-import SConfig
-
-class SDL(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
- self.header_sub_dir = ['SDL']
- self.headers = [['SDL.h'],
- ['SDL/SDL.h']] # For framework.
- self.libraries = [['SDL']]
- self.frameworks = [['SDL', 'Cocoa']]
diff -r 3aba6671e2c9 -r 07515a87d26b packages/SVNRevision.py
--- a/packages/SVNRevision.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-import os
-import SConfig
-
-class SVNRevision(SConfig.Node):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Node.__init__(self, scons_env, scons_opts, required)
- self.checks = [self.extract_revision]
- self.define_name = 'VERSION'
- self.checkout_path = os.getcwd()
-
- # Will be set after configuration.
- self.revision = 0
-
- def extract_revision(self):
- svn_path = os.path.join(self.checkout_path, '.svn', 'entries')
- if not os.path.exists(svn_path):
- return [0, '', 'Could not find .svn directory']
- f = file(svn_path, 'r')
- all_lines = f.readlines()
- f.close()
-
- for l in all_lines:
- ind = l.rfind('revision=')
- if ind != -1:
- self.revision = int(l[ind + 10:l.rfind('"')])
- return True
-
- self.revision = int(all_lines[3])
- return True
-
- def enable(self, scons_env, old_state=None):
- SConfig.Node.enable(self, scons_env, old_state)
- self.backup_variable(scons_env, 'CPPDEFINES', old_state)
- ver = scons_env['ESCAPE']('"' + str(self.revision) + '"')
- scons_env.AppendUnique(CPPDEFINES=[(self.define_name, ver)])
diff -r 3aba6671e2c9 -r 07515a87d26b packages/StGermain.py
--- a/packages/StGermain.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,36 +0,0 @@
-import os
-import SConfig
-
-class StGermain(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Package.__init__(self, scons_env, scons_opts, required)
- self.dependency(SConfig.packages.cmath)
- self.dependency(SConfig.packages.libXML2)
- self.dependency(SConfig.packages.MPI)
- self.dependency(SConfig.packages.SVNRevision)
- self.base_patterns = ['StGermain*']
- self.headers = [[os.path.join('StGermain', 'StGermain.h')]]
- self.libraries = [['StGermain']]
- self.symbols = [(['StGermain_Init', 'StGermain_Finalise'], '')]
- self.symbol_setup = 'MPI_Init(&argc, &argv);'
- self.symbol_teardown = 'MPI_Finalize();'
- self.symbol_calls = ['%s(&argc, &argv);', '%s();']
-
- def enable(self, scons_env, old_state=None):
- SConfig.Package.enable(self, scons_env, old_state)
- if self.base_dir:
- script = os.path.join(self.base_dir, 'script', 'pcu', 'scons.py')
- if os.path.exists(script):
- self.backup_variable(scons_env, 'CONFIGSCRIPTS', old_state)
- scons_env.AppendUnique(CONFIGVARS=['CONFIGSCRIPTS'])
- scons_env.AppendUnique(CONFIGSCRIPTS=[script])
- env = scons_env
- scons_env.SConscript(script, 'env')
-
- script = os.path.join(self.base_dir, 'script', 'StGermain', 'scons.py')
- if os.path.exists(script):
- self.backup_variable(scons_env, 'CONFIGSCRIPTS', old_state)
- scons_env.AppendUnique(CONFIGVARS=['CONFIGSCRIPTS'])
- scons_env.AppendUnique(CONFIGSCRIPTS=[script])
- env = scons_env
- scons_env.SConscript(script, 'env')
diff -r 3aba6671e2c9 -r 07515a87d26b packages/StgDomain.py
--- a/packages/StgDomain.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-import os
-import SConfig
-
-class StgDomain(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Package.__init__(self, scons_env, scons_opts, required)
- self.dependency(SConfig.packages.StGermain)
- self.dependency(SConfig.packages.BlasLapack)
- self.dependency(SConfig.packages.HDF5, False)
- self.base_patterns = ['StgDomain*']
- self.headers = [[os.path.join('StgDomain', 'StgDomain.h')]]
- self.libraries = [['StgDomain']]
- self.symbols = [(['StgDomain_Init', 'StgDomain_Finalise'], '')]
- self.symbol_setup = '''MPI_Init(&argc, &argv);
-StGermain_Init(&argc, &argv);
-'''
- self.symbol_teardown = '''StGermain_Finalise();
-MPI_Finalize();
-'''
- self.symbol_calls = ['%s(&argc, &argv);', '%s();']
diff -r 3aba6671e2c9 -r 07515a87d26b packages/StgFEM.py
--- a/packages/StgFEM.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-import os
-import SConfig
-
-class StgFEM(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Package.__init__(self, scons_env, scons_opts, required)
- self.dependency(SConfig.packages.StgDomain)
- petsc = self.dependency(SConfig.packages.PETSc)
- petsc.have_define = 'HAVE_PETSC'
- self.base_patterns = ['StgFEM*']
- self.headers = [[os.path.join('StgFEM', 'StgFEM.h')]]
- self.libraries = [['StgFEM']]
- self.symbols = [(['StgFEM_Init', 'StgFEM_Finalise'], '')]
- self.symbol_setup = '''MPI_Init(&argc, &argv);
-StGermain_Init(&argc, &argv);
-StgDomain_Init(&argc, &argv);'''
- self.symbol_teardown = '''StgDomain_Finalise();
-StGermain_Finalise();
-MPI_Finalize();'''
- self.symbol_calls = ['%s(&argc, &argv);', '%s();']
diff -r 3aba6671e2c9 -r 07515a87d26b packages/X11.py
--- a/packages/X11.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-import os
-import SConfig
-
-class X11(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
- self.base_dirs += ['/usr/X11R6']
- self.header_sub_dir = ['X11']
- self.headers = [['Xlib.h']]
- self.libraries = [['X11', 'Xmu']]
- self.symbols = [(['XOpenDisplay'], '')]
- self.symbol_setup = 'void* display;'
- self.symbol_calls = ['display = %s(NULL);']
diff -r 3aba6671e2c9 -r 07515a87d26b packages/__init__.py
--- a/packages/__init__.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,25 +0,0 @@
-from CompilerFlags import CompilerFlags
-from libXML2 import libXML2
-from MPI import MPI
-from PETSc import PETSc
-from PETScExt import PETScExt
-from cmath import cmath
-from SVNRevision import SVNRevision
-from HGRevision import HGRevision
-from BlasLapack import BlasLapack
-from StGermain import StGermain
-from StgDomain import StgDomain
-from StgFEM import StgFEM
-from PICellerator import PICellerator
-from dl import dl
-from OpenGL import OpenGL
-from OSMesa import OSMesa
-from SDL import SDL
-from libPNG import libPNG
-from libJPEG import libJPEG
-from libTIFF import libTIFF
-from libFAME import libFAME
-from libavcodec import libavcodec
-from HDF5 import HDF5
-from X11 import X11
-from pcu import pcu
diff -r 3aba6671e2c9 -r 07515a87d26b packages/cmath.py
--- a/packages/cmath.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-import os
-import SConfig
-
-class cmath(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Package.__init__(self, scons_env, scons_opts, required)
- self.dependency(SConfig.packages.CompilerFlags)
- self.libraries = [['m']]
diff -r 3aba6671e2c9 -r 07515a87d26b packages/dl.py
--- a/packages/dl.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-import os
-import SConfig
-
-class dl(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Package.__init__(self, scons_env, scons_opts, required)
- self.dependency(SConfig.packages.CompilerFlags)
- self.headers = [['dlfcn.h']]
- self.libraries = [['dl']]
diff -r 3aba6671e2c9 -r 07515a87d26b packages/libFAME.py
--- a/packages/libFAME.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-import os
-import SConfig
-
-class libFAME(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
- self.headers = [['fame.h']]
- self.libraries = [['fame']]
- self.have_define = 'HAVE_FAME'
diff -r 3aba6671e2c9 -r 07515a87d26b packages/libJPEG.py
--- a/packages/libJPEG.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-import os
-import SConfig
-
-class libJPEG(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
- self.headers = [['jpeglib.h']]
- self.libraries = [['jpeg']]
- self.have_define = 'HAVE_JPEG'
diff -r 3aba6671e2c9 -r 07515a87d26b packages/libPNG.py
--- a/packages/libPNG.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-import os
-import SConfig
-
-class libPNG(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
- self.headers = [['png.h']]
- self.libraries = [['png']]
- self.have_define = 'HAVE_PNG'
diff -r 3aba6671e2c9 -r 07515a87d26b packages/libTIFF.py
--- a/packages/libTIFF.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-import os
-import SConfig
-
-class libTIFF(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
- self.headers = [['tiff.h']]
- self.libraries = [['tiff']]
- self.have_define = 'HAVE_TIFF'
diff -r 3aba6671e2c9 -r 07515a87d26b packages/libXML2.py
--- a/packages/libXML2.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-import os
-import SConfig
-
-class libXML2(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Package.__init__(self, scons_env, scons_opts, required)
- self.dependency(SConfig.packages.CompilerFlags)
- self.header_sub_dir = ['libxml2']
- self.headers = [[os.path.join('libxml', 'parser.h')]]
- self.libraries = [['xml2']]
diff -r 3aba6671e2c9 -r 07515a87d26b packages/libavcodec.py
--- a/packages/libavcodec.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-import os
-import SConfig
-
-class libavcodec(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
- self.header_sub_dir = ['ffmpeg']
- self.headers = [['avcodec.h']]
- self.libraries = [['avcodec']]
- self.have_define = 'HAVE_AVCODEC'
diff -r 3aba6671e2c9 -r 07515a87d26b packages/pcu.py
--- a/packages/pcu.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,29 +0,0 @@
-import os
-import SConfig
-
-class pcu(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False):
- SConfig.Package.__init__(self, scons_env, scons_opts, required)
- self.dependency(SConfig.packages.MPI)
- self.headers = [[os.path.join('pcu', 'pcu.h')]]
- self.libraries = [['pcu']]
- self.checks += [self.check_scons_script]
-
- self.scons_script = ''
-
- def check_scons_script(self):
- if self.base_dir:
- script = os.path.join(self.base_dir, 'script', 'pcu', 'scons.py')
- if os.path.exists(script):
- self.scons_script = script
- self.ctx.Display(' Found SCons builder script.\n')
- return True
-
- def enable(self, scons_env, old_state=None):
- SConfig.Package.enable(self, scons_env, old_state)
- if self.scons_script:
- self.backup_variable(scons_env, 'CONFIGSCRIPTS', old_state)
- scons_env.AppendUnique(CONFIGVARS=['CONFIGSCRIPTS'])
- scons_env.AppendUnique(CONFIGSCRIPTS=[self.scons_script])
- env = scons_env
- scons_env.SConscript(self.scons_script, 'env')
diff -r 3aba6671e2c9 -r 07515a87d26b packages/szip.py
--- a/packages/szip.py Mon Jun 23 04:52:53 2008 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-import os
-import SConfig
-
-class szip(SConfig.Package):
- def __init__(self, scons_env, scons_opts, required=False, **kw):
- SConfig.Package.__init__(self, scons_env, scons_opts, required, **kw)
- self.headers = [['szlib.h']]
- self.libraries = [['sz']]
More information about the CIG-COMMITS
mailing list