[cig-commits] r4301 - in long/3D/Gale/trunk: . python python/BuildSystem python/BuildSystem/build python/BuildSystem/build/compile python/BuildSystem/build/templates python/BuildSystem/client-python python/BuildSystem/config python/BuildSystem/config/packages python/BuildSystem/install python/Gale python/Gale/packages

walter at geodynamics.org walter at geodynamics.org
Tue Aug 15 15:49:36 PDT 2006


Author: walter
Date: 2006-08-15 15:49:33 -0700 (Tue, 15 Aug 2006)
New Revision: 4301

Added:
   long/3D/Gale/trunk/python/BuildSystem/build/
   long/3D/Gale/trunk/python/BuildSystem/build/__init__.py
   long/3D/Gale/trunk/python/BuildSystem/build/__init__.pyc
   long/3D/Gale/trunk/python/BuildSystem/build/bk.py
   long/3D/Gale/trunk/python/BuildSystem/build/buildGraph.py
   long/3D/Gale/trunk/python/BuildSystem/build/buildGraph.pyc
   long/3D/Gale/trunk/python/BuildSystem/build/builder.py
   long/3D/Gale/trunk/python/BuildSystem/build/compile/
   long/3D/Gale/trunk/python/BuildSystem/build/compile/C.py
   long/3D/Gale/trunk/python/BuildSystem/build/compile/Cxx.py
   long/3D/Gale/trunk/python/BuildSystem/build/compile/F90.py
   long/3D/Gale/trunk/python/BuildSystem/build/compile/SIDL.py
   long/3D/Gale/trunk/python/BuildSystem/build/compile/__init__.py
   long/3D/Gale/trunk/python/BuildSystem/build/fileState.py
   long/3D/Gale/trunk/python/BuildSystem/build/fileset.py
   long/3D/Gale/trunk/python/BuildSystem/build/framework.py
   long/3D/Gale/trunk/python/BuildSystem/build/processor.py
   long/3D/Gale/trunk/python/BuildSystem/build/templates/
   long/3D/Gale/trunk/python/BuildSystem/build/templates/Compile.py
   long/3D/Gale/trunk/python/BuildSystem/build/templates/SIDL.py
   long/3D/Gale/trunk/python/BuildSystem/build/templates/__init__.py
   long/3D/Gale/trunk/python/BuildSystem/build/templates/usingC.py
   long/3D/Gale/trunk/python/BuildSystem/build/templates/usingCxx.py
   long/3D/Gale/trunk/python/BuildSystem/build/templates/usingPython.py
   long/3D/Gale/trunk/python/BuildSystem/build/templates/usingSIDL.py
   long/3D/Gale/trunk/python/BuildSystem/build/transform.py
   long/3D/Gale/trunk/python/BuildSystem/checkdlllibs.py
   long/3D/Gale/trunk/python/BuildSystem/client-python/
   long/3D/Gale/trunk/python/BuildSystem/client-python/cygwinpath.c
   long/3D/Gale/trunk/python/BuildSystem/config/package.py
   long/3D/Gale/trunk/python/BuildSystem/config/packages/
   long/3D/Gale/trunk/python/BuildSystem/config/packages/BlasLapack.py
   long/3D/Gale/trunk/python/BuildSystem/config/packages/Boost.py
   long/3D/Gale/trunk/python/BuildSystem/config/packages/MPI.py
   long/3D/Gale/trunk/python/BuildSystem/config/packages/PETSc.py
   long/3D/Gale/trunk/python/BuildSystem/config/packages/__init__.py
   long/3D/Gale/trunk/python/BuildSystem/config/packages/config.guess
   long/3D/Gale/trunk/python/BuildSystem/config/packages/config.sub
   long/3D/Gale/trunk/python/BuildSystem/config/preTests.py
   long/3D/Gale/trunk/python/BuildSystem/config/programs.py
   long/3D/Gale/trunk/python/BuildSystem/emacsclient.py
   long/3D/Gale/trunk/python/BuildSystem/getsplicers.py
   long/3D/Gale/trunk/python/BuildSystem/make.py
   long/3D/Gale/trunk/python/BuildSystem/setsplicers.py
   long/3D/Gale/trunk/python/BuildSystem/sidldllpath.py
Removed:
   long/3D/Gale/trunk/python/petsc/
Modified:
   long/3D/Gale/trunk/
   long/3D/Gale/trunk/configure.py
   long/3D/Gale/trunk/python/BuildSystem/config/base.py
   long/3D/Gale/trunk/python/BuildSystem/config/compilerOptions.py
   long/3D/Gale/trunk/python/BuildSystem/config/compilers.py
   long/3D/Gale/trunk/python/BuildSystem/config/framework.py
   long/3D/Gale/trunk/python/BuildSystem/config/headers.py
   long/3D/Gale/trunk/python/BuildSystem/config/libraries.py
   long/3D/Gale/trunk/python/BuildSystem/config/setCompilers.py
   long/3D/Gale/trunk/python/BuildSystem/config/types.py
   long/3D/Gale/trunk/python/BuildSystem/install/retrieval.py
   long/3D/Gale/trunk/python/BuildSystem/maker.py
   long/3D/Gale/trunk/python/BuildSystem/nargs.py
   long/3D/Gale/trunk/python/BuildSystem/script.py
   long/3D/Gale/trunk/python/Gale/Configure.py
   long/3D/Gale/trunk/python/Gale/packages/libxml2.py
   long/3D/Gale/trunk/python/Gale/packages/petsc.py
Log:
 r603 at earth:  boo | 2006-08-15 15:47:57 -0700
 More cleaning of configure.  No longer use python/petsc



Property changes on: long/3D/Gale/trunk
___________________________________________________________________
Name: svk:merge
   - 3a629746-de10-0410-b17b-fd6ecaaa963e:/cig:602
   + 3a629746-de10-0410-b17b-fd6ecaaa963e:/cig:603

Modified: long/3D/Gale/trunk/configure.py
===================================================================
--- long/3D/Gale/trunk/configure.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/configure.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -41,7 +41,7 @@
 
   # Should be run from the toplevel
   pythonDir = os.path.abspath(os.path.join('python'))
-  petscDir  = os.path.abspath(os.path.join('python/petsc/python'))
+#  petscDir  = os.path.abspath(os.path.join('python/petsc/python'))
   bsDir     = os.path.join(pythonDir, 'BuildSystem')
   if not os.path.isdir(pythonDir):
     raise RuntimeError('Run configure from $GALE_DIR, not '+os.path.abspath('.'))
@@ -52,7 +52,7 @@
       
   sys.path.insert(0, bsDir)
   sys.path.insert(0, pythonDir)
-  sys.path.insert(0, petscDir)
+#  sys.path.insert(0, petscDir)
 
   import config.framework
   import cPickle
@@ -108,26 +108,26 @@
     +'---------------------------------------------------------------------------------------\n'  \
     +emsg+'*********************************************************************************\n'
     se = ''
-  except ImportError, e :
-    emsg = str(e)
-    if not emsg.endswith('\n'): emsg = emsg+'\n'
-    msg ='*********************************************************************************\n'\
-    +'                     UNABLE to FIND MODULE for configure.py \n' \
-    +'---------------------------------------------------------------------------------------\n'  \
-    +emsg+'*********************************************************************************\n'
-    se = ''
-  except SystemExit, e:
-    if e.code is None or e.code == 0:
-      return
-    msg ='*********************************************************************************\n'\
-    +'           CONFIGURATION CRASH  (Please send configure.log to cig-long at geodynamics.org)\n' \
-    +'*********************************************************************************\n'
-    se  = str(e)
-  except Exception, e:
-    msg ='*********************************************************************************\n'\
-    +'          CONFIGURATION CRASH  (Please send configure.log to cig-long at geodynamics.org)\n' \
-    +'*********************************************************************************\n'
-    se  = str(e)
+#   except ImportError, e :
+#     emsg = str(e)
+#     if not emsg.endswith('\n'): emsg = emsg+'\n'
+#     msg ='*********************************************************************************\n'\
+#     +'                     UNABLE to FIND MODULE for configure.py \n' \
+#     +'---------------------------------------------------------------------------------------\n'  \
+#     +emsg+'*********************************************************************************\n'
+#     se = ''
+#   except SystemExit, e:
+#     if e.code is None or e.code == 0:
+#       return
+#     msg ='*********************************************************************************\n'\
+#     +'           CONFIGURATION CRASH  (Please send configure.log to cig-long at geodynamics.org)\n' \
+#     +'*********************************************************************************\n'
+#     se  = str(e)
+#   except Exception, e:
+#     msg ='*********************************************************************************\n'\
+#     +'          CONFIGURATION CRASH  (Please send configure.log to cig-long at geodynamics.org)\n' \
+#     +'*********************************************************************************\n'
+#     se  = str(e)
 
   print msg
   if not framework is None:

Added: long/3D/Gale/trunk/python/BuildSystem/build/__init__.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/__init__.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/__init__.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1 @@
+all = ['bk', 'buildGraph', 'builder', 'compile', 'fileState', 'fileset', 'framework', 'processor', 'templates', 'transform']

Added: long/3D/Gale/trunk/python/BuildSystem/build/__init__.pyc
===================================================================
(Binary files differ)


Property changes on: long/3D/Gale/trunk/python/BuildSystem/build/__init__.pyc
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Added: long/3D/Gale/trunk/python/BuildSystem/build/bk.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/bk.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/bk.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,147 @@
+import build.fileset
+import build.transform
+
+def convertPath(file):
+  '''Converts the cygwin path to a full Windows path'''
+  try:
+    import cygwinpath
+    return cygwinpath.convertToFullWin32Path(file)
+  except ImportError:
+    pass
+  return file
+
+class Tag (build.transform.Transform):
+  '''Tags all relevant Bitkeeper filesets
+     - Unlocked files are tagged "bkedit"
+     - Locked files which are unchanged are tagged "bkrevert"
+     - New implementation files are tagged "bkadd"'''
+  def __init__(self, rootFunc, inputTag = None):
+    import re
+
+    build.transform.Transform.__init__(self)
+    self.implRE   = re.compile(r'^(.*)_impl$')
+    self.rootFunc = rootFunc
+    self.inputTag = inputTag
+    if not self.inputTag is None and not isinstance(self.inputTag, list):
+      self.inputTag = [self.inputTag]
+    return
+
+  def __str__(self):
+    return 'BitKeeper tag transform'
+
+  def getUnlockedFiles(self, root):
+    '''Return a list of all files not locked by BitKeeper in the root directories'''
+    files       = []
+    lockedFiles = []
+    files.extend(self.executeShellCommand('bk sfiles -g '+convertPath(root)).split())
+    lockedFiles.extend(self.executeShellCommand('bk sfiles -lg '+convertPath(root)).split())
+    map(files.remove, lockedFiles)
+    return files
+
+  def isImplementationFile(self, filename):
+    '''Returns True if filename is an implementation file'''
+    import os
+
+    if filename[-1] == '~': return 0
+    if filename[-1] == '#': return 0
+    if os.path.splitext(filename)[1] == '.pyc': return 0
+    if self.implRE.match(os.path.dirname(filename)):
+      return 1
+    return 0
+
+  def getNewFiles(self, root):
+    '''Return a list of all implementation files not under BitKeeper control in the root directories'''
+    files = []
+    files.extend(filter(self.isImplementationFile, self.executeShellCommand('bk sfiles -ax '+convertPath(root)).split()))
+    return files
+
+  def getUnchangedFiles(self, root):
+    '''Return a list of the files locked by Bitkeeper, but unchanged'''
+    lockedFiles  = []
+    changedFiles = []
+    lockedFiles.extend(self.executeShellCommand('bk sfiles -lg '+convertPath(root)).split())
+    changedFiles.extend(self.executeShellCommand('bk sfiles -cg '+convertPath(root)).split())
+    map(lockedFiles.remove, changedFiles)
+    return lockedFiles
+
+  def handleFile(self, f, set):
+    '''Add new filesets to the output
+       - All files under BitKeeper control are tagged "bkedit"
+       - All new implementation files are tagged "bkadd"
+       - All locked but unchanged files under BitKeeper control are tagged "bkrevert"'''
+    root = self.rootFunc(f)
+    if (self.inputTag is None or set.tag in self.inputTag) and root:
+      import os
+      if not os.path.isdir(root):
+        os.makedirs(root)
+      self.output.children.append(build.fileset.FileSet(filenames = self.getUnlockedFiles(root),  tag = 'bkedit'))
+      self.output.children.append(build.fileset.FileSet(filenames = self.getNewFiles(root),       tag = 'bkadd'))
+      self.output.children.append(build.fileset.FileSet(filenames = self.getUnchangedFiles(root), tag = 'bkrevert'))
+    return build.transform.Transform.handleFile(self, f, set)
+
+class Open (build.transform.Transform):
+  '''This nodes handles sets with tag "bkedit", editing each file'''
+  def __init__(self):
+    build.transform.Transform.__init__(self)
+    return
+
+  def __str__(self):
+    return 'BitKeeper open transform'
+
+  def edit(self, set):
+    '''Edit the files in set with BitKeeper'''
+    if not len(set): return
+    self.debugPrint('Opening files', 2, 'bk')
+    command = 'bk edit '+' '.join(map(convertPath, set))
+    output  = self.executeShellCommand(command)
+    return self.output
+
+  def handleFileSet(self, set):
+    '''Handle sets with tag "bkedit"'''
+    if set.tag == 'bkedit':
+      self.edit(set)
+      map(self.handleFileSet, set.children)
+      return self.output
+    return build.transform.Transform.handleFileSet(self, set)
+
+class Close (build.transform.Transform):
+  '''This nodes handles sets with tag "bkadd" and "bkrevert", adding new files and reverting unchanged files'''
+  def __init__(self):
+    build.transform.Transform.__init__(self)
+    return
+
+  def __str__(self):
+    return 'BitKeeper close transform'
+
+  def add(self, set):
+    '''Add the files in set to BitKeeper'''
+    if not len(set): return
+    self.debugPrint('Putting new files under version control', 2, 'bk')
+    map(lambda f: self.debugPrint('Adding '+f+' to version control', 3, 'bk'), set)
+    command = 'bk add '+' '.join(map(convertPath, set))
+    output  = self.executeShellCommand(command)
+    command = 'bk co -q '+' '.join(map(convertPath, set))
+    output  = self.executeShellCommand(command)
+    return self.output
+
+  def revert(self, set):
+    '''Revert the files in set using BitKeeper'''
+    if not len(set): return
+    self.debugPrint('Reverting unchanged files', 2, 'bk')
+    command = 'bk unedit '+' '.join(map(convertPath, set))
+    output  = self.executeShellCommand(command)
+    command = 'bk co -q '+' '.join(map(convertPath, set))
+    output  = self.executeShellCommand(command)
+    return self.output
+
+  def handleFileSet(self, set):
+    '''Handle sets with tag "bkadd" and "bkrevert"'''
+    if set.tag == 'bkadd':
+      self.add(set)
+      map(self.handleFileSet, set.children)
+      return self.output
+    elif set.tag == 'bkrevert':
+      self.revert(set)
+      map(self.handleFileSet, set.children)
+      return self.output
+    return build.transform.Transform.handleFileSet(self, set)

Added: long/3D/Gale/trunk/python/BuildSystem/build/buildGraph.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/buildGraph.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/buildGraph.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,183 @@
+from __future__ import generators
+
+class BuildGraph(object):
+  def __init__(self, vertices = []):
+    '''Create a graph'''
+    self.vertices = []
+    self.inEdges  = {}
+    self.outEdges = {}
+    map(self.addVertex, vertices)
+    return
+
+  def __str__(self):
+    return 'BuildGraph with '+str(len(self.vertices))+' vertices and '+str(reduce(lambda k,l: k+l, [len(edgeList) for edgeList in self.inEdges.values()], 0))+' edges'
+
+  def addVertex(self, vertex):
+    '''Add a vertex if it does not already exist in the vertex list
+       - Should be able to use Set in Python 2.3'''
+    if vertex is None: return
+    if not vertex in self.vertices:
+      self.vertices.append(vertex)
+      self.clearEdges(vertex)
+    return
+
+  def addEdges(self, vertex, inputs = [], outputs = []):
+    '''Define the in and out edges for a vertex by listing the other vertices defining the edges
+       - If any vertex does not exist in the graph, it is created'''
+    self.addVertex(vertex)
+    for input in inputs:
+      self.addVertex(input)
+      if not vertex is None and not input is None:
+        if not input  in self.inEdges[vertex]: self.inEdges[vertex].append(input)
+        if not vertex in self.outEdges[input]: self.outEdges[input].append(vertex)
+    for output in outputs:
+      self.addVertex(output)
+      if not vertex is None and not output is None:
+        if not vertex in self.inEdges[output]:  self.inEdges[output].append(vertex)
+        if not output in self.outEdges[vertex]: self.outEdges[vertex].append(output)
+    return
+
+  def getEdges(self, vertex):
+    return (self.inEdges[vertex], self.outEdges[vertex])
+
+  def clearEdges(self, vertex, inOnly = 0, outOnly = 0):
+    if inOnly and outOnly:
+      raise RuntimeError('Inconsistent arguments')
+    if not outOnly:
+      self.inEdges[vertex]  = []
+    if not inOnly:
+      self.outEdges[vertex] = []
+    return
+
+  def removeVertex(self, vertex):
+    '''Remove a vertex if already exists in the vertex list
+       - Also removes all associated edges'''
+    if vertex is None: return
+    if vertex in self.vertices:
+      self.vertices.remove(vertex)
+      del self.inEdges[vertex]
+      del self.outEdges[vertex]
+      for v in self.vertices:
+        if vertex in self.inEdges[v]:  self.inEdges[v].remove(vertex)
+        if vertex in self.outEdges[v]: self.outEdges[v].remove(vertex)
+    return
+
+  def addSubgraph(self, graph):
+    '''Add the vertices and edges of another graph into this one'''
+    map(self.addVertex, graph.vertices)
+    map(lambda v: apply(self.addEdges, (v,)+graph.getEdges(v)), graph.vertices)
+    return
+
+  def removeSubgraph(self, graph):
+    '''Remove the vertices and edges of a subgraph, and all the edges connected to it'''
+    map(self.removeVertex, graph.vertices)
+    return
+
+  def printIndent(self, indent):
+    import sys
+    for i in range(indent): sys.stdout.write('  ')
+
+  def display(self):
+    print 'I am a BuildGraph with '+str(len(self.vertices))+' vertices'
+    for vertex in BuildGraph.breadthFirstSearch(self):
+      self.printIndent(vertex.__level)
+      print '('+str(self.vertices.index(vertex))+') '+str(vertex)+' in: '+str(map(self.vertices.index, self.inEdges[vertex]))+' out: '+str(map(self.vertices.index, self.outEdges[vertex]))
+    return
+
+  def appendGraph(self, graph):
+    '''Join every leaf of this graph to every root of the input graph, leaving the result in this graph'''
+    leaves = BuildGraph.getLeaves(self)
+    self.addSubgraph(graph)
+    map(lambda v: self.addEdges(v, outputs = BuildGraph.getRoots(graph)), leaves)
+    return self
+
+  def prependGraph(self, graph):
+    '''Join every leaf of the input graph to every root of this graph, leaving the result in this graph'''
+    roots = BuildGraph.getRoots(self)
+    self.addSubgraph(graph)
+    map(lambda v: self.addEdges(v, outputs = roots), BuildGraph.getLeaves(graph))
+    return self
+
+  def getRoots(graph):
+    '''Return all the sources in the graph (nodes without entering edges)'''
+    return filter(lambda v: not len(graph.getEdges(v)[0]), graph.vertices)
+  getRoots = staticmethod(getRoots)
+
+  def getLeaves(graph):
+    '''Return all the sinks in the graph (nodes without exiting edges)'''
+    return filter(lambda v: not len(graph.getEdges(v)[1]), graph.vertices)
+  getLeaves = staticmethod(getLeaves)
+
+  def depthFirstVisit(graph, vertex, seen = None, returnFinished = 0, outEdges = 1):
+    '''This is a generator returning vertices in a depth-first traversal only for the subtree rooted at vertex
+       - If returnFinished is True, return a vertex when it finishes
+       - Otherwise, return a vertex when it is first seen
+       - If outEdges is True, proceed along these, otherwise use inEdges'''
+    if seen is None: seen = []
+    seen.append(vertex)
+    if not returnFinished:
+      yield vertex
+    # Cute trick since outEdges is index 1, and inEdges is index 0
+    for v in graph.getEdges(vertex)[outEdges]:
+      if not v in seen:
+        try:
+          for v2 in BuildGraph.depthFirstVisit(graph, v, seen, returnFinished, outEdges):
+            yield v2
+        except StopIteration:
+          pass
+    if returnFinished:
+      yield vertex
+    return
+  depthFirstVisit = staticmethod(depthFirstVisit)
+
+  def depthFirstSearch(graph, returnFinished = 0, outEdges = 1):
+    '''This is a generator returning vertices in a depth-first traversal
+       - If returnFinished is True, return a vertex when it finishes
+       - Otherwise, return a vertex when it is first seen
+       - If outEdges is True, proceed along these, otherwise use inEdges'''
+    seen = []
+    for vertex in graph.vertices:
+      if not vertex in seen:
+        try:
+          for v in BuildGraph.depthFirstVisit(graph, vertex, seen, returnFinished, outEdges):
+            yield v
+        except StopIteration:
+          pass
+    return
+  depthFirstSearch = staticmethod(depthFirstSearch)
+
+  def breadthFirstSearch(graph, returnFinished = 0):
+    '''This is a generator returning vertices in a breadth-first traversal
+       - If returnFinished is True, return a vertex when it finishes
+       - Otherwise, return a vertex when it is first seen'''
+    queue = BuildGraph.getRoots(graph)[0:1]
+    if not len(queue): return
+    seen  = [queue[0]]
+    if not returnFinished:
+      queue[0].__level = 0
+      yield queue[0]
+    while len(queue):
+      vertex = queue[-1]
+      for v in graph.getEdges(vertex)[1]:
+        if not v in seen:
+          seen.append(v)
+          v.__level = vertex.__level + 1
+          queue.insert(0, v)
+          if not returnFinished:
+            yield v
+      vertex = queue.pop()
+      if returnFinished:
+        yield vertex
+    return
+
+  def topologicalSort(graph, start = None):
+    '''Reorder the vertices using topological sort'''
+    if start is None:
+      vertices = [vertex for vertex in BuildGraph.depthFirstSearch(graph, returnFinished = 1)]
+    else:
+      vertices = [vertex for vertex in BuildGraph.depthFirstVisit(graph, start, returnFinished = 1)]
+    vertices.reverse()
+    for vertex in vertices:
+      yield vertex
+    return
+  topologicalSort = staticmethod(topologicalSort)

Added: long/3D/Gale/trunk/python/BuildSystem/build/buildGraph.pyc
===================================================================
(Binary files differ)


Property changes on: long/3D/Gale/trunk/python/BuildSystem/build/buildGraph.pyc
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Added: long/3D/Gale/trunk/python/BuildSystem/build/builder.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/builder.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/builder.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,47 @@
+from __future__ import generators
+import base
+
+class Builder (base.Base):
+  def __init__(self, buildGraph = None):
+    base.Base.__init__(self)
+    self.buildGraph    = buildGraph
+    self.currentVertex = None
+    return
+
+  def processInput(self, input):
+    inputs = {}
+    if isinstance(input, dict):
+      inputs.update(input)
+    elif not input is None:
+      inputs[None] = input
+    return inputs
+
+  def execute(self, start = None, input = None):
+    '''Execute the topologically sorted build graph, optionally starting from the transform "start" with the optional FileSet "input"'''
+    import build.buildGraph
+
+    inputs  = self.processInput(input)
+    started = 0
+    self.debugPrint('Starting build', 1, 'build')
+    if not self.currentVertex is None:
+      start = self.currentVertex
+    for vertex in build.buildGraph.BuildGraph.topologicalSort(self.buildGraph):
+      self.debugPrint('Executing vertex '+str(vertex), 2, 'build')
+      if not started:
+        if not start is None and not vertex == start:
+          continue
+        started = 1
+        if None in inputs:
+          self.debugPrint('Processing initial input '+self.debugFileSetStr(inputs[None]), 3, 'build')
+          vertex.handleFileSet(inputs[None])
+      if vertex in inputs:
+        self.debugPrint('Processing specified input '+self.debugFileSetStr(inputs[vertex]), 3, 'build')
+        vertex.handleFileSet(inputs[vertex])
+      for parent in self.buildGraph.getEdges(vertex)[0]:
+        self.debugPrint('Processing input '+self.debugFileSetStr(parent.output)+' from vertex: '+str(parent), 3, 'build')
+        vertex.handleFileSet(parent.output)
+      self.debugPrint('Generated output '+self.debugFileSetStr(vertex.output)+' from vertex: '+str(vertex), 3, 'build')
+      self.currentVertex = vertex
+      yield vertex
+    self.currentVertex = None
+    return

Added: long/3D/Gale/trunk/python/BuildSystem/build/compile/C.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/compile/C.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/compile/C.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,33 @@
+import build.processor
+
+class Compiler (build.processor.Compiler):
+  def __init__(self, sourceDB, usingC, compiler = None, warningFlags = None, inputTag = 'c'):
+    build.processor.Compiler.__init__(self, sourceDB, compiler, inputTag, updateType = 'deferred')
+    self.usingC       = usingC
+    self.warningFlags = warningFlags
+    self.language     = 'C'
+    self.includeDirs.append('.')
+    self.checkCompiler()
+    return
+
+  def __str__(self):
+    return self.language+' compiler('+self.processor+') for '+str(self.inputTag)
+
+  def checkCompiler(self):
+    '''Checks the compatibility of the supplied compiler'''
+    if self.processor is None:
+      self.processor = self.argDB['CC']
+    return
+
+  def getOptimizationFlags(self, source = None):
+    if self.argDB['CFLAGS']:
+      return [self.argDB['CFLAGS']]
+    return []
+
+  def getWarningFlags(self, source = None):
+    '''Return a list of the compiler warning flags. The default is most of the GCC warnings.'''
+    if self.warningFlags is None:
+      return ['-Wall', '-Wundef', '-Wpointer-arith', '-Wbad-function-cast', '-Wcast-align', '-Wwrite-strings',
+              '-Wconversion', '-Wsign-compare', '-Wstrict-prototypes', '-Wmissing-prototypes', '-Wmissing-declarations',
+              '-Wmissing-noreturn', '-Wredundant-decls', '-Wnested-externs', '-Winline']
+    return self.warningFlags

Added: long/3D/Gale/trunk/python/BuildSystem/build/compile/Cxx.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/compile/Cxx.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/compile/Cxx.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,45 @@
+import build.processor
+
+class Compiler (build.processor.Compiler):
+  def __init__(self, sourceDB, usingCxx, compiler = None, warningFlags = None, inputTag = 'cxx'):
+    build.processor.Compiler.__init__(self, sourceDB, compiler, inputTag, updateType = 'deferred')
+    self.usingCxx     = usingCxx
+    self.warningFlags = warningFlags
+    self.language     = 'Cxx'
+    self.includeDirs.append('.')
+    self.checkCompiler()
+    return
+
+  def __str__(self):
+    return self.language+' compiler('+self.processor+') for '+str(self.inputTag)
+
+  def checkCompiler(self):
+    '''Checks the compatibility of the supplied compiler'''
+    import config.setCompilers
+
+    if self.processor is None:
+      self.processor = self.argDB['CXX']
+    compiler = self.processor
+    if config.setCompilers.Configure.isGNU(compiler):
+      import commands
+      # Make sure g++ is recent enough
+      (status, output) = commands.getstatusoutput(compiler+' -dumpversion')
+      if not status == 0:
+        raise RuntimeError('The compiler you specified ('+compiler+') could not be run. Perhaps it is not in your path.')
+      version = output.split('.')[0]
+      if not version == '3':
+        raise RuntimeError('The g++ you specified ('+compiler+') is version '+version+'; please install a g++ of at least version 3 or fix your path. Get gcc/g++ at http://gcc.gnu.com')
+    return
+
+  def getOptimizationFlags(self, source = None):
+    if self.argDB['CXXFLAGS']:
+      return [self.argDB['CXXFLAGS']]
+    return []
+
+  def getWarningFlags(self, source = None):
+    '''Return a list of the compiler warning flags. The default is most of the GCC warnings.'''
+    if self.warningFlags is None:
+      return ['-Wall', '-Wundef', '-Wpointer-arith', '-Wcast-align', '-Wwrite-strings',
+              '-Wconversion', '-Wsign-compare', '-Wstrict-prototypes', '-Wmissing-prototypes',
+              '-Wmissing-noreturn', '-Wredundant-decls', '-Winline']
+    return self.warningFlags

Added: long/3D/Gale/trunk/python/BuildSystem/build/compile/F90.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/compile/F90.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/compile/F90.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,34 @@
+import build.processor
+
+class Compiler (build.processor.Compiler):
+  def __init__(self, sourceDB, usingF90, compiler = None, warningFlags = None, inputTag = 'f90'):
+    build.processor.Compiler.__init__(self, sourceDB, compiler, inputTag, updateType = 'deferred')
+    self.usingF90     = usingF90
+    self.warningFlags = warningFlags
+    self.language     = 'F90'
+    self.includeDirs.append('.')
+    self.checkCompiler()
+    return
+
+  def __str__(self):
+    return self.language+' compiler('+self.processor+') for '+str(self.inputTag)
+
+  def checkCompiler(self):
+    '''Checks the compatibility of the supplied compiler'''
+    if self.processor is None:
+      self.processor = self.argDB['F90']
+    compiler = self.processor
+    if not compiler == 'ifc':
+      raise RuntimeError('I only know how to deal with Intel F90 right now. Shoot me.')
+    return
+
+  def getOptimizationFlags(self, source = None):
+    if self.argDB['FFLAGS']:
+      return [self.argDB['FFLAGS']]
+    return []
+
+  def getWarningFlags(self, source = None):
+    '''Return a list of the compiler warning flags. The default is empty.'''
+    if self.warningFlags is None:
+      return []
+    return self.warningFlags

Added: long/3D/Gale/trunk/python/BuildSystem/build/compile/SIDL.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/compile/SIDL.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/compile/SIDL.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,184 @@
+import build.fileset
+import build.processor
+
+import os
+
class SIDLConstants:
  '''Holds static data describing the SIDL language'''
  def getLanguages():
    '''Returns a list of all permissible SIDL target languages'''
    # This should be argDB['installedLanguages']
    return ['C', 'Cxx', 'C++', 'Python', 'F77', 'F90', 'Java', 'Mathematica', 'Matlab']
  getLanguages = staticmethod(getLanguages)

  def checkLanguage(language):
    '''Raise a ValueError unless "language" is a permissible SIDL target language'''
    if language in SIDLConstants.getLanguages():
      return
    raise ValueError('Invalid SIDL language: '+language)
  checkLanguage = staticmethod(checkLanguage)
+
class SIDLLanguageList (list):
  '''A list whose item assignments are validated as SIDL target languages'''
  def __setitem__(self, index, language):
    # Reject anything that is not a permissible SIDL target language
    SIDLConstants.checkLanguage(language)
    list.__setitem__(self, index, language)
+
class Compiler(build.processor.Processor):
  '''The SIDL compiler processes any FileSet with the tag "sidl", and outputs a FileSet of source code with the appropriate language tag.
     - Servers always compile a single SIDL file'''
  def __init__(self, sourceDB, language, outputDir, isServer, usingSIDL):
    '''Validate "language", then configure the processor; the output tag becomes "<language> server" or "<language> client"'''
    SIDLConstants.checkLanguage(language)
    build.processor.Processor.__init__(self, sourceDB, None, ['sidl', 'old sidl'], language.lower(), not isServer, 'deferred')
    # Can't initialize processor in constructor since I have to wait for Base to set argDB
    self.processor = self.getCompilerDriver()
    self.language  = language
    self.outputDir = outputDir
    self.isServer  = isServer
    if isServer:
      self.action  = 'server'
    else:
      self.action  = 'client'
    self.usingSIDL = usingSIDL
    # Directories of dependent projects searched for SIDL includes (see getDependenciesSIDL)
    self.repositoryDirs = []
    self.outputTag = self.language.lower()+' '+self.action
    return

  def __str__(self):
    return 'SIDL Compiler for '+self.language+' '+self.action

  def handleErrors(self, command, status, output):
    '''Raise a RuntimeError on a nonzero exit status, or when "Error:" appears in the output
       - presumably the driver can exit 0 after printing errors; confirm against Scandal'''
    if status or output.find('Error:') >= 0:
      raise RuntimeError('Could not execute \''+str(command)+'\':\n'+str(output))

  def getCompilerDriver(self):
    '''Return the path of the Scandal driver from the installed Compiler project, or a bare "scandal.py" when it is not installed'''
    project = self.getInstalledProject('bk://sidl.bkbits.net/Compiler')
    if project is None:
      return 'scandal.py'
    return os.path.join(project.getRoot(), 'driver', 'python', 'scandal.py')

  def getCompilerModule(self, name = 'scandal'):
    '''Import and return the compiler module found next to the driver script'''
    import imp

    root = os.path.dirname(self.getCompilerDriver())
    if not root:
      raise ImportError('Project bk://sidl.bkbits.net/Compiler is not installed')
    (fp, pathname, description) = imp.find_module(name, [root])
    try:
      return imp.load_module(name, fp, pathname, description)
    finally:
      # imp.find_module opens the file; it must always be closed
      if fp: fp.close()

  def getActionFlags(self, source):
    '''Return a list of the compiler flags specifying the generation action.'''
    return ['-'+self.action+'='+self.language]

  def getDependenciesSIDL(self):
    '''Return all SIDL files found in project dependencies'''
    if not self.repositoryDirs: return []
    sources = []
    for dir in self.repositoryDirs:
      # Each repository is expected to keep its interfaces in a 'sidl' subdirectory
      dir = os.path.join(dir, 'sidl')
      if not os.path.exists(dir):
        self.debugPrint('Invalid SIDL include directory: '+dir, 4, 'compile')
        continue
      for source in os.listdir(dir):
        if not os.path.splitext(source)[1] == '.sidl': continue
        source = os.path.join(dir, source)
        if not os.path.isfile(source): raise RuntimeError('Invalid SIDL include: '+source)
        sources.append(source)
    return sources

  def getIncludeFlags(self, source):
    '''Return the -includes flag listing every SIDL file from dependent projects'''
    return ['-includes=['+','.join(self.getDependenciesSIDL())+']']

  def getOutputFlags(self, source):
    '''Return a list of the compiler flags specifying the output directories'''
    # A whole set is represented by its first member; servers compile one SIDL file at a time
    if isinstance(source, build.fileset.FileSet): source = source[0]
    (package, ext) = os.path.splitext(os.path.basename(source))
    if not self.outputDir is None:
      if self.isServer:
        outputDir = os.path.join(self.outputDir, self.usingSIDL.getServerRootDir(self.language, package))
      else:
        outputDir = os.path.join(self.outputDir, self.usingSIDL.getClientRootDir(self.language))
      return ['-'+self.action+'Dirs={'+self.language+':'+outputDir+'}']
    return []

  def getFlags(self, source):
    '''Assemble the full flag list: action, includes, and output directories'''
    return self.getActionFlags(source)+self.getIncludeFlags(source)+self.getOutputFlags(source)

  def processFileShell(self, source, set):
    '''Compile "source" using a shell command'''
    return self.processFileSetShell(build.fileset.FileSet([source], tag = set.tag))

  def processFileSetShell(self, set):
    '''Compile all the files in "set" using a shell command'''
    # 'old' sets were already compiled; nothing to do
    if not len(set) or set.tag.startswith('old'): return self.output
    self.debugPrint('Compiling '+str(set)+' into a '+self.language+' '+self.action, 3, 'compile')
    command = ' '.join([self.getProcessor()]+self.getFlags(set)+set)
    # NOTE(review): 'output' is unused since result collection below is disabled
    output  = self.executeShellCommand(command, self.handleErrors)
    #self.output.extend(map(self.getIntermediateFileName, set))
    return self.output

  def processFileModule(self, source, set):
    '''Compile "source" using a module directly'''
    return self.processFileSetModule(build.fileset.FileSet([source], tag = set.tag))

  def processFileSetModule(self, set):
    '''Compile all the files in "set" using a module directly'''
    if not len(set): return self.output
    import nargs
    import sourceDatabase
    import cPickle
    import md5

    # Check for cached output
    #   We could of course hash this big key again
    #   These keys could be local, but we can do that if they proliferate too much. It would mean
    #     that each project would have to compile the SIDL once
    flags    = self.getFlags(set)
    # Cache key combines the checksum of every input file with a hash of the flags
    cacheKey = 'cacheKey'+''.join([sourceDatabase.SourceDB.getChecksum(f) for f in set]+[md5.new(''.join(flags)).hexdigest()])
    if set.tag.startswith('old') and cacheKey in self.argDB:
      self.debugPrint('Loading '+str(set)+' for a '+self.language+' '+self.action+' from argument database ('+cacheKey+')', 3, 'compile')
      outputFiles = cPickle.loads(self.argDB[cacheKey])
    else:
      # Save targets so that they do not interfere with Scandal
      target            = self.argDB.target
      self.argDB.target = []
      # Run compiler and reporter
      compiler = self.getCompilerModule().Scandal(flags+set)
      if not set.tag.startswith('old'):
        self.debugPrint('Compiling '+str(set)+' into a '+self.language+' '+self.action, 3, 'compile')
        self.debugPrint('  with flags '+str(flags), 4, 'compile')
        compiler.run()
      else:
        # For unchanged sources only report the files Scandal would generate
        self.debugPrint('Reporting on '+str(set)+' for a '+self.language+' '+self.action, 3, 'compile')
        self.debugPrint('  with flags '+str(flags), 4, 'compile')
        compiler.report()
      outputFiles          = compiler.outputFiles
      self.argDB[cacheKey] = cPickle.dumps(outputFiles)
      # Restore targets and remove flags
      self.argDB.target = target
      for flag in flags:
        del self.argDB[nargs.Arg.parseArgument(flag)[0]]
    # Construct output
    tag = self.outputTag
    if self.isServer:
      # Server output is additionally tagged with the package name (the SIDL file basename)
      (package, ext) = os.path.splitext(os.path.basename(set[0]))
      tag           += ' '+package
    self.output.children.append(build.fileset.RootedFileSet(self.usingSIDL.project.getUrl(), outputFiles, tag = tag))
    return self.output

  def processFile(self, source, set):
    '''Compile "source"'''
    return self.processFileModule(source, set)

  def processFileSet(self, set):
    '''Compile all the files in "set"'''
    return self.processFileSetModule(set)

  def processOldFile(self, source, set):
    '''Compile "source"'''
    return self.processFileModule(source, set)

  def processOldFileSet(self, set):
    '''Compile all the files in "set"'''
    return self.processFileSetModule(set)

Added: long/3D/Gale/trunk/python/BuildSystem/build/compile/__init__.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/compile/__init__.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/compile/__init__.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1 @@
# Submodules provided by the build.compile package
# NOTE(review): presumably intended to act like __all__ for 'from build.compile import *';
# confirm whether any caller reads 'build.compile.all' before renaming it to __all__
all = ['C', 'Cxx', 'F90', 'SIDL']

Added: long/3D/Gale/trunk/python/BuildSystem/build/fileState.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/fileState.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/fileState.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,140 @@
+import build.fileset
+import build.transform
+
+import os
+
class FileChanged (build.transform.Transform):
  '''Detects whether files have changed using checksums
     - If the force flag is given, all files are marked changed'''
  def __init__(self, sourceDB, inputTag = None, changedTag = 'changed', unchangedTag = 'unchanged', force = 0):
    '''Route files whose tag is in "inputTag" (or all files when inputTag is None) into a "changed" or "unchanged" output set'''
    build.transform.Transform.__init__(self)
    self.sourceDB      = sourceDB
    self.inputTag      = inputTag
    # A single tag may be given as a bare string
    if isinstance(self.inputTag, str): self.inputTag = [self.inputTag]
    self.force         = force
    self.changed       = build.fileset.FileSet(tag = changedTag)
    self.unchanged     = build.fileset.FileSet(tag = unchangedTag)
    self.output.children.append(self.changed)
    self.output.children.append(self.unchanged)
    return

  def compare(self, source, sourceEntry):
    '''Return True if the checksum for "source" has changed since "sourceEntry" was recorded
       - sourceEntry[0] is the stored checksum'''
    self.debugPrint('Checking for '+source+' in the source database', 3, 'sourceDB')
    checksum = self.sourceDB.getChecksum(source)
    if not sourceEntry[0] == checksum:
      self.debugPrint(source+' has changed relative to the source database: '+str(sourceEntry[0])+' <> '+str(checksum), 3, 'sourceDB')
      return 1
    return 0

  def hasChanged(self, source):
    '''Returns True if "source" has changed since it was last updated in the source database'''
    if self.force:
      self.debugPrint(source+' was forcibly tagged', 3, 'sourceDB')
      return 1
    try:
      if not os.path.exists(source):
        # A missing file falls through to the final "changed" return
        self.debugPrint(source+' does not exist', 3, 'sourceDB')
      else:
        if not self.compare(source, self.sourceDB[source]):
          # entry[3] appears to hold the recorded dependency list -- a changed
          # dependency also marks the source as changed
          for dep in self.sourceDB[source][3]:
            try:
              if self.compare(dep, self.sourceDB[dep]):
                return 1
            except KeyError: pass
          return 0
    except KeyError:
      self.debugPrint(source+' does not exist in source database', 3, 'sourceDB')
    return 1

  def handleFile(self, f, set):
    '''Place the file into either the "changed" or "unchanged" output set
       - If inputTag was specified, only handle files with this tag'''
    if self.inputTag is None or set.tag in self.inputTag:
      if self.hasChanged(f):
        self.changed.append(f)
      else:
        self.unchanged.append(f)
      return self.output
    return build.transform.Transform.handleFile(self, f, set)
+
class GenericTag (FileChanged):
  '''Uses input tag, extension and directory checks to group files which need further processing'''
  def __init__(self, sourceDB, outputTag, inputTag = None, ext = '', deferredExt = None, root = None, force = 0):
    '''Changed files get "outputTag", unchanged files "old <outputTag>", and changed deferred-extension files "update <outputTag>"'''
    FileChanged.__init__(self, sourceDB, inputTag, outputTag, 'old '+outputTag, force)
    # Normalize the extension lists to '.ext' form
    self.ext   = ext
    if isinstance(self.ext, list):
      self.ext = map(lambda x: '.'+x, self.ext)
    elif isinstance(self.ext, str):
      self.ext = ['.'+self.ext]
    self.deferredExt   = deferredExt
    if isinstance(self.deferredExt, list):
      self.deferredExt = map(lambda x: '.'+x, self.deferredExt)
    elif isinstance(self.deferredExt, str):
      self.deferredExt = ['.'+self.deferredExt]
    self.root   = root
    if not self.root is None:
      self.root = os.path.normpath(self.root)
    self.deferredUpdates = build.fileset.FileSet(tag = 'update '+outputTag)
    self.output.children.append(self.deferredUpdates)
    return

  def __str__(self):
    # NOTE(review): the extension list and input tag are concatenated with no separator,
    # producing output like "['.c']['c']" -- confirm before changing the format
    return 'Tag transform for extension '+str(self.ext)+str(self.inputTag)+' to tag '+self.changed.tag

  def handleFile(self, f, set):
    '''- If the file is not in the specified root directory, use the default handler
       - If the file is in the extension list, call the parent method
       - If the file is in the deferred extension list and has changed, put it in the update set'''
    if self.inputTag is None or set.tag in self.inputTag:
      (base, ext) = os.path.splitext(f)
      # The commonprefix test keeps only files lying beneath self.root
      if not self.root or self.root+os.sep == os.path.commonprefix([os.path.normpath(base), self.root+os.sep]):
        if self.ext is None or ext in self.ext:
          return FileChanged.handleFile(self, f, set)
        elif not self.deferredExt is None and ext in self.deferredExt:
          if self.hasChanged(f):
            self.deferredUpdates.append(f)
          return self.output
    return build.transform.Transform.handleFile(self, f, set)

  def handleFileSet(self, set):
    '''Check root directory if given, and then execute the default set handling method'''
    if self.root and not os.path.isdir(self.root):
      raise RuntimeError('Invalid root directory for tagging operation: '+self.root)
    return FileChanged.handleFileSet(self, set)
+
class Update (build.transform.Transform):
  '''Update nodes process files whose update in the source database was delayed'''
  def __init__(self, sourceDB, tag = None):
    '''Accept a single tag or a list of tags; each is prefixed with "update " to match deferred-update sets'''
    build.transform.Transform.__init__(self)
    self.sourceDB = sourceDB
    if tag is None:
      self.tag = []
    else:
      self.tag = tag
    if self.tag and not isinstance(self.tag, list):
      self.tag = [self.tag]
    self.tag   = map(lambda t: 'update '+t, self.tag)
    return

  def __str__(self):
    return 'Update transform for '+str(self.tag)

  def handleFile(self, f, set):
    '''If the file tag starts with "update", then update it in the source database'''
    # Match either one of the explicitly requested tags, or any tag beginning with 'update'
    if (self.tag and set.tag in self.tag) or (set.tag and set.tag[:6] == 'update'):
      if os.path.isfile(f):
        self.sourceDB.updateSource(f)
      return self.output
    return build.transform.Transform.handleFile(self, f, set)

  def handleFileSet(self, set):
    '''Execute the default set handling method, and save source database'''
    output = build.transform.Transform.handleFileSet(self, set)
    # I could check here, and only save in the first recursive call
    self.sourceDB.save()
    # Collect garbage eagerly; gc may be missing from restricted interpreters
    try:
      import gc
      gc.collect()
    except ImportError: pass
    return output

Added: long/3D/Gale/trunk/python/BuildSystem/build/fileset.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/fileset.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/fileset.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,188 @@
+import base
+
+import os
+
class FileSet(list):
  '''A list of file names carrying a tag, optional child sets, and an existence check
     - Membership is unique: adding an existing member is a no-op'''
  def __init__(self, filenames = None, tag = None, filesets = None, mustExist = 1):
    '''Create a set with optional initial "filenames", a "tag", child "filesets", and an existence flag'''
    list.__init__(self)
    # Use a None sentinel instead of a mutable default argument; copy the
    # caller's list so later mutation of it cannot alias our children
    if filesets is None:
      self.children = []
    else:
      self.children = filesets[:]
    self.tag       = tag
    self.mustExist = mustExist
    if not filenames is None:
      self.extend(filenames)
    return

  def clone(self):
    '''Return a FileSet with the same tag and existence flag, but no members or children'''
    return FileSet(tag = self.tag, mustExist = self.mustExist)

  def checkFile(self, filename):
    '''Return "filename", raising ValueError if mustExist is set and the file is absent'''
    if self.mustExist and not os.path.exists(filename):
      raise ValueError('File '+filename+' does not exist!')
    return filename

  def append(self, item):
    '''Append "item" after validation, ignoring duplicates'''
    item = self.checkFile(item)
    if not item in self:
      list.append(self, item)
    return

  def extend(self, l):
    '''Append each member of "l" in turn, ignoring duplicates'''
    for item in l:
      self.append(item)
    return

  def insert(self, index, item):
    '''Insert "item" at "index" after validation, ignoring duplicates'''
    item = self.checkFile(item)
    if not item in self:
      list.insert(self, index, item)
    return

  def isCompatible(self, set):
    '''Return True if the set tags and mustExist flags match'''
    return (self.tag == set.tag) and (self.mustExist == set.mustExist)
+
class TreeFileSet (FileSet):
  '''A FileSet populated by walking directory trees and collecting the files accepted by "fileTest"'''
  def __init__(self, roots = None, fileTest = lambda file: 1, tag = None):
    '''"roots" may be None (the current directory), a single path string, or a FileSet of paths'''
    if roots is None:
      # BUGFIX: was FileSet(os.getcwd()) -- passing a bare string as the
      # filename list made FileSet.extend() iterate it character by character
      self.roots  = FileSet([os.getcwd()])
    else:
      if isinstance(roots, str):
        self.roots = FileSet([roots])
      else:
        self.roots = roots
    self.fileTest = fileTest
    FileSet.__init__(self, filenames = self.walkTree(), tag = tag)
    return

  def walkTree(self):
    '''Return every file beneath the roots which passes self.fileTest'''
    # NOTE: os.path.walk is Python 2 only
    files = []
    for root in self.roots:
      os.path.walk(root, self.walkFunc, files)
    return files

  def walkFunc(self, defaultFiles, directory, fileList):
    '''os.path.walk visitor: skips SCCS directories, editor backups (~) and autosave (#...#) files'''
    if (os.path.basename(directory) == 'SCCS'): return
    for file in fileList:
      fullPath = os.path.join(directory, file)
      if (os.path.isdir(fullPath)):            continue
      if (file[-1] == '~'):                    continue
      if (file[0] == '#' and file[-1] == '#'): continue
      if (self.fileTest(fullPath)): defaultFiles.append(fullPath)
+
class ExtensionFileSet (TreeFileSet):
  '''A TreeFileSet which accepts exactly those files having one of the given extensions'''
  def __init__(self, roots, exts, tag = None):
    '''"exts" may be a single extension string or a list of them'''
    if isinstance(exts, list):
      self.exts = exts
    else:
      self.exts = [exts]
    TreeFileSet.__init__(self, roots, self.extTest, tag = tag)
    return

  def extTest(self, path):
    '''Return 1 when "path" carries one of the registered extensions, and 0 otherwise'''
    ext = os.path.splitext(path)[1]
    if ext in self.exts:
      return 1
    return 0
+
class RootedFileSet(FileSet, base.Base):
  '''A FileSet whose members are stored relative to a project root and reported as absolute paths'''
  def __init__(self, projectUrl, filenames = None, tag = None, filesets = [], mustExist = 1):
    '''Members of "filenames" may be absolute (they must lie beneath the project root) or relative to it'''
    FileSet.__init__(self, None, tag, filesets, mustExist)
    base.Base.__init__(self)
    self.projectUrl = projectUrl
    # Extend only after the URL is set, since checkFile() needs the project root
    if not filenames is None:
      self.extend(filenames)
    return

  def __str__(self):
    return '['+','.join(map(str, self))+']'

  def getProjectUrl(self):
    return self._projectUrl

  def setProjectUrl(self, url):
    self._projectUrl = url
  projectUrl = property(getProjectUrl, setProjectUrl, doc = 'The URL of the project which provides a root for all files in the set')

  def getProjectRoot(self):
    '''Lazily look up and cache the root directory of the installed project'''
    if not hasattr(self, '_projectRoot'):
      project = self.getInstalledProject(self.projectUrl)
      if project is None:
        self._projectRoot = ''
      else:
        self._projectRoot = project.getRoot()
    return self._projectRoot

  def setProjectRoot(self, root):
    # BUGFIX: the setter must accept the assigned value; without the extra
    # parameter, assignment raised TypeError instead of this intended error
    raise RuntimeError('Cannot set the project root. It is determined by the project URL.')
  projectRoot = property(getProjectRoot, setProjectRoot, doc = 'The project root for all files in the set')

  def __getstate__(self):
    '''Remove the cached project root directory before pickling'''
    d = base.Base.__getstate__(self)
    if '_projectRoot' in d: del d['_projectRoot']
    return d

  def __getitem__(self, index):
    # Members are stored relative to the root, but reported rooted
    return os.path.join(self.projectRoot, list.__getitem__(self, index))

  def __getslice__(self, start, end):
    root = self.projectRoot
    return map(lambda f: os.path.join(root, f), list.__getslice__(self, start, end))

  def __setitem__(self, index, item):
    return list.__setitem__(self, index, self.checkFile(item))

  def __setslice__(self, start, end, s):
    root = self.projectRoot
    return list.__setslice__(self, start, end, map(lambda f: self.checkFile(f, root), s))

  def __iter__(self):
    return FileSetIterator(self)

  def clone(self):
    '''Return a RootedFileSet with the same root, tag and existence flag, but no members or children'''
    set = RootedFileSet(self.projectUrl, tag = self.tag, mustExist = self.mustExist)
    # BUGFIX: go through the property so the root is computed on demand;
    # reading self._projectRoot directly raised AttributeError when the
    # root had never been accessed on this set
    set._projectRoot = self.projectRoot
    return set

  def checkFile(self, filename, root = None):
    '''Validate "filename" and return it in stored (root-relative) form
       - Absolute paths must lie beneath the project root'''
    if root is None:
      root = self.projectRoot
    if os.path.isabs(filename):
      filename = FileSet.checkFile(self, filename)
      if not filename.startswith(root+os.sep):
        raise ValueError('Absolute path '+filename+' conflicts with project root '+root)
      else:
        filename = filename[len(root)+1:]
    else:
      filename = FileSet.checkFile(self, os.path.join(root, filename))
    return filename

  def isCompatible(self, set):
    '''Return True if the roots match and the superclass match returns True'''
    return isinstance(set, RootedFileSet) and (self.projectRoot == set.projectRoot) and FileSet.isCompatible(self, set)
+
+class FileSetIterator (object):
+  def __init__(self, set):
+    self.set   = set
+    self.index = -1
+    self.max   = len(set)
+    return
+
+  def __iter__(self):
+    return self
+
+  def next(self):
+    self.index += 1
+    if self.index == self.max: raise StopIteration()
+    return self.set[self.index]
+
class RootedExtensionFileSet (RootedFileSet, ExtensionFileSet):
  '''A RootedFileSet populated by walking directories (relative to the project root) for files with given extensions'''
  # Note: this constructor deliberately bypasses RootedFileSet.__init__ and
  # ExtensionFileSet.__init__, doing their setup inline and then driving
  # TreeFileSet.__init__ directly so members come from the tree walk
  def __init__(self, projectUrl, roots, exts, tag = None):
    self.exts = exts
    if not isinstance(self.exts, list): self.exts = [self.exts]
    # base.Base must be initialized before self.projectRoot can be resolved
    base.Base.__init__(self)
    self.projectUrl = projectUrl
    # The given roots are interpreted relative to the project root
    TreeFileSet.__init__(self, map(lambda d: os.path.join(self.projectRoot, d), roots), self.extTest, tag = tag)
    return

Added: long/3D/Gale/trunk/python/BuildSystem/build/framework.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/framework.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/framework.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,686 @@
+import user
+import importer
+import base
+import sourceDatabase
+
+import atexit
+import cPickle
+import os
+import sys
+import nargs
+
# Bail out early on interpreters too old to define sys.version_info (added in Python 2.0)
if not hasattr(sys, 'version_info'):
  raise RuntimeError('You must have Python version 2.2 or higher to run the build system')
+
+class Framework(base.Base):
+  '''This is the base class for all user make modules'''
  def __init__(self, project, clArgs = None, argDB = None):
    '''Setup the project, argument database, and source database'''
    try:
      import gc
      # Leak-hunting hook; left disabled
      #gc.set_debug(gc.DEBUG_LEAK)
    except ImportError: pass
    base.Base.__init__(self, clArgs, argDB)
    import build.builder
    self.project         = project
    self.targets         = {}
    self.directories     = {}
    self.filesets        = {}
    # Optional header object handed to the configure framework (see t_configure)
    self.configureHeader = None
    self.builder         = build.builder.Builder(None)
    self.createTmpDir()
    return
+
  def setupArgDB(self, argDB, clArgs):
    '''Setup argument types, using the database created by base.Base
       - Registers the build system's standard arguments, seeds shared state, then delegates to the superclass'''

    # Generic arguments
    argDB.setType('help',           nargs.ArgBool(None, 0, 'Print help message',   isTemporary = 1), forceLocal = 1)
    argDB.setType('noConfigure',    nargs.ArgBool(None, 0, 'Suppress configure',   isTemporary = 1), forceLocal = 1)
    argDB.setType('forceConfigure', nargs.ArgBool(None, 0, 'Force a  reconfigure', isTemporary = 1), forceLocal = 1)
    argDB.setType('displayTarget',  nargs.ArgBool(None, 0, 'Display a target',     isTemporary = 1), forceLocal = 1)
    argDB.setType('noStackTrace',   nargs.ArgBool(None, 0, 'Suppress a stack trace on error'), forceLocal = 1)
    argDB.setType('checkpoint',     nargs.Arg(None, None,  'Pickled state of evaluation'), forceLocal = 1)
    # Source database manipulation
    argDB.setType('restart',        nargs.ArgBool(None, 0, 'Restart the build',    isTemporary = 1), forceLocal = 1)
    # Argument database manipulation
    argDB.setType('fileset',        nargs.Arg(None, None, 'Name of a FileSet or full path of an individual file', isTemporary = 1), forceLocal = 1)
    argDB.setType('regExp',         nargs.Arg(None, None, 'Regular expression',                                   isTemporary = 1), forceLocal = 1)

    # Seed shared state the first time any project runs
    if not 'installedprojects'  in self.argDB: self.argDB['installedprojects']  = []
    if not 'installedLanguages' in self.argDB: self.argDB['installedLanguages'] = ['Python', 'Cxx']
    if not 'clientLanguages'    in self.argDB: self.argDB['clientLanguages']    = []

    base.Base.setupArgDB(self, argDB, clArgs)
    return argDB
+
  def setupSourceDB(self, proj):
    '''Load any existing source database for the given project, and register its save method'''
    # NOTE(review): the docstring mentions registering the save method, but no
    # registration happens here -- confirm whether that occurs elsewhere
    import project

    root     = project.ProjectPath('', proj.getUrl())
    filename = project.ProjectPath('bsSource.db', proj.getUrl())
    self.debugPrint('Reading source database for '+proj.getUrl()+' from '+str(filename), 2, 'sourceDB')
    if os.path.exists(str(filename)):
      try:
        dbFile        = open(str(filename), 'r')
        self.sourceDB = cPickle.load(dbFile)
        # Rebind the filename so future saves go through the project path
        self.sourceDB.filename = filename
        dbFile.close()
      except Exception, e:
        # A corrupt or unreadable database is not fatal; start afresh
        self.debugPrint('Source database '+str(filename)+' could not be read: '+str(e)+'. Creating a new one', 2, 'sourceDB')
        self.sourceDB = sourceDatabase.SourceDB(root, filename)
    else:
      self.debugPrint('Source database '+str(filename)+' does not exist. Creating a new one', 2, 'sourceDB')
      self.sourceDB = sourceDatabase.SourceDB(root, filename)
    return
+
  def makeSourceDBPathsAbsolute(self, sourceDB):
    '''Return another source database in which all paths are absolute'''
    # NOTE(review): constructed with argDB here, whereas setupSourceDB builds
    # SourceDB(root, filename) -- confirm which constructor signature is intended
    newDB = sourceDatabase.SourceDB(self.argDB)
    pwd   = self.project.getRoot()
    for key in sourceDB:
      # Keys produced by makeSourceDBPathsRelative keep their leading separator,
      # so plain concatenation appears to reconstruct the absolute path -- verify
      new_key        = pwd+key
      newDB[new_key] = sourceDB[key]
    return newDB
+
+  def makeSourceDBPathsRelative(self, sourceDB):
+    '''Return another source database in which all paths are relative to the root of this project'''
+    import re
+
+    newDB = sourceDatabase.SourceDB(self.argDB)
+    pwd   = self.project.getRoot()
+    for key in sourceDB:
+      new_key        = re.split(pwd, key)[-1]
+      newDB[new_key] = sourceDB[key]
+    return newDB
+
  def checkTmpDir(self, tmpDir):
    '''Check that the temporary directory exists and has sufficient space available
       - Returns 0 after re-registering the TMPDIR argument type, so the caller can re-prompt'''
    if not os.path.exists(tmpDir):
      # Re-register the type so the argument database asks for a replacement directory
      del self.argDB['TMPDIR']
      self.argDB.setType('TMPDIR', nargs.ArgDir(None, None, 'Temporary directory '+tmpDir+' does not exist. Select another directory'))
      newTmp = self.argDB['TMPDIR']
      return 0

    try:
      # Require at least 50M free; os.statvfs is unavailable on some platforms
      # (presumably why the whole check is wrapped in a bare except -- TODO narrow it)
      stats     = os.statvfs(tmpDir)
      freeSpace = stats.f_bavail*stats.f_frsize
      if freeSpace < 50*1024*1024:
        del self.argDB['TMPDIR']
        self.argDB.setType('TMPDIR', nargs.ArgDir(None, None,'Insufficient space ('+str(freeSpace/1024)+'K) on '+tmpDir+'. Select another directory'))
        newTmp = self.argDB['TMPDIR']
        return 0
    except: pass
    return 1
+
  def createTmpDir(self):
    '''Create a valid temporary directory and store it in argDB["TMPDIR"]
       - The actual working directory is a per-process subdirectory, removed at interpreter exit'''
    import tempfile

    # Choose a starting candidate: prior argDB value, then the environment, then /tmp
    if 'TMPDIR' in self.argDB:
      if isinstance(self.argDB['TMPDIR'], int):
        # How in the hell is TMPDIR being set to 1?
        tmpDir = '/tmp'
      else:
        tmpDir = self.argDB['TMPDIR']
    elif 'TMPDIR' in os.environ:
      tmpDir = os.environ['TMPDIR']
    else:
      tmpDir = '/tmp'

    # checkTmpDir re-prompts through the argument database until a usable directory is given
    self.argDB['TMPDIR'] = tmpDir
    while not self.checkTmpDir(tmpDir):
      tmpDir = self.argDB['TMPDIR']

    # Per-process subdirectory keeps concurrent builds from colliding
    self.tmpDir = os.path.join(tmpDir, 'bs-'+str(os.getpid()))
    if not os.path.exists(self.tmpDir):
      try:
        os.makedirs(self.tmpDir)
      except:
        raise RuntimeError('Cannot create tmp directory '+self.tmpDir)
    tempfile.tempdir = self.tmpDir
    atexit.register(self.destroyTmpDir)
    return
+
  def destroyTmpDir(self):
    '''Recursively remove the per-process temporary directory, if it still exists'''
    if not os.path.exists(self.tmpDir): return
    import shutil
    return shutil.rmtree(self.tmpDir)
+
  def getMakeModule(self, root, name = 'make'):
    '''Import and return the module "name" from the directory "root"
       - Raises ImportError when no such module exists there'''
    import imp

    (fp, pathname, description) = imp.find_module(name, [root])
    try:
      return imp.load_module(name, fp, pathname, description)
    finally:
      # imp.find_module opens the file; it must always be closed
      if fp: fp.close()
+
  def setupDependencies(self):
    '''Augment the project dependence graph with this project
       - The project and dependencies MUST be activated prior to calling this method'''
    if not 'projectDependenceGraph' in self.argDB:
      import build.buildGraph
      self.argDB['projectDependenceGraph'] = build.buildGraph.BuildGraph()
    self.dependenceGraph = self.argDB['projectDependenceGraph']
    self.dependenceGraph.addVertex(self.project)
    # Drop stale out-edges before re-adding the current dependency list
    self.dependenceGraph.clearEdges(self.project, outOnly = 1)
    self.dependenceGraph.addEdges(self.project, outputs = map(self.getInstalledProject, self.executeTarget('getDependencies')))
    # Store back so a persistent argument database sees the update
    self.argDB['projectDependenceGraph'] = self.dependenceGraph
    return self.dependenceGraph
+
  def getSIDLTemplate(self):
    '''Lazily build and cache the SIDL template for this project'''
    if not hasattr(self, '_sidlTemplate'):
      import build.templates.SIDL

      self._sidlTemplate = build.templates.SIDL.Template(self.sourceDB, self.project, self.dependenceGraph)
      # Add default client languages
      map(self._sidlTemplate.addClient, self.argDB['clientLanguages'])
    return self._sidlTemplate
  sidlTemplate = property(getSIDLTemplate, doc = 'This is the default template for SIDL operations')
+
  def getCompileTemplate(self):
    '''Lazily build and cache the compile template for this project'''
    if not hasattr(self, '_compileTemplate'):
      import build.templates.Compile

      # Package names are the basenames of the project's SIDL files
      packages = map(lambda f: os.path.splitext(os.path.basename(f))[0], self.filesets['sidl'])
      self._compileTemplate = build.templates.Compile.Template(self.argDB, self.sourceDB, self.project, self.dependenceGraph, self.sidlTemplate.usingSIDL, packages)
    return self._compileTemplate
  compileTemplate = property(getCompileTemplate, doc = 'This is the default template for source operations')
+
  def t_getDependencies(self):
    '''Return a list of the URLs for projects upon which this one depends
       - The default is no dependencies; presumably overridden by project make modules'''
    return []
+
  def t_activate(self):
    '''Load all necessary data for this project into the current RDict, without destroying previous data'''
    # Update project in 'installedprojects'; prepending lets the newest activation shadow older entries
    self.argDB['installedprojects'] = [self.project]+self.argDB['installedprojects']
    self.debugPrint('Activated project '+str(self.project), 2, 'install')
    return self.project
+
  def t_deactivate(self):
    '''Unload the first matching project in the current RDict'''
    # Remove project from 'installedprojects'
    p = self.getInstalledProject(self.project.getUrl())
    if not p is None:
      # Reassign rather than mutate in place so a persistent database notices the change
      projects = self.argDB['installedprojects']
      projects.remove(p)
      self.argDB['installedprojects'] = projects
    self.debugPrint('Deactivated project '+str(self.project), 2, 'install')
    return self.project
+
  def t_configure(self):
    '''Runs configure.py if it is present, and either configure.log is missing or -forceConfigure is given'''
    if self.argDB['noConfigure']: return
    import config.framework

    root      = self.project.getRoot()
    framework = config.framework.Framework(sys.argv[1:])
    # Propagate our debugging settings into the configure framework
    for arg in ['debugLevel', 'debugSections']:
      framework.argDB[arg] = self.argDB[arg]
    framework.argDB['log'] = os.path.join(root, 'configure.log')
    if not self.configureHeader is None:
      framework.header     = self.configureHeader
    # Load default configure module
    try:
      framework.addChild(self.getMakeModule(root, 'configure').Configure(framework))
    except ImportError:
      # No configure.py in this project: nothing to configure
      return
    # Run configuration only if the log file was absent or it is forced
    if self.argDB['forceConfigure'] or not framework.checkLog(framework.logName):
      try:
        framework.configure()
      except Exception, e:
        import traceback

        # Report to both the console and the configure log before re-raising
        msg = 'CONFIGURATION FAILURE:\n'+str(e)+'\n'
        print msg
        framework.log.write(msg)
        traceback.print_tb(sys.exc_info()[2], file = framework.log)
        raise e
      framework.storeSubstitutions(self.argDB)
    return
+
+  def t_sidl(self):
+    '''Recompile the SIDL for this project'''
+    # Run the SIDL build graph over this project's 'sidl' fileset
+    return self.executeGraph(self.sidlTemplate.getTarget(), input = self.filesets['sidl'])
+
+  def buildClient(self, proj, lang):
+    import build.buildGraph
+
+    clientDir = self.compileTemplate.usingSIDL.getClientRootDir(lang)
+    self.debugPrint('Building '+lang+' client in '+proj.getRoot(), 1, 'build')
+    maker  = self.getMakeModule(proj.getRoot()).PetscMake(None, self.argDB)
+    maker.setupProject()
+    maker.setupDependencies()
+    maker.setupSourceDB(maker.project)
+    sidlGraph    = maker.sidlTemplate.getClientTarget(lang, fullTarget = 1, forceRebuild = 1)
+    compileGraph = maker.compileTemplate.getClientTarget(lang)
+    compileGraph.prependGraph(sidlGraph)
+    maker.executeGraph(compileGraph, input = maker.filesets['sidl'])
+    return
+
+  def missingClients(self):
+    '''Check that this project has built all the clients, and if not return True'''
+    import build.buildGraph
+
+    for lang in self.compileTemplate.usingSIDL.clientLanguages:
+      clientDir = self.compileTemplate.usingSIDL.getClientRootDir(lang)
+      if not os.path.isdir(os.path.join(self.project.getRoot(), clientDir)):
+        self.debugPrint('Building missing '+lang+' client in '+self.project.getRoot(), 1, 'build')
+        return 1
+    return 0
+
+  def getProjectCompileGraph(self, forceRebuild = 0):
+    '''Return the compile graph for the given project without dependencies
+       - Returns (graph, input) where input maps None (meaning "all roots")
+         to the sidl fileset; callers rewrite the None key to real roots'''
+    input        = {None: self.filesets['sidl']}
+    sidlGraph    = self.sidlTemplate.getTarget(forceRebuild = forceRebuild)
+    compileGraph = self.compileTemplate.getTarget()
+    compileGraph.prependGraph(sidlGraph)
+    return (compileGraph, input)
+
+  def getCompileGraph(self):
+    '''Return (compileGraph, input) covering this project and all its dependencies
+       - If a pickled builder checkpoint exists in the RDict, resume from it
+       - Otherwise walk the dependence graph in topological order, building a
+         maker for each dependent project and prepending its compile graph'''
+    if 'checkpoint' in self.argDB:
+      input        = {}
+      # Resume the previously pickled builder; its graph already includes deps
+      self.builder = cPickle.loads(self.argDB['checkpoint'])
+      compileGraph = self.builder.buildGraph
+      self.debugPrint('Loaded checkpoint for '+str(self.project), 2, 'build')
+    else:
+      import build.buildGraph
+
+      compileGraph = build.buildGraph.BuildGraph()
+      input        = {}
+      for p in build.buildGraph.BuildGraph.topologicalSort(self.dependenceGraph, self.project):
+        try:
+          if p == self.project:
+            maker = self
+          else:
+            # Dependent project: bring up its own maker before asking for its graph
+            maker = self.getMakeModule(p.getRoot()).PetscMake(None, self.argDB)
+            maker.setupProject()
+            maker.setupDependencies()
+            maker.setupSourceDB(maker.project)
+            maker.setupBuild()
+          (depGraph, depInput) = maker.getProjectCompileGraph(forceRebuild = maker.missingClients())
+          compileGraph.prependGraph(depGraph)
+          self.debugPrint('Prepended graph for '+str(maker.project), 4, 'build')
+          # A None key means "feed this input to every root of depGraph"
+          if None in depInput:
+            for r in build.buildGraph.BuildGraph.getRoots(depGraph): depInput[r] = depInput[None]
+            del depInput[None]
+          input.update(depInput)
+        except ImportError:
+          self.debugPrint('No make module present in '+p.getRoot(), 2, 'build')
+    return (compileGraph, input)
+
+  def t_sidlCheckpoint(self):
+    '''Run the SIDL stage of the build, then checkpoint the builder state
+       - Executes the graph only up to an articulation vertex placed between
+         the SIDL and compile stages, pickles the builder into the RDict, and
+         returns the remaining compile graph (with dependency graphs prepended)'''
+    import build.buildGraph
+
+    # Add project dependency compile graphs
+    # TODO: Remove all "forward" edges in dependenceGraph (edges which connect further down to already reachable nodes)
+    depGraphs = []
+    for v in self.dependenceGraph.outEdges[self.project]:
+      try:
+        maker = self.getMakeModule(v.getRoot()).PetscMake(None, self.argDB)
+        maker.setupProject()
+        maker.setupDependencies()
+        maker.setupSourceDB(maker.project)
+        maker.setupBuild()
+        # Recursively checkpoint each dependency's SIDL stage
+        depGraphs.append(maker.executeTarget('sidlCheckpoint'))
+      except ImportError:
+        self.debugPrint('No make module present in '+v.getRoot(), 2, 'build')
+
+    sidlGraph    = self.sidlTemplate.getTarget()
+    # Articulation point: a bare Transform separating SIDL from compile
+    # NOTE(review): uses build.transform without an explicit import here —
+    # presumably imported elsewhere in this module; verify
+    articGraph   = build.buildGraph.BuildGraph([build.transform.Transform()])
+    compileGraph = self.compileTemplate.getTarget()
+    startVertex  = build.buildGraph.BuildGraph.getRoots(sidlGraph)[0]
+    input        = {startVertex: self.filesets['sidl']}
+    endVertex    = build.buildGraph.BuildGraph.getRoots(articGraph)[0]
+    compileGraph.prependGraph(articGraph)
+    compileGraph.prependGraph(sidlGraph)
+
+    # Execute only the SIDL portion (start..articulation vertex)
+    output = self.executeGraph(compileGraph, start = startVertex, input = input, end = endVertex)
+    compileGraph.removeSubgraph(sidlGraph)
+    for g in depGraphs:
+      compileGraph.prependGraph(g)
+    # Reset the cursor so the pickled builder restarts cleanly on resume
+    self.builder.currentVertex = None
+    self.argDB['checkpoint']   = cPickle.dumps(self.builder)
+    return compileGraph
+
+  def t_compile(self):
+    '''Recompile the entire source for this project'''
+    # Build (or resume from checkpoint) the full graph including dependencies
+    (compileGraph, input) = self.getCompileGraph()
+    return self.executeGraph(compileGraph, input = input)
+
+  def t_compilePrograms(self):
+    '''Recompile executables for this project'''
+    (compileGraph, input) = self.getCompileGraph()
+    # Append one executable target per registered program
+    for program in self.filesets['programs']:
+      compileGraph.appendGraph(self.compileTemplate.getExecutableTarget(program))
+    return self.executeGraph(compileGraph, input = input)
+
+  def t_install(self):
+    '''Install all necessary data for this project into the current RDict'''
+    # Update language specific information
+    self.compileTemplate.install()
+    # Update project in 'installedprojects': drop every previous entry with
+    # this URL, then append the current project
+    projects = self.argDB['installedprojects']
+    map(lambda p: projects.remove(p), self.getInstalledProject(self.project.getUrl(), returnAll = 1))
+    self.argDB['installedprojects'] = projects+[self.project]
+    self.debugPrint('Installed project '+str(self.project), 2, 'install')
+    # Update project in 'projectDependenceGraph'
+    import build.buildGraph
+
+    self.argDB['projectDependenceGraph'] = self.dependenceGraph
+    self.debugPrint('Updated project dependence graph with project '+str(self.project), 2, 'install')
+    # Remove any build checkpoint
+    if 'checkpoint' in self.argDB:
+      del self.argDB['checkpoint']
+    return self.project
+
+  def t_uninstall(self):
+    '''Remove all instances of this project from the current RDict'''
+    # Remove project from 'installedprojects' (all entries matching the URL)
+    projects = self.argDB['installedprojects']
+    map(lambda p: projects.remove(p), self.getInstalledProject(self.project.getUrl(), returnAll = 1))
+    self.argDB['installedprojects'] = projects
+    # Remove project from 'projectDependenceGraph'
+    dependenceGraph = self.argDB['projectDependenceGraph']
+    dependenceGraph.removeVertex(self.project)
+    self.argDB['projectDependenceGraph'] = dependenceGraph
+    # Remove configure log so the next activate re-runs configure
+    logName = os.path.join(self.project.getRoot(), 'configure.log')
+    if os.path.isfile(logName):
+      os.remove(logName)
+    return self.project
+
+  def t_citool(self):
+    '''Run bk citool on all the projects'''
+    # Iterate every installed project and launch the BitKeeper checkin tool
+    for p in self.argDB['installedprojects']:
+      print 'Running bk citool on '+p.getRoot()
+      self.executeShellCommand('cd '+p.getRoot()+'; bk citool')
+
+  def t_push(self):
+    '''Run bk push on all the projects'''
+    for p in self.argDB['installedprojects']:
+      print 'Running bk push on '+p.getRoot()
+      # Best effort: a failed push for one project should not stop the rest
+      try:
+        self.executeShellCommand('cd '+p.getRoot()+'; bk push')
+      except:
+        pass
+
+  def t_pull(self):
+    '''Run bk pull on all the projects'''
+    # Unlike t_push, a pull failure here propagates to the caller
+    for p in self.argDB['installedprojects']:
+      print 'Running bk pull on '+p.getRoot()
+      self.executeShellCommand('cd '+p.getRoot()+'; bk pull')
+
+  def getHeadRevision(self, proj):
+    '''Return the BitKeeper head revision of the given project's root'''
+    import install.retrieval
+    return install.retrieval.Retriever().bkHeadRevision(proj.getRoot())
+
+  def t_makeStamp(self):
+    '''Return a version stamp: a map from project URL to its BK head revision
+       for this project and everything reachable in the dependence graph'''
+    import build.buildGraph
+
+    stamp  = {}
+#    bsProj = self.getInstalledProject('bk://sidl.bkbits.net/BuildSystem')
+#    stamp[bsProj.getUrl()] = self.getHeadRevision(bsProj)
+    for p in build.buildGraph.BuildGraph.depthFirstVisit(self.dependenceGraph, self.project):
+      stamp[p.getUrl()] = self.getHeadRevision(p)
+    return stamp
+
+  def t_default(self):
+    '''Activate, configure, build, and install this project'''
+    # Returns the expansion list consumed by expandTargets()
+    return ['activate', 'configure', 'compile', 'install']
+
+  def t_printTargets(self):
+    '''Prints a list of all the targets available'''
+    # Graph-based targets first (no docstrings available for these)
+    for target in self.targets:
+      print target+':'
+      print '  No help available'
+    # Then every t_* method, using its docstring as help text
+    for attr in dir(self):
+      if attr[0:2] == 't_':
+        print attr[2:]+':'
+        if getattr(self, attr).__doc__:
+          print '  '+getattr(self, attr).__doc__
+        else:
+          print '  No help available'
+    return
+
+  def t_purge(self):
+    '''Purge a fileset from the source database, identified using -fileset=<set name>'''
+    if 'fileset' in self.argDB:
+      setName = self.argDB['fileset']
+      if setName in self.filesets:
+        self.debugPrint('Purging source database of fileset '+setName, 1, 'sourceDB')
+        for f in self.filesets[setName]:
+          self.debugPrint('Purging '+f, 3, 'sourceDB')
+          try:
+            del self.sourceDB[f]
+          except KeyError:
+            print 'File '+f+' not found for purge'
+      else:
+        print 'FileSet '+setName+' not found for purge'
+      # Persist the database even if the named set was missing
+      self.sourceDB.save()
+    return
+
+  def t_update(self):
+    '''Update a value in the source database, identified using -fileset=<set name>'''
+    if 'fileset' in self.argDB:
+      setName = self.argDB['fileset']
+      if setName in self.filesets:
+        self.debugPrint('Updating source database of fileset '+setName, 1, 'sourceDB')
+        for f in self.filesets[setName]:
+          self.debugPrint('Updating '+f, 3, 'sourceDB')
+          try:
+            self.sourceDB.updateSource(f)
+          except KeyError:
+            print 'File '+f+' not found in source database'
+      else:
+        print 'FileSet '+setName+' not found for update'
+      # Persist the database even if the named set was missing
+      self.sourceDB.save()
+    return
+
+  def t_printSIDLHTML(self):
+    '''Print all the SIDL dependencies as HTML'''
+    import build.compile.SIDL
+
+    self.argDB.target = []
+    # Harvest include flags from the Python client target
+    # NOTE(review): 'includes' is unbound if no vertex has getIncludeFlags —
+    # presumably at least one always does; verify
+    for v in self.sidlTemplate.getClientTarget('Python').vertices:
+      if hasattr(v, 'getIncludeFlags'):
+        includes = v.getIncludeFlags(None)
+    mod      = build.compile.SIDL.Compiler(self.sourceDB, 'Python', None, 0, self.sidlTemplate.usingSIDL).getCompilerModule('scandalDoc')
+    args     = ['-printer=[ANL.SIDL.PrettyPrinterHTML]']+includes+self.filesets['sidl']
+    self.debugPrint('Running scandalDoc with arguments '+str(args), 3, 'build')
+    compiler = mod.ScandalDoc(args)
+    compiler.run()
+    return compiler.outputFiles
+
+  def t_printSIDL(self):
+    '''Print all the SIDL dependencies as plain text'''
+    import build.compile.SIDL
+
+    self.argDB.target = []
+    # Same harvesting scheme as t_printSIDLHTML, only the printer differs
+    # NOTE(review): 'includes' is unbound if no vertex has getIncludeFlags
+    for v in self.sidlTemplate.getClientTarget('Python').vertices:
+      if hasattr(v, 'getIncludeFlags'):
+        includes = v.getIncludeFlags(None)
+    mod      = build.compile.SIDL.Compiler(self.sourceDB, 'Python', None, 0, self.sidlTemplate.usingSIDL).getCompilerModule('scandalDoc')
+    args     = ['-printer=[ANL.SIDL.PrettyPrinter]']+includes+self.filesets['sidl']
+    self.debugPrint('Running scandalDoc with arguments '+str(args), 3, 'build')
+    compiler = mod.ScandalDoc(args)
+    compiler.run()
+    return compiler.outputFiles
+
+  def t_printSIDLBabel(self,exportDir):
+    '''Print the SIDL for this project and all dependent projects in
+       a format Babel can parse; the combined file is written to
+       exportDir/allsidl.sidl'''
+    import build.compile.SIDL
+
+    self.argDB.target = []
+    # NOTE(review): 'includes' is unbound if no vertex has getIncludeFlags
+    for v in self.sidlTemplate.getClientTarget('Python').vertices:
+      if hasattr(v, 'getIncludeFlags'):
+        includes = v.getIncludeFlags(None)
+    mod      = build.compile.SIDL.Compiler(self.sourceDB, 'Python', None, 0, self.sidlTemplate.usingSIDL).getCompilerModule('scandalDoc')
+    args     = ['-filename='+os.path.join(exportDir,'allsidl.sidl')]+['-printer=[ANL.SIDL.PrettyPrinterBabel]']+includes+self.filesets['sidl']
+    self.debugPrint('Running scandalDoc with arguments '+str(args), 3, 'build')
+    compiler = mod.ScandalDoc(args)
+    compiler.run()
+    return compiler.outputFiles
+
+  def t_exportBabel(self):
+    '''Exports all the SIDL projects and impls in a form that Babel can handle'''
+    self.argDB.setType('exportDir', nargs.ArgString(key='exportDir', help='Directory to export for Babel'))
+    exportDir = self.argDB['exportDir']
+    if not os.path.isdir(exportDir): os.makedirs(exportDir)
+    self.t_printSIDLBabel(exportDir)
+
+    directories = self.getDependencyPaths()
+    import getsplicers
+    getsplicers.getSplicers(directories)
+    try:
+      #output = self.executeShellCommand('cd '+exportDir+'; babel --server=C allsidl.sidl')
+      import commands
+      (status,output) = commands.getstatusoutput('cd '+exportDir+';babel --server=C++ allsidl.sidl')
+    except:
+      pass
+    print status
+    print output
+    import setsplicers
+    setsplicers.setSplicers(exportDir)
+
+  def getDependencyPaths(self):
+    '''Return the roots of this project and (recursively) all its installed
+       dependencies, without duplicates'''
+    directories = [self.project.getRoot()]
+    ip          = self.argDB['installedprojects']
+    for j in self.t_getDependencies():
+      for l in ip:
+        if l.getUrl() == j:
+          # Recurse through the dependency's own maker
+          maker = self.getMakeModule(l.getRoot()).PetscMake(None, self.argDB)
+          dirs  = maker.getDependencyPaths()
+          for d in dirs:
+            if not d in directories: directories.append(d)
+    return directories
+    
+  def t_updateBootstrap(self):
+    '''Create a bootstrap tarball and copy it to the FTP site'''
+    import install.installerclass
+
+    installer = install.installerclass.Installer()
+    tarball   = installer.backup(self.project.getUrl())
+    #self.executeShellCommand('scp '+tarball+' petsc at terra.mcs.anl.gov://mcs/ftp/pub/petsc/sidl/'+tarball)
+    os.rmdir('backup')
+    # Deliberately disabled: the scp destination above is wrong, so this
+    # target always raises (the return below is unreachable)
+    raise RuntimeError('Need to fix the path')
+    return
+
+    
+  def t_updateWebsite(self):
+    '''Print all the SIDL dependencies as HTML and move to the website'''
+    # Generated files are removed locally after a successful scp
+    for f in self.executeTarget('printSIDLHTML'):
+      self.executeShellCommand('scp '+f+' '+self.project.getWebDirectory()+'/'+f)
+      os.remove(f)
+    return
+
+  def cpFile(self, localFile, remoteDirectory, remoteFile = None, recursive = 0):
+    '''Copy localFile to remoteDirectory via scp, optionally renaming it to
+       remoteFile and optionally recursively; failures are silently ignored'''
+    cmd = 'scp '
+    if recursive: cmd += '-r '
+    if remoteFile:
+      try: self.executeShellCommand(cmd+localFile+' '+os.path.join(remoteDirectory, remoteFile))
+      except: pass
+    else:
+      try: self.executeShellCommand(cmd+localFile+' '+remoteDirectory)
+      except: pass
+    return
+
+  def cpWebsite(self, localFile, remoteFile = None, recursive = 0):
+    '''Copy localFile to this project's web directory via cpFile'''
+    return self.cpFile(localFile, self.project.getWebDirectory(), remoteFile, recursive)
+
+  def setupProject(self):
+    '''Hook for user operations before project activation
+       - Default is a no-op; subclasses may override'''
+    return
+
+  def setupBuild(self):
+    '''Hook for user operations after project activation, but before build
+       - Default is a no-op; subclasses may override'''
+    return
+
+  def stampBuild(self):
+    '''Create a version stamp for this build, store it in the RDict and log it'''
+    stamp = self.executeTarget('makeStamp')
+    # Keyed per project URL so stamps from different projects coexist
+    self.argDB['stamp-'+self.project.getUrl()] = stamp
+    self.debugPrint('Build stamp: '+str(stamp), 4, 'build')
+    return stamp
+
+  def executeGraph(self, graph, start = None, input = None, end = None):
+    '''Execute a BuildGraph
+       - With -displayTarget the graph is only displayed, not executed
+       - Returns the output of the last executed vertex (or of "end", at
+         which execution stops early)'''
+    output = None
+    if self.argDB['displayTarget']:
+      graph.display()
+    else:
+      self.builder.buildGraph = graph
+      for vertex in self.builder.execute(start = start, input = input):
+        output = vertex.output
+        if end == vertex:
+          break
+    return output
+
+  def executeTarget(self, target):
+    '''Execute the target and return the output
+       - Graph targets in self.targets are executed but yield no output here;
+         method targets (t_<name>) return their result'''
+    self.debugPrint('Executing target '+target, 4, 'build')
+    output = None
+    if self.targets.has_key(target):
+      self.executeGraph(self.targets[target])
+    elif hasattr(self, 't_'+target):
+      output = getattr(self, 't_'+target)()
+    else:
+      print 'Invalid target: '+str(target)
+    return output
+
+  def expandTargets(self,target):
+    '''Return a copy of targets, after expansion of special targets
+       - None means "use the command-line targets from argDB"
+       - "default" is replaced in place by the t_default expansion'''
+    if target is None:
+      target = self.argDB.target[:]
+    else:
+      if not isinstance(target, list):
+        target = [target]
+      else:
+        # Copy so the caller's list is never mutated
+        target = target[:]
+    if 'default' in target:
+      idx = target.index('default')
+      target[idx:idx] = self.executeTarget('default')
+      target.remove('default')
+    return target
+
+  def mainBuild(self, target = None):
+    '''Execute the build operation
+       - 'activate' and 'configure' are handled specially because the later
+         setup stages must be interleaved between them; remaining targets run
+         after setupBuild/stampBuild'''
+    if self.argDB['help']:
+      self.executeTarget('printTargets')
+      return
+    target = self.expandTargets(target)
+    self.setupProject()
+    if 'activate' in target:
+      self.executeTarget('activate')
+      target.remove('activate')
+      if not len(target): return
+    self.setupDependencies()
+    self.setupSourceDB(self.project)
+    if 'configure' in target:
+      self.executeTarget('configure')
+      target.remove('configure')
+      if not len(target): return
+    self.setupBuild()
+    self.stampBuild()
+    map(self.executeTarget, target)
+    return
+
+  def main(self, target = None):
+    '''Execute the build operation and handle any exceptions
+       - Failures are logged to self.log; the traceback is also echoed to the
+         console unless -noStackTrace is given. Returns None on failure.'''
+    try:
+      return self.mainBuild(target)
+    except Exception, e:
+      import traceback
+
+      msg = 'BUILD FAILURE:\n'+str(e)+'\n'
+      print msg
+      self.log.write(msg)
+      traceback.print_tb(sys.exc_info()[2], file = self.log)
+      if not self.argDB['noStackTrace']:
+        traceback.print_tb(sys.exc_info()[2])

Added: long/3D/Gale/trunk/python/BuildSystem/build/processor.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/processor.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/processor.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,528 @@
+from __future__ import generators
+import base
+import build.transform
+import build.fileset
+
+import os
+
+class Processor(build.transform.Transform):
+  '''Processor is the base class for source transformation, such as compilers, code generators, linkers, etc.
+     - A FileSet with inputTag is transformed into a FileSet with outputTag.
+     - Processed files are updated in the source database, either immediately, or put into a fileset tagged "update "<inputTag>
+     - If isSetwise is true, FileSets are processed as a whole, otherwise individual files are processed
+     - Files tagged "old "<inputTag> will be placed in "old "<outputTag>'''
+  def __init__(self, sourceDB, processor, inputTag, outputTag, isSetwise, updateType):
+    build.transform.Transform.__init__(self)
+    self.sourceDB   = sourceDB
+    self.processor  = processor
+    self.inputTag   = inputTag
+    # Normalize to a list so checkTag/checkOldTag can always use membership
+    if not isinstance(self.inputTag, list): self.inputTag = [self.inputTag]
+    self.output.tag = outputTag
+    self.isSetwise  = isSetwise
+    self.updateType = updateType
+    if self.updateType == 'deferred':
+      # Deferred updates accumulate here and are tagged for later processing
+      self.deferredUpdates = build.fileset.FileSet(tag = 'update '+self.inputTag[0])
+      self.output.children.append(self.deferredUpdates)
+    self.oldOutput = build.fileset.FileSet(tag = 'old '+outputTag, mustExist = 0)
+    self.output.children.append(self.oldOutput)
+    return
+
+  def setProcessor(self, processor):
+    '''Set the processor executable'''
+    self._processor = processor
+    return
+
+  def getProcessor(self):
+    '''Return the processor executable'''
+    return self._processor
+  processor = property(getProcessor, setProcessor, doc = 'This is the executable which will process files')
+
+  def handleErrors(self, command, status, output):
+    '''Raise on a nonzero exit status; log warnings found in the output'''
+    if status:
+      raise RuntimeError('Could not execute \''+command+'\':\n'+output)
+    elif output.find('warning') >= 0:
+      self.debugPrint('\''+command+'\': '+output, 1, 'compile')
+
+  def checkTag(self, f, tag):
+    '''- If the tag matches the transform tag, return True
+       - Otherwise return False'''
+    if tag in self.inputTag:
+      return 1
+    return 0
+
+  def checkOldTag(self, f, tag):
+    '''- If the tag matches the "old "<transform tag>, return True
+       - Otherwise return False'''
+    if tag in map(lambda t: 'old '+t, self.inputTag):
+      return 1
+    return 0
+
+  def getIntermediateFileName(self, source, ext = '.o'):
+    '''Get the name of the object file for "source"
+       - The source path is flattened into the temp directory by replacing
+         '/' with '+', so getSourceFileName can invert the mapping'''
+    import tempfile
+    (dir, file) = os.path.split(source)
+    (base, dum) = os.path.splitext(file)
+    return os.path.join(tempfile.tempdir, dir.replace('/', '+')+'+'+base+ext)
+
+  def getSourceFileName(self, object):
+    '''Get the name of the source file without extension for "object"'''
+    (dir, file) = os.path.split(object)
+    (base, dum) = os.path.splitext(file)
+    return base.replace('+', '/')
+
+  def processFile(self, f, set):
+    # Default is a no-op; subclasses implement the actual transformation
+    return
+
+  def processOldFile(self, f, set):
+    # Unchanged files are passed through into the "old" output set
+    self.oldOutput.append(f)
+    return
+
+  def processFileSet(self, set):
+    for f in set:
+      self.handleFile(f, set)
+    return self.output
+
+  def processOldFileSet(self, set):
+    for f in set:
+      self.processOldFile(f, set)
+    return self.output
+
+  def updateFile(self, source):
+    '''Update the file in the source database
+       - With deferred updates, a new FileSet is created with an update tag'''
+    if self.updateType == 'immediate':
+      self.sourceDB.updateSource(source)
+    elif self.updateType == 'deferred':
+      self.deferredUpdates.append(source)
+    return
+
+  def handleFile(self, f, set):
+    '''Process and update the file if checkTag() returns True, otherwise call the default handler'''
+    if self.checkTag(f, set.tag):
+      self.processFile(f, set)
+      self.updateFile(f)
+      return self.output
+    elif self.checkOldTag(f, set.tag):
+      self.processOldFile(f, set)
+      return self.output
+    return build.transform.Transform.handleFile(self, f, set)
+
+  def handleFileSet(self, set):
+    '''Process and update the set if execution is setwise and checkTag() returns True, otherwise call the default handler'''
+    if self.isSetwise:
+      if self.checkTag(None, set.tag):
+        # NOTE(review): the base processFileSet routes through handleFile,
+        # which already calls updateFile — the map below would then update
+        # each file twice; subclasses that override processFileSet avoid
+        # this. Confirm intended for the base class.
+        self.processFileSet(set)
+        map(self.updateFile, set)
+        map(self.handleFileSet, set.children)
+      elif self.checkOldTag(None, set.tag):
+        self.processOldFileSet(set)
+        map(self.handleFileSet, set.children)
+      else:
+        build.transform.Transform.handleFileSet(self, set)
+    else:
+      build.transform.Transform.handleFileSet(self, set)
+    return self.output
+
+class Compiler(Processor):
+  '''A Compiler processes any FileSet with source, and outputs a FileSet of the intermediate object files.'''
+  def __init__(self, sourceDB, compiler, inputTag, outputTag = None, isSetwise = 0, updateType = 'immediate'):
+    if not isinstance(inputTag, list): inputTag = [inputTag]
+    if outputTag is None:
+      # Default output tag, e.g. "c" -> "c object"
+      outputTag = inputTag[0]+' object'
+    Processor.__init__(self, sourceDB, compiler, inputTag, outputTag, isSetwise, updateType)
+    self.includeDirs = []
+    self.defines     = []
+    return
+
+  def includeDirsIter(self):
+    '''Return an iterator for the include directories
+       - Entries may be plain strings or objects with getPath() yielding
+         several directories'''
+    for dir in self.includeDirs:
+      try:
+        dir = str(dir)
+      except TypeError:
+        for d in dir.getPath():
+          yield d
+      else:
+        yield dir
+    return
+
+  def checkIncludeDirectory(self, dirname):
+    '''Check that the include directory exists
+       - Arguments preceeded by dashes are ignored'''
+    if not os.path.isdir(dirname):
+      if not dirname or not dirname[0:2] == '-I':
+        raise RuntimeError('Include directory '+dirname+' does not exist')
+
+  def getIncludeFlags(self, source = None):
+    '''Return a list of the compiler flags specifying include directories'''
+    flags = []
+    for dirname in self.includeDirsIter():
+      try:
+        self.checkIncludeDirectory(dirname)
+        # Entries already starting with '-' are passed through verbatim
+        if dirname[0] == '-':
+          flags.append(dirname)
+        else:
+          flags.append('-I'+dirname)
+      except RuntimeError, e:
+        # Missing directories are logged and skipped, not fatal
+        self.debugPrint(str(e), 3, 'compile')
+    return flags
+
+  def getDefineFlags(self, source = None):
+    '''Return a list of the compiler flags specifying defines
+       - A tuple entry (NAME, VALUE) becomes -DNAME=VALUE, a string -DNAME'''
+    flags = []
+    for define in self.defines:
+      if isinstance(define, tuple):
+        flags.append('-D'+define[0]+'='+define[1])
+      else:
+        flags.append('-D'+define)
+    return flags
+
+  def getOutputFlags(self, source):
+    '''Return a list of the compiler flags specifying the intermediate output file
+       - '-o <object>' is one list entry; this is fine because the command is
+         later joined into a single shell string'''
+    if isinstance(source, build.fileset.FileSet): source = source[0]
+    object = self.getIntermediateFileName(source)
+    if object:
+      return ['-c', '-o '+object]
+    return []
+
+  def getOptimizationFlags(self, source = None):
+    '''Return a list of the compiler optimization flags. The default is empty.'''
+    return []
+
+  def getWarningFlags(self, source = None):
+    '''Return a list of the compiler warning flags. The default is empty.'''
+    return []
+
+  def getFlags(self, source):
+    '''Return the full flag list: optimization, warnings, defines, includes, output'''
+    return self.getOptimizationFlags(source)+self.getWarningFlags(source)+self.getDefineFlags(source)+self.getIncludeFlags(source)+self.getOutputFlags(source)
+
+  def processFile(self, source, set):
+    '''Compile "source"'''
+    return self.processFileSet(build.fileset.FileSet([source], tag = set.tag))
+
+  def processFileSet(self, set):
+    '''Compile all the files in "set"'''
+    objs = map(self.getIntermediateFileName, set)
+    self.debugPrint('Compiling '+str(set)+' into '+str(objs), 3, 'compile')
+    command = ' '.join([self.processor]+self.getFlags(set)+set)
+    output  = self.executeShellCommand(command, self.handleErrors)
+    self.output.extend(objs)
+    return self.output
+
+  def processOldFile(self, f, set):
+    '''Put "old" object in for old source'''
+    self.oldOutput.append(self.getIntermediateFileName(f))
+    return self.output
+
+class Linker(Processor):
+  '''A Linker processes any FileSet with intermediate object files, and outputs a FileSet of libraries.'''
+  def __init__(self, sourceDB, using, linker, inputTag, outputTag = None, isSetwise = 0, updateType = 'immediate', library = None, libExt = None):
+    Processor.__init__(self, sourceDB, linker, inputTag, outputTag, isSetwise, updateType)
+    self.using          = using
+    self.library        = library
+    self.libExt         = libExt
+    self.extraLibraries = []
+    return
+
+  def __str__(self):
+    return 'Linker('+self.processor+') for '+str(self.inputTag)
+
+  def getProcessor(self):
+    '''Return the processor executable
+       - Falls back to the language object's linker when none was given'''
+    if self._processor is None:
+      return self.using.linker
+    return self._processor
+  processor = property(getProcessor, Processor.setProcessor, doc = 'This is the executable which will process files')
+
+  def getLibExt(self):
+    return self._libExt
+
+  def setLibExt(self, ext):
+    self._libExt = ext
+  libExt = property(getLibExt, setLibExt, doc = 'The library extension')
+
+  def extraLibrariesIter(self):
+    '''Return an iterator for the extra libraries
+       - Empty library names are possible, and they are ignored'''
+    for lib in self.extraLibraries:
+      try:
+        lib = str(lib)
+      except TypeError:
+        # Objects supporting getPath() can contribute several libraries
+        for l in lib.getPath():
+          if l: yield l
+      else:
+        if lib: yield lib
+    return
+
+  def getLibrary(self, object):
+    '''Get the library for "object", and ensures that a FileSet all has the same library'''
+    if isinstance(object, build.fileset.FileSet):
+      # Map every member to its library; more than one distinct key is an error
+      # NOTE: keys()[0] relies on Python 2 dicts returning lists from keys()
+      library = dict(zip(map(self.getLibrary, object), range(len(object))))
+      if len(library) > 1: raise RuntimeError('Invalid setwise link due to incompatible libraries: '+str(library.keys()))
+      return library.keys()[0]
+    if not self.library is None:
+      (library, ext) = os.path.splitext(str(self.library))
+    else:
+      # Derive the library name from the (flattened) source file name
+      source      = self.getSourceFileName(object)
+      (dir, file) = os.path.split(source)
+      (base, ext) = os.path.splitext(file)
+      # Handle Python
+      if base[-7:] == '_Module':
+        library = os.path.join(dir, base[:-7]+'module')
+      else:
+        library = os.path.join(dir, base)
+    # Ensure the directory exists
+    dir = os.path.dirname(library)
+    if dir and not os.path.exists(dir):
+      os.makedirs(dir)
+    if self.libExt:
+      return library+'.'+self.libExt
+    return library
+
+  def getOptimizationFlags(self, source):
+    '''Return a list of the linker optimization flags.'''
+    return []
+
+  def getLinkerFlags(self, source):
+    '''Return a list of the linker specific flags. The default is gives the extraLibraries as arguments.'''
+    flags = [self.using.getLinkerFlags()]
+    for lib in self.extraLibrariesIter():
+      # Options and object files are passed verbatim
+      if lib[0] == '-' or lib.endswith('.o'):
+        flags.append(lib)
+      # Big Intel F90 hack (the shared library is broken)
+      elif lib.endswith('intrins.a'):
+        flags.append(lib)
+      else:
+        (dir, file) = os.path.split(lib)
+        (base, ext) = os.path.splitext(file)
+        if not base.startswith('lib'):
+          flags.append(lib)
+        else:
+          # lib<name>.<ext> becomes -L<dir> (plus runtime path flag) -l<name>
+          if dir:
+            if 'C_LINKER_SLFLAG' in self.argDB:
+              flags.extend(['-L'+dir, self.argDB['C_LINKER_SLFLAG']+dir])
+            else:
+              flags.extend(['-L'+dir])
+          flags.append('-l'+base[3:])
+    return flags
+
+  def getOutputFlags(self, source):
+    '''Return a list of the linker flags specifying the library'''
+    return ['-o '+self.getLibrary(source)]
+
+  def getFlags(self, source):
+    '''Return the full flag list: optimization, linker-specific, output'''
+    return self.getOptimizationFlags(source)+self.getLinkerFlags(source)+self.getOutputFlags(source)
+
+  def processFile(self, source, set):
+    '''Link "source"'''
+    # Leave this set unchanged
+    build.transform.Transform.handleFile(self, source, set)
+    return self.processFileSet(build.fileset.FileSet([source], tag = set.tag))
+
+  def processFileSet(self, set):
+    '''Link all the files in "set"'''
+    if len(set) == 0: return self.output
+    # Leave this set unchanged
+    for f in set:
+      build.transform.Transform.handleFile(self, f, set)
+    library = self.getLibrary(set)
+    self.debugPrint('Linking '+str(set)+' into '+library, 3, 'compile')
+    command = ' '.join([self.processor]+set+self.getFlags(set))
+    output  = self.executeShellCommand(command, self.handleErrors)
+    self.output.append(library)
+    return self.output
+
+  def processOldFile(self, f, set):
+    '''Output old library'''
+    self.oldOutput.append(self.getLibrary(f))
+    return self.output
+
+class DirectoryArchiver(Linker):
+  '''A DirectoryArchiver processes any FileSet with intermediate object files, and outputs a FileSet of those files moved to a storage directory.'''
+  def __init__(self, sourceDB, using, archiver, inputTag, outputTag = None, isSetwise = 0, updateType = 'none', library = None, libExt = 'dir'):
+    if not isinstance(inputTag, list): inputTag = [inputTag]
+    if outputTag is None:
+      outputTag = inputTag[0]+' library'
+    Linker.__init__(self, sourceDB, using, archiver, inputTag, outputTag, isSetwise, updateType, library, libExt)
+    return
+
+  def __str__(self):
+    return 'DirectoryArchiver('+self.processor+') for '+str(self.inputTag)
+
+  def getOptimizationFlags(self, source):
+    '''Return a list of the archiver optimization flags. The default is empty.'''
+    return []
+
+  def getLinkerFlags(self, source):
+    '''Return a list of the archiver specific flags. The default is empty.'''
+    return []
+
+  def getOutputFlags(self, source):
+    '''Return a list of the archiver flags specifying the archive
+       - Here the "archive" is just the destination directory'''
+    return [self.getLibrary(source)]
+
+  def processFileSet(self, set):
+    '''Link all the files in "set"'''
+    if len(set) == 0: return self.output
+    # Leave this set unchanged
+    for f in set:
+      build.transform.Transform.handleFile(self, f, set)
+    library = self.getLibrary(set)
+    # Ensure the directory exists
+    if not os.path.exists(library):
+      os.makedirs(library)
+    self.debugPrint('Linking '+str(set)+' into '+library, 3, 'compile')
+    command = ' '.join([self.processor]+set+self.getFlags(set))
+    output  = self.executeShellCommand(command, self.handleErrors)
+    # Output the destination paths of the moved objects
+    self.output.extend(map(lambda f: os.path.join(library, os.path.basename(f)), set))
+    return self.output
+
+  def processOldFile(self, f, set):
+    '''Convert old objects'''
+    self.oldOutput.append(os.path.join(self.getLibrary(f), os.path.basename(f)))
+    return self.output
+    return self.output
+
class Archiver(Linker):
  '''An Archiver processes any FileSet with intermediate object files, and outputs a FileSet of static libraries.'''
  def __init__(self, sourceDB, using, archiver, inputTag, outputTag = None, isSetwise = 0, updateType = 'immediate', library = None, libExt = 'a'):
    if not isinstance(inputTag, list):
      inputTag = [inputTag]
    if outputTag is None:
      # Derive the default output tag from the first input tag
      outputTag = inputTag[0]+' library'
    Linker.__init__(self, sourceDB, using, archiver, inputTag, outputTag, isSetwise, updateType, library, libExt)
    return

  def __str__(self):
    return 'Archiver('+self.processor+') for '+str(self.inputTag)

  def getOptimizationFlags(self, source):
    '''An archiver takes no optimization flags'''
    return []

  def getLinkerFlags(self, source):
    '''Return the archiver operation flags: create, replace members, verbose (crv)'''
    return ['crv']

  def getOutputFlags(self, source):
    '''The archive itself is the only output argument'''
    return [self.getLibrary(source)]

  def processOldFile(self, f, set):
    '''An Archiver produces no "old" filesets'''
    return self.output
+
class SharedLinker(Linker):
  '''A SharedLinker processes any FileSet of libraries, and outputs a FileSet of shared libraries
     - This linker now works correctly with Cygwin'''
  def __init__(self, sourceDB, using, linker, inputTag, outputTag = None, isSetwise = 0, updateType = 'none', library = None, libExt = None):
    # Normalize to a list so that inputTag[0] below is well defined
    if not isinstance(inputTag, list): inputTag = [inputTag]
    if outputTag is None:
      outputTag = inputTag[0]+' shared library'
    Linker.__init__(self, sourceDB, using, linker, inputTag, outputTag, isSetwise, updateType, library, libExt)
    return

  def getLibExt(self):
    # Fall back to the Unix shared object extension when none was given
    if self._libExt is None:
      return 'so'
    return self._libExt
  libExt = property(getLibExt, Linker.setLibExt, doc = 'The library extension')

  def __str__(self):
    if self.argDB['HAVE_CYGWIN']:
      return 'Cygwin Shared linker('+self.processor+') for '+str(self.inputTag)
    return 'Shared linker('+self.processor+') for '+str(self.inputTag)

  def checkSharedLibrary(self, source):
    '''Check that a shared library can be opened, otherwise throw a RuntimeException'''
    # The BS link checker is optional: when its modules are absent we only log
    # that the library was not verified rather than failing the build
    # (note: Python 2 "except X, e" syntax below)
    try:
      import BS.LinkCheckerI.Checker
      import BS.LinkError

      try:
        BS.LinkCheckerI.Checker.Checker().openLibrary(source)
      except BS.LinkError.Exception, e:
        raise RuntimeError(e.getMessage())
    except ImportError:
      self.debugPrint('Did not check shared library '+source, 3, 'link')

  def getOptimizationFlags(self, source):
    '''Return a list of the linker optimization flags. The default is empty.'''
    return []

  def getLinkerFlags(self, source):
    '''Return a list of the linker specific flags. The default is the flags for shared linking plus the base class flags.'''
    flags = []
    # e.g. -shared; empty when configure found no such flag for this toolchain
    if self.argDB['SHARED_LIBRARY_FLAG']:
      flags.append(self.argDB['SHARED_LIBRARY_FLAG'])
    flags.extend(Linker.getLinkerFlags(self, source))
    return flags
+
class ImportSharedLinker(SharedLinker):
  '''An ImportSharedLinker processes any FileSet of libraries, and outputs a FileSet of import libraries
     - Import libraries are only produced under Cygwin; elsewhere the input set passes through unchanged'''
  def __init__(self, sourceDB, using, linker, inputTag, outputTag = None, isSetwise = 0, updateType = 'none', library = None, libExt = None):
    SharedLinker.__init__(self, sourceDB, using, linker, inputTag, outputTag, isSetwise, updateType, library, libExt)
    if outputTag is None:
      # Mirror the default chosen by SharedLinker.__init__ so that the import tag
      # is well defined; previously outputTag+' import' raised a TypeError here
      # whenever the caller relied on the default output tag
      if not isinstance(inputTag, list): inputTag = [inputTag]
      outputTag = inputTag[0]+' shared library'
    self.imports     = build.fileset.FileSet()
    self.imports.tag = outputTag+' import'
    return

  def __str__(self):
    return 'Import shared linker('+self.processor+') for '+str(self.inputTag)

  def getLibExt(self):
    # Cygwin import libraries conventionally use the dll.a extension
    if self._libExt is None:
      return 'dll.a'
    return self._libExt
  libExt = property(getLibExt, Linker.setLibExt, doc = 'The library extension')

  def getOutputFlags(self, source):
    '''Return a list of the linker flags specifying both the dll and its import library'''
    implibname = self.getLibrary(source)
    # This is a really ugly hack.
    # Since the dllname is a symbol in the implib, we should be generating the implibname from the dllname,
    # not by hoping the dllname is the implibname without the .a
    dllname    = implibname[:-2]
    return ['-o '+dllname+' -Wl,--out-implib='+implibname]

  def handleErrors(self, command, status, output):
    '''Ignore errors when trying to link libraries
       - This is the only way to get correct C++ mangling'''
    return

  def processFileSet(self, set):
    '''Link all the files in "set" when under Cygwin; otherwise pass them through unchanged'''
    if self.argDB['HAVE_CYGWIN']:
      # super() must start the MRO search at this class (not at SharedLinker as
      # before), or any future SharedLinker.processFileSet override would be skipped
      super(ImportSharedLinker, self).processFileSet(set)
    else:
      # Leave this set unchanged
      for f in set:
        build.transform.Transform.handleFile(self, f, set)
    return self.output

  def processOldFile(self, f, set):
    '''Output old library when under Cygwin'''
    if self.argDB['HAVE_CYGWIN']:
      super(ImportSharedLinker, self).processOldFile(f, set)
    return self.output
+
class LibraryAdder (build.transform.Transform):
  '''A LibraryAdder adds every library matching inputTag to the extraLibraries member of linker'''
  def __init__(self, inputTag, linker, prepend = 0):
    build.transform.Transform.__init__(self)
    if isinstance(inputTag, list):
      self.inputTag = inputTag
    else:
      self.inputTag = [inputTag]
    self.linker  = linker
    self.prepend = prepend
    return

  def __str__(self):
    return 'Adding libraries from '+str(self.inputTag)+' to '+str(self.linker)

  def handleFile(self, f, set):
    '''Put all libraries matching inputTag in linker.extraLibraries'''
    # Guard clause: files whose set tag does not match are passed straight through
    if not (self.inputTag is None or set.tag in self.inputTag):
      return build.transform.Transform.handleFile(self, f, set)
    if self.prepend:
      self.linker.extraLibraries.insert(0, f)
    else:
      self.linker.extraLibraries.append(f)
    return build.transform.Transform.handleFile(self, f, set)

Added: long/3D/Gale/trunk/python/BuildSystem/build/templates/Compile.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/templates/Compile.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/templates/Compile.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,139 @@
+'''
+A Template is a default mechanism for constructing a BuildGraph. They are similar in spirit to
+the default Make rules. Users with simple needs can use them to automatically build a project.
+Any template should have a getTarget() call which provides a BuildGraph.
+'''
+import base
+import build.buildGraph
+import project
+
class Template(base.Base):
  '''This template constructs BuildGraphs capable of compiling source into object files'''
  def __init__(self, argDB, sourceDB, project, dependenceGraph, usingSIDL, packages):
    import build.compile.SIDL
    base.Base.__init__(self)
    self.argDB           = argDB
    self.sourceDB        = sourceDB
    self.project         = project
    self.dependenceGraph = dependenceGraph
    self.usingSIDL       = usingSIDL
    self.packages        = packages
    # User-specified preprocessor defines, include directories and link libraries
    self.defines         = []
    self.includeDirs     = []
    self.extraLibraries  = []
    # Languages requested here in addition to those already configured on usingSIDL
    self.serverLanguages = build.compile.SIDL.SIDLLanguageList()
    self.clientLanguages = build.compile.SIDL.SIDLLanguageList()
    return

  def __getattr__(self, name):
    '''Handle requests for nonexistent using<lang> objects'''
    # Only attributes of the form using<Lang> are synthesized; anything else
    # raises AttributeError normally so typos are still detected
    if name.startswith('using'):
      return self.getUsing(name)
    raise AttributeError('No attribute '+name)

  def addServer(self, lang):
    '''Designate that a server for lang should be built'''
    # Skip languages already configured on usingSIDL to avoid duplicate targets
    if not lang in self.serverLanguages and not lang in self.usingSIDL.serverLanguages:
      self.serverLanguages.append(lang)
    return

  def addClient(self, lang):
    '''Designate that a client for lang should be built'''
    if not lang in self.clientLanguages and not lang in self.usingSIDL.clientLanguages:
      self.clientLanguages.append(lang)
    return

  def getUsing(self, name):
    '''Create a using<lang> object from build.templates and name it _using<lang>'''
    # Cache the constructed object under the mangled attribute name
    if hasattr(self, '_'+name):
      return getattr(self, '_'+name)
    cls = 'Using'+name[5:]
    try:
      obj = getattr(__import__('build.templates.'+name, globals(), locals(), [cls]), cls)(self.argDB, self.sourceDB, self.project, self.usingSIDL)
    except ImportError:
      # Fall back to a top-level module of the same name
      obj = getattr(__import__(name, globals(), locals(), [cls]), cls)(self.argDB, self.sourceDB, self.project, self.usingSIDL)
    setattr(self, '_'+name, obj)
    return obj

  def setupExtraOptions(self, lang, compileGraph):
    '''Set client include directories for all dependencies and the runtime library for linking'''
    import os

    # Hack for excluding build system: Should ask if it is a dependency for Runtime
    useRuntime = not self.project.getUrl() == 'bk://sidl.bkbits.net/BuildSystem'

    for vertex in compileGraph.vertices:
      if hasattr(vertex, 'defines'):
        # Custom defines
        vertex.defines.extend(self.defines)
      if hasattr(vertex, 'includeDirs'):
        dfs = build.buildGraph.BuildGraph.depthFirstVisit(self.dependenceGraph, self.project)
        # Client includes for project dependencies
        vertex.includeDirs.extend([project.ProjectPath(self.usingSIDL.getClientRootDir(lang), v.getUrl()) for v in dfs])
        # Runtime includes
        if useRuntime:
          vertex.includeDirs.extend(self.usingSIDL.getRuntimeIncludes())
        # Custom includes
        vertex.includeDirs.extend(self.includeDirs)
      if hasattr(vertex, 'extraLibraries'):
        if useRuntime:
          if not (self.project == self.usingSIDL.getRuntimeProject() and lang == self.usingSIDL.getRuntimeLanguage()):
            # Runtime libraries
            vertex.extraLibraries.extend(self.usingSIDL.getRuntimeLibraries())
        # Custom libraries
        vertex.extraLibraries.extend(self.extraLibraries)
    return compileGraph

  def getServerTarget(self, lang, package):
    '''Return the compile BuildGraph for the lang server of package, with extra options applied'''
    using = getattr(self, 'using'+lang.capitalize())
    return self.setupExtraOptions(lang, using.getServerCompileTarget(package))

  def getServerTargets(self, isStatic = 0):
    '''Return a BuildGraph which will compile the servers specified
       - This is a linear array since all source is independent'''
    target = build.buildGraph.BuildGraph()
    for lang in self.usingSIDL.serverLanguages+self.serverLanguages:
      for package in self.packages:
        # Build only packages whose static/dynamic kind matches the isStatic request
        if (isStatic and not package in self.usingSIDL.staticPackages) or (not isStatic and package in self.usingSIDL.staticPackages): continue
        target.appendGraph(self.getServerTarget(lang, package))
    return target

  def getClientTarget(self, lang, fullTarget = 0):
    '''Return the compile BuildGraph for the lang client; with fullTarget, append a source database update'''
    # build.fileState was used here without any import in this module; import it
    # locally so the fullTarget path cannot fail with an unresolved name
    import build.fileState

    using  = getattr(self, 'using'+lang.capitalize())
    target = self.setupExtraOptions(lang, using.getClientCompileTarget())
    if fullTarget:
      target.appendGraph(build.buildGraph.BuildGraph([build.fileState.Update(self.sourceDB)]))
    return target

  def getClientTargets(self):
    '''Return a BuildGraph which will compile the clients specified
       - This is a linear array since all source is independent'''
    target = build.buildGraph.BuildGraph()
    for lang in self.usingSIDL.clientLanguages+self.clientLanguages:
      target.appendGraph(self.getClientTarget(lang))
    return target

  def getTarget(self):
    '''Return a BuildGraph which will compile source into object files'''
    # Local import: build.fileState is not imported at module scope in this file
    import build.fileState

    target = build.buildGraph.BuildGraph()
    # Static servers first, then clients, then the remaining (dynamic) servers
    target.appendGraph(self.getServerTargets(isStatic = 1))
    target.appendGraph(self.getClientTargets())
    target.appendGraph(self.getServerTargets())
    target.appendGraph(build.buildGraph.BuildGraph([build.fileState.Update(self.sourceDB)]))
    return target

  def getExecutableTarget(self, program):
    '''Return a BuildGraph which will compile user provided source into an executable'''
    target = build.buildGraph.BuildGraph()
    for lang in self.usingSIDL.clientLanguages:
      using = getattr(self, 'using'+lang.capitalize())
      graph = self.setupExtraOptions(lang, using.getExecutableCompileTarget(program))
      target.appendGraph(graph)
    return target

  def install(self):
    '''Install the clients and servers for every configured language'''
    for lang in self.usingSIDL.clientLanguages:
      getattr(self, 'using'+lang.capitalize()).installClient()
    for lang in self.usingSIDL.serverLanguages:
      for package in self.packages:
        getattr(self, 'using'+lang.capitalize()).installServer(package)

Added: long/3D/Gale/trunk/python/BuildSystem/build/templates/SIDL.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/templates/SIDL.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/templates/SIDL.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,112 @@
+'''
+A Template is a default mechanism for constructing a BuildGraph. They are similar in spirit to
+the default Make rules. Users with simple needs can use them to automatically build a project.
+Any template should have a getTarget() call which provides a BuildGraph.
+'''
+import base
+import build.buildGraph
+import build.compile.SIDL
+import project
+
class ServerRootMap (project.ProjectPath):
  '''Maps a SIDL file to the server root directory its compilation would generate'''
  def __init__(self, projectUrl, language, usingSIDL):
    project.ProjectPath.__init__(self, '', projectUrl)
    self.language  = language
    self.usingSIDL = usingSIDL
    return

  def __call__(self, f):
    '''Return the server root directory corresponding to the SIDL file "f"'''
    import os
    package = os.path.splitext(os.path.basename(f))[0]
    rootDir = self.usingSIDL.getServerRootDir(self.language, package)
    return os.path.join(self.projectRoot, rootDir)
+
class Template(base.Base):
  '''This template constructs BuildGraphs capable of compiling SIDL into server and client source'''
  def __init__(self, sourceDB, project, dependenceGraph, usingSIDL = None):
    base.Base.__init__(self)
    self.sourceDB        = sourceDB
    self.project         = project
    self.dependenceGraph = dependenceGraph
    self.usingSIDL       = usingSIDL
    if self.usingSIDL is None:
      import build.templates.usingSIDL
      self.usingSIDL = build.templates.usingSIDL.UsingSIDL(self.sourceDB, self.project)
    return

  def addServer(self, lang):
    '''Designate that a server for lang should be built, which also implies the client'''
    return self.usingSIDL.addServer(lang)

  def addClient(self, lang):
    '''Designate that a client for lang should be built'''
    return self.usingSIDL.addClient(lang)

  def addStaticPackage(self, package):
    '''For a static package, the client is statically linked to the server since dynamic loading is not feasible'''
    return self.usingSIDL.addStaticPackage(package)

  def addRepositoryDirs(self, compiler):
    '''Add the root of every project we depend on to the compiler repository search path'''
    compiler.repositoryDirs.extend([vertex.getRoot() for vertex in build.buildGraph.BuildGraph.depthFirstVisit(self.dependenceGraph, self.project)])
    return compiler

  def getServerTarget(self, lang):
    '''Return a BuildGraph which will compile SIDL into the server for lang'''
    # Import build.bk here rather than in getServerTargets: previously a direct
    # call to this method could fail because build.bk had never been imported
    import build.bk

    target = build.buildGraph.BuildGraph()
    rootFunc   = ServerRootMap(self.project.getUrl(), lang, self.usingSIDL)
    lastVertex = None
    # Linear pipeline: tag generated files, open them for edit, run the SIDL
    # compiler, retag the updated files, and close them again
    vertex     = build.bk.Tag(rootFunc = rootFunc, inputTag = ['sidl', 'old sidl'])
    target.addEdges(lastVertex, outputs = [vertex])
    lastVertex = vertex
    vertex     = build.bk.Open()
    target.addEdges(lastVertex, outputs = [vertex])
    lastVertex = vertex
    vertex     = self.addRepositoryDirs(build.compile.SIDL.Compiler(self.sourceDB, lang, self.project.getRoot(), 1, self.usingSIDL))
    target.addEdges(lastVertex, outputs = [vertex])
    lastVertex = vertex
    vertex     = build.bk.Tag(rootFunc = rootFunc, inputTag = ['update sidl', 'old sidl'])
    target.addEdges(lastVertex, outputs = [vertex])
    lastVertex = vertex
    vertex     = build.bk.Close()
    target.addEdges(lastVertex, outputs = [vertex])
    return target

  def getServerTargets(self):
    '''Return a BuildGraph which will compile SIDL into the servers specified'''
    target = build.buildGraph.BuildGraph()
    for lang in self.usingSIDL.serverLanguages:
      target.addSubgraph(self.getServerTarget(lang))
    return target

  def getClientTarget(self, lang, fullTarget = 0, forceRebuild = 0):
    '''Return a BuildGraph which will compile SIDL into the client specified'''
    # Local import: build.fileState is not imported at module scope in this file
    import build.fileState

    target = build.buildGraph.BuildGraph()
    target.addVertex(self.addRepositoryDirs(build.compile.SIDL.Compiler(self.sourceDB, lang, self.project.getRoot(), 0, self.usingSIDL)))
    if fullTarget:
      target.prependGraph(build.buildGraph.BuildGraph([build.fileState.GenericTag(self.sourceDB, 'sidl', ext = 'sidl', force = forceRebuild)]))
      target.appendGraph(build.buildGraph.BuildGraph([build.fileState.Update(self.sourceDB)]))
    return target

  def getClientTargets(self):
    '''Return a BuildGraph which will compile SIDL into the clients specified
       - Currently this graph is just a list of unconnected vertices, which will be linked up in the final target'''
    target = build.buildGraph.BuildGraph()
    for lang in self.usingSIDL.clientLanguages:
      target.addSubgraph(self.getClientTarget(lang))
    return target

  def getTarget(self, forceRebuild = 0):
    '''Return a BuildGraph which will compile SIDL into the servers and clients specified'''
    import build.fileState

    target = build.buildGraph.BuildGraph()
    tagger = build.fileState.GenericTag(self.sourceDB, 'sidl', ext = 'sidl', force = forceRebuild)
    target.addVertex(tagger)
    client = self.getClientTargets()
    server = self.getServerTargets()
    target.addSubgraph(client)
    target.addSubgraph(server)
    # Every client and server root depends on the tagger which identifies changed SIDL
    target.addEdges(tagger, outputs = build.buildGraph.BuildGraph.getRoots(client)+build.buildGraph.BuildGraph.getRoots(server))
    target.appendGraph(build.buildGraph.BuildGraph([build.fileState.Update(self.sourceDB)]))
    return target

Added: long/3D/Gale/trunk/python/BuildSystem/build/templates/__init__.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/templates/__init__.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/templates/__init__.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1 @@
# Submodules provided by build.templates.
# NOTE(review): this is spelled "all", not "__all__", so it does not affect
# "from build.templates import *"; presumably the build system reads this list
# directly -- confirm against the callers before renaming.
all = ['Compile', 'SIDL', 'usingC', 'usingCxx', 'usingMathematica', 'usingPython', 'usingSIDL']

Added: long/3D/Gale/trunk/python/BuildSystem/build/templates/usingC.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/templates/usingC.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/templates/usingC.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,74 @@
+import base
+
+import os
+
class UsingC (base.Base):
  '''Provides the preprocessor, compiler, and linker machinery for building C source'''
  def __init__(self, argDB, sourceDB, project, usingSIDL):
    import config.base

    base.Base.__init__(self)
    self.language  = 'C'
    self.argDB     = argDB
    self.sourceDB  = sourceDB
    self.project   = project
    self.usingSIDL = usingSIDL
    # Per-language caches for the config.compile helper objects
    self.languageModule     = {}
    self.preprocessorObject = {}
    self.compilerObject     = {}
    self.linkerObject       = {}
    return

  def isCompiled(self):
    '''Return true since C source must be compiled in order to execute'''
    return 1

  def getCompileSuffix(self):
    '''Return the suffix for compilable files (.c)'''
    compiler = self.getCompilerObject(self.language)
    return compiler.sourceExtension

  def getLinker(self):
    if hasattr(self, '_linker'):
      return self._linker
    return self.argDB[self.getLinkerObject(self.language).name]
  def setLinker(self, linker):
    self._linker = linker
  linker = property(getLinker, setLinker, doc = 'The linker corresponding to the C compiler')

  def getLinkerFlags(self):
    if hasattr(self, '_linkerFlags'):
      return self._linkerFlags
    return self.getLinkerObject(self.language).getFlags()
  def setLinkerFlags(self, flags):
    self._linkerFlags = flags
  linkerFlags = property(getLinkerFlags, setLinkerFlags, doc = 'The flags for the C linker')

  #####################
  # Language Operations
  def getLanguageModule(self, language):
    '''Import (once) and return the config.compile module for "language"'''
    if not language in self.languageModule:
      moduleName = 'config.compile.'+language
      module     = __import__(moduleName)
      # __import__ hands back the top-level package; walk down to the leaf module
      for component in moduleName.split('.')[1:]:
        module = getattr(module, component)
      self.languageModule[language] = module
    return self.languageModule[language]

  def getPreprocessorObject(self, language):
    '''Create (once), check, and return the Preprocessor for "language"'''
    if language in self.preprocessorObject:
      return self.preprocessorObject[language]
    preprocessor = self.getLanguageModule(language).Preprocessor(self.argDB)
    preprocessor.checkSetup()
    self.preprocessorObject[language] = preprocessor
    return preprocessor

  def getCompilerObject(self, language):
    '''Create (once), check, and return the Compiler for "language"'''
    if language in self.compilerObject:
      return self.compilerObject[language]
    compiler = self.getLanguageModule(language).Compiler(self.argDB)
    compiler.checkSetup()
    self.compilerObject[language] = compiler
    return compiler

  def getLinkerObject(self, language):
    '''Create (once), check, and return the Linker for "language"'''
    if language in self.linkerObject:
      return self.linkerObject[language]
    linker = self.getLanguageModule(language).Linker(self.argDB)
    linker.checkSetup()
    self.linkerObject[language] = linker
    return linker

Added: long/3D/Gale/trunk/python/BuildSystem/build/templates/usingCxx.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/templates/usingCxx.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/templates/usingCxx.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,260 @@
+import base
+import build.buildGraph
+import build.processor
+import build.transform
+import project
+
+import os
+
class UsingCxx (base.Base):
  '''Gathers everything needed to build Cxx code: compile/link configuration and
     BuildGraph targets for SIDL clients, servers, and user executables.'''
  def __init__(self, argDB, sourceDB, project, usingSIDL, usingC = None):
    import config.base

    base.Base.__init__(self)
    self.language    = 'Cxx'
    self.argDB       = argDB
    self.sourceDB    = sourceDB
    self.project     = project
    self.usingSIDL   = usingSIDL
    self.usingC      = usingC
    self.clArgs      = None
    self.configBase  = config.base.Configure(self)
    if self.usingC is None:
      import build.templates.usingC
      self.usingC    = build.templates.usingC.UsingC(self.argDB, self.sourceDB, self.project, self.usingSIDL)
    self.setup()
    # Driver may need many outside includes and libraries
    self.programIncludeDirs = {}
    self.programLibraryTags = {}
    self.programLibraries   = {}
    # Per-language caches for the config.compile helper objects
    self.languageModule     = {}
    self.preprocessorObject = {}
    self.compilerObject     = {}
    self.linkerObject       = {}
    return

  def __getstate__(self):
    '''Do not save the include directories and extra libraries'''
    d = base.Base.__getstate__(self)
    del d['includeDirs']
    del d['extraLibraries']
    return d

  def __setstate__(self, d):
    '''Recreate the include directories and extra libraries'''
    base.Base.__setstate__(self, d)
    self.setup()
    return

  def setup(self):
    '''Setup include directories and extra libraries'''
    self.setupIncludeDirectories()
    self.setupExtraLibraries()
    return

  def setupIncludeDirectories(self):
    # No extra include directories by default; templates append to this list
    self.includeDirs = []
    return self.includeDirs

  def setupExtraLibraries(self):
    # No extra link libraries by default; templates append to this list
    self.extraLibraries = []
    return self.extraLibraries

  def isCompiled(self):
    '''Returns True if source needs to be compiled in order to execute'''
    return 1

  def getCompileSuffix(self):
    '''Return the suffix for compilable files (.cc)'''
    return self.getCompilerObject(self.language).sourceExtension

  def getLinker(self):
    if not hasattr(self, '_linker'):
      return self.argDB[self.getLinkerObject(self.language).name]
    return self._linker
  def setLinker(self, linker):
    self._linker = linker
  linker = property(getLinker, setLinker, doc = 'The linker corresponding to the Cxx compiler')

  def getLinkerFlags(self):
    if not hasattr(self, '_linkerFlags'):
      return self.getLinkerObject(self.language).getFlags()
    return self._linkerFlags
  def setLinkerFlags(self, flags):
    self._linkerFlags = flags
  linkerFlags = property(getLinkerFlags, setLinkerFlags, doc = 'The flags for the Cxx linker')

  def getServerLibrary(self, package, proj = None, lang = None):
    '''Server libraries follow the naming scheme: lib<project>-<lang>-<package>-server.a'''
    if proj is None: proj = self.project
    if lang is None: lang = self.language
    return project.ProjectPath(os.path.join('lib', 'lib'+proj.getName()+'-'+lang.lower()+'-'+package+'-server.a'), proj.getUrl())

  def getExecutableLibrary(self, program):
    '''Executable libraries follow the naming scheme: lib<project>-<lang>-<program>-exec.a'''
    return project.ProjectPath(os.path.join('lib', 'lib'+self.project.getName()+'-'+self.language.lower()+'-'+program+'-exec.a'), self.project.getUrl())

  def getGenericCompileTarget(self, action):
    '''All purposes are in Cxx, so only a Cxx compiler is necessary.
       Returns (graph, compiler): a graph tagging the source and feeding the compiler vertex.'''
    import build.compile.Cxx
    # build.fileState is not imported at module scope in this file; import it here
    # so this method does not depend on some other module having loaded it first
    import build.fileState
    inputTag  = map(lambda a: self.language.lower()+' '+a, action)
    outputTag = self.language.lower()+' '+action[0]+' '+self.language.lower()
    tagger    = build.fileState.GenericTag(self.sourceDB, outputTag, inputTag = inputTag, ext = 'cc', deferredExt = 'hh')
    compiler  = build.compile.Cxx.Compiler(self.sourceDB, self, inputTag = outputTag)
    compiler.includeDirs.extend(self.includeDirs)
    target    = build.buildGraph.BuildGraph()
    target.addVertex(tagger)
    target.addEdges(tagger, outputs = [compiler])
    return (target, compiler)

  def getIORCompileTarget(self, action):
    '''IOR source is always C, compiled with the usingC support object'''
    import build.compile.C
    # Local import for the same reason as in getGenericCompileTarget
    import build.fileState
    outputTag = self.language.lower()+' '+action+' '+self.usingC.language.lower()
    tagger    = build.fileState.GenericTag(self.sourceDB, outputTag, inputTag = self.language.lower()+' '+action, ext = 'c', deferredExt = 'h')
    compiler  = build.compile.C.Compiler(self.sourceDB, self.usingC, inputTag = outputTag)
    compiler.includeDirs.extend(self.includeDirs)
    target    = build.buildGraph.BuildGraph()
    target.addVertex(tagger)
    target.addEdges(tagger, outputs = [compiler])
    return (target, compiler)

  def getServerCompileTarget(self, package):
    '''All purposes are in Cxx, so only a Cxx compiler is necessary for the skeleton and implementation.'''
    inputTag      = ['server '+package]
    if len(self.usingSIDL.staticPackages):
      inputTag.append('client')
    (target,    compiler)    = self.getGenericCompileTarget(inputTag)
    (iorTarget, iorCompiler) = self.getIORCompileTarget('server '+package)
    compiler.includeDirs.append(project.ProjectPath(self.usingSIDL.getServerRootDir(self.language, package), self.project.getUrl()))
    inputTags     = [compiler.output.tag, iorCompiler.output.tag]
    archiveTag    = self.language.lower()+' server library directory'
    sharedTag     = self.language.lower()+' server shared library'
    clientTag     = self.language.lower()+' client shared library'
    library       = self.getServerLibrary(package)
    linker        = build.buildGraph.BuildGraph()
    # Archive the objects into a directory, then shared-link the server library,
    # pulling in any client shared libraries produced earlier
    archiver      = build.processor.DirectoryArchiver(self.sourceDB, self, 'cp', inputTags, archiveTag, isSetwise = 1, library = library)
    consolidator  = build.transform.Consolidator(archiveTag, archiveTag, 'old '+archiveTag)
    sharedLinker  = build.processor.SharedLinker(self.sourceDB, self, None, archiveTag, sharedTag, isSetwise = 1, library = library)
    sharedLinker.extraLibraries.extend(self.extraLibraries)
    libraryAdder  = build.processor.LibraryAdder([clientTag, 'old '+clientTag], sharedLinker)
    archiveFilter = build.transform.Filter(archiveTag)
    linker.addVertex(archiver)
    linker.addEdges(consolidator, [archiver])
    linker.addEdges(libraryAdder, [consolidator])
    linker.addEdges(sharedLinker, [libraryAdder])
    linker.addEdges(archiveFilter, [sharedLinker])
    linker.addEdges(build.transform.Remover(inputTags), [archiveFilter])
    target.appendGraph(iorTarget)
    target.appendGraph(linker)
    return target

  def getClientCompileTarget(self):
    '''All purposes are in Cxx, so only a Cxx compiler is necessary for the stubs and cartilage.'''
    # With static packages the client is linked into the server archive instead,
    # so there is no separate client build
    if len(self.usingSIDL.staticPackages):
      return build.buildGraph.BuildGraph()
    (target, compiler) = self.getGenericCompileTarget(['client'])
    sharedTag    = self.language.lower()+' client shared library'
    linker       = build.buildGraph.BuildGraph()
    sharedLinker = build.processor.SharedLinker(self.sourceDB, self, None, compiler.output.tag, sharedTag)
    sharedLinker.extraLibraries.extend(self.extraLibraries)
    linker.addVertex(sharedLinker)
    linker.addEdges(build.transform.Remover([compiler.output.tag]), [sharedLinker])
    target.appendGraph(linker)
    return target

  def getExecutableCompileTarget(self, program):
    '''All source should be Cxx'''
    name         = os.path.basename(program)
    prefix       = 'executable '+name
    (target, compiler) = self.getGenericCompileTarget([prefix])
    if name in self.programIncludeDirs:
      compiler.includeDirs.extend(self.programIncludeDirs[name])
    sharedTag    = self.language.lower()+' '+prefix+' shared library'
    clientTag    = self.language.lower()+' client shared library'
    if name in self.programLibraryTags:
      progTags   = self.programLibraryTags[name]
    else:
      progTags   = []
    library      = self.getExecutableLibrary(name)
    linker       = build.buildGraph.BuildGraph()
    sharedLinker = build.processor.SharedLinker(self.sourceDB, self, None, compiler.output.tag, sharedTag, isSetwise = 1, library = library)
    sharedLinker.extraLibraries.extend(self.extraLibraries)
    if name in self.programLibraries:
      sharedLinker.extraLibraries.extend(self.programLibraries[name])
    sharedAdder  = build.processor.LibraryAdder([clientTag, 'old '+clientTag], sharedLinker)
    # NOTE(review): compiler.processor is passed where the other call sites pass a
    # "using" object -- confirm against build.processor.Linker.__init__
    progLinker   = build.processor.Linker(self.sourceDB, compiler.processor, sharedTag, prefix, isSetwise = 1, library = program)
    progAdder    = build.processor.LibraryAdder([clientTag, 'old '+clientTag]+progTags, progLinker)
    progLinker.extraLibraries.extend(self.extraLibraries)
    if name in self.programLibraries:
      progLinker.extraLibraries.extend(self.programLibraries[name])
    linker.addVertex(sharedAdder)
    linker.addEdges(sharedLinker, [sharedAdder])
    linker.addEdges(progAdder,    [sharedLinker])
    linker.addEdges(progLinker,   [progAdder])
    linker.addEdges(build.transform.Remover(compiler.output.tag), [progLinker])
    target.appendGraph(linker)
    return target

  def installClient(self):
    '''Does nothing right now'''
    return

  def installClasses(self, package):
    '''Register each server class implementation library with the project'''
    for cls in self.usingSIDL.getClasses(package):
      self.project.addImplementation(cls, os.path.join(self.project.getRoot(), self.usingSIDL.getServerLibrary(self.project.getName(), self.language, package, isShared = 1)), self.language)
    return

  def installServer(self, package):
    '''Does nothing right now'''
    self.installClasses(package)
    return

  #####################
  # Language Operations
  def getLanguageModule(self, language):
    '''Import (once) and return the config.compile module for "language"'''
    if not language in self.languageModule:
      moduleName = 'config.compile.'+language
      components = moduleName.split('.')
      module     = __import__(moduleName)
      # __import__ hands back the top-level package; walk down to the leaf module
      for component in components[1:]:
        module   = getattr(module, component)
      self.languageModule[language] = module
    return self.languageModule[language]

  def getPreprocessorObject(self, language):
    '''Create (once), check, and return the Preprocessor for "language"'''
    if not language in self.preprocessorObject:
      self.preprocessorObject[language] = self.getLanguageModule(language).Preprocessor(self.argDB)
      self.preprocessorObject[language].checkSetup()
    return self.preprocessorObject[language]

  def getCompilerObject(self, language):
    '''Create (once), check, and return the Compiler for "language"'''
    if not language in self.compilerObject:
      self.compilerObject[language] = self.getLanguageModule(language).Compiler(self.argDB)
      self.compilerObject[language].checkSetup()
    return self.compilerObject[language]

  def getLinkerObject(self, language):
    '''Create (once), check, and return the Linker for "language"'''
    if not language in self.linkerObject:
      self.linkerObject[language] = self.getLanguageModule(language).Linker(self.argDB)
      self.linkerObject[language].checkSetup()
    return self.linkerObject[language]

Added: long/3D/Gale/trunk/python/BuildSystem/build/templates/usingPython.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/templates/usingPython.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/templates/usingPython.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,165 @@
+import base
+import build.buildGraph
+import build.processor
+import build.transform
+import project
+
+import os
+
+class UsingPython (base.Base):
+  '''Build template for Python. Python sources need no compilation, so the
+     targets here only compile the generated C glue (skeletons/stubs) and set
+     up paths; the Python headers and runtime library are discovered through
+     distutils.sysconfig and recorded in argDB.'''
+  def __init__(self, argDB, sourceDB, project, usingSIDL, usingC = None):
+    '''Record the databases and project; create a default C template if none is supplied'''
+    base.Base.__init__(self)
+    self.argDB     = argDB
+    self.sourceDB  = sourceDB
+    self.project   = project
+    self.usingSIDL = usingSIDL
+    self.usingC    = usingC
+    if self.usingC is None:
+      # Imported here so build.templates.usingC is only needed when no C template is given
+      import build.templates.usingC
+      self.usingC = build.templates.usingC.UsingC(self.argDB, self.sourceDB, self.project, self.usingSIDL)
+    self.language = 'Python'
+    self.setup()
+    return
+
+  def __getstate__(self):
+    '''Do not save the include directories and extra libraries'''
+    d = base.Base.__getstate__(self)
+    del d['includeDirs']
+    del d['extraLibraries']
+    return d
+
+  def __setstate__(self, d):
+    '''Recreate the include directories and extra libraries'''
+    self.__dict__.update(d)
+    self.setup()
+    return
+
+  def setup(self):
+    '''Setup include directories and extra libraries'''
+    self.setupIncludeDirectories()
+    self.setupExtraLibraries()
+    return
+
+  def setupIncludeDirectories(self):
+    '''Record the Python C API header directory in argDB as PYTHON_INCLUDE
+       - Discovery failures are deliberately ignored; the argument path is set regardless'''
+    try:
+      if not 'PYTHON_INCLUDE' in self.argDB:
+        import distutils.sysconfig
+        self.argDB['PYTHON_INCLUDE'] = distutils.sysconfig.get_python_inc()
+    except: pass
+    self.includeDirs = [project.ArgumentPath('PYTHON_INCLUDE')]
+    return self.includeDirs
+
+  def setupExtraLibraries(self):
+    '''Locate the Python runtime library (PYTHON_LIB) and extra link inputs
+       (PYTHON_EXTRA_LIB) using distutils.sysconfig'''
+    import distutils.sysconfig
+    if not 'PYTHON_LIB' in self.argDB:
+      SO = distutils.sysconfig.get_config_var('SO')
+      try:
+        # Look for the shared library
+        lib = os.path.join(distutils.sysconfig.get_config_var('LIBDIR'), distutils.sysconfig.get_config_var('LDLIBRARY'))
+        if not os.path.isfile(lib):
+          lib = os.path.join(distutils.sysconfig.get_config_var('LIBPL'), distutils.sysconfig.get_config_var('LDLIBRARY'))
+        # if .so was not built then need to strip .a off of end
+        if lib[-2:] == '.a': lib = lib[0:-2]
+        # may be stuff after .so like .0, so cannot use splitext()
+        lib = lib.split(SO)[0]+SO
+        self.argDB['PYTHON_LIB'] = lib
+      except TypeError:
+        # A config variable was None, so os.path.join raised; fall back to the archive
+        try:
+          # Try the archive instead
+          lib = lib.split(SO)[0]+'.a'
+          self.argDB['PYTHON_LIB'] = lib
+        except: pass
+      except: pass
+
+    extraLibraries = []
+    # NOTE(review): these two 'not' tests look inverted -- the LDFLAGS/LIBS
+    # values are only read when the config variable is empty or None, so
+    # nothing useful is ever appended (and LIBS of None would raise on
+    # .split()); confirm whether 'if not' should be 'if'
+    if not distutils.sysconfig.get_config_var('LDFLAGS'):
+      extraLibraries.append(distutils.sysconfig.get_config_var('LDFLAGS'))
+    if not distutils.sysconfig.get_config_var('LIBS'):
+      for lib in distutils.sysconfig.get_config_var('LIBS').split():
+        # Change -l<lib> to lib<lib>.so
+        extraLibraries.append('lib'+lib[2:]+'.so')
+    self.argDB['PYTHON_EXTRA_LIB'] = extraLibraries
+
+    self.extraLibraries = []
+    self.extraLibraries.append(project.ArgumentPath('PYTHON_LIB'))
+    self.extraLibraries.append(project.ArgumentPath('PYTHON_EXTRA_LIB'))
+    return self.extraLibraries
+
+  def isCompiled(self):
+    '''Returns True if source needs to be compiled in order to execute
+       - Always false for Python, which is interpreted'''
+    return 0
+
+  def getInterpreterSuffix(self):
+    '''Return the suffix used for interpreter files (.py)'''
+    return '.py'
+
+  def getServerLibrary(self, package, proj = None, lang = None):
+    '''Server libraries follow the naming scheme: lib<project>-<lang>-<package>-server.a
+       - proj and lang default to this template's project and language'''
+    if proj is None: proj = self.project
+    if lang is None: lang = self.language
+    return project.ProjectPath(os.path.join('lib', 'lib'+proj.getName()+'-'+lang.lower()+'-'+package+'-server.a'), proj.getUrl())
+
+  def getGenericCompileTarget(self, action):
+    '''Python code does not need compilation, so only a C compiler is necessary.
+       Returns (BuildGraph, compiler): a tagger feeding generated C sources into the compiler.'''
+    import build.compile.C
+    outputTag = self.language.lower()+' '+action+' '+self.usingC.language.lower()
+    # NOTE(review): build.fileState is not imported by this module -- presumably
+    # made available through another build.* import; confirm
+    tagger    = build.fileState.GenericTag(self.sourceDB, outputTag, inputTag = self.language.lower()+' '+action, ext = 'c', deferredExt = ['h', 'py'])
+    compiler  = build.compile.C.Compiler(self.sourceDB, self.usingC, inputTag = outputTag)
+    compiler.includeDirs.extend(self.includeDirs)
+    target    = build.buildGraph.BuildGraph()
+    target.addVertex(tagger)
+    target.addEdges(tagger, outputs = [compiler])
+    return (target, compiler)
+
+  def getServerCompileTarget(self, package):
+    '''Python code does not need compilation, so only a C compiler is necessary for the skeleton.'''
+    (target, compiler) = self.getGenericCompileTarget('server '+package)
+    archiveTag    = self.language.lower()+' server library directory'
+    sharedTag     = self.language.lower()+' server shared library'
+    library       = self.getServerLibrary(package)
+    linker        = build.buildGraph.BuildGraph()
+    # Pipeline: archive objects -> consolidate with old archive -> shared link
+    # -> filter archive sets -> remove the compiled objects
+    archiver      = build.processor.DirectoryArchiver(self.sourceDB, self.usingC, 'cp', compiler.output.tag, archiveTag, isSetwise = 1, library = library)
+    consolidator  = build.transform.Consolidator(archiveTag, archiveTag, 'old '+archiveTag)
+    sharedLinker  = build.processor.SharedLinker(self.sourceDB, self.usingC, None, archiveTag, sharedTag, isSetwise = 1, library = library)
+    if not (self.project.getUrl() == 'bk://sidl.bkbits.net/Compiler' and package == 'pythonGenerator'):
+      # Also need pythonGenerator library
+      sharedLinker.extraLibraries.append(self.getServerLibrary('pythonGenerator', proj = self.getInstalledProject('bk://sidl.bkbits.net/Compiler')))
+    sharedLinker.extraLibraries.extend(self.extraLibraries)
+    archiveFilter = build.transform.Filter(archiveTag)
+    linker.addVertex(archiver)
+    linker.addEdges(consolidator, [archiver])
+    linker.addEdges(sharedLinker, [consolidator])
+    linker.addEdges(archiveFilter, [sharedLinker])
+    linker.addEdges(build.transform.Remover(compiler.output.tag), [archiveFilter])
+    target.appendGraph(linker)
+    return target
+
+  def getClientCompileTarget(self):
+    '''Python code does not need compilation, so only a C compiler is necessary for the cartilage.'''
+    (target, compiler) = self.getGenericCompileTarget('client')
+    sharedTag    = self.language.lower()+' client shared library'
+    linker       = build.buildGraph.BuildGraph()
+    sharedLinker = build.processor.SharedLinker(self.sourceDB, self.usingC, None, compiler.output.tag, sharedTag)
+    sharedLinker.extraLibraries.extend(self.extraLibraries)
+    linker.addVertex(sharedLinker)
+    # The compiled objects are deleted once linked into the shared library
+    linker.addEdges(build.transform.Remover(compiler.output.tag), [sharedLinker])
+    target.appendGraph(linker)
+    return target
+
+  def getExecutableCompileTarget(self, program):
+    '''Python code does not need compilation'''
+    return build.buildGraph.BuildGraph()
+
+  def installClient(self):
+    '''Add Python paths for clients to the project'''
+    return self.project.appendPath(self.language, os.path.join(self.project.getRoot(), self.usingSIDL.getClientRootDir(self.language)))
+
+  def installClasses(self, package):
+    '''Register each SIDL class in "package" with its shared implementation library'''
+    for cls in self.usingSIDL.getClasses(package):
+      self.project.addImplementation(cls, os.path.join(self.project.getRoot(), self.usingSIDL.getServerLibrary(self.project.getName(), self.language, package, isShared = 1)), self.language)
+    return
+
+  def installServer(self, package):
+    '''Add Python paths for servers to the project'''
+    self.installClasses(package)
+    return self.project.appendPath(self.language, os.path.join(self.project.getRoot(), self.usingSIDL.getServerRootDir(self.language, package)))

Added: long/3D/Gale/trunk/python/BuildSystem/build/templates/usingSIDL.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/templates/usingSIDL.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/templates/usingSIDL.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,105 @@
+import base
+import project
+
+import os
+
+class UsingSIDL (base.Base):
+  '''Tracks which SIDL client and server languages are built for a project and
+     provides the naming conventions and SIDL-runtime lookup helpers.'''
+  def __init__(self, sourceDB, project):
+    base.Base.__init__(self)
+    import build.compile.SIDL
+    self.sourceDB        = sourceDB
+    self.project         = project
+    self.serverLanguages = build.compile.SIDL.SIDLLanguageList()
+    self.clientLanguages = build.compile.SIDL.SIDLLanguageList()
+    # Languages in which the client must be linked with the server
+    self.staticPackages  = []
+    return
+
+  def addServer(self, lang):
+    '''Designate that a server for lang should be built, which also implies the client'''
+    # self.argDB is not set in __init__; presumably provided by base.Base -- confirm
+    if lang in self.argDB['installedLanguages']:
+      if not lang in self.serverLanguages:
+        self.serverLanguages.append(lang)
+      self.addClient(lang)
+    else:
+      self.debugPrint('Language '+lang+' not installed', 2, 'compile')
+    return
+
+  def addClient(self, lang):
+    '''Designate that a client for lang should be built'''
+    if lang in self.argDB['installedLanguages']:
+      if not lang in self.clientLanguages:
+        self.clientLanguages.append(lang)
+    else:
+      self.debugPrint('Language '+lang+' not installed', 2, 'compile')
+    return
+
+  def addStaticPackage(self, package):
+    '''For a static package, the client is statically linked to the server since dynamic loading is not feasible'''
+    self.staticPackages.append(package)
+    return
+
+  def getServerRootDir(self, lang, package):
+    '''Returns a server directory name'''
+    return 'server-'+lang.lower()+'-'+package
+
+  def getClientRootDir(self, lang, root = None):
+    '''Returns a client directory name
+       - The "root" argument is currently unused'''
+    return 'client-'+lang.lower()
+
+  def getServerLibrary(self, projectName, lang, package, isShared = 0):
+    '''Server libraries follow the naming scheme: lib<project>-<lang>-<package>-server.a
+       (with a .so extension instead when isShared is true)'''
+    if isShared:
+      ext = '.so'
+    else:
+      ext = '.a'
+    return os.path.join('lib', 'lib'+projectName+'-'+lang.lower()+'-'+package+'-server'+ext)
+
+  def getRuntimeLanguage(self):
+    '''Return the implementation language for the runtime'''
+    return 'Cxx'
+
+  def getRuntimePackage(self):
+    '''Return the implementation package for the runtime'''
+    return 'sidl'
+
+  def getRuntimeProject(self):
+    '''Return the project associated with the SIDL Runtime
+       - Searches this project first, then any installed projects; raises ImportError if absent'''
+    projects = [self.project]
+    if 'installedprojects' in self.argDB:
+      projects += self.argDB['installedprojects']
+    # NOTE(review): the loop variable shadows the imported "project" module
+    # within this method (harmless here, but confusing)
+    for project in projects:
+      if project.getUrl() == 'bk://sidl.bkbits.net/Runtime':
+        return project
+    raise ImportError('Could not find runtime project')
+
+  def getRuntimeIncludes(self):
+    '''Return the includes for the SIDL Runtime'''
+    proj = self.getRuntimeProject()
+    return [project.ProjectPath(self.getServerRootDir(self.getRuntimeLanguage(), self.getRuntimePackage()), proj.getUrl())]
+
+  def getRuntimeLibraries(self):
+    '''Return the libraries for the SIDL Runtime'''
+    proj = self.getRuntimeProject()
+    return [project.ProjectPath(self.getServerLibrary(proj.getName(), self.getRuntimeLanguage(), self.getRuntimePackage()), proj.getUrl())]
+
+  def getClassesInFile(path):
+    '''Return all the classes present in the SIDL file
+       - Returns [] if the SIDL compiler components cannot be imported or parsing fails'''
+    try:
+      import SIDL.Loader
+      import SIDLLanguage.Parser
+      import ANL.SIDL.ClassFinder
+
+      parser = SIDLLanguage.Parser.Parser(SIDL.Loader.createClass('ANL.SIDLCompilerI.SIDLCompiler'))
+      ast    = parser.parseFile(path)
+      finder = ANL.SIDL.ClassFinder.ClassFinder()
+      ast.accept(finder)
+      return [c.getFullIdentifier() for c in finder.getClasses()]
+    except: pass
+    return []
+  # Pre-2.4 staticmethod idiom (decorator syntax was not yet available)
+  getClassesInFile = staticmethod(getClassesInFile)
+
+  def getClasses(self, package):
+    '''Return all the classes present in the SIDL file for "package"'''
+    return UsingSIDL.getClassesInFile(os.path.join('sidl', package+'.sidl'))
+

Added: long/3D/Gale/trunk/python/BuildSystem/build/transform.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/build/transform.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/build/transform.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,117 @@
+import base
+import build.fileset
+
+class Transform(base.Base):
+  '''This is a generic node in the build graph. It has hooks for processing a FileSet (handleFileSet), or an individual file (handleFile).'''
+  def __init__(self):
+    '''Reset the node'''
+    base.Base.__init__(self)
+    self.reset()
+    return
+
+  def reset(self):
+    '''Clears all state in this node, usually in preparation for another execution'''
+    self.output = build.fileset.FileSet()
+    return
+
+  def addOutputFile(self, f, set = None):
+    '''Add a file to the correct output set
+       - This adds a set with the appropriate metadata if necessary
+       - NOTE(review): despite the default, callers must pass a set; a None
+         "set" would fail at set.isCompatible()'''
+    if set.isCompatible(self.output):
+      return self.output.append(f)
+    # Reuse an existing compatible child set before cloning a new one
+    for child in self.output.children:
+      if set.isCompatible(child):
+        return child.append(f)
+    newSet = set.clone()
+    newSet.append(f)
+    return self.output.children.append(newSet)
+
+  def handleFile(self, f, set = None):
+    '''Process a file which has an optional FileSet associated with it
+       - This default method merely adds the file to an output set'''
+    return self.addOutputFile(f, set)
+
+  def handleFileSet(self, set):
+    '''Process a FileSet
+       - This default method calls handleFile() on each member of the set'''
+    # map() is used here purely for its side effects (Python 2 idiom)
+    map(lambda f: self.handleFile(f, set), set)
+    map(self.handleFileSet, set.children)
+    return self.output
+
+class Filter (Transform):
+  '''A Filter drops from its output every file in sets matching inputTag (files are untouched on disk)'''
+  def __init__(self, inputTag):
+    Transform.__init__(self)
+    # Normalize to a list so a single tag and a tag list are handled uniformly
+    self.inputTag = inputTag
+    if not isinstance(self.inputTag, list):
+      self.inputTag = [self.inputTag]
+    return
+
+  def __str__(self):
+    return 'Filter for '+str(self.inputTag)
+
+  def handleFile(self, f, set):
+    '''Drop files with inputTag'''
+    if set.tag in self.inputTag:
+      return self.output
+    return Transform.handleFile(self, f, set)
+
+class Remover (Transform):
+  '''A Remover deletes from the filesystem every file in sets matching inputTag'''
+  def __init__(self, inputTag = None):
+    Transform.__init__(self)
+    # Normalize to a list; note a default of None becomes [None]
+    self.inputTag = inputTag
+    if not isinstance(self.inputTag, list):
+      self.inputTag = [self.inputTag]
+    return
+
+  def __str__(self):
+    return 'Remover for '+str(self.inputTag)
+
+  def handleFile(self, f, set):
+    '''Remove f from the filesystem and drop it from the output
+       - If inputTag was specified, only handle files with this tag
+       - NOTE(review): after __init__ normalization self.inputTag is always a
+         list, so the "is None" test never fires; a default-constructed
+         Remover only matches sets whose tag is None -- confirm intent'''
+    if self.inputTag is None or set.tag in self.inputTag:
+      import os
+      os.remove(f)
+      return self.output
+    return Transform.handleFile(self, f, set)
+
+class Consolidator (Transform):
+  '''A Consolidator combines every file in sets matching inputTag into a single output set
+     - If oldTag is provided, sets matching this tag are added only if at least one file with inputTag is present'''
+  def __init__(self, inputTag, outputTag, oldTag = []):
+    Transform.__init__(self)
+    self.inputTag   = inputTag
+    self.oldTag     = oldTag
+    if not isinstance(self.inputTag, list):
+      self.inputTag = [self.inputTag]
+    if not isinstance(self.oldTag, list):
+      self.oldTag   = [self.oldTag]
+    self.output.tag = outputTag
+    # NOTE(review): hasOutput and oldOutput are only created when oldTag is
+    # non-empty, yet handleFile() reads self.hasOutput unconditionally on an
+    # inputTag match -- a Consolidator built without oldTag would raise
+    # AttributeError; confirm all callers supply oldTag
+    if len(self.oldTag):
+      self.oldOutput     = build.fileset.FileSet()
+      self.oldOutput.tag = self.oldTag[0]
+      self.hasOutput     = 0
+      self.output.children.append(self.oldOutput)
+    return
+
+  def __str__(self):
+    return 'Consolidating '+str(self.inputTag)+'('+str(self.oldTag)+') into '+self.output.tag
+
+  def handleFile(self, f, set):
+    '''Put all files matching inputTag in the output set'''
+    if self.inputTag is None or set.tag in self.inputTag:
+      self.output.append(f)
+      # First new file seen: fold the accumulated old files into the main output
+      if not self.hasOutput:
+        self.hasOutput = 1
+        self.output.children.remove(self.oldOutput)
+        self.output.extend(self.oldOutput)
+      return self.output
+    elif set.tag in self.oldTag:
+      if self.hasOutput:
+        self.output.append(f)
+      else:
+        self.oldOutput.append(f)
+      return self.output
+    return Transform.handleFile(self, f, set)

Added: long/3D/Gale/trunk/python/BuildSystem/checkdlllibs.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/checkdlllibs.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/checkdlllibs.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+import user
+import importer
+
+import os
+import sys
+import SIDL.Loader
+
+def checkDLLLibs():
+  '''Attempt to load every .so found on the SIDL loader search path
+     - A smoke test that each DLL's link-time dependencies resolve'''
+  # The search path is a single ';'-separated string
+  dirs = SIDL.Loader.getSearchPath()
+  dirs = dirs.split(';')
+  for dir in dirs:
+    if os.path.isdir(dir):
+      for f in os.listdir(dir):
+        if f and os.path.splitext(f)[1] == '.so':
+          print 'Loading '+os.path.join(dir, f)
+          SIDL.Loader.loadLibrary(os.path.join(dir, f))
+
+    
+if __name__ ==  '__main__':
+  # Command-line entry point: the script takes no arguments
+  if len(sys.argv) > 1: sys.exit('Usage: checkdlllibs.py')
+  checkDLLLibs()
+


Property changes on: long/3D/Gale/trunk/python/BuildSystem/checkdlllibs.py
___________________________________________________________________
Name: svn:executable
   + *
Name: svn:mime-type
   + text/script

Added: long/3D/Gale/trunk/python/BuildSystem/client-python/cygwinpath.c
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/client-python/cygwinpath.c	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/client-python/cygwinpath.c	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,46 @@
+#include <Python.h>
+#include "cygwinpath_Module.h"
+
+#ifdef HAVE_CYGWIN
+
+#include <sys/cygwin.h>
+#include <sys/param.h>
+/* Convert a Cygwin POSIX path to an absolute Win32 path via the Cygwin
+   runtime; returns the converted path as a Python string. */
+static PyObject *cygwinpath_convertToFullWin32Path(PyObject *self, PyObject *args) {
+  char *cygpath;
+  char  winpath[MAXPATHLEN];
+
+  if (!PyArg_ParseTuple(args, (char *) "s", &cygpath)) {
+    return NULL;
+  }
+  cygwin_conv_to_full_win32_path(cygpath, winpath);
+  return(Py_BuildValue((char *) "s", winpath));
+}
+
+#else
+
+/* Non-Cygwin fallback: return the input path unchanged.
+   The keyword-dict argument is accepted (matching METH_KEYWORDS in the
+   method table) but ignored. */
+static PyObject *
+cygwinpath_convertToFullWin32Path(
+  PyObject *_self,
+  PyObject *_args,
+  PyObject *_kwdict
+)
+{
+  char *cygpath;
+
+  if (!PyArg_ParseTuple(_args, (char *) "s", &cygpath)) {
+    return NULL;
+  }
+  return(Py_BuildValue((char *) "s", cygpath));
+}
+
+#endif /* HAVE_CYGWIN */
+
+/* Module method table.
+   NOTE(review): the method is registered METH_VARARGS | METH_KEYWORDS, so the
+   interpreter invokes it with (self, args, kwargs); the HAVE_CYGWIN variant
+   earlier in this file takes only (self, args) -- confirm this calling
+   convention mismatch is intended. */
+static PyMethodDef _cygwinpath_methods[] = {
+  {(char *) "convertToFullWin32Path", (PyCFunction) cygwinpath_convertToFullWin32Path, (METH_VARARGS | METH_KEYWORDS), (char *) "Convert a cygwin path to its WIN32 path.\n"},
+  {NULL, NULL}
+};
+
+/* Python 2 module initialization entry point (called on "import cygwinpath") */
+void initcygwinpath(void);
+void initcygwinpath(void) {
+  (void) Py_InitModule3((char *) "cygwinpath", _cygwinpath_methods, (char *) "Path conversion for Cygwin\n");
+}

Modified: long/3D/Gale/trunk/python/BuildSystem/config/base.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/base.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/base.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -61,6 +61,9 @@
 
 import os
 
+class ConfigureSetupError(Exception):
+  pass
+
 class Configure(script.Script):
   def __init__(self, framework):
     script.Script.__init__(self, framework.clArgs, framework.argDB)
@@ -353,7 +356,7 @@
       raise RuntimeError('Invalid language: '+language)
     return codeStr
 
-  def preprocess(self, codeStr):
+  def preprocess(self, codeStr, timeout = 600.0):
     def report(command, status, output, error):
       if error or status:
         self.framework.log.write('Possible ERROR while running preprocessor: '+error)
@@ -368,7 +371,7 @@
     f = file(self.compilerSource, 'w')
     f.write(self.getCode(codeStr))
     f.close()
-    (out, err, ret) = Configure.executeShellCommand(command, checkCommand = report, log = self.framework.log)
+    (out, err, ret) = Configure.executeShellCommand(command, checkCommand = report, timeout = timeout, log = self.framework.log)
     if os.path.isfile(self.compilerDefines): os.remove(self.compilerDefines)
     if os.path.isfile(self.compilerFixes): os.remove(self.compilerFixes)
     if os.path.isfile(self.compilerSource): os.remove(self.compilerSource)
@@ -378,10 +381,12 @@
     '''Return the contents of stdout when preprocessing "codeStr"'''
     return self.preprocess(codeStr)[0]
 
-  def checkPreprocess(self, codeStr):
+  def checkPreprocess(self, codeStr, timeout = 600.0):
     '''Return True if no error occurred
        - An error is signaled by a nonzero return code, or output on stderr'''
-    (out, err, ret) = self.preprocess(codeStr)
+    (out, err, ret) = self.preprocess(codeStr, timeout = timeout)
+    # PGI dumps the filename on stderr but returns a 0 error code
+    if err =='conftest.c:': err = ''
     err = self.framework.filterPreprocessOutput(err)
     return not ret and not len(err)
 
@@ -509,9 +514,9 @@
         if defaultOutputArg in self.framework.argDB:
           return (self.framework.argDB[defaultOutputArg], 0)
         else:
-          raise RuntimeError('Must give a default value for '+defaultOutputArg+' since executables cannot be run')
+          raise ConfigureSetupError('Must give a default value for '+defaultOutputArg+' since executables cannot be run')
       else:
-        raise RuntimeError('Running executables on this system is not supported')
+        raise ConfigureSetupError('Running executables on this system is not supported')
     cleanup = cleanup and self.framework.doCleanup
     if executor:
       command = executor+' ./'+self.linkerObj

Modified: long/3D/Gale/trunk/python/BuildSystem/config/compilerOptions.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/compilerOptions.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/compilerOptions.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -17,7 +17,7 @@
     # GNU gcc
     if config.setCompilers.Configure.isGNU(compiler):
       if bopt == '':
-        flags.extend(['-Wall', '-Wshadow', '-Wwrite-strings', '-Wno-long-double', '-fomit-frame-pointer', '-Wno-strict-aliasing'])
+        flags.extend(['-Wall', '-Wwrite-strings', '-Wno-long-double', '-fomit-frame-pointer', '-Wno-strict-aliasing'])
       elif bopt == 'g':
         if self.framework.argDB['with-gcov']:
           flags.extend(['-fprofile-arcs', '-ftest-coverage'])
@@ -88,11 +88,12 @@
     # GNU g++
     if config.setCompilers.Configure.isGNU(compiler):
       if bopt == '':
-        flags.extend(['-Wall','-Wshadow', '-Wwrite-strings', '-Wno-long-double', '-fomit-frame-pointer', '-Wno-strict-aliasing'])
+        flags.extend(['-Wall', '-Wwrite-strings', '-Wno-long-double', '-fomit-frame-pointer', '-Wno-strict-aliasing'])
       elif bopt in ['g']:
         if self.framework.argDB['with-gcov']:
           flags.extend(['-fprofile-arcs', '-ftest-coverage'])
-        flags.append('-g3')
+        # -g3 causes an as SEGV on OSX
+        flags.append('-g')
       elif bopt in ['O']:
         if os.environ.has_key('USER'):
           flags.append('-O')
@@ -133,9 +134,7 @@
       # Windows Microsoft
       elif compiler.find('win32fe cl') >= 0:
         if bopt == '':
-          flags.append('-GR')
-          if not self.addCompilerFlag('-EHsc'):
-            self.addCompilerFlag('-GX')
+          flags.extend(['-GR','-GX','-EHsc']) # either GX or EHsc should be used.
         elif bopt == 'g':
           flags.extend(['-MT','-Z7','-Zm200'])
         elif bopt == 'O':

Modified: long/3D/Gale/trunk/python/BuildSystem/config/compilers.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/compilers.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/compilers.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -763,7 +763,7 @@
     # check these monster libraries work from C++
     if hasattr(self.setCompilers, 'CXX'):
       self.logPrint('Check that Fortran libraries can be used from C++', 4, 'compilers')
-      self.setCompilers.LIBS = oldLibs+' '+' '.join([self.libraries.getLibArgument(lib) for lib in self.flibs])
+      self.setCompilers.LIBS = ' '.join([self.libraries.getLibArgument(lib) for lib in self.flibs])+' '+oldLibs
       try:
         self.setCompilers.checkCompiler('Cxx')
         self.logPrint('Fortran libraries can be used from C++', 4, 'compilers')
@@ -856,8 +856,9 @@
         f90Guess = 'intel8'
       elif self.setCompilers.vendor in ['lahaye', 'nag']:
         f90Guess = 'nag'
-      elif self.setCompilers.vendor == 'portland':
-        f90Guess = 'pgi'
+##    This interface is not finished
+##      elif self.setCompilers.vendor == 'portland':
+##        f90Guess = 'pgi'
       elif self.setCompilers.vendor == 'sgi':
         f90Guess = 'IRIX'
       elif self.setCompilers.vendor == 'solaris':

Modified: long/3D/Gale/trunk/python/BuildSystem/config/framework.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/framework.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/framework.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -123,6 +123,59 @@
           dirs.extend(self.listDirs(os.path.join(base, dir),rest ))            
     return dirs
 
+  def getArchitecture(self):
+    import sys
+
+    auxDir = None
+    searchDirs = [os.path.join(self.root, 'packages')] + sys.path
+    for d in searchDirs:
+      if os.path.isfile(os.path.join(d, 'config.sub')):
+        auxDir      = d
+        configSub   = os.path.join(auxDir, 'config.sub')
+        configGuess = os.path.join(auxDir, 'config.guess')
+        break
+    if auxDir is None:
+      self.logPrintBox('Unable to locate config.sub in '+str(searchDirs)+'.\nYour BuildSystem installation is incomplete.\n Get BuildSystem again', 'screen')
+      return ('Unknown', 'Unknown', sys.platform)
+    try:
+      host   = config.base.Configure.executeShellCommand(self.shell+' '+configGuess, log = self.log)[0]
+      output = config.base.Configure.executeShellCommand(self.shell+' '+configSub+' '+host, log = self.log)[0]
+    except RuntimeError, e:
+      fd = open(configGuess)
+      data = fd.read()
+      fd.close()
+      if data.find('\r\n') >= 0:
+        raise RuntimeError('''It appears BuildSystem.tar.gz is uncompressed on Windows (perhaps with Winzip)
+          and files copied over to Unix/Linux. Windows introduces LF characters which are
+          inappropriate on other systems. Please use gunzip/tar on the install machine.\n''')
+      raise RuntimeError('Unable to determine host type using '+configSub+': '+str(e))
+    m = re.match(r'^(?P<cpu>[^-]*)-(?P<vendor>[^-]*)-(?P<os>.*)$', output)
+    if not m:
+      raise RuntimeError('Unable to parse output of '+configSub+': '+output)
+    return (m.group('cpu'), m.group('vendor'), m.group('os'))
+
+  def getHostCPU(self):
+    if not hasattr(self, '_host_cpu'):
+      return self.argDB['with-host-cpu']
+    return self._host_cpu
+  def setHostCPU(self, cpu):
+    self._host_cpu = cpu
+  host_cpu = property(getHostCPU, setHostCPU, doc = 'Machine CPU')
+  def getHostVendor(self):
+    if not hasattr(self, '_host_vendor'):
+      return self.argDB['with-host-vendor']
+    return self._host_vendor
+  def setHostVendor(self, vendor):
+    self._host_vendor = vendor
+  host_vendor = property(getHostVendor, setHostVendor, doc = 'Machine Vendor')
+  def getHostOS(self):
+    if not hasattr(self, '_host_os'):
+      return self.argDB['with-host-os']
+    return self._host_os
+  def setHostOS(self, os):
+    self._host_os = os
+  host_os = property(getHostOS, setHostOS, doc = 'Machine OS')
+
   def setupHelp(self, help):
     import nargs
 
@@ -141,7 +194,9 @@
     if list: searchdirs.append(list[-1])
     list = self.listDirs('/opt/','intel_fc_[0-9.]*/bin')
     if list: searchdirs.append(list[-1])
-    
+
+    host_cpu, host_vendor, host_os = self.getArchitecture()
+
     help.addArgument('Framework', '-configModules',       nargs.Arg(None, None, 'A list of Python modules with a Configure class'))
     help.addArgument('Framework', '-ignoreCompileOutput', nargs.ArgBool(None, 1, 'Ignore compiler output'))
     help.addArgument('Framework', '-ignoreLinkOutput',    nargs.ArgBool(None, 1, 'Ignore linker output'))
@@ -150,7 +205,11 @@
     help.addArgument('Framework', '-with-alternatives',   nargs.ArgBool(None, 0, 'Provide a choice among alternative package installations'))
     help.addArgument('Framework', '-search-dirs',         nargs.Arg(None, searchdirs, 'A list of directories used to search for executables'))
     help.addArgument('Framework', '-package-dirs',        nargs.Arg(None, packagedirs, 'A list of directories used to search for packages'))
+    help.addArgument('Framework', '-with-external-packages-dir=<dir>', nargs.Arg(None, None, 'Location to install downloaded packages'))
     help.addArgument('Framework', '-with-batch',          nargs.ArgBool(None, 0, 'Machine uses a batch system to submit jobs'))
+    help.addArgument('Framework', '-with-host-cpu',       nargs.Arg(None, host_cpu,    'Machine CPU'))
+    help.addArgument('Framework', '-with-host-vendor',    nargs.Arg(None, host_vendor, 'Machine vendor'))
+    help.addArgument('Framework', '-with-host-os',        nargs.Arg(None, host_os,     'Machine OS'))
     return help
 
   def getCleanup(self):
@@ -445,7 +504,6 @@
   def substituteName(self, match, prefix = None):
     '''Return the substitution value for a given name, or return "@name_UNKNOWN@"'''
     name = match.group('name')
-
     if self.subst.has_key(name):
       return self.subst[name]
     elif self.argSubst.has_key(name):
@@ -475,15 +533,15 @@
   def substituteFile(self, inName, outName):
     '''Carry out substitution on the file "inName", creating "outName"'''
     inFile  = file(inName)
-    if not os.path.exists(os.path.dirname(outName)):
-      if(os.path.dirname(outName)):
+    if os.path.dirname(outName):
+      if not os.path.exists(os.path.dirname(outName)):
         os.makedirs(os.path.dirname(outName))
     outFile = file(outName, 'w')
     for line in inFile.xreadlines():
       outFile.write(self.substRE.sub(self.substituteName, line))
     outFile.close()
     inFile.close()
-#     self.actions.addArgument('Framework', 'Substitution', inName+' was substituted to produce '+outName)
+#    self.actions.addArgument('Framework', 'Substitution', inName+' was substituted to produce '+outName)
     return
 
   def substitute(self):
@@ -746,6 +804,13 @@
     self.log.write(('='*80)+'\n')
     return
 
+  def configureExternalPackagesDir(self):
+    if 'with-external-packages-dir' in self.argDB:
+      self.externalPackagesDir = self.argDB['with-external-packages-dir']
+    else:
+      self.externalPackagesDir = None
+    return
+
   def addBatchInclude(self, includes):
     '''Add an include or a list of includes to the batch run'''
     if not isinstance(includes, list):
@@ -804,6 +869,7 @@
     self.setup()
     self.outputBanner()
     self.updateDependencies()
+    self.executeTest(self.configureExternalPackagesDir)
     for child in graph.DirectedGraph.topologicalSort(self.childGraph):
       if not hasattr(child, '_configured'):
         child.configure()

Modified: long/3D/Gale/trunk/python/BuildSystem/config/headers.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/headers.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/headers.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -20,20 +20,11 @@
        - Otherwise return -I<include>'''
     if not include:
       return ''
+    include = include.replace(' ', '\\ ')
     if include[0] == '-':
       return include
     return '-I'+include
 
-
-  def getCPPPATHArgument(self, include):
-    '''Return the proper CPPPATH argument for the given filename
-       - If starts with -I then strip it off
-       - Otherwise, return it unchanged'''
-    if include.startswith('-I'):
-      return include[2:]
-    return include
-
-
   def toString(self,includes):
     '''Converts a list of includes to a string suitable for a compiler'''
     return ' '.join([self.getIncludeArgument(include) for include in includes])
@@ -53,14 +44,14 @@
       self.addDefine(self.getDefineName(header), found)
     return found
 
-  def checkInclude(self, incl, hfiles, otherIncludes = []):
+  def checkInclude(self, incl, hfiles, otherIncludes = [], timeout = 600.0):
     '''Checks if a particular include file can be found along particular include paths'''
     if not isinstance(hfiles, list):
       hfiles = [hfiles]
     for hfile in hfiles:
       oldFlags = self.compilers.CPPFLAGS
       self.compilers.CPPFLAGS += ' '+' '.join([self.getIncludeArgument(inc) for inc in incl+otherIncludes])
-      found = self.checkPreprocess('#include <' +hfile+ '>\n')
+      found = self.checkPreprocess('#include <' +hfile+ '>\n', timeout = timeout)
       self.compilers.CPPFLAGS = oldFlags
       if not found: return 0
     self.framework.log.write('Found header files ' +str(hfiles)+ ' in '+str(incl)+'\n')

Modified: long/3D/Gale/trunk/python/BuildSystem/config/libraries.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/libraries.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/libraries.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -24,41 +24,6 @@
     self.headers      = framework.require('config.headers',      self)
     return
 
-  def getLIBPATHArgument(self, library):
-    '''Return the path, if any, to add to the list of paths for the
-    linker to search for.  This is used when outputing the list of
-    libraries to SCons.
-    - If the path is empty, starts with -l, or is "lib"<name>, return nothing.
-    - If it is -L<path>, return <path>
-    - If it is an absolute path, return the parent directory
-    - Otherwise, return nothing
-    '''
-    if not library or library.startswith('-l') or library.startswith('lib'):
-      return ''
-    if library.startswith('-L'):
-      return library[2:]
-    if os.path.isabs(library):
-      return os.path.dirname(library)
-    return ''
-
-  def getLIBSArgument(self, library):
-    '''Return the library, if any, to add to the list of libraries for the
-    linker to search for.  This is used when outputing the list of
-    libraries to SCons.
-    - If the path is empty or starts with -L return nothing.
-    - If it is -l<name>, return <name>
-    - If it is "lib"<name> or an absolute path, return getLibName(library)
-    - Otherwise, return nothing
-    '''
-    if not library or library.startswith('-L'):
-      return ''
-    if library.startswith('-l'):
-      return library[2:]
-    if library.startswith('lib') or os.path.isabs(library):
-      return self.getLibName(library)
-    return ''
-
-
   def getLibArgument(self, library):
     '''Return the proper link line argument for the given filename library
       - If the path is empty, return it unchanged
@@ -79,18 +44,19 @@
     if library.lstrip()[0] == '-':
       return library
     if len(library) > 3 and library[-4:] == '.lib':
-      return library
+      return library.replace(' ', '\\ ')
     if os.path.basename(library).startswith('lib'):
       name = self.getLibName(library)
       if ((len(library) > 2 and library[1] == ':') or os.path.isabs(library)):
         flagName  = self.language[-1]+'SharedLinkerFlag'
         flagSubst = self.language[-1].upper()+'_LINKER_SLFLAG'
+        dirname   = os.path.dirname(library).replace(' ', '\\ ')
         if hasattr(self.setCompilers, flagName) and not getattr(self.setCompilers, flagName) is None:
-          return getattr(self.setCompilers, flagName)+os.path.dirname(library)+' -L'+os.path.dirname(library)+' -l'+name
+          return getattr(self.setCompilers, flagName)+dirname+' -L'+dirname+' -l'+name
         if flagSubst in self.framework.argDB:
-          return self.framework.argDB[flagSubst]+os.path.dirname(library)+' -L'+os.path.dirname(library)+' -l'+name
+          return self.framework.argDB[flagSubst]+dirname+' -L'+dirname+' -l'+name
         else:
-          return '-L'+os.path.dirname(library)+' -l'+name
+          return '-L'+dirname+' -l'+name
       else:
         return '-l'+name
     if os.path.splitext(library)[1] == '.so':
@@ -239,7 +205,7 @@
     self.headers.check('dlfcn.h')
     return
 
-  def checkShared(self, includes, initFunction, checkFunction, finiFunction = None, checkLink = None, libraries = [], initArgs = '&argc, &argv', boolType = 'int', noCheckArg = 0, executor = None):
+  def checkShared(self, includes, initFunction, checkFunction, finiFunction = None, checkLink = None, libraries = [], initArgs = '&argc, &argv', boolType = 'int', noCheckArg = 0, defaultArg = '', executor = None):
     '''Determine whether a library is shared
        - initFunction(int *argc, char *argv[]) is called to initialize some static data
     - checkFunction(int *check) is called to verify that the static data were set properly
@@ -362,7 +328,7 @@
     oldLibs = self.setCompilers.LIBS
     if self.haveLib('dl'):
       self.setCompilers.LIBS += ' -ldl'
-    if self.checkRun(defaultIncludes, body, executor = executor):
+    if self.checkRun(defaultIncludes, body, defaultArg = defaultArg, executor = executor):
       isShared = 1
     self.setCompilers.LIBS = oldLibs
     if os.path.isfile('lib1.so') and self.framework.doCleanup: os.remove('lib1.so')

Added: long/3D/Gale/trunk/python/BuildSystem/config/package.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/package.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/package.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,384 @@
+from __future__ import generators
+import config.base
+
+import os
+
+class Package(config.base.Configure):
+  def __init__(self, framework):
+    config.base.Configure.__init__(self, framework)
+    self.headerPrefix     = 'PETSC'
+    self.substPrefix      = 'PETSC'
+    self.arch             = None # The architecture identifier
+    self.externalPackagesDir = os.path.abspath('externalpackages')
+    # These are derived by the configure tests
+    self.found            = 0
+    self.setNames()
+    self.include          = []
+    self.lib              = []
+    self.dlib             = []   # all libraries in this package and all those it depends on
+    self.directory        = None # path of the package installation point; for example /usr/local or /home/bsmith/mpich-2.0.1
+    self.version          = ''
+    # These are specified for the package
+    self.required         = 0    # 1 means the package is required
+    self.download         = []   # urls where repository or tarballs may be found
+    self.deps             = []   # other packages whose dlib or include we depend on, usually we also use self.framework.require()
+    self.defaultLanguage  = 'C'  # The language in which to run tests
+    self.liblist          = [[]] # list of libraries we wish to check for (override with your own generateLibraryList())
+    self.extraLib         = []   # additional libraries needed to link
+    self.includes         = []   # headers to check for
+    self.functions        = []   # functions we wish to check for in the libraries
+    self.functionsFortran = 0    # 1 means the above symbol is a Fortran symbol, so name-mangling is done
+    self.functionsCxx     = [0, '', ''] # 1 means the above symbol is a C++ symbol, so name-mangling with prototype/call is done
+    self.cxx              = 0    # 1 means requires C++
+    self.fc               = 0    # 1 means requires fortran
+    self.needsMath        = 0    # 1 means requires the system math library
+    self.libdir           = 'lib'     # location of libraries in the package directory tree
+    self.includedir       = 'include' # location of includes in the package directory tree
+    self.license          = None # optional license text
+    self.excludedDirs     = []   # list of directory names that could be false positives, SuperLU_DIST when looking for SuperLU
+    self.archIndependent  = 0    # 1 means the install directory does not incorporate the ARCH name
+    return
+    
+  def __str__(self):
+    '''Prints the location of the packages includes and libraries'''
+    output = ''
+    if self.found:
+      output = self.name+':\n'
+      if self.version: output += '  Version:  '+self.version+'\n'
+      if self.include: output += '  Includes: '+str(self.include)+'\n'
+      if self.lib:     output += '  Library:  '+str(self.lib)+'\n'
+    return output
+
+  def setupDependencies(self, framework):
+    config.base.Configure.setupDependencies(self, framework)
+    self.setCompilers  = framework.require('config.setCompilers', self)
+    self.compilers     = framework.require('config.compilers', self)
+    self.headers       = framework.require('config.headers', self)
+    self.libraries     = framework.require('config.libraries', self)
+    self.programs      = framework.require('config.programs', self)
+    self.sourceControl = framework.require('config.sourceControl',self)
+    return
+
+  def setupHelp(self,help):
+    '''Prints help messages for the package'''
+    import nargs
+    help.addArgument(self.PACKAGE,'-with-'+self.package+'=<bool>',nargs.ArgBool(None,self.required,'Indicate if you wish to test for '+self.name))
+    help.addArgument(self.PACKAGE,'-with-'+self.package+'-dir=<dir>',nargs.ArgDir(None,None,'Indicate the root directory of the '+self.name+' installation'))
+    if self.download and not self.download[0] == 'redefine':
+      help.addArgument(self.PACKAGE, '-download-'+self.package+'=<no,yes,ifneeded,filename>', nargs.ArgDownload(None, 0, 'Download and install '+self.name))
+    help.addArgument(self.PACKAGE,'-with-'+self.package+'-include=<dir>',nargs.ArgDir(None,None,'Indicate the directory of the '+self.name+' include files'))
+    help.addArgument(self.PACKAGE,'-with-'+self.package+'-lib=<libraries: e.g. [/Users/..../libparmetis.a,...]>',nargs.ArgLibrary(None,None,'Indicate the '+self.name+' libraries'))    
+    return
+
+  def setNames(self):
+    '''Setup various package names
+    name:         The module name (usually the filename)
+    package:      The lowercase name
+    PACKAGE:      The uppercase name
+    downloadname: Name for download option and file (usually name)
+    '''
+    import sys
+    if hasattr(sys.modules.get(self.__module__), '__file__'):
+      self.name       = os.path.splitext(os.path.basename(sys.modules.get(self.__module__).__file__))[0]
+    else:
+      self.name       = 'DEBUGGING'
+    self.PACKAGE      = self.name.upper()
+    self.package      = self.name.lower()
+    self.downloadname = self.name
+    return
+
+  def getDefaultLanguage(self):
+    '''The language in which to run tests'''
+    if hasattr(self, 'languageProvider'):
+      if hasattr(self.languageProvider, 'defaultLanguage'):
+        return self.languageProvider.defaultLanguage
+      elif hasattr(self.languageProvider, 'clanguage'):
+        return self.languageProvider.clanguage
+    return self._defaultLanguage
+  def setDefaultLanguage(self, defaultLanguage):
+    '''The language in which to run tests'''
+    self._defaultLanguage = defaultLanguage
+    return
+  defaultLanguage = property(getDefaultLanguage, setDefaultLanguage, doc = 'The language in which to run tests')
+
+  def getArch(self):
+    '''The architecture identifier'''
+    if hasattr(self, 'archProvider'):
+      if hasattr(self.archProvider, 'arch'):
+        return self.archProvider.arch
+    return self._arch
+  def setArch(self, arch):
+    '''The architecture identifier'''
+    self._arch = arch
+    return
+  arch = property(getArch, setArch, doc = 'The architecture identifier')
+
+  def getExternalPackagesDir(self):
+    '''The directory for downloaded packages'''
+    if not self.framework.externalPackagesDir is None:
+      packages = os.path.abspath('externalpackages')
+      return self.framework.externalPackagesDir
+    return self._externalPackagesDir
+  def setExternalPackagesDir(self, externalPackagesDir):
+    '''The directory for downloaded packages'''
+    self._externalPackagesDir = externalPackagesDir
+    return
+  externalPackagesDir = property(getExternalPackagesDir, setExternalPackagesDir, doc = 'The directory for downloaded packages')
+
+  def getSearchDirectories(self):
+    '''By default, do not search any particular directories'''
+    return []
+
+  def getInstallDir(self):
+    if self.archIndependent:
+      return os.path.abspath(self.Install())
+    return os.path.abspath(os.path.join(self.Install(), self.arch))
+
+  def generateLibList(self, directory):
+    '''Generates full path list of libraries from self.liblist'''
+    alllibs = []
+    for libSet in self.liblist:
+      libs = []
+      # add full path only to the first library in the list
+      if not self.libdir == directory and len(libSet) > 0:
+        libs.append(os.path.join(directory, libSet[0]))
+      for library in libSet[1:]:
+        # if the library name doesn't start with lib - then add the fullpath
+        if library.startswith('lib') or self.libdir == directory:
+          libs.append(library)
+        else:
+          libs.append(os.path.join(directory, library))
+      libs.extend(self.extraLib)
+      alllibs.append(libs)
+    return alllibs
+
+  def generateGuesses(self):
+    d = self.checkDownload(1)
+    if d:
+      for l in self.generateLibList(os.path.join(d, self.libdir)):
+        yield('Download '+self.PACKAGE, d, l, os.path.join(d, self.includedir))
+      raise RuntimeError('Downloaded '+self.package+' could not be used. Please check install in '+d+'\n')
+
+    if 'with-'+self.package+'-dir' in self.framework.argDB:     
+      d = self.framework.argDB['with-'+self.package+'-dir']
+      for l in self.generateLibList(os.path.join(d, self.libdir)):
+        yield('User specified root directory '+self.PACKAGE, d, l, os.path.join(d, self.includedir))
+      if 'with-'+self.package+'-include' in self.framework.argDB:
+        raise RuntimeError('Do not set --with-'+self.package+'-include if you set --with-'+self.package+'-dir')
+      if 'with-'+self.package+'-lib' in self.framework.argDB:
+        raise RuntimeError('Do not set --with-'+self.package+'-lib if you set --with-'+self.package+'-dir')
+      raise RuntimeError('--with-'+self.package+'-dir='+self.framework.argDB['with-'+self.package+'-dir']+' did not work')
+
+    if 'with-'+self.package+'-include' in self.framework.argDB and not 'with-'+self.package+'-lib' in self.framework.argDB:
+      raise RuntimeError('If you provide --with-'+self.package+'-include you must also supply with-'+self.package+'-lib\n')
+    if 'with-'+self.package+'-lib' in self.framework.argDB and not 'with-'+self.package+'-include' in self.framework.argDB:
+      raise RuntimeError('If you provide --with-'+self.package+'-lib you must also supply with-'+self.package+'-include\n')
+    if 'with-'+self.package+'-include-dir' in self.framework.argDB:
+        raise RuntimeError('Use --with-'+self.package+'-include; not --with-'+self.package+'-include-dir') 
+
+    if 'with-'+self.package+'-include' in self.framework.argDB and 'with-'+self.package+'-lib' in self.framework.argDB:
+      # hope that package root is one level above include directory
+      d = os.path.dirname(self.framework.argDB['with-'+self.package+'-include'])
+      inc = self.framework.argDB['with-'+self.package+'-include']
+      libs = self.framework.argDB['with-'+self.package+'-lib']
+      if not isinstance(libs, list): libs = [libs]
+      libs = [os.path.abspath(l) for l in libs]
+      yield('User specified '+self.PACKAGE+' libraries', d, libs, os.path.abspath(inc))
+      raise RuntimeError('--with-'+self.package+'-lib='+str(self.framework.argDB['with-'+self.package+'-lib'])+' and \n'+\
+                         '--with-'+self.package+'-include='+str(self.framework.argDB['with-'+self.package+'-include'])+' did not work') 
+
+    for d in self.getSearchDirectories():
+      for l in self.generateLibList(os.path.join(d, self.libdir)):
+        if isinstance(self.includedir, list):
+          includedir = ([inc for inc in self.includedir if os.path.isabs(inc)] +
+                        [os.path.join(d, inc) for inc in self.includedir if not os.path.isabs(inc)])
+        elif d:
+          includedir = os.path.join(d, self.includedir)
+        else:
+          includedir = ''
+        yield('Package specific search directory '+self.PACKAGE, d, l, includedir)
+
+    d = self.checkDownload(requireDownload = 0)
+    if d:
+      for l in self.generateLibList(os.path.join(d, self.libdir)):
+        yield('Download '+self.PACKAGE, d, l, os.path.join(d, self.includedir))
+      raise RuntimeError('Downloaded '+self.package+' could not be used. Please check install in '+self.getInstallDir()+'\n')
+
+    raise RuntimeError('You must specify a path for '+self.name+' with --with-'+self.package+'-dir=<directory>')
+
+  def checkDownload(self, requireDownload = 1):
+    '''Check if we should download the package, returning the install directory or the empty string indicating installation'''
+    if not self.download:
+      return ''
+    downloadPackage = 0
+    if requireDownload and isinstance(self.framework.argDB['download-'+self.downloadname.lower()], str):
+      self.download = ['file://'+os.path.abspath(self.framework.argDB['download-'+self.downloadname.lower()])]
+      downloadPackage = 1
+    elif self.framework.argDB['download-'+self.downloadname.lower()] == 1 and requireDownload:
+      downloadPackage = 1
+    elif self.framework.argDB['download-'+self.downloadname.lower()] == 2 and not requireDownload:
+      downloadPackage = 1
+
+    if downloadPackage:
+      if not self.download:
+        raise RuntimeError('URL missing for package'+self.package+'.\n')
+      if self.license and not os.path.isfile(os.path.expanduser(os.path.join('~','.'+self.package+'_license'))):
+        self.framework.logClear()
+        self.logPrint("**************************************************************************************************", debugSection='screen')
+        self.logPrint('You must register to use '+self.downloadname+' at '+self.license, debugSection='screen')
+        self.logPrint('    Once you have registered, config/configure.py will continue and download and install '+self.downloadname+' for you', debugSection='screen')
+        self.logPrint("**************************************************************************************************\n", debugSection='screen')
+        fd = open(os.path.expanduser(os.path.join('~','.'+self.package+'_license')),'w')
+        fd.close()
+      return self.getInstallDir()
+    return ''
+
+  def matchExcludeDir(self,dir):
+    '''Check if the dir matches something in the excluded directory list'''
+    for exdir in self.excludedDirs:
+      if dir.startswith(exdir):
+        return 1
+    return 0
+
+  def getDir(self, retry = 1):
+    '''Find the directory containing the package'''
+    packages = self.externalPackagesDir
+    if not os.path.isdir(packages):
+      os.mkdir(packages)
+      self.framework.actions.addArgument('Framework', 'Directory creation', 'Created the external packages directory: '+packages)
+    Dir = None
+    for d in os.listdir(packages):
+      if d.startswith(self.downloadname) and os.path.isdir(os.path.join(packages, d)) and not self.matchExcludeDir(d):
+        Dir = d
+        break
+    if Dir is None:
+      self.framework.logPrint('Could not locate an existing copy of '+self.downloadname+':')
+      self.framework.logPrint('  '+str(os.listdir(packages)))
+      if retry <= 0:
+        raise RuntimeError('Unable to download '+self.downloadname)
+      self.downLoad()
+      return self.getDir(retry = 0)
+    if not self.archIndependent:
+      if not os.path.isdir(os.path.join(packages, Dir, self.arch)):
+        os.mkdir(os.path.join(packages, Dir, self.arch))
+    return os.path.join(packages, Dir)
+
+  def downLoad(self):
+    '''Downloads a package; using bk or ftp; opens it in the with-external-packages-dir directory'''
+    import install.retrieval
+
+    retriever = install.retrieval.Retriever(self.sourceControl, argDB = self.framework.argDB)
+    retriever.setup()
+    failureMessage = []
+    self.framework.logPrint('Downloading '+self.name)
+    for url in self.download:
+      try:
+        retriever.genericRetrieve(url, self.externalPackagesDir, self.downloadname)
+        self.framework.actions.addArgument(self.PACKAGE, 'Download', 'Downloaded '+self.name+' into '+self.getDir(0))
+        return
+      except RuntimeError, e:
+        failureMessage.append('  Failed to download '+url+'\n'+str(e))
+    failureMessage = 'Unable to download '+self.package+' from locations '+str(self.download)+'\n'+'\n'.join(failureMessage)
+    raise RuntimeError(failureMessage)
+
+  def Install(self):
+    raise RuntimeError('No custom installation implemented for package '+self.package+'\n')
+
+  def checkInclude(self, incl, hfiles, otherIncludes = [], timeout = 600.0):
+    if self.cxx:
+      self.headers.pushLanguage('C++')
+    ret = self.executeTest(self.headers.checkInclude, [incl, hfiles],{'otherIncludes' : otherIncludes, 'timeout': timeout})
+    if self.cxx:
+      self.headers.popLanguage()
+    return ret
+
+  def checkPackageLink(self, includes, body, cleanup = 1, codeBegin = None, codeEnd = None, shared = 0):
+    oldFlags = self.compilers.CPPFLAGS
+    oldLibs  = self.compilers.LIBS
+    self.compilers.CPPFLAGS += ' '+self.headers.toString(self.include)
+    self.compilers.LIBS = self.libraries.toString(self.lib)+' '+self.compilers.LIBS
+    result = self.checkLink(includes, body, cleanup, codeBegin, codeEnd, shared)
+    self.compilers.CPPFLAGS = oldFlags
+    self.compilers.LIBS = oldLibs
+    return result
+
+  def configureLibrary(self):
+    '''Find an installation and check if it can work with PETSc'''
+    self.framework.log.write('==================================================================================\n')
+    self.framework.logPrint('Checking for a functional '+self.name)
+    foundLibrary = 0
+    foundHeader  = 0
+
+    # get any libraries and includes we depend on
+    libs         = []
+    incls        = []
+    for package in self.deps:
+      if not hasattr(package, 'found'):
+        raise RuntimeError('Package '+package.name+' does not have found attribute!')
+      if not package.found:
+        if self.framework.argDB['with-'+package.package] == 1:
+          raise RuntimeError('Package '+package.PACKAGE+' needed by '+self.name+' failed to configure.\nMail configure.log to petsc-maint at mcs.anl.gov.')
+        else:
+          raise RuntimeError('Did not find package '+package.PACKAGE+' needed by '+self.name+'.\nEnable the package using --with-'+package.package)
+      if hasattr(package, 'dlib'):    libs  += package.dlib
+      if hasattr(package, 'include'): incls += package.include
+    if self.needsMath:
+      if self.libraries.math is None:
+        raise RuntimeError('Math library not found')
+      libs += self.libraries.math
+      
+    for location, directory, lib, incl in self.generateGuesses():
+      if lib == '': lib = []
+      elif not isinstance(lib, list): lib = [lib]
+      if incl == '': incl = []
+      elif not isinstance(incl, list): incl = [incl]
+      incl += self.compilers.fincs
+      self.framework.logPrint('Checking for library in '+location+': '+str(lib))
+      if self.executeTest(self.libraries.check,[lib, self.functions],{'otherLibs' : libs, 'fortranMangle' : self.functionsFortran, 'cxxMangle' : self.functionsCxx[0], 'prototype' : self.functionsCxx[1], 'call' : self.functionsCxx[2]}):
+        self.lib = lib	
+        self.framework.logPrint('Checking for headers '+location+': '+str(incl))
+        if (not self.includes) or self.checkInclude(incl, self.includes, incls, timeout = 1800.0):
+          self.include = incl
+          self.found   = 1
+          self.dlib    = self.lib+libs
+          if not hasattr(self.framework, 'packages'):
+            self.framework.packages = []
+          self.directory = directory
+          self.framework.packages.append(self)
+          return
+    raise RuntimeError('Could not find a functional '+self.name+'\n')
+
+  def checkSharedLibrary(self):
+    '''By default we don\'t care about checking if the library is shared'''
+    return 1
+
+  def alternateConfigureLibrary(self):
+    '''Called if --with-packagename=0; does nothing by default'''
+    pass
+
+  def consistencyChecks(self):
+    if 'with-'+self.package+'-dir' in self.framework.argDB and ('with-'+self.package+'-include' in self.framework.argDB or 'with-'+self.package+'-lib' in self.framework.argDB):
+      raise RuntimeError('Specify either "--with-'+self.package+'-dir" or "--with-'+self.package+'-lib --with-'+self.package+'-include". But not both!')
+    if self.framework.argDB['with-'+self.package]:
+      if self.cxx and not hasattr(self.compilers, 'CXX'):
+        raise RuntimeError('Cannot use '+self.name+' without C++, run config/configure.py --with-cxx')
+      if self.fc and not hasattr(self.compilers, 'FC'):
+        raise RuntimeError('Cannot use '+self.name+' without Fortran, run config/configure.py --with-fc')
+    return
+
+  def configure(self):
+    if self.download and not self.download[0] == 'redefine' and self.framework.argDB['download-'+self.downloadname.lower()]:
+      self.framework.argDB['with-'+self.package] = 1
+    if 'with-'+self.package+'-dir' in self.framework.argDB or 'with-'+self.package+'-include' in self.framework.argDB or 'with-'+self.package+'-lib' in self.framework.argDB:
+      self.framework.argDB['with-'+self.package] = 1
+
+    self.consistencyChecks()
+    if self.framework.argDB['with-'+self.package]:
+      # If clanguage is c++, test external packages with the c++ compiler
+      self.libraries.pushLanguage(self.defaultLanguage)
+      self.executeTest(self.configureLibrary)
+      self.executeTest(self.checkSharedLibrary)
+      self.libraries.popLanguage()
+    else:
+      self.executeTest(self.alternateConfigureLibrary)
+    return

Added: long/3D/Gale/trunk/python/BuildSystem/config/packages/BlasLapack.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/packages/BlasLapack.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/packages/BlasLapack.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,474 @@
+from __future__ import generators
+import user
+import config.base
+import config.package
+from sourceDatabase import SourceDB
+import md5
+import os
+
+class Configure(config.package.Package):
+  '''FIX: This has not yet been converted to the package style'''
+  def __init__(self, framework):
+    config.package.Package.__init__(self, framework)
+    self.headerPrefix     = ''
+    self.substPrefix      = ''
+    self.argDB            = framework.argDB
+    # found: set to 1 once a functional BLAS/LAPACK pair has been configured
+    self.found            = 0
+    # f2c: 1 when the C (f2c-translated) BLAS/LAPACK is used; its symbols carry a trailing underscore
+    self.f2c              = 0
+    # fblaslapack: 1 when the Fortran BLAS/LAPACK was downloaded and built
+    self.fblaslapack      = 0
+    self.missingRoutines  = []
+    # separateBlas: 1 when BLAS lives in a library distinct from LAPACK
+    self.separateBlas     = 1
+    self.defaultPrecision = 'double'
+    return
+
+  def __str__(self):
+    '''One-line report of the BLAS/LAPACK libraries selected.'''
+    return 'BLAS/LAPACK: '+self.libraries.toString(self.lib)+'\n'
+
+  def setupHelp(self, help):
+    '''Register the BLAS/LAPACK command-line options.'''
+    import nargs
+    help.addArgument('BLAS/LAPACK', '-with-blas-lapack-dir=<dir>',                nargs.ArgDir(None, None, 'Indicate the directory containing BLAS and LAPACK libraries'))
+    help.addArgument('BLAS/LAPACK', '-with-blas-lapack-lib=<lib>',                nargs.Arg(None, None, 'Indicate the library containing BLAS and LAPACK'))
+    help.addArgument('BLAS/LAPACK', '-with-blas-lib=<lib>',                       nargs.Arg(None, None, 'Indicate the library(s) containing BLAS'))
+    help.addArgument('BLAS/LAPACK', '-with-lapack-lib=<lib>',                     nargs.Arg(None, None, 'Indicate the library(s) containing LAPACK'))
+    help.addArgument('BLAS/LAPACK', '-download-c-blas-lapack=<no,yes,ifneeded,filename>', nargs.ArgDownload(None, 0, 'Automatically install a C version of BLAS/LAPACK'))
+    help.addArgument('BLAS/LAPACK', '-download-f-blas-lapack=<no,yes,ifneeded,filename>', nargs.ArgDownload(None, 0, 'Automatically install a Fortran version of BLAS/LAPACK'))
+    return
+
+  def getDefaultPrecision(self):
+    '''The precision of the library'''
+    # Defer to an external precision provider (e.g. the PETSc configure object)
+    # when one was attached; otherwise use the locally stored default.
+    if hasattr(self, 'precisionProvider'):
+      if hasattr(self.precisionProvider, 'precision'):
+        return self.precisionProvider.precision
+    return self._defaultPrecision
+  def setDefaultPrecision(self, defaultPrecision):
+    '''The precision of the library'''
+    self._defaultPrecision = defaultPrecision
+    return
+  defaultPrecision = property(getDefaultPrecision, setDefaultPrecision, doc = 'The precision of the library')
+
+  def getOtherLibs(self, foundBlas = None, blasLibrary = None, separateBlas = None):
+    '''Return the extra libraries needed when linking against LAPACK:
+    the BLAS library (when separate) plus Fortran compatibility libraries.
+    Arguments default to the instance state when not given.'''
+    if foundBlas is None:
+      foundBlas = self.foundBlas
+    if blasLibrary is None:
+      blasLibrary = self.blasLibrary
+    if separateBlas is None:
+      separateBlas = self.separateBlas
+    otherLibs = []
+    if foundBlas:
+      if separateBlas:
+        otherLibs = blasLibrary
+    if self.useCompatibilityLibs:
+      otherLibs += self.compilers.flibs
+    return otherLibs
+
+  def checkBlas(self, blasLibrary, otherLibs, fortranMangle, routine = 'ddot'):
+    '''This checks the given library for the routine, ddot by default'''
+    oldLibs = self.compilers.LIBS
+    prototype = ''
+    call      = ''
+    # Windows stdcall mangling needs an explicit prototype/call for the probe;
+    # only the default 'ddot' case is handled here.
+    if fortranMangle=='stdcall':
+      if routine=='ddot':
+        prototype = 'double __stdcall DDOT(int*,double*,int*,double*,int*);'
+        call      = 'DDOT(0,0,0,0,0);'
+    found   = self.libraries.check(blasLibrary, routine, otherLibs = otherLibs, fortranMangle = fortranMangle, prototype = prototype, call = call)
+    # Restore LIBS so the probe does not leak into later link tests.
+    self.compilers.LIBS = oldLibs
+    return found
+
+  def checkLapack(self, lapackLibrary, otherLibs, fortranMangle, routines = ['dgetrs', 'dgeev']):
+    '''Check the given library for LAPACK routines; succeeds if ANY routine links.'''
+    oldLibs = self.compilers.LIBS
+    found   = 0
+    prototypes = ['','']
+    calls      = ['','']
+    # stdcall probes need explicit prototypes; only the default routine pair is covered.
+    if fortranMangle=='stdcall':
+      if routines == ['dgetrs','dgeev']:
+        prototypes = ['void __stdcall DGETRS(char*,int,int*,int*,double*,int*,int*,double*,int*,int*);',
+                      'void __stdcall DGEEV(char*,int,char*,int,int*,double*,int*,double*,double*,double*,int*,double*,int*,double*,int*,int*);']
+        calls      = ['DGETRS(0,0,0,0,0,0,0,0,0,0);',
+                      'DGEEV(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0);']
+    for routine, prototype, call in zip(routines, prototypes, calls):
+      found = found or self.libraries.check(lapackLibrary, routine, otherLibs = otherLibs, fortranMangle = fortranMangle, prototype = prototype, call = call)
+      if found: break
+    self.compilers.LIBS = oldLibs
+    return found
+
+  def checkLib(self, lapackLibrary, blasLibrary = None):
+    '''Checking for BLAS and LAPACK symbols'''
+
+    #check for BLASLAPACK_STDCALL calling convention!!!!
+
+    # When no separate BLAS library is given, assume LAPACK library contains BLAS too.
+    if blasLibrary is None:
+      self.separateBlas = 0
+      blasLibrary       = lapackLibrary
+    else:
+      self.separateBlas = 1
+    if not isinstance(lapackLibrary, list): lapackLibrary = [lapackLibrary]
+    if not isinstance(blasLibrary,   list): blasLibrary   = [blasLibrary]
+    foundBlas   = 0
+    foundLapack = 0
+    self.f2c    = 0
+    mangleFunc = self.compilers.fortranMangling
+    foundBlas = self.checkBlas(blasLibrary, self.getOtherLibs(foundBlas, blasLibrary), mangleFunc)
+    if foundBlas:
+      foundLapack = self.checkLapack(lapackLibrary, self.getOtherLibs(foundBlas, blasLibrary), mangleFunc)
+    elif not hasattr(self.compilers, 'FC'):
+      # No Fortran compiler: retry assuming an f2c-style library whose symbols
+      # carry trailing underscores (identified by the f2cblaslapack_id_ marker).
+      self.framework.logPrint('Checking cblaslapack')
+      foundcBlasLapack = self.checkBlas(blasLibrary, self.getOtherLibs(foundBlas, blasLibrary), 0, 'f2cblaslapack_id_')
+      if foundcBlasLapack:
+        foundBlas = self.checkBlas(blasLibrary, self.getOtherLibs(foundBlas, blasLibrary), 0, 'ddot_')
+        foundLapack = self.checkLapack(lapackLibrary, self.getOtherLibs(foundBlas, blasLibrary), 0, ['dgetrs_', 'dgeev_'])
+        if foundBlas and foundLapack:
+          self.framework.logPrint('Found cblaslapack')
+          self.f2c = 1
+    return (foundBlas, foundLapack)
+
+  def generateGuesses(self):
+    '''Generator yielding (description, blasLibrary, lapackLibrary, useCompatibilityLibs)
+    candidates to try, ordered from user-specified locations to platform defaults.
+    Raises RuntimeError when an explicitly requested location cannot be used
+    (i.e. when the generator is resumed after yielding that candidate).'''
+    # check that user has used the options properly
+    if 'with-blas-lib' in self.framework.argDB and not 'with-lapack-lib' in self.framework.argDB:
+      raise RuntimeError('If you use the --with-blas-lib=<lib> you must also use --with-lapack-lib=<lib> option')
+    if not 'with-blas-lib' in self.framework.argDB and 'with-lapack-lib' in self.framework.argDB:
+      raise RuntimeError('If you use the --with-lapack-lib=<lib> you must also use --with-blas-lib=<lib> option')
+    if 'with-blas-lib' in self.framework.argDB and 'with-blas-lapack-dir' in self.framework.argDB:
+      raise RuntimeError('You cannot set both the library containing BLAS with --with-blas-lib=<lib>\nand the directory to search with --with-blas-lapack-dir=<dir>')
+    if 'with-blas-lapack-lib' in self.framework.argDB and 'with-blas-lapack-dir' in self.framework.argDB:
+      raise RuntimeError('You cannot set both the library containing BLAS/LAPACK with --with-blas-lapack-lib=<lib>\nand the directory to search with --with-blas-lapack-dir=<dir>')
+
+    if self.framework.argDB['download-c-blas-lapack']:
+      self.download= 'ftp://ftp.mcs.anl.gov/pub/petsc/externalpackages/f2cblaslapack.tar.gz'
+    elif self.framework.argDB['download-f-blas-lapack']:
+      self.download= 'ftp://ftp.mcs.anl.gov/pub/petsc/externalpackages/fblaslapack.tar.gz'
+
+    # Download option value 1 means 'yes'; a string value is a local tarball path.
+    if self.framework.argDB['download-c-blas-lapack'] == 1 or isinstance(self.framework.argDB['download-c-blas-lapack'], str):
+      if isinstance(self.framework.argDB['download-c-blas-lapack'], str):
+        self.download= 'file://'+os.path.abspath(self.framework.argDB['download-c-blas-lapack'])
+      self.f2c = 1
+
+      if hasattr(self.compilers, 'FC'):
+        raise RuntimeError('Should request f-blas-lapack, not --download-c-blas-lapack=yes since you have a fortran compiler?')
+      libdir = self.downLoadBlasLapack('f2c', 'c')
+      yield ('Downloaded BLAS/LAPACK library', [os.path.join(libdir,'libf2cblas.a')]+self.libraries.math, os.path.join(libdir,'libf2clapack.a'), 0)
+      raise RuntimeError('Could not use downloaded c-blas-lapack?')
+    if self.framework.argDB['download-f-blas-lapack'] == 1  or isinstance(self.framework.argDB['download-f-blas-lapack'], str):
+      if isinstance(self.framework.argDB['download-f-blas-lapack'], str):
+        self.download= 'file://'+os.path.abspath(self.framework.argDB['download-f-blas-lapack'])
+      self.fblaslapack = 1
+
+      if not hasattr(self.compilers, 'FC'):
+        raise RuntimeError('Cannot request f-blas-lapack without Fortran compiler, maybe you want --download-c-blas-lapack=1?')
+      libdir = self.downLoadBlasLapack('f','f')
+      yield ('Downloaded BLAS/LAPACK library', os.path.join(libdir,'libfblas.a'), os.path.join(libdir,'libflapack.a'), 1)
+      raise RuntimeError('Could not use downloaded f-blas-lapack?')
+    # Try specified BLASLAPACK library
+    if 'with-blas-lapack-lib' in self.framework.argDB:
+      yield ('User specified BLAS/LAPACK library', None, self.framework.argDB['with-blas-lapack-lib'], 1)
+      raise RuntimeError('You set a value for --with-blas-lapack-lib=<lib>, but '+str(self.framework.argDB['with-blas-lapack-lib'])+' cannot be used\n')
+    # Try specified BLAS and LAPACK libraries
+    if 'with-blas-lib' in self.framework.argDB and 'with-lapack-lib' in self.framework.argDB:
+      yield ('User specified BLAS and LAPACK libraries', self.framework.argDB['with-blas-lib'], self.framework.argDB['with-lapack-lib'], 1)
+      raise RuntimeError('You set a value for --with-blas-lib=<lib> and --with-lapack-lib=<lib>, but '+str(self.framework.argDB['with-blas-lib'])+' and '+str(self.framework.argDB['with-lapack-lib'])+' cannot be used\n')
+    # Try specified installation root
+    if 'with-blas-lapack-dir' in self.framework.argDB:
+      dir = self.framework.argDB['with-blas-lapack-dir']
+      # A 'X:'-style prefix looks like a Windows drive letter; do not abspath it.
+      if not (len(dir) > 2 and dir[1] == ':') :
+        dir = os.path.abspath(dir)
+      yield ('User specified installation root (HPUX)', os.path.join(dir, 'libveclib.a'),  os.path.join(dir, 'liblapack.a'), 1)
+      yield ('User specified installation root (F2C)', [os.path.join(dir, 'libf2cblas.a')]+self.libraries.math, os.path.join(dir, 'libf2clapack.a'), 1)
+      yield ('User specified installation root', os.path.join(dir, 'libfblas.a'),   os.path.join(dir, 'libflapack.a'), 1)
+      yield ('User specified ATLAS Linux installation root', [os.path.join(dir, 'libcblas.a'),os.path.join(dir, 'libf77blas.a'), os.path.join(dir, 'libatlas.a')],  [os.path.join(dir, 'liblapack.a')], 1)
+      yield ('User specified ATLAS Linux installation root', [os.path.join(dir, 'libf77blas.a'), os.path.join(dir, 'libatlas.a')],  [os.path.join(dir, 'liblapack.a')], 1)
+      # Check AMD ACML libraries
+      yield ('User specified AMD ACML lib dir', None, os.path.join(dir,'lib','libacml.a'), 1)
+      # Check Linux MKL variations
+      yield ('User specified MKL Linux-x86 lib dir', None, [os.path.join(dir, 'libmkl_lapack.a'), 'libmkl_def.a', 'guide', 'pthread'], 1)
+      yield ('User specified MKL Linux-ia64 lib dir', None, [os.path.join(dir, 'libmkl_lapack.a'), 'libmkl_ipf.a', 'guide', 'pthread'], 1)
+      yield ('User specified MKL Linux-em64t lib dir', None, [os.path.join(dir, 'libmkl_lapack.a'), 'libmkl_em64t.a', 'guide', 'pthread'], 1)
+      yield ('User specified MKL Linux-x86 installation root', None, [os.path.join(dir,'lib','32','libmkl_lapack.a'),'libmkl_def.a', 'guide', 'pthread'], 1)
+      yield ('User specified MKL Linux-x86 installation root', None, [os.path.join(dir,'lib','32','libmkl_lapack.a'),'libmkl_def.a', 'guide', 'vml','pthread'], 1)
+      yield ('User specified MKL Linux-ia64 installation root', None, [os.path.join(dir,'lib','64','libmkl_lapack.a'),'libmkl_ipf.a', 'guide', 'pthread'], 1)
+      yield ('User specified MKL Linux-em64t installation root', None, [os.path.join(dir,'lib','em64t','libmkl_lapack.a'),'libmkl_em64t.a', 'guide', 'pthread'], 1)
+      if self.setCompilers.use64BitPointers:
+        mkldir = os.path.join(dir, 'ia64', 'lib')
+      else:
+        mkldir = os.path.join(dir, 'ia32', 'lib')
+      yield ('User specified MKL Windows installation root', None, [os.path.join(mkldir, 'mkl_c_dll.lib')], 1)
+      yield ('User specified MKL Windows lib dir', None, [os.path.join(dir, 'mkl_c_dll.lib')], 1)
+      yield ('User specified stdcall MKL Windows installation root', None, [os.path.join(mkldir, 'mkl_s_dll.lib')], 1)
+      # Search for liblapack.a and libblas.a after the implementations with more specific name to avoid
+      # finding these in /usr/lib despite using -L<blas-lapack-dir> while attempting to get a different library.
+      yield ('User specified installation root', os.path.join(dir, 'libblas.a'),    os.path.join(dir, 'liblapack.a'), 1)
+      raise RuntimeError('You set a value for --with-blas-lapack-dir=<dir>, but '+self.framework.argDB['with-blas-lapack-dir']+' cannot be used\n')
+    # IRIX locations
+    yield ('IRIX Mathematics library', None, 'libcomplib.sgimath.a', 1)
+    yield ('Another IRIX Mathematics library', None, 'libscs.a', 1)
+    yield ('Compaq/Alpha Mathematics library', None, 'libcxml.a', 1)
+    # IBM ESSL locations
+    yield ('IBM ESSL Mathematics library', None, 'libessl.a', 1)
+    # Portland group compiler blas and lapack
+    if 'PGI' in os.environ:
+      dir = os.path.join(os.environ['PGI'],'linux86','5.1','lib')
+      yield ('User specified installation root', os.path.join(dir, 'libblas.a'), os.path.join(dir, 'liblapack.a'), 1)
+      dir = os.path.join(os.environ['PGI'],'linux86','5.0','lib')
+      yield ('User specified installation root', os.path.join(dir, 'libblas.a'), os.path.join(dir, 'liblapack.a'), 1)
+      dir = os.path.join(os.environ['PGI'],'linux86','lib')
+      yield ('User specified installation root', os.path.join(dir, 'libblas.a'), os.path.join(dir, 'liblapack.a'), 1)
+    # Try compiler defaults
+    yield ('Default compiler locations', 'libblas.a', 'liblapack.a', 1)
+    yield ('HPUX', 'libveclib.a', 'liblapack.a', 1)
+    # /usr/local/lib
+    dir = os.path.join('/usr','local','lib')
+    yield ('Default compiler locations /usr/local/lib', os.path.join(dir,'libblas.a'), os.path.join(dir,'liblapack.a'), 1)
+    yield ('Default Atlas location /usr/local/lib',[os.path.join(dir, 'libcblas.a'),os.path.join(dir, 'libf77blas.a'), os.path.join(dir, 'libatlas.a')],  [os.path.join(dir, 'liblapack.a')], 1)
+    yield ('Default Atlas location /usr/local/lib',[os.path.join(dir, 'libf77blas.a'), os.path.join(dir, 'libatlas.a')],  [os.path.join(dir, 'liblapack.a')], 1)
+    yield ('Default compiler locations with G77', None, ['liblapack.a', 'libblas.a','libg2c.a'], 1)
+    # Try MacOSX location
+    yield ('MacOSX BLAS/LAPACK library', None, os.path.join('/System', 'Library', 'Frameworks', 'vecLib.framework', 'vecLib'), 1)
+    # Sun locations
+    yield ('Sun sunperf BLAS/LAPACK library', None, ['libsunperf.a','libsunmath.a','libm.a'], 1)
+    yield ('Sun sunperf BLAS/LAPACK library', None, ['libsunperf.a','libF77.a','libM77.a','libsunmath.a','libm.a'], 1)
+    yield ('Sun sunperf BLAS/LAPACK library', None, ['libsunperf.a','libfui.a','libfsu.a','libsunmath.a','libm.a'], 1)
+    # Try Microsoft Windows location
+    for MKL_Version in [os.path.join('MKL','8.1'),os.path.join('MKL','8.0'),'MKL72','MKL70','MKL61','MKL']:
+      MKL_Dir = os.path.join('/cygdrive', 'c', 'Program Files', 'Intel', MKL_Version)
+      if self.setCompilers.use64BitPointers:
+        MKL_Dir = os.path.join(MKL_Dir, 'ia64', 'lib')
+      else:
+        MKL_Dir = os.path.join(MKL_Dir, 'ia32', 'lib')
+      yield ('Microsoft Windows, Intel MKL library', None, os.path.join(MKL_Dir,'mkl_c_dll.lib'), 1)
+      yield ('Microsoft Windows, Intel MKL stdcall library', None, os.path.join(MKL_Dir,'mkl_s_dll.lib'), 1)
+    # Download option value 2 means 'ifneeded': only download after all other guesses failed.
+    if self.framework.argDB['download-c-blas-lapack'] == 2:
+      if hasattr(self.compilers, 'FC'):
+        raise RuntimeError('Should request f-blas-lapack, not --download-c-blas-lapack=yes since you have a fortran compiler?')
+      libdir = self.downLoadBlasLapack('f2c', 'c')
+      yield ('Downloaded BLAS/LAPACK library', [os.path.join(libdir,'libf2cblas.a')]+self.libraries.math, os.path.join(libdir,'libf2clapack.a'), 0)
+    if self.framework.argDB['download-f-blas-lapack'] == 2:
+      if not hasattr(self.compilers, 'FC'):
+        raise RuntimeError('Cannot request f-blas-lapack without Fortran compiler, maybe you want --download-c-blas-lapack=1?')
+      libdir = self.downLoadBlasLapack('f','f')
+      yield ('Downloaded BLAS/LAPACK library', os.path.join(libdir,'libfblas.a'), os.path.join(libdir,'libflapack.a'), 1)
+    return
+
+  def getSharedFlag(self,cflags):
+    '''Extract the position-independent-code flag from a compiler-flags string, or return ""'''
+    for flag in ['-PIC', '-fPIC', '-KPIC']:
+      if cflags.find(flag) >=0: return flag
+    return ''
+
+  def downLoadBlasLapack(self, f2c, l):
+    '''Download, unpack and build BLAS/LAPACK from self.download.
+    f2c is 'f2c' (C version) or 'f' (Fortran version); l is the letter used in
+    log/error messages ('c' or 'f'). Returns the per-arch directory holding the
+    built libraries. Rebuilds only when the generated tmpmakefile changed.'''
+    self.framework.log.write('Downloading '+l+'blaslapack\n')
+    if self.framework.externalPackagesDir is None:
+      packages = os.path.abspath('externalpackages')
+    else:
+      packages = self.framework.externalPackagesDir
+    if not os.path.isdir(packages):
+      os.mkdir(packages)
+    if f2c == 'f2c':
+      self.f2c = 1
+    if f2c == 'f':
+      self.setCompilers.pushLanguage('FC')
+      if config.setCompilers.Configure.isNAG(self.setCompilers.getLinker()):
+        raise RuntimeError('Cannot compile fortran blaslapack with NAG compiler - install blas/lapack compiled with g77 instead')
+      self.setCompilers.popLanguage()
+    libdir = os.path.join(packages,f2c+'blaslapack',self.arch)
+    # Only fetch and unpack when the source tree is not already present.
+    if not os.path.isdir(os.path.join(packages,f2c+'blaslapack')):
+      self.framework.log.write('Actually need to ftp '+l+'blaslapack\n')
+      import urllib
+      try:
+        urllib.urlretrieve(self.download,os.path.join(packages,f2c+'blaslapack.tar.gz'))
+      except:
+        raise RuntimeError('Error downloading '+f2c+'blaslapack.tar.gz requested with -with-'+l+'-blas-lapack option')
+      try:
+        self.executeShellCommand('cd '+packages+'; gunzip '+f2c+'blaslapack.tar.gz', log = self.framework.log, timeout = 360.0)
+      except:
+        raise RuntimeError('Error unzipping '+f2c+'blaslapack.tar.gz requested with -with-'+l+'-blas-lapack option')
+      try:
+        self.executeShellCommand('cd '+packages+'; tar -xf '+f2c+'blaslapack.tar', log = self.framework.log, timeout = 360.0)
+      except:
+        raise RuntimeError('Error doing tar -xf '+f2c+'blaslapack.tar requested with -with-'+l+'-blas-lapack option')
+      os.unlink(os.path.join(packages,f2c+'blaslapack.tar'))
+      self.framework.actions.addArgument('BLAS/LAPACK', 'Download', 'Downloaded PETSc '+f2c+'blaslapack into '+os.path.dirname(libdir))
+    else:
+      self.framework.log.write('Found '+l+'blaslapack, do not need to download\n')
+    if not os.path.isdir(libdir):
+      os.mkdir(libdir)
+    blasDir = os.path.join(packages,f2c+'blaslapack')
+    # Rewrite the shipped makefile into tmpmakefile, substituting the configured
+    # compilers, flags and archiver for the placeholder assignments.
+    g = open(os.path.join(blasDir,'tmpmakefile'),'w')
+    f = open(os.path.join(blasDir,'makefile'),'r')
+    line = f.readline()
+    while line:
+      if line.startswith('CC  '):
+        cc = self.compilers.CC
+        line = 'CC = '+cc+'\n'
+      if line.startswith('COPTFLAGS '):
+        self.setCompilers.pushLanguage('C')
+        line = 'COPTFLAGS  = '+self.setCompilers.getCompilerFlags()
+        # DOUBLE/LONG macros retarget the f2c sources for non-double precision builds.
+        if self.defaultPrecision == 'int':
+          line += ' -DDOUBLE=int -DLONG=""\n'
+        elif self.defaultPrecision == 'longdouble':
+          line += ' -DDOUBLE=double -DLONG=long\n'
+        else:
+          line += ' -DDOUBLE=double -DLONG=""\n'
+        self.setCompilers.popLanguage()
+      if line.startswith('CNOOPT'):
+        self.setCompilers.pushLanguage('C')
+        line = 'CNOOPT = '+self.getSharedFlag(self.setCompilers.getCompilerFlags())
+        if self.defaultPrecision == 'int':
+          line += ' -DDOUBLE=int -DLONG=""\n'
+        elif self.defaultPrecision == 'longdouble':
+          line += ' -DDOUBLE=double -DLONG=long\n'
+        else:
+          line += ' -DDOUBLE=double -DLONG=""\n'
+        self.setCompilers.popLanguage()
+      if line.startswith('FC  '):
+        fc = self.compilers.FC
+        if fc.find('f90') >= 0:
+          import commands
+          output  = commands.getoutput(fc+' -v')
+          # The IBM f90 driver cannot build these sources; fall back to xlf.
+          if output.find('IBM') >= 0:
+            fc = os.path.join(os.path.dirname(fc),'xlf')
+            self.framework.log.write('Using IBM f90 compiler, switching to xlf for compiling BLAS/LAPACK\n')
+        line = 'FC = '+fc+'\n'
+      if line.startswith('FOPTFLAGS '):
+        self.setCompilers.pushLanguage('FC')
+        line = 'FOPTFLAGS  = '+self.setCompilers.getCompilerFlags().replace('-Mfree','')+'\n'
+        self.setCompilers.popLanguage()
+      if line.startswith('FNOOPT'):
+        self.setCompilers.pushLanguage('FC')
+        line = 'FNOOPT = '+self.getSharedFlag(self.setCompilers.getCompilerFlags())+'\n'
+        self.setCompilers.popLanguage()
+      if line.startswith('AR  '):
+        line = 'AR      = '+self.setCompilers.AR+'\n'
+      if line.startswith('AR_FLAGS  '):
+        line = 'AR_FLAGS      = '+self.setCompilers.AR_FLAGS+'\n'
+      if line.startswith('LIB_SUFFIX '):
+        line = 'LIB_SUFFIX = '+self.setCompilers.AR_LIB_SUFFIX+'\n'
+      if line.startswith('RANLIB  '):
+        line = 'RANLIB = '+self.setCompilers.RANLIB+'\n'
+      if line.startswith('RM  '):
+        line = 'RM = '+self.programs.RM+'\n'
+
+
+      # Drop include directives; everything needed is substituted above.
+      if line.startswith('include'):
+        line = '\n'
+      g.write(line)
+      line = f.readline()
+    f.close()
+    g.close()
+    # If the makefile we generated is identical to the one used for the existing
+    # build, the libraries are up to date and we can skip recompiling.
+    if os.path.isfile(os.path.join(libdir,'tmpmakefile')) and (SourceDB.getChecksum(os.path.join(libdir,'tmpmakefile')) == SourceDB.getChecksum(os.path.join(blasDir,'tmpmakefile'))):
+      self.framework.log.write('Do not need to compile '+l+'blaslapack, already compiled\n')
+      return libdir
+    try:
+      self.logPrintBox('Compiling '+l.upper()+'BLASLAPACK; this may take several minutes')
+      output  = config.base.Configure.executeShellCommand('cd '+blasDir+';make -f tmpmakefile cleanblaslapck cleanlib; make -f tmpmakefile', timeout=2500, log = self.framework.log)[0]
+    except RuntimeError, e:
+      raise RuntimeError('Error running make on '+l+'blaslapack: '+str(e))
+    try:
+      output  = config.base.Configure.executeShellCommand('cd '+blasDir+';mv -f lib'+f2c+'blas.'+self.setCompilers.AR_LIB_SUFFIX+' lib'+f2c+'lapack.'+self.setCompilers.AR_LIB_SUFFIX+' '+self.arch, timeout=30, log = self.framework.log)[0]
+    except RuntimeError, e:
+      raise RuntimeError('Error moving '+l+'blaslapack libraries: '+str(e))
+    try:
+      # Record the makefile beside the libraries so the up-to-date check above works next time.
+      output  = config.base.Configure.executeShellCommand('cd '+blasDir+';cp -f tmpmakefile '+self.arch, timeout=30, log = self.framework.log)[0]
+    except RuntimeError, e:
+      pass
+    return libdir
+
+  def configureLibrary(self):
+    '''Try each candidate from generateGuesses until a functional BLAS and LAPACK
+    pair is found; record the chosen libraries in self.lib/self.dlib or raise
+    RuntimeError with installation advice when none works.'''
+    self.functionalBlasLapack = []
+    self.foundBlas   = 0
+    self.foundLapack = 0
+    for (name, blasLibrary, lapackLibrary, self.useCompatibilityLibs) in self.generateGuesses():
+      self.framework.log.write('================================================================================\n')
+      self.framework.log.write('Checking for a functional BLAS and LAPACK in '+name+'\n')
+      (foundBlas, foundLapack) = self.executeTest(self.checkLib, [lapackLibrary, blasLibrary])
+      if foundBlas:   self.foundBlas   = 1
+      if foundLapack: self.foundLapack = 1
+      if foundBlas and foundLapack:
+        self.functionalBlasLapack.append((name, blasLibrary, lapackLibrary))
+        if not self.framework.argDB['with-alternatives']:
+          break
+    # User chooses one or take first (sort by version)
+    if self.foundBlas and self.foundLapack:
+      name, self.blasLibrary, self.lapackLibrary = self.functionalBlasLapack[0]
+      if not isinstance(self.blasLibrary,   list): self.blasLibrary   = [self.blasLibrary]
+      if not isinstance(self.lapackLibrary, list): self.lapackLibrary = [self.lapackLibrary]
+      self.lib = []
+      if self.lapackLibrary[0]: self.lib.extend(self.lapackLibrary)
+      if self.blasLibrary[0]:   self.lib.extend(self.blasLibrary)
+      self.dlib = self.lib[:]
+      if self.useCompatibilityLibs:
+        self.dlib.extend(self.compilers.flibs)
+      self.framework.packages.append(self)
+      if self.f2c:
+        self.addDefine('BLASLAPACK_UNDERSCORE', 1)
+    else:
+      if not self.foundBlas:
+        # check for split blas/blas-dev packages
+        import glob
+        blib = glob.glob('/usr/lib/libblas.*')
+        if blib != [] and not (os.path.isfile('/usr/lib/libblas.so') or os.path.isfile('/usr/lib/libblas.a')):
+          raise RuntimeError('Incomplete BLAS install? Perhaps blas package is installed - but blas-dev/blas-devel is required.')
+        if hasattr(self.compilers, 'FC'): C = 'f'
+        else: C = 'c'
+        raise RuntimeError('Could not find a functional BLAS. Run with --with-blas-lib=<lib> to indicate the library containing BLAS.\n Or --download-'+C+'-blas-lapack=1 to have one automatically downloaded and installed\n')
+      if not self.foundLapack:
+        # check for split blas/blas-dev packages
+        import glob
+        llib = glob.glob('/usr/lib/liblapack.*')
+        if llib != [] and not (os.path.isfile('/usr/lib/liblapack.so') or os.path.isfile('/usr/lib/liblapack.a')):
+          raise RuntimeError('Incomplete LAPACK install? Perhaps lapack package is installed - but lapack-dev/lapack-devel is required.')
+        if hasattr(self.compilers, 'FC'): C = 'f'
+        else: C = 'c'
+        raise RuntimeError('Could not find a functional LAPACK. Run with --with-lapack-lib=<lib> to indicate the library containing LAPACK.\n Or --download-'+C+'-blas-lapack=1 to have one automatically downloaded and installed\n')
+    self.found = 1
+    return
+
+  def checkESSL(self):
+    '''Check for the IBM ESSL library'''
+    if self.libraries.check(self.lapackLibrary, 'iessl'):
+      self.addDefine('HAVE_ESSL',1)
+    return
+
+  def checkPESSL(self):
+    '''Check for the IBM PESSL library - and error out - if used instead of ESSL'''
+    if self.libraries.check(self.lapackLibrary, 'pdgemm'):
+      raise RuntimeError('Cannot use PESSL instead of ESSL!')
+    return
+
+  def checkMissing(self):
+    '''Check for missing LAPACK routines'''
+    if self.foundLapack:
+      # Fortran name mangling applies only when a Fortran compiler exists and
+      # the library is not the f2c build (which always uses trailing underscores).
+      mangleFunc = hasattr(self.compilers, 'FC') and not self.f2c
+      for baseName in ['gesvd','geev','getrf','potrf','getrs','potrs']:
+        if self.f2c:
+          routine = 'd'+baseName+'_'
+        else:
+          routine = 'd'+baseName
+        oldLibs = self.compilers.LIBS
+        if not self.libraries.check(self.lapackLibrary, routine, otherLibs = self.getOtherLibs(), fortranMangle = mangleFunc):
+          self.missingRoutines.append(baseName)
+          self.addDefine('MISSING_LAPACK_'+baseName.upper(), 1)
+        self.compilers.LIBS = oldLibs
+    return
+
+  def checkForRoutine(self,routine):
+    ''' used by other packages to see if a BLAS routine is available
+        This is not really correct because other packages do not (usually) know about f2cblasLapack'''
+    if self.f2c:
+      return self.libraries.check(self.dlib,routine+'_')
+    else:
+      return self.libraries.check(self.dlib,routine,fortranMangle = hasattr(self.compilers, 'FC'))
+
+  def configure(self):
+    '''Run all BLAS/LAPACK configure tests in order.'''
+    self.executeTest(self.configureLibrary)
+    self.executeTest(self.checkESSL)
+    self.executeTest(self.checkPESSL)
+    self.executeTest(self.checkMissing)
+    # Non-double precision builds require the f2c sources (see downLoadBlasLapack's -DDOUBLE handling).
+    if (self.defaultPrecision == 'longdouble' or self.defaultPrecision == 'int') and not self.f2c:
+      raise RuntimeError('Need to use --download-c-blas-lapack when using --with-precision=longdouble/int')
+    return
+
+if __name__ == '__main__':
+  # Stand-alone driver: run just this configure module against the command-line
+  # arguments and dump the resulting substitutions.
+  import config.framework
+  import sys
+  framework = config.framework.Framework(sys.argv[1:])
+  framework.setupLogging()
+  framework.children.append(Configure(framework))
+  framework.configure()
+  framework.dumpSubstitutions()

Added: long/3D/Gale/trunk/python/BuildSystem/config/packages/Boost.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/packages/Boost.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/packages/Boost.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,17 @@
+import config.package
+
+class Configure(config.package.Package):
+  '''Configure the Boost headers (header-only: multi_index is the probe header).'''
+  def __init__(self, framework):
+    config.package.Package.__init__(self, framework)
+    self.download        = ['ftp://ftp.mcs.anl.gov/pub/petsc/externalpackages/boost.tar.gz']
+    self.includes        = ['boost/multi_index_container.hpp']
+    # Boost is a C++ library; requires a configured C++ compiler.
+    self.cxx             = 1
+    self.includedir      = ''
+    # Header-only: the install is independent of the build architecture.
+    self.archIndependent = 1
+    return
+
+  def Install(self):
+    '''"Install" Boost: the unpacked source tree is used in place; just record the action.'''
+    import sys
+    boostDir = self.getDir()
+    self.framework.actions.addArgument('Boost', 'Install', 'Installed Boost into '+boostDir)
+    return boostDir

Added: long/3D/Gale/trunk/python/BuildSystem/config/packages/MPI.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/packages/MPI.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/packages/MPI.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,520 @@
+#!/usr/bin/env python
+from __future__ import generators
+import user
+import config.base
+import config.package
+import os
+from stat import *
+
+class Configure(config.package.Package):
+  def __init__(self, framework):
+    config.package.Package.__init__(self, framework)
+    self.download_lam     = ['http://www.lam-mpi.org/download/files/lam-7.1.1.tar.gz']
+    self.download_mpich   = ['ftp://ftp.mcs.anl.gov/pub/mpi/mpich2-1.0.4p1.tar.gz']
+    self.download         = ['redefine']
+    self.functions        = ['MPI_Init', 'MPI_Comm_create']
+    self.includes         = ['mpi.h']
+    self.liblist_mpich    = [['libmpich.a', 'libpmpich.a'],
+                             ['libfmpich.a','libmpich.a', 'libpmpich.a'],
+                             ['libmpich.a'],
+                             ['libmpich.a','libpthread.a'],
+                             ['libfmpich.a','libmpich.a', 'libpmpich.a', 'libmpich.a', 'libpmpich.a', 'libpmpich.a'],
+                             ['libmpich.a', 'libpmpich.a', 'libmpich.a', 'libpmpich.a', 'libpmpich.a'],
+                             ['libmpich.a','libssl.a','libuuid.a','libpthread.a','librt.a','libdl.a'],
+                             ['libmpich.a','libnsl.a','libsocket.a','librt.a','libnsl.a','libsocket.a'],
+                             ['mpich2.lib'],
+                             ['libmpich.a','libgm.a','libpthread.a'],
+                             ['mpich.lib']]
+    self.liblist_lam      = [['liblamf77mpi.a','libmpi++.a','libmpi.a','liblam.a'],
+                             ['liblammpi++.a','libmpi.a','liblam.a'],
+                             ['libmpi.a','libmpi++.a'],['libmpi.a'],
+                             ['liblammpio.a','libpmpi.a','liblamf77mpi.a','libmpi.a','liblam.a'],
+                             ['liblammpio.a','libpmpi.a','liblamf90mpi.a','libmpi.a','liblam.a'],
+                             ['liblammpio.a','libpmpi.a','libmpi.a','liblam.a'],
+                             ['liblammpi++.a','libmpi.a','liblam.a'],
+                             ['libmpi.a','liblam.a']]
+    self.liblist          = [[]] + self.liblist_lam + self.liblist_mpich
+    # defaults to --with-mpi=yes
+    self.required         = 1
+    self.double           = 0
+    self.complex          = 1
+    self.isPOE            = 0
+    self.usingMPIUni      = 0
+    self.requires32bitint = 0
+    self.shared           = 0
+    # local state
+    self.commf2c          = 0
+    self.commc2f          = 0
+    return
+
+  def setupHelp(self, help):
+    config.package.Package.setupHelp(self,help)
+    import nargs
+    help.addArgument('MPI', '-download-lam=<no,yes,ifneeded,filename>',    nargs.ArgDownload(None, 0, 'Download and install LAM/MPI'))
+    help.addArgument('MPI', '-download-mpich=<no,yes,ifneeded,filename>',  nargs.ArgDownload(None, 0, 'Download and install MPICH-2'))
+    help.addArgument('MPI', '-with-mpirun=<prog>',                nargs.Arg(None, None, 'The utility used to launch MPI jobs'))
+    help.addArgument('MPI', '-with-mpi-compilers=<bool>',         nargs.ArgBool(None, 1, 'Try to use the MPI compilers, e.g. mpicc'))
+    help.addArgument('MPI', '-with-mpi-shared=<bool>',            nargs.ArgBool(None, None, 'Try to use shared MPI libraries'))
+    help.addArgument('MPI', '-download-mpich-machines=[machine1,machine2...]',  nargs.Arg(None, ['localhost','localhost'], 'Machines for MPI to use'))
+    help.addArgument('MPI', '-download-mpich-pm=gforker or mpd',  nargs.Arg(None, 'gforker', 'Launcher for MPI processes')) 
+    help.addArgument('MPI', '-download-mpich-device=ch3:shm or see mpich2 docs', nargs.Arg(None, None, 'Communicator for MPI processes'))
+    return
+
+  def setupDependencies(self, framework):
+    config.package.Package.setupDependencies(self, framework)
+    self.compilers = framework.require('config.compilers', self)
+    self.types = framework.require('config.types', self)
+    return
+
+  # search many obscure locations for MPI
+  def getSearchDirectories(self):
+    import re
+    yield ''
+    # Try configure package directories
+    dirExp = re.compile(r'mpi(ch)?(-.*)?')
+    for packageDir in self.framework.argDB['package-dirs']:
+      packageDir = os.path.abspath(packageDir)
+      if not os.path.isdir(packageDir):
+        raise RuntimeError('Invalid package directory: '+packageDir)
+      for f in os.listdir(packageDir):
+        dir = os.path.join(packageDir, f)
+        if not os.path.isdir(dir):
+          continue
+        if not dirExp.match(f):
+          continue
+        yield (dir)
+    # Try SUSE location
+    yield (os.path.abspath(os.path.join('/opt', 'mpich')))
+    # Try IBM
+    self.isPOE = 1
+    dir = os.path.abspath(os.path.join('/usr', 'lpp', 'ppe.poe'))
+    yield (os.path.abspath(os.path.join('/usr', 'lpp', 'ppe.poe')))
+    self.isPOE = 0
+    # Try /usr/local
+    yield (os.path.abspath(os.path.join('/usr', 'local')))
+    # Try /usr/local/*mpich*
+    if os.path.isdir(dir):
+      ls = os.listdir(dir)
+      for dir in ls:
+        if dir.find('mpich') >= 0:
+          dir = os.path.join('/usr','local',dir)
+          if os.path.isdir(dir):
+            yield (dir)
+    # Try ~/mpich*
+    homedir = os.getenv('HOME')
+    if homedir:
+      ls = os.listdir(homedir)
+      for dir in ls:
+        if dir.find('mpich') >= 0:
+          dir = os.path.join(homedir,dir)
+          if os.path.isdir(dir):
+            yield (dir)
+    # Try MPICH install locations under Windows
+    yield(os.path.join('/cygdrive','c','Program Files','MPICH2'))
+    yield(os.path.join('/cygdrive','c','Program Files','MPICH'))
+    yield(os.path.join('/cygdrive','c','Program Files','MPICH','SDK.gcc'))
+    yield(os.path.join('/cygdrive','c','Program Files','MPICH','SDK'))
+    return
+
+  def checkSharedLibrary(self):
+    '''Check that the libraries for MPI are shared libraries'''
+    self.executeTest(self.configureMPIRUN)
+    try:
+      self.shared = self.libraries.checkShared('#include <mpi.h>\n','MPI_Init','MPI_Initialized','MPI_Finalize',checkLink = self.checkPackageLink,libraries = self.lib, defaultArg = 'with-mpi-shared', executor = self.mpirun)
+    except RuntimeError, e:
+      if self.framework.argDB['with-shared']:
+        raise RuntimeError('Shared libraries cannot be built using MPI provided.\nEither rebuild with --with-shared=0 or rebuild MPI with shared library support')
+      self.framework.logPrint('MPI libraries cannot be used with shared libraries')
+      self.shared = 0
+    return
+
+  def configureMPIRUN(self):
+    '''Checking for mpirun'''
+    if 'with-mpirun' in self.framework.argDB:
+      self.framework.argDB['with-mpirun'] = os.path.expanduser(self.framework.argDB['with-mpirun'])
+      if not self.getExecutable(self.framework.argDB['with-mpirun'], resultName = 'mpirun'):
+        raise RuntimeError('Invalid mpirun specified: '+str(self.framework.argDB['with-mpirun']))
+      return
+    if self.isPOE:
+      self.mpirun = os.path.abspath(os.path.join('bin', 'mpirun.poe'))
+      return
+    mpiruns = ['mpiexec -np 1', 'mpirun -np 1', 'mprun -np 1', 'mpiexec', 'mpirun', 'mprun']
+    path    = []
+    if 'with-mpi-dir' in self.framework.argDB:
+      path.append(os.path.join(os.path.abspath(self.framework.argDB['with-mpi-dir']), 'bin'))
+      # MPICH-NT-1.2.5 installs MPIRun.exe in mpich/mpd/bin
+      path.append(os.path.join(os.path.abspath(self.framework.argDB['with-mpi-dir']), 'mpd','bin'))
+    for inc in self.include:
+      path.append(os.path.join(os.path.dirname(inc), 'bin'))
+      # MPICH-NT-1.2.5 installs MPIRun.exe in mpich/SDK/include/../../mpd/bin
+      path.append(os.path.join(os.path.dirname(os.path.dirname(inc)),'mpd','bin'))
+    for lib in self.lib:
+      path.append(os.path.join(os.path.dirname(os.path.dirname(lib)), 'bin'))
+    self.pushLanguage('C')
+    if os.path.basename(self.getCompiler()) == 'mpicc' and os.path.dirname(self.getCompiler()):
+      path.append(os.path.dirname(self.getCompiler()))
+    self.popLanguage()
+    if not self.getExecutable(mpiruns, path = path, useDefaultPath = 1, resultName = 'mpirun',setMakeMacro=0):
+      raise RuntimeError('Could not locate MPIRUN - please specify --with-mpirun option')
+    self.addMakeMacro('MPIRUN',self.mpirun.replace(' -np 1','').replace(' ', '\\ '))
+    return
+        
+  def configureConversion(self):
+    '''Check for the functions which convert communicators between C and Fortran
+       - Define HAVE_MPI_COMM_F2C and HAVE_MPI_COMM_C2F if they are present
+       - Some older MPI 1 implementations are missing these'''
+    oldFlags = self.compilers.CPPFLAGS
+    oldLibs  = self.compilers.LIBS
+    self.compilers.CPPFLAGS += ' '+self.headers.toString(self.include)
+    self.compilers.LIBS = self.libraries.toString(self.lib)+' '+self.compilers.LIBS
+    if self.checkLink('#include <mpi.h>\n', 'if (MPI_Comm_f2c(MPI_COMM_WORLD));\n'):
+      self.commf2c = 1
+      self.addDefine('HAVE_MPI_COMM_F2C', 1)
+    if self.checkLink('#include <mpi.h>\n', 'if (MPI_Comm_c2f(MPI_COMM_WORLD));\n'):
+      self.commc2f = 1
+      self.addDefine('HAVE_MPI_COMM_C2F', 1)
+    if self.checkLink('#include <mpi.h>\n', 'MPI_Fint a;\n'):
+      self.addDefine('HAVE_MPI_FINT', 1)
+
+    self.compilers.CPPFLAGS = oldFlags
+    self.compilers.LIBS = oldLibs
+    return
+
+  def configureTypes(self):
+    '''Checking for MPI types'''
+    oldFlags = self.compilers.CPPFLAGS
+    self.compilers.CPPFLAGS += ' '+self.headers.toString(self.include)
+    self.framework.batchIncludeDirs.extend([self.headers.getIncludeArgument(inc) for inc in self.include])
+    self.types.checkSizeof('MPI_Comm', 'mpi.h')
+    if 'HAVE_MPI_FINT' in self.defines:
+      self.types.checkSizeof('MPI_Fint', 'mpi.h')
+    self.compilers.CPPFLAGS = oldFlags
+    return
+
+  def alternateConfigureLibrary(self):
+    '''Setup MPIUNI, our uniprocessor version of MPI'''
+    self.addDefine('HAVE_MPIUNI', 1)
+    self.include = [os.path.abspath(os.path.join('include', 'mpiuni'))]
+    self.lib = [os.path.abspath(os.path.join('lib', self.arch, 'libmpiuni'))]
+    self.mpirun = '${PETSC_DIR}/bin/mpirun.uni'
+    self.addMakeMacro('MPIRUN','${PETSC_DIR}/bin/mpirun.uni')
+    self.addDefine('HAVE_MPI_COMM_F2C', 1)
+    self.addDefine('HAVE_MPI_COMM_C2F', 1)
+    self.addDefine('HAVE_MPI_FINT', 1)
+    self.commf2c = 1
+    self.commc2f = 1
+    self.framework.packages.append(self)
+    self.usingMPIUni = 1
+    self.version = 'PETSc MPIUNI uniprocessor MPI replacement'
+    return
+
+  def configureMissingPrototypes(self):
+    '''Checks for missing prototypes, which it adds to petscfix.h'''
+    if not 'HAVE_MPI_FINT' in self.defines:
+      self.addPrototype('typedef int MPI_Fint;')
+    if not 'HAVE_MPI_COMM_F2C' in self.defines:
+      self.addPrototype('#define MPI_Comm_f2c(a) (a)')
+    if not 'HAVE_MPI_COMM_C2F' in self.defines:
+      self.addPrototype('#define MPI_Comm_c2f(a) (a)')
+    return
+
+  def configureMPICHShared(self):
+    '''MPICH cannot be used with shared libraries on the Mac, reject if trying'''
+    if config.setCompilers.Configure.isDarwin():
+      if not self.setCompilers.staticLibraries:
+        for lib in self.lib:
+          if lib.find('mpich') >= 0:
+            raise RuntimeError('Sorry, we have not been able to figure out how to use shared libraries on the \n \
+              Mac with MPICH. Either run config/configure.py with --with-shared=0 or use LAM instead of MPICH; \n\
+              for instance with --download-lam=1')
+    return
+
+  def checkDownload(self, requireDownload = 1):
+    '''Check if we should download LAM or MPICH'''
+
+    if self.framework.argDB['download-lam'] and self.framework.argDB['download-mpich']:
+      raise RuntimeError('Sorry, cannot install both LAM and MPICH. Install any one of the two')
+
+    # check for LAM
+    if self.framework.argDB['download-lam']:
+      if config.setCompilers.Configure.isCygwin():
+        raise RuntimeError('Sorry, cannot download-install LAM on Windows. Sugest installing windows version of MPICH manually')
+      self.liblist      = self.liblist_lam   # only generate LAM MPI guesses
+      self.download     = self.download_lam
+      self.downloadname = 'lam'
+      return config.package.Package.checkDownload(self, requireDownload)
+        
+    # Check for MPICH
+    if self.framework.argDB['download-mpich']:
+      if config.setCompilers.Configure.isCygwin():
+        raise RuntimeError('Sorry, cannot download-install MPICH on Windows. Sugest installing windows version of MPICH manually')
+      self.liblist      = self.liblist_mpich   # only generate MPICH guesses
+      self.download     = self.download_mpich
+      self.downloadname = 'mpich'
+      return config.package.Package.checkDownload(self, requireDownload)
+    return None
+
+  def Install(self):
+    if self.framework.argDB['download-lam']:
+      return self.InstallLAM()
+    elif self.framework.argDB['download-mpich']:
+      return self.InstallMPICH()
+    else:
+      raise RuntimeError('Internal Error!')
+    
+  def InstallLAM(self):
+    lamDir = self.getDir()
+
+    # Get the LAM directories
+    installDir = os.path.join(lamDir, self.arch)
+    # Configure and Build LAM
+    self.framework.pushLanguage('C')
+    args = ['--prefix='+installDir, '--with-rsh=ssh','CC="'+self.framework.getCompiler()+' '+self.framework.getCompilerFlags()+'"']
+    if self.framework.argDB['with-shared']:
+      if self.setCompilers.staticLibraries:
+        raise RuntimeError('Configuring with shared libraries - but the system/compilers do not support this')
+      args.append('--enable-shared')
+    self.framework.popLanguage()
+    # c++ can't be disabled with LAM
+    if hasattr(self.compilers, 'CXX'):
+      self.framework.pushLanguage('Cxx')
+      args.append('CXX="'+self.framework.getCompiler()+' '+self.framework.getCompilerFlags()+'"')
+      self.framework.popLanguage()
+    # no separate F90 options for LAM
+    if hasattr(self.compilers, 'FC'):
+      self.framework.pushLanguage('FC')
+      args.append('FC="'+self.framework.getCompiler()+' '+self.framework.getCompilerFlags()+'"')
+      self.framework.popLanguage()
+    else:
+      args.append('--without-fc')
+    args = ' '.join(args)
+
+    try:
+      fd      = file(os.path.join(installDir,'config.args'))
+      oldargs = fd.readline()
+      fd.close()
+    except:
+      oldargs = ''
+    if not oldargs == args:
+      self.framework.log.write('Have to rebuild LAM oldargs = '+oldargs+'\n new args = '+args+'\n')
+      try:
+        self.logPrintBox('Configuring LAM/MPI; this may take several minutes')
+        output  = config.base.Configure.executeShellCommand('cd '+lamDir+';CXX='';export CXX; ./configure '+args, timeout=1500, log = self.framework.log)[0]
+      except RuntimeError, e:
+        raise RuntimeError('Error running configure on LAM/MPI: '+str(e))
+      try:
+        self.logPrintBox('Compiling LAM/MPI; this may take several minutes')
+        output  = config.base.Configure.executeShellCommand('cd '+lamDir+';LAM_INSTALL_DIR='+installDir+';export LAM_INSTALL_DIR; make install', timeout=2500, log = self.framework.log)[0]
+      except RuntimeError, e:
+        raise RuntimeError('Error running make on LAM/MPI: '+str(e))
+      if not os.path.isdir(os.path.join(installDir,'lib')):
+        self.framework.log.write('Error running make on LAM/MPI   ******(libraries not installed)*******\n')
+        self.framework.log.write('********Output of running make on LAM follows *******\n')        
+        self.framework.log.write(output)
+        self.framework.log.write('********End of Output of running make on LAM *******\n')
+        raise RuntimeError('Error running make on LAM, libraries not installed')
+      
+      fd = file(os.path.join(installDir,'config.args'), 'w')
+      fd.write(args)
+      fd.close()
+      #need to run ranlib on the libraries using the full path
+      try:
+        output  = config.base.Configure.executeShellCommand(self.setCompilers.RANLIB+' '+os.path.join(installDir,'lib')+'/lib*.a', timeout=2500, log = self.framework.log)[0]
+      except RuntimeError, e:
+        raise RuntimeError('Error running ranlib on LAM/MPI libraries: '+str(e))
+      # start up LAM daemon; note lamboot does not close stdout, so call will ALWAYS timeout.
+      try:
+        output  = config.base.Configure.executeShellCommand('PATH=${PATH}:'+os.path.join(installDir,'bin')+' '+os.path.join(installDir,'bin','lamboot'), timeout=10, log = self.framework.log)[0]
+      except:
+        pass
+      self.framework.actions.addArgument(self.PACKAGE, 'Install', 'Installed LAM/MPI into '+installDir)
+    return self.getDir()
+
+  def InstallMPICH(self):
+    mpichDir = self.getDir()
+    installDir = os.path.join(mpichDir, self.arch)
+    if not os.path.isdir(installDir):
+      os.mkdir(installDir)
+      
+    # Configure and Build MPICH
+    self.framework.pushLanguage('C')
+    args = ['--prefix='+installDir]
+    args.append('CC="'+self.framework.getCompiler()+' '+self.framework.getCompilerFlags()+'"')
+    self.framework.popLanguage()
+    if hasattr(self.compilers, 'CXX'):
+      self.framework.pushLanguage('Cxx')
+      args.append('CXX="'+self.framework.getCompiler()+' '+self.framework.getCompilerFlags()+'"')
+      self.framework.popLanguage()
+    else:
+      args.append('--disable-cxx')
+    if hasattr(self.compilers, 'FC'):
+      self.framework.pushLanguage('FC')      
+      fc = self.framework.getCompiler()
+      if self.compilers.fortranIsF90:
+        try:
+          output, error, status = self.executeShellCommand(fc+' -v')
+          output += error
+        except:
+          output = ''
+        if output.find('IBM') >= 0:
+          fc = os.path.join(os.path.dirname(fc), 'xlf')
+          self.framework.log.write('Using IBM f90 compiler, switching to xlf for compiling MPICH\n')
+        # now set F90
+        args.append('F90="'+fc+' '+self.framework.getCompilerFlags().replace('-Mfree','')+'"')
+      else:
+        args.append('--disable-f90')
+      args.append('F77="'+fc+' '+self.framework.getCompilerFlags().replace('-Mfree','')+'"')
+      self.framework.popLanguage()
+    else:
+      args.append('--disable-f77')
+      args.append('--disable-f90')
+    if self.framework.argDB['with-shared']:
+      if self.setCompilers.staticLibraries:
+        raise RuntimeError('Configuring with shared libraries - but the system/compilers do not support this')
+      if self.compilers.isGCC:
+        if config.setCompilers.Configure.isDarwin():
+          args.append('--enable-sharedlibs=gcc-osx')
+        else:        
+          args.append('--enable-sharedlibs=gcc')
+      else:
+        args.append('--enable-sharedlibs=libtool')
+    if 'download-mpich-device' in self.argDB:
+      args.append('--with-device='+self.argDB['download-mpich-device'])
+    args.append('--without-mpe')
+    args.append('--with-pm='+self.argDB['download-mpich-pm'])
+    args = ' '.join(args)
+    configArgsFilename = os.path.join(installDir,'config.args')
+    try:
+      fd      = file(configArgsFilename)
+      oldargs = fd.readline()
+      fd.close()
+    except:
+      self.framework.logPrint('Unable to find old configure arguments in '+configArgsFilename)
+      oldargs = ''
+    if not oldargs == args:
+      self.framework.logPrint('Have to rebuild MPICH oldargs = '+oldargs+'\n new args = '+args)
+      try:
+        self.logPrintBox('Running configure on MPICH; this may take several minutes')
+        output  = config.base.Configure.executeShellCommand('cd '+mpichDir+';./configure '+args, timeout=2000, log = self.framework.log)[0]
+      except RuntimeError, e:
+        import sys
+        if sys.platform.startswith('cygwin'):
+          raise RuntimeError('Error running configure on MPICH. \n \
+  On Microsoft Windows systems, please obtain and install the binary distribution from \n \
+    http://www.mcs.anl.gov/mpi/mpich/mpich-nt \n \
+  then rerun configure.  \n \
+  If you choose to install MPICH to a location other than the default, use \n \
+    --with-mpi-dir=<directory> \n \
+  to specify the location of the installation when you rerun configure.')
+        raise RuntimeError('Error running configure on MPICH: '+str(e))
+      try:
+        self.logPrintBox('Running make on MPICH; this may take several minutes')
+        output  = config.base.Configure.executeShellCommand('cd '+mpichDir+';make; make install', timeout=2500, log = self.framework.log)[0]
+      except RuntimeError, e:
+        import sys
+        if sys.platform.startswith('cygwin'):
+          raise RuntimeError('Error running make; make install on MPICH. \n \
+  On Microsoft Windows systems, please obtain and install the binary distribution from \n \
+    http://www.mcs.anl.gov/mpi/mpich/mpich-nt \n \
+  then rerun configure.  \n \
+  If you choose to install MPICH to a location other than the default, use \n \
+    --with-mpi-dir=<directory> \n \
+  to specify the location of the installation when you rerun configure.')
+        raise RuntimeError('Error running make; make install on MPICH: '+str(e))
+
+      try:
+        fd = file(configArgsFilename, 'w')
+        fd.write(args)
+        fd.close()
+      except:
+        self.framework.logPrint('Unable to output configure arguments into '+configArgsFilename)
+      if self.argDB['download-mpich-pm'] == 'mpd':
+        homedir = os.getenv('HOME')
+        if homedir:
+          if not os.path.isfile(os.path.join(homedir,'.mpd.conf')):
+            fd = open(os.path.join(homedir,'.mpd.conf'),'w')
+            fd.write('secretword=mr45-j9z\n')
+            fd.close()
+            os.chmod(os.path.join(homedir,'.mpd.conf'),S_IRWXU)
+        else:
+          self.logPrint('No HOME env var, so could not check for or create .mpd.conf')
+
+        # start up MPICH's daemon
+        self.framework.logPrint('Starting up MPICH mpd demon needed for mpirun')
+        try:
+          output = self.executeShellCommand('cd '+installDir+'; bin/mpdboot',timeout=25)
+          self.framework.logPrint('Output from trying to run mpdboot:'+str(output))
+          self.framework.logPrint('Started up MPICH mpd demon needed for mpirun')
+        except RuntimeError, e:
+          self.framework.logPrint('Error trying to run mpdboot:'+str(e))
+      self.framework.actions.addArgument('MPI', 'Install', 'Installed MPICH into '+installDir)
+    return self.getDir()
+
+  def addExtraLibraries(self):
+    '''Check for various auxiliary libraries we may need'''
+    extraLib = []
+    if not self.setCompilers.usedMPICompilers:
+      if self.executeTest(self.libraries.check, [['rt'], 'timer_create', None, extraLib]):
+        extraLib.append('librt.a')
+      if self.executeTest(self.libraries.check, [['aio'], 'aio_read', None, extraLib]):
+        extraLib.insert(0, 'libaio.a')
+      if self.executeTest(self.libraries.check, [['nsl'], 'exit', None, extraLib]):
+        extraLib.insert(0, 'libnsl.a')
+      self.extraLib.extend(extraLib)
+    return
+
+  def SGIMPICheck(self):
+    '''Returns true if SGI MPI is used'''
+    if self.libraries.check(self.lib, 'MPI_SGI_barrier') :
+      self.logPrint('SGI MPI detected - defining MISSING_SIGTERM')
+      self.addDefine('MISSING_SIGTERM', 1)
+      return 1
+    else:
+      self.logPrint('SGI MPI test failure')
+      return 0
+
+  def FortranMPICheck(self):
+    '''Make sure fortran include [mpif.h] and library symbols are found'''
+    if not hasattr(self.compilers, 'FC'):
+      return 0
+    # Fortran compiler is being used - so make sure mpif.h exists
+    self.libraries.pushLanguage('FC')
+    oldFlags = self.compilers.CPPFLAGS
+    self.compilers.CPPFLAGS += ' '+self.headers.toString(self.include)
+    self.framework.log.write('Checking for header mpif.h\n')
+    if not self.libraries.checkCompile(body = '       include \'mpif.h\''):
+      raise RuntimeError('Fortran error! mpif.h could not be located at: '+str(self.include))
+    # check if mpi_init form fortran works
+    self.framework.log.write('Checking for fortran mpi_init()\n')
+    if not self.libraries.check(self.lib,'', call = '       include \'mpif.h\'\n       integer ierr\n       call mpi_init(ierr)'):
+      raise RuntimeError('Fortran error! mpi_init() could not be located!')
+    self.compilers.CPPFLAGS = oldFlags
+    self.libraries.popLanguage()
+    return 0
+
+  def configureLibrary(self):
+    '''Calls the regular package configureLibrary and then does an additional test needed by MPI'''
+    if 'with-'+self.package+'-shared' in self.framework.argDB:
+      self.framework.argDB['with-'+self.package] = 1
+    self.addExtraLibraries()
+    config.package.Package.configureLibrary(self)
+    # Satish check here if the self.directory is truly the MPI root directory with mpicc underneath it
+    # if not then set it to None
+
+    #self.executeTest(self.configureMPICHShared)
+    self.executeTest(self.configureConversion)
+    self.executeTest(self.configureTypes)
+    self.executeTest(self.configureMissingPrototypes)
+    self.executeTest(self.SGIMPICheck)
+    self.executeTest(self.FortranMPICheck)
+    if self.libraries.check(self.dlib, "MPI_Alltoallw"):
+      self.addDefine('HAVE_MPI_ALLTOALLW',1)
+
+if __name__ == '__main__':
+  import config.framework
+  import sys
+  framework = config.framework.Framework(sys.argv[1:])
+  framework.setupLogging(framework.clArgs)
+  framework.children.append(Configure(framework))
+  framework.configure()
+  framework.dumpSubstitutions()


Property changes on: long/3D/Gale/trunk/python/BuildSystem/config/packages/MPI.py
___________________________________________________________________
Name: svn:mime-type
   + text/script

Added: long/3D/Gale/trunk/python/BuildSystem/config/packages/PETSc.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/packages/PETSc.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/packages/PETSc.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,459 @@
+#!/usr/bin/env python
+'''
+  This is the first try for a hierarchically configured module. The idea is to
+add the configure objects from a previously executed framework into the current
+framework. However, this necessitates a reorganization of the activities in the
+module.
+
+  We must now have three distinct phases: location, construction, and testing.
+This is very similar to the current compiler checks. The construction phase is
+optional, and only necessary when the package has not been previously configured.
+The phases will necessarily interact, as an installation must be located before
+testing; however, another should be located if the testing fails.
+
+  We will give each installation a unique key, which is returned by the location
+method. This will allow us to identify working installations, as well as those
+that failed testing.
+
+  There is a weird role reversal that can happen. If we look for PETSc, but
+cannot find it, it is reasonable to ask to have it automatically downloaded.
+However, in this case, rather than using the configure objects from the existing
+PETSc, we contribute objects to the PETSc which will be built.
+
+'''
+from __future__ import generators
+import user
+import config.base
+
+import re
+import os
+
+class InvalidPETScError(RuntimeError):
+  '''Raised when a candidate PETSc installation turns out to be unusable'''
+  pass
+
+class Configure(config.base.Configure):
+  def __init__(self, framework):
+    '''Initialize state.
+
+    location is the chosen (petscDir, petscArch) pair; trial maps candidate
+    locations to their descriptive names; working maps validated locations
+    to (name, include, lib, version) tuples.'''
+    config.base.Configure.__init__(self, framework)
+    self.headerPrefix = ''
+    self.substPrefix  = ''
+    self.location     = None
+    self.trial        = {}
+    self.working      = {}
+    return
+
+  def __str__(self):
+    '''Summarize the located PETSc; empty string when nothing was found'''
+    if self.found:
+      desc = ['PETSc:']	
+      desc.append('  Type: '+self.name)
+      desc.append('  Version: '+self.version)
+      desc.append('  Includes: '+str(self.include))
+      desc.append('  Library: '+str(self.lib))
+      return '\n'.join(desc)+'\n'
+    else:
+      return ''
+
+  def setupHelp(self, help):
+    '''Register the PETSc command line options, grouped by configuration phase'''
+    import nargs
+    help.addArgument('PETSc', '-with-petsc=<bool>',                nargs.ArgBool(None, 1, 'Activate PETSc'))
+    # Location options
+    help.addArgument('PETSc', '-with-petsc-dir=<root dir>',        nargs.ArgDir(None, None, 'Specify the root directory of the PETSc installation'))
+    help.addArgument('PETSc', '-with-petsc-arch=<arch>',           nargs.Arg(None, None, 'Specify PETSC_ARCH'))
+    # Construction options
+    help.addArgument('PETSc', '-download-petsc=<no,yes,ifneeded>', nargs.ArgFuzzyBool(None, 0, 'Install PETSc'))
+    # Testing options
+    help.addArgument('PETSc', '-with-petsc-shared=<bool>',         nargs.ArgBool(None, 1, 'Require that the PETSc library be shared'))
+    return
+
+  def setupPackageDependencies(self, framework):
+    '''Locate a previously configured PETSc and splice its cached configuration
+       into the current framework; otherwise fall back to downloading PETSc.'''
+    import sys
+
+    petscConf = None
+    for (name, (petscDir, petscArch)) in self.getLocations():
+      # Make the candidate's python/ directory importable so its configure
+      # modules can be loaded by loadFramework()
+      petscPythonDir = os.path.join(petscDir, 'python')
+      sys.path.append(petscPythonDir)
+      confPath = os.path.join(petscDir, 'bmake', petscArch)
+      petscConf = framework.loadFramework(confPath)
+      if petscConf:
+        self.logPrint('Loaded PETSc-AS configuration ('+name+') from '+confPath)
+        self.location = (petscDir, petscArch)
+        self.trial[self.location] = name
+        break
+      else:
+        self.logPrint('PETSc-AS has no cached configuration in '+confPath)
+        # Remove the LAST occurrence of petscPythonDir (reverse/remove/reverse)
+        sys.path.reverse()
+        sys.path.remove(petscPythonDir)
+        sys.path.reverse()
+    if not petscConf:
+      # NOTE(review): downloadPETSc() currently always raises, so the call to
+      # addPackageDependency below is only reached with a loaded configuration;
+      # confPath would otherwise be unbound if getLocations() yielded nothing
+      self.downloadPETSc()
+    framework.addPackageDependency(petscConf, confPath)
+    return
+
+  def setupDependencies(self, framework):
+    '''Require the configure objects this module consults: language selection,
+       compilers, headers, libraries, BLAS/LAPACK and MPI'''
+    config.base.Configure.setupDependencies(self, framework)
+    self.languages  = framework.require('PETSc.utilities.languages', self)
+    self.compilers  = framework.require('config.compilers', self)
+    self.headers    = framework.require('config.headers', self)
+    self.libraries  = framework.require('config.libraries', self)
+    self.blaslapack = framework.require('config.packages.BlasLapack', self)
+    self.mpi        = framework.require('config.packages.MPI', self)
+    return
+
+  def getPETScArch(self, petscDir):
+    '''Return the allowable PETSc architectures for a given root'''
+    # Precedence: explicit -with-petsc-arch flag, then the PETSC_ARCH
+    # environment variable, then every subdirectory under <petscDir>/bmake
+    if 'with-petsc-arch' in self.framework.argDB:
+      yield self.framework.argDB['with-petsc-arch']
+    elif 'PETSC_ARCH' in os.environ:
+      yield os.environ['PETSC_ARCH']
+    else:
+      if os.path.isdir(os.path.join(petscDir, 'bmake')):
+        for d in os.listdir(os.path.join(petscDir, 'bmake')):
+          if not os.path.isdir(os.path.join(petscDir, 'bmake', d)):
+            continue
+          # Skip bmake directories that never hold an architecture configuration
+          if d in ['common', 'docsonly', 'SCCS']:
+            continue
+          yield d
+    return
+
+  def getLocations(self):
+    '''Return all allowable locations for PETSc'''
+    if hasattr(self, '_configured'):
+      key =(self.dir, self.arch)
+      yield (self.working[key], key)
+      raise InvalidPETScError('Configured PETSc is not usable')
+    if self.framework.argDB['download-petsc'] == 1:
+      yield self.downloadPETSc()
+      raise InvalidPETScError('Downloaded PETSc is not usable')
+    if 'with-petsc-dir' in self.framework.argDB:
+      petscDir = self.framework.argDB['with-petsc-dir']
+      for petscArch in self.getPETScArch(petscDir):
+        yield ('User specified installation root', (petscDir, petscArch))
+      raise InvalidPETScError('No working architecitures in '+str(petscDir))
+    elif 'PETSC_DIR' in os.environ:
+      petscDir = os.environ['PETSC_DIR']
+      for petscArch in self.getPETScArch(petscDir):
+        yield ('User specified installation root', (petscDir, petscArch))
+      raise InvalidPETScError('No working architecitures in '+str(petscDir))
+    else:
+      for petscArch in self.getPETScArch(petscDir):
+        yield ('Default compiler locations', ('', petscArch))
+      petscDirRE = re.compile(r'(PETSC|pets)c(-.*)?')
+      trialDirs = []
+      for packageDir in self.framework.argDB['package-dirs']:
+        if os.path.isdir(packageDir):
+          for d in os.listdir(packageDir):
+            if petscDirRE.match(d):
+              trialDirs.append(('Package directory installation root', os.path.join(packageDir, d)))
+      usrLocal = os.path.join('/usr', 'local')
+      if os.path.isdir(os.path.join('/usr', 'local')):
+        trialDirs.append(('Frequent user install location (/usr/local)', usrLocal))
+        for d in os.listdir(usrLocal):
+          if petscDirRE.match(d):
+            trialDirs.append(('Frequent user install location (/usr/local/'+d+')', os.path.join(usrLocal, d)))
+      if 'HOME' in os.environ and os.path.isdir(os.environ['HOME']):
+        for d in os.listdir(os.environ['HOME']):
+          if petscDirRE.match(d):
+            trialDirs.append(('Frequent user install location (~/'+d+')', os.path.join(os.environ['HOME'], d)))
+    return
+
+  def downloadPETSc(self):
+    '''Download and build PETSc when -download-petsc was requested.
+       Currently unimplemented: always raises RuntimeError.'''
+    if self.framework.argDB['download-petsc'] == 0:
+      raise RuntimeError('No functioning PETSc located')
+    # Download and build PETSc
+    #   Use only the already configured objects from this run
+    raise RuntimeError('Not implemented')
+
+  def getDir(self):
+    # Root directory of the currently selected location, or None
+    if self.location:
+      return self.location[0]
+    return None
+  dir = property(getDir, doc = 'The PETSc root directory')
+
+  def getArch(self):
+    # Architecture of the currently selected location, or None
+    if self.location:
+      return self.location[1]
+    return None
+  arch = property(getArch, doc = 'The PETSc architecture')
+
+  def getFound(self):
+    # True only when the selected location passed validation (is in working)
+    return self.location and self.location in self.working
+  found = property(getFound, doc = 'Did we find a valid PETSc installation')
+
+  def getName(self):
+    # working[location] is (name, include, lib, version); index 0 is the name
+    if self.location and self.location in self.working:
+      return self.working[self.location][0]
+    return None
+  name = property(getName, doc = 'The PETSc installation type')
+
+  def getInclude(self, useTrial = 0):
+    # Index 1 of the (name, include, lib, version) tuple; with useTrial a
+    # not-yet-validated candidate from self.trial is consulted as a fallback
+    if self.location and self.location in self.working:
+      return self.working[self.location][1]
+    elif useTrial and self.location and self.location in self.trial:
+      return self.trial[self.location][1]
+    return None
+  include = property(getInclude, doc = 'The PETSc include directories')
+
+  def getLib(self, useTrial = 0):
+    # Index 2 of the (name, include, lib, version) tuple; with useTrial a
+    # not-yet-validated candidate from self.trial is consulted as a fallback
+    if self.location and self.location in self.working:
+      return self.working[self.location][2]
+    elif useTrial and self.location and self.location in self.trial:
+      return self.trial[self.location][2]
+    return None
+  lib = property(getLib, doc = 'The PETSc libraries')
+
+  def getVersion(self):
+    # Index 3 of the (name, include, lib, version) tuple; validated only
+    if self.location and self.location in self.working:
+      return self.working[self.location][3]
+    return None
+  version = property(getVersion, doc = 'The PETSc version')
+
+  def getOtherIncludes(self):
+    # Default: the MPI include arguments, joined into one flag string.
+    # Note the default is recomputed on every access until the setter is used.
+    if not hasattr(self, '_otherIncludes'):
+      includes = []
+      includes.extend([self.headers.getIncludeArgument(inc) for inc in self.mpi.include])
+      return ' '.join(includes)
+    return self._otherIncludes
+  def setOtherIncludes(self, otherIncludes):
+    # Explicit override; replaces the computed default entirely
+    self._otherIncludes = otherIncludes
+  otherIncludes = property(getOtherIncludes, setOtherIncludes, doc = 'Includes needed to compile PETSc')
+
+  def getOtherLibs(self):
+    # Default: Fortran runtime libs plus MPI and BLAS/LAPACK, as a list.
+    # Note the default is recomputed on every access until the setter is used.
+    if not hasattr(self, '_otherLibs'):
+      libs = self.compilers.flibs[:]
+      libs.extend(self.mpi.lib)
+      libs.extend(self.blaslapack.lib)
+      return libs
+    return self._otherLibs
+  def setOtherLibs(self, otherLibs):
+    # Explicit override; replaces the computed default entirely
+    self._otherLibs = otherLibs
+  otherLibs = property(getOtherLibs, setOtherLibs, doc = 'Libraries needed to link PETSc')
+
+  def checkLib(self, libraries):
+    '''Check for PETSc functions in libraries, which can be a list of libraries or a single library
+       - PetscInitializeNoArguments from libpetsc
+       - VecDestroy from libpetscvec
+       - MatDestroy from libpetscmat
+       - DADestroy from libpetscdm
+       - KSPDestroy from libpetscksp
+       - SNESDestroy from libpetscsnes
+       - TSDestroy from libpetscts
+       '''
+    if not isinstance(libraries, list): libraries = [libraries]
+    oldLibs = self.compilers.LIBS
+    self.libraries.pushLanguage(self.languages.clanguage)
+    # Each check supplies its own minimal prototype; symbols are C++-mangled
+    # when the chosen language has no plain C support
+    found   = (self.libraries.check(libraries, 'PetscInitializeNoArguments', otherLibs = self.otherLibs, prototype = 'int PetscInitializeNoArguments(void);', cxxMangle = not self.languages.cSupport) and
+               self.libraries.check(libraries, 'VecDestroy', otherLibs = self.otherLibs, prototype = 'typedef struct _p_Vec *Vec;int VecDestroy(Vec);', call = 'VecDestroy((Vec) 0)', cxxMangle = not self.languages.cSupport) and
+               self.libraries.check(libraries, 'MatDestroy', otherLibs = self.otherLibs, prototype = 'typedef struct _p_Mat *Mat;int MatDestroy(Mat);', call = 'MatDestroy((Mat) 0)', cxxMangle = not self.languages.cSupport) and
+               self.libraries.check(libraries, 'DADestroy', otherLibs = self.otherLibs, prototype = 'typedef struct _p_DA *DA;int DADestroy(DA);', call = 'DADestroy((DA) 0)', cxxMangle = not self.languages.cSupport) and
+               self.libraries.check(libraries, 'KSPDestroy', otherLibs = self.otherLibs, prototype = 'typedef struct _p_KSP *KSP;int KSPDestroy(KSP);', call = 'KSPDestroy((KSP) 0)', cxxMangle = not self.languages.cSupport) and
+               self.libraries.check(libraries, 'SNESDestroy', otherLibs = self.otherLibs, prototype = 'typedef struct _p_SNES *SNES;int SNESDestroy(SNES);', call = 'SNESDestroy((SNES) 0)', cxxMangle = not self.languages.cSupport) and
+               self.libraries.check(libraries, 'TSDestroy', otherLibs = self.otherLibs, prototype = 'typedef struct _p_TS *TS;int TSDestroy(TS);', call = 'TSDestroy((TS) 0)', cxxMangle = not self.languages.cSupport))
+    self.libraries.popLanguage()
+    self.compilers.LIBS = oldLibs
+    return found
+
+  def checkInclude(self, includeDir):
+    '''Check that petsc.h is present'''
+    oldFlags = self.compilers.CPPFLAGS
+    self.compilers.CPPFLAGS += ' '.join([self.headers.getIncludeArgument(inc) for inc in includeDir])
+    if self.otherIncludes:
+      self.compilers.CPPFLAGS += ' '+self.otherIncludes
+    self.pushLanguage(self.languages.clanguage)
+    found = self.checkPreprocess('#include <petsc.h>\n')
+    self.popLanguage()
+    self.compilers.CPPFLAGS = oldFlags
+    return found
+
+  def checkPETScLink(self, includes, body, cleanup = 1, codeBegin = None, codeEnd = None, shared = None):
+    '''Analogous to checkLink(), but the PETSc includes and libraries are automatically provided'''
+    success  = 0
+    oldFlags = self.compilers.CPPFLAGS
+    self.compilers.CPPFLAGS += ' '.join([self.headers.getIncludeArgument(inc) for inc in self.getInclude(useTrial = 1)])
+    if self.otherIncludes:
+      self.compilers.CPPFLAGS += ' '+self.otherIncludes
+    oldLibs  = self.compilers.LIBS
+    self.compilers.LIBS = ' '.join([self.libraries.getLibArgument(lib) for lib in self.getLib(useTrial = 1)+self.otherLibs])+' '+self.compilers.LIBS
+    if self.checkLink(includes, body, cleanup, codeBegin, codeEnd, shared):
+      success = 1
+    self.compilers.CPPFLAGS = oldFlags
+    self.compilers.LIBS     = oldLibs
+    return success
+
+  def checkWorkingLink(self):
+    '''Checking that we can link a PETSc executable'''
+    self.pushLanguage(self.languages.clanguage)
+    if not self.checkPETScLink('#include <petsclog.h>\n', 'PetscLogDouble time;\nPetscErrorCode ierr;\n\nierr = PetscGetTime(&time); CHKERRQ(ierr);\n'):
+      self.logPrint('PETSc cannot link, which indicates a problem with the PETSc installation')
+      return 0
+    self.logPrint('PETSc can link with '+self.languages.clanguage)
+    self.popLanguage()
+
+    if hasattr(self.compilers, 'CXX') and self.languages.clanguage == 'C':
+      self.pushLanguage('C++')
+      self.sourceExtension = '.C'
+      if not self.checkPETScLink('#define PETSC_USE_EXTERN_CXX\n#include <petsc.h>\n', 'PetscLogDouble time;\nPetscErrorCode ierr;\n\nierr = PetscGetTime(&time); CHKERRQ(ierr);\n'):
+        self.logPrint('PETSc cannot link C++ but can link C, which indicates a problem with the PETSc installation')
+        self.popLanguage()
+        return 0
+      self.popLanguage()
+      self.logPrint('PETSc can link with C++')
+    
+    if hasattr(self.compilers, 'FC'):
+      self.pushLanguage('FC')
+      self.sourceExtension = '.F'
+      if not self.checkPETScLink('', '          integer ierr\n          real time\n          call PetscGetTime(time, ierr)\n'):
+        self.logPrint('PETSc cannot link Fortran, but can link C, which indicates a problem with the PETSc installation\nRun with -with-fc=0 if you do not wish to use Fortran')
+        self.popLanguage()
+        return 0
+      self.popLanguage()
+      self.logPrint('PETSc can link with Fortran')
+    return 1
+
+  def checkSharedLibrary(self, libraries):
+    '''Check that the libraries for PETSc are shared libraries'''
+    # NOTE(review): config.setCompilers is not imported by this module; this
+    # relies on it being loaded elsewhere as an attribute of the config
+    # package -- confirm
+    if config.setCompilers.Configure.isDarwin():
+      # on Apple if you list the MPI libraries again you will generate multiply defined errors 
+      # since they are already copied into the PETSc dynamic library.
+      self.setOtherLibs([])
+    self.pushLanguage(self.languages.clanguage)
+    isShared = self.libraries.checkShared('#include <petsc.h>\n', 'PetscInitialize', 'PetscInitialized', 'PetscFinalize', checkLink = self.checkPETScLink, libraries = libraries, initArgs = '&argc, &argv, 0, 0', boolType = 'PetscTruth', executor = self.mpi.mpirun)
+    self.popLanguage()
+    return isShared
+
+  def configureVersion(self):
+    '''Determine the PETSc version'''
+    majorRE    = re.compile(r'^#define PETSC_VERSION_MAJOR([\s]+)(?P<versionNum>\d+)[\s]*$');
+    minorRE    = re.compile(r'^#define PETSC_VERSION_MINOR([\s]+)(?P<versionNum>\d+)[\s]*$');
+    subminorRE = re.compile(r'^#define PETSC_VERSION_SUBMINOR([\s]+)(?P<versionNum>\d+)[\s]*$');
+    patchRE    = re.compile(r'^#define PETSC_VERSION_PATCH([\s]+)(?P<patchNum>\d+)[\s]*$');
+    dateRE     = re.compile(r'^#define PETSC_VERSION_DATE([\s]+)"(?P<date>(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) \d\d?, \d\d\d\d)"[\s]*$');
+    input   = file(os.path.join(self.dir, 'include', 'petscversion.h'))
+    lines   = []
+    majorNum = 'Unknown'
+    minorNum = 'Unknown'
+    subminorNum = 'Unknown'
+    patchNum = 'Unknown'
+    self.date = 'Unknown'
+    for line in input.readlines():
+      m1 = majorRE.match(line)
+      m2 = minorRE.match(line)
+      m3 = subminorRE.match(line)
+      m4 = patchRE.match(line)
+      m5 = dateRE.match(line)
+      if m1:
+        majorNum = int(m1.group('versionNum'))
+      elif m2:
+        minorNum = int(m2.group('versionNum'))
+      elif m3:
+        subminorNum = int(m3.group('versionNum'))
+
+      if m4:
+        patchNum = int(m4.group('patchNum'))+1
+        lines.append('#define PETSC_VERSION_PATCH'+m4.group(1)+str(patchNum)+'\n')
+      elif m5:
+        self.date = time.strftime('%b %d, %Y', time.localtime(time.time()))
+        lines.append('#define PETSC_VERSION_DATE'+m5.group(1)+'"'+self.date+'"\n')
+      else:
+        lines.append(line)
+    input.close()
+    self.logPrint('Found PETSc version (%s,%s,%s) patch %s on %s' % (majorNum, minorNum, subminorNum, patchNum, self.date))
+    return '%d.%d.%d' % (majorNum, minorNum, subminorNum)
+
+  def includeGuesses(self, path = None):
+    '''Return all include directories present in path or its ancestors'''
+    # With no path, offer the empty guess (compiler default search paths)
+    if not path:
+      yield []
+    # Walk up the directory tree; wherever an include/ subdirectory exists,
+    # yield it together with the matching bmake/<arch> configuration directory
+    while path:
+      dir = os.path.join(path, 'include')
+      if os.path.isdir(dir):
+        yield [dir, os.path.join(path, 'bmake', self.arch)]
+      if path == '/':
+        return
+      path = os.path.dirname(path)
+    return
+
+  def libraryGuesses(self, root = None):
+    '''Return standard library name guesses for a given installation root'''
+    # Component libraries, most dependent first; '' yields the base libpetsc
+    libs = ['ts', 'snes', 'ksp', 'dm', 'mat', 'vec', '']
+    if root:
+      d = os.path.join(root, 'lib', self.arch)
+      if not os.path.isdir(d):
+        # NOTE(review): empty log message at priority 3 -- presumably meant to
+        # report the missing lib/<arch> directory; confirm intent
+        self.logPrint('', 3, 'petsc')
+        return
+      yield [os.path.join(d, 'libpetsc'+lib+'.a') for lib in libs]
+    else:
+      # No root: rely on the linker's default library search path
+      yield ['libpetsc'+lib+'.a' for lib in libs]
+    return
+
+  def configureLibrary(self):
+    '''Find a working PETSc
+       - Right now, C++ builds are required to use PETSC_USE_EXTERN_CXX'''
+    for location, name in self.trial.items():
+      self.framework.logPrintDivider()
+      self.framework.logPrint('Checking for a functional PETSc in '+name+', location/origin '+str(location))
+      lib     = None
+      include = None
+      found   = 0
+      for libraries in self.libraryGuesses(location[0]):
+        if self.checkLib(libraries):
+          lib = libraries
+          for includeDir in self.includeGuesses(location[0]):
+            if self.checkInclude(includeDir):
+              include = includeDir
+              self.trial[location] = (name, include, lib, 'Unknown')
+              if self.executeTest(self.checkWorkingLink):
+                found = 1
+                break
+              else:
+                self.framework.logPrintDivider(single = 1)
+                self.framework.logPrint('PETSc in '+name+', location/origin '+str(location)+' failed checkWorkingLink test')
+            else:
+              self.framework.logPrintDivider(single = 1)
+              self.framework.logPrint('PETSc in '+name+', location/origin '+str(location)+' failed checkInclude test with includeDir: '+str(includeDir))
+          if not found:
+            self.framework.logPrintDivider(single = 1)
+            self.framework.logPrint('PETSc in '+name+', location/origin '+str(location)+' failed checkIncludes test')
+            continue
+        else:
+          self.framework.logPrintDivider(single = 1)
+          self.framework.logPrint('PETSc in '+name+', location/origin '+str(location)+' failed checkLib test with libraries: '+str(libraries))
+          continue
+        if self.framework.argDB['with-petsc-shared']:
+          if not self.executeTest(self.checkSharedLibrary, [libraries]):
+            self.framework.logPrintDivider(single = 1)
+            self.framework.logPrint('PETSc in '+name+', location/origin '+str(location)+' failed checkSharedLibrary test with libraries: '+str(libraries))
+            found = 0
+        if found:
+          break
+      if found:
+        version = self.executeTest(self.configureVersion)
+        self.working[location] = (name, include, lib, version)
+        break
+    if found:
+      self.logPrint('Choose PETSc '+self.version+' in '+self.name)
+    else:
+      raise RuntimeError('Could not locate any functional PETSc')
+    return
+
+  def setOutput(self):
+    '''Add defines and substitutions
+       - HAVE_PETSC is defined if a working PETSc is found
+       - PETSC_INCLUDE and PETSC_LIB are command line arguments for the compile and link'''
+    if self.found:
+      self.addDefine('HAVE_PETSC', 1)
+      self.addSubstitution('PETSC_INCLUDE', ' '.join([self.headers.getIncludeArgument(inc) for inc in self.include]))
+      self.addSubstitution('PETSC_LIB', ' '.join(map(self.libraries.getLibArgument, self.lib)))
+    return
+
+  def configure(self):
+    '''Run the library search and publish the results as defines/substitutions'''
+    self.executeTest(self.configureLibrary)
+    self.setOutput()
+    return
+
+if __name__ == '__main__':
+  # Allow this configure module to be run standalone for debugging
+  import config.framework
+  import sys
+  framework = config.framework.Framework(sys.argv[1:])
+  framework.setup()
+  framework.addChild(Configure(framework))
+  framework.configure()
+  framework.dumpSubstitutions()


Property changes on: long/3D/Gale/trunk/python/BuildSystem/config/packages/PETSc.py
___________________________________________________________________
Name: svn:mime-type
   + text/script

Added: long/3D/Gale/trunk/python/BuildSystem/config/packages/__init__.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/packages/__init__.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/packages/__init__.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1 @@
+# Configure modules available in config.packages; note this deliberately-named
+# ``all`` (shadowing the builtin) is presumably what the framework enumerates
+# -- verify against the loader before renaming
+all = ['BlasLapack', 'MPI', 'PETSc']

Added: long/3D/Gale/trunk/python/BuildSystem/config/packages/config.guess
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/packages/config.guess	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/packages/config.guess	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,1465 @@
+#! /bin/sh
+# Attempt to guess a canonical system name.
+#   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+#   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+
+timestamp='2005-04-22'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# Originally written by Per Bothner <per at bothner.com>.
+# Please send patches to <config-patches at gnu.org>.  Submit a context
+# diff and a properly formatted ChangeLog entry.
+#
+# This script attempts to guess a canonical system name similar to
+# config.sub.  If it succeeds, it prints the system name on stdout, and
+# exits with 0.  Otherwise, it exits with 1.
+#
+# The plan is that this can be called by configure scripts if you
+# don't specify an explicit build system type.
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of the system \`$me' is run on.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches at gnu.org>."
+
+version="\
+GNU config.guess ($timestamp)
+
+Originally written by Per Bothner.
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit 0 ;;
+    --version | -v )
+       echo "$version" ; exit 0 ;;
+    --help | --h* | -h )
+       echo "$usage"; exit 0 ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help" >&2
+       exit 1 ;;
+    * )
+       break ;;
+  esac
+done
+
+if test $# != 0; then
+  echo "$me: too many arguments$help" >&2
+  exit 1
+fi
+
+trap 'exit 1' 1 2 15
+
+# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+# compiler to aid in system detection is discouraged as it requires
+# temporary files to be created and, as you can see below, it is a
+# headache to deal with in a portable fashion.
+
+# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
+# use `HOST_CC' if defined, but it is deprecated.
+
+# Portable tmp directory creation inspired by the Autoconf team.
+
+set_cc_for_build='
+trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
+trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
+: ${TMPDIR=/tmp} ;
+ { tmp=`(umask 077 && mktemp -d -q "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
+ { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
+ { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
+dummy=$tmp/dummy ;
+tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
+case $CC_FOR_BUILD,$HOST_CC,$CC in
+ ,,)    echo "int x;" > $dummy.c ;
+	for c in cc gcc c89 c99 ; do
+	  if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then
+	     CC_FOR_BUILD="$c"; break ;
+	  fi ;
+	done ;
+	if test x"$CC_FOR_BUILD" = x ; then
+	  CC_FOR_BUILD=no_compiler_found ;
+	fi
+	;;
+ ,,*)   CC_FOR_BUILD=$CC ;;
+ ,*,*)  CC_FOR_BUILD=$HOST_CC ;;
+esac ;'
+
+# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
+# (ghazi at noc.rutgers.edu 1994-08-24)
+if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
+	PATH=$PATH:/.attbin ; export PATH
+fi
+
+UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
+UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
+UNAME_SYSTEM=`(uname -s) 2>/dev/null`  || UNAME_SYSTEM=unknown
+UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
+# Note: order is significant - the case branches are not exclusive.
+
+case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
+    *:NetBSD:*:*)
+	# NetBSD (nbsd) targets should (where applicable) match one or
+	# more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*,
+	# *-*-netbsdecoff* and *-*-netbsd*.  For targets that recently
+	# switched to ELF, *-*-netbsd* would select the old
+	# object file format.  This provides both forward
+	# compatibility and a consistent mechanism for selecting the
+	# object file format.
+	#
+	# Note: NetBSD doesn't particularly care about the vendor
+	# portion of the name.  We always set it to "unknown".
+	sysctl="sysctl -n hw.machine_arch"
+	UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
+	    /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
+	case "${UNAME_MACHINE_ARCH}" in
+	    armeb) machine=armeb-unknown ;;
+	    arm*) machine=arm-unknown ;;
+	    sh3el) machine=shl-unknown ;;
+	    sh3eb) machine=sh-unknown ;;
+	    *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
+	esac
+	# The Operating System including object format, if it has switched
+	# to ELF recently, or will in the future.
+	case "${UNAME_MACHINE_ARCH}" in
+	    arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+		eval $set_cc_for_build
+		if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+			| grep __ELF__ >/dev/null
+		then
+		    # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
+		    # Return netbsd for either.  FIX?
+		    os=netbsd
+		else
+		    os=netbsdelf
+		fi
+		;;
+	    *)
+	        os=netbsd
+		;;
+	esac
+	# The OS release
+	# Debian GNU/NetBSD machines have a different userland, and
+	# thus, need a distinct triplet. However, they do not need
+	# kernel version information, so it can be replaced with a
+	# suitable tag, in the style of linux-gnu.
+	case "${UNAME_VERSION}" in
+	    Debian*)
+		release='-gnu'
+		;;
+	    *)
+		release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
+		;;
+	esac
+	# Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+	# contains redundant information, the shorter form:
+	# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+	echo "${machine}-${os}${release}"
+	exit 0 ;;
+    amd64:OpenBSD:*:*)
+	echo x86_64-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    amiga:OpenBSD:*:*)
+	echo m68k-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    cats:OpenBSD:*:*)
+	echo arm-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    hp300:OpenBSD:*:*)
+	echo m68k-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    luna88k:OpenBSD:*:*)
+    	echo m88k-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    mac68k:OpenBSD:*:*)
+	echo m68k-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    macppc:OpenBSD:*:*)
+	echo powerpc-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    mvme68k:OpenBSD:*:*)
+	echo m68k-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    mvme88k:OpenBSD:*:*)
+	echo m88k-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    mvmeppc:OpenBSD:*:*)
+	echo powerpc-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    sgi:OpenBSD:*:*)
+	echo mips64-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    sun3:OpenBSD:*:*)
+	echo m68k-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    *:OpenBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-openbsd${UNAME_RELEASE}
+	exit 0 ;;
+    *:ekkoBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE}
+	exit 0 ;;
+    macppc:MirBSD:*:*)
+	echo powerppc-unknown-mirbsd${UNAME_RELEASE}
+	exit 0 ;;
+    *:MirBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE}
+	exit 0 ;;
+    alpha:OSF1:*:*)
+	case $UNAME_RELEASE in
+	*4.0)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+		;;
+	*5.*)
+	        UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+		;;
+	esac
+	# According to Compaq, /usr/sbin/psrinfo has been available on
+	# OSF/1 and Tru64 systems produced since 1995.  I hope that
+	# covers most systems running today.  This code pipes the CPU
+	# types through head -n 1, so we only detect the type of CPU 0.
+	ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^  The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+	case "$ALPHA_CPU_TYPE" in
+	    "EV4 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV4.5 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "LCA4 (21066/21068)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV5 (21164)")
+		UNAME_MACHINE="alphaev5" ;;
+	    "EV5.6 (21164A)")
+		UNAME_MACHINE="alphaev56" ;;
+	    "EV5.6 (21164PC)")
+		UNAME_MACHINE="alphapca56" ;;
+	    "EV5.7 (21164PC)")
+		UNAME_MACHINE="alphapca57" ;;
+	    "EV6 (21264)")
+		UNAME_MACHINE="alphaev6" ;;
+	    "EV6.7 (21264A)")
+		UNAME_MACHINE="alphaev67" ;;
+	    "EV6.8CB (21264C)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8AL (21264B)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8CX (21264D)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.9A (21264/EV69A)")
+		UNAME_MACHINE="alphaev69" ;;
+	    "EV7 (21364)")
+		UNAME_MACHINE="alphaev7" ;;
+	    "EV7.9 (21364A)")
+		UNAME_MACHINE="alphaev79" ;;
+	esac
+	# A Pn.n version is a patched version.
+	# A Vn.n version is a released version.
+	# A Tn.n version is a released field test version.
+	# A Xn.n version is an unreleased experimental baselevel.
+	# 1.2 uses "1.2" for uname -r.
+	echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+	exit 0 ;;
+    Alpha\ *:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# Should we change UNAME_MACHINE based on the output of uname instead
+	# of the specific Alpha model?
+	echo alpha-pc-interix
+	exit 0 ;;
+    21064:Windows_NT:50:3)
+	echo alpha-dec-winnt3.5
+	exit 0 ;;
+    Amiga*:UNIX_System_V:4.0:*)
+	echo m68k-unknown-sysv4
+	exit 0;;
+    *:[Aa]miga[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-amigaos
+	exit 0 ;;
+    *:[Mm]orph[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-morphos
+	exit 0 ;;
+    *:OS/390:*:*)
+	echo i370-ibm-openedition
+	exit 0 ;;
+    *:z/VM:*:*)
+	echo s390-ibm-zvmoe
+	exit 0 ;;
+    *:OS400:*:*)
+        echo powerpc-ibm-os400
+	exit 0 ;;
+    arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+	echo arm-acorn-riscix${UNAME_RELEASE}
+	exit 0;;
+    SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+	echo hppa1.1-hitachi-hiuxmpp
+	exit 0;;
+    Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
+	# akee at wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
+	if test "`(/bin/universe) 2>/dev/null`" = att ; then
+		echo pyramid-pyramid-sysv3
+	else
+		echo pyramid-pyramid-bsd
+	fi
+	exit 0 ;;
+    NILE*:*:*:dcosx)
+	echo pyramid-pyramid-svr4
+	exit 0 ;;
+    DRS?6000:unix:4.0:6*)
+	echo sparc-icl-nx6
+	exit 0 ;;
+    DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
+	case `/usr/bin/uname -p` in
+	    sparc) echo sparc-icl-nx7 && exit 0 ;;
+	esac ;;
+    sun4H:SunOS:5.*:*)
+	echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit 0 ;;
+    sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
+	echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit 0 ;;
+    i86pc:SunOS:5.*:*)
+	echo i386-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit 0 ;;
+    sun4*:SunOS:6*:*)
+	# According to config.sub, this is the proper way to canonicalize
+	# SunOS6.  Hard to guess exactly what SunOS6 will be like, but
+	# it's likely to be more like Solaris than SunOS4.
+	echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit 0 ;;
+    sun4*:SunOS:*:*)
+	case "`/usr/bin/arch -k`" in
+	    Series*|S4*)
+		UNAME_RELEASE=`uname -v`
+		;;
+	esac
+	# Japanese Language versions have a version number like `4.1.3-JL'.
+	echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'`
+	exit 0 ;;
+    sun3*:SunOS:*:*)
+	echo m68k-sun-sunos${UNAME_RELEASE}
+	exit 0 ;;
+    sun*:*:4.2BSD:*)
+	UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+	test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
+	case "`/bin/arch`" in
+	    sun3)
+		echo m68k-sun-sunos${UNAME_RELEASE}
+		;;
+	    sun4)
+		echo sparc-sun-sunos${UNAME_RELEASE}
+		;;
+	esac
+	exit 0 ;;
+    aushp:SunOS:*:*)
+	echo sparc-auspex-sunos${UNAME_RELEASE}
+	exit 0 ;;
+    # The situation for MiNT is a little confusing.  The machine name
+    # can be virtually everything (everything which is not
+    # "atarist" or "atariste" at least should have a processor
+    # > m68000).  The system name ranges from "MiNT" through "FreeMiNT"
+    # to the lowercase version "mint" (or "freemint").  Finally
+    # the system name "TOS" denotes a system which is actually not
+    # MiNT.  But MiNT is backward compatible with TOS, so this should
+    # be no problem.
+    atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+        echo m68k-atari-mint${UNAME_RELEASE}
+	exit 0 ;;
+    atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+        exit 0 ;;
+    *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+        echo m68k-atari-mint${UNAME_RELEASE}
+	exit 0 ;;
+    milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+        echo m68k-milan-mint${UNAME_RELEASE}
+        exit 0 ;;
+    hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+        echo m68k-hades-mint${UNAME_RELEASE}
+        exit 0 ;;
+    *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+        echo m68k-unknown-mint${UNAME_RELEASE}
+        exit 0 ;;
+    m68k:machten:*:*)
+	echo m68k-apple-machten${UNAME_RELEASE}
+	exit 0 ;;
+    powerpc:machten:*:*)
+	echo powerpc-apple-machten${UNAME_RELEASE}
+	exit 0 ;;
+    RISC*:Mach:*:*)
+	echo mips-dec-mach_bsd4.3
+	exit 0 ;;
+    RISC*:ULTRIX:*:*)
+	echo mips-dec-ultrix${UNAME_RELEASE}
+	exit 0 ;;
+    VAX*:ULTRIX*:*:*)
+	echo vax-dec-ultrix${UNAME_RELEASE}
+	exit 0 ;;
+    2020:CLIX:*:* | 2430:CLIX:*:*)
+	echo clipper-intergraph-clix${UNAME_RELEASE}
+	exit 0 ;;
+    mips:*:*:UMIPS | mips:*:*:RISCos)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+#ifdef __cplusplus
+#include <stdio.h>  /* for printf() prototype */
+	int main (int argc, char *argv[]) {
+#else
+	int main (argc, argv) int argc; char *argv[]; {
+#endif
+	#if defined (host_mips) && defined (MIPSEB)
+	#if defined (SYSTYPE_SYSV)
+	  printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_SVR4)
+	  printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
+	  printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0);
+	#endif
+	#endif
+	  exit (-1);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c \
+	  && $dummy `echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` \
+	  && exit 0
+	echo mips-mips-riscos${UNAME_RELEASE}
+	exit 0 ;;
+    Motorola:PowerMAX_OS:*:*)
+	echo powerpc-motorola-powermax
+	exit 0 ;;
+    Motorola:*:4.3:PL8-*)
+	echo powerpc-harris-powermax
+	exit 0 ;;
+    Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
+	echo powerpc-harris-powermax
+	exit 0 ;;
+    Night_Hawk:Power_UNIX:*:*)
+	echo powerpc-harris-powerunix
+	exit 0 ;;
+    m88k:CX/UX:7*:*)
+	echo m88k-harris-cxux7
+	exit 0 ;;
+    m88k:*:4*:R4*)
+	echo m88k-motorola-sysv4
+	exit 0 ;;
+    m88k:*:3*:R3*)
+	echo m88k-motorola-sysv3
+	exit 0 ;;
+    AViiON:dgux:*:*)
+        # DG/UX returns AViiON for all architectures
+        UNAME_PROCESSOR=`/usr/bin/uname -p`
+	if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
+	then
+	    if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
+	       [ ${TARGET_BINARY_INTERFACE}x = x ]
+	    then
+		echo m88k-dg-dgux${UNAME_RELEASE}
+	    else
+		echo m88k-dg-dguxbcs${UNAME_RELEASE}
+	    fi
+	else
+	    echo i586-dg-dgux${UNAME_RELEASE}
+	fi
+ 	exit 0 ;;
+    M88*:DolphinOS:*:*)	# DolphinOS (SVR3)
+	echo m88k-dolphin-sysv3
+	exit 0 ;;
+    M88*:*:R3*:*)
+	# Delta 88k system running SVR3
+	echo m88k-motorola-sysv3
+	exit 0 ;;
+    XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
+	echo m88k-tektronix-sysv3
+	exit 0 ;;
+    Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
+	echo m68k-tektronix-bsd
+	exit 0 ;;
+    *:IRIX*:*:*)
+	echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'`
+	exit 0 ;;
+    ????????:AIX?:[12].1:2)   # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
+	echo romp-ibm-aix      # uname -m gives an 8 hex-code CPU id
+	exit 0 ;;              # Note that: echo "'`uname -s`'" gives 'AIX '
+    i*86:AIX:*:*)
+	echo i386-ibm-aix
+	exit 0 ;;
+    ia64:AIX:*:*)
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${UNAME_MACHINE}-ibm-aix${IBM_REV}
+	exit 0 ;;
+    *:AIX:2:3)
+	if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
+		eval $set_cc_for_build
+		sed 's/^		//' << EOF >$dummy.c
+		#include <sys/systemcfg.h>
+
+		main()
+			{
+			if (!__power_pc())
+				exit(1);
+			puts("powerpc-ibm-aix3.2.5");
+			exit(0);
+			}
+EOF
+		$CC_FOR_BUILD -o $dummy $dummy.c && $dummy && exit 0
+		echo rs6000-ibm-aix3.2.5
+	elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
+		echo rs6000-ibm-aix3.2.4
+	else
+		echo rs6000-ibm-aix3.2
+	fi
+	exit 0 ;;
+    *:AIX:*:[45])
+	IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
+	if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then
+		IBM_ARCH=rs6000
+	else
+		IBM_ARCH=powerpc
+	fi
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${IBM_ARCH}-ibm-aix${IBM_REV}
+	exit 0 ;;
+    *:AIX:*:*)
+	echo rs6000-ibm-aix
+	exit 0 ;;
+    ibmrt:4.4BSD:*|romp-ibm:BSD:*)
+	echo romp-ibm-bsd4.4
+	exit 0 ;;
+    ibmrt:*BSD:*|romp-ibm:BSD:*)            # covers RT/PC BSD and
+	echo romp-ibm-bsd${UNAME_RELEASE}   # 4.3 with uname added to
+	exit 0 ;;                           # report: romp-ibm BSD 4.3
+    *:BOSX:*:*)
+	echo rs6000-bull-bosx
+	exit 0 ;;
+    DPX/2?00:B.O.S.:*:*)
+	echo m68k-bull-sysv3
+	exit 0 ;;
+    9000/[34]??:4.3bsd:1.*:*)
+	echo m68k-hp-bsd
+	exit 0 ;;
+    hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
+	echo m68k-hp-bsd4.4
+	exit 0 ;;
+    9000/[34678]??:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	case "${UNAME_MACHINE}" in
+	    9000/31? )            HP_ARCH=m68000 ;;
+	    9000/[34]?? )         HP_ARCH=m68k ;;
+	    9000/[678][0-9][0-9])
+		if [ -x /usr/bin/getconf ]; then
+		    sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+                    sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+                    case "${sc_cpu_version}" in
+                      523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
+                      528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
+                      532)                      # CPU_PA_RISC2_0
+                        case "${sc_kernel_bits}" in
+                          32) HP_ARCH="hppa2.0n" ;;
+                          64) HP_ARCH="hppa2.0w" ;;
+			  '') HP_ARCH="hppa2.0" ;;   # HP-UX 10.20
+                        esac ;;
+                    esac
+		fi
+		if [ "${HP_ARCH}" = "" ]; then
+		    eval $set_cc_for_build
+		    sed 's/^              //' << EOF >$dummy.c
+
+              #define _HPUX_SOURCE
+              #include <stdlib.h>
+              #include <unistd.h>
+
+              int main ()
+              {
+              #if defined(_SC_KERNEL_BITS)
+                  long bits = sysconf(_SC_KERNEL_BITS);
+              #endif
+                  long cpu  = sysconf (_SC_CPU_VERSION);
+
+                  switch (cpu)
+              	{
+              	case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+              	case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+              	case CPU_PA_RISC2_0:
+              #if defined(_SC_KERNEL_BITS)
+              	    switch (bits)
+              		{
+              		case 64: puts ("hppa2.0w"); break;
+              		case 32: puts ("hppa2.0n"); break;
+              		default: puts ("hppa2.0"); break;
+              		} break;
+              #else  /* !defined(_SC_KERNEL_BITS) */
+              	    puts ("hppa2.0"); break;
+              #endif
+              	default: puts ("hppa1.0"); break;
+              	}
+                  exit (0);
+              }
+EOF
+		    (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
+		    test -z "$HP_ARCH" && HP_ARCH=hppa
+		fi ;;
+	esac
+	if [ ${HP_ARCH} = "hppa2.0w" ]
+	then
+	    # avoid double evaluation of $set_cc_for_build
+	    test -n "$CC_FOR_BUILD" || eval $set_cc_for_build
+	    if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E -) | grep __LP64__ >/dev/null
+	    then
+		HP_ARCH="hppa2.0w"
+	    else
+		HP_ARCH="hppa64"
+	    fi
+	fi
+	echo ${HP_ARCH}-hp-hpux${HPUX_REV}
+	exit 0 ;;
+    ia64:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	echo ia64-hp-hpux${HPUX_REV}
+	exit 0 ;;
+    3050*:HI-UX:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#include <unistd.h>
+	int
+	main ()
+	{
+	  long cpu = sysconf (_SC_CPU_VERSION);
+	  /* The order matters, because CPU_IS_HP_MC68K erroneously returns
+	     true for CPU_PA_RISC1_0.  CPU_IS_PA_RISC returns correct
+	     results, however.  */
+	  if (CPU_IS_PA_RISC (cpu))
+	    {
+	      switch (cpu)
+		{
+		  case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
+		  default: puts ("hppa-hitachi-hiuxwe2"); break;
+		}
+	    }
+	  else if (CPU_IS_HP_MC68K (cpu))
+	    puts ("m68k-hitachi-hiuxwe2");
+	  else puts ("unknown-hitachi-hiuxwe2");
+	  exit (0);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c && $dummy && exit 0
+	echo unknown-hitachi-hiuxwe2
+	exit 0 ;;
+    9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* )
+	echo hppa1.1-hp-bsd
+	exit 0 ;;
+    9000/8??:4.3bsd:*:*)
+	echo hppa1.0-hp-bsd
+	exit 0 ;;
+    *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
+	echo hppa1.0-hp-mpeix
+	exit 0 ;;
+    hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* )
+	echo hppa1.1-hp-osf
+	exit 0 ;;
+    hp8??:OSF1:*:*)
+	echo hppa1.0-hp-osf
+	exit 0 ;;
+    i*86:OSF1:*:*)
+	if [ -x /usr/sbin/sysversion ] ; then
+	    echo ${UNAME_MACHINE}-unknown-osf1mk
+	else
+	    echo ${UNAME_MACHINE}-unknown-osf1
+	fi
+	exit 0 ;;
+    parisc*:Lites*:*:*)
+	echo hppa1.1-hp-lites
+	exit 0 ;;
+    C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+	echo c1-convex-bsd
+        exit 0 ;;
+    C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+        exit 0 ;;
+    C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+	echo c34-convex-bsd
+        exit 0 ;;
+    C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+	echo c38-convex-bsd
+        exit 0 ;;
+    C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+	echo c4-convex-bsd
+        exit 0 ;;
+    CRAY*Y-MP:*:*:*)
+	echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit 0 ;;
+    CRAY*[A-Z]90:*:*:*)
+	echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \
+	| sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
+	      -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
+	      -e 's/\.[^.]*$/.X/'
+	exit 0 ;;
+    CRAY*TS:*:*:*)
+	echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit 0 ;;
+    CRAY*T3E:*:*:*)
+	echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit 0 ;;
+    CRAY*SV1:*:*:*)
+	echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit 0 ;;
+    *:UNICOS/mp:*:*)
+	echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit 0 ;;
+    F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+	FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+        FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+        FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
+        echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+        exit 0 ;;
+    5000:UNIX_System_V:4.*:*)
+        FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+        FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
+        echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+	exit 0 ;;
+    i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+	echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
+	exit 0 ;;
+    sparc*:BSD/OS:*:*)
+	echo sparc-unknown-bsdi${UNAME_RELEASE}
+	exit 0 ;;
+    *:BSD/OS:*:*)
+	echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
+	exit 0 ;;
+    *:FreeBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
+	exit 0 ;;
+    i*:CYGWIN*:*)
+	echo ${UNAME_MACHINE}-pc-cygwin
+	exit 0 ;;
+    i*:MINGW*:*)
+	echo ${UNAME_MACHINE}-pc-mingw32
+	exit 0 ;;
+    i*:PW*:*)
+	echo ${UNAME_MACHINE}-pc-pw32
+	exit 0 ;;
+    x86:Interix*:[34]*)
+	echo i586-pc-interix${UNAME_RELEASE}|sed -e 's/\..*//'
+	exit 0 ;;
+    [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*)
+	echo i${UNAME_MACHINE}-pc-mks
+	exit 0 ;;
+    i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# It also conflicts with pre-2.0 versions of AT&T UWIN. Should we
+	# change UNAME_MACHINE based on the output of uname instead of i386?
+	echo i586-pc-interix
+	exit 0 ;;
+    i*:UWIN*:*)
+	echo ${UNAME_MACHINE}-pc-uwin
+	exit 0 ;;
+    amd64:CYGWIN*:*:*)
+	echo x86_64-unknown-cygwin
+	exit 0 ;;
+    p*:CYGWIN*:*)
+	echo powerpcle-unknown-cygwin
+	exit 0 ;;
+    prep*:SunOS:5.*:*)
+	echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit 0 ;;
+    *:GNU:*:*)
+	# the GNU system
+	echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
+	exit 0 ;;
+    *:GNU/*:*:*)
+	# other systems with GNU libc and userland
+	echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu
+	exit 0 ;;
+    i*86:Minix:*:*)
+	echo ${UNAME_MACHINE}-pc-minix
+	exit 0 ;;
+    arm*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit 0 ;;
+    cris:Linux:*:*)
+	echo cris-axis-linux-gnu
+	exit 0 ;;
+    crisv32:Linux:*:*)
+	echo crisv32-axis-linux-gnu
+	exit 0 ;;
+    frv:Linux:*:*)
+    	echo frv-unknown-linux-gnu
+	exit 0 ;;
+    ia64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit 0 ;;
+    m32r*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit 0 ;;
+    m68*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit 0 ;;
+    mips:Linux:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#undef CPU
+	#undef mips
+	#undef mipsel
+	#if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+	CPU=mipsel
+	#else
+	#if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+	CPU=mips
+	#else
+	CPU=
+	#endif
+	#endif
+EOF
+	eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^CPU=`
+	test x"${CPU}" != x && echo "${CPU}-unknown-linux-gnu" && exit 0
+	;;
+    mips64:Linux:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#undef CPU
+	#undef mips64
+	#undef mips64el
+	#if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+	CPU=mips64el
+	#else
+	#if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+	CPU=mips64
+	#else
+	CPU=
+	#endif
+	#endif
+EOF
+	eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^CPU=`
+	test x"${CPU}" != x && echo "${CPU}-unknown-linux-gnu" && exit 0
+	;;
+    ppc:Linux:*:*)
+	echo powerpc-unknown-linux-gnu
+	exit 0 ;;
+    ppc64:Linux:*:*)
+	echo powerpc64-unknown-linux-gnu
+	exit 0 ;;
+    alpha:Linux:*:*)
+	case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
+	  EV5)   UNAME_MACHINE=alphaev5 ;;
+	  EV56)  UNAME_MACHINE=alphaev56 ;;
+	  PCA56) UNAME_MACHINE=alphapca56 ;;
+	  PCA57) UNAME_MACHINE=alphapca56 ;;
+	  EV6)   UNAME_MACHINE=alphaev6 ;;
+	  EV67)  UNAME_MACHINE=alphaev67 ;;
+	  EV68*) UNAME_MACHINE=alphaev68 ;;
+        esac
+	objdump --private-headers /bin/sh | grep ld.so.1 >/dev/null
+	if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi
+	echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC}
+	exit 0 ;;
+    parisc:Linux:*:* | hppa:Linux:*:*)
+	# Look for CPU level
+	case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+	  PA7*) echo hppa1.1-unknown-linux-gnu ;;
+	  PA8*) echo hppa2.0-unknown-linux-gnu ;;
+	  *)    echo hppa-unknown-linux-gnu ;;
+	esac
+	exit 0 ;;
+    parisc64:Linux:*:* | hppa64:Linux:*:*)
+	echo hppa64-unknown-linux-gnu
+	exit 0 ;;
+    s390:Linux:*:* | s390x:Linux:*:*)
+	echo ${UNAME_MACHINE}-ibm-linux
+	exit 0 ;;
+    sh64*:Linux:*:*)
+    	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit 0 ;;
+    sh*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit 0 ;;
+    sparc:Linux:*:* | sparc64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit 0 ;;
+    x86_64:Linux:*:*)
+	echo x86_64-unknown-linux-gnu
+	exit 0 ;;
+    i*86:Linux:*:*)
+	# The BFD linker knows what the default object file format is, so
+	# first see if it will tell us. cd to the root directory to prevent
+	# problems with other programs or directories called `ld' in the path.
+	# Set LC_ALL=C to ensure ld outputs messages in English.
+	ld_supported_targets=`cd /; LC_ALL=C ld --help 2>&1 \
+			 | sed -ne '/supported targets:/!d
+				    s/[ 	][ 	]*/ /g
+				    s/.*supported targets: *//
+				    s/ .*//
+				    p'`
+        case "$ld_supported_targets" in
+	  elf32-i386)
+		TENTATIVE="${UNAME_MACHINE}-pc-linux-gnu"
+		;;
+	  a.out-i386-linux)
+		echo "${UNAME_MACHINE}-pc-linux-gnuaout"
+		exit 0 ;;
+	  coff-i386)
+		echo "${UNAME_MACHINE}-pc-linux-gnucoff"
+		exit 0 ;;
+	  "")
+		# Either a pre-BFD a.out linker (linux-gnuoldld) or
+		# one that does not give us useful --help.
+		echo "${UNAME_MACHINE}-pc-linux-gnuoldld"
+		exit 0 ;;
+	esac
+	# Determine whether the default compiler is a.out or elf
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#include <features.h>
+	#ifdef __ELF__
+	# ifdef __GLIBC__
+	#  if __GLIBC__ >= 2
+	LIBC=gnu
+	#  else
+	LIBC=gnulibc1
+	#  endif
+	# else
+	LIBC=gnulibc1
+	# endif
+	#else
+	#ifdef __INTEL_COMPILER
+	LIBC=gnu
+	#else
+	LIBC=gnuaout
+	#endif
+	#endif
+	#ifdef __dietlibc__
+	LIBC=dietlibc
+	#endif
+EOF
+	eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^LIBC=`
+	test x"${LIBC}" != x && echo "${UNAME_MACHINE}-pc-linux-${LIBC}" && exit 0
+	test x"${TENTATIVE}" != x && echo "${TENTATIVE}" && exit 0
+	;;
+    i*86:DYNIX/ptx:4*:*)
+	# ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+	# earlier versions are messed up and put the nodename in both
+	# sysname and nodename.
+	echo i386-sequent-sysv4
+	exit 0 ;;
+    i*86:UNIX_SV:4.2MP:2.*)
+        # Unixware is an offshoot of SVR4, but it has its own version
+        # number series starting with 2...
+        # I am not positive that other SVR4 systems won't match this,
+	# I just have to hope.  -- rms.
+        # Use sysv4.2uw... so that sysv4* matches it.
+	echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
+	exit 0 ;;
+    i*86:OS/2:*:*)
+	# If we were able to find `uname', then EMX Unix compatibility
+	# is probably installed.
+	echo ${UNAME_MACHINE}-pc-os2-emx
+	exit 0 ;;
+    i*86:XTS-300:*:STOP)
+	echo ${UNAME_MACHINE}-unknown-stop
+	exit 0 ;;
+    i*86:atheos:*:*)
+	echo ${UNAME_MACHINE}-unknown-atheos
+	exit 0 ;;
+	i*86:syllable:*:*)
+	echo ${UNAME_MACHINE}-pc-syllable
+	exit 0 ;;
+    i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.0*:*)
+	echo i386-unknown-lynxos${UNAME_RELEASE}
+	exit 0 ;;
+    i*86:*DOS:*:*)
+	echo ${UNAME_MACHINE}-pc-msdosdjgpp
+	exit 0 ;;
+    i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*)
+	UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
+	if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
+		echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
+	else
+		echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
+	fi
+	exit 0 ;;
+    i*86:*:5:[78]*)
+	case `/bin/uname -X | grep "^Machine"` in
+	    *486*)	     UNAME_MACHINE=i486 ;;
+	    *Pentium)	     UNAME_MACHINE=i586 ;;
+	    *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
+	esac
+	echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
+	exit 0 ;;
+    i*86:*:3.2:*)
+	if test -f /usr/options/cb.name; then
+		UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
+		echo ${UNAME_MACHINE}-pc-isc$UNAME_REL
+	elif /bin/uname -X 2>/dev/null >/dev/null ; then
+		UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
+		(/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
+		(/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
+			&& UNAME_MACHINE=i586
+		(/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		(/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		echo ${UNAME_MACHINE}-pc-sco$UNAME_REL
+	else
+		echo ${UNAME_MACHINE}-pc-sysv32
+	fi
+	exit 0 ;;
+    pc:*:*:*)
+	# Left here for compatibility:
+        # uname -m prints for DJGPP always 'pc', but it prints nothing about
+        # the processor, so we play safe by assuming i386.
+	echo i386-pc-msdosdjgpp
+        exit 0 ;;
+    Intel:Mach:3*:*)
+	echo i386-pc-mach3
+	exit 0 ;;
+    paragon:*:*:*)
+	echo i860-intel-osf1
+	exit 0 ;;
+    i860:*:4.*:*) # i860-SVR4
+	if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
+	  echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4
+	else # Add other i860-SVR4 vendors below as they are discovered.
+	  echo i860-unknown-sysv${UNAME_RELEASE}  # Unknown i860-SVR4
+	fi
+	exit 0 ;;
+    mini*:CTIX:SYS*5:*)
+	# "miniframe"
+	echo m68010-convergent-sysv
+	exit 0 ;;
+    mc68k:UNIX:SYSTEM5:3.51m)
+	echo m68k-convergent-sysv
+	exit 0 ;;
+    M680?0:D-NIX:5.3:*)
+	echo m68k-diab-dnix
+	exit 0 ;;
+    M68*:*:R3V[5678]*:*)
+	test -r /sysV68 && echo 'm68k-motorola-sysv' && exit 0 ;;
+    3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
+	OS_REL=''
+	test -r /etc/.relid \
+	&& OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && echo i486-ncr-sysv4.3${OS_REL} && exit 0
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	  && echo i586-ncr-sysv4.3${OS_REL} && exit 0 ;;
+    3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+        /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+          && echo i486-ncr-sysv4 && exit 0 ;;
+    m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
+	echo m68k-unknown-lynxos${UNAME_RELEASE}
+	exit 0 ;;
+    mc68030:UNIX_System_V:4.*:*)
+	echo m68k-atari-sysv4
+	exit 0 ;;
+    TSUNAMI:LynxOS:2.*:*)
+	echo sparc-unknown-lynxos${UNAME_RELEASE}
+	exit 0 ;;
+    rs6000:LynxOS:2.*:*)
+	echo rs6000-unknown-lynxos${UNAME_RELEASE}
+	exit 0 ;;
+    PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.0*:*)
+	echo powerpc-unknown-lynxos${UNAME_RELEASE}
+	exit 0 ;;
+    SM[BE]S:UNIX_SV:*:*)
+	echo mips-dde-sysv${UNAME_RELEASE}
+	exit 0 ;;
+    RM*:ReliantUNIX-*:*:*)
+	echo mips-sni-sysv4
+	exit 0 ;;
+    RM*:SINIX-*:*:*)
+	echo mips-sni-sysv4
+	exit 0 ;;
+    *:SINIX-*:*:*)
+	if uname -p 2>/dev/null >/dev/null ; then
+		UNAME_MACHINE=`(uname -p) 2>/dev/null`
+		echo ${UNAME_MACHINE}-sni-sysv4
+	else
+		echo ns32k-sni-sysv
+	fi
+	exit 0 ;;
+    PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort
+                      # says <Richard.M.Bartel at ccMail.Census.GOV>
+        echo i586-unisys-sysv4
+        exit 0 ;;
+    *:UNIX_System_V:4*:FTX*)
+	# From Gerald Hewes <hewes at openmarket.com>.
+	# How about differentiating between stratus architectures? -djm
+	echo hppa1.1-stratus-sysv4
+	exit 0 ;;
+    *:*:*:FTX*)
+	# From seanf at swdc.stratus.com.
+	echo i860-stratus-sysv4
+	exit 0 ;;
+    i*86:VOS:*:*)
+	# From Paul.Green at stratus.com.
+	echo ${UNAME_MACHINE}-stratus-vos
+	exit 0 ;;
+    *:VOS:*:*)
+	# From Paul.Green at stratus.com.
+	echo hppa1.1-stratus-vos
+	exit 0 ;;
+    mc68*:A/UX:*:*)
+	echo m68k-apple-aux${UNAME_RELEASE}
+	exit 0 ;;
+    news*:NEWS-OS:6*:*)
+	echo mips-sony-newsos6
+	exit 0 ;;
+    R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+	if [ -d /usr/nec ]; then
+	        echo mips-nec-sysv${UNAME_RELEASE}
+	else
+	        echo mips-unknown-sysv${UNAME_RELEASE}
+	fi
+        exit 0 ;;
+    BeBox:BeOS:*:*)	# BeOS running on hardware made by Be, PPC only.
+	echo powerpc-be-beos
+	exit 0 ;;
+    BeMac:BeOS:*:*)	# BeOS running on Mac or Mac clone, PPC only.
+	echo powerpc-apple-beos
+	exit 0 ;;
+    BePC:BeOS:*:*)	# BeOS running on Intel PC compatible.
+	echo i586-pc-beos
+	exit 0 ;;
+    SX-4:SUPER-UX:*:*)
+	echo sx4-nec-superux${UNAME_RELEASE}
+	exit 0 ;;
+    SX-5:SUPER-UX:*:*)
+	echo sx5-nec-superux${UNAME_RELEASE}
+	exit 0 ;;
+    SX-6:SUPER-UX:*:*)
+	echo sx6-nec-superux${UNAME_RELEASE}
+	exit 0 ;;
+    Power*:Rhapsody:*:*)
+	echo powerpc-apple-rhapsody${UNAME_RELEASE}
+	exit 0 ;;
+    *:Rhapsody:*:*)
+	echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
+	exit 0 ;;
+    *:Darwin:*:*)
+	UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
+	case $UNAME_PROCESSOR in
+	    *86) UNAME_PROCESSOR=i686 ;;
+	    unknown) UNAME_PROCESSOR=powerpc ;;
+	esac
+	echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
+	exit 0 ;;
+    *:procnto*:*:* | *:QNX:[0123456789]*:*)
+	UNAME_PROCESSOR=`uname -p`
+	if test "$UNAME_PROCESSOR" = "x86"; then
+		UNAME_PROCESSOR=i386
+		UNAME_MACHINE=pc
+	fi
+	echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE}
+	exit 0 ;;
+    *:QNX:*:4*)
+	echo i386-pc-qnx
+	exit 0 ;;
+    NSE-?:NONSTOP_KERNEL:*:*)
+	echo nse-tandem-nsk${UNAME_RELEASE}
+	exit 0 ;;
+    NSR-?:NONSTOP_KERNEL:*:*)
+	echo nsr-tandem-nsk${UNAME_RELEASE}
+	exit 0 ;;
+    *:NonStop-UX:*:*)
+	echo mips-compaq-nonstopux
+	exit 0 ;;
+    BS2000:POSIX*:*:*)
+	echo bs2000-siemens-sysv
+	exit 0 ;;
+    DS/*:UNIX_System_V:*:*)
+	echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
+	exit 0 ;;
+    *:Plan9:*:*)
+	# "uname -m" is not consistent, so use $cputype instead. 386
+	# is converted to i386 for consistency with other x86
+	# operating systems.
+	if test "$cputype" = "386"; then
+	    UNAME_MACHINE=i386
+	else
+	    UNAME_MACHINE="$cputype"
+	fi
+	echo ${UNAME_MACHINE}-unknown-plan9
+	exit 0 ;;
+    *:TOPS-10:*:*)
+	echo pdp10-unknown-tops10
+	exit 0 ;;
+    *:TENEX:*:*)
+	echo pdp10-unknown-tenex
+	exit 0 ;;
+    KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
+	echo pdp10-dec-tops20
+	exit 0 ;;
+    XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
+	echo pdp10-xkl-tops20
+	exit 0 ;;
+    *:TOPS-20:*:*)
+	echo pdp10-unknown-tops20
+	exit 0 ;;
+    *:ITS:*:*)
+	echo pdp10-unknown-its
+	exit 0 ;;
+    SEI:*:*:SEIUX)
+        echo mips-sei-seiux${UNAME_RELEASE}
+	exit 0 ;;
+    *:DragonFly:*:*)
+	echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
+	exit 0 ;;
+    *:*VMS:*:*)
+    	UNAME_MACHINE=`(uname -p) 2>/dev/null`
+	case "${UNAME_MACHINE}" in
+	    A*) echo alpha-dec-vms && exit 0 ;;
+	    I*) echo ia64-dec-vms && exit 0 ;;
+	    V*) echo vax-dec-vms && exit 0 ;;
+	esac ;;
+    *:XENIX:*:SysV)
+	echo i386-pc-xenix
+	exit 0 ;;
+esac
+
+#echo '(No uname command or uname output not recognized.)' 1>&2
+#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2
+
+eval $set_cc_for_build
+cat >$dummy.c <<EOF
+#ifdef _SEQUENT_
+# include <sys/types.h>
+# include <sys/utsname.h>
+#endif
+main ()
+{
+#if defined (sony)
+#if defined (MIPSEB)
+  /* BFD wants "bsd" instead of "newsos".  Perhaps BFD should be changed,
+     I don't know....  */
+  printf ("mips-sony-bsd\n"); exit (0);
+#else
+#include <sys/param.h>
+  printf ("m68k-sony-newsos%s\n",
+#ifdef NEWSOS4
+          "4"
+#else
+	  ""
+#endif
+         ); exit (0);
+#endif
+#endif
+
+#if defined (__arm) && defined (__acorn) && defined (__unix)
+  printf ("arm-acorn-riscix"); exit (0);
+#endif
+
+#if defined (hp300) && !defined (hpux)
+  printf ("m68k-hp-bsd\n"); exit (0);
+#endif
+
+#if defined (NeXT)
+#if !defined (__ARCHITECTURE__)
+#define __ARCHITECTURE__ "m68k"
+#endif
+  int version;
+  version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
+  if (version < 4)
+    printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
+  else
+    printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
+  exit (0);
+#endif
+
+#if defined (MULTIMAX) || defined (n16)
+#if defined (UMAXV)
+  printf ("ns32k-encore-sysv\n"); exit (0);
+#else
+#if defined (CMU)
+  printf ("ns32k-encore-mach\n"); exit (0);
+#else
+  printf ("ns32k-encore-bsd\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (__386BSD__)
+  printf ("i386-pc-bsd\n"); exit (0);
+#endif
+
+#if defined (sequent)
+#if defined (i386)
+  printf ("i386-sequent-dynix\n"); exit (0);
+#endif
+#if defined (ns32000)
+  printf ("ns32k-sequent-dynix\n"); exit (0);
+#endif
+#endif
+
+#if defined (_SEQUENT_)
+    struct utsname un;
+
+    uname(&un);
+
+    if (strncmp(un.version, "V2", 2) == 0) {
+	printf ("i386-sequent-ptx2\n"); exit (0);
+    }
+    if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
+	printf ("i386-sequent-ptx1\n"); exit (0);
+    }
+    printf ("i386-sequent-ptx\n"); exit (0);
+
+#endif
+
+#if defined (vax)
+# if !defined (ultrix)
+#  include <sys/param.h>
+#  if defined (BSD)
+#   if BSD == 43
+      printf ("vax-dec-bsd4.3\n"); exit (0);
+#   else
+#    if BSD == 199006
+      printf ("vax-dec-bsd4.3reno\n"); exit (0);
+#    else
+      printf ("vax-dec-bsd\n"); exit (0);
+#    endif
+#   endif
+#  else
+    printf ("vax-dec-bsd\n"); exit (0);
+#  endif
+# else
+    printf ("vax-dec-ultrix\n"); exit (0);
+# endif
+#endif
+
+#if defined (alliant) && defined (i860)
+  printf ("i860-alliant-bsd\n"); exit (0);
+#endif
+
+  exit (1);
+}
+EOF
+
+$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && $dummy && exit 0
+
+# Apollos put the system type in the environment.
+
+test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit 0; }
+
+# Convex versions that predate uname can use getsysinfo(1)
+
+if [ -x /usr/convex/getsysinfo ]
+then
+    case `getsysinfo -f cpu_type` in
+    c1*)
+	echo c1-convex-bsd
+	exit 0 ;;
+    c2*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+	exit 0 ;;
+    c34*)
+	echo c34-convex-bsd
+	exit 0 ;;
+    c38*)
+	echo c38-convex-bsd
+	exit 0 ;;
+    c4*)
+	echo c4-convex-bsd
+	exit 0 ;;
+    esac
+fi
+
+cat >&2 <<EOF
+$0: unable to guess system type
+
+This script, last modified $timestamp, has failed to recognize
+the operating system you are using. It is advised that you
+download the most up to date version of the config scripts from
+
+  http://savannah.gnu.org/cgi-bin/viewcvs/*checkout*/config/config/config.guess
+and
+  http://savannah.gnu.org/cgi-bin/viewcvs/*checkout*/config/config/config.sub
+
+If the version you run ($0) is already up to date, please
+send the following data and any information you think might be
+pertinent to <config-patches at gnu.org> in order to provide the needed
+information to handle your system.
+
+config.guess timestamp = $timestamp
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo               = `(hostinfo) 2>/dev/null`
+/bin/universe          = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch              = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = ${UNAME_MACHINE}
+UNAME_RELEASE = ${UNAME_RELEASE}
+UNAME_SYSTEM  = ${UNAME_SYSTEM}
+UNAME_VERSION = ${UNAME_VERSION}
+EOF
+
+exit 1
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:


Property changes on: long/3D/Gale/trunk/python/BuildSystem/config/packages/config.guess
___________________________________________________________________
Name: svn:executable
   + *

Added: long/3D/Gale/trunk/python/BuildSystem/config/packages/config.sub
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/packages/config.sub	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/packages/config.sub	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,1569 @@
+#! /bin/sh
+# Configuration validation subroutine script.
+#   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+#   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+
+timestamp='2005-04-22'
+
+# This file is (in principle) common to ALL GNU software.
+# The presence of a machine in this file suggests that SOME GNU software
+# can handle that machine.  It does not imply ALL GNU software can.
+#
+# This file is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330,
+# Boston, MA 02111-1307, USA.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# Please send patches to <config-patches at gnu.org>.  Submit a context
+# diff and a properly formatted ChangeLog entry.
+#
+# Configuration subroutine to validate and canonicalize a configuration type.
+# Supply the specified configuration type as an argument.
+# If it is invalid, we print an error message on stderr and exit with code 1.
+# Otherwise, we print the canonical config type on stdout and succeed.
+
+# This file is supposed to be the same for all GNU packages
+# and recognize all the CPU types, system types and aliases
+# that are meaningful with *any* GNU software.
+# Each package is responsible for reporting which valid configurations
+# it does not support.  The user should be able to distinguish
+# a failure to support a valid configuration from a meaningless
+# configuration.
+
+# The goal of this file is to map all the various variations of a given
+# machine specification into a single specification in the form:
+#	CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
+# or in some cases, the newer four-part form:
+#	CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
+# It is wrong to echo any other type of specification.
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS
+       $0 [OPTION] ALIAS
+
+Canonicalize a configuration name.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches at gnu.org>."
+
+version="\
+GNU config.sub ($timestamp)
+
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit 0 ;;
+    --version | -v )
+       echo "$version" ; exit 0 ;;
+    --help | --h* | -h )
+       echo "$usage"; exit 0 ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help"
+       exit 1 ;;
+
+    *local*)
+       # First pass through any local machine types.
+       echo $1
+       exit 0;;
+
+    * )
+       break ;;
+  esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+    exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+    exit 1;;
+esac
+
+# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
+# Here we must recognize all the valid KERNEL-OS combinations.
+maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
+case $maybe_os in
+  nto-qnx* | linux-gnu* | linux-dietlibc | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | \
+  kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* | storm-chaos* | os2-emx* | rtmk-nova*)
+    os=-$maybe_os
+    basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
+    ;;
+  *)
+    basic_machine=`echo $1 | sed 's/-[^-]*$//'`
+    if [ $basic_machine != $1 ]
+    then os=`echo $1 | sed 's/.*-/-/'`
+    else os=; fi
+    ;;
+esac
+
+### Let's recognize common machines as not being operating systems so
+### that things like config.sub decstation-3100 work.  We also
+### recognize some manufacturers as not being operating systems, so we
+### can provide default operating systems below.
+case $os in
+	-sun*os*)
+		# Prevent following clause from handling this invalid input.
+		;;
+	-dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
+	-att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
+	-unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
+	-convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
+	-c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
+	-harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
+	-apple | -axis | -knuth | -cray)
+		os=
+		basic_machine=$1
+		;;
+	-sim | -cisco | -oki | -wec | -winbond)
+		os=
+		basic_machine=$1
+		;;
+	-scout)
+		;;
+	-wrs)
+		os=-vxworks
+		basic_machine=$1
+		;;
+	-chorusos*)
+		os=-chorusos
+		basic_machine=$1
+		;;
+ 	-chorusrdb)
+ 		os=-chorusrdb
+		basic_machine=$1
+ 		;;
+	-hiux*)
+		os=-hiuxwe2
+		;;
+	-sco5)
+		os=-sco3.2v5
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco4)
+		os=-sco3.2v4
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2.[4-9]*)
+		os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2v[4-9]*)
+		# Don't forget version if it is 3.2v4 or newer.
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco*)
+		os=-sco3.2v2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-udk*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-isc)
+		os=-isc2.2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-clix*)
+		basic_machine=clipper-intergraph
+		;;
+	-isc*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-lynx*)
+		os=-lynxos
+		;;
+	-ptx*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'`
+		;;
+	-windowsnt*)
+		os=`echo $os | sed -e 's/windowsnt/winnt/'`
+		;;
+	-psos*)
+		os=-psos
+		;;
+	-mint | -mint[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+esac
+
+# Decode aliases for certain CPU-COMPANY combinations.
+case $basic_machine in
+	# Recognize the basic CPU types without company name.
+	# Some are omitted here because they have special meanings below.
+	1750a | 580 \
+	| a29k \
+	| alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
+	| alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
+	| am33_2.0 \
+	| arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr \
+	| bfin \
+	| c4x | clipper \
+	| d10v | d30v | dlx | dsp16xx \
+	| fr30 | frv \
+	| h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
+	| i370 | i860 | i960 | ia64 \
+	| ip2k | iq2000 \
+	| m32r | m32rle | m68000 | m68k | m88k | maxq | mcore \
+	| mips | mipsbe | mipseb | mipsel | mipsle \
+	| mips16 \
+	| mips64 | mips64el \
+	| mips64vr | mips64vrel \
+	| mips64orion | mips64orionel \
+	| mips64vr4100 | mips64vr4100el \
+	| mips64vr4300 | mips64vr4300el \
+	| mips64vr5000 | mips64vr5000el \
+	| mipsisa32 | mipsisa32el \
+	| mipsisa32r2 | mipsisa32r2el \
+	| mipsisa64 | mipsisa64el \
+	| mipsisa64r2 | mipsisa64r2el \
+	| mipsisa64sb1 | mipsisa64sb1el \
+	| mipsisa64sr71k | mipsisa64sr71kel \
+	| mipstx39 | mipstx39el \
+	| mn10200 | mn10300 \
+	| msp430 \
+	| ns16k | ns32k \
+	| openrisc | or32 \
+	| pdp10 | pdp11 | pj | pjl \
+	| powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \
+	| pyramid \
+	| sh | sh[1234] | sh[23]e | sh[34]eb | shbe | shle | sh[1234]le | sh3ele \
+	| sh64 | sh64le \
+	| sparc | sparc64 | sparc64b | sparc86x | sparclet | sparclite \
+	| sparcv8 | sparcv9 | sparcv9b \
+	| strongarm \
+	| tahoe | thumb | tic4x | tic80 | tron \
+	| v850 | v850e \
+	| we32k \
+	| x86 | xscale | xscalee[bl] | xstormy16 | xtensa \
+	| z8k)
+		basic_machine=$basic_machine-unknown
+		;;
+	m6811 | m68hc11 | m6812 | m68hc12)
+		# Motorola 68HC11/12.
+		basic_machine=$basic_machine-unknown
+		os=-none
+		;;
+	m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k)
+		;;
+
+	# We use `pc' rather than `unknown'
+	# because (1) that's what they normally are, and
+	# (2) the word "unknown" tends to confuse beginning users.
+	i*86 | x86_64)
+	  basic_machine=$basic_machine-pc
+	  ;;
+	# Object if more than one company name word.
+	*-*-*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+	# Recognize the basic CPU types with company name.
+	580-* \
+	| a29k-* \
+	| alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
+	| alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
+	| alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
+	| arm-*  | armbe-* | armle-* | armeb-* | armv*-* \
+	| avr-* \
+	| bfin-* | bs2000-* \
+	| c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \
+	| clipper-* | craynv-* | cydra-* \
+	| d10v-* | d30v-* | dlx-* \
+	| elxsi-* \
+	| f30[01]-* | f700-* | fr30-* | frv-* | fx80-* \
+	| h8300-* | h8500-* \
+	| hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
+	| i*86-* | i860-* | i960-* | ia64-* \
+	| ip2k-* | iq2000-* \
+	| m32r-* | m32rle-* \
+	| m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
+	| m88110-* | m88k-* | maxq-* | mcore-* \
+	| mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
+	| mips16-* \
+	| mips64-* | mips64el-* \
+	| mips64vr-* | mips64vrel-* \
+	| mips64orion-* | mips64orionel-* \
+	| mips64vr4100-* | mips64vr4100el-* \
+	| mips64vr4300-* | mips64vr4300el-* \
+	| mips64vr5000-* | mips64vr5000el-* \
+	| mipsisa32-* | mipsisa32el-* \
+	| mipsisa32r2-* | mipsisa32r2el-* \
+	| mipsisa64-* | mipsisa64el-* \
+	| mipsisa64r2-* | mipsisa64r2el-* \
+	| mipsisa64sb1-* | mipsisa64sb1el-* \
+	| mipsisa64sr71k-* | mipsisa64sr71kel-* \
+	| mipstx39-* | mipstx39el-* \
+	| mmix-* \
+	| msp430-* \
+	| none-* | np1-* | ns16k-* | ns32k-* \
+	| orion-* \
+	| pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
+	| powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \
+	| pyramid-* \
+	| romp-* | rs6000-* \
+	| sh-* | sh[1234]-* | sh[23]e-* | sh[34]eb-* | shbe-* \
+	| shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
+	| sparc-* | sparc64-* | sparc64b-* | sparc86x-* | sparclet-* \
+	| sparclite-* \
+	| sparcv8-* | sparcv9-* | sparcv9b-* | strongarm-* | sv1-* | sx?-* \
+	| tahoe-* | thumb-* \
+	| tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \
+	| tron-* \
+	| v850-* | v850e-* | vax-* \
+	| we32k-* \
+	| x86-* | x86_64-* | xps100-* | xscale-* | xscalee[bl]-* \
+	| xstormy16-* | xtensa-* \
+	| ymp-* \
+	| z8k-*)
+		;;
+	# Recognize the various machine names and aliases which stand
+	# for a CPU type and a company and sometimes even an OS.
+	386bsd)
+		basic_machine=i386-unknown
+		os=-bsd
+		;;
+	3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
+		basic_machine=m68000-att
+		;;
+	3b*)
+		basic_machine=we32k-att
+		;;
+	a29khif)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+    	abacus)
+		basic_machine=abacus-unknown
+		;;
+	adobe68k)
+		basic_machine=m68010-adobe
+		os=-scout
+		;;
+	alliant | fx80)
+		basic_machine=fx80-alliant
+		;;
+	altos | altos3068)
+		basic_machine=m68k-altos
+		;;
+	am29k)
+		basic_machine=a29k-none
+		os=-bsd
+		;;
+	amd64)
+		basic_machine=x86_64-pc
+		;;
+	amd64-*)
+		basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	amdahl)
+		basic_machine=580-amdahl
+		os=-sysv
+		;;
+	amiga | amiga-*)
+		basic_machine=m68k-unknown
+		;;
+	amigaos | amigados)
+		basic_machine=m68k-unknown
+		os=-amigaos
+		;;
+	amigaunix | amix)
+		basic_machine=m68k-unknown
+		os=-sysv4
+		;;
+	apollo68)
+		basic_machine=m68k-apollo
+		os=-sysv
+		;;
+	apollo68bsd)
+		basic_machine=m68k-apollo
+		os=-bsd
+		;;
+	aux)
+		basic_machine=m68k-apple
+		os=-aux
+		;;
+	balance)
+		basic_machine=ns32k-sequent
+		os=-dynix
+		;;
+	c90)
+		basic_machine=c90-cray
+		os=-unicos
+		;;
+	convex-c1)
+		basic_machine=c1-convex
+		os=-bsd
+		;;
+	convex-c2)
+		basic_machine=c2-convex
+		os=-bsd
+		;;
+	convex-c32)
+		basic_machine=c32-convex
+		os=-bsd
+		;;
+	convex-c34)
+		basic_machine=c34-convex
+		os=-bsd
+		;;
+	convex-c38)
+		basic_machine=c38-convex
+		os=-bsd
+		;;
+	cray | j90)
+		basic_machine=j90-cray
+		os=-unicos
+		;;
+	craynv)
+		basic_machine=craynv-cray
+		os=-unicosmp
+		;;
+	cr16c)
+		basic_machine=cr16c-unknown
+		os=-elf
+		;;
+	crds | unos)
+		basic_machine=m68k-crds
+		;;
+	crisv32 | crisv32-* | etraxfs*)
+		basic_machine=crisv32-axis
+		;;
+	cris | cris-* | etrax*)
+		basic_machine=cris-axis
+		;;
+	crx)
+		basic_machine=crx-unknown
+		os=-elf
+		;;
+	da30 | da30-*)
+		basic_machine=m68k-da30
+		;;
+	decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
+		basic_machine=mips-dec
+		;;
+	decsystem10* | dec10*)
+		basic_machine=pdp10-dec
+		os=-tops10
+		;;
+	decsystem20* | dec20*)
+		basic_machine=pdp10-dec
+		os=-tops20
+		;;
+	delta | 3300 | motorola-3300 | motorola-delta \
+	      | 3300-motorola | delta-motorola)
+		basic_machine=m68k-motorola
+		;;
+	delta88)
+		basic_machine=m88k-motorola
+		os=-sysv3
+		;;
+	djgpp)
+		basic_machine=i586-pc
+		os=-msdosdjgpp
+		;;
+	dpx20 | dpx20-*)
+		basic_machine=rs6000-bull
+		os=-bosx
+		;;
+	dpx2* | dpx2*-bull)
+		basic_machine=m68k-bull
+		os=-sysv3
+		;;
+	ebmon29k)
+		basic_machine=a29k-amd
+		os=-ebmon
+		;;
+	elxsi)
+		basic_machine=elxsi-elxsi
+		os=-bsd
+		;;
+	encore | umax | mmax)
+		basic_machine=ns32k-encore
+		;;
+	es1800 | OSE68k | ose68k | ose | OSE)
+		basic_machine=m68k-ericsson
+		os=-ose
+		;;
+	fx2800)
+		basic_machine=i860-alliant
+		;;
+	genix)
+		basic_machine=ns32k-ns
+		;;
+	gmicro)
+		basic_machine=tron-gmicro
+		os=-sysv
+		;;
+	go32)
+		basic_machine=i386-pc
+		os=-go32
+		;;
+	h3050r* | hiux*)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	h8300hms)
+		basic_machine=h8300-hitachi
+		os=-hms
+		;;
+	h8300xray)
+		basic_machine=h8300-hitachi
+		os=-xray
+		;;
+	h8500hms)
+		basic_machine=h8500-hitachi
+		os=-hms
+		;;
+	harris)
+		basic_machine=m88k-harris
+		os=-sysv3
+		;;
+	hp300-*)
+		basic_machine=m68k-hp
+		;;
+	hp300bsd)
+		basic_machine=m68k-hp
+		os=-bsd
+		;;
+	hp300hpux)
+		basic_machine=m68k-hp
+		os=-hpux
+		;;
+	hp3k9[0-9][0-9] | hp9[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k2[0-9][0-9] | hp9k31[0-9])
+		basic_machine=m68000-hp
+		;;
+	hp9k3[2-9][0-9])
+		basic_machine=m68k-hp
+		;;
+	hp9k6[0-9][0-9] | hp6[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k7[0-79][0-9] | hp7[0-79][0-9])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k78[0-9] | hp78[0-9])
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][13679] | hp8[0-9][13679])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][0-9] | hp8[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hppa-next)
+		os=-nextstep3
+		;;
+	hppaosf)
+		basic_machine=hppa1.1-hp
+		os=-osf
+		;;
+	hppro)
+		basic_machine=hppa1.1-hp
+		os=-proelf
+		;;
+	i370-ibm* | ibm*)
+		basic_machine=i370-ibm
+		;;
+# I'm not sure what "Sysv32" means.  Should this be sysv3.2?
+	i*86v32)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv32
+		;;
+	i*86v4*)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv4
+		;;
+	i*86v)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv
+		;;
+	i*86sol2)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-solaris2
+		;;
+	i386mach)
+		basic_machine=i386-mach
+		os=-mach
+		;;
+	i386-vsta | vsta)
+		basic_machine=i386-unknown
+		os=-vsta
+		;;
+	iris | iris4d)
+		basic_machine=mips-sgi
+		case $os in
+		    -irix*)
+			;;
+		    *)
+			os=-irix4
+			;;
+		esac
+		;;
+	isi68 | isi)
+		basic_machine=m68k-isi
+		os=-sysv
+		;;
+	m88k-omron*)
+		basic_machine=m88k-omron
+		;;
+	magnum | m3230)
+		basic_machine=mips-mips
+		os=-sysv
+		;;
+	merlin)
+		basic_machine=ns32k-utek
+		os=-sysv
+		;;
+	mingw32)
+		basic_machine=i386-pc
+		os=-mingw32
+		;;
+	miniframe)
+		basic_machine=m68000-convergent
+		;;
+	*mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+	mips3*-*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`
+		;;
+	mips3*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
+		;;
+	monitor)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	morphos)
+		basic_machine=powerpc-unknown
+		os=-morphos
+		;;
+	msdos)
+		basic_machine=i386-pc
+		os=-msdos
+		;;
+	mvs)
+		basic_machine=i370-ibm
+		os=-mvs
+		;;
+	ncr3000)
+		basic_machine=i486-ncr
+		os=-sysv4
+		;;
+	netbsd386)
+		basic_machine=i386-unknown
+		os=-netbsd
+		;;
+	netwinder)
+		basic_machine=armv4l-rebel
+		os=-linux
+		;;
+	news | news700 | news800 | news900)
+		basic_machine=m68k-sony
+		os=-newsos
+		;;
+	news1000)
+		basic_machine=m68030-sony
+		os=-newsos
+		;;
+	news-3600 | risc-news)
+		basic_machine=mips-sony
+		os=-newsos
+		;;
+	necv70)
+		basic_machine=v70-nec
+		os=-sysv
+		;;
+	next | m*-next )
+		basic_machine=m68k-next
+		case $os in
+		    -nextstep* )
+			;;
+		    -ns2*)
+		      os=-nextstep2
+			;;
+		    *)
+		      os=-nextstep3
+			;;
+		esac
+		;;
+	nh3000)
+		basic_machine=m68k-harris
+		os=-cxux
+		;;
+	nh[45]000)
+		basic_machine=m88k-harris
+		os=-cxux
+		;;
+	nindy960)
+		basic_machine=i960-intel
+		os=-nindy
+		;;
+	mon960)
+		basic_machine=i960-intel
+		os=-mon960
+		;;
+	nonstopux)
+		basic_machine=mips-compaq
+		os=-nonstopux
+		;;
+	np1)
+		basic_machine=np1-gould
+		;;
+	nsr-tandem)
+		basic_machine=nsr-tandem
+		;;
+	op50n-* | op60c-*)
+		basic_machine=hppa1.1-oki
+		os=-proelf
+		;;
+	or32 | or32-*)
+		basic_machine=or32-unknown
+		os=-coff
+		;;
+	os400)
+		basic_machine=powerpc-ibm
+		os=-os400
+		;;
+	OSE68000 | ose68000)
+		basic_machine=m68000-ericsson
+		os=-ose
+		;;
+	os68k)
+		basic_machine=m68k-none
+		os=-os68k
+		;;
+	pa-hitachi)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	paragon)
+		basic_machine=i860-intel
+		os=-osf
+		;;
+	pbd)
+		basic_machine=sparc-tti
+		;;
+	pbb)
+		basic_machine=m68k-tti
+		;;
+	pc532 | pc532-*)
+		basic_machine=ns32k-pc532
+		;;
+	pentium | p5 | k5 | k6 | nexgen | viac3)
+		basic_machine=i586-pc
+		;;
+	pentiumpro | p6 | 6x86 | athlon | athlon_*)
+		basic_machine=i686-pc
+		;;
+	pentiumii | pentium2 | pentiumiii | pentium3)
+		basic_machine=i686-pc
+		;;
+	pentium4)
+		basic_machine=i786-pc
+		;;
+	pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
+		basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumpro-* | p6-* | 6x86-* | athlon-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentium4-*)
+		basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pn)
+		basic_machine=pn-gould
+		;;
+	power)	basic_machine=power-ibm
+		;;
+	ppc)	basic_machine=powerpc-unknown
+		;;
+	ppc-*)	basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppcle | powerpclittle | ppc-le | powerpc-little)
+		basic_machine=powerpcle-unknown
+		;;
+	ppcle-* | powerpclittle-*)
+		basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64)	basic_machine=powerpc64-unknown
+		;;
+	ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64le | powerpc64little | ppc64-le | powerpc64-little)
+		basic_machine=powerpc64le-unknown
+		;;
+	ppc64le-* | powerpc64little-*)
+		basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ps2)
+		basic_machine=i386-ibm
+		;;
+	pw32)
+		basic_machine=i586-unknown
+		os=-pw32
+		;;
+	rom68k)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	rm[46]00)
+		basic_machine=mips-siemens
+		;;
+	rtpc | rtpc-*)
+		basic_machine=romp-ibm
+		;;
+	s390 | s390-*)
+		basic_machine=s390-ibm
+		;;
+	s390x | s390x-*)
+		basic_machine=s390x-ibm
+		;;
+	sa29200)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	sb1)
+		basic_machine=mipsisa64sb1-unknown
+		;;
+	sb1el)
+		basic_machine=mipsisa64sb1el-unknown
+		;;
+	sei)
+		basic_machine=mips-sei
+		os=-seiux
+		;;
+	sequent)
+		basic_machine=i386-sequent
+		;;
+	sh)
+		basic_machine=sh-hitachi
+		os=-hms
+		;;
+	sh64)
+		basic_machine=sh64-unknown
+		;;
+	sparclite-wrs | simso-wrs)
+		basic_machine=sparclite-wrs
+		os=-vxworks
+		;;
+	sps7)
+		basic_machine=m68k-bull
+		os=-sysv2
+		;;
+	spur)
+		basic_machine=spur-unknown
+		;;
+	st2000)
+		basic_machine=m68k-tandem
+		;;
+	stratus)
+		basic_machine=i860-stratus
+		os=-sysv4
+		;;
+	sun2)
+		basic_machine=m68000-sun
+		;;
+	sun2os3)
+		basic_machine=m68000-sun
+		os=-sunos3
+		;;
+	sun2os4)
+		basic_machine=m68000-sun
+		os=-sunos4
+		;;
+	sun3os3)
+		basic_machine=m68k-sun
+		os=-sunos3
+		;;
+	sun3os4)
+		basic_machine=m68k-sun
+		os=-sunos4
+		;;
+	sun4os3)
+		basic_machine=sparc-sun
+		os=-sunos3
+		;;
+	sun4os4)
+		basic_machine=sparc-sun
+		os=-sunos4
+		;;
+	sun4sol2)
+		basic_machine=sparc-sun
+		os=-solaris2
+		;;
+	sun3 | sun3-*)
+		basic_machine=m68k-sun
+		;;
+	sun4)
+		basic_machine=sparc-sun
+		;;
+	sun386 | sun386i | roadrunner)
+		basic_machine=i386-sun
+		;;
+	sv1)
+		basic_machine=sv1-cray
+		os=-unicos
+		;;
+	symmetry)
+		basic_machine=i386-sequent
+		os=-dynix
+		;;
+	t3e)
+		basic_machine=alphaev5-cray
+		os=-unicos
+		;;
+	t90)
+		basic_machine=t90-cray
+		os=-unicos
+		;;
+	tic54x | c54x*)
+		basic_machine=tic54x-unknown
+		os=-coff
+		;;
+	tic55x | c55x*)
+		basic_machine=tic55x-unknown
+		os=-coff
+		;;
+	tic6x | c6x*)
+		basic_machine=tic6x-unknown
+		os=-coff
+		;;
+	tx39)
+		basic_machine=mipstx39-unknown
+		;;
+	tx39el)
+		basic_machine=mipstx39el-unknown
+		;;
+	toad1)
+		basic_machine=pdp10-xkl
+		os=-tops20
+		;;
+	tower | tower-32)
+		basic_machine=m68k-ncr
+		;;
+	tpf)
+		basic_machine=s390x-ibm
+		os=-tpf
+		;;
+	udi29k)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	ultra3)
+		basic_machine=a29k-nyu
+		os=-sym1
+		;;
+	v810 | necv810)
+		basic_machine=v810-nec
+		os=-none
+		;;
+	vaxv)
+		basic_machine=vax-dec
+		os=-sysv
+		;;
+	vms)
+		basic_machine=vax-dec
+		os=-vms
+		;;
+	vpp*|vx|vx-*)
+		basic_machine=f301-fujitsu
+		;;
+	vxworks960)
+		basic_machine=i960-wrs
+		os=-vxworks
+		;;
+	vxworks68)
+		basic_machine=m68k-wrs
+		os=-vxworks
+		;;
+	vxworks29k)
+		basic_machine=a29k-wrs
+		os=-vxworks
+		;;
+	w65*)
+		basic_machine=w65-wdc
+		os=-none
+		;;
+	w89k-*)
+		basic_machine=hppa1.1-winbond
+		os=-proelf
+		;;
+	xbox)
+		basic_machine=i686-pc
+		os=-mingw32
+		;;
+	xps | xps100)
+		basic_machine=xps100-honeywell
+		;;
+	ymp)
+		basic_machine=ymp-cray
+		os=-unicos
+		;;
+	z8k-*-coff)
+		basic_machine=z8k-unknown
+		os=-sim
+		;;
+	none)
+		basic_machine=none-none
+		os=-none
+		;;
+
+# Here we handle the default manufacturer of certain CPU types.  It is in
+# some cases the only manufacturer, in others, it is the most popular.
+	w89k)
+		basic_machine=hppa1.1-winbond
+		;;
+	op50n)
+		basic_machine=hppa1.1-oki
+		;;
+	op60c)
+		basic_machine=hppa1.1-oki
+		;;
+	romp)
+		basic_machine=romp-ibm
+		;;
+	mmix)
+		basic_machine=mmix-knuth
+		;;
+	rs6000)
+		basic_machine=rs6000-ibm
+		;;
+	vax)
+		basic_machine=vax-dec
+		;;
+	pdp10)
+		# there are many clones, so DEC is not a safe bet
+		basic_machine=pdp10-unknown
+		;;
+	pdp11)
+		basic_machine=pdp11-dec
+		;;
+	we32k)
+		basic_machine=we32k-att
+		;;
+	sh3 | sh4 | sh[34]eb | sh[1234]le | sh[23]ele)
+		basic_machine=sh-unknown
+		;;
+	sh64)
+		basic_machine=sh64-unknown
+		;;
+	sparc | sparcv8 | sparcv9 | sparcv9b)
+		basic_machine=sparc-sun
+		;;
+	cydra)
+		basic_machine=cydra-cydrome
+		;;
+	orion)
+		basic_machine=orion-highlevel
+		;;
+	orion105)
+		basic_machine=clipper-highlevel
+		;;
+	mac | mpw | mac-mpw)
+		basic_machine=m68k-apple
+		;;
+	pmac | pmac-mpw)
+		basic_machine=powerpc-apple
+		;;
+	*-unknown)
+		# Make sure to match an already-canonicalized machine name.
+		;;
+	*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+esac
+
+# Here we canonicalize certain aliases for manufacturers.
+case $basic_machine in
+	*-digital*)
+		basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'`
+		;;
+	*-commodore*)
+		basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'`
+		;;
+	*)
+		;;
+esac
+
+# Decode manufacturer-specific aliases for certain operating systems.
+
+if [ x"$os" != x"" ]
+then
+case $os in
+        # First match some system type aliases
+        # that might get confused with valid system types.
+	# -solaris* is a basic system type, with this one exception.
+	-solaris1 | -solaris1.*)
+		os=`echo $os | sed -e 's|solaris1|sunos4|'`
+		;;
+	-solaris)
+		os=-solaris2
+		;;
+	-svr4*)
+		os=-sysv4
+		;;
+	-unixware*)
+		os=-sysv4.2uw
+		;;
+	-gnu/linux*)
+		os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
+		;;
+	# First accept the basic system types.
+	# The portable systems comes first.
+	# Each alternative MUST END IN A *, to match a version number.
+	# -sysv* is not here because it comes later, after sysvr4.
+	-gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
+	      | -*vms* | -sco* | -esix* | -isc* | -aix* | -sunos | -sunos[34]*\
+	      | -hpux* | -unos* | -osf* | -luna* | -dgux* | -solaris* | -sym* \
+	      | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
+	      | -aos* \
+	      | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
+	      | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
+	      | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* | -openbsd* \
+	      | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
+	      | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
+	      | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
+	      | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
+	      | -chorusos* | -chorusrdb* \
+	      | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
+	      | -mingw32* | -linux-gnu* | -linux-uclibc* | -uxpv* | -beos* | -mpeix* | -udk* \
+	      | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
+	      | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
+	      | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
+	      | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
+	      | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \
+	      | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly*)
+	# Remember, each alternative MUST END IN *, to match a version number.
+		;;
+	-qnx*)
+		case $basic_machine in
+		    x86-* | i*86-*)
+			;;
+		    *)
+			os=-nto$os
+			;;
+		esac
+		;;
+	-nto-qnx*)
+		;;
+	-nto*)
+		os=`echo $os | sed -e 's|nto|nto-qnx|'`
+		;;
+	-sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
+	      | -windows* | -osx | -abug | -netware* | -os9* | -beos* \
+	      | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
+		;;
+	-mac*)
+		os=`echo $os | sed -e 's|mac|macos|'`
+		;;
+	-linux-dietlibc)
+		os=-linux-dietlibc
+		;;
+	-linux*)
+		os=`echo $os | sed -e 's|linux|linux-gnu|'`
+		;;
+	-sunos5*)
+		os=`echo $os | sed -e 's|sunos5|solaris2|'`
+		;;
+	-sunos6*)
+		os=`echo $os | sed -e 's|sunos6|solaris3|'`
+		;;
+	-opened*)
+		os=-openedition
+		;;
+        -os400*)
+		os=-os400
+		;;
+	-wince*)
+		os=-wince
+		;;
+	-osfrose*)
+		os=-osfrose
+		;;
+	-osf*)
+		os=-osf
+		;;
+	-utek*)
+		os=-bsd
+		;;
+	-dynix*)
+		os=-bsd
+		;;
+	-acis*)
+		os=-aos
+		;;
+	-atheos*)
+		os=-atheos
+		;;
+	-syllable*)
+		os=-syllable
+		;;
+	-386bsd)
+		os=-bsd
+		;;
+	-ctix* | -uts*)
+		os=-sysv
+		;;
+	-nova*)
+		os=-rtmk-nova
+		;;
+	-ns2 )
+		os=-nextstep2
+		;;
+	-nsk*)
+		os=-nsk
+		;;
+	# Preserve the version number of sinix5.
+	-sinix5.*)
+		os=`echo $os | sed -e 's|sinix|sysv|'`
+		;;
+	-sinix*)
+		os=-sysv4
+		;;
+        -tpf*)
+		os=-tpf
+		;;
+	-triton*)
+		os=-sysv3
+		;;
+	-oss*)
+		os=-sysv3
+		;;
+	-svr4)
+		os=-sysv4
+		;;
+	-svr3)
+		os=-sysv3
+		;;
+	-sysvr4)
+		os=-sysv4
+		;;
+	# This must come after -sysvr4.
+	-sysv*)
+		;;
+	-ose*)
+		os=-ose
+		;;
+	-es1800*)
+		os=-ose
+		;;
+	-xenix)
+		os=-xenix
+		;;
+	-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+		os=-mint
+		;;
+	-aros*)
+		os=-aros
+		;;
+	-kaos*)
+		os=-kaos
+		;;
+	-zvmoe)
+		os=-zvmoe
+		;;
+	-none)
+		;;
+	*)
+		# Get rid of the `-' at the beginning of $os.
+		os=`echo $os | sed 's/[^-]*-//'`
+		echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2
+		exit 1
+		;;
+esac
+else
+
+# Here we handle the default operating systems that come with various machines.
+# The value should be what the vendor currently ships out the door with their
+# machine or put another way, the most popular os provided with the machine.
+
+# Note that if you're going to try to match "-MANUFACTURER" here (say,
+# "-sun"), then you have to tell the case statement up towards the top
+# that MANUFACTURER isn't an operating system.  Otherwise, code above
+# will signal an error saying that MANUFACTURER isn't an operating
+# system, and we'll never get to this point.
+
+case $basic_machine in
+	*-acorn)
+		os=-riscix1.2
+		;;
+	arm*-rebel)
+		os=-linux
+		;;
+	arm*-semi)
+		os=-aout
+		;;
+    c4x-* | tic4x-*)
+        os=-coff
+        ;;
+	# This must come before the *-dec entry.
+	pdp10-*)
+		os=-tops20
+		;;
+	pdp11-*)
+		os=-none
+		;;
+	*-dec | vax-*)
+		os=-ultrix4.2
+		;;
+	m68*-apollo)
+		os=-domain
+		;;
+	i386-sun)
+		os=-sunos4.0.2
+		;;
+	m68000-sun)
+		os=-sunos3
+		# This also exists in the configure program, but was not the
+		# default.
+		# os=-sunos4
+		;;
+	m68*-cisco)
+		os=-aout
+		;;
+	mips*-cisco)
+		os=-elf
+		;;
+	mips*-*)
+		os=-elf
+		;;
+	or32-*)
+		os=-coff
+		;;
+	*-tti)	# must be before sparc entry or we get the wrong os.
+		os=-sysv3
+		;;
+	sparc-* | *-sun)
+		os=-sunos4.1.1
+		;;
+	*-be)
+		os=-beos
+		;;
+	*-ibm)
+		os=-aix
+		;;
+    	*-knuth)
+		os=-mmixware
+		;;
+	*-wec)
+		os=-proelf
+		;;
+	*-winbond)
+		os=-proelf
+		;;
+	*-oki)
+		os=-proelf
+		;;
+	*-hp)
+		os=-hpux
+		;;
+	*-hitachi)
+		os=-hiux
+		;;
+	i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
+		os=-sysv
+		;;
+	*-cbm)
+		os=-amigaos
+		;;
+	*-dg)
+		os=-dgux
+		;;
+	*-dolphin)
+		os=-sysv3
+		;;
+	m68k-ccur)
+		os=-rtu
+		;;
+	m88k-omron*)
+		os=-luna
+		;;
+	*-next )
+		os=-nextstep
+		;;
+	*-sequent)
+		os=-ptx
+		;;
+	*-crds)
+		os=-unos
+		;;
+	*-ns)
+		os=-genix
+		;;
+	i370-*)
+		os=-mvs
+		;;
+	*-next)
+		os=-nextstep3
+		;;
+	*-gould)
+		os=-sysv
+		;;
+	*-highlevel)
+		os=-bsd
+		;;
+	*-encore)
+		os=-bsd
+		;;
+	*-sgi)
+		os=-irix
+		;;
+	*-siemens)
+		os=-sysv4
+		;;
+	*-masscomp)
+		os=-rtu
+		;;
+	f30[01]-fujitsu | f700-fujitsu)
+		os=-uxpv
+		;;
+	*-rom68k)
+		os=-coff
+		;;
+	*-*bug)
+		os=-coff
+		;;
+	*-apple)
+		os=-macos
+		;;
+	*-atari*)
+		os=-mint
+		;;
+	*)
+		os=-none
+		;;
+esac
+fi
+
+# Here we handle the case where we know the os, and the CPU type, but not the
+# manufacturer.  We pick the logical manufacturer.
+vendor=unknown
+case $basic_machine in
+	*-unknown)
+		case $os in
+			-riscix*)
+				vendor=acorn
+				;;
+			-sunos*)
+				vendor=sun
+				;;
+			-aix*)
+				vendor=ibm
+				;;
+			-beos*)
+				vendor=be
+				;;
+			-hpux*)
+				vendor=hp
+				;;
+			-mpeix*)
+				vendor=hp
+				;;
+			-hiux*)
+				vendor=hitachi
+				;;
+			-unos*)
+				vendor=crds
+				;;
+			-dgux*)
+				vendor=dg
+				;;
+			-luna*)
+				vendor=omron
+				;;
+			-genix*)
+				vendor=ns
+				;;
+			-mvs* | -opened*)
+				vendor=ibm
+				;;
+			-os400*)
+				vendor=ibm
+				;;
+			-ptx*)
+				vendor=sequent
+				;;
+			-tpf*)
+				vendor=ibm
+				;;
+			-vxsim* | -vxworks* | -windiss*)
+				vendor=wrs
+				;;
+			-aux*)
+				vendor=apple
+				;;
+			-hms*)
+				vendor=hitachi
+				;;
+			-mpw* | -macos*)
+				vendor=apple
+				;;
+			-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+				vendor=atari
+				;;
+			-vos*)
+				vendor=stratus
+				;;
+		esac
+		basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"`
+		;;
+esac
+
+echo $basic_machine$os
+exit 0
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:


Property changes on: long/3D/Gale/trunk/python/BuildSystem/config/packages/config.sub
___________________________________________________________________
Name: svn:executable
   + *

Added: long/3D/Gale/trunk/python/BuildSystem/config/preTests.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/preTests.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/preTests.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,101 @@
+'''
+This module tests features so broken that the normal test apparatus is likely
+to fail. For example, there are several buggy implementations of Python that
+we can recognize and work around.
+'''
+import os, sys
+
+class Configure:
+  '''These test must run before almost any operations
+  - We maintain an internal self.log array of strings so that normal logging need not be initialized
+  '''
+  def __init__(self, options = {}, convertOptions = False):
+    self.options        = options
+    self.convertOptions = convertOptions
+    self.log            = []
+    return
+
+  def checkCygwin(self):
+    '''Check for versions of Cygwin below 1.5.11-1
+    These version have a known bug in the Python threads module
+    '''
+    if os.path.exists('/usr/bin/cygcheck.exe'):
+      buf = os.popen('/usr/bin/cygcheck.exe -c cygwin').read()
+      if buf.find('1.5.11-1') > -1:
+        return 1
+      else:
+        return 0
+    return 0
+
+  def checkCygwinPython(self):
+    '''Check for versions of Cygwin Python 2.4 and above
+    These version have a known bug in the Python threads module
+    '''
+    if os.path.exists('/usr/bin/cygcheck.exe'):
+      buf = os.popen('/usr/bin/cygcheck.exe -c python').read()
+      if buf.find('2.4') > -1:
+        return 1
+    return 0
+
+  def checkRedHat9(self):
+    '''Check for Redhat 9
+    This version have a known bug in the Python threads module
+    '''
+    try:
+      file = open('/etc/redhat-release','r')
+    except:
+      return 0
+    try:
+      buf = file.read()
+      file.close()
+    except:
+      # can't read file - assume dangerous RHL9
+      return 1
+    if buf.find('Shrike') > -1:
+      return 1
+    return 0
+
+  def checkThreads(self):
+    '''Check Python threading'''
+    if self.checkCygwin():
+      errorMsg = '''\
+      =================================================================================
+       *** cygwin-1.5.11-1 detected. configure.py fails with this version ***
+       *** Please upgrade to cygwin-1.5.12-1 or newer version. This can   ***
+       *** be done by running cygwin-setup, selecting "next" all the way. ***
+      ================================================================================='''
+      sys.exit(errorMsg)
+    if self.checkRedHat9():
+      sys.argv.append('--useThreads=0')
+      self.log.append('''\
+================================================================================
+   *** RHL9 detected. Threads do not work correctly with this distribution ***
+    ********* Disabling thread usage for this run of configure.py ***********
+================================================================================''')
+      if self.checkCygwinPython():
+        sys.argv.append('--useThreads=0')
+        self.log.append('''\
+================================================================================
+** Cygwin-python-2.4 detected. Threads do not work correctly with this version *
+ ********* Disabling thread usage for this run of configure.py ****************
+================================================================================''')
+    return
+
+  def checkOptions(self, options, convertOptions = False):
+    '''Check for some initial options, and optionally give them default values
+    - If convertOptions is true, process GNU options prefixes into our canonical form
+    '''
+    import nargs
+
+    if convertOptions:
+      nargs.Arg.processAlternatePrefixes(sys.argv)
+    for name, defaultArg in options.items():
+      if nargs.Arg.findArgument(name,sys.argv) is None:
+        if defaultArg:
+          sys.argv.append('%s=%s' % name, str(defaultArg))
+    return
+
+  def configure(self):
+    self.checkThreads()
+    self.checkOptions(self.options, self.convertOptions)
+    return

Added: long/3D/Gale/trunk/python/BuildSystem/config/programs.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/programs.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/programs.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+import user
+import config.base
+
+import os
+
+class Configure(config.base.Configure):
+  def __init__(self, framework):
+    config.base.Configure.__init__(self, framework)
+    self.headerPrefix = ''
+    self.substPrefix  = ''
+    return
+
+  def __str__(self):
+    return ''
+
+  def setupHelp(self, help):
+    import nargs
+    return
+
+  def configureMkdir(self):
+    '''Make sure we can have mkdir automatically make intermediate directories'''
+    self.getExecutable('mkdir', getFullPath = 1, setMakeMacro = 0)
+    if hasattr(self, 'mkdir'):
+      confDir    = '.conftest'
+      conftmpDir = os.path.join('.conftest', 'tmp')
+      if os.path.exists(conftmpDir): os.rmdir(conftmpDir)
+      if os.path.exists(confDir):    os.rmdir(confDir)
+      try:
+        (output, error, status) = config.base.Configure.executeShellCommand(self.mkdir+' -p '+conftmpDir, log = self.framework.log)
+        if not status and os.path.isdir(conftmpDir):
+          self.mkdir = self.mkdir+' -p'
+          self.logPrint('Adding -p flag to '+self.mkdir+' to automatically create directories')
+        else:
+          self.logPrint('Could not determine flag for '+self.mkdir+' to automatically create directories')
+      except RuntimeError:
+        self.logPrint('Could not determine flag for '+self.mkdir+' to automatically create directories')
+      self.addMakeMacro('MKDIR', self.mkdir)
+      if os.path.exists(conftmpDir): os.rmdir(conftmpDir)
+      if os.path.exists(confDir):    os.rmdir(confDir)
+    return
+
+  def configurePrograms(self):
+    '''Check for the programs needed to build and run PETSc'''
+    self.getExecutable('sh',   getFullPath = 1, resultName = 'SHELL')
+    if not hasattr(self, 'SHELL'): raise RuntimeError('Could not locate sh executable')
+    self.getExecutable('sed',  getFullPath = 1)
+    if not hasattr(self, 'sed'): raise RuntimeError('Could not locate sed executable')
+    self.getExecutable('mv',   getFullPath = 1)
+    if not hasattr(self, 'mv'): raise RuntimeError('Could not locate mv executable')
+    self.getExecutable('cp',   getFullPath = 1)
+    if not hasattr(self, 'cp'): raise RuntimeError('Could not locate cp executable')
+    self.getExecutable('grep', getFullPath = 1)    
+    if not hasattr(self, 'grep'): raise RuntimeError('Could not locate grep executable')
+    self.getExecutable('rm -f',getFullPath = 1, resultName = 'RM')
+    if not hasattr(self, 'RM'): raise RuntimeError('Could not locate rm executable')
+    self.getExecutable('diff', getFullPath = 1,setMakeMacro=0)
+    if hasattr(self, 'diff'):
+      # check if diff supports -w option for ignoring whitespace
+      f = file('diff1', 'w')
+      f.write('diff\n')
+      f.close()
+      f = file('diff2', 'w')
+      f.write('diff  \n')
+      f.close()
+      (out,err,status) = Configure.executeShellCommand(self.diff+' -w diff1 diff2')
+      os.unlink('diff1')
+      os.unlink('diff2')
+      if not status:    
+        self.diff = self.diff + ' -w'
+      self.addMakeMacro('DIFF',self.diff)
+    else:
+      raise RuntimeError('Could not locate diff executable')
+    self.getExecutable('ps', path = '/usr/ucb:/usr/usb', resultName = 'UCBPS')
+    if hasattr(self, 'UCBPS'):
+      self.addDefine('HAVE_UCBPS', 1)
+    self.getExecutable('gzip', getFullPath=1, resultName = 'GZIP')
+    if hasattr(self, 'GZIP'):
+      self.addDefine('HAVE_GZIP', 1)
+    return
+
+  def configure(self):
+    self.executeTest(self.configureMkdir)
+    self.executeTest(self.configurePrograms)    
+    return


Property changes on: long/3D/Gale/trunk/python/BuildSystem/config/programs.py
___________________________________________________________________
Name: svn:mime-type
   + text/script

Modified: long/3D/Gale/trunk/python/BuildSystem/config/setCompilers.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/setCompilers.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/setCompilers.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -274,7 +274,7 @@
       yield os.path.join(self.framework.argDB['with-mpi-dir'], 'bin', 'hcc')
       yield os.path.join(self.framework.argDB['with-mpi-dir'], 'bin', 'mpcc_r')
       self.usedMPICompilers = 0
-      raise RuntimeError('bin/<mpicc,hcc/mpcc_r> you provided with -with-mpi-dir='+self.framework.argDB['with-mpi-dir']+' does not work')
+      raise RuntimeError('MPI compiler wrappers in '+self.framework.argDB['with-mpi-dir']+'/bin do not work. See http://www.mcs.anl.gov/petsc/petsc-as/documentation/faq.html#mpi-compilers')
     else:
       if self.useMPICompilers():
         self.usedMPICompilers = 1
@@ -294,6 +294,8 @@
       vendor = self.vendor
       if (not vendor) and self.framework.argDB['with-gnu-compilers']:
         yield 'gcc'
+        if Configure.isGNU('cc'):
+          yield 'cc'     
       if not self.vendor is None:
         if not vendor and not Configure.isGNU('cc'):
           yield 'cc'
@@ -314,8 +316,11 @@
         if vendor == 'solaris' or not vendor:
           if not Configure.isGNU('cc'):
             yield 'cc'
+      # duplicate code
       if self.framework.argDB['with-gnu-compilers']:
         yield 'gcc'
+        if Configure.isGNU('cc'):
+          yield 'cc'     
     return
 
   def checkCCompiler(self):
@@ -437,6 +442,8 @@
       vendor = self.vendor
       if (not vendor) and self.framework.argDB['with-gnu-compilers']:
         yield 'g++'
+        if Configure.isGNU('c++'):
+          yield 'c++'
       if not self.vendor is None:
         if not vendor:
           if not Configure.isGNU('c++'):
@@ -461,8 +468,11 @@
           yield 'pgCC'
         if vendor == 'solaris':
           yield 'CC'
+      #duplicate code
       if self.framework.argDB['with-gnu-compilers']:
         yield 'g++'
+        if Configure.isGNU('c++'):
+          yield 'c++'
     return
 
   def checkCxxCompiler(self):
@@ -592,6 +602,8 @@
         yield 'gfortran'
         yield 'g95'
         yield 'g77'
+        if Configure.isGNU('f77'):
+          yield 'f77'
       if not self.vendor is None:
         if vendor == 'ibm' or not vendor:
           yield 'xlf'
@@ -614,10 +626,13 @@
           yield 'f90'
           if not Configure.isGNU('f77'):
             yield 'f77'
+      #duplicate code
       if self.framework.argDB['with-gnu-compilers']:
         yield 'gfortran'
         yield 'g95'
         yield 'g77'
+        if Configure.isGNU('f77'):
+          yield 'f77'
     return
 
   def checkFortranCompiler(self):
@@ -928,7 +943,7 @@
     # undefined warning must also have flat_namespace
     if Configure.isDarwin():
       #yield ('libtool', ['-noprebind','-dynamic','-single_module','-flat_namespace -undefined warning','-multiply_defined suppress'], 'dylib')
-      yield (self.CC, ['-dynamiclib -single_module', '-undefined dynamic_lookup', '-flat_namespace -undefined warning', '-multiply_defined suppress'], 'dylib')
+      yield (self.CC, ['-dynamiclib -single_module', '-undefined dynamic_lookup', '-multiply_defined suppress'], 'dylib')
     # Default to static linker
     self.setStaticLinker()
     self.staticLinker = self.AR
@@ -1042,6 +1057,8 @@
     '''Test whether we need to explicitly include libc in shared linking
        - Mac OSX requires an explicit reference to libc for shared linking'''
     self.explicitLibc = None
+    if self.staticLibraries:
+      return
     tmpCompilerDefines   = self.compilerDefines
     self.compilerDefines = ''
     code = '#include <stdlib.h> \nint foo(void) {void *chunk = malloc(31); free(chunk); return 0;}\n'
@@ -1177,8 +1194,7 @@
     if Configure.isDarwin():
       self.executeTest(self.checkLinkerMac)
     self.executeTest(self.checkSharedLinkerPaths)
-    if not self.staticLibraries:
-      self.executeTest(self.checkLibC)
+    self.executeTest(self.checkLibC)
     self.executeTest(self.checkDynamicLinker)
     self.executeTest(self.output)
     return

Modified: long/3D/Gale/trunk/python/BuildSystem/config/types.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/config/types.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/config/types.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -9,6 +9,8 @@
     self.headerPrefix = ''
     self.substPrefix  = ''
     self.sizes = {}
+    self.c99_complex = 0
+    self.cxx_complex = 0
     return
 
   def setupHelp(self, help):
@@ -82,19 +84,24 @@
     self.addDefine('RETSIGTYPE', returnType)
     return
 
-  def checkComplex(self):
-    '''Check for complex numbers in namespace std, and if --enable-complex is given, defines PETSC_USE_COMPLEX if they are present'''
+  def checkC99Complex(self):
+    '''Check for complex numbers in in C99 std'''
+    includes = '#include <complex.h>\n'
+    body     = 'double complex x;\n'
+    if self.checkLink(includes, body):
+      self.addDefine('HAVE_C99_COMPLEX', 1)
+      self.c99_complex = 1
+    return
+
+  def checkCxxComplex(self):
+    '''Check for complex numbers in namespace std'''
     self.pushLanguage('C++')
     includes = '#include <complex>\n'
     body     = 'std::complex<double> x;\n'
-    found    = 0
     if self.checkLink(includes, body):
-      self.addDefine('HAVE_COMPLEX', 1)
-      found = 1
+      self.addDefine('HAVE_CXX_COMPLEX', 1)
+      self.cxx_complex = 1
     self.popLanguage()
-
-    #if found and self.framework.argDB['enable-complex']:
-    #  self.addDefine('PETSC_USE_COMPLEX', 1)
     return
 
   def checkFortranStar(self):
@@ -189,7 +196,7 @@
       endian = self.framework.argDB['with-endian']
     else:
       # See if sys/param.h defines the BYTE_ORDER macro
-      includes = '#include <sys/types.h>\n#include <sys/param.h>\n'
+      includes = '#include <sys/types.h>\n#ifdef HAVE_SYS_PARAM_H\n  #include <sys/param.h>\n#endif\n'
       body     = '''
 #if !BYTE_ORDER || !BIG_ENDIAN || !LITTLE_ENDIAN
   bogus endian macros
@@ -328,8 +335,9 @@
     self.executeTest(self.checkPID)
     self.executeTest(self.checkUID)
     self.executeTest(self.checkSignal)
+    self.executeTest(self.checkC99Complex)
     if hasattr(self.compilers, 'CXX'):
-      self.executeTest(self.checkComplex)
+      self.executeTest(self.checkCxxComplex)
     if hasattr(self.compilers, 'FC'):
       #self.executeTest(self.checkFortranStar)
       self.executeTest(self.checkFortranKind)

Added: long/3D/Gale/trunk/python/BuildSystem/emacsclient.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/emacsclient.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/emacsclient.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,24 @@
+#
+# Uses the emacsclient feature to allow editing a string
+# We should reverse engineer the emacs lisp (looks fairly easy)
+# to skip the file completely
+#
+import commands
+import os
+import pwd
+
+def edit(astring):
+  filename = os.path.join('/tmp', pwd.getpwuid(os.getuid())[0]+'-emacsclient')
+  
+  f = open(filename,'w')
+  f.write(astring)
+  f.close()
+  (status, output) = commands.getstatusoutput('emacsclient '+filename)
+  if status:
+    print 'Problem running emacsclient'
+    print output
+    return astring
+  f = open(filename,'r')
+  astring = f.read()
+  f.close()
+  return astring


Property changes on: long/3D/Gale/trunk/python/BuildSystem/emacsclient.py
___________________________________________________________________
Name: svn:executable
   + *

Added: long/3D/Gale/trunk/python/BuildSystem/getsplicers.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/getsplicers.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/getsplicers.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+#
+#   This is absolute crap; we really need to parse the impls and process them
+#
+import user
+
+import os
+import sys
+import re
+import cPickle
+import project
+import RDict
+import commands
+
+
+def getSplicersDir(splicedimpls,dir,names):
+  reg = re.compile('splicer.begin\(([A-Za-z0-9._]*)\)')
+
+  if 'SCCS' in names: del names[names.index('SCCS')]
+  if 'BitKeeper' in names: del names[names.index('BitKeeper')]
+  if 'docs' in names: del names[names.index('docs')]
+  for f in names:
+    ext = os.path.splitext(f)[1]
+    if not ext in splicedimpls: continue
+    if f == '__init__.py': continue
+    if not os.path.isfile(os.path.join(dir,f)): continue
+    fd = open(os.path.join(dir,f),'r')
+    line = fd.readline()
+    while line:
+      if not line.find('splicer.begin') == -1:
+        fl = reg.search(line)
+        name = fl.group(1)
+
+        line = fd.readline()
+        body = ''
+        while line.find('splicer.end') == -1:
+          body = body + line
+          line = fd.readline()
+        splicedimpls[ext][name] = body
+
+      line = fd.readline()
+    fd.close()
+  
+def getSplicers(directories):
+  splicedimpls = {'.c' : {}, '.h' : {}, '.cc' : {}, '.hh' : {}, '.py' : {}, '.m' : {}}
+
+  if not directories: directories = [os.getcwd()]
+  for directory in directories:
+    os.path.walk(directory,getSplicersDir,splicedimpls)
+
+  f    = open('splicerblocks', 'w')
+  cPickle.dump(splicedimpls,f)
+  f.close()
+    
+if __name__ ==  '__main__':
+  if len(sys.argv) > 2: sys.exit('Usage: getsplicers.py <directory>')
+  getSplicers(sys.argv[1:-1])
+


Property changes on: long/3D/Gale/trunk/python/BuildSystem/getsplicers.py
___________________________________________________________________
Name: svn:executable
   + *
Name: svn:mime-type
   + text/script

Modified: long/3D/Gale/trunk/python/BuildSystem/install/retrieval.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/install/retrieval.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/install/retrieval.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -72,6 +72,13 @@
       config.base.Configure.executeShellCommand('cd '+root+'; tar -xf '+archive, log = self.log)
     except RuntimeError, e:
       raise RuntimeError('Error doing tar -xf '+archive+': '+str(e))
+    # now find the dirname - and do a chmod
+    try:
+      output = config.base.Configure.executeShellCommand('cd '+root+'; tar -tf '+archive+' | head -n 1', log = self.log)
+      dirname = output[0].strip()
+      config.base.Configure.executeShellCommand('cd '+root+'; chmod -R a+r '+dirname+';find  '+dirname + ' -type d -name "*" -exec chmod a+rx {} \;', log = self.log)
+    except RuntimeError, e:
+      raise RuntimeError('Error  changing permissions for '+archive+': '+str(e))
     os.unlink(localFile)
     return
 

Added: long/3D/Gale/trunk/python/BuildSystem/make.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/make.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/make.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+import user
+import maker
+import project
+
+import os
+
+class Make(maker.Make):
+  '''Build driver for the BuildSystem project: builds the cygwinpath Python
+  extension and provides maintenance targets for publishing the website and
+  bootstrap tarballs.'''
+  def __init__(self):
+    # NOTE(review): "petsc at harley.mcs.anl.gov" looks like the mailing-list
+    # archiver rewrote "@" as " at " -- confirm against the repository source.
+    maker.Make.__init__(self)
+    self.project = project.Project('bk://sidl.bkbits.net/BuildSystem', self.getRoot())
+    self.project.setWebDirectory('petsc at harley.mcs.anl.gov://mcs/www-unix/ase')
+    return
+
+  def setupDependencies(self, sourceDB):
+    '''Register that cygwinpath.c depends on the configure-generated cygwinpath.h'''
+    maker.Make.setupDependencies(self, sourceDB)
+    sourceDB.addDependency(os.path.join('client-python', 'cygwinpath.c'), os.path.join('client-python', 'cygwinpath.h'))
+    return
+
+  def updateDependencies(self, sourceDB):
+    '''Record the current state of cygwinpath.h before the normal dependency update'''
+    sourceDB.updateSource(os.path.join('client-python', 'cygwinpath.h'))
+    maker.Make.updateDependencies(self, sourceDB)
+    return
+
+  def setupConfigure(self, framework):
+    '''Point the configure output header at client-python/cygwinpath.h'''
+    doConfigure = maker.Make.setupConfigure(self, framework)
+    framework.header = os.path.join('client-python', 'cygwinpath.h')
+    return doConfigure
+
+  def configure(self, builder):
+    '''Run configure and keep a handle on the Python configuration module'''
+    framework   = maker.Make.configure(self, builder)
+    self.python = framework.require('config.python', None)
+    return
+
+  def buildCygwinPath(self, builder):
+    '''Builds the Python module which translates Cygwin paths'''
+    # NOTE(review): the configuration name 'Triangle Library' looks copied
+    # from another project's makefile -- confirm it is intentional.
+    builder.pushConfiguration('Triangle Library')
+    compiler = builder.getCompilerObject()
+    linker   = builder.getLinkerObject()
+    compiler.includeDirectories.update(self.python.include)
+    linker.libraries.update(self.python.lib)
+    source = os.path.join('client-python', 'cygwinpath.c')
+    object = os.path.join('client-python', 'cygwinpath.o')
+    # NOTE(review): these two calls use self.builder while the rest of the
+    # method uses the builder argument -- verify they refer to the same object.
+    self.builder.compile([source], object)
+    self.builder.link([object], os.path.join('client-python', 'cygwinpath.so'), shared = 1)
+    builder.popConfiguration()
+    return
+
+  def build(self, builder):
+    '''Build everything: currently just the cygwinpath extension'''
+    self.buildCygwinPath(builder)
+    return
+
+  def t_updateWebsite(self):
+    # Publish the website files after the framework's standard update.
+    # NOTE(review): the 'build' module is not in this file's visible import
+    # list -- confirm build.framework is importable at runtime.
+    build.framework.Framework.t_updateWebsite(self)
+    self.cpWebsite('docs/website/index.html')
+    self.cpWebsite('install/bootstrap.py', 'bootstrap.sh')
+    self.cpWebsite('docs/tutorials/*.ppt')
+    self.cpWebsite('docs/website/faq.html')
+    self.cpWebsite('docs/website/projects.html')
+    return
+
+  def t_updateBootstrap(self):
+    '''Regenerate and upload the bootstrap tarballs for the Runtime and Compiler repositories'''
+    import install.installerclass
+
+    for url in ['bk://sidl.bkbits.net/Runtime', 'bk://sidl.bkbits.net/Compiler']:
+      installer = install.installerclass.Installer()
+      dir       = os.path.join('/mcs','ftp', 'pub', 'petsc', 'sidl')
+      tarball   = installer.getRepositoryName(installer.getMappedUrl(url))+'.tgz'
+      fullPath  = os.path.join(dir, tarball)
+      installer.backup(url)
+      # Best-effort: keep the previous tarball as .old; ignore any ssh failure
+      try: self.executeShellCommand('ssh petsc at harley.mcs.anl.gov mv '+fullPath+' '+fullPath+'.old')
+      except: pass
+      self.cpFile(tarball, 'petsc at harley.mcs.anl.gov:/'+dir)
+      os.remove(tarball)
+    return
+
+if __name__ == '__main__':
+  # Script entry point: instantiate the driver and run the full build workflow
+  Make().run()


Property changes on: long/3D/Gale/trunk/python/BuildSystem/make.py
___________________________________________________________________
Name: svn:executable
   + *
Name: svn:mime-type
   + text/script

Modified: long/3D/Gale/trunk/python/BuildSystem/maker.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/maker.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/maker.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -234,16 +234,43 @@
   '''A basic make template that acts much like a traditional makefile'''
   languageNames = {'C': 'C', 'Cxx': 'Cxx', 'FC': 'Fortran', 'Python': 'Python'}
 
-  def __init__(self, implicitRoot = 0):
+  def __init__(self, implicitRoot = 0, configureParent = None, module = None):
     '''Setup the library and driver source descriptions'''
     if not implicitRoot:
       self.root = os.getcwd()
-    Make.__init__(self)
+    Make.__init__(self, configureParent = configureParent)
     self.lib = {}
     self.dylib = {}
     self.bin = {}
+    if not module is None:
+      self.module = module
     return
 
+  def setupHelp(self, help):
+    import nargs
+
+    help = Make.setupHelp(self, help)
+    help.addArgument('basicMake', 'libdir', nargs.ArgDir(None, 'lib', 'Root for installation of libraries', mustExist = 0, isTemporary = 1))
+    help.addArgument('basicMake', 'bindir', nargs.ArgDir(None, 'bin', 'Root for installation of executables', mustExist = 0, isTemporary = 1))
+    return help
+
+  def getMakeModule(self):
+    if not hasattr(self, '_module'):
+      import sys
+      d = sys.modules['__main__']
+      if os.path.basename(d.__file__) == 'pdb.py':
+        sys.path.insert(0, '.')
+        import make
+        d = sys.modules['make']
+      elif not 'Make' in dir(d):
+        d = sys.modules['make']
+      return d
+    return self._module
+  def setMakeModule(self, module):
+    self._module = module
+    return
+  module = property(getMakeModule, setMakeModule, doc = 'The make module for this build')
+
   def classifySource(self, srcList):
     src = {}
     for f in srcList:
@@ -252,7 +279,7 @@
         if not 'C' in src:
           src['C'] = []
         src['C'].append(f)
-      elif ext in ['.cc', '.hh', '.C', '.cpp']:
+      elif ext in ['.cc', '.hh', '.C', '.cpp', '.cxx']:
         if not 'Cxx' in src:
           src['Cxx'] = []
         src['Cxx'].append(f)
@@ -266,6 +293,20 @@
         src['Python'].append(f)
     return src
 
+  def classifyIncludes(self, incList):
+    inc = {}
+    for f in incList:
+      base, ext = os.path.splitext(f)
+      if ext in ['.h']:
+        if not 'C' in inc:
+          inc['C'] = []
+        inc['C'].append(f)
+      elif ext in ['.hh']:
+        if not 'Cxx' in inc:
+          inc['Cxx'] = []
+        inc['Cxx'].append(f)
+    return inc
+
   def parseDocString(self, docstring, defaultName = None):
     parts = docstring.split(':', 1)
     if len(parts) < 2:
@@ -276,7 +317,7 @@
     else:
       name = parts[0]
       src = parts[1].split()
-    return (name, self.classifySource(src))
+    return (name, self.classifySource(src), self.classifyIncludes(src))
 
   def setupConfigure(self, framework):
     '''We always want to configure'''
@@ -296,15 +337,16 @@
     return framework
 
   def getImplicitLibraries(self):
-    import sys
-    d = sys.modules['__main__']
+    d = self.module
     for name in dir(d):
       if not name.startswith('lib_'):
         continue
       func = getattr(d, name)
       lib = struct()
-      lib.name, lib.src = self.parseDocString(func.__doc__, name[4:])
-      lib.includes, lib.libs = func(self)
+      lib.name, lib.src, lib.inc = self.parseDocString(func.__doc__, name[4:])
+      params = func(self)
+      lib.includes, lib.libs = params[0:2]
+      if (len(params) == 3): lib.flags = params[2]
       lib.configuration = name[4:]
       self.logPrint('Found configuration '+lib.configuration+' for library '+lib.name)
       self.logPrint('  includes '+str(lib.includes)+' libraries '+str(lib.libs))
@@ -312,15 +354,16 @@
     return
 
   def getImplicitDynamicLibraries(self):
-    import sys
-    d = sys.modules['__main__']
+    d = self.module
     for name in dir(d):
       if not name.startswith('dylib_'):
         continue
       func = getattr(d, name)
       lib = struct()
-      lib.name, lib.src = self.parseDocString(func.__doc__, name[6:])
-      lib.includes, lib.libs = func(self)
+      lib.name, lib.src, lib.inc = self.parseDocString(func.__doc__, name[6:])
+      params = func(self)
+      lib.includes, lib.libs = params[0:2]
+      if (len(params) == 3): lib.flags = params[2]
       lib.configuration = name[6:]
       self.logPrint('Found configuration '+lib.configuration+' for dynamic library '+lib.name)
       self.logPrint('  includes '+str(lib.includes)+' libraries '+str(lib.libs))
@@ -328,20 +371,34 @@
     return
 
   def getImplicitExecutables(self):
-    import sys
-    d = sys.modules['__main__']
+    d = self.module
     for name in dir(d):
       if not name.startswith('bin_'):
         continue
       func = getattr(d, name)
       bin = struct()
-      bin.name, bin.src = self.parseDocString(func.__doc__, name[4:])
-      bin.includes, bin.libs = func(self)
+      bin.name, bin.src, bin.inc = self.parseDocString(func.__doc__, name[4:])
+      params = func(self)
+      bin.includes, bin.libs = params[0:2]
+      if (len(params) == 3): bin.flags = params[2]
       bin.configuration = name[4:]
       self.bin[bin.name] = bin
     return
 
   def setupDirectories(self, builder):
+    if self.prefix is None:
+      self.logPrint('ERROR: prefix is None')
+      self.libDir = os.path.abspath(self.argDB['libdir'])
+      self.binDir = os.path.abspath(self.argDB['bindir'])
+    else:
+      self.logPrint('prefix '+self.prefix+' libDir '+self.argDB['libdir']+' totdir '+os.path.join(self.prefix, self.argDB['libdir']))
+      self.libDir = os.path.abspath(os.path.join(self.prefix, self.argDB['libdir']))
+      self.binDir = os.path.abspath(os.path.join(self.prefix, self.argDB['bindir']))
+    self.logPrint('Library directory is '+self.libDir)
+    self.logPrint('Executable directory is '+self.binDir)
+    return
+
+  def setupLibraryDirectories(self, builder):
     '''Determine the directories for source includes, libraries, and binaries'''
     languages = sets.Set()
     [languages.update(lib.src.keys()) for lib in self.lib.values()+self.dylib.values()]
@@ -349,9 +406,12 @@
     self.includeDir = {}
     for language in languages:
       self.srcDir[language] = os.path.abspath(os.path.join('src', self.languageNames[language].lower()))
-      self.includeDir[language] = os.path.abspath('include')
-    self.libDir = os.path.abspath('lib')
-    self.binDir = os.path.abspath('bin')
+      self.logPrint('Source directory for '+language+' is '+self.srcDir[language])
+      if self.prefix is None:
+        self.includeDir[language] = os.path.abspath('include')
+      else:
+        self.includeDir[language] = os.path.abspath(os.path.join(self.prefix, 'include'))
+      self.logPrint('Include directory for '+language+' is '+self.includeDir[language])
     return
 
   def setupLibraries(self, builder):
@@ -362,6 +422,8 @@
       builder.pushConfiguration(lib.configuration)
       for language in languages:
         builder.pushLanguage(language)
+        if hasattr(lib, 'flags'):
+          builder.setCompilerFlags(' '.join(lib.flags))
         compiler = builder.getCompilerObject()
         lib.includes = filter(lambda inc: inc, lib.includes)
         self.logPrint('  Adding includes '+str(lib.includes))
@@ -392,6 +454,8 @@
       builder.pushConfiguration(lib.configuration)
       for language in languages:
         builder.pushLanguage(language)
+        if hasattr(lib, 'flags'):
+          builder.setCompilerFlags(' '.join(lib.flags))
         compiler = builder.getCompilerObject()
         lib.includes = filter(lambda inc: inc, lib.includes)
         self.logPrint('  Adding includes '+str(lib.includes))
@@ -422,6 +486,8 @@
       builder.pushConfiguration(bin.configuration)
       for language in languages:
         builder.pushLanguage(language)
+        if hasattr(bin, 'flags'):
+          builder.setCompilerFlags(' '.join(bin.flags))
         compiler = builder.getCompilerObject()
         bin.includes = filter(lambda inc: inc, bin.includes)
         self.logPrint('  Adding includes '+str(bin.includes))
@@ -446,14 +512,17 @@
   def buildDirectories(self, builder):
     '''Create the necessary directories'''
     languages = sets.Set()
-    [languages.update(lib.src.keys()) for lib in self.lib.values()]
+    [languages.update(lib.src.keys()) for lib in self.lib.values()+self.dylib.values()]
     for language in languages:
       if not os.path.isdir(self.includeDir[language]):
         os.mkdir(self.includeDir[language])
+        self.logPrint('Created include directory '+self.includeDir[language])
     if not os.path.isdir(self.libDir):
       os.mkdir(self.libDir)
+      self.logPrint('Created library directory '+self.libDir)
     if not os.path.isdir(self.binDir):
       os.mkdir(self.binDir)
+      self.logPrint('Created executable directory '+self.binDir)
     return
 
   def buildLibraries(self, builder):
@@ -467,7 +536,7 @@
         sources = [os.path.join(self.srcDir, self.srcDir[language], f) for f in lib.src[language]]
         for f in sources:
           builder.compile([f])
-        objects.extend([self.builder.getCompilerTarget(f) for f in sources])
+        objects.extend([self.builder.getCompilerTarget(f) for f in sources if not self.builder.getCompilerTarget(f) is None])
         builder.popLanguage()
       builder.link(objects, os.path.join(self.libDir, lib.name+'.'+self.setCompilers.sharedLibraryExt), shared = 1)
       builder.popConfiguration()
@@ -484,7 +553,7 @@
         sources = [os.path.join(self.srcDir, self.srcDir[language], f) for f in lib.src[language]]
         for f in sources:
           builder.compile([f])
-        objects.extend([self.builder.getCompilerTarget(f) for f in sources])
+        objects.extend([self.builder.getCompilerTarget(f) for f in sources if not self.builder.getCompilerTarget(f) is None])
         builder.popLanguage()
       builder.link(objects, os.path.join(self.libDir, lib.name+'.'+self.setCompilers.dynamicLibraryExt), shared = 'dynamic')
       builder.popConfiguration()
@@ -508,10 +577,11 @@
     return
 
   def setupBuild(self, builder):
+    self.executeSection(self.setupDirectories, builder)
     self.getImplicitLibraries()
     self.getImplicitDynamicLibraries()
     self.getImplicitExecutables()
-    self.executeSection(self.setupDirectories, builder)
+    self.executeSection(self.setupLibraryDirectories, builder)
     self.executeSection(self.setupLibraries, builder)
     self.executeSection(self.setupDynamicLibraries, builder)
     self.executeSection(self.setupExecutables, builder)
@@ -527,6 +597,23 @@
     self.executeSection(self.buildExecutables, builder)
     return
 
def installIncludes(self, builder):
  '''Copy each library's public headers into the per-language include directory.

  Walks every static and dynamic library and, for each header listed in
  lib.inc (keyed by language), copies it from the language source
  directory self.srcDir[language] into self.includeDir[language].
  An existing copy at the destination is removed first.'''
  import shutil
  for lib in self.lib.values()+self.dylib.values():
    self.logPrint('Installing library: '+lib.name)
    for language in lib.inc:
      for inc in lib.inc[language]:
        # Headers are installed flat: only the basename survives under includeDir
        installInc = os.path.join(self.includeDir[language], os.path.basename(inc))
        if os.path.isfile(installInc):
          os.remove(installInc)
        self.logPrint('Installing '+inc+' into '+installInc)
        shutil.copy(os.path.join(self.srcDir[language], inc), installInc)
  return
+
def install(self, builder, argDB):
  '''Install the built project; currently only header files are installed.'''
  self.executeSection(self.installIncludes, builder)
  return
+
 class SIDLMake(Make):
   def __init__(self, builder = None):
     import re

Modified: long/3D/Gale/trunk/python/BuildSystem/nargs.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/nargs.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/nargs.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -20,7 +20,11 @@
   '''This is the base class for all objects contained in RDict. Access to the raw argument values is
 provided by getValue() and setValue(). These objects can be thought of as type objects for the
 values themselves. It is possible to set an Arg in the RDict which has not yet been assigned a value
-in order to declare the type of that option.'''
+in order to declare the type of that option.
+
+Inputs which cannot be converted to the correct type will cause TypeError, those failing validation
+tests will cause ValueError.
+'''
   def __init__(self, key, value = None, help = '', isTemporary = 0):
     self.key = key
     if not value is None:
@@ -104,6 +108,24 @@
     return None
   findArgument = staticmethod(findArgument)
 
+  def processAlternatePrefixes(argList):
+    '''Convert alternate prefixes to our normal form'''
+    for l in range(0, len(argList)):
+      name = argList[l]
+      if name.find('enable-') >= 0:
+        argList[l] = name.replace('enable-','with-')
+        if name.find('=') == -1: argList[l] = argList[l]+'=1'
+      if name.find('disable-') >= 0:
+        argList[l] = name.replace('disable-','with-')
+        if name.find('=') == -1: argList[l] = argList[l]+'=0'
+        elif name.endswith('=1'): argList[l].replace('=1','=0')
+      if name.find('without-') >= 0:
+        argList[l] = name.replace('without-','with-')
+        if name.find('=') == -1: argList[l] = argList[l]+'=0'
+        elif name.endswith('=1'): argList[l].replace('=1','=0')
+    return
+  processAlternatePrefixes = staticmethod(processAlternatePrefixes)
+
   def __str__(self):
     if not self.isValueSet():
       return 'Empty '+str(self.__class__)
@@ -229,7 +251,7 @@
     except:
       raise TypeError('Invalid integer number: '+str(value)+' for key '+str(self.key))
     if value < self.min or value >= self.max:
-      raise TypeError('Number out of range: '+str(value)+' not in ['+str(self.min)+','+str(self.max)+')'+' for key '+str(self.key))
+      raise ValueError('Number out of range: '+str(value)+' not in ['+str(self.min)+','+str(self.max)+')'+' for key '+str(self.key))
     self.value = value
     return
 
@@ -251,7 +273,7 @@
     except:
       raise TypeError('Invalid floating point number: '+str(value)+' for key '+str(self.key))
     if value < self.min or value >= self.max:
-      raise TypeError('Number out of range: '+str(value)+' not in ['+str(self.min)+','+str(self.max)+')'+' for key '+str(self.key))
+      raise ValueError('Number out of range: '+str(value)+' not in ['+str(self.min)+','+str(self.max)+')'+' for key '+str(self.key))
     self.value = value
     return
 
@@ -260,7 +282,6 @@
   def __init__(self, key, value = None, help = '', mustExist = 1, isTemporary = 0):
     self.mustExist = mustExist
     Arg.__init__(self, key, value, help, isTemporary)
-
     return
 
   def getEntryPrompt(self):
@@ -332,7 +353,7 @@
     value = nvalue
     for dir in value:
       if self.mustExist and not os.path.isdir(dir):
-        raise TypeError('Invalid directory: '+str(dir)+' for key '+str(self.key))
+        raise ValueError('Invalid directory: '+str(dir)+' for key '+str(self.key))
     self.value = value
     return
 
@@ -371,7 +392,7 @@
         value = [value]
       for lib in value:
         if lib.startswith('/') and not os.path.isfile(lib):
-          raise TypeError('Invalid library: '+str(lib)+' for key '+str(self.key))
+          raise ValueError('Invalid library: '+str(lib)+' for key '+str(self.key))
     self.value = value
     return
 
@@ -423,8 +444,8 @@
           if self.checkExecutable(dir, value):
             found = 1
             break
-      if found:
-        raise TypeError('Invalid executable: '+str(value)+' for key '+str(self.key))
+      if not found:
+        raise ValueError('Invalid executable: '+str(value)+' for key '+str(self.key))
     self.value = value+options
     return
 
@@ -441,6 +462,50 @@
   def setValue(self, value):
     '''Set the value. SHOULD MAKE THIS A PROPERTY'''
     if self.regExp and not self.re.match(value):
-      raise TypeError('Invalid string '+str(value)+'. You must give a string satisfying "'+str(self.regExp)+'"'+' for key '+str(self.key))
+      raise ValueError('Invalid string '+str(value)+'. You must give a string satisfying "'+str(self.regExp)+'"'+' for key '+str(self.key))
     self.value = value
     return
+
class ArgDownload(Arg):
  '''Arguments that represent software downloads.

  The stored value is normalized to 0 (no), 1 (yes), 2 (ifneeded), or a
  string naming an existing local file to use as the download source.'''
  def __init__(self, key, value = None, help = '', isTemporary = 0):
    Arg.__init__(self, key, value, help, isTemporary)
    return

  def valueName(self, value):
    '''Render one normalized download value in human-readable form.'''
    if value == 0: return 'no'
    if value == 1: return 'yes'
    if value == 2: return 'ifneeded'
    return str(value)

  def __str__(self):
    if not self.isValueSet():
      return 'Empty '+str(self.__class__)
    if isinstance(self.value, list):
      return str([self.valueName(v) for v in self.value])
    return self.valueName(self.value)

  def getEntryPrompt(self):
    return 'Please enter download value for '+str(self.key)+': '

  def setValue(self, value):
    '''Set the value. SHOULD MAKE THIS A PROPERTY'''
    try:
      # Normalize the accepted spellings; anything else that is not an
      # int is treated as a file location string
      if value == '0' or value == 'no' or value == 'false':
        value = 0
      elif value == '1' or value == 'yes' or value == 'true':
        value = 1
      elif value == 'ifneeded':
        value = 2
      elif not isinstance(value, int):
        value = str(value)
    except:
      raise TypeError('Invalid download value: '+str(value)+' for key '+str(self.key))
    if isinstance(value, str) and not os.path.isfile(value):
      raise ValueError('Invalid download location: '+str(value)+' for key '+str(self.key))
    self.value = value
    return

Modified: long/3D/Gale/trunk/python/BuildSystem/script.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/script.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/script.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -291,7 +291,8 @@
     if 'languageModule' in d:
       d['languageModule'] = dict([(lang,mod._loadName) for lang,mod in d['languageModule'].items()])
     for member in ['preprocessorObject', 'compilerObject', 'linkerObject', 'sharedLinkerObject', 'dynamicLinkerObject', 'framework']:
-      del d[member]
+      if member in d:
+        del d[member]
     return d
 
   def __setstate__(self, d):

Added: long/3D/Gale/trunk/python/BuildSystem/setsplicers.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/setsplicers.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/setsplicers.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+#
+#   This is absolute crap; we really need to parse the impls and process them
+#
+import user
+
+import os
+import sys
+import re
+import cPickle
+import string
+
def setSplicersDir(splicedimpls,dir,names):
  '''os.path.walk callback: re-insert saved splicer blocks into files under dir.

  splicedimpls maps file extension -> {splicer block name -> saved text}.
  names is the directory listing; it is pruned in place so the walk skips
  version-control and documentation subdirectories. Every file whose
  extension appears in splicedimpls is scanned for
  "splicer.begin(<name>)" ... "splicer.end" regions; each region body is
  replaced by the saved block, and the file is rewritten only when at
  least one replacement was made.'''
  reg        = re.compile('splicer.begin\(([A-Za-z0-9._]*)\)')
  reginclude = re.compile('#include [ ]*"([a-zA-Z_0-9/]*.[h]*)"')

  # Prune VC/doc subdirectories from the walk (mutating names is how
  # os.path.walk lets a visitor skip subtrees)
  if 'SCCS' in names: del names[names.index('SCCS')]
  if 'BitKeeper' in names: del names[names.index('BitKeeper')]
  if 'docs' in names: del names[names.index('docs')]
  for f in names:
    ext = os.path.splitext(f)[1]
    if not ext in splicedimpls: continue
    if f == '__init__.py': continue
    if not os.path.isfile(os.path.join(dir,f)): continue
    fd = open(os.path.join(dir,f),'r')
    foundreplacement = 0
    text = ''
    line = fd.readline()
    while line:
      text = text+line
      if not line.find('splicer.begin') == -1:
        fl = reg.search(line)
        name = fl.group(1)

        # Consume the old region body; 'line' ends up holding the
        # splicer.end line, so both marker lines are preserved in text
        line = fd.readline()
        body = ''
        while line.find('splicer.end') == -1:
          body = body + line
          line = fd.readline()

        # replace body with saved splicer block
        if name.endswith('._includes') and ext == '.cc':
          # C++ _includes regions are rebuilt from the saved _includes
          # blocks of all immediate children of this scope (names with no
          # further '.' after the prefix)
          foundreplacement = 1
#          print 'handling includes for class '+name
          name = name[0:-10]
          len1  = len(name)
          body = '#include "SIDL.hh"\n'
          for n in splicedimpls[ext]:
            if n.startswith(name) and n.endswith('._includes') and n[len1+1:-10].find('.') == -1:
#              print '   '+n
              body = body + splicedimpls[ext][n]
        elif name in splicedimpls[ext]:
          foundreplacement = 1
#          print 'Replacing -------'+name
#          print body
#          print 'with ------------'
#          print splicedimpls[ext][name]
          body = splicedimpls[ext][name]
        else:
          # No saved block: keep the body that was just read unchanged
#          print 'Cannot find splicer block '+name+' '+f+' ext '+ext
          pass

#         convert ASE directory hierarchy of includes
        # e.g. '#include "A/B/B.hh"' -> '#include "A_B.hh"' when the last
        # two path components match
        nb = ''
        for l in body.split('\n'):
          if reginclude.search(l):
            fname    = reginclude.match(l).group(1)
            (fn,extmp) = os.path.splitext(fname)
            fn = fn.split('/')
            if len(fn) > 1 and fn[-1] == fn[-2]:
              t = '#include "'+string.join(fn[0:-1],'_')+'.hh"'
              nb = nb + t + '\n'
            else:
              nb = nb + l + '\n'
          else:
            nb = nb + l + '\n'

        text = text+nb
        text = text+line
      line = fd.readline()
    fd.close()

    if foundreplacement:
#      print 'Replaced blocks in '+os.path.join(dir,f)
      fd = open(os.path.join(dir,f),'w')
      fd.write(text)
      fd.close()

#    print text
+  
def setSplicers(directory):
  '''Load saved splicer blocks and splice them back into sources under directory.

  Reads the "splicerblocks" pickle, renames SIDL.Args/SIDL.ProjectState
  blocks to their SIDLASE equivalents, rewrites ASE-era calls
  (.set argument order, .create -> .createRow, renamed methods/paths),
  then walks directory (default: the current directory) applying
  setSplicersDir to every file.'''
  f    = open('splicerblocks', 'r')
  splicedimpls = cPickle.load(f)
  f.close()

  # change SIDL.Args and SIDL.ProjectState impl names
  replaces =  {'SIDL.Args':'SIDLASE.Args','SIDL.ProjectState':'SIDLASE.ProjectState'}
  for i in splicedimpls:
    blocks = splicedimpls[i]
    # Iterate over a snapshot of the keys: the body inserts the renamed
    # entry and deletes the old one, which is not safe while iterating
    # the dictionary itself
    for j in list(blocks.keys()):
      for k in replaces:
        if not string.find(j,k) == -1:
          newname = j.replace(k,replaces[k])
#          print 'Converting '+j+' to '+newname+' ext '+i
          blocks[newname] = blocks[j]
          del blocks[j]

  regset    = re.compile('\.set\(([->< a-zA-Z_0-9/.\(\)\[\]&+*]*),([->< a-zA-Z_0-9/.\(\)\[\]&+*]*)\)[ ]*;')
  regcreate = re.compile('\.create\(([->< a-zA-Z_0-9/.\(\)\[\]&+*]*),([->< a-zA-Z_0-9/.\(\)\[\]&+*]*),([->< a-zA-Z_0-9/.\(\)\[\]&+*]*)\)[ ]*;')
  replaces =  {'SIDL/Args':'SIDLASE/Args',    'SIDL/ProjectState':'SIDLASE/ProjectState',
               'SIDL::Args':'SIDLASE::Args',  'SIDL::ProjectState':'SIDLASE::ProjectState',
               '.dim(':'.dimen(',             '.destroy(':'.deleteRef(',
               '.setMessage(':'.setNote(',    '.getMessage(':'getNote(',
               '.isInstanceOf(':'.isType(', ' IDENT':' MPIB::IDENT',
               ' SIMILAR':' MPIB::SIMILAR',    ' CONGRUENT':' MPIB::CONGRUENT',
               '__enum':''}
  # This pass only rewrites values, never keys, so plain iteration is safe
  for i in splicedimpls:
    for j in splicedimpls[i]:
      if regset.search(splicedimpls[i][j]):
        splicedimpls[i][j] = regset.sub('.set(\\2,\\1);',splicedimpls[i][j])
      if regcreate.search(splicedimpls[i][j]):
        splicedimpls[i][j] = regcreate.sub('.createRow(\\1,\\2,\\3);',splicedimpls[i][j])
      for k in replaces:
        splicedimpls[i][j] = splicedimpls[i][j].replace(k,replaces[k])

  if not directory: directory = os.getcwd()
  os.path.walk(directory,setSplicersDir,splicedimpls)
+
+    
if __name__ ==  '__main__':
  # Usage: setsplicers.py [directory]  (this script, not getsplicers.py)
  if len(sys.argv) > 2: sys.exit('Usage: setsplicers.py <directory>')
  # Guarantee argv[1] exists; None makes setSplicers fall back to os.getcwd()
  sys.argv.append(None)
  setSplicers(sys.argv[1])
+


Property changes on: long/3D/Gale/trunk/python/BuildSystem/setsplicers.py
___________________________________________________________________
Name: svn:executable
   + *
Name: svn:mime-type
   + text/script

Added: long/3D/Gale/trunk/python/BuildSystem/sidldllpath.py
===================================================================
--- long/3D/Gale/trunk/python/BuildSystem/sidldllpath.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/BuildSystem/sidldllpath.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+import user
+import project
+import RDict
+
+import os
+import sys
+
def getSIDLDLLPath():
  '''Build the SIDL_DLL_PATH value: the entries inherited from the
  environment plus the lib directory of every installed project,
  joined with ";".'''
  if 'SIDL_DLL_PATH' in os.environ:
    dllPath = [entry for entry in os.environ['SIDL_DLL_PATH'].split(';') if len(entry)]
  else:
    dllPath = []
  argDB = RDict.RDict(parentDirectory = os.path.abspath(os.path.dirname(sys.modules['RDict'].__file__)))
  for proj in argDB['installedprojects']:
    # Best effort: skip any project whose root cannot be determined
    try:
      libDir = os.path.join(proj.getRoot(), 'lib')
      if not libDir in dllPath:
        dllPath.append(libDir)
    except: pass
  return ';'.join(dllPath)
+
def getSIDLDLLMap():
  '''Map each implemented class name to its implementation entry across
  all installed projects.'''
  argDB = RDict.RDict(parentDirectory = os.path.abspath(os.path.dirname(sys.modules['RDict'].__file__)))
  dllMap = {}
  for proj in argDB['installedprojects']:
    # impls[cls][0][0]: first element of the first implementation record —
    # presumably the library; confirm against project.getImplementations
    for cls, entries in proj.getImplementations().items():
      dllMap[cls] = entries[0][0]
  return dllMap
+
if __name__ ==  '__main__':
  # Command-line interface: "path" (the default) prints the SIDL_DLL_PATH
  # value, "map" prints the class -> implementation map
  if len(sys.argv) > 2: sys.exit('Usage: sidldllpath.py [path | map]')
  if len(sys.argv) == 1 or sys.argv[1] == 'path':
    print getSIDLDLLPath()
  elif sys.argv[1] == 'map':
    print getSIDLDLLMap()


Property changes on: long/3D/Gale/trunk/python/BuildSystem/sidldllpath.py
___________________________________________________________________
Name: svn:executable
   + *
Name: svn:mime-type
   + text/script

Modified: long/3D/Gale/trunk/python/Gale/Configure.py
===================================================================
--- long/3D/Gale/trunk/python/Gale/Configure.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/Gale/Configure.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -22,20 +22,20 @@
 # localstatedir, sbindir, sharedstatedir, sysconfdir, and maybe
 # oldincludedir.  But it should all be moved into BuildSystem proper.
 
-    help.addArgument('PETSc', '-prefix=<path>',            nargs.Arg(None, '/usr/local', 'Specify location to install Gale (eg. /usr/local)'))
-    help.addArgument('PETSc', '-exec_prefix=<path>',
+    help.addArgument('Install', '-prefix=<path>',            nargs.Arg(None, '/usr/local', 'Specify location to install Gale (eg. /usr/local)'))
+    help.addArgument('Install', '-exec_prefix=<path>',
                      nargs.Arg(None, '${prefix}',
                                'Specify location to install Gale (eg. /usr/local)'))
-    help.addArgument('PETSc', '-bindir=<path>',
+    help.addArgument('Install', '-bindir=<path>',
                      nargs.Arg(None, '${exec_prefix}/bin',
                                'Specify location to install Gale binaries (eg. /usr/local/bin)'))
-    help.addArgument('PETSc', '-libdir=<path>',
+    help.addArgument('Install', '-libdir=<path>',
                      nargs.Arg(None, '${exec_prefix}/lib',
                                'Specify location to install Gale libraries (eg. /usr/local/lib)'))
-    help.addArgument('PETSc', '-mandir=<path>',
+    help.addArgument('Install', '-mandir=<path>',
                      nargs.Arg(None, '${exec_prefix}/man',
                                'Specify location to install Gale man pages (eg. /usr/local/man)'))
-    help.addArgument('PETSc', '-includedir=<path>',
+    help.addArgument('Install', '-includedir=<path>',
                      nargs.Arg(None, '${exec_prefix}/include',
                                'Specify location to install Gale header files (eg. /usr/local/include)'))
     return
@@ -43,28 +43,20 @@
   def setupDependencies(self, framework):
     config.base.Configure.setupDependencies(self, framework)
     self.setCompilers  = framework.require('config.setCompilers',      self)
-    self.arch          = framework.require('Gale.utilities.arch', self.setCompilers)
-    self.petscdir      = framework.require('PETSc.utilities.petscdir', self.setCompilers)
-    self.petscdir.isPetsc = 0
-    self.languages     = framework.require('PETSc.utilities.languages',self.setCompilers)
     self.compilers     = framework.require('config.compilers',         self)
-#    self.types         = framework.require('config.types',             self)
     self.headers       = framework.require('config.headers',           self)
     self.functions     = framework.require('config.functions',         self)
     self.libraries     = framework.require('config.libraries',         self)
-#    self.python        = framework.require('config.python',         self)
     self.libxml2       = framework.require('Gale.packages.libxml2',    self)
     self.libxml2       = framework.require('Gale.packages.petsc',    self)
-    self.bmakedir      = framework.require('PETSc.utilities.bmakeDir', self)
-    self.mpi           = framework.require('PETSc.packages.MPI',       self)
+    self.mpi           = framework.require('config.packages.MPI',       self)
 
     self.compilers.headerPrefix = self.headerPrefix
-#    self.types.headerPrefix     = self.headerPrefix
     self.headers.headerPrefix   = self.headerPrefix
     self.functions.headerPrefix = self.headerPrefix
     self.libraries.headerPrefix = self.headerPrefix
     headersC = map(lambda name: name+'.h', ['stdlib'])
-    functions = ['drand48']
+    functions = ['rand']
     libraries1 = [(['socket', 'nsl'], 'socket'), (['fpe'], 'handle_sigfpes')]
     self.headers.headers.extend(headersC)
     self.functions.functions.extend(functions)
@@ -72,33 +64,22 @@
     return
 
   def configureDefaults(self):
-    if self.framework.argDB['with-default-arch']:
-      fd = file(os.path.join('bmake', 'petscconf'), 'w')
-      fd.write('PETSC_ARCH='+self.arch.arch+'\n')
-      fd.write('include '+os.path.join('${PETSC_DIR}','bmake',self.arch.arch,'petscconf')+'\n')
-      fd.close()
-      self.framework.actions.addArgument('PETSc', 'Build', 'Set default architecture to '+self.arch.arch+' in bmake/petscconf')
-    else:
-      os.unlink(os.path.join('bmake', 'petscconf'))
     return
 
   def configureScript(self):
     '''Output a script in the bmake directory which will reproduce the configuration'''
     import nargs
 
-    scriptName = os.path.join(self.bmakedir.bmakeDir, 'configure.py')
+    scriptName = 'reconfigure.py'
     args = dict([(nargs.Arg.parseArgument(arg)[0], arg) for arg in self.framework.clArgs])
     if 'configModules' in args:
       del args['configModules']
     if 'optionsModule' in args:
       del args['optionsModule']
-    if not 'PETSC_ARCH' in args:
-      args['PETSC_ARCH'] = '-PETSC_ARCH='+str(self.arch.arch)
     f = file(scriptName, 'w')
     f.write('#!/usr/bin/env python\n')
     f.write('if __name__ == \'__main__\':\n')
     f.write('  import sys\n')
-    f.write('  sys.path.insert(0, '+repr(os.path.join(self.petscdir.dir, 'config'))+')\n')
     f.write('  import configure\n')
     f.write('  configure_options = '+repr(args.values())+'\n')
     f.write('  configure.petsc_configure(configure_options)\n')
@@ -124,17 +105,6 @@
     return
 
   def configure(self):
-##    if not os.path.samefile(self.petscdir.dir, os.getcwd()):
-##      raise RuntimeError('Wrong GALE_DIR option specified: '+str(self.petscdir.dir) + '\n  Configure invoked in: '+os.path.realpath(os.getcwd()))
-    self.framework.header          = 'bmake/'+self.arch.arch+'/galeconf.h'
-    self.framework.cHeader         = 'bmake/'+self.arch.arch+'/galefix.h'
-    self.framework.makeMacroHeader = 'bmake/'+self.arch.arch+'/galeconf'
-    self.framework.makeRuleHeader  = 'bmake/'+self.arch.arch+'/galerules'
-    if self.libraries.math is None:
-      raise RuntimeError('Gale requires a functional math library. Please send configure.log to cig-long at geodynamics.org.')
-    if self.languages.clanguage == 'Cxx' and not hasattr(self.compilers, 'CXX'):
-      raise RuntimeError('Cannot set C language to C++ without a functional C++ compiler.')
-##    self.executeTest(self.configureDefaults)
     self.executeTest(self.configureScript)
     self.executeTest(self.configureInstall)
     self.executeTest(self.configureGCOV)

Modified: long/3D/Gale/trunk/python/Gale/packages/libxml2.py
===================================================================
--- long/3D/Gale/trunk/python/Gale/packages/libxml2.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/Gale/packages/libxml2.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -3,12 +3,12 @@
 import user
 import config.autoconf
 import os
-import PETSc.package
+import config.package
 
-class Configure(PETSc.package.Package,config.autoconf.Configure):
+class Configure(config.package.Package,config.autoconf.Configure):
   def __init__(self, framework):
     config.autoconf.Configure.__init__(self, framework)
-    PETSc.package.Package.__init__(self, framework)
+    config.package.Package.__init__(self, framework)
     self.headerPrefix = ''
     self.substPrefix  = ''
     self.foundlibxml2     = 0
@@ -29,10 +29,7 @@
     return
 
   def setupDependencies(self, framework):
-    PETSc.package.Package.setupDependencies(self, framework)
-    self.make = framework.require('PETSc.utilities.Make', self)
-    self.libraryOptions = framework.require('PETSc.utilities.libraryOptions', self)
-    self.languages = framework.require('PETSc.utilities.languages', self)
+    config.package.Package.setupDependencies(self, framework)
     return
 
   def configureLibrary(self):

Modified: long/3D/Gale/trunk/python/Gale/packages/petsc.py
===================================================================
--- long/3D/Gale/trunk/python/Gale/packages/petsc.py	2006-08-15 22:49:31 UTC (rev 4300)
+++ long/3D/Gale/trunk/python/Gale/packages/petsc.py	2006-08-15 22:49:33 UTC (rev 4301)
@@ -3,12 +3,12 @@
 import user
 import config.autoconf
 import os
-import PETSc.package
+import config.package
 
-class Configure(PETSc.package.Package,config.autoconf.Configure):
+class Configure(config.package.Package,config.autoconf.Configure):
   def __init__(self, framework):
     config.autoconf.Configure.__init__(self, framework)
-    PETSc.package.Package.__init__(self, framework)
+    config.package.Package.__init__(self, framework)
     self.download = ['http://geodynamics.org/~walter/petsc-2.3.1.tar.gz']
     self.downloadname = 'petsc'
     self.headerPrefix = ''
@@ -26,17 +26,14 @@
     
   def setupHelp(self, help):
     import nargs
-    help.addArgument('petsc', '-with-petsc-arch=<arch>', nargs.ArgDir(None, None, 'Specify an arch for petsc'))
-    help.addArgument('petsc', '-with-petsc-dir=<dir>', nargs.ArgDir(None, None, 'Specify an installation directory for petsc.'))
-    help.addArgument('petsc', '-with-petsc-include=<include dir>', nargs.ArgDir(None, None, 'Specify an include directory for petsc'))
-    help.addArgument('petsc', '-with-petsc-libdir=<petsc lib>',         nargs.Arg(None, None,    'Specify petsc library directory'))
+    help.addArgument('Petsc', '-with-petsc-arch=<arch>', nargs.ArgDir(None, None, 'Specify an arch for petsc'))
+    help.addArgument('Petsc', '-with-petsc-dir=<dir>', nargs.ArgDir(None, None, 'Specify an installation directory for petsc.'))
+    help.addArgument('Petsc', '-with-petsc-include=<include dir>', nargs.ArgDir(None, None, 'Specify an include directory for petsc'))
+    help.addArgument('Petsc', '-with-petsc-libdir=<petsc lib>',         nargs.Arg(None, None,    'Specify petsc library directory'))
     return
 
   def setupDependencies(self, framework):
-    PETSc.package.Package.setupDependencies(self, framework)
-    self.make = framework.require('PETSc.utilities.Make', self)
-    self.libraryOptions = framework.require('PETSc.utilities.libraryOptions', self)
-    self.languages = framework.require('PETSc.utilities.languages', self)
+    config.package.Package.setupDependencies(self, framework)
     return
 
   def check_library_link(self, libraryDir, bmakeDir):



More information about the cig-commits mailing list