[cig-commits] r17231 - short/3D/PyLith/trunk/examples/greensfns/hex8

willic3 at geodynamics.org
Thu Sep 30 18:50:38 PDT 2010


Author: willic3
Date: 2010-09-30 18:50:38 -0700 (Thu, 30 Sep 2010)
New Revision: 17231

Added:
   short/3D/PyLith/trunk/examples/greensfns/hex8/designdata.py
   short/3D/PyLith/trunk/examples/greensfns/hex8/designprior.py
   short/3D/PyLith/trunk/examples/greensfns/hex8/getcoords.py
   short/3D/PyLith/trunk/examples/greensfns/hex8/lininvert.py
   short/3D/PyLith/trunk/examples/greensfns/hex8/points2spatialdb.py
Log:
Added several utility scripts related to performing an inversion using
PyLith Green's functions.  These will need to be cleaned up, and I will
need to provide some examples of how to use them.



Added: short/3D/PyLith/trunk/examples/greensfns/hex8/designdata.py
===================================================================
--- short/3D/PyLith/trunk/examples/greensfns/hex8/designdata.py	                        (rev 0)
+++ short/3D/PyLith/trunk/examples/greensfns/hex8/designdata.py	2010-10-01 01:50:38 UTC (rev 17231)
@@ -0,0 +1,528 @@
+#!/usr/bin/env python
+#
+# ----------------------------------------------------------------------
+#
+#                           Brad T. Aagaard
+#                        U.S. Geological Survey
+#
+# <LicenseText>
+#
+# ----------------------------------------------------------------------
+#
+
+## @file greensfns/designdata
+
+## @brief Python application to create the data arrays necessary for an
+## inversion, given data information and Green's function information.
+## The matrices produced are the data matrix (vector), the data covariance
+## matrix (diagonal at present), and the data design matrix (numObservations
+## by numParameters).
+
+import math
+import numpy
+import sys
+from pyre.units.length import km
+from pyre.units.length import m
+
+from pyre.applications.Script import Script as Application
+
+class DesignData(Application):
+  """
+  Python application to create the data arrays necessary for an
+  inversion, given data information and Green's function information.
+  The matrices produced are the data matrix (vector), the data covariance
+  matrix (diagonal at present), and the data design matrix (numObservations
+  by numParameters).
+  """
+  
+  class Inventory(Application.Inventory):
+    """
+    Python object for managing DesignData facilities and properties.
+    """
+
+    ## @class Inventory
+    ## Python object for managing DesignData facilities and properties.
+    ##
+    ## \b Properties
+    ## @li \b data_input_file File containing data, locations, and stdDev.
+    ## @li \b gf_metadata_file File containing metadata for GF.
+    ## @li \b gfresponses_ll_root Root name for left-lateral GF responses.
+    ## @li \b gfresponses_ud_root Root name for updip GF responses.
+    ## @li \b data_output_file Output file for scaled data.
+    ## @li \b cov_output_file Output file for scaled covariance matrix.
+    ## @li \b design_output_file Output file for data design matrix.
+    ## @li \b metadata_output_file Output file describing impulses and responses.
+    ## @li \b data_scale Scaling factor to apply to data and stdDev.
+    ## @li \b search_radius Radius from data center to search for GF.
+    ## @li \b impulse_number_width Width of impulse number field.
+    ##
+    ## \b Facilities
+    ## @li None
+
+    import pyre.inventory
+
+    dataInputFile = pyre.inventory.str("data_input_file",
+                                       default="data.txt")
+    dataInputFile.meta['tip'] = "File containing data, locations, and stdDev."
+
+    gfMetadataFile = pyre.inventory.str("gf_metadata_file",
+                                        default="gf_metadata.txt")
+    gfMetadataFile.meta['tip'] = "Name of file describing GF impulses."
+
+    gfResponsesLlRoot = pyre.inventory.str("gfresponses_ll_root",
+                                           default="gfresponse_ll.vtk")
+    gfResponsesLlRoot.meta['tip'] = "Root name for left-lateral GF responses."
+
+    gfResponsesUdRoot = pyre.inventory.str("gfresponses_ud_root",
+                                           default="gfresponse_ud.vtk")
+    gfResponsesUdRoot.meta['tip'] = "Root name for updip GF responses."
+
+    dataOutputFile = pyre.inventory.str("data_output_file",
+                                        default="data_vals.txt")
+    dataOutputFile.meta['tip'] = "Output file for scaled data."
+
+    covOutputFile = pyre.inventory.str("cov_output_file",
+                                       default="data_cov.txt")
+    covOutputFile.meta['tip'] = "Output file for scaled covariance matrix."
+
+    designOutputFile = pyre.inventory.str("design_output_file",
+                                          default="data_design.txt")
+    designOutputFile.meta['tip'] = "Output file for data design matrix."
+
+    metadataOutputFile = pyre.inventory.str("metadata_output_file",
+                                            default="data_metadata.txt")
+    metadataOutputFile.meta['tip'] = "Output file containing data metadata."
+
+    dataScale = pyre.inventory.float("data_scale", default=1.0)
+    dataScale.meta['tip'] = "Scaling factor to apply to data and stdDev."
+
+    searchRadius = pyre.inventory.dimensional("search_radius", default=100.0*km)
+    searchRadius.meta['tip'] = "Radius from data center to search for GF."
+
+    impulseNumberWidth = pyre.inventory.int("impulse_number_width", default=5)
+    impulseNumberWidth.meta['tip'] = "Width of impulse number field."
+
+  
+  # PUBLIC METHODS /////////////////////////////////////////////////////
+
+  def __init__(self, name="designdata"):
+    Application.__init__(self, name)
+
+    self.numTotalImpulses = 0
+    self.numUsedImpulses = 0
+    self.numDataPoints = 0
+    self.designRows = 0
+    self.designColumns = 0
+    self.numCells = 0
+    self.numVertices = 0
+    self.distanceScale = 0.0
+
+    self.usedImpulses = []
+    self.usedImpulsesLl = []
+    self.usedImpulsesUd = []
+    self.interpIndices = []
+    self.dataNames = []
+    self.dataCoords = None
+    self.dataVals = None
+    self.dataCov = None
+    self.dataCenter = None
+    self.design = None
+    self.interpFuncs = None
+    self.vertexCoords = None
+    self.cellConnect = None
+
+    return
+
+
+  def main(self):
+    # import pdb
+    # pdb.set_trace()
+    self._readData()
+    self._readMetadata()
+    self._findImpulses()
+    self._createInterp()
+    self._writeMetadata()
+    self._makeDesign()
+    return
+
+
+  # PRIVATE METHODS ////////////////////////////////////////////////////
+
+  def _configure(self):
+    """
+    Setup members using inventory.
+    """
+    Application._configure(self)
+    # import pdb
+    # pdb.set_trace()
+
+    # File info.
+    self.dataInputFile = self.inventory.dataInputFile
+    self.gfMetadataFile = self.inventory.gfMetadataFile
+    self.gfResponsesLlRoot = self.inventory.gfResponsesLlRoot
+    self.gfResponsesUdRoot = self.inventory.gfResponsesUdRoot
+    self.dataOutputFile = self.inventory.dataOutputFile
+    self.covOutputFile = self.inventory.covOutputFile
+    self.designOutputFile = self.inventory.designOutputFile
+    self.metadataOutputFile = self.inventory.metadataOutputFile
+
+    # Data information
+    self.dataScale = self.inventory.dataScale
+
+    # Impulse information
+    self.searchRadius = self.inventory.searchRadius.value
+    self.impulseNumberWidth = self.inventory.impulseNumberWidth
+
+    return
+      
+
+  def _readData(self):
+    """
+    Function to read data, coordinates, and standard deviations.
+    """
+    f = open(self.dataInputFile, 'r')
+    lines = f.readlines()
+    self.numDataPoints = len(lines) - 1
+    self.designRows = 3 * self.numDataPoints
+    coords = []
+    data = []
+    cov = []
+    for line in range(1,self.numDataPoints + 1):
+      lineSplit = lines[line].split()
+      east = float(lineSplit[2])
+      north = float(lineSplit[1])
+      self.dataNames.append(lineSplit[0])
+      coords.append([east, north])
+      vE = self.dataScale * float(lineSplit[5])
+      vN = self.dataScale * float(lineSplit[6])
+      vU = self.dataScale * float(lineSplit[7])
+      data.append(vE)
+      data.append(vN)
+      data.append(vU)
+      sigE = self.dataScale * float(lineSplit[8])
+      sigN = self.dataScale * float(lineSplit[9])
+      sigU = self.dataScale * float(lineSplit[10])
+      cov.append(sigE*sigE)
+      cov.append(sigN*sigN)
+      cov.append(sigU*sigU)
+
+    f.close()
+
+    print "Number of data points: %i" % self.numDataPoints
+    print "Number of rows in design matrix: %i" % self.designRows
+    sys.stdout.flush()
+    self.dataVals = numpy.array(data, dtype=numpy.float64)
+    self.dataCov = numpy.array(cov, dtype=numpy.float64)
+    numpy.savetxt(self.dataOutputFile, self.dataVals)
+    numpy.savetxt(self.covOutputFile, self.dataCov)
+    self.dataCoords = numpy.array(coords, dtype=numpy.float64)
+    self.dataCenter = numpy.mean(self.dataCoords, axis=0).reshape((1, 2))
+
+    return
+
+
+  def _readMetadata(self):
+    """
+    Function to read metadata.
+    """
+    self.metadata = numpy.loadtxt(self.gfMetadataFile, dtype=numpy.float64,
+                                         skiprows=1)
+    self.numTotalImpulses = self.metadata.shape[0]
+
+    print "Total number of impulses:  %i" % self.numTotalImpulses
+    sys.stdout.flush()
+
+    return
+
+
+  def _findImpulses(self):
+    """
+    Function to find impulses that lie within a given radius of the data center.
+    """
+    import scipy.spatial.distance
+    
+    impulseCoords = self.metadata[:,1:3]
+    distance = scipy.spatial.distance.cdist(impulseCoords, self.dataCenter,
+                                            'euclidean')
+
+    for impulse in range(self.numTotalImpulses):
+      if (distance[impulse] < self.searchRadius):
+        self.usedImpulses.append(impulse)
+        llFilename = self._getFilename(self.gfResponsesLlRoot, impulse)
+        udFilename = self._getFilename(self.gfResponsesUdRoot, impulse)
+        self.usedImpulsesLl.append(llFilename)
+        self.usedImpulsesUd.append(udFilename)
+
+    self.numUsedImpulses = len(self.usedImpulses)
+    self.designColumns = 2 * self.numUsedImpulses
+    print "Number of impulse locations used:  %i" % self.numUsedImpulses
+    print "Number of columns in design matrix:  %i" % self.designColumns
+    sys.stdout.flush()
+    return
+
+      
+  def _getFilename(self, fileRoot, impulse):
+    """
+    Function to create a filename given the root filename and the impulse
+    number.
+    """
+    impulseNum = int(impulse)
+    impulseString = repr(impulseNum).rjust(self.impulseNumberWidth, '0')
+    filename = fileRoot + "_t" + impulseString + ".vtk"
+
+    return filename
+
+
+  def _createInterp(self):
+    """
+    Function to find cell containing an observation point and create the
+    corresponding interpolation functions.
+    """
+    from enthought.mayavi.sources.vtk_file_reader import VTKFileReader
+    from enthought.tvtk.api import tvtk
+
+    # First read a VTK file to get geometry info
+    reader = VTKFileReader()
+    filename = self.usedImpulsesLl[0]
+    reader.initialize(filename)
+    data = reader.outputs[0]
+
+    # Get cell and vertex info.
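+    # Legacy-VTK cell arrays are flattened as [nPoints, id0, id1, ...] per
+    # cell; the output mesh cells are assumed to be triangles (nPoints = 3),
+    # so each reshaped row is (3, v0, v1, v2) and the leading count is dropped.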
+    cellVtk = data.get_cells()
+    self.numCells = cellVtk.number_of_cells
+    cellArray = cellVtk.to_array()
+    self.vertexCoords = data._get_points().to_array()
+    (self.numVertices, spaceDim) = self.vertexCoords.shape
+    cellMatrix = cellArray.reshape(self.numCells,4)
+    self.cellConnect = cellMatrix[:,1:4]
+
+    meshRange = numpy.ptp(self.vertexCoords, axis=0)
+    self.distanceScale = 0.1 * numpy.amax(meshRange)
+
+    # Find cells enclosing each observation point.
+    self.interpIndices = self._findCells()
+
+    # Compute interpolation functions for each cell/observation point.
+    self._computeInterp()
+
+    return
+
+
+  def _computeInterp(self):
+    """
+    Function to compute interpolation functions for a point lying in a
+    triangular cell.
+    """
+    interpList = []
+    for point in range(self.numDataPoints):
+      cellNum = self.interpIndices[point]
+      cellCoords = self.vertexCoords[self.cellConnect[cellNum,:],0:2]
+      pointCoords = self.dataCoords[point,:]
+      x1 = cellCoords[0,0]
+      x2 = cellCoords[1,0]
+      x3 = cellCoords[2,0]
+      y1 = cellCoords[0,1]
+      y2 = cellCoords[1,1]
+      y3 = cellCoords[2,1]
+      x = pointCoords[0]
+      y = pointCoords[1]
+      denom = x1 * (y3 - y2) + x2 * (y1 - y3) + x3 * (y2 - y1)
+      l1 = x * (y3 - y2) + x2 * (y - y3) + x3 * (y2 - y)
+      l2 = -(x * (y3 - y1) + x1 * (y - y3) + x3 * (y1 - y))
+      l3 = x * (y2 - y1) + x1 * (y - y2) + x2 * (y1 - y)
+      interpList.append([l1/denom, l2/denom, l3/denom])
+
+    self.interpFuncs = numpy.array(interpList, dtype=numpy.float64)
+
+    return
+
+
+  def _findCells(self):
+    """
+    Function to find triangles enclosing a set of points.
+    Brute force method for now.
+    """
+
+    indices = []
+    for point in range(self.numDataPoints):
+      pointCoords = self.dataCoords[point,:]
+      cellFound = False
+      for cell in range(self.numCells):
+        cellCoords = self.vertexCoords[self.cellConnect[cell,:],0:2]
+        inTriangle = self._inTriangle(pointCoords, cellCoords)
+        if (inTriangle):
+          indices.append(cell)
+          cellFound = True
+          break
+
+      if (not cellFound):
+        msg = 'Unable to find surrounding cell for data point # %i' % point
+        raise ValueError(msg)
+
+    return indices
+
+
+  def _inTriangle(self, pointCoords, cellCoords):
+    """
+    Function to determine whether a point lies within a triangular cell.
+    """
+
+    v2 = pointCoords - cellCoords[0,:]
+
+    if (math.sqrt(numpy.dot(v2, v2)) > self.distanceScale):
+      return False
+
+    v0 = cellCoords[2,:] - cellCoords[0,:]
+    v1 = cellCoords[1,:] - cellCoords[0,:]
+
+    dot00 = numpy.dot(v0, v0)
+    dot01 = numpy.dot(v0, v1)
+    dot02 = numpy.dot(v0, v2)
+    dot11 = numpy.dot(v1, v1)
+    dot12 = numpy.dot(v1, v2)
+
+    invDenom = 1.0/(dot00 * dot11 - dot01 * dot01)
+    u = (dot11 * dot02 - dot01 * dot12) * invDenom
+    v = (dot00 * dot12 - dot01 * dot02) * invDenom
+
+    return ((u > 0.0) and (v > 0.0) and (u + v < 1.0))
+  
+    
+  def _makeDesign(self):
+    """
+    Function to create design matrix and write it to a file.
+    """
+    # Compute coefficients for each impulse/response pair.
+    impulseList = []
+    impulseTypes = [self.usedImpulsesLl, self.usedImpulsesUd]
+    for impulseType in range(2):
+      filelist = impulseTypes[impulseType]
+      for impulse in range(self.numUsedImpulses):
+        print 'Working on impulse # %i (%i)' % \
+          (impulse, self.usedImpulses[impulse])
+        sys.stdout.flush()
+        impulseCoeffs = self._getCoeffs(filelist[impulse])
+        impulseList.append(impulseCoeffs)
+    self.design = numpy.transpose(numpy.array(impulseList, dtype=numpy.float64))
+    numpy.savetxt(self.designOutputFile, self.design)
+
+    return
+    
+      
+  def _getCoeffs(self, vtkFile):
+    """
+    Function to compute all the coefficients from a particular impulse.
+    """
+    from enthought.mayavi.sources.vtk_file_reader import VTKFileReader
+    from enthought.tvtk.api import tvtk
+
+    # Read VTK file
+    reader = VTKFileReader()
+    reader.initialize(vtkFile)
+    data = reader.outputs[0]
+
+    # Get computed displacements
+    vertData = data._get_point_data()
+    numVertDataArrays = vertData._get_number_of_arrays()
+    displArray = None
+    for arrayNum in range(numVertDataArrays):
+      arrayName = vertData.get_array_name(arrayNum)
+      if (arrayName == 'displacement'):
+        displArray = vertData.get_array(arrayNum).to_array()
+        break
+
+    responseVals = []
+    for dataPoint in range(self.numDataPoints):
+      u, v, w = self._computeDispl(dataPoint, displArray)
+      responseVals.append(u)
+      responseVals.append(v)
+      responseVals.append(w)
+
+    return responseVals
+
+
+  def _computeDispl(self, dataPoint, displArray):
+    """
+    Function to interpolate displacements to a given data point.
+    """
+
+    u = 0.0
+    v = 0.0
+    w = 0.0
+    cellNum = self.interpIndices[dataPoint]
+    for vertex in range(3):
+      vertNum = self.cellConnect[cellNum, vertex]
+      u += self.interpFuncs[dataPoint, vertex] * displArray[vertNum, 0]
+      v += self.interpFuncs[dataPoint, vertex] * displArray[vertNum, 1]
+      w += self.interpFuncs[dataPoint, vertex] * displArray[vertNum, 2]
+
+    return (u, v, w)
+  
+    
+  def _writeMetadata(self):
+    """
+    Function to write metadata describing impulses (columns) and data (rows).
+    """
+    f = open(self.metadataOutputFile, 'w')
+    newLine = '\n'
+    tab = '\t'
+    
+    # Write data information.
+    dataDescr = 'Information for rows (data points), %i rows total\n' % self.designRows
+    f.write(dataDescr)
+    dataHead = 'Row #' + tab + 'Site-name' + tab + 'Component' + tab + \
+      'X-coord' + tab + 'Y-coord' + newLine
+    f.write(dataHead)
+    components = ['X', 'Y', 'Z']
+    dataFormat = '%i' + tab + '%s' + tab + '%s' + tab + '%e' + tab + '%e' + \
+      newLine
+
+
+    rowNum = 0
+    for dataPoint in range(self.numDataPoints):
+      coords = self.dataCoords[dataPoint, :]
+      name = self.dataNames[dataPoint]
+      for component in range(3):
+        outLine = dataFormat % (rowNum, name, components[component],
+                                coords[0], coords[1])
+        f.write(outLine)
+        rowNum += 1
+    
+    # Write impulse information.
+    impulseDescr = newLine + newLine + \
+                   'Information for columns (impulses), %i columns total\n' % self.designColumns
+    f.write(impulseDescr)
+    impulseHead = 'Column #' + tab + 'Impulse type' + tab + 'X-coord' + tab + \
+                  'Y-coord' + tab + 'Z-coord' + tab + \
+                  'Normal-X' + tab + 'Normal-Y' + tab + 'Normal-Z' + tab + \
+                  'Strike-X' + tab + 'Strike-Y' + tab + 'Strike-Z' + tab + \
+                  'Dip-X' + tab + 'Dip-Y' + tab + 'Dip-Z' + newLine
+    f.write(impulseHead)
+    impulseTypes = ['ll', 'ud']
+
+    colNum = 0
+    metadataOut = 12 * (tab + '%e')
+    outFormat = '%i' + tab + '%s' + metadataOut + newLine
+    for impulseType in range(2):
+      impulseText = impulseTypes[impulseType]
+      for impulse in self.usedImpulses:
+        metadata = self.metadata[impulse, 1:13]
+        outLine = outFormat % (colNum, impulseText,
+                               metadata[0], metadata[1], metadata[2],
+                               metadata[3], metadata[4], metadata[5],
+                               metadata[6], metadata[7], metadata[8],
+                               metadata[9], metadata[10], metadata[11])
+        f.write(outLine)
+        colNum += 1
+
+    f.close()
+
+    return
+  
+
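+# Example invocation (hypothetical file names; properties can also be set in
+# a .cfg file or via Pyre's --property=value command-line syntax):
+#   designdata.py --data_input_file=gps_data.txt --search_radius=50.0*km
+#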
+# ----------------------------------------------------------------------
+if __name__ == '__main__':
+  app = DesignData()
+  app.run()
+
+# End of file


Property changes on: short/3D/PyLith/trunk/examples/greensfns/hex8/designdata.py
___________________________________________________________________
Name: svn:executable
   + *

Added: short/3D/PyLith/trunk/examples/greensfns/hex8/designprior.py
===================================================================
--- short/3D/PyLith/trunk/examples/greensfns/hex8/designprior.py	                        (rev 0)
+++ short/3D/PyLith/trunk/examples/greensfns/hex8/designprior.py	2010-10-01 01:50:38 UTC (rev 17231)
@@ -0,0 +1,228 @@
+#!/usr/bin/env python
+#
+# ----------------------------------------------------------------------
+#
+#                           Brad T. Aagaard
+#                        U.S. Geological Survey
+#
+# <LicenseText>
+#
+# ----------------------------------------------------------------------
+#
+
+## @file greensfns/designprior
+
+## @brief Python application to create the a priori arrays necessary for an
+## inversion, given the impulse locations, the a priori values, and the
+## desired correlation length.
+## The matrices produced are the a priori data matrix (vector) and the a priori
+## covariance matrix. The a priori design matrix is assumed to be an identity
+## matrix with dimensions corresponding to the number of parameters.
+
+import math
+import numpy
+import sys
+from pyre.units.length import km
+from pyre.units.length import m
+
+from pyre.applications.Script import Script as Application
+
+class DesignPrior(Application):
+  """
+  Python application to create the a priori arrays necessary for an
+  inversion, given the impulse locations, the a priori values, and the
+  desired correlation length.
+  The matrices produced are the a priori data matrix (vector) and the a priori
+  covariance matrix. The a priori design matrix is assumed to be an identity
+  matrix with dimensions corresponding to the number of parameters.
+  """
+  
+  class Inventory(Application.Inventory):
+    """
+    Python object for managing DesignPrior facilities and properties.
+    """
+
+    ## @class Inventory
+    ## Python object for managing DesignPrior facilities and properties.
+    ##
+    ## \b Properties
+    ## @li \b metadata_input_file File containing metadata for the inversion.
+    ## @li \b data_output_file Output file for a priori parameter values.
+    ## @li \b cov_output_file Output file for a priori covariance matrix.
+    ## @li \b correlation_length Correlation length for covariance.
+    ## @li \b std_dev Standard deviation to use for covariance.
+    ## @li \b apriori_value A priori parameter values (all set to this value).
+    ## @li \b diagonal_frac Additional fraction to add to covariance diagonal.
+    ## @li \b min_covar Covariance values less than this are set to zero.
+    ##
+    ## \b Facilities
+    ## @li None
+
+    import pyre.inventory
+
+    metadataInputFile = pyre.inventory.str("metadata_input_file",
+                                           default="metadata.txt")
+    metadataInputFile.meta['tip'] = "File containing inversion metadata."
+
+    dataOutputFile = pyre.inventory.str("data_output_file",
+                                        default="data_vals.txt")
+    dataOutputFile.meta['tip'] = "Output file with a priori parameter values."
+
+    covOutputFile = pyre.inventory.str("cov_output_file",
+                                       default="data_cov.txt")
+    covOutputFile.meta['tip'] = "Output file for a priori covariance matrix."
+
+    correlationLength = pyre.inventory.dimensional("correlation_length",
+                                                   default=10.0*km)
+    correlationLength.meta['tip'] = "Correlation length for covariance."
+
+    stdDev = pyre.inventory.dimensional("std_dev", default=1.0*m)
+    stdDev.meta['tip'] = "Standard deviation for covariance."
+
+    aprioriValue = pyre.inventory.dimensional("apriori_value", default=0.0*m)
+    aprioriValue.meta['tip'] = "A priori parameter values."
+
+    diagonalFrac = pyre.inventory.float("diagonal_frac", default=0.1)
+    diagonalFrac.meta['tip'] = "Additional fraction to add to covariance diagonal."
+
+    minCovar = pyre.inventory.float("min_covar", default=1.0e-5)
+    minCovar.meta['tip'] = "Covariance values less than this are set to zero."
+
+  
+  # PUBLIC METHODS /////////////////////////////////////////////////////
+
+  def __init__(self, name="designprior"):
+    Application.__init__(self, name)
+
+    self.numParameters = 0
+
+    self.impulseCoords = None
+
+    return
+
+
+  def main(self):
+    # import pdb
+    # pdb.set_trace()
+    self._readMetadata()
+    self._makeCovar()
+    return
+
+
+  # PRIVATE METHODS ////////////////////////////////////////////////////
+
+  def _configure(self):
+    """
+    Setup members using inventory.
+    """
+    Application._configure(self)
+    # import pdb
+    # pdb.set_trace()
+
+    # File info.
+    self.metadataInputFile = self.inventory.metadataInputFile
+    self.dataOutputFile = self.inventory.dataOutputFile
+    self.covOutputFile = self.inventory.covOutputFile
+
+    # Parameters
+    self.correlationLength = self.inventory.correlationLength.value
+    self.stdDev = self.inventory.stdDev.value
+    self.aprioriValue = self.inventory.aprioriValue.value
+    self.diagonalFrac = self.inventory.diagonalFrac
+    self.minCovar = self.inventory.minCovar
+
+    return
+      
+
+  def _readMetadata(self):
+    """
+    Function to read information describing parameters and write parameter
+    values.
+    """
+    # Get impulse information.
+    f = open(self.metadataInputFile, 'r')
+    lines = f.readlines()
+    coords = []
+    impulseData = False
+    for line in lines:
+      if impulseData:
+        lineSplit = line.split()
+        impulseCoord = [float(lineSplit[2]), float(lineSplit[3]),
+                        float(lineSplit[4])]
+        coords.append(impulseCoord)
+        self.numParameters += 1
+      testFind = line.find('Column')
+      if testFind > -1:
+        impulseData = True
+
+    f.close()
+    self.impulseCoords = numpy.array(coords, dtype=numpy.float64)
+
+    print "Number of parameters: %i" % self.numParameters
+
+    # Write a priori parameter values.
+    parameterVals = self.aprioriValue * numpy.ones(self.numParameters)
+    numpy.savetxt(self.dataOutputFile, parameterVals)
+
+    return
+
+
+  def _distanceFunc(self, distance):
+    """
+    Function to compute autocorrelation based on distance.
+    """
+    distanceArg = -0.5 * (distance * distance)/\
+                  (self.correlationLength * self.correlationLength)
+    covariance = self.stdDev * self.stdDev * math.exp(distanceArg)
+    return covariance
+
+  
+  def _makeCovar(self):
+    """
+    Function to create a priori covariance matrix for a given correlation
+    length.
+    """
+    import scipy.spatial.distance
+    import scipy.stats.mstats
+
+    # Compute distances between all points.
+    # Temporary kludge: the previous approach correlated the two impulse
+    # types with each other, so distances are computed here within a single
+    # impulse type only.
+    impulseHalf = self.impulseCoords[0:self.numParameters/2,:]
+    distance = scipy.spatial.distance.cdist(impulseHalf,
+                                            impulseHalf, 'euclidean')
+
+    # Compute covariance.
+    multiplier = -0.5/(self.correlationLength * self.correlationLength)
+    distanceArg = multiplier * distance * distance
+    # distanceFunc = numpy.vectorize(self._distanceFunc)
+    # aprioriCovar = distanceFunc(distance)
+    covarMult = self.stdDev * self.stdDev
+    aprioriCovarHalf = covarMult * numpy.exp(distanceArg)
+    aprioriCovarEmpty = numpy.zeros_like(aprioriCovarHalf)
+
+    # Delete all entries below threshold.
+    aprioriCovarHalfThresh = scipy.stats.mstats.threshold(
+      aprioriCovarHalf, threshmin=self.minCovar)
+
+    # Join the pieces into a block-diagonal matrix [[C, 0], [0, C]]: the two
+    # impulse types share the same spatial correlation but are uncorrelated
+    # with each other.
+    aprioriCovarTop = numpy.hstack((aprioriCovarHalfThresh, aprioriCovarEmpty))
+    aprioriCovarBot = numpy.hstack((aprioriCovarEmpty, aprioriCovarHalfThresh))
+    aprioriCovar = numpy.vstack((aprioriCovarTop, aprioriCovarBot))
+
+    # Add additional amount to diagonal.
+    covarDiagAdd = self.diagonalFrac * numpy.diag(aprioriCovar)
+    diagIndices = numpy.arange(self.numParameters)
+    aprioriCovar[diagIndices, diagIndices] += covarDiagAdd
+
+    # Write covariance to file.
+    numpy.savetxt(self.covOutputFile, aprioriCovar)
+
+    return
+  
+
+# ----------------------------------------------------------------------
+if __name__ == '__main__':
+  app = DesignPrior()
+  app.run()
+
+# End of file


Property changes on: short/3D/PyLith/trunk/examples/greensfns/hex8/designprior.py
___________________________________________________________________
Name: svn:executable
   + *

Added: short/3D/PyLith/trunk/examples/greensfns/hex8/getcoords.py
===================================================================
--- short/3D/PyLith/trunk/examples/greensfns/hex8/getcoords.py	                        (rev 0)
+++ short/3D/PyLith/trunk/examples/greensfns/hex8/getcoords.py	2010-10-01 01:50:38 UTC (rev 17231)
@@ -0,0 +1,146 @@
+#!/usr/bin/env python
+#
+# ----------------------------------------------------------------------
+#
+#                           Brad T. Aagaard
+#                        U.S. Geological Survey
+#
+# <LicenseText>
+#
+# ----------------------------------------------------------------------
+#
+
+## @file greensfns/getcoords
+
+## @brief Python application to get the coordinates for a set of vertex
+## IDs and output them to a file. This is a very simple script that expects
+## coordinates in Abaqus format as well as IDs in Abaqus nodeset format.
+
+import math
+import numpy
+import os
+import re
+import glob
+from pyre.units.time import s
+from pyre.units.length import m
+
+from pyre.applications.Script import Script as Application
+
+class GetCoords(Application):
+  """
+  Python application to get the coordinates for a set of vertex
+  IDs and output them to a file. This is a very simple script that expects
+  coordinates in Abaqus format as well as IDs in Abaqus nodeset format.
+  """
+  
+  class Inventory(Application.Inventory):
+    """
+    Python object for managing GetCoords facilities and properties.
+    """
+
+    ## @class Inventory
+    ## Python object for managing GetCoords facilities and properties.
+    ##
+    ## \b Properties
+    ## @li \b coord_file  Name of file containing vertex coordinates.
+    ## @li \b id_file  Name of file containing vertex IDs.
+    ## @li \b output_file Name of output file with requested coordinates.
+    ##
+    ## \b Facilities
+    ## @li None
+
+    import pyre.inventory
+
+    coordFile = pyre.inventory.str("coord_file", default="mesh.coords")
+    coordFile.meta['tip'] = "Name of file containing vertex coordinates."
+
+    idFile = pyre.inventory.str("id_file", default="mesh.ids")
+    idFile.meta['tip'] = "Name of file containing vertex IDs."
+
+    outputFile = pyre.inventory.str("output_file", default="mesh.points")
+    outputFile.meta['tip'] = "Name of output file with requested coordinates."
+
+  
+  # PUBLIC METHODS /////////////////////////////////////////////////////
+
+  def __init__(self, name="getcoords"):
+    Application.__init__(self, name)
+
+    self.numIds = 0
+    self.idList = []
+
+    return
+
+
+  def main(self):
+    # import pdb
+    # pdb.set_trace()
+    self._readIds()
+    self._getCoords()
+    return
+
+
+  # PRIVATE METHODS ////////////////////////////////////////////////////
+
+  def _configure(self):
+    """
+    Setup members using inventory.
+    """
+    Application._configure(self)
+    # import pdb
+    # pdb.set_trace()
+
+    # File info.
+    self.coordFile = self.inventory.coordFile
+    self.idFile = self.inventory.idFile
+    self.outputFile = self.inventory.outputFile
+
+    return
+      
+
+  def _readIds(self):
+    """
+    Function to read ID numbers.
+    """
+    f = open(self.idFile, 'r')
+    lines = f.readlines()
+    for line in lines:
+      for entry in line.split(', '):
+        self.idList.append(int(entry.rstrip(',\n')))
+
+    self.numIds = len(self.idList)
+    f.close()
+
+    return
+
+
+  def _getCoords(self):
+    """
+    Function to read a list of coordinates and output the coordinates if
+    they correspond to one of the requested ID's.
+    """
+    f = open(self.coordFile, 'r')
+    o = open(self.outputFile, 'w')
+    lines = f.readlines()
+    for line in lines:
+      vertexId = int(line.split(', ')[0].rstrip(',\n'))
+      for requestedId in self.idList:
+        if vertexId == requestedId:
+          x = line.split(', ')[1].rstrip(',')
+          y = line.split(', ')[2].rstrip(',')
+          z = line.split(', ')[3].rstrip(',\n')
+          outLine = x + '   ' + y + '   ' + z + '\n'
+          o.write(outLine)
+          break
+
+    f.close()
+    o.close()
+
+    return
+  
+
+# ----------------------------------------------------------------------
+if __name__ == '__main__':
+  app = GetCoords()
+  app.run()
+
+# End of file


Property changes on: short/3D/PyLith/trunk/examples/greensfns/hex8/getcoords.py
___________________________________________________________________
Name: svn:executable
   + *

Added: short/3D/PyLith/trunk/examples/greensfns/hex8/lininvert.py
===================================================================
--- short/3D/PyLith/trunk/examples/greensfns/hex8/lininvert.py	                        (rev 0)
+++ short/3D/PyLith/trunk/examples/greensfns/hex8/lininvert.py	2010-10-01 01:50:38 UTC (rev 17231)
@@ -0,0 +1,310 @@
+#!/usr/bin/env python
+#
+# ----------------------------------------------------------------------
+#
+#                           Brad T. Aagaard
+#                        U.S. Geological Survey
+#
+# <LicenseText>
+#
+# ----------------------------------------------------------------------
+#
+
+## @file greensfns/lininvert
+
+## @brief Python application to perform a linear inversion, given the necessary
+## matrices. In addition to performing the inversion, a number of (optional)
+## files are produced containing inversion information.
+
+import math
+import numpy
+import sys
+from pyre.units.length import km
+from pyre.units.length import m
+
+from pyre.applications.Script import Script as Application
+
+class LinInvert(Application):
+  """
+  Python application to perform a linear inversion, given the necessary
+  matrices. In addition to performing the inversion, a number of (optional)
+  files are produced containing inversion information.
+  """
+  
+  class Inventory(Application.Inventory):
+    """
+    Python object for managing LinInvert facilities and properties.
+    """
+
+    ## @class Inventory
+    ## Python object for managing LinInvert facilities and properties.
+    ##
+    ## \b Properties
+    ## @li \b data_input_file File containing data values.
+    ## @li \b apriori_input_file File containing a priori parameter values.
+    ## @li \b data_cov_input_file File containing data covariance.
+    ## @li \b apriori_cov_input_file File containing a priori covariance.
+    ## @li \b data_design_input_file File containing data design matrix.
+    ## @li \b info_output_file Output file with inversion summary.
+    ## @li \b data_output_file Output file containing data information.
+    ## @li \b param_output_file Output file containing parameter information.
+    ##
+    ## \b Facilities
+    ## @li None
+
+    import pyre.inventory
+
+    dataInputFile = pyre.inventory.str("data_input_file", default="data.txt")
+    dataInputFile.meta['tip'] = "File containing data values."
+
+    aprioriInputFile = pyre.inventory.str("apriori_input_file",
+                                          default="apriori_values.txt")
+    aprioriInputFile.meta['tip'] = "File containing a priori parameter values."
+
+    dataCovInputFile = pyre.inventory.str("data_cov_input_file",
+                                          default="data_cov.txt")
+    dataCovInputFile.meta['tip'] = "File containing data covariance."
+
+    aprioriCovInputFile = pyre.inventory.str("apriori_cov_input_file",
+                                             default="apriori_cov.txt")
+    aprioriCovInputFile.meta['tip'] = "File containing a priori covariance."
+
+    dataDesignInputFile = pyre.inventory.str("data_design_input_file",
+                                             default="data_design.txt")
+    dataDesignInputFile.meta['tip'] = "File containing data design matrix."
+
+    infoOutputFile = pyre.inventory.str("info_output_file",
+                                        default="inversion_info.txt")
+    infoOutputFile.meta['tip'] = "Output file with inversion summary."
+
+    dataOutputFile = pyre.inventory.str("data_output_file",
+                                        default="data_output.txt")
+    dataOutputFile.meta['tip'] = "Output file containing data information."
+
+    paramOutputFile = pyre.inventory.str("param_output_file",
+                                         default="param_output.txt")
+    paramOutputFile.meta['tip'] = "Output file with parameter information."
+
+  
+  # PUBLIC METHODS /////////////////////////////////////////////////////
+
+  def __init__(self, name="lininvert"):
+    Application.__init__(self, name)
+
+    self.numObs = 0
+    self.numParams = 0
+    self.chiSquare = 0.0
+
+    self.dataVals = None
+    self.dataCovVec = None
+    self.dataCov = None
+    self.dataDesign = None
+    self.aprioriVals = None
+    self.aprioriCov = None
+
+    self.hData = None
+    self.hApriori = None
+    self.resolData = None
+    self.solution = None
+    self.fData = None
+    self.predicted = None
+
+    return
+
+
+  def main(self):
+    # import pdb
+    # pdb.set_trace()
+    self._readMats()
+    self._invertData()
+    self._outputDataInfo()
+    self._outputParamInfo()
+    return
+
+
+  # PRIVATE METHODS ////////////////////////////////////////////////////
+
+  def _configure(self):
+    """
+    Setup members using inventory.
+    """
+    Application._configure(self)
+    # import pdb
+    # pdb.set_trace()
+
+    # File info.
+    self.dataInputFile = self.inventory.dataInputFile
+    self.aprioriInputFile = self.inventory.aprioriInputFile
+    self.dataCovInputFile = self.inventory.dataCovInputFile
+    self.aprioriCovInputFile = self.inventory.aprioriCovInputFile
+    self.dataDesignInputFile = self.inventory.dataDesignInputFile
+    self.infoOutputFile = self.inventory.infoOutputFile
+    self.dataOutputFile = self.inventory.dataOutputFile
+    self.paramOutputFile = self.inventory.paramOutputFile
+
+    return
+      
+
+  def _readMats(self):
+    """
+    Function to read input matrices.
+    """
+    self.dataVals = numpy.loadtxt(self.dataInputFile, dtype=numpy.float64)
+    self.dataCovVec = numpy.loadtxt(self.dataCovInputFile, dtype=numpy.float64)
+    self.dataCov = numpy.diag(self.dataCovVec)
+    self.dataDesign = numpy.loadtxt(self.dataDesignInputFile,
+                                    dtype=numpy.float64)
+    self.aprioriVals = numpy.loadtxt(self.aprioriInputFile, dtype=numpy.float64)
+    self.aprioriCov = numpy.loadtxt(self.aprioriCovInputFile,
+                                    dtype=numpy.float64)
+
+    self.numObs = self.dataVals.shape[0]
+    self.numParams = self.aprioriVals.shape[0]
+    # print "Number of observations: %i" % self.numObs
+    # print "Number of parameters: %i" % self.numParams
+    # sys.stdout.flush()
+
+    return
+
+
+  def _invertData(self):
+    """
+    Function to perform inversion.
+    """
+    # Perform inversion.
+    covAAdT = numpy.dot(self.aprioriCov, numpy.transpose(self.dataDesign))
+    sum1 = self.dataCov + numpy.dot(self.dataDesign, covAAdT)
+    self.hData = numpy.dot(covAAdT, numpy.linalg.inv(sum1))
+    self.resolData = numpy.dot(self.hData, self.dataDesign)
+    self.hApriori = numpy.identity(self.numParams, dtype=numpy.float64) - \
+                    self.resolData
+    self.solution = numpy.dot(self.hData, self.dataVals) + \
+                    numpy.dot(self.hApriori, self.aprioriVals)
+
+    return
+
+
+  def _outputDataInfo(self):
+    """
+    Function to write out general info and info related to data.
+    """
+
+    # Compute data-related quantities.
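+    # Chi-square is the variance-weighted misfit:
+    #   sum_i ((d_i - p_i) / sigma_i)^2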
+    fDataVec = numpy.sqrt(1.0/self.dataCovVec)
+    fDataVecInv = 1.0/fDataVec
+    self.fData = numpy.diag(fDataVec)
+    self.predicted = numpy.dot(self.dataDesign, self.solution)
+    dataMisfit = self.dataVals - self.predicted
+    weightMisfit = dataMisfit * fDataVec
+    self.chiSquare = numpy.sum(weightMisfit * weightMisfit)
+    
+    # Write out inversion info.
+    string1 = "\nNumber of observations:  %i\n" % self.numObs
+    string2 = "Number of parameters:  %i\n" % self.numParams
+    string3 = "Chi-square value:  %e\n" % self.chiSquare
+    print string1
+    print string2
+    print string3
+    sys.stdout.flush()
+    i = open(self.infoOutputFile, 'w')
+    i.write(string1)
+    i.write(string2)
+    i.write(string3)
+    i.close()
+                               
+    # Write out data info.
+    # Write out the following columns:
+    # 1.  Observed data value.
+    # 2.  Predicted data value.
+    # 3.  Standard deviation of observation.
+    # 4.  Observed minus predicted value.
+    # 5.  Observed minus predicted weighted by standard deviation.
+    
+    dataOut = numpy.transpose(numpy.vstack((self.dataVals, self.predicted,
+                                            fDataVecInv, dataMisfit,
+                                            weightMisfit)))
+    numpy.savetxt(self.dataOutputFile, dataOut)
+
+    return
+
+
+  def _outputParamInfo(self):
+    """
+    Function to write out parameter-related information.
+    """
+    
+    # Compute a posteriori covariance contributions.
+    aposterioriCov = numpy.dot(self.hApriori, self.aprioriCov)
+    prod1 = numpy.dot(self.hData, self.dataCov)
+    aposterioriCovD = numpy.dot(prod1, numpy.transpose(self.hData))
+    aposterioriCovA = numpy.dot(aposterioriCov, numpy.transpose(self.hApriori))
+
+    # Compute eigenvalues and eigenvectors of a priori covariance.
+    aprioriEigVals, aprioriEigVecs = numpy.linalg.eigh(self.aprioriCov)
+    aprioriValsVec = numpy.sqrt(1.0/aprioriEigVals)
+    aprioriValsVecInv = 1.0/aprioriValsVec
+    aprioriValsDiag = numpy.diag(aprioriValsVec)
+    aprioriValsDiagInv = numpy.diag(aprioriValsVecInv)
+    fApriori = numpy.dot(aprioriValsDiag, numpy.transpose(aprioriEigVecs))
+    fAprioriInv = numpy.dot(aprioriEigVecs, aprioriValsDiagInv)
+    
+    # Compute a posteriori covariances in primed coordinate system.
+    prod2 = numpy.dot(fApriori, aposterioriCov)
+    aposterioriCovPrime = numpy.dot(prod2, numpy.transpose(fApriori))
+    hDataPrime = numpy.dot(aposterioriCovPrime,
+                           numpy.transpose(self.dataDesign))
+    prod3 = numpy.dot(self.dataDesign, fAprioriInv)
+    dataDesignPrime = numpy.dot(self.fData, prod3)
+
+    aposterioriCovDPrime = numpy.dot(hDataPrime, numpy.transpose(hDataPrime))
+    aposterioriCovAPrime = numpy.dot(aposterioriCovPrime,
+                                     numpy.transpose(aposterioriCovPrime))
+
+    resolDataPrime = numpy.dot(hDataPrime, dataDesignPrime)
+
+    # Columns to output:
+    # 1.  Predicted parameter value.
+    # 2.  A priori parameter value.
+    # 3.  A priori covariance.
+    # 4.  A posteriori covariance.
+    # 5.  A posteriori covariance (normalized).
+    # 6.  Contribution of data to a posteriori covariance.
+    # 7.  Contribution of data to a posteriori covariance (normalized).
+    # 8.  Contribution of a priori data to a posteriori covariance.
+    # 9.  Contribution of a priori data to a posteriori covariance (normalized).
+    # 10.  Contribution of data to resolution matrix.
+    # 11.  Contribution of data to resolution matrix (normalized).
+
+    # Extract necessary diagonals.
+    aprioriCovDiag = numpy.diag(self.aprioriCov)
+    aposterioriCovDiag = numpy.diag(aposterioriCov)
+    aposterioriCovPrimeDiag = numpy.diag(aposterioriCovPrime)
+    aposterioriCovDDiag = numpy.diag(aposterioriCovD)
+    aposterioriCovDPrimeDiag = numpy.diag(aposterioriCovDPrime)
+    aposterioriCovADiag = numpy.diag(aposterioriCovA)
+    aposterioriCovAPrimeDiag = numpy.diag(aposterioriCovAPrime)
+    resolDataDiag = numpy.diag(self.resolData)
+    resolDataPrimeDiag = numpy.diag(resolDataPrime)
+
+    # Output columns.
+    paramOut = numpy.transpose(numpy.vstack((self.solution, self.aprioriVals,
+                                             aprioriCovDiag, aposterioriCovDiag,
+                                             aposterioriCovPrimeDiag,
+                                             aposterioriCovDDiag,
+                                             aposterioriCovDPrimeDiag,
+                                             aposterioriCovADiag,
+                                             aposterioriCovAPrimeDiag,
+                                             resolDataDiag,
+                                             resolDataPrimeDiag)))
+                            
+    numpy.savetxt(self.paramOutputFile, paramOut)
+
+    return
+  
+
+# ----------------------------------------------------------------------
+if __name__ == '__main__':
+  app = LinInvert()
+  app.run()
+
+# End of file


Property changes on: short/3D/PyLith/trunk/examples/greensfns/hex8/lininvert.py
___________________________________________________________________
Name: svn:executable
   + *

Added: short/3D/PyLith/trunk/examples/greensfns/hex8/points2spatialdb.py
===================================================================
--- short/3D/PyLith/trunk/examples/greensfns/hex8/points2spatialdb.py	                        (rev 0)
+++ short/3D/PyLith/trunk/examples/greensfns/hex8/points2spatialdb.py	2010-10-01 01:50:38 UTC (rev 17231)
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# ----------------------------------------------------------------------
+#
+#                           Brad T. Aagaard
+#                        U.S. Geological Survey
+#
+# <LicenseText>
+#
+# ----------------------------------------------------------------------
+#
+
+## @file greensfns/points2spatialdb
+
+## @brief Python application to create a spatialdb using a given set of points.
+## A subset of the points is read from a separate file, along with
+## corresponding parameter values. A spatial database containing all points
+## is created, with values set to zero everywhere except at the subset points.
+
+import math
+import numpy
+from pyre.units.length import km
+from pyre.units.length import m
+
+from pyre.applications.Script import Script as Application
+
+class Points2Spatialdb(Application):
+  """
+  Python application to create a spatialdb using a given set of points.
+  A subset of the points is read from a separate file, along with
+  corresponding parameter values. A spatial database containing all points
+  is created, with values set to zero everywhere except at the subset points.
+  """
+  
+  class Inventory(Application.Inventory):
+    """
+    Python object for managing Points2Spatialdb facilities and properties.
+    """
+
+    ## @class Inventory
+    ## Python object for managing Points2Spatialdb facilities and properties.
+    ##
+    ## \b Properties
+    ## @li \b point_coord_file File containing all coordinates.
+    ## @li \b point_value_file File containing point subset with values.
+    ## @li \b parameter_names List of parameter names.
+    ## @li \b parameter_units List of parameter units.
+    ## @li \b distance_tol Distance tolerance to determine coincident points.
+    ##
+    ## \b Facilities
+    ## @li \b geometry  Geometry for output database.
+    ## @li \b iohandler  Object for output database.
+
+    import pyre.inventory
+
+    pointCoordFile = pyre.inventory.str("point_coord_file",
+                                        default="point_coords.txt")
+    pointCoordFile.meta['tip'] = "File containing all coordinates."
+
+    pointValueFile = pyre.inventory.str("point_value_file",
+                                        default="point_values.txt")
+    pointValueFile.meta['tip'] = "File containing point subset with values."
+
+    parameterNames = pyre.inventory.list("parameter_names",
+                                        default=['left-lateral-slip',
+                                                 'reverse-slip',
+                                                 'fault-opening'])
+    parameterNames.meta['tip'] = "List of parameter names."
+
+    parameterUnits = pyre.inventory.list("parameter_units",
+                                        default=['m','m','m'])
+    parameterUnits.meta['tip'] = "List of parameter units."
+
+    distanceTol = pyre.inventory.dimensional("distance_tol",
+                                             default=10.0*m)
+    distanceTol.meta['tip'] = "Distance tolerance for coincident points."
+
+    from spatialdata.spatialdb.generator.Geometry import Geometry
+    geometry = pyre.inventory.facility("geometry", family="geometry",
+                                       factory=Geometry)
+    geometry.meta['tip'] = "Geometry for output database."
+
+    from spatialdata.spatialdb.SimpleIOAscii import SimpleIOAscii
+    iohandler = pyre.inventory.facility("iohandler", family="simpledb_io",
+                                        factory=SimpleIOAscii)
+    iohandler.meta['tip'] = "Object for writing database."
+
+  
+  # PUBLIC METHODS /////////////////////////////////////////////////////
+
+  def __init__(self, name="points2spatialdb"):
+    Application.__init__(self, name)
+
+    self.numTotalPoints = 0
+    self.numSubsetPoints = 0
+    self.numParameters = 0
+
+    self.coincidentPoints = []
+
+    self.totalPoints = None
+    self.subsetPoints = None
+    self.subsetValues = None
+
+    return
+
+
+  def main(self):
+    # import pdb
+    # pdb.set_trace()
+    self._readPoints()
+    self._findCoincident()
+    self._writeSpatialdb()
+    return
+
+
+  # PRIVATE METHODS ////////////////////////////////////////////////////
+
+  def _configure(self):
+    """
+    Setup members using inventory.
+    """
+    Application._configure(self)
+    # import pdb
+    # pdb.set_trace()
+
+    # File info.
+    self.pointCoordFile = self.inventory.pointCoordFile
+    self.pointValueFile = self.inventory.pointValueFile
+
+    # Parameter info.
+    self.parameterNames = self.inventory.parameterNames
+    self.parameterUnits = self.inventory.parameterUnits
+
+    # Parameters.
+    self.distanceTol = self.inventory.distanceTol.value
+
+    # Spatialdb output facilities.
+    self.geometry = self.inventory.geometry
+    self.iohandler = self.inventory.iohandler
+
+    return
+      
+
+  def _readPoints(self):
+    """
+    Function to read the file containing all points as well as the file
+    containing the point subset with values.
+    """
+    self.totalPoints = numpy.loadtxt(self.pointCoordFile, dtype=numpy.float64)
+    subsetData = numpy.loadtxt(self.pointValueFile, dtype=numpy.float64)
+    self.subsetPoints = subsetData[:,0:3]
+    self.subsetValues = subsetData[:,3:]
+    self.numTotalPoints = self.totalPoints.shape[0]
+    self.numSubsetPoints = self.subsetPoints.shape[0]
+    self.numParameters = self.subsetValues.shape[1]
+    if (self.numParameters != len(self.parameterNames)):
+      msg = "Number of parameters not consistent with parameter names."
+      raise ValueError(msg)
+
+    return
+        
+    
+  def _findCoincident(self):
+    """
+    Function to find points in the total set matching those in the subset.
+    """
+    import scipy.spatial.distance
+
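+    # Brute-force nearest-neighbor match: build the full
+    # (numSubsetPoints x numTotalPoints) distance matrix and take the closest
+    # total point for each subset point.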
+    distance = scipy.spatial.distance.cdist(self.subsetPoints, self.totalPoints,
+                                            'euclidean')
+    minIndices = numpy.argmin(distance, axis=1)
+    
+    for point in range(self.numSubsetPoints):
+      matchPoint = minIndices[point]
+      if (distance[point, minIndices[point]] < self.distanceTol):
+        self.coincidentPoints.append(matchPoint)
+      else:
+        msg = "No matching point found for subset point # %i." % point
+        raise ValueError(msg)
+
+    return
+
+
+  def _writeSpatialdb(self):
+    """
+    Function to write out the spatial database, after inserting the desired
+    values.
+    """
+    # Create info for each parameter.
+    values = []
+    for parameter in range(self.numParameters):
+      paramVals = numpy.zeros(self.numTotalPoints, dtype=numpy.float64)
+      paramVals[self.coincidentPoints] = self.subsetValues[:,parameter]
+      paramInfo = {'name': self.parameterNames[parameter],
+                   'units': self.parameterUnits[parameter],
+                   'data': paramVals.flatten()}
+      values.append(paramInfo)
+
+    # Write database.
+    data = {'points': self.totalPoints,
+            'coordsys': self.geometry.coordsys,
+            'data_dim': self.geometry.dataDim,
+            'values': values}
+    self.iohandler.write(data)
+
+    return
+  
+
+# ----------------------------------------------------------------------
+if __name__ == '__main__':
+  app = Points2Spatialdb()
+  app.run()
+
+# End of file


Property changes on: short/3D/PyLith/trunk/examples/greensfns/hex8/points2spatialdb.py
___________________________________________________________________
Name: svn:executable
   + *


