[cig-commits] r17108 - in short/3D/PyLith/trunk: libsrc/meshio modulesrc/meshio pylith/meshio

brad at geodynamics.org
Sun Aug 22 12:10:07 PDT 2010


Author: brad
Date: 2010-08-22 12:10:07 -0700 (Sun, 22 Aug 2010)
New Revision: 17108

Added:
   short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc
   short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.hh
   short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.icc
   short/3D/PyLith/trunk/modulesrc/meshio/DataWriterHDF5.i
   short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5.py
   short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5Mesh.py
   short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5SubMesh.py
   short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5SubSubMesh.py
Log:
Added skeleton code for output with HDF5 files.

Added: short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc	                        (rev 0)
+++ short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc	2010-08-22 19:10:07 UTC (rev 17108)
@@ -0,0 +1,296 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+#include <portinfo>
+
+#include <petscmesh_viewers.hh> // USES HDF5Viewer
+
+#include <cassert> // USES assert()
+#include <sstream> // USES std::ostringstream
+#include <stdexcept> // USES std::runtime_error
+
+// ----------------------------------------------------------------------
+// Constructor
+template<typename mesh_type, typename field_type>
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::DataWriterHDF5(void) :
+  _filename("output.h5"),
+  _viewer(0)
+{ // constructor
+} // constructor
+
+// ----------------------------------------------------------------------
+// Destructor
+template<typename mesh_type, typename field_type>
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::~DataWriterHDF5(void)
+{ // destructor
+  deallocate();
+} // destructor  
+
+// ----------------------------------------------------------------------
+// Deallocate PETSc and local data structures.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type, field_type>::deallocate(void)
+{ // deallocate
+  if (0 != _viewer)
+    PetscViewerDestroy(_viewer);
+  _viewer = 0;
+} // deallocate
+  
+// ----------------------------------------------------------------------
+// Copy constructor.
+template<typename mesh_type, typename field_type>
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::DataWriterHDF5(const DataWriterHDF5<mesh_type, field_type>& w) :
+  DataWriter<mesh_type, field_type>(w),
+  _filename(w._filename),
+  _viewer(0)
+{ // copy constructor
+} // copy constructor
+
+// ----------------------------------------------------------------------
+// Prepare file for data at a new time step.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::openTimeStep(const double t,
+						       const mesh_type& mesh,
+						       const char* label,
+						       const int labelId)
+{ // openTimeStep
+#if 0
+  // MATT - This stuff needs to be updated for HDF5.
+
+  try {
+    PetscErrorCode err = 0;
+    
+    const std::string& filename = _hdf5Filename(t);
+
+    err = PetscViewerCreate(mesh.comm(), &_viewer);
+    CHECK_PETSC_ERROR(err);
+    err = PetscViewerSetType(_viewer, PETSCVIEWERASCII);
+    CHECK_PETSC_ERROR(err);
+    err = PetscViewerSetFormat(_viewer, PETSC_VIEWER_ASCII_HDF5);
+    CHECK_PETSC_ERROR(err);
+    err = PetscViewerFileSetName(_viewer, filename.c_str());
+    CHECK_PETSC_ERROR(err);
+
+    const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh = mesh.sieveMesh();
+    
+    err = HDF5Viewer::writeHeader(sieveMesh, _viewer);
+    CHECK_PETSC_ERROR(err);
+    //std::cout << "Wrote header for " << filename << std::endl;
+    err = HDF5Viewer::writeVertices(sieveMesh, _viewer);
+    CHECK_PETSC_ERROR(err);
+    //std::cout << "Wrote vertices for " << filename << std::endl;
+    if (0 == label) {
+      err = HDF5Viewer::writeElements(sieveMesh, _viewer);
+      CHECK_PETSC_ERROR(err);
+    } else {
+      const std::string labelName = 
+	(sieveMesh->hasLabel("censored depth")) ? "censored depth" : "depth";
+      err = HDF5Viewer::writeElements(sieveMesh, label, labelId, labelName, 0, _viewer);      
+      CHECK_PETSC_ERROR(err);
+    } // if
+    //std::cout << "Wrote elements for " << filename << std::endl;
+
+    _wroteVertexHeader = false;
+    _wroteCellHeader = false;
+  } catch (const std::exception& err) {
+    std::ostringstream msg;
+    msg << "Error while preparing for writing data to HDF5 file "
+	<< _filename << " at time " << t << ".\n" << err.what();
+    throw std::runtime_error(msg.str());
+  } catch (const ALE::Exception& err) {
+    std::ostringstream msg;
+    msg << "Error while preparing for writing data to HDF5 file "
+	<< _filename << " at time " << t << ".\n" << err.msg();
+    throw std::runtime_error(msg.str());
+  } catch (...) { 
+    std::ostringstream msg;
+    msg << "Unknown error while preparing for writing data to HDF5 file "
+	<< _filename << " at time " << t << ".\n";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+
+#endif
+} // openTimeStep
+
+// ----------------------------------------------------------------------
+/// Cleanup after writing data for a time step.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::closeTimeStep(void)
+{ // closeTimeStep
+  PetscViewerDestroy(_viewer); _viewer = 0;
+} // closeTimeStep
+
+// ----------------------------------------------------------------------
+// Write field over vertices to file.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::writeVertexField(
+				            const double t,
+					    const field_type& field,
+					    const mesh_type& mesh)
+{ // writeVertexField
+#if 0
+  // MATT - This stuff needs to be updated for HDF5.
+
+  typedef typename mesh_type::SieveMesh SieveMesh;
+  typedef typename field_type::Mesh::RealSection RealSection;
+
+  try {
+    int rank = 0;
+    MPI_Comm_rank(field.mesh().comm(), &rank);
+
+    const ALE::Obj<SieveMesh>& sieveMesh = mesh.sieveMesh();
+    assert(!sieveMesh.isNull());
+    const std::string labelName = 
+      (sieveMesh->hasLabel("censored depth")) ? "censored depth" : "depth";
+    const ALE::Obj<typename SieveMesh::numbering_type>& numbering =
+      sieveMesh->getFactory()->getNumbering(sieveMesh, labelName, 0);
+    assert(!numbering.isNull());
+
+    const ALE::Obj<RealSection>& section = field.section();
+    assert(!section.isNull());
+    assert(!sieveMesh->getLabelStratum(labelName, 0).isNull());
+    
+    const int localFiberDim = 
+      (sieveMesh->getLabelStratum(labelName, 0)->size() > 0) ? 
+      section->getFiberDimension(*sieveMesh->getLabelStratum(labelName, 0)->begin()) : 0;
+    int fiberDim = 0;
+    MPI_Allreduce((void *) &localFiberDim, (void *) &fiberDim, 1, 
+		  MPI_INT, MPI_MAX, field.mesh().comm());
+    assert(fiberDim > 0);
+    const int enforceDim =
+      (field.vectorFieldType() != topology::FieldBase::VECTOR) ? fiberDim : 3;
+
+    PetscErrorCode err = 0;
+    if (!_wroteVertexHeader) {
+      err = PetscViewerASCIIPrintf(_viewer, "POINT_DATA %d\n", 
+				   numbering->getGlobalSize());
+      CHECK_PETSC_ERROR(err);
+      _wroteVertexHeader = true;
+    } // if
+
+    err = HDF5Viewer::writeField(section, field.label(), fiberDim, numbering,
+				_viewer, enforceDim, _precision);
+    CHECK_PETSC_ERROR(err);
+  } catch (const std::exception& err) {
+    std::ostringstream msg;
+    msg << "Error while writing field '" << field.label() << "' at time " 
+	<< t << " to HDF5 file '" << _filename << "'.\n" << err.what();
+    throw std::runtime_error(msg.str());
+  } catch (...) { 
+    std::ostringstream msg;
+    msg << "Error while writing field '" << field.label() << "' at time " 
+	<< t << " to HDF5 file '" << _filename << "'.\n";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+
+#endif
+} // writeVertexField
+
+// ----------------------------------------------------------------------
+// Write field over cells to file.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::writeCellField(
+				       const double t,
+				       const field_type& field,
+				       const char* label,
+				       const int labelId)
+{ // writeCellField
+#if 0
+  // MATT - This stuff needs to be updated for HDF5.
+
+  typedef typename field_type::Mesh::SieveMesh SieveMesh;
+  typedef typename field_type::Mesh::RealSection RealSection;
+
+  try {
+    int rank = 0;
+    MPI_Comm_rank(field.mesh().comm(), &rank);
+
+    // Correctly handle boundary and fault meshes
+    //   Cannot just use mesh->depth() because boundaries report the wrong thing
+    const ALE::Obj<SieveMesh>& sieveMesh = field.mesh().sieveMesh();
+    assert(!sieveMesh.isNull());
+    const int cellDepth = (sieveMesh->depth() == -1) ? -1 : 1;
+    const int depth = (0 == label) ? cellDepth : labelId;
+    const std::string labelName = (0 == label) ?
+      ((sieveMesh->hasLabel("censored depth")) ?
+       "censored depth" : "depth") : label;
+    assert(!sieveMesh->getFactory().isNull());
+    const ALE::Obj<typename SieveMesh::numbering_type>& numbering = 
+      sieveMesh->getFactory()->getNumbering(sieveMesh, labelName, depth);
+    assert(!numbering.isNull());
+    assert(!sieveMesh->getLabelStratum(labelName, depth).isNull());
+    const ALE::Obj<RealSection>& section = field.section();
+    assert(!section.isNull());
+
+    const int localFiberDim = 
+      (sieveMesh->getLabelStratum(labelName, depth)->size() > 0) ? 
+      section->getFiberDimension(*sieveMesh->getLabelStratum(labelName, depth)->begin()) : 0;
+    int fiberDim = 0;
+    MPI_Allreduce((void *) &localFiberDim, (void *) &fiberDim, 1, 
+		  MPI_INT, MPI_MAX, field.mesh().comm());
+    assert(fiberDim > 0);
+    const int enforceDim =
+      (field.vectorFieldType() != topology::FieldBase::VECTOR) ? fiberDim : 3;
+
+    PetscErrorCode err = 0;
+    if (!_wroteCellHeader) {
+      err = PetscViewerASCIIPrintf(_viewer, "CELL_DATA %d\n", 
+				   numbering->getGlobalSize());
+      CHECK_PETSC_ERROR(err);
+      _wroteCellHeader = true;
+    } // if
+
+    HDF5Viewer::writeField(section, field.label(), fiberDim, numbering,
+			  _viewer, enforceDim, _precision);
+  } catch (const std::exception& err) {
+    std::ostringstream msg;
+    msg << "Error while writing field '" << field.label() << "' at time " 
+	<< t << " to HDF5 file '" << _filename << "'.\n" << err.what();
+    throw std::runtime_error(msg.str());
+  } catch (...) { 
+    std::ostringstream msg;
+    msg << "Error while writing field '" << field.label() << "' at time " 
+	<< t << " to HDF5 file '" << _filename << "'.\n";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+
+#endif
+} // writeCellField
+
+// ----------------------------------------------------------------------
+// Generate filename for HDF5 file.
+template<typename mesh_type, typename field_type>
+std::string
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::_hdf5Filename(const double t) const
+{ // _hdf5Filename
+  std::ostringstream filename;
+  const int indexExt = _filename.find(".h5");
+  const int numTimeSteps = DataWriter<mesh_type, field_type>::_numTimeSteps;
+  if (0 == numTimeSteps)
+    filename << std::string(_filename, 0, indexExt) << "_info.h5";
+  else
+    filename << _filename;
+
+  return std::string(filename.str());
+} // _hdf5Filename
+
+
+// End of file 
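
The body of openTimeStep() above is the disabled ASCII/VTK viewer setup carried over as a placeholder. A minimal standalone sketch of what the HDF5 counterpart could look like, assuming PETSc is built with HDF5 support; the by-value PetscViewerDestroy() matches the PETSc version used by this code, and "output.h5" is just the writer's default filename:

#include <petscviewer.h> // PetscViewerHDF5Open(); header location differs in later
                         // PETSc releases (petscviewerhdf5.h), and requires --with-hdf5

int main(int argc, char** argv) {
  PetscErrorCode err = PetscInitialize(&argc, &argv, 0, 0);
  if (err) return err;

  PetscViewer viewer = 0;
  // Create/truncate the HDF5 file; collective over the communicator.
  err = PetscViewerHDF5Open(PETSC_COMM_WORLD, "output.h5", FILE_MODE_WRITE, &viewer);
  CHKERRQ(err);

  // ... write topology, geometry, and fields through the viewer here ...

  err = PetscViewerDestroy(viewer); CHKERRQ(err); // by-value signature, as in the code above
  err = PetscFinalize();
  return err;
}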

Added: short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.hh
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.hh	                        (rev 0)
+++ short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.hh	2010-08-22 19:10:07 UTC (rev 17108)
@@ -0,0 +1,155 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+/**
+ * @file libsrc/meshio/DataWriterHDF5.hh
+ *
+ * @brief Object for writing finite-element data to HDF5 file.
+ *
+ * HDF5 schema for PyLith output.
+ *
+ * / - root group
+ *   geometry - group
+ *     coordsys - attribute string with coordinate system
+ *     vertices - dataset [nvertices, spacedim]
+ *   topology - group
+ *     cell_type - attribute string with cell type
+ *     cells - dataset [ncells, ncorners]
+ *   vertex_fields - group
+ *     VERTEX_FIELD (name of vertex field) - dataset 
+ *       [ntimesteps, nvertices, fiberdim]
+ *   cell_fields - group
+ *     CELL_FIELD (name of cell field) - dataset
+ *       [ntimesteps, ncells, fiberdim]
+ */
+
+#if !defined(pylith_meshio_datawriterhdf5_hh)
+#define pylith_meshio_datawriterhdf5_hh
+
+// Include directives ---------------------------------------------------
+#include "DataWriter.hh" // ISA DataWriter
+
+// DataWriterHDF5 --------------------------------------------------------
+/// Object for writing finite-element data to HDF5 file.
+template<typename mesh_type, typename field_type>
+class pylith::meshio::DataWriterHDF5 : public DataWriter<mesh_type,field_type>
+{ // DataWriterHDF5
+  friend class TestDataWriterHDF5Mesh; // unit testing
+  friend class TestDataWriterHDF5SubMesh; // unit testing
+  friend class TestDataWriterHDF5BCMesh; // unit testing
+  friend class TestDataWriterHDF5FaultMesh; // unit testing
+
+// PUBLIC METHODS ///////////////////////////////////////////////////////
+public :
+
+  /// Constructor
+  DataWriterHDF5(void);
+
+  /// Destructor
+  ~DataWriterHDF5(void);
+
+  /** Make copy of this object.
+   *
+   * @returns Copy of this.
+   */
+  DataWriter<mesh_type, field_type>* clone(void) const;
+
+  /// Deallocate PETSc and local data structures.
+  void deallocate(void);
+  
+  /** Set filename for HDF5 file.
+   *
+   * @param filename Name of HDF5 file.
+   */
+  void filename(const char* filename);
+
+  /** Prepare file for data at a new time step.
+   *
+   * @param t Time stamp for new data
+   * @param mesh Finite-element mesh.
+   * @param label Name of label defining cells to include in output
+   *   (=0 means use all cells in mesh).
+   * @param labelId Value of label defining which cells to include.
+   */
+  void openTimeStep(const double t,
+		    const mesh_type& mesh,
+		    const char* label =0,
+		    const int labelId =0);
+
+  /// Cleanup after writing data for a time step.
+  void closeTimeStep(void);
+
+  /** Write field over vertices to file.
+   *
+   * @param t Time associated with field.
+   * @param field Field over vertices.
+   * @param mesh Mesh associated with output.
+   */
+  void writeVertexField(const double t,
+			const field_type& field,
+			const mesh_type& mesh);
+
+  /** Write field over cells to file.
+   *
+   * @param t Time associated with field.
+   * @param field Field over cells.
+   * @param label Name of label defining cells to include in output
+   *   (=0 means use all cells in mesh).
+   * @param labelId Value of label defining which cells to include.
+   */
+  void writeCellField(const double t,
+		      const field_type& field,
+		      const char* label =0,
+		      const int labelId =0);
+
+// PRIVATE METHODS //////////////////////////////////////////////////////
+private :
+
+  /** Copy constructor.
+   *
+   * @param w Object to copy.
+   */
+  DataWriterHDF5(const DataWriterHDF5& w);
+
+  /** Generate filename for HDF5 file.
+   *
+   * @param t Time in seconds.
+   */
+  std::string _hdf5Filename(const double t) const;
+
+// NOT IMPLEMENTED //////////////////////////////////////////////////////
+private :
+
+  const DataWriterHDF5& operator=(const DataWriterHDF5&); ///< Not implemented
+
+// PRIVATE MEMBERS //////////////////////////////////////////////////////
+private :
+
+  std::string _filename; ///< Name of HDF5 file.
+
+  PetscViewer _viewer; ///< Output file
+
+}; // DataWriterHDF5
+
+#include "DataWriterHDF5.icc" // inline methods
+#include "DataWriterHDF5.cc" // template definitions
+
+#endif // pylith_meshio_datawriterhdf5_hh
+
+
+// End of file 
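
The schema documented above can be made concrete with a small standalone sketch using the HDF5 1.8 C API. This is only an illustration of the intended layout, not the writer's implementation; the sizes and the file name are placeholders, and the cells dataset, cell_type attribute, and coordsys attribute are omitted for brevity:

#include <hdf5.h>

int main(void) {
  // Illustrative sizes only: 4 vertices in 2-D.
  const hsize_t vdims[2] = {4, 2};
  const double vertices[8] = {0,0, 1,0, 0,1, 1,1};

  hid_t file = H5Fcreate("schema_demo.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

  // /geometry/vertices - dataset [nvertices, spacedim]
  hid_t geometry = H5Gcreate2(file, "/geometry", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  hid_t vspace = H5Screate_simple(2, vdims, NULL);
  hid_t vset = H5Dcreate2(geometry, "vertices", H5T_NATIVE_DOUBLE, vspace,
                          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  H5Dwrite(vset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, vertices);

  // Remaining groups of the layout, left empty here.
  hid_t topology = H5Gcreate2(file, "/topology", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  hid_t vfields = H5Gcreate2(file, "/vertex_fields", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  hid_t cfields = H5Gcreate2(file, "/cell_fields", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

  H5Gclose(cfields); H5Gclose(vfields); H5Gclose(topology);
  H5Dclose(vset); H5Sclose(vspace); H5Gclose(geometry);
  H5Fclose(file);
  return 0;
}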

Added: short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.icc
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.icc	                        (rev 0)
+++ short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.icc	2010-08-22 19:10:07 UTC (rev 17108)
@@ -0,0 +1,42 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+#if !defined(pylith_meshio_datawriterhdf5_hh)
+#error "DataWriterHDF5.icc must be included only from DataWriterHDF5.hh"
+#else
+
+// Make copy of this object.
+template<typename mesh_type, typename field_type>
+inline
+pylith::meshio::DataWriter<mesh_type, field_type>*
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::clone(void) const {
+  return new DataWriterHDF5(*this);
+}
+
+// Set filename for HDF5 file.
+template<typename mesh_type, typename field_type>
+inline
+void
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::filename(const char* filename) {
+  _filename = filename;
+}
+
+
+#endif
+
+// End of file

Added: short/3D/PyLith/trunk/modulesrc/meshio/DataWriterHDF5.i
===================================================================
--- short/3D/PyLith/trunk/modulesrc/meshio/DataWriterHDF5.i	                        (rev 0)
+++ short/3D/PyLith/trunk/modulesrc/meshio/DataWriterHDF5.i	2010-08-22 19:10:07 UTC (rev 17108)
@@ -0,0 +1,102 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+/**
+ * @file modulesrc/meshio/DataWriterHDF5.i
+ *
+ * @brief Python interface to C++ DataWriterHDF5 object.
+ */
+
+namespace pylith {
+  namespace meshio {
+
+    template<typename mesh_type, typename field_type>
+    class pylith::meshio::DataWriterHDF5 :
+      public DataWriter<mesh_type, field_type>
+    { // DataWriterHDF5  
+      
+      // PUBLIC METHODS /////////////////////////////////////////////////
+    public :
+
+      /// Constructor
+      DataWriterHDF5(void);
+      
+      /// Destructor
+      ~DataWriterHDF5(void);
+      
+      /** Make copy of this object.
+       *
+       * @returns Copy of this.
+       */
+      DataWriter<mesh_type, field_type>* clone(void) const;
+      
+      /// Deallocate PETSc and local data structures.
+      void deallocate(void);
+  
+      /** Set filename for HDF5 file.
+       *
+       * @param filename Name of HDF5 file.
+       */
+      void filename(const char* filename);
+      
+      /** Prepare file for data at a new time step.
+       *
+       * @param t Time stamp for new data
+       * @param mesh Finite-element mesh.
+       * @param label Name of label defining cells to include in output
+       *   (=0 means use all cells in mesh).
+       * @param labelId Value of label defining which cells to include.
+       */
+      void openTimeStep(const double t,
+			const mesh_type& mesh,
+			const char* label =0,
+			const int labelId =0);
+      
+      /// Cleanup after writing data for a time step.
+      void closeTimeStep(void);
+      
+      /** Write field over vertices to file.
+       *
+       * @param t Time associated with field.
+       * @param field Field over vertices.
+       * @param mesh Mesh for output.
+       */
+      void writeVertexField(const double t,
+			    const field_type& field,
+			    const mesh_type& mesh);
+      
+      /** Write field over cells to file.
+       *
+       * @param t Time associated with field.
+       * @param field Field over cells.
+       * @param label Name of label defining cells to include in output
+       *   (=0 means use all cells in mesh).
+       * @param labelId Value of label defining which cells to include.
+       */
+      void writeCellField(const double t,
+			  const field_type& field,
+			  const char* label =0,
+			  const int labelId =0);
+      
+    }; // DataWriterHDF5
+
+  } // meshio
+} // pylith
+
+
+// End of file 

Added: short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5.py
===================================================================
--- short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5.py	                        (rev 0)
+++ short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5.py	2010-08-22 19:10:07 UTC (rev 17108)
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+#
+# ----------------------------------------------------------------------
+#
+# Brad T. Aagaard, U.S. Geological Survey
+# Charles A. Williams, GNS Science
+# Matthew G. Knepley, University of Chicago
+#
+# This code was developed as part of the Computational Infrastructure
+# for Geodynamics (http://geodynamics.org).
+#
+# Copyright (c) 2010 University of California, Davis
+#
+# See COPYING for license information.
+#
+# ----------------------------------------------------------------------
+#
+
+## @file pyre/meshio/DataWriterHDF5.py
+##
+## @brief Python object for writing finite-element data to HDF5 file.
+
+from DataWriter import DataWriter
+
+# DataWriterHDF5 class
+class DataWriterHDF5(DataWriter):
+  """
+  Python object for writing finite-element data to HDF5 file.
+
+  Inventory
+
+  \b Properties
+  @li \b filename Name of HDF5 file.
+  
+  \b Facilities
+  @li None
+  """
+
+  # INVENTORY //////////////////////////////////////////////////////////
+
+  import pyre.inventory
+
+  filename = pyre.inventory.str("filename", default="output.h5")
+  filename.meta['tip'] = "Name of HDF5 file."
+
+  # PUBLIC METHODS /////////////////////////////////////////////////////
+
+  def __init__(self, name="datawriterhdf5"):
+    """
+    Constructor.
+    """
+    DataWriter.__init__(self, name)
+    return
+
+
+  def initialize(self, normalizer):
+    """
+    Initialize writer.
+    """
+    DataWriter.initialize(self, normalizer)
+    return
+
+
+# End of file 

Added: short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5Mesh.py
===================================================================
--- short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5Mesh.py	                        (rev 0)
+++ short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5Mesh.py	2010-08-22 19:10:07 UTC (rev 17108)
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#
+# ----------------------------------------------------------------------
+#
+# Brad T. Aagaard, U.S. Geological Survey
+# Charles A. Williams, GNS Science
+# Matthew G. Knepley, University of Chicago
+#
+# This code was developed as part of the Computational Infrastructure
+# for Geodynamics (http://geodynamics.org).
+#
+# Copyright (c) 2010 University of California, Davis
+#
+# See COPYING for license information.
+#
+# ----------------------------------------------------------------------
+#
+
+## @file pyre/meshio/DataWriterHDF5Mesh.py
+##
+## @brief Python object for writing finite-element data to HDF5 file.
+
+from DataWriterHDF5 import DataWriterHDF5
+from meshio import MeshDataWriterHDF5 as ModuleDataWriterHDF5
+
+# DataWriterHDF5Mesh class
+class DataWriterHDF5Mesh(DataWriterHDF5, ModuleDataWriterHDF5):
+  """
+  Python object for writing finite-element data to HDF5 file.
+
+  Inventory
+
+  Factory: output_data_writer
+  """
+
+  # PUBLIC METHODS /////////////////////////////////////////////////////
+
+  def __init__(self, name="datawriterhdf5mesh"):
+    """
+    Constructor.
+    """
+    DataWriterHDF5.__init__(self, name)
+    ModuleDataWriterHDF5.__init__(self)
+    return
+
+
+  def initialize(self, normalizer):
+    """
+    Initialize writer.
+    """
+    DataWriterHDF5.initialize(self, normalizer)
+    
+    ModuleDataWriterHDF5.filename(self, self.filename)
+    return
+  
+
+# FACTORIES ////////////////////////////////////////////////////////////
+
+def output_data_writer():
+  """
+  Factory associated with DataWriter.
+  """
+  return DataWriterHDF5Mesh()
+
+
+# End of file 

Added: short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5SubMesh.py
===================================================================
--- short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5SubMesh.py	                        (rev 0)
+++ short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5SubMesh.py	2010-08-22 19:10:07 UTC (rev 17108)
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+#
+# ----------------------------------------------------------------------
+#
+# Brad T. Aagaard, U.S. Geological Survey
+# Charles A. Williams, GNS Science
+# Matthew G. Knepley, University of Chicago
+#
+# This code was developed as part of the Computational Infrastructure
+# for Geodynamics (http://geodynamics.org).
+#
+# Copyright (c) 2010 University of California, Davis
+#
+# See COPYING for license information.
+#
+# ----------------------------------------------------------------------
+#
+
+## @file pyre/meshio/DataWriterHDF5SubMesh.py
+##
+## @brief Python object for writing finite-element data to HDF5 file.
+
+from DataWriterHDF5 import DataWriterHDF5
+from meshio import SubMeshDataWriterHDF5 as ModuleDataWriterHDF5
+
+# DataWriterHDF5SubMesh class
+class DataWriterHDF5SubMesh(DataWriterHDF5, ModuleDataWriterHDF5):
+  """
+  Python object for writing finite-element data to HDF5 file.
+
+  Inventory
+
+  Factory: output_data_writer
+  """
+
+  # PUBLIC METHODS /////////////////////////////////////////////////////
+
+  def __init__(self, name="datawriterhdf5submesh"):
+    """
+    Constructor.
+    """
+    DataWriterHDF5.__init__(self, name)
+    ModuleDataWriterHDF5.__init__(self)
+    return
+
+
+  def initialize(self, normalizer):
+    """
+    Initialize writer.
+    """
+    DataWriterHDF5.initialize(self, normalizer)
+
+    ModuleDataWriterHDF5.filename(self, self.filename)
+    return
+  
+
+# FACTORIES ////////////////////////////////////////////////////////////
+
+def output_data_writer():
+  """
+  Factory associated with DataWriter.
+  """
+  return DataWriterHDF5SubMesh()
+
+
+# End of file 

Added: short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5SubSubMesh.py
===================================================================
--- short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5SubSubMesh.py	                        (rev 0)
+++ short/3D/PyLith/trunk/pylith/meshio/DataWriterHDF5SubSubMesh.py	2010-08-22 19:10:07 UTC (rev 17108)
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#
+# ----------------------------------------------------------------------
+#
+# Brad T. Aagaard, U.S. Geological Survey
+# Charles A. Williams, GNS Science
+# Matthew G. Knepley, University of Chicago
+#
+# This code was developed as part of the Computational Infrastructure
+# for Geodynamics (http://geodynamics.org).
+#
+# Copyright (c) 2010 University of California, Davis
+#
+# See COPYING for license information.
+#
+# ----------------------------------------------------------------------
+#
+
+## @file pyre/meshio/DataWriterHDF5SubSubMesh.py
+##
+## @brief Python object for writing finite-element data to HDF5 file.
+
+from DataWriterHDF5 import DataWriterHDF5
+from meshio import SubSubMeshDataWriterHDF5 as ModuleDataWriterHDF5
+
+# DataWriterHDF5SubSubMesh class
+class DataWriterHDF5SubSubMesh(DataWriterHDF5, ModuleDataWriterHDF5):
+  """
+  Python object for writing finite-element data to HDF5 file.
+
+  Inventory
+
+  Factory: output_data_writer
+  """
+
+  # PUBLIC METHODS /////////////////////////////////////////////////////
+
+  def __init__(self, name="datawriterhdf5submesh"):
+    """
+    Constructor.
+    """
+    DataWriterHDF5.__init__(self, name)
+    ModuleDataWriterHDF5.__init__(self)
+    return
+
+
+  def initialize(self, normalizer):
+    """
+    Initialize writer.
+    """
+    DataWriterHDF5.initialize(self, normalizer)
+
+    ModuleDataWriterHDF5.filename(self, self.filename)
+    return
+  
+
+# FACTORIES ////////////////////////////////////////////////////////////
+
+def output_data_writer():
+  """
+  Factory associated with DataWriter.
+  """
+  return DataWriterHDF5SubSubMesh()
+
+
+# End of file 


