[cig-commits] r17289 - in short/3D/PyLith/trunk: examples/bar_shearwave/tri3 libsrc libsrc/meshio libsrc/topology modulesrc/meshio pylith

brad at geodynamics.org brad at geodynamics.org
Sat Oct 16 20:27:29 PDT 2010


Author: brad
Date: 2010-10-16 20:27:29 -0700 (Sat, 16 Oct 2010)
New Revision: 17289

Modified:
   short/3D/PyLith/trunk/examples/bar_shearwave/tri3/pylithapp.cfg
   short/3D/PyLith/trunk/libsrc/Makefile.am
   short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc
   short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh
   short/3D/PyLith/trunk/libsrc/meshio/Makefile.am
   short/3D/PyLith/trunk/libsrc/topology/RefineEdges2.cc
   short/3D/PyLith/trunk/modulesrc/meshio/meshio.i
   short/3D/PyLith/trunk/pylith/Makefile.am
Log:
Restarted work on HDF5 object.

Modified: short/3D/PyLith/trunk/examples/bar_shearwave/tri3/pylithapp.cfg
===================================================================
--- short/3D/PyLith/trunk/examples/bar_shearwave/tri3/pylithapp.cfg	2010-10-15 23:45:21 UTC (rev 17288)
+++ short/3D/PyLith/trunk/examples/bar_shearwave/tri3/pylithapp.cfg	2010-10-17 03:27:29 UTC (rev 17289)
@@ -223,19 +223,19 @@
 vertex_data_fields = [displacement,velocity]
 skip = 2
 writer.filename = output/shearwave.vtk
-writer.time_format = %05.2f
+#writer.time_format = %05.2f
 
 # Give basename for VTK fault output.
 [pylithapp.timedependent.interfaces.fault.output]
 skip = 2
 writer.filename = output/shearwave-fault.vtk
-writer.time_format = %05.2f
+#writer.time_format = %05.2f
 
 # Give basename for VTK output of state variables.
 [pylithapp.timedependent.materials.elastic.output]
 skip = 29
 writer.filename = output/shearwave-statevars.vtk
-writer.time_format = %05.2f
+#writer.time_format = %05.2f
 
 # Average state variables over cell
 cell_filter = pylith.meshio.CellFilterAvgMesh

Modified: short/3D/PyLith/trunk/libsrc/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/Makefile.am	2010-10-15 23:45:21 UTC (rev 17288)
+++ short/3D/PyLith/trunk/libsrc/Makefile.am	2010-10-17 03:27:29 UTC (rev 17289)
@@ -165,6 +165,12 @@
 
 if ENABLE_CUBIT
   libpylith_la_SOURCES += \
+	meshio/HDF5.cc
+  libpylith_la_LIBADD += -lhdf5
+endif
+
+if ENABLE_CUBIT
+  libpylith_la_SOURCES += \
 	meshio/MeshIOCubit.cc
   libpylith_la_LIBADD += -lnetcdf_c++ -lnetcdf
 endif

Modified: short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc	2010-10-15 23:45:21 UTC (rev 17288)
+++ short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc	2010-10-17 03:27:29 UTC (rev 17289)
@@ -18,10 +18,6 @@
 
 #include "HDF5.hh" // implementation of class methods
 
-extern "C" {
-#include "hdf5.h" // USES hdf5
-}
-
 #include <stdexcept> // USES std::runtime_error
 #include <sstream> // USES std::ostringstream
 #include <cassert> // USES assert()
@@ -34,7 +30,7 @@
   _file = H5Fopen(filename, mode, H5P_DEFAULT);
   if (_file < 0) {
     std::ostringstream msg;
-    msg << "Could not open HDF5 mesh file '" << filename << "'.";
+    msg << "Could not open HDF5 file '" << filename << "'.";
     throw std::runtime_error(msg.str());
   } // if
 } // constructor
@@ -51,7 +47,7 @@
 hid_t
 pylith::meshio::HDF5::createGroup(const char* name)
 { // createGroup
-  hid_t group = H5Gcreate(_file, name, 0);
+  hid_t group = H5Gcreate(_file, name, 0, H5P_DEFAULT, H5P_DEFAULT);
   if (group < 0) {
     std::ostringstream msg;
     msg << "Coule not create group '" << name << "'.";
@@ -66,23 +62,27 @@
 void
 pylith::meshio::HDF5::writeAttribute(hid_t parent,
 				     const char* name,
-				     const void* pValue,
+				     const void* value,
 				     hid_t datatype)
 { // writeAttribute
   try {
     hid_t dataspace = H5Screate(H5S_SCALAR);
-    if (dataspace < 0)
+    if (dataspace < 0) {
       throw std::runtime_error("Could not create dataspace for");
+    } // if
     hid_t attribute = H5Acreate(parent, name,
-				datatype, dataspace, H5P_DEFAULT);
-    if (attribute < 0)
+				datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+    if (attribute < 0) {
       throw std::runtime_error("Could not create");
-    hid_t err = H5Awrite(attribute, datatype, pValue);
-    if (err < 0)
+    } // if
+    hid_t err = H5Awrite(attribute, datatype, value);
+    if (err < 0) {
       throw std::runtime_error("Could not write");
+    } // if
     err = H5Aclose(attribute);
-    if (err < 0)
+    if (err < 0) {
       throw std::runtime_error("Could not close");
+    } // if
   } catch (std::exception& err) {
     std::ostringstream msg;
     msg << err.what() << " attribute '" << name << "'.";
@@ -97,6 +97,169 @@
 				     const char* name,
 				     const char* value)
 { // writeAttribute
+  try {
+    hid_t dataspace = H5Screate(H5S_SCALAR);
+    if (dataspace < 0) {
+      throw std::runtime_error("Could not create dataspace for");
+    } // if
+    hid_t attribute = H5Acreate(parent, name,
+				H5T_C_S1, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+    if (attribute < 0) {
+      throw std::runtime_error("Could not create");
+    } // if
+    hid_t err = H5Awrite(attribute, H5T_C_S1, value);
+    if (err < 0) {
+      throw std::runtime_error("Could not write");
+    } // if
+    err = H5Aclose(attribute);
+    if (err < 0) {
+      throw std::runtime_error("Could not close");
+    } // if
+  } catch (std::exception& err) {
+    std::ostringstream msg;
+    msg << err.what() << " attribute '" << name << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
 } // writeAttribute
 
-// End of file 
+// ----------------------------------------------------------------------
+// Create dataset.
+void
+pylith::meshio::HDF5::createDataset(const char* parent,
+				    const char* name,
+				    const hsize_t* dims,
+				    const hsize_t ndims,
+				    hid_t datatype)
+{ // createDataset
+  try {
+    // Create the dataspace
+    hid_t dataspace = H5Screate_simple(ndims, dims, 0);
+    if (dataspace < 0)
+      throw std::runtime_error("Could not create dataspace.");
+      
+    // Create chunked dataset
+    hid_t property = H5Pcreate(H5P_DATASET_CREATE);
+    if (property < 0)
+      throw std::runtime_error("Could not create property for dataset.");
+
+    H5Pset_chunk(property, ndims, dims);
+    //H5Pset_deflate(property, 6);
+
+    std::string fullname = 
+      std::string(parent) + std::string("/") + std::string(name);
+#if defined(OLD_H5INTERFACE)
+    hid_t dataset = H5Dcreate(_file, fullname.c_str(),
+			      datatype, dataspace, property);
+#else
+    hid_t dataset = H5Dcreate(_file, fullname.c_str(),
+			      datatype, dataspace, H5P_DEFAULT,
+			      property, H5P_DEFAULT);
+#endif
+    if (dataset < 0)
+      throw std::runtime_error("Could not create dataset.");
+    H5Pclose(property);
+    H5Sclose(dataspace);
+    H5Dclose(dataset);
+  } catch (const std::exception& err) {
+    std::ostringstream msg;
+    msg << "Error occurred while creating dataset '"
+	<< parent << "/" << name << "':\n"
+	<< err.what();
+    throw std::runtime_error(msg.str());
+  } catch (...) {
+    std::ostringstream msg;
+    msg << "Unknown  occurred while creating dataset '"
+	<< parent << "/" << name << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+} // createDataset
+
+// ----------------------------------------------------------------------
+// Create dataset associated with data stored in a raw external binary
+// file.
+void
+pylith::meshio::HDF5::createDatasetRawExternal(const char* parent,
+					       const char* name,
+					       const char* filename,
+					       const hsize_t* dims,
+					       const hsize_t ndims,
+					       hid_t datatype)
+{ // createDatasetRawExternal
+} // createDatasetRawExternal
+
+// ----------------------------------------------------------------------
+// Append slice to dataset.
+void
+pylith::meshio::HDF5::writeDatasetSlice(const char* parent,
+					const char* name,
+					const void* data,
+					const hsize_t* dims,
+					const hsize_t ndims,
+					const int islice,
+					hid_t datatype)
+{ // writeDatasetSlice
+  assert(_file > 0);
+
+  try {
+    // Select hyperslab in file
+    hsize_t* count = (ndims > 0) ? new hsize_t[ndims] : 0;
+    hsize_t* stride = (ndims > 0) ? new hsize_t[ndims] : 0;
+    hsize_t* offset = (ndims > 0) ? new hsize_t[ndims] : 0;
+    
+    for (int i=0; i < ndims; ++i) {
+      count[i] = 1;
+      stride[i] = 1;
+      offset[i] = 0;
+    } // for
+    offset[0] = islice;
+
+    // Open group
+    hid_t group = H5Gopen(_file, parent, H5P_DEFAULT);
+    if (group < 0)
+      throw std::runtime_error("Could not open group.");
+    
+    // Open the dataset
+    hid_t dataset = H5Dopen(group, name, H5P_DEFAULT);
+    if (dataset < 0)
+      throw std::runtime_error("Could not open dataset.");
+    
+    hid_t dataspace = H5Dget_space(dataset);
+    if (dataspace < 0)
+      throw std::runtime_error("Could not get dataspace.");
+
+    hid_t chunkspace = H5Screate_simple(ndims, dims, 0);
+    if (chunkspace < 0)
+      throw std::runtime_error("Could not create chunk dataspace.");
+
+    herr_t err = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET,
+				     offset, stride, count, dims);
+    if (err < 0)
+      throw std::runtime_error("Could not select hyperslab.");
+
+    err = H5Dwrite(dataset, datatype, chunkspace, dataspace, 
+		   H5P_DEFAULT, data);
+    if (err < 0)
+      throw std::runtime_error("Could not write data.");
+    err = H5Dclose(dataset);
+    if (err < 0)
+      throw std::runtime_error("Could not close dataset.");
+    
+    err = H5Gclose(group);
+    if (err < 0)
+      throw std::runtime_error("Could not close group.");
+  } catch (const std::exception& err) {
+    std::ostringstream msg;
+    msg << "Error occurred while writing dataset '"
+	<< parent << "/" << name << "':\n"
+	<< err.what();
+    throw std::runtime_error(msg.str());
+  } catch (...) {
+    std::ostringstream msg;
+    msg << "Unknown  occurred while writing dataset '"
+	<< parent << "/" << name << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+} // writeDatasetSlice
+
+
+// End of file

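For orientation, here is a minimal usage sketch of the dataset interface implemented above. It is not part of the commit; the file name, group name, dimensions, and data are hypothetical, and H5Fopen (used by the constructor) expects the file to already exist.

#include "pylith/meshio/HDF5.hh" // brings in hdf5.h via the header

int
main(void)
{ // main
  // Open an existing HDF5 file for read/write (hypothetical filename).
  pylith::meshio::HDF5 h5("output/shearwave.h5", H5F_ACC_RDWR);

  // Create the parent group; the caller owns the returned hid_t.
  hid_t group = h5.createGroup("/vertex_fields");
  H5Gclose(group);

  // Create a chunked dataset: 10 time slices of 25 vertices x 3 components.
  const hsize_t dims[3] = { 10, 25, 3 };
  h5.createDataset("/vertex_fields", "displacement", dims, 3, H5T_NATIVE_DOUBLE);

  // Append one time slice (islice = 0); the slice dims describe a single slice.
  double data[25*3] = { 0.0 };
  const hsize_t sliceDims[3] = { 1, 25, 3 };
  h5.writeDatasetSlice("/vertex_fields", "displacement", data,
                       sliceDims, 3, 0, H5T_NATIVE_DOUBLE);

  return 0;
} // main
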
Modified: short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh	2010-10-15 23:45:21 UTC (rev 17288)
+++ short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh	2010-10-17 03:27:29 UTC (rev 17289)
@@ -25,7 +25,9 @@
   } // meshio
 } // pylith
 
-typedef int hid_t; // HASA hid_t
+extern "C" {
+#include "hdf5.h" // USES hdf5
+}
 
 class pylith::meshio::HDF5
 { // HDF5
@@ -38,7 +40,8 @@
    * @param filename Name of HDF5 file
    * @param mode Mode for HDF5 file
    */
-  HDF5(const char* filename, hid_t mode);
+  HDF5(const char* filename,
+       hid_t mode);
 
   /// Destructor
   ~HDF5(void);
@@ -52,44 +55,77 @@
    */
   hid_t createGroup(const char* name);
 
-  /** Create scalar attribute.
+  /** Set scalar attribute.
    *
    * @param parent Parent of attribute.
-   * @param attrName Name of attribute.
-   * @param pValue Pointer to scalar value
+   * @param name Name of attribute.
+   * @param value Attribute value.
    * @param datatype Datatype of scalar.
    */
   void writeAttribute(hid_t parent,
 		      const char* name,
-		      const void* pValue,
+		      const void* value,
 		      hid_t datatype);
 
-  /** Create string attribute.
+  /** Set string attribute.
    *
    * @param parent Parent of attribute.
-   * @param attrName Name of attribute.
+   * @param name Name of attribute.
    * @param value String value
    */
   void writeAttribute(hid_t parent,
 		      const char* name,
 		      const char* value);
 
-  /** Write dataset.
+  /** Create dataset.
    *
+   * @param parent Full path for parent of dataset.
+   * @param name Name of dataset.
+   * @param dims Dimensions of data.
+   * @param ndims Number of dimensions of data.
+   * @param datatype Type of data.
+   */
+  void createDataset(const char* parent,
+		     const char* name,
+		     const hsize_t* dims,
+		     const hsize_t ndims,
+		     hid_t datatype);
+  
+  /** Create dataset associated with data stored in a raw external
+   * binary file.
+   *
    * @param parent Parent of dataset.
    * @param name Name of dataset.
-   * @param pData Pointer to data.
+   * @param filename Name of external raw data file.
    * @param dims Dimensions of data.
    * @param ndims Number of dimensions of data.
    * @param datatype Type of data.
    */
-  void writeDataset(hid_t parent,
-		    const char* name,
-		    const void* pData,
-		    const int* dims,
-		    const int ndims,
-		    hid_t datatype);
+  void createDatasetRawExternal(const char* parent,
+				const char* name,
+				const char* filename,
+				const hsize_t* dims,
+				const hsize_t ndims,
+				hid_t datatype);
 
+  /** Append slice to dataset.
+   *
+   * @param parent Parent of dataset.
+   * @param name Name of dataset.
+   * @param data Data.
+   * @param dims Dimensions of data.
+   * @param ndims Number of dimensions of data.
+   * @param islice Index of data slice.
+   * @param datatype Type of data.
+   */
+  void writeDatasetSlice(const char* parent,
+			 const char* name,
+			 const void* data,
+			 const hsize_t* dims,
+			 const hsize_t ndims,
+			 const int islice,
+			 hid_t datatype);
+
 // PRIVATE MEMBERS ------------------------------------------------------
 private :
 

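A corresponding sketch for the group/attribute interface declared above (group and attribute names are hypothetical; the scalar overload takes a pointer to the value plus its HDF5 datatype, while the string overload takes the value directly):

#include "pylith/meshio/HDF5.hh"

void
writeMetadata(pylith::meshio::HDF5& h5)
{ // writeMetadata
  // Create a group and attach scalar and string attributes to it.
  hid_t group = h5.createGroup("/topology");

  const int cellDim = 2;
  h5.writeAttribute(group, "cell_dim", &cellDim, H5T_NATIVE_INT);
  h5.writeAttribute(group, "cell_type", "triangle");

  H5Gclose(group); // createGroup() leaves closing the group to the caller
} // writeMetadata
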
Modified: short/3D/PyLith/trunk/libsrc/meshio/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/Makefile.am	2010-10-15 23:45:21 UTC (rev 17288)
+++ short/3D/PyLith/trunk/libsrc/meshio/Makefile.am	2010-10-17 03:27:29 UTC (rev 17289)
@@ -29,9 +29,6 @@
 	DataWriterVTK.hh \
 	DataWriterVTK.icc \
 	DataWriterVTK.cc \
-	DataWriterHDF5.hh \
-	DataWriterHDF5.icc \
-	DataWriterHDF5.cc \
 	MeshBuilder.hh \
 	MeshIO.hh \
 	MeshIO.icc \
@@ -51,6 +48,14 @@
 	VertexFilterVecNorm.cc \
 	meshiofwd.hh
 
+if ENABLE_HDF5
+  subpkginclude_HEADERS += \
+	HDF5.hh \
+	DataWriterHDF5.hh \
+	DataWriterHDF5.icc \
+	DataWriterHDF5.cc
+endif
+
 if ENABLE_CUBIT
   subpkginclude_HEADERS += \
 	MeshIOCubit.hh \

Modified: short/3D/PyLith/trunk/libsrc/topology/RefineEdges2.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/topology/RefineEdges2.cc	2010-10-15 23:45:21 UTC (rev 17288)
+++ short/3D/PyLith/trunk/libsrc/topology/RefineEdges2.cc	2010-10-17 03:27:29 UTC (rev 17289)
@@ -282,8 +282,10 @@
 		  << std::endl;
       } // if
       //assert(-1 != newRemotePoint);
-      newSendOverlap->addArrow(newLocalPoint, rank, newRemotePoint);
-      newRecvOverlap->addArrow(rank, newLocalPoint, newRemotePoint);
+      if (-1 != newRemotePoint) {
+	newSendOverlap->addArrow(newLocalPoint, rank, newRemotePoint);
+	newRecvOverlap->addArrow(rank, newLocalPoint, newRemotePoint);
+      } // if
     } // for
   } // for
 

Modified: short/3D/PyLith/trunk/modulesrc/meshio/meshio.i
===================================================================
--- short/3D/PyLith/trunk/modulesrc/meshio/meshio.i	2010-10-15 23:45:21 UTC (rev 17288)
+++ short/3D/PyLith/trunk/modulesrc/meshio/meshio.i	2010-10-17 03:27:29 UTC (rev 17289)
@@ -25,7 +25,7 @@
 #include "pylith/meshio/MeshIOAscii.hh"
 #include "pylith/meshio/MeshIOLagrit.hh"
 #include "pylith/meshio/MeshIOSieve.hh"
-#ifdef ENABLE_CUBIT
+#if defined(ENABLE_CUBIT)
 #include "pylith/meshio/MeshIOCubit.hh"
 #endif
 
@@ -35,9 +35,11 @@
 #include "pylith/meshio/CellFilterAvg.hh"
 #include "pylith/meshio/DataWriter.hh"
 #include "pylith/meshio/DataWriterVTK.hh"
-#include "pylith/meshio/DataWriterHDF5.hh"
 #include "pylith/meshio/OutputManager.hh"
 #include "pylith/meshio/OutputSolnSubset.hh"
+#if defined(ENABLE_HDF5)
+#include "pylith/meshio/DataWriterHDF5.hh"
+#endif
 
 #include "pylith/utils/arrayfwd.hh"
 %}
@@ -60,7 +62,7 @@
 %include "MeshIOAscii.i"
 %include "MeshIOLagrit.i"
 %include "MeshIOSieve.i"
-#ifdef ENABLE_CUBIT
+#if defined(ENABLE_CUBIT)
 %include "MeshIOCubit.i"
 #endif
 
@@ -70,9 +72,11 @@
 %include "CellFilterAvg.i"
 %include "DataWriter.i"
 %include "DataWriterVTK.i"
-%include "DataWriterHDF5.i"
 %include "OutputManager.i"
 %include "OutputSolnSubset.i"
+#if defined(ENABLE_HDF5)
+%include "DataWriterHDF5.i"
+#endif
 
 // Template instantiation
 %template(MeshVertexFilter) pylith::meshio::VertexFilter<pylith::topology::Field<pylith::topology::Mesh> >;
@@ -93,9 +97,11 @@
 %template(SubMeshDataWriterVTK) pylith::meshio::DataWriterVTK<pylith::topology::SubMesh, pylith::topology::Field<pylith::topology::Mesh> >;
 %template(SubSubMeshDataWriterVTK) pylith::meshio::DataWriterVTK<pylith::topology::SubMesh, pylith::topology::Field<pylith::topology::SubMesh> >;
 
+#if defined(ENABLE_HDF5)
 %template(MeshDataWriterHDF5) pylith::meshio::DataWriterHDF5<pylith::topology::Mesh, pylith::topology::Field<pylith::topology::Mesh> >;
 %template(SubMeshDataWriterHDF5) pylith::meshio::DataWriterHDF5<pylith::topology::SubMesh, pylith::topology::Field<pylith::topology::Mesh> >;
 %template(SubSubMeshDataWriterHDF5) pylith::meshio::DataWriterHDF5<pylith::topology::SubMesh, pylith::topology::Field<pylith::topology::SubMesh> >;
+#endif
 
 %template(MeshOutputManager) pylith::meshio::OutputManager<pylith::topology::Mesh, pylith::topology::Field<pylith::topology::Mesh> >;
 %template(SubMeshOutputManager) pylith::meshio::OutputManager<pylith::topology::SubMesh, pylith::topology::Field<pylith::topology::SubMesh> >;

Modified: short/3D/PyLith/trunk/pylith/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/pylith/Makefile.am	2010-10-15 23:45:21 UTC (rev 17288)
+++ short/3D/PyLith/trunk/pylith/Makefile.am	2010-10-17 03:27:29 UTC (rev 17289)
@@ -91,13 +91,8 @@
 	meshio/DataWriterVTKMesh.py \
 	meshio/DataWriterVTKSubMesh.py \
 	meshio/DataWriterVTKSubSubMesh.py \
-	meshio/DataWriterHDF5.py \
-	meshio/DataWriterHDF5Mesh.py \
-	meshio/DataWriterHDF5SubMesh.py \
-	meshio/DataWriterHDF5SubSubMesh.py \
 	meshio/MeshIOObj.py \
 	meshio/MeshIOAscii.py \
-	meshio/MeshIOCubit.py \
 	meshio/MeshIOLagrit.py \
 	meshio/MeshIOSieve.py \
 	meshio/OutputDirichlet.py \
@@ -179,7 +174,18 @@
 	tests/StateVariables.py \
 	tests/Fault.py
 
+if ENABLE_CUBIT
+  nobase_pkgpyexec_PYTHON += meshio/MeshIOCubit.py
+endif
 
+if ENABLE_HDF5
+  nobase_pkgpyexec_PYTHON += \
+	meshio/DataWriterHDF5.py \
+	meshio/DataWriterHDF5Mesh.py \
+	meshio/DataWriterHDF5SubMesh.py \
+	meshio/DataWriterHDF5SubSubMesh.py
+endif
+
 if ENABLE_TETGEN
   nobase_pkgpyexec_PYTHON += topology/MeshGenSimple.py
 endif


