[cig-commits] [commit] baagaard/feature-output-station-names: Add writePointNames() to DataWriter. (f784781)

cig_noreply at geodynamics.org cig_noreply at geodynamics.org
Wed Nov 5 15:47:55 PST 2014


Repository : https://github.com/geodynamics/pylith

On branch  : baagaard/feature-output-station-names
Link       : https://github.com/geodynamics/pylith/compare/f33c75b19fd60eedb2a3405db76a1fee333bb1d7...5b6d812b1612809fea3bd331c4e5af98c25a536a

>---------------------------------------------------------------

commit f7847815a78270f7339e92272a4c284a6d71186c
Author: Brad Aagaard <baagaard at usgs.gov>
Date:   Tue Nov 4 17:24:35 2014 -0800

    Add writePointNames() to DataWriter.
    
    Update HDF5 object to allow writing dataset of variable length strings.
    Update corresponding C++ unit tests (TestHDF5).


>---------------------------------------------------------------

f7847815a78270f7339e92272a4c284a6d71186c
 libsrc/pylith/meshio/DataWriter.cc        |  11 +
 libsrc/pylith/meshio/DataWriter.hh        |  11 +
 libsrc/pylith/meshio/DataWriterHDF5.cc    |  36 ++-
 libsrc/pylith/meshio/DataWriterHDF5.hh    |  14 ++
 libsrc/pylith/meshio/DataWriterHDF5Ext.cc |  32 ++-
 libsrc/pylith/meshio/DataWriterHDF5Ext.hh |  10 +
 libsrc/pylith/meshio/HDF5.cc              | 404 ++++++++++++++++++------------
 libsrc/pylith/meshio/HDF5.hh              |  63 ++++-
 libsrc/pylith/meshio/Xdmf.cc              |   9 +-
 unittests/libtests/meshio/TestHDF5.cc     |  80 ++++--
 unittests/libtests/meshio/TestHDF5.hh     |  13 +-
 11 files changed, 474 insertions(+), 209 deletions(-)

diff --git a/libsrc/pylith/meshio/DataWriter.cc b/libsrc/pylith/meshio/DataWriter.cc
index 3c63e48..71d027e 100644
--- a/libsrc/pylith/meshio/DataWriter.cc
+++ b/libsrc/pylith/meshio/DataWriter.cc
@@ -108,6 +108,7 @@ pylith::meshio::DataWriter::openTimeStep(const PylithScalar t,
 					 const char* label,
 					 const int labelId)
 { // openTimeStep
+  // Default: no implementation.
 } // openTimeStep
 
 // ----------------------------------------------------------------------
@@ -115,6 +116,7 @@ pylith::meshio::DataWriter::openTimeStep(const PylithScalar t,
 void
 pylith::meshio::DataWriter::closeTimeStep(void)
 { // closeTimeStep
+  // Default: no implementation.
 } // closeTimeStep
 
 // ----------------------------------------------------------------------
@@ -126,4 +128,13 @@ pylith::meshio::DataWriter::DataWriter(const DataWriter& w) :
 } // copy constructor
 
 
+// ----------------------------------------------------------------------
+// Write dataset with names of points to file.
+void
+pylith::meshio::DataWriter::writePointNames(const char* const* names,
+					    const int numNames)
+{ // writePointNames
+  // Default: no implementation.
+} // writePointNames
+
 // End of file 
diff --git a/libsrc/pylith/meshio/DataWriter.hh b/libsrc/pylith/meshio/DataWriter.hh
index b52798d..4cdf411 100644
--- a/libsrc/pylith/meshio/DataWriter.hh
+++ b/libsrc/pylith/meshio/DataWriter.hh
@@ -124,6 +124,17 @@ public :
 		      const char* label =0,
 		      const int labelId =0) = 0;
 
+  /** Write dataset with names of points to file.
+   *
+   * @param names Array with name for each point, e.g., station name.
+   * @param numNames Number of names in array.
+   *
+   * Primarily used with OutputSolnPoints.
+   */
+  virtual
+  void writePointNames(const char* const* names,
+		       const int numNames);
+
 // PROTECTED METHODS ////////////////////////////////////////////////////
 protected :
 
diff --git a/libsrc/pylith/meshio/DataWriterHDF5.cc b/libsrc/pylith/meshio/DataWriterHDF5.cc
index 570e6b7..334c94f 100644
--- a/libsrc/pylith/meshio/DataWriterHDF5.cc
+++ b/libsrc/pylith/meshio/DataWriterHDF5.cc
@@ -319,8 +319,8 @@ pylith::meshio::DataWriterHDF5::writeVertexField(const PylithScalar t,
       err = PetscViewerHDF5GetFileId(_viewer, &h5); PYLITH_CHECK_ERROR(err);
       assert(h5 >= 0);
       std::string fullName = std::string("/vertex_fields/") + field.label();
-      HDF5::writeAttribute(h5, fullName.c_str(), "vector_field_type",
-			   topology::FieldBase::vectorFieldString(field.vectorFieldType()));
+      const char* sattr = topology::FieldBase::vectorFieldString(field.vectorFieldType());
+      HDF5::writeAttribute(h5, fullName.c_str(), "vector_field_type", sattr);
     } // if
 
   } catch (const std::exception& err) {
@@ -387,8 +387,8 @@ pylith::meshio::DataWriterHDF5::writeCellField(const PylithScalar t,
       err = PetscViewerHDF5GetFileId(_viewer, &h5); PYLITH_CHECK_ERROR(err);
       assert(h5 >= 0);
       std::string fullName = std::string("/cell_fields/") + field.label();
-      HDF5::writeAttribute(h5, fullName.c_str(), "vector_field_type",
-			   topology::FieldBase::vectorFieldString(field.vectorFieldType()));
+      const char* sattr = topology::FieldBase::vectorFieldString(field.vectorFieldType());
+      HDF5::writeAttribute(h5, fullName.c_str(), "vector_field_type", sattr);
     } // if
   } catch (const std::exception& err) {
     std::ostringstream msg;
@@ -406,6 +406,34 @@ pylith::meshio::DataWriterHDF5::writeCellField(const PylithScalar t,
 } // writeCellField
 
 // ----------------------------------------------------------------------
+// Write dataset with names of points to file.
+void
+pylith::meshio::DataWriterHDF5::writePointNames(const char* const* names,
+						const int numNames)
+{ // writePointNames
+  PYLITH_METHOD_BEGIN;
+
+  assert(_viewer);
+
+  try {
+    hid_t h5 = -1;
+    PetscErrorCode err = PetscViewerHDF5GetFileId(_viewer, &h5); PYLITH_CHECK_ERROR(err);
+    assert(h5 >= 0);
+    HDF5::writeDataset(h5, "/", "stations", names, numNames);
+  } catch (const std::exception& err) {
+    std::ostringstream msg;
+    msg << "Error while writing stations to HDF5 file '" << _hdf5Filename() << "'.\n" << err.what();
+    throw std::runtime_error(msg.str());
+  } catch (...) { 
+    std::ostringstream msg;
+    msg << "Error while writing stations to HDF5 file '" << _hdf5Filename() << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+
+  PYLITH_METHOD_END;
+} // writePointNames
+
+// ----------------------------------------------------------------------
 // Generate filename for HDF5 file.
 std::string
 pylith::meshio::DataWriterHDF5::_hdf5Filename(void) const
diff --git a/libsrc/pylith/meshio/DataWriterHDF5.hh b/libsrc/pylith/meshio/DataWriterHDF5.hh
index e8becc1..214d4c9 100644
--- a/libsrc/pylith/meshio/DataWriterHDF5.hh
+++ b/libsrc/pylith/meshio/DataWriterHDF5.hh
@@ -36,6 +36,10 @@
  *   cell_fields - group
  *     CELL_FIELD (name of cell field) - dataset
  *       [ntimesteps, ncells, fiberdim]
+ *   time - dataset
+ *     [ntimesteps]
+ *   stations - dataset [optional]
+ *     [nvertices] (variable-length strings)
  */
 
 #if !defined(pylith_meshio_datawriterhdf5_hh)
@@ -121,6 +125,16 @@ public :
 		      const char* label =0,
 		      const int labelId =0);
 
+  /** Write dataset with names of points to file.
+   *
+   * @param names Array with name for each point, e.g., station name.
+   * @param numNames Number of names in array.
+   *
+   * Primarily used with OutputSolnPoints.
+   */
+  void writePointNames(const char* const* names,
+		       const int numNames);
+
 // PRIVATE METHODS //////////////////////////////////////////////////////
 private :
 
diff --git a/libsrc/pylith/meshio/DataWriterHDF5Ext.cc b/libsrc/pylith/meshio/DataWriterHDF5Ext.cc
index 95e379e..48d951f 100644
--- a/libsrc/pylith/meshio/DataWriterHDF5Ext.cc
+++ b/libsrc/pylith/meshio/DataWriterHDF5Ext.cc
@@ -442,7 +442,8 @@ pylith::meshio::DataWriterHDF5Ext::writeVertexField(const PylithScalar t,
 	
         _h5->createDatasetRawExternal("/vertex_fields", field.label(), _datasetFilename(field.label()).c_str(), maxDims, ndims, scalartype);
         std::string fullName = std::string("/vertex_fields/") + field.label();
-        _h5->writeAttribute(fullName.c_str(), "vector_field_type", topology::FieldBase::vectorFieldString(field.vectorFieldType()));
+	const char* sattr = topology::FieldBase::vectorFieldString(field.vectorFieldType());
+        _h5->writeAttribute(fullName.c_str(), "vector_field_type", sattr);
       } // if
     } else {
       if (!commRank) {
@@ -599,8 +600,8 @@ pylith::meshio::DataWriterHDF5Ext::writeCellField(const PylithScalar t,
 	
         _h5->createDatasetRawExternal("/cell_fields", field.label(), _datasetFilename(field.label()).c_str(), maxDims, ndims, scalartype);
         std::string fullName = std::string("/cell_fields/") + field.label();
-        _h5->writeAttribute(fullName.c_str(), "vector_field_type",
-                            topology::FieldBase::vectorFieldString(field.vectorFieldType()));
+	const char* sattr = topology::FieldBase::vectorFieldString(field.vectorFieldType());
+        _h5->writeAttribute(fullName.c_str(), "vector_field_type", sattr);
       } // if
 
     } else {
@@ -631,6 +632,31 @@ pylith::meshio::DataWriterHDF5Ext::writeCellField(const PylithScalar t,
 } // writeCellField
 
 // ----------------------------------------------------------------------
+// Write dataset with names of points to file.
+void
+pylith::meshio::DataWriterHDF5Ext::writePointNames(const char* const* names,
+						   const int numNames)
+{ // writePointNames
+  PYLITH_METHOD_BEGIN;
+
+  assert(_h5);
+
+  try {
+    _h5->writeDataset("/", "stations", names, numNames);
+  } catch (const std::exception& err) {
+    std::ostringstream msg;
+    msg << "Error while writing stations to HDF5 file '" << _hdf5Filename() << "'.\n" << err.what();
+    throw std::runtime_error(msg.str());
+  } catch (...) { 
+    std::ostringstream msg;
+    msg << "Error while writing stations to HDF5 file '" << _hdf5Filename() << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+
+  PYLITH_METHOD_END;
+} // writePointNames
+
+// ----------------------------------------------------------------------
 // Generate filename for HDF5 file.
 std::string
 pylith::meshio::DataWriterHDF5Ext::_hdf5Filename(void) const
diff --git a/libsrc/pylith/meshio/DataWriterHDF5Ext.hh b/libsrc/pylith/meshio/DataWriterHDF5Ext.hh
index ded115b..eea295c 100644
--- a/libsrc/pylith/meshio/DataWriterHDF5Ext.hh
+++ b/libsrc/pylith/meshio/DataWriterHDF5Ext.hh
@@ -120,6 +120,16 @@ public :
 		      const char* label =0,
 		      const int labelId =0);
 
+  /** Write dataset with names of points to file.
+   *
+   * @param names Array with name for each point, e.g., station name.
+   * @param numNames Number of names in array.
+   *
+   * Primarily used with OutputSolnPoints.
+   */
+  void writePointNames(const char* const* names,
+		       const int numNames);
+
 // PRIVATE METHODS //////////////////////////////////////////////////////
 private :
 
diff --git a/libsrc/pylith/meshio/HDF5.cc b/libsrc/pylith/meshio/HDF5.cc
index 584933a..3c33b94 100644
--- a/libsrc/pylith/meshio/HDF5.cc
+++ b/libsrc/pylith/meshio/HDF5.cc
@@ -244,7 +244,7 @@ pylith::meshio::HDF5::getDatasetDims(hsize_t** dims,
     throw std::runtime_error(msg.str());
   } catch (...) {
     std::ostringstream msg;
-    msg << "Unknown  occurred while reading dataset '"
+    msg << "Unknown error occurred while reading dataset '"
 	<< parent << "/" << name << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch  
@@ -300,7 +300,7 @@ pylith::meshio::HDF5::getGroupDatasets(string_vector* names,
     throw std::runtime_error(msg.str());
   } catch (...) {
     std::ostringstream msg;
-    msg << "Unknown occurred while getting names of datasets for group '"
+    msg << "Unknown error occurred while getting names of datasets for group '"
 	<< parent << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch  
@@ -348,54 +348,7 @@ pylith::meshio::HDF5::writeAttribute(const char* parent,
 { // writeAttribute
   PYLITH_METHOD_BEGIN;
 
-  assert(parent);
-  assert(name);
-  assert(value);
-
-  try {
-    hid_t dataspace = H5Screate(H5S_SCALAR);
-    if (dataspace < 0)
-      throw std::runtime_error("Could not create dataspace for");
-
-#if defined(PYLITH_HDF5_USE_API_18)
-    hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
-#else
-    hid_t dataset = H5Dopen(_file, parent);
-#endif
-    if (dataset < 0)
-      throw std::runtime_error("Could not open parent dataset for");
-
-#if defined(PYLITH_HDF5_USE_API_18)
-    hid_t attribute = H5Acreate2(dataset, name,
-				datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
-#else
-    hid_t attribute = H5Acreate(dataset, name,
-				datatype, dataspace, H5P_DEFAULT);
-#endif
-    if (attribute < 0)
-      throw std::runtime_error("Could not create");
-
-    hid_t err = H5Awrite(attribute, datatype, value);
-    if (err < 0)
-      throw std::runtime_error("Could not write");
-
-    err = H5Aclose(attribute);
-    if (err < 0) 
-      throw std::runtime_error("Could not close");
-
-    err = H5Dclose(dataset);
-    if (err < 0) 
-      throw std::runtime_error("Could not close dataset for");
-
-    err = H5Sclose(dataspace);
-    if (err < 0) 
-      throw std::runtime_error("Could not close dataspace for");
-
-  } catch (std::exception& err) {
-    std::ostringstream msg;
-    msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
-    throw std::runtime_error(msg.str());
-  } // try/catch
+  HDF5::writeAttribute(_file, parent, name, value, datatype);
 
   PYLITH_METHOD_END;
 } // writeAttribute
@@ -464,87 +417,47 @@ pylith::meshio::HDF5::writeAttribute(hid_t h5,
 } // writeAttribute
 
 // ----------------------------------------------------------------------
-// Read scalar attribute.
+// Write string attribute.
 void
-pylith::meshio::HDF5::readAttribute(const char* parent,
-				    const char* name,
-				    void* value,
-				    hid_t datatype)
-{ // readAttribute
+pylith::meshio::HDF5::writeAttribute(const char* parent,
+				     const char* name,
+				     const char* value)
+{ // writeAttribute
   PYLITH_METHOD_BEGIN;
 
-  assert(parent);
-  assert(name);
-  assert(value);
-
-  try {
-#if defined(PYLITH_HDF5_USE_API_18)
-    hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
-#else
-    hid_t dataset = H5Dopen(_file, parent);
-#endif
-    if (dataset < 0)
-      throw std::runtime_error("Could not open parent dataset for");
-
-    hid_t attribute = H5Aopen(dataset, name, H5P_DEFAULT);
-    if (attribute < 0)
-      throw std::runtime_error("Could not open");
-
-    hid_t dtype = H5Aget_type(attribute);
-    if (dtype < 0)
-      throw std::runtime_error("Could not get datatype of");
-
-    hid_t err = H5Aread(attribute, dtype, value);
-    if (err < 0)
-      throw std::runtime_error("Could not read");
-
-    err = H5Tclose(dtype);
-    if (err < 0) 
-      throw std::runtime_error("Could not close datatype for");
-
-    err = H5Aclose(attribute);
-    if (err < 0) 
-      throw std::runtime_error("Could not close");
-
-    err = H5Dclose(dataset);
-    if (err < 0) 
-      throw std::runtime_error("Could not close dataset for");
-
-  } catch (std::exception& err) {
-    std::ostringstream msg;
-    msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
-    throw std::runtime_error(msg.str());
-  } // try/catch
+  HDF5::writeAttribute(_file, parent, name, value);
 
   PYLITH_METHOD_END;
-} // readAttribute
+} // writeAttribute
 
 // ----------------------------------------------------------------------
-// Write string attribute.
+// Write string attribute (external handle to HDF5 file).
 void
-pylith::meshio::HDF5::writeAttribute(const char* parent,
+pylith::meshio::HDF5::writeAttribute(hid_t h5,
+				     const char* parent,
 				     const char* name,
 				     const char* value)
 { // writeAttribute
   PYLITH_METHOD_BEGIN;
 
+  assert(h5);
   assert(parent);
   assert(name);
   assert(value);
 
   try {
-    hid_t dataspace = H5Screate(H5S_SCALAR);
-    if (dataspace < 0) 
-      throw std::runtime_error("Could not create dataspace for");
-
 #if defined(PYLITH_HDF5_USE_API_18)
-    hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
+    hid_t dataset = H5Dopen2(h5, parent, H5P_DEFAULT);
 #else
-    hid_t dataset = H5Dopen(_file, parent);
+    hid_t dataset = H5Dopen(h5, parent);
 #endif
     if (dataset < 0) 
       throw std::runtime_error("Could not open parent dataset for");
 
+    hid_t dataspace = H5Screate(H5S_SCALAR);
+    if (dataspace < 0) 
+      throw std::runtime_error("Could not create dataspace for");
+
     hid_t datatype = H5Tcopy(H5T_C_S1);
     if (datatype < 0) 
       throw std::runtime_error("Could not create datatype for");
@@ -593,13 +506,13 @@ pylith::meshio::HDF5::writeAttribute(const char* parent,
 } // writeAttribute
 
 // ----------------------------------------------------------------------
-// Write string attribute (external handle to HDF5 file).
+// Read scalar attribute.
 void
-pylith::meshio::HDF5::writeAttribute(hid_t h5,
-				     const char* parent,
-				     const char* name,
-				     const char* value)
-{ // writeAttribute
+pylith::meshio::HDF5::readAttribute(const char* parent,
+				    const char* name,
+				    void* value,
+				    hid_t datatype)
+{ // readAttribute
   PYLITH_METHOD_BEGIN;
 
   assert(parent);
@@ -607,56 +520,38 @@ pylith::meshio::HDF5::writeAttribute(hid_t h5,
   assert(value);
 
   try {
-    hid_t dataspace = H5Screate(H5S_SCALAR);
-    if (dataspace < 0) 
-      throw std::runtime_error("Could not create dataspace for");
-
 #if defined(PYLITH_HDF5_USE_API_18)
-    hid_t dataset = H5Dopen2(h5, parent, H5P_DEFAULT);
+    hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
 #else
-    hid_t dataset = H5Dopen(h5, parent);
+    hid_t dataset = H5Dopen(_file, parent);
 #endif
     if (dataset < 0)
       throw std::runtime_error("Could not open parent dataset for");
 
-    hid_t datatype = H5Tcopy(H5T_C_S1);
-    if (datatype < 0) 
-      throw std::runtime_error("Could not create datatype for");
+    hid_t attribute = H5Aopen(dataset, name, H5P_DEFAULT);
+    if (attribute < 0)
+      throw std::runtime_error("Could not open");
 
-    herr_t err = H5Tset_size(datatype, strlen(value)+1);
-    if (err < 0) 
-      throw std::runtime_error("Could not set size of");
+    hid_t dtype = H5Aget_type(attribute);
+    if (dtype < 0)
+      throw std::runtime_error("Could not get datatype of");
 
-#if defined(PYLITH_HDF5_USE_API_18)
-    hid_t attribute = H5Acreate2(dataset, name,
-				datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
-#else
-    hid_t attribute = H5Acreate(dataset, name,
-				datatype, dataspace, H5P_DEFAULT);
-#endif
-    if (attribute < 0) 
-      throw std::runtime_error("Could not create");
+    hid_t err = H5Aread(attribute, dtype, value);
+    if (err < 0)
+      throw std::runtime_error("Could not read");
 
-    err = H5Awrite(attribute, datatype, value);
+    err = H5Tclose(dtype);
     if (err < 0) 
-      throw std::runtime_error("Could not write");
+      throw std::runtime_error("Could not close datatype for");
 
     err = H5Aclose(attribute);
     if (err < 0) 
       throw std::runtime_error("Could not close");
 
-    err = H5Tclose(datatype);
-    if (err < 0) 
-      throw std::runtime_error("Could not close datatype for");
-
     err = H5Dclose(dataset);
     if (err < 0) 
       throw std::runtime_error("Could not close dataset for");
 
-    err = H5Sclose(dataspace);
-    if (err < 0) 
-      throw std::runtime_error("Could not close dataspace for");
-
   } catch (std::exception& err) {
     std::ostringstream msg;
     msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
@@ -664,7 +559,7 @@ pylith::meshio::HDF5::writeAttribute(hid_t h5,
   } // try/catch
 
   PYLITH_METHOD_END;
-} // writeAttribute
+} // readAttribute
 
 // ----------------------------------------------------------------------
 // Read string attribute.
@@ -817,7 +712,7 @@ pylith::meshio::HDF5::createDataset(const char* parent,
     throw std::runtime_error(msg.str());
   } catch (...) {
     std::ostringstream msg;
-    msg << "Unknown  occurred while creating dataset '" << name << "'.";
+    msg << "Unknown error occurred while creating dataset '" << name << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch
 
@@ -835,7 +730,7 @@ pylith::meshio::HDF5::writeDatasetChunk(const char* parent,
 					const int ndims,
 					const int chunk,
 					hid_t datatype)
-{ // writeDatasetSlice
+{ // writeDatasetChunk
   PYLITH_METHOD_BEGIN;
 
   assert(parent);
@@ -928,13 +823,13 @@ pylith::meshio::HDF5::writeDatasetChunk(const char* parent,
     throw std::runtime_error(msg.str());
   } catch (...) {
     std::ostringstream msg;
-    msg << "Unknown  occurred while writing dataset '"
+    msg << "Unknown error occurred while writing dataset '"
 	<< parent << "/" << name << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch
 
   PYLITH_METHOD_END;
-} // writeDatasetSlice
+} // writeDatasetChunk
 
 // ----------------------------------------------------------------------
 // Read dataset slice.
@@ -946,7 +841,7 @@ pylith::meshio::HDF5::readDatasetChunk(const char* parent,
 				       int* const ndims,
 				       const int chunk,
 				       hid_t datatype)
-{ // readDatasetSlice
+{ // readDatasetChunk
   PYLITH_METHOD_BEGIN;
 
   assert(parent);
@@ -1045,13 +940,13 @@ pylith::meshio::HDF5::readDatasetChunk(const char* parent,
     throw std::runtime_error(msg.str());
   } catch (...) {
     std::ostringstream msg;
-    msg << "Unknown  occurred while reading dataset '"
+    msg << "Unknown error occurred while reading dataset '"
 	<< parent << "/" << name << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch
 
   PYLITH_METHOD_END;
-} // readDatasetSlice
+} // readDatasetChunk
 
 // ----------------------------------------------------------------------
 // Create dataset associated with data stored in a raw external binary
@@ -1138,7 +1033,7 @@ pylith::meshio::HDF5::createDatasetRawExternal(const char* parent,
     throw std::runtime_error(msg.str());
   } catch (...) {
     std::ostringstream msg;
-    msg << "Unknown  occurred while creating dataset '" << name << "'.";
+    msg << "Unknown error occurred while creating dataset '" << name << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch
 
@@ -1203,12 +1098,213 @@ pylith::meshio::HDF5::extendDatasetRawExternal(const char* parent,
     throw std::runtime_error(msg.str());
   } catch (...) {
     std::ostringstream msg;
-    msg << "Unknown  occurred while updating dataset '" << name << "'.";
+    msg << "Unknown error occurred while updating dataset '" << name << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch
 
   PYLITH_METHOD_END;
 } // extendDatasetRawExternal
 
+// ----------------------------------------------------------------------
+// Write dataset comprised of an array of strings (external HDF5 handle).
+void
+pylith::meshio::HDF5::writeDataset(const char* parent,
+				   const char* name,
+				   const char* const* sarray,
+				   const int nstrings)
+{ // writeDataset
+  PYLITH_METHOD_BEGIN;
+
+  HDF5::writeDataset(_file, parent, name, sarray, nstrings);
+
+  PYLITH_METHOD_END;
+} // writeDataset
+
+// ----------------------------------------------------------------------
+// Write dataset comprised of an array of strings (external HDF5 handle).
+void
+pylith::meshio::HDF5::writeDataset(hid_t h5,
+				   const char* parent,
+				   const char* name,
+				   const char* const* sarray,
+				   const int nstrings)
+{ // writeDataset
+  PYLITH_METHOD_BEGIN;
+
+  assert(h5);
+  assert(parent);
+  assert(name);
+  assert(sarray);
+  assert(nstrings > 0);
+
+  try {
+    // Open group
+#if defined(PYLITH_HDF5_USE_API_18)
+    hid_t group = H5Gopen2(h5, parent, H5P_DEFAULT);
+#else
+    hid_t group = H5Gopen(h5, parent);
+#endif
+    if (group < 0) 
+      throw std::runtime_error("Could not open group.");
+
+    // Create the dataspace
+    const int ndims = 1;
+    hsize_t dims[ndims] = { nstrings };
+    hid_t dataspace = H5Screate_simple(ndims, dims, NULL);
+    if (dataspace < 0)
+      throw std::runtime_error("Could not create dataspace.");
+      
+    hid_t datatype = H5Tcopy(H5T_C_S1);
+    if (datatype < 0) 
+      throw std::runtime_error("Could not create datatype.");
+    herr_t err = H5Tset_size(datatype, H5T_VARIABLE);
+    if (err < 0) 
+      throw std::runtime_error("Could not set size of datatype.");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+    hid_t dataset = H5Dcreate2(group, name, datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+#else
+    hid_t dataset = H5Dcreate(group, name, datatype, dataspace, H5P_DEFAULT);
+#endif
+    if (dataset < 0) 
+      throw std::runtime_error("Could not create dataset.");
+
+    err = H5Dwrite(dataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, sarray);
+    if (err < 0)
+      throw std::runtime_error("Could not write dataset.");
+
+    err = H5Dclose(dataset);
+    if (err < 0)
+      throw std::runtime_error("Could not close dataset.");
+
+    err = H5Tclose(datatype);
+    if (err < 0) 
+      throw std::runtime_error("Could not close datatype.");
+
+    err = H5Sclose(dataspace);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataspace.");
+
+    err = H5Gclose(group);
+    if (err < 0) 
+      throw std::runtime_error("Could not close group.");
+
+  } catch (const std::exception& err) {
+    std::ostringstream msg;
+    msg << "Error occurred while creating dataset '"
+	<< parent << "/" << name << "':\n"
+	<< err.what();
+    throw std::runtime_error(msg.str());
+  } catch (...) {
+    std::ostringstream msg;
+    msg << "Unknown error occurred while writing dataset '" << name << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+
+  PYLITH_METHOD_END;
+} // writeDataset
+
+
+// ----------------------------------------------------------------------
+// Read dataset comprised of an array of strings.
+pylith::string_vector
+pylith::meshio::HDF5::readDataset(const char* parent,
+				  const char* name)
+{ // readDataset
+  PYLITH_METHOD_BEGIN;
+
+  assert(_file);
+  assert(parent);
+  assert(name);
+
+  pylith::string_vector data;
+  char** ptrarray = 0;
+  try {
+    // Open group
+#if defined(PYLITH_HDF5_USE_API_18)
+    hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
+#else
+    hid_t group = H5Gopen(_file, parent);
+#endif
+    if (group < 0)
+      throw std::runtime_error("Could not open group.");
+    
+    // Open the dataset
+#if defined(PYLITH_HDF5_USE_API_18)
+    hid_t dataset = H5Dopen2(group, name, H5P_DEFAULT);
+#else
+    hid_t dataset = H5Dopen(group, name);
+#endif
+    if (dataset < 0)
+      throw std::runtime_error("Could not open dataset.");
+
+    // Get the dataspace
+    hid_t dataspace = H5Dget_space(dataset);
+    if (dataspace < 0)
+      throw std::runtime_error("Could not get dataspace.");
+    const int ndims = H5Sget_simple_extent_ndims(dataspace);
+    if (ndims != 1)
+      throw std::runtime_error("Expected 1 dimension for string dataset.");
+    hsize_t dims[1];
+    H5Sget_simple_extent_dims(dataspace, dims, NULL);
+    const int nstrings = dims[0];
+    if (nstrings <= 0)
+      throw std::runtime_error("Zero size for dataset.");
+    ptrarray = (nstrings > 0) ? new char*[nstrings] : 0;
+    
+    // Create the datatype
+    hid_t datatype = H5Tcopy(H5T_C_S1);
+    if (datatype < 0) 
+      throw std::runtime_error("Could not create datatype.");
+    herr_t err = H5Tset_size(datatype, H5T_VARIABLE);
+    if (err < 0) 
+      throw std::runtime_error("Could not set size of datatype.");
+
+    // Read the dataset
+    err = H5Dread(dataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, ptrarray);
+
+    data.resize(nstrings);
+    for (int i=0; i < nstrings; ++i) {
+      data[i] = ptrarray[i];
+    } // for
+    
+    err = H5Dvlen_reclaim(datatype, dataspace, H5P_DEFAULT, ptrarray);
+    delete[] ptrarray; ptrarray = 0;
+
+    err = H5Dclose(dataset);
+    if (err < 0)
+      throw std::runtime_error("Could not close dataset.");
+
+    err = H5Tclose(datatype);
+    if (err < 0) 
+      throw std::runtime_error("Could not close datatype.");
+
+    err = H5Sclose(dataspace);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataspace.");
+
+    err = H5Gclose(group);
+    if (err < 0) 
+      throw std::runtime_error("Could not close group.");
+
+  } catch (const std::exception& err) {
+    delete[] ptrarray; ptrarray = 0;
+
+    std::ostringstream msg;
+    msg << "Error occurred while creating dataset '"
+	<< parent << "/" << name << "':\n"
+	<< err.what();
+    throw std::runtime_error(msg.str());
+  } catch (...) {
+    delete[] ptrarray; ptrarray = 0;
+
+    std::ostringstream msg;
+    msg << "Unknown error occurred while writing dataset '" << name << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+
+  PYLITH_METHOD_RETURN(data);
+} // readDataset
+
 
 // End of file
diff --git a/libsrc/pylith/meshio/HDF5.hh b/libsrc/pylith/meshio/HDF5.hh
index d5d38f8..1d2a8b9 100644
--- a/libsrc/pylith/meshio/HDF5.hh
+++ b/libsrc/pylith/meshio/HDF5.hh
@@ -136,18 +136,6 @@ public :
 		      const void* value,
 		      hid_t datatype);
 
-  /** Read scalar attribute.
-   *
-   * @param parent Full path of parent dataset for attribute.
-   * @param name Name of attribute.
-   * @param datatype Datatype of scalar.
-   * @param value Attribute value.
-   */
-  void readAttribute(const char* parent,
-		     const char* name,
-		     void* value,
-		     hid_t datatype);
-
   /** Set string attribute.
    *
    * @param parent Full path of parent dataset for attribute.
@@ -172,11 +160,23 @@ public :
 		      const char* name,
 		      const char* value);
 
+  /** Read scalar attribute.
+   *
+   * @param parent Full path of parent dataset for attribute.
+   * @param name Name of attribute.
+   * @param datatype Datatype of scalar.
+   * @param value Attribute value.
+   */
+  void readAttribute(const char* parent,
+		     const char* name,
+		     void* value,
+		     hid_t datatype);
+
   /** Read string attribute.
    *
    * @param parent Full path of parent dataset for attribute.
    * @param name Name of attribute.
-   * @param value String value
+   * @returns value String value
    */
   std::string readAttribute(const char* parent,
 			    const char* name);
@@ -267,6 +267,43 @@ public :
 				const hsize_t* dims,
 				const int ndims);
   
+  /** Write dataset comprised of an array of strings.
+   *
+   * @param parent Full path of parent group for dataset.
+   * @param name Name of dataset.
+   * @param sarray Array of null terminated C strings.
+   * @param nstrings Size of array.
+   */
+  void writeDataset(const char* parent,
+		    const char* name,
+		    const char* const* sarray,
+		    const int nstrings);
+
+  /** Write dataset comprised of an array of strings (used with
+   * external handle to HDF5 file, such as PetscHDF5Viewer).
+   *
+   * @param h5 HDF5 file.
+   * @param parent Full path of parent group for dataset.
+   * @param name Name of dataset.
+   * @param sarray Array of null terminated C strings.
+   * @param nstrings Size of array.
+   */
+  static
+  void writeDataset(hid_t h5,
+		    const char* parent,
+		    const char* name,
+		    const char* const* sarray,
+		    const int nstrings);
+
+  /** Read dataset comprised of an array of strings.
+   *
+   * @param parent Full path of parent group for dataset.
+   * @param name Name of dataset.
+   * @returns Array of string.
+   */
+  pylith::string_vector readDataset(const char* parent,
+				    const char* name);
+
 // PRIVATE MEMBERS ------------------------------------------------------
 private :
 
diff --git a/libsrc/pylith/meshio/Xdmf.cc b/libsrc/pylith/meshio/Xdmf.cc
index 7f70868..081583a 100644
--- a/libsrc/pylith/meshio/Xdmf.cc
+++ b/libsrc/pylith/meshio/Xdmf.cc
@@ -90,8 +90,7 @@ pylith::meshio::Xdmf::write(const char* filenameXdmf,
   numCorners = dims[1];
   delete[] dims; dims = 0;
   int cellDim = 0;
-  h5.readAttribute("/topology/cells", "cell_dim", (void*)&cellDim, 
-		  H5T_NATIVE_INT);
+  h5.readAttribute("/topology/cells", "cell_dim", (void*)&cellDim, H5T_NATIVE_INT);
   if (0 == cellDim && 1 == numCorners)
     cellType = "Polyvertex";
   else if (1 == cellDim && 2 == numCorners)
@@ -286,8 +285,7 @@ pylith::meshio::Xdmf::_getFieldMetadata(std::vector<FieldMetadata>* metadata,
       h5.getDatasetDims(&dims, &ndims, parent, fieldNames[i].c_str());
       std::string fullName = 
 	std::string(parent) + std::string("/") + fieldNames[i];
-      const std::string& vectorFieldString = 
-	h5.readAttribute(fullName.c_str(), "vector_field_type");
+      const std::string& vectorFieldString = h5.readAttribute(fullName.c_str(), "vector_field_type");
       switch(topology::FieldBase::parseVectorFieldString(vectorFieldString.c_str())) {
       case topology::FieldBase::SCALAR :
 	(*metadata)[iOffset+i].vectorFieldType = "Scalar";
@@ -329,8 +327,7 @@ pylith::meshio::Xdmf::_getFieldMetadata(std::vector<FieldMetadata>* metadata,
       h5.getDatasetDims(&dims, &ndims, parent, fieldNames[i].c_str());
       std::string fullName =
 	std::string(parent) + std::string("/") + fieldNames[i];
-      const std::string& vectorFieldString = 
-	h5.readAttribute(fullName.c_str(), "vector_field_type");
+      const std::string& vectorFieldString = h5.readAttribute(fullName.c_str(), "vector_field_type");
       switch(topology::FieldBase::parseVectorFieldString(vectorFieldString.c_str())) {
       case topology::FieldBase::SCALAR :
 	(*metadata)[iOffset+i].vectorFieldType = "Scalar";
diff --git a/unittests/libtests/meshio/TestHDF5.cc b/unittests/libtests/meshio/TestHDF5.cc
index b3e5801..7a64548 100644
--- a/unittests/libtests/meshio/TestHDF5.cc
+++ b/unittests/libtests/meshio/TestHDF5.cc
@@ -240,31 +240,6 @@ pylith::meshio::TestHDF5::testAttributeScalar(void)
 } // testAttributeScalar
 
 // ----------------------------------------------------------------------
-// Test writeAttribute(string) and readAttribute(string).
-void
-pylith::meshio::TestHDF5::testAttributeString(void)
-{ // testAttributeString
-  PYLITH_METHOD_BEGIN;
-
-  HDF5 h5("test.h5", H5F_ACC_TRUNC);
-
-  const hsize_t ndims = 1;
-  const hsize_t dims[ndims] = { 2 };
-  h5.createDataset("/", "data", dims, dims, ndims, H5T_NATIVE_INT);
-
-  const std::string valueE = "abcd";
-  h5.writeAttribute("/data", "mystring", valueE.c_str());
-  h5.close();
-
-  h5.open("test.h5", H5F_ACC_RDONLY);
-  std::string value = h5.readAttribute("/data", "mystring");
-  CPPUNIT_ASSERT_EQUAL(valueE, value);
-  h5.close();
-
-  PYLITH_METHOD_END;
-} // testAttributeString
-
-// ----------------------------------------------------------------------
 // Test createDataset().
 void
 pylith::meshio::TestHDF5::testCreateDataset(void)
@@ -431,5 +406,60 @@ pylith::meshio::TestHDF5::testDatasetRawExternal(void)
   PYLITH_METHOD_END;
 } // testDatasetRawExternal
 
+// ----------------------------------------------------------------------
+// Test writeAttribute(string) and readAttribute(string).
+void
+pylith::meshio::TestHDF5::testAttributeString(void)
+{ // testAttributeString
+  PYLITH_METHOD_BEGIN;
+
+  HDF5 h5("test.h5", H5F_ACC_TRUNC);
+
+  const hsize_t ndims = 1;
+  const hsize_t dims[ndims] = { 2 };
+  h5.createDataset("/", "data", dims, dims, ndims, H5T_NATIVE_INT);
+
+  const std::string valueE = "abcd";
+  h5.writeAttribute("/data", "mystring", valueE.c_str());
+  h5.close();
+
+  h5.open("test.h5", H5F_ACC_RDONLY);
+  std::string value = h5.readAttribute("/data", "mystring");
+  CPPUNIT_ASSERT_EQUAL(valueE, value);
+  h5.close();
+
+  PYLITH_METHOD_END;
+} // testAttributeString
+
+// ----------------------------------------------------------------------
+// Test writeDataset(string) and readDataset(string).
+void
+pylith::meshio::TestHDF5::testDatasetString(void)
+{ // testDatasetString
+  PYLITH_METHOD_BEGIN;
+
+  HDF5 h5("test.h5", H5F_ACC_TRUNC);
+
+  const hsize_t ndims = 1;
+  const size_t nstrings = 3;
+  const hsize_t dims[ndims] = { nstrings };
+  const char* dataE[nstrings] = {"abc", "defg", "hijkl" };
+
+  HDF5::writeDataset(h5._file, "/", "data", dataE, nstrings);
+  h5.close();
+
+  h5.open("test.h5", H5F_ACC_RDONLY);
+  const pylith::string_vector& data = h5.readDataset("/", "data");
+  h5.close();
+
+  CPPUNIT_ASSERT_EQUAL(nstrings, data.size());
+  for (int i=0; i < nstrings; ++i) {
+    const std::string& stringE = dataE[i];
+    CPPUNIT_ASSERT_EQUAL(stringE, data[i]);
+  } // for
+
+  PYLITH_METHOD_END;
+} // testDatasetString
+
 
 // End of file 
diff --git a/unittests/libtests/meshio/TestHDF5.hh b/unittests/libtests/meshio/TestHDF5.hh
index 8fb6331..a00c21e 100644
--- a/unittests/libtests/meshio/TestHDF5.hh
+++ b/unittests/libtests/meshio/TestHDF5.hh
@@ -51,11 +51,13 @@ class pylith::meshio::TestHDF5 : public CppUnit::TestFixture
   CPPUNIT_TEST( testGetGroupDatasets );
   CPPUNIT_TEST( testCreateGroup );
   CPPUNIT_TEST( testAttributeScalar );
-  CPPUNIT_TEST( testAttributeString );
   CPPUNIT_TEST( testCreateDataset );
   CPPUNIT_TEST( testDatasetChunk );
   CPPUNIT_TEST( testDatasetRawExternal );
 
+  CPPUNIT_TEST( testAttributeString );
+  CPPUNIT_TEST( testDatasetString );
+
   CPPUNIT_TEST_SUITE_END();
 
   // PUBLIC METHODS /////////////////////////////////////////////////////
@@ -85,9 +87,6 @@ public :
   /// Test writeAttribute(scalar) and readAttribute(scalar).
   void testAttributeScalar(void);
 
-  /// Test writeAttribute(string) and readAttribute(string).
-  void testAttributeString(void);
-
   /// Test createDataset().
   void testCreateDataset(void);
 
@@ -97,6 +96,12 @@ public :
   /// Test createDatasetRawExternal() and updateDatasetRawExternal().
   void testDatasetRawExternal(void);
 
+  /// Test writeAttribute(string) and readAttribute(string).
+  void testAttributeString(void);
+
+  /// Test writeDataset(string) and readDataset(string).
+  void testDatasetString(void);
+
 }; // class TestHDF5
 
 #endif // pylith_meshio_testhdf5_hh



More information about the CIG-COMMITS mailing list