[cig-commits] r17291 - in short/3D/PyLith/trunk: libsrc/meshio unittests/libtests/meshio

brad at geodynamics.org
Sun Oct 17 17:58:09 PDT 2010


Author: brad
Date: 2010-10-17 17:58:09 -0700 (Sun, 17 Oct 2010)
New Revision: 17291

Added:
   short/3D/PyLith/trunk/unittests/libtests/meshio/TestHDF5.cc
   short/3D/PyLith/trunk/unittests/libtests/meshio/TestHDF5.hh
Modified:
   short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc
   short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh
   short/3D/PyLith/trunk/libsrc/meshio/meshiofwd.hh
   short/3D/PyLith/trunk/unittests/libtests/meshio/Makefile.am
Log:
Started work on C++ unit tests for HDF5 object.
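
For reviewers, a minimal usage sketch of the reworked interface, inferred from the changes and the new unit tests below; the file name, group, dataset, and attribute names/values are illustrative, not part of the commit:

#include "pylith/meshio/HDF5.hh" // wrapper modified below (pulls in hdf5.h)
#include <string>

int
main(void)
{ // main
  pylith::meshio::HDF5 h5; // default constructor leaves the file closed (_file == -1)

  // Create a new file, add a group and a small dataset.
  h5.open("example.h5", H5F_ACC_TRUNC, true);
  h5.createGroup("/topology");

  const hsize_t ndims = 1;
  const hsize_t dims[ndims] = { 3 };
  h5.createDataset("/topology", "cells", dims, ndims, H5T_NATIVE_INT);

  // Attributes are addressed by the full path of their parent dataset.
  const double scale = 2.5;
  h5.writeAttribute("/topology/cells", "scale", (void*)&scale, H5T_NATIVE_DOUBLE);
  h5.writeAttribute("/topology/cells", "units", "meters");
  h5.close();

  // Reopen read-only and read the attributes back.
  h5.open("example.h5", H5F_ACC_RDONLY);
  double scale2 = 0.0;
  h5.readAttribute("/topology/cells", "scale", (void*)&scale2, H5T_NATIVE_DOUBLE);
  std::string units = h5.readAttribute("/topology/cells", "units");
  h5.close();

  return 0;
} // main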

Modified: short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc	2010-10-17 03:29:17 UTC (rev 17290)
+++ short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc	2010-10-18 00:58:09 UTC (rev 17291)
@@ -23,106 +23,311 @@
 #include <cassert> // USES assert()
 
 // ----------------------------------------------------------------------
-// Constructor
+// Default constructor.
+pylith::meshio::HDF5::HDF5(void) :
+  _file(-1)
+{ // constructor
+} // constructor
+
+// ----------------------------------------------------------------------
+// Constructor with filename and mode.
 pylith::meshio::HDF5::HDF5(const char* filename,
-			   hid_t mode)
+			   hid_t mode,
+			   const bool create)
 { // constructor
-  _file = H5Fopen(filename, mode, H5P_DEFAULT);
-  if (_file < 0) {
-    std::ostringstream msg;
-    msg << "Could not open HDF5 file '" << filename << "'.";
-    throw std::runtime_error(msg.str());
-  } // if
+  if (create) {
+    _file = H5Fcreate(filename, mode, H5P_DEFAULT, H5P_DEFAULT);
+    if (_file < 0) {
+      std::ostringstream msg;
+      msg << "Could not create HDF5 file '" << filename << "'.";
+      throw std::runtime_error(msg.str());
+    } // if
+    
+  } else {
+    _file = H5Fopen(filename, mode, H5P_DEFAULT);
+    if (_file < 0) {
+      std::ostringstream msg;
+      msg << "Could not open existing HDF5 file '" << filename << "'.";
+      throw std::runtime_error(msg.str());
+    } // if
+  } // if/else
 } // constructor
 
 // ----------------------------------------------------------------------
 // Destructor
 pylith::meshio::HDF5::~HDF5(void)
 { // destructor
-  H5Fclose(_file);
+  close();
 } // destructor
 
 // ----------------------------------------------------------------------
+// Open HDF5 file.
+void
+pylith::meshio::HDF5::open(const char* filename,
+			   hid_t mode,
+			   const bool create)
+{ // open
+  if (_file >= 0) {
+    throw std::runtime_error("HDF5 file already open.");
+  } // if
+
+  if (create) {
+    _file = H5Fcreate(filename, mode, H5P_DEFAULT, H5P_DEFAULT);
+    if (_file < 0) {
+      std::ostringstream msg;
+      msg << "Could not create HDF5 file '" << filename << "'.";
+      throw std::runtime_error(msg.str());
+    } // if
+    
+  } else {
+    _file = H5Fopen(filename, mode, H5P_DEFAULT);
+    if (_file < 0) {
+      std::ostringstream msg;
+      msg << "Could not open existing HDF5 file '" << filename << "'.";
+      throw std::runtime_error(msg.str());
+    } // if
+  } // if/else
+} // open
+
+// ----------------------------------------------------------------------
+// Close HDF5 file.
+void
+pylith::meshio::HDF5::close(void)
+{ // close
+  if (_file >= 0) {
+    herr_t err = H5Fclose(_file);
+    if (err < 0) 
+      throw std::runtime_error("Could not close HDF5 file.");
+  } // if
+  _file = -1;
+} // close
+
+// ----------------------------------------------------------------------
 // Create group.
-hid_t
+void
 pylith::meshio::HDF5::createGroup(const char* name)
 { // createGroup
   hid_t group = H5Gcreate(_file, name, 0, H5P_DEFAULT, H5P_DEFAULT);
   if (group < 0) {
     std::ostringstream msg;
-    msg << "Coule not create group '" << name << "'.";
+    msg << "Could not create group '" << name << "'.";
     throw std::runtime_error(msg.str());
   } // if
 
-  return group;
+  herr_t err = H5Gclose(group);
+  if (err < 0) {
+    std::ostringstream msg;
+    msg << "Could not close group '" << name << "'.";
+    throw std::runtime_error(msg.str());
+  } // if
 } // createGroup
 
 // ----------------------------------------------------------------------
 // Write scalar attribute.
 void
-pylith::meshio::HDF5::writeAttribute(hid_t parent,
+pylith::meshio::HDF5::writeAttribute(const char* parent,
 				     const char* name,
 				     const void* value,
 				     hid_t datatype)
 { // writeAttribute
   try {
     hid_t dataspace = H5Screate(H5S_SCALAR);
-    if (dataspace < 0) {
+    if (dataspace < 0)
       throw std::runtime_error("Could not create dataspace for");
-    } // if
-    hid_t attribute = H5Acreate(parent, name,
+
+    hid_t dataset = H5Dopen(_file, parent, H5P_DEFAULT);
+    if (dataset < 0)
+      throw std::runtime_error("Could not open parent dataset for");
+
+    hid_t attribute = H5Acreate(dataset, name,
 				datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
-    if (attribute < 0) {
+    if (attribute < 0)
       throw std::runtime_error("Could not create");
-    } // if
+
     hid_t err = H5Awrite(attribute, datatype, value);
-    if (err < 0) {
+    if (err < 0)
       throw std::runtime_error("Could not write");
-    } // if
+
     err = H5Aclose(attribute);
-    if (err < 0) {
+    if (err < 0) 
       throw std::runtime_error("Could not close");
-    } // if
+
+    err = H5Dclose(dataset);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataset for");
+
+    err = H5Sclose(dataspace);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataspace for");
+
   } catch (std::exception& err) {
     std::ostringstream msg;
-    msg << err.what() << " attribute '" << name << "'.";
+    msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch
 } // writeAttribute
 
 // ----------------------------------------------------------------------
+// Read scalar attribute.
+void
+pylith::meshio::HDF5::readAttribute(const char* parent,
+				    const char* name,
+				    void* value,
+				    hid_t datatype)
+{ // readAttribute
+  try {
+    hid_t dataset = H5Dopen(_file, parent, H5P_DEFAULT);
+    if (dataset < 0)
+      throw std::runtime_error("Could not open parent dataset for");
+
+    hid_t attribute = H5Aopen(dataset, name, H5P_DEFAULT);
+    if (attribute < 0)
+      throw std::runtime_error("Could not open");
+
+    hid_t dtype = H5Aget_type(attribute);
+    if (dtype < 0)
+      throw std::runtime_error("Could not get datatype of");
+
+    if (H5Tequal(dtype, datatype) <= 0)
+      throw std::runtime_error("Wrong datatype specified for");
+
+    hid_t err = H5Aread(attribute, dtype, value);
+    if (err < 0)
+      throw std::runtime_error("Could not read");
+
+    err = H5Tclose(dtype);
+    if (err < 0) 
+      throw std::runtime_error("Could not close datatype for");
+
+    err = H5Aclose(attribute);
+    if (err < 0) 
+      throw std::runtime_error("Could not close");
+
+    err = H5Dclose(dataset);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataset for");
+
+  } catch (std::exception& err) {
+    std::ostringstream msg;
+    msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+} // readAttribute
+
+// ----------------------------------------------------------------------
 // Write string attribute.
 void
-pylith::meshio::HDF5::writeAttribute(hid_t parent,
+pylith::meshio::HDF5::writeAttribute(const char* parent,
 				     const char* name,
 				     const char* value)
 { // writeAttribute
   try {
     hid_t dataspace = H5Screate(H5S_SCALAR);
-    if (dataspace < 0) {
+    if (dataspace < 0) 
       throw std::runtime_error("Could not create dataspace for");
-    } // if
-    hid_t attribute = H5Acreate(parent, name,
-				H5T_C_S1, dataspace, H5P_DEFAULT, H5P_DEFAULT);
-    if (attribute < 0) {
+
+    hid_t dataset = H5Dopen(_file, parent, H5P_DEFAULT);
+    if (dataset < 0) 
+      throw std::runtime_error("Could not open parent dataset for");
+
+    hid_t datatype = H5Tcopy(H5T_C_S1);
+    if (datatype < 0) 
+      throw std::runtime_error("Could not create datatype for");
+
+    herr_t err = H5Tset_size(datatype, strlen(value)+1);
+    if (err < 0) 
+      throw std::runtime_error("Could not set size of");
+
+    hid_t attribute = H5Acreate(dataset, name,
+				datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+    if (attribute < 0) 
       throw std::runtime_error("Could not create");
-    } // if
-    hid_t err = H5Awrite(attribute, H5T_C_S1, value);
-    if (err < 0) {
+
+    err = H5Awrite(attribute, datatype, value);
+    if (err < 0) 
       throw std::runtime_error("Could not write");
-    } // if
+
     err = H5Aclose(attribute);
-    if (err < 0) {
+    if (err < 0) 
       throw std::runtime_error("Could not close");
-    } // if
+
+    err = H5Tclose(datatype);
+    if (err < 0) 
+      throw std::runtime_error("Could not close datatype for");
+
+    err = H5Dclose(dataset);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataset for");
+
+    err = H5Sclose(dataspace);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataspace for");
+
   } catch (std::exception& err) {
     std::ostringstream msg;
-    msg << err.what() << " attribute '" << name << "'.";
+    msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch
 } // writeAttribute
 
 // ----------------------------------------------------------------------
+// Read string attribute.
+std::string
+pylith::meshio::HDF5::readAttribute(const char* parent,
+				    const char* name)
+{ // readAttribute
+  std::string value;
+
+  try {
+    hid_t dataset = H5Dopen(_file, parent, H5P_DEFAULT);
+    if (dataset < 0)
+      throw std::runtime_error("Could not open parent dataset for");
+
+    hid_t attribute = H5Aopen(dataset, name, H5P_DEFAULT);
+    if (attribute < 0)
+      throw std::runtime_error("Could not open");
+
+    hid_t datatype = H5Aget_type(attribute);
+    if (datatype < 0)
+      throw std::runtime_error("Could not get datatype of");
+
+    // :TODO: Check that datatype is a string
+
+    const int len = H5Tget_size(datatype);
+    if (len <= 0)
+      throw std::runtime_error("Nonpositive size for datatype of");
+
+    char* buffer = (len > 0) ? new char[len] : 0;
+
+    hid_t err = H5Aread(attribute, datatype, (void*)buffer);
+    value = buffer;
+    delete[] buffer; buffer = 0;
+
+    if (err < 0)
+      throw std::runtime_error("Could not read");
+
+    err = H5Tclose(datatype);
+    if (err < 0) 
+      throw std::runtime_error("Could not close datatype for");
+
+    err = H5Aclose(attribute);
+    if (err < 0) 
+      throw std::runtime_error("Could not close");
+
+    err = H5Dclose(dataset);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataset for");
+
+  } catch (std::exception& err) {
+    std::ostringstream msg;
+    msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+
+  return value;
+} // readAttribute
+
+// ----------------------------------------------------------------------
 // Create dataset.
 void
 pylith::meshio::HDF5::createDataset(const char* parent,
@@ -132,6 +337,11 @@
 				    hid_t datatype)
 { // createDataset
   try {
+    // Open group
+    hid_t group = H5Gopen(_file, parent, H5P_DEFAULT);
+    if (group < 0) 
+      throw std::runtime_error("Could not open group.");
+
     // Create the dataspace
     hid_t dataspace = H5Screate_simple(ndims, dims, 0);
     if (dataspace < 0)
@@ -142,24 +352,35 @@
     if (property < 0)
       throw std::runtime_error("Could not create property for dataset.");
 
-    H5Pset_chunk(property, ndims, dims);
+    herr_t err = H5Pset_chunk(property, ndims, dims);
+    if (err < 0)
+      throw std::runtime_error("Could not set chunk.");
+
+    // Set gzip compression level for chunk.
     //H5Pset_deflate(property, 6);
 
-    std::string fullname = 
-      std::string(parent) + std::string("/") + std::string(name);
-#if defined(OLD_H5INTERFACE)
-    hid_t dataset = H5Dcreate(_file, fullname.c_str(),
-			      datatype, dataspace, property);
-#else
-    hid_t dataset = H5Dcreate(_file, fullname.c_str(),
+    hid_t dataset = H5Dcreate(group, name,
 			      datatype, dataspace, H5P_DEFAULT,
 			      property, H5P_DEFAULT);
-#endif
-    if (dataset < 0)
+    if (dataset < 0) 
       throw std::runtime_error("Could not create dataset.");
-    H5Pclose(property);
-    H5Sclose(dataspace);
-    H5Dclose(dataset);
+
+    err = H5Dclose(dataset);
+    if (err < 0)
+      throw std::runtime_error("Could not close dataset.");
+
+    err = H5Pclose(property);
+    if (err < 0) 
+      throw std::runtime_error("Could not close property.");
+
+    err = H5Sclose(dataspace);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataspace.");
+
+    err = H5Gclose(group);
+    if (err < 0) 
+      throw std::runtime_error("Could not close group.");
+
   } catch (const std::exception& err) {
     std::ostringstream msg;
     msg << "Error occurred while creating dataset '"
@@ -168,8 +389,7 @@
     throw std::runtime_error(msg.str());
   } catch (...) {
     std::ostringstream msg;
-    msg << "Unknown  occurred while creating dataset '"
-	<< parent << "/" << name << "'.";
+    msg << "Unknown  occurred while creating dataset '" << name << "'.";
     throw std::runtime_error(msg.str());
   } // try/catch
 } // createDataset
@@ -240,6 +460,15 @@
 		   H5P_DEFAULT, data);
     if (err < 0)
       throw std::runtime_error("Could not write data.");
+
+    err = H5Sclose(chunkspace);
+    if (err < 0)
+      throw std::runtime_error("Could not close chunk dataspace.");
+
+    err = H5Sclose(dataspace);
+    if (err < 0)
+      throw std::runtime_error("Could not close dataspace.");
+
     err = H5Dclose(dataset);
     if (err < 0)
       throw std::runtime_error("Could not close dataset.");
@@ -247,6 +476,7 @@
     err = H5Gclose(group);
     if (err < 0)
       throw std::runtime_error("Could not close group.");
+
   } catch (const std::exception& err) {
     std::ostringstream msg;
     msg << "Error occurred while writing dataset '"

Modified: short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh	2010-10-17 03:29:17 UTC (rev 17290)
+++ short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh	2010-10-18 00:58:09 UTC (rev 17291)
@@ -19,67 +19,105 @@
 #if !defined(pylith_meshio_hdf5_hh)
 #define pylith_meshio_hdf5_hh
 
-namespace pylith {
-  namespace meshio {
-    class HDF5;
-  } // meshio
-} // pylith
+// Include directives ---------------------------------------------------
+#include "meshiofwd.hh" // forward declarations
 
 extern "C" {
-#include "hdf5.h" // USES hdf5
+#include "hdf5.h" // USES hid_t
 }
 
+#include <string> // USES std::string in readAttribute()
+
+// HDF5 -----------------------------------------------------------------
+/// High-level interface for HDF5 operations.
 class pylith::meshio::HDF5
 { // HDF5
-  
+  friend class TestHDF5; // Unit testing
+
 // PUBLIC METHODS -------------------------------------------------------
 public :
 
-  /** Constructor.
+  /// Default constructor.
+  HDF5(void);
+
+  /** Constructor with filename and mode.
    *
    * @param filename Name of HDF5 file
    * @param mode Mode for HDF5 file
+   * @param create If true, create HDF5 file.
    */
   HDF5(const char* filename,
-       hid_t mode);
+       hid_t mode,
+       const bool create =false);
 
   /// Destructor
   ~HDF5(void);
 
+  /** Open HDF5 file.
+   *
+   * @param filename Name of HDF5 file
+   * @param mode Mode for HDF5 file
+   * @param create If true, create HDF5 file.
+   */
+  void open(const char* filename,
+	    hid_t mode,
+	    const bool create =false);
+
+  /// Close HDF5 file.
+  void close(void);
+
   /** Create group.
    *
-   * Create group and leave group open for further operations.
+   * Create group and close it.
    *
    * @param name Name of group (with absolute path).
-   * @returns HDF5 group
    */
-  hid_t createGroup(const char* name);
+  void createGroup(const char* name);
 
   /** Set scalar attribute.
    *
-   * @param parent Parent of attribute.
+   * @param parent Full path of parent dataset for attribute.
    * @param name Name of attribute.
    * @param value Attribute value.
    * @param datatype Datatype of scalar.
    */
-  void writeAttribute(hid_t parent,
+  void writeAttribute(const char* parent,
 		      const char* name,
 		      const void* value,
 		      hid_t datatype);
 
+  /** Read scalar attribute.
+   *
+   * @param parent Full path of parent dataset for attribute.
+   * @param name Name of attribute.
+   * @param value Attribute value.
+   * @param datatype Datatype of scalar.
+   */
+  void readAttribute(const char* parent,
+		     const char* name,
+		     void* value,
+		     hid_t datatype);
+
   /** Set string attribute.
    *
-   * @param parent Parent of attribute.
+   * @param parent Full path of parent dataset for attribute.
    * @param name Name of attribute.
    * @param value String value
    */
-  void writeAttribute(hid_t parent,
+  void writeAttribute(const char* parent,
 		      const char* name,
 		      const char* value);
 
+  /** Read string attribute.
+   *
+   * @param parent Full path of parent dataset for attribute.
+   * @param name Name of attribute.
+   * @returns String value of attribute.
+   */
+  std::string readAttribute(const char* parent,
+			    const char* name);
+
   /** Create dataset.
    *
-   * @param parent Full path for parent of dataset.
+   * @param parent Full path of parent group for dataset.
    * @param name Name of dataset.
    * @param dims Dimensions of data.
    * @param ndims Number of dimensions of data.
@@ -94,12 +132,13 @@
   /** Create dataset associated with data stored in a raw external
    * binary file.
    *
-   * @param parent Parent of dataset.
+   * @param parent Full path of parent group for dataset.
    * @param name Name of dataset.
    * @param filename Name of external raw data file.
    * @param dims Dimensions of data.
    * @param ndims Number of dimensions of data.
    * @param datatype Type of data.
    */
   void createDatasetRawExternal(const char* parent,
 				const char* name,
@@ -107,10 +146,10 @@
 				const hsize_t* dims,
 				const hsize_t ndims,
 				hid_t datatype);
-
+  
   /** Append slice to dataset.
    *
-   * @param parent Parent of dataset.
+   * @param parent Full path of parent group for dataset.
    * @param name Name of dataset.
    * @param data Data.
    * @param dims Dimensions of data.

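One note on the new open() signature documented above: the mode flag has to match the create flag, since H5Fcreate() accepts H5F_ACC_TRUNC or H5F_ACC_EXCL while H5Fopen() accepts H5F_ACC_RDONLY or H5F_ACC_RDWR. A short illustrative sketch (the file name is hypothetical):

pylith::meshio::HDF5 h5;

h5.open("output.h5", H5F_ACC_TRUNC, true); // create, overwriting any existing file
h5.close();

h5.open("output.h5", H5F_ACC_RDWR);        // reopen an existing file for read/write
h5.close();

h5.open("output.h5", H5F_ACC_RDONLY);      // reopen an existing file read-only
h5.close();
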
Modified: short/3D/PyLith/trunk/libsrc/meshio/meshiofwd.hh
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/meshiofwd.hh	2010-10-17 03:29:17 UTC (rev 17290)
+++ short/3D/PyLith/trunk/libsrc/meshio/meshiofwd.hh	2010-10-18 00:58:09 UTC (rev 17291)
@@ -58,6 +58,8 @@
 
     class UCDFaultFile;
 
+    class HDF5;
+
   } // meshio
 } // pylith
 

Modified: short/3D/PyLith/trunk/unittests/libtests/meshio/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/unittests/libtests/meshio/Makefile.am	2010-10-17 03:29:17 UTC (rev 17290)
+++ short/3D/PyLith/trunk/unittests/libtests/meshio/Makefile.am	2010-10-18 00:58:09 UTC (rev 17291)
@@ -215,6 +215,7 @@
 
 if ENABLE_HDF5
   testmeshio_SOURCES += \
+	TestHDF5.cc \
 	TestDataWriterHDF5.cc \
 	TestDataWriterHDF5Mesh.cc \
 	TestDataWriterHDF5MeshHex8.cc

Added: short/3D/PyLith/trunk/unittests/libtests/meshio/TestHDF5.cc
===================================================================
--- short/3D/PyLith/trunk/unittests/libtests/meshio/TestHDF5.cc	                        (rev 0)
+++ short/3D/PyLith/trunk/unittests/libtests/meshio/TestHDF5.cc	2010-10-18 00:58:09 UTC (rev 17291)
@@ -0,0 +1,155 @@
+// -*- C++ -*-
+//
+// ----------------------------------------------------------------------
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ----------------------------------------------------------------------
+//
+
+#include <portinfo>
+
+#include "TestHDF5.hh" // Implementation of class methods
+
+#include "pylith/meshio/HDF5.hh" // USES HDF5
+
+// ----------------------------------------------------------------------
+CPPUNIT_TEST_SUITE_REGISTRATION( pylith::meshio::TestHDF5 );
+
+// ----------------------------------------------------------------------
+// Test constructor.
+void
+pylith::meshio::TestHDF5::testConstructor(void)
+{ // testConstructor
+  HDF5 one;
+  CPPUNIT_ASSERT(-1 == one._file);
+
+  HDF5 two("test.h5", H5F_ACC_TRUNC, true);
+  CPPUNIT_ASSERT(two._file >= 0);
+  two.close();
+
+  HDF5 three("test.h5", H5F_ACC_RDONLY);
+  CPPUNIT_ASSERT(three._file >= 0);
+} // testConstructor
+
+// ----------------------------------------------------------------------
+// Test open() and close().
+void
+pylith::meshio::TestHDF5::testOpenClose(void)
+{ // testOpenClose
+  HDF5 h5;
+  CPPUNIT_ASSERT(-1 == h5._file);
+
+  h5.open("test.h5", H5F_ACC_TRUNC, true);
+  CPPUNIT_ASSERT(h5._file >= 0);
+  
+  h5.close();
+  CPPUNIT_ASSERT(-1 == h5._file);
+
+  h5.open("test.h5", H5F_ACC_RDONLY);
+  CPPUNIT_ASSERT(h5._file >= 0);
+} // testOpenClose
+
+// ----------------------------------------------------------------------
+// Test createGroup()
+void
+pylith::meshio::TestHDF5::testCreateGroup(void)
+{ // testCreateGroup
+  HDF5 h5("test.h5", H5F_ACC_TRUNC, true);
+
+  h5.createGroup("/mygroup");
+  h5.close();
+
+  h5.open("test.h5", H5F_ACC_RDONLY);
+  hid_t group = H5Gopen(h5._file, "/mygroup", H5P_DEFAULT);
+  CPPUNIT_ASSERT(group >= 0);
+  herr_t err = H5Gclose(group);
+  CPPUNIT_ASSERT(err >= 0);
+} // testCreateGroup
+
+// ----------------------------------------------------------------------
+// Test writeAttribute(scalar) and readAttribute(scalar).
+void
+pylith::meshio::TestHDF5::testAttributeScalar(void)
+{ // testAttributeScalar
+  HDF5 h5("test.h5", H5F_ACC_TRUNC, true);
+
+  const hsize_t ndims = 1;
+  const hsize_t dims[ndims] = { 2 };
+  h5.createDataset("/", "data", dims, ndims, H5T_NATIVE_INT);
+
+  const double scalarE = 2.5;
+  h5.writeAttribute("/data", "myscalar", (void*)&scalarE, H5T_NATIVE_DOUBLE);
+  h5.close();
+
+  const double tolerance = 1.0e-06;
+  h5.open("test.h5", H5F_ACC_RDONLY);
+  double scalar = 0;
+  h5.readAttribute("/data", "myscalar", (void*)&scalar, H5T_NATIVE_DOUBLE);
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(scalarE, scalar, tolerance);
+} // testAttributeScalar
+
+// ----------------------------------------------------------------------
+// Test writeAttribute(string) and readAttribute(string).
+void
+pylith::meshio::TestHDF5::testAttributeString(void)
+{ // testAttributeString
+  HDF5 h5("test.h5", H5F_ACC_TRUNC, true);
+
+  const hsize_t ndims = 1;
+  const hsize_t dims[ndims] = { 2 };
+  h5.createDataset("/", "data", dims, ndims, H5T_NATIVE_INT);
+
+  const std::string valueE = "abcd";
+  h5.writeAttribute("/data", "mystring", valueE.c_str());
+  h5.close();
+
+  h5.open("test.h5", H5F_ACC_RDONLY);
+  std::string value = h5.readAttribute("/data", "mystring");
+  CPPUNIT_ASSERT_EQUAL(valueE, value);
+} // testAttributeString
+
+// ----------------------------------------------------------------------
+// Test createDataset().
+void
+pylith::meshio::TestHDF5::testCreateDataset(void)
+{ // testCreateDataset
+  HDF5 h5("test.h5", H5F_ACC_TRUNC, true);
+
+  const hsize_t ndims = 1;
+  const hsize_t dims[ndims] = { 2 };
+  h5.createDataset("/", "data", dims, ndims, H5T_NATIVE_INT);
+  h5.close();
+
+  h5.open("test.h5", H5F_ACC_RDONLY);
+  hid_t group = H5Gopen(h5._file, "/", H5P_DEFAULT);
+  CPPUNIT_ASSERT(group >= 0);
+  hid_t dataset = H5Dopen(group, "data", H5P_DEFAULT);
+  CPPUNIT_ASSERT(dataset >= 0);
+} // testCreateDataset
+
+// ----------------------------------------------------------------------
+// Test createDatasetRawExternal.
+void
+pylith::meshio::TestHDF5::testCreateDatasetRawExternal(void)
+{ // testCreateDatasetRawExternal
+} // testCreateDatasetRawExternal
+
+// ----------------------------------------------------------------------
+// Test writeDatasetSlice.
+void
+pylith::meshio::TestHDF5::testWriteDatasetSlice(void)
+{ // testWriteDatasetSlice
+} // testWriteDatasetSlice
+
+
+// End of file 

Added: short/3D/PyLith/trunk/unittests/libtests/meshio/TestHDF5.hh
===================================================================
--- short/3D/PyLith/trunk/unittests/libtests/meshio/TestHDF5.hh	                        (rev 0)
+++ short/3D/PyLith/trunk/unittests/libtests/meshio/TestHDF5.hh	2010-10-18 00:58:09 UTC (rev 17291)
@@ -0,0 +1,88 @@
+// -*- C++ -*-
+//
+// ----------------------------------------------------------------------
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ----------------------------------------------------------------------
+//
+
+/**
+ * @file unittests/libtests/meshio/TestHDF5.hh
+ *
+ * @brief C++ TestHDF5 object
+ *
+ * C++ unit testing for HDF5.
+ */
+
+#if !defined(pylith_meshio_testhdf5_hh)
+#define pylith_meshio_testhdf5_hh
+
+#include <cppunit/extensions/HelperMacros.h>
+
+/// Namespace for pylith package
+namespace pylith {
+  namespace meshio {
+    class TestHDF5;
+  } // meshio
+} // pylith
+
+/// C++ unit testing for HDF5
+class pylith::meshio::TestHDF5 : public CppUnit::TestFixture
+{ // class TestHDF5
+
+  // CPPUNIT TEST SUITE /////////////////////////////////////////////////
+  CPPUNIT_TEST_SUITE( TestHDF5 );
+
+  CPPUNIT_TEST( testConstructor );
+  CPPUNIT_TEST( testOpenClose );
+  CPPUNIT_TEST( testCreateGroup );
+  CPPUNIT_TEST( testAttributeScalar );
+  CPPUNIT_TEST( testAttributeString );
+  CPPUNIT_TEST( testCreateDataset );
+  //CPPUNIT_TEST( testCreateDatasetRawExternal );
+  CPPUNIT_TEST( testWriteDatasetSlice );
+
+  CPPUNIT_TEST_SUITE_END();
+
+  // PUBLIC METHODS /////////////////////////////////////////////////////
+public :
+
+  /// Test constructor.
+  void testConstructor(void);
+
+  /// Test open() and close().
+  void testOpenClose(void);
+
+  /// Test createGroup()
+  void testCreateGroup(void);
+
+  /// Test writeAttribute(scalar) and readAttribute(scalar).
+  void testAttributeScalar(void);
+
+  /// Test writeAttribute(string) and readAttribute(string).
+  void testAttributeString(void);
+
+  /// Test createDataset().
+  void testCreateDataset(void);
+
+  /// Test createDatasetRawExternal().
+  void testCreateDatasetRawExternal(void);
+
+  /// Test writeDatasetSlice().
+  void testWriteDatasetSlice(void);
+
+}; // class TestHDF5
+
+#endif // pylith_meshio_testhdf5_hh
+
+// End of file 


