[cig-commits] r18487 - in short/3D/PyLith/trunk: . libsrc/pylith/meshio unittests/libtests/meshio/data

brad at geodynamics.org brad at geodynamics.org
Sat May 28 20:15:18 PDT 2011


Author: brad
Date: 2011-05-28 20:15:17 -0700 (Sat, 28 May 2011)
New Revision: 18487

Modified:
   short/3D/PyLith/trunk/TODO
   short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc
   short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5Ext.cc
   short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc
   short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh
   short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc
   short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh
   short/3D/PyLith/trunk/unittests/libtests/meshio/data/tri3_vertex.h5
Log:
More work on Xdmf.

Modified: short/3D/PyLith/trunk/TODO
===================================================================
--- short/3D/PyLith/trunk/TODO	2011-05-29 03:13:35 UTC (rev 18486)
+++ short/3D/PyLith/trunk/TODO	2011-05-29 03:15:17 UTC (rev 18487)
@@ -40,6 +40,8 @@
     Xdmf
 
   (2) Add creation of .xmf metadata file for ParaView/Visit.
+    Add cell_dim attribute to cells
+    Add cell_type attribute to cells
 
 * Field split.
 

Modified: short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc	2011-05-29 03:13:35 UTC (rev 18486)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc	2011-05-29 03:15:17 UTC (rev 18487)
@@ -18,6 +18,9 @@
 
 #include <portinfo>
 
+#include "HDF5.hh" // USES HDF5
+#include "Xdmf.hh" // USES Xdmf
+
 #include <cassert> // USES assert()
 #include <sstream> // USES std::ostringstream
 #include <stdexcept> // USES std::runtime_error
@@ -195,6 +198,15 @@
     err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
     err = VecDestroy(&elemVec); CHECK_PETSC_ERROR(err);
     delete[] tmpVertices; tmpVertices = 0;
+
+    if (!rank) {
+      hid_t h5 = -1;
+      err = PetscViewerHDF5GetFileId(_viewer, &h5); CHECK_PETSC_ERROR(err);
+      assert(h5 >= 0);
+      const int cellDim = mesh.dimension();
+      HDF5::writeAttribute(h5, "/topology/cells", "cell_dim", (void*)&cellDim,
+			   H5T_NATIVE_INT);
+    } // if
   } catch (const std::exception& err) {
     std::ostringstream msg;
     msg << "Error while opening HDF5 file "
@@ -206,7 +218,7 @@
 	<< _filename << ".\n";
     throw std::runtime_error(msg.str());
   } // try/catch
-} // openTimeStep
+} // open
 
 // ----------------------------------------------------------------------
 // Close output files.
@@ -221,12 +233,10 @@
   _timesteps.clear();
   _tstampIndex = 0;
 
-#if 0
   Xdmf metafile;
   const int indexExt = _filename.find(".h5");
   std::string xdmfFilename = std::string(_filename, 0, indexExt) + ".xmf";
-  metadata.write(xdmfFilename, _hdfFilename());
-#endif
+  metafile.write(xdmfFilename.c_str(), _hdf5Filename().c_str());
 } // close
 
 // ----------------------------------------------------------------------
@@ -278,8 +288,9 @@
       _timesteps[field.label()] += 1;
     const int istep = _timesteps[field.label()];
     // Add time stamp to "/vertex_fields/time" if necessary.
+    const int rank = sieveMesh->commRank();
     if (_tstampIndex == istep)
-      _writeTimeStamp(t, "/vertex_fields", sieveMesh->commRank());
+      _writeTimeStamp(t, "/vertex_fields", rank);
 
 #if 0 // debugging
     field.view("writeVertexField");
@@ -297,6 +308,15 @@
     err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
     err = VecSetBlockSize(vector, blockSize); CHECK_PETSC_ERROR(err);
 
+    if (!rank && 0 == istep) {
+      hid_t h5 = -1;
+      err = PetscViewerHDF5GetFileId(_viewer, &h5); CHECK_PETSC_ERROR(err);
+      assert(h5 >= 0);
+      const int vectorFieldType = field.vectorFieldType();
+      std::string fullName = std::string("/vertex_fields/") + field.label();
+      HDF5::writeAttribute(h5, fullName.c_str(), "vector_field_type",
+			   (void*)&vectorFieldType, H5T_NATIVE_INT);
+    } // if
   } catch (const std::exception& err) {
     std::ostringstream msg;
     msg << "Error while writing field '" << field.label() << "' at time " 

Modified: short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5Ext.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5Ext.cc	2011-05-29 03:13:35 UTC (rev 18486)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5Ext.cc	2011-05-29 03:15:17 UTC (rev 18487)
@@ -237,6 +237,9 @@
       dims[1] = numCorners;
       _h5->createDatasetRawExternal("/topology", "cells", filenameCells.c_str(),
 				    dims, ndims, H5T_IEEE_F64BE);
+      const int cellDim = mesh.dimension();
+      _h5->writeAttribute("/topology/cells", "cell_dim", (void*)&cellDim,
+			  H5T_NATIVE_INT);
     } // if
     
   } catch (const std::exception& err) {
@@ -361,6 +364,11 @@
 	_h5->createDatasetRawExternal("/vertex_fields", field.label(),
 				      _datasetFilename(field.label()).c_str(),
 				      dims, ndims, H5T_IEEE_F64BE);
+	const int vectorFieldType = field.vectorFieldType();
+	std::string fullName = std::string("/vertex_fields/") + field.label();
+	_h5->writeAttribute(fullName.c_str(), "vector_field_type",
+			    (void*)&vectorFieldType, H5T_NATIVE_INT);
+
       } else {
 	// Update number of time steps in external dataset info in HDF5 file.
 	const int totalNumTimeSteps = 

Modified: short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc	2011-05-29 03:13:35 UTC (rev 18486)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc	2011-05-29 03:15:17 UTC (rev 18487)
@@ -173,6 +173,7 @@
 { // getDatasetDims
   assert(dims);
   assert(ndims);
+  assert(isOpen());
 
   try {
     // Open group
@@ -231,8 +232,54 @@
 // Get names of datasets in group.
 void
 pylith::meshio::HDF5::getGroupDatasets(string_vector* names,
-				       const char* group)
+				       const char* parent)
 { // getGroupDatasets
+  assert(names);
+  assert(isOpen());
+
+  try {
+    // Open group
+#if defined(PYLITH_HDF5_USE_API_18)
+    hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
+#else
+    hid_t group = H5Gopen(_file, parent);
+#endif
+    if (group < 0)
+      throw std::runtime_error("Could not open group.");
+    
+    H5G_info_t ginfo;
+    herr_t err = H5Gget_info(group, &ginfo);
+    if (err < 0)
+      throw std::runtime_error("Could not get group info.");
+    const int gsize = ginfo.nlinks;
+    
+    names->resize(gsize-1);
+    for (int i=0, index=0; i < gsize; ++i) {
+      char buffer[256];
+      ssize_t namelen = 
+	H5Lget_name_by_idx(group, ".", H5_INDEX_NAME, H5_ITER_NATIVE,
+			   i, buffer, 256, H5P_DEFAULT);
+      if (0 == strcmp("time", buffer))
+	continue;
+      (*names)[index++] = buffer;
+    } // for
+    
+    err = H5Gclose(group);
+    if (err < 0)
+      throw std::runtime_error("Could not close group.");
+    
+  } catch (const std::exception& err) {
+    std::ostringstream msg;
+    msg << "Error occurred while getting names of datasets for group '" 
+	<< parent << "':\n"
+	<< err.what();
+    throw std::runtime_error(msg.str());
+  } catch (...) {
+    std::ostringstream msg;
+    msg << "Unknown error occurred while getting names of datasets for group '"
+	<< parent << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch  
 } // getGroupDatasets
 
 // ----------------------------------------------------------------------
@@ -320,6 +367,65 @@
 } // writeAttribute
 
 // ----------------------------------------------------------------------
+// Write scalar attribute (external HDF5 handle).
+void
+pylith::meshio::HDF5::writeAttribute(hid_t h5,
+				     const char* parent,
+				     const char* name,
+				     const void* value,
+				     hid_t datatype)
+{ // writeAttribute
+  assert(parent);
+  assert(name);
+  assert(value);
+
+  try {
+    hid_t dataspace = H5Screate(H5S_SCALAR);
+    if (dataspace < 0)
+      throw std::runtime_error("Could not create dataspace for");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+    hid_t dataset = H5Dopen2(h5, parent, H5P_DEFAULT);
+#else
+    hid_t dataset = H5Dopen(h5, parent);
+#endif
+    if (dataset < 0)
+      throw std::runtime_error("Could not open parent dataset for");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+    hid_t attribute = H5Acreate2(dataset, name,
+				datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+#else
+    hid_t attribute = H5Acreate(dataset, name,
+				datatype, dataspace, H5P_DEFAULT);
+#endif
+    if (attribute < 0)
+      throw std::runtime_error("Could not create");
+
+    hid_t err = H5Awrite(attribute, datatype, value);
+    if (err < 0)
+      throw std::runtime_error("Could not write");
+
+    err = H5Aclose(attribute);
+    if (err < 0) 
+      throw std::runtime_error("Could not close");
+
+    err = H5Dclose(dataset);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataset for");
+
+    err = H5Sclose(dataspace);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataspace for");
+
+  } catch (std::exception& err) {
+    std::ostringstream msg;
+    msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+} // writeAttribute
+
+// ----------------------------------------------------------------------
 // Read scalar attribute.
 void
 pylith::meshio::HDF5::readAttribute(const char* parent,
@@ -444,6 +550,76 @@
 } // writeAttribute
 
 // ----------------------------------------------------------------------
+// Write string attribute (external handle to HDF5 file).
+void
+pylith::meshio::HDF5::writeAttribute(hid_t h5,
+				     const char* parent,
+				     const char* name,
+				     const char* value)
+{ // writeAttribute
+  assert(parent);
+  assert(name);
+  assert(value);
+
+  try {
+    hid_t dataspace = H5Screate(H5S_SCALAR);
+    if (dataspace < 0) 
+      throw std::runtime_error("Could not create dataspace for");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+    hid_t dataset = H5Dopen2(h5, parent, H5P_DEFAULT);
+#else
+    hid_t dataset = H5Dopen(h5, parent);
+#endif
+    if (dataset < 0) 
+      throw std::runtime_error("Could not open parent dataset for");
+
+    hid_t datatype = H5Tcopy(H5T_C_S1);
+    if (datatype < 0) 
+      throw std::runtime_error("Could not create datatype for");
+
+    herr_t err = H5Tset_size(datatype, strlen(value)+1);
+    if (err < 0) 
+      throw std::runtime_error("Could not set size of");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+    hid_t attribute = H5Acreate2(dataset, name,
+				datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+#else
+    hid_t attribute = H5Acreate(dataset, name,
+				datatype, dataspace, H5P_DEFAULT);
+#endif
+    if (attribute < 0) 
+      throw std::runtime_error("Could not create");
+
+    err = H5Awrite(attribute, datatype, value);
+    if (err < 0) 
+      throw std::runtime_error("Could not write");
+
+    err = H5Aclose(attribute);
+    if (err < 0) 
+      throw std::runtime_error("Could not close");
+
+    err = H5Tclose(datatype);
+    if (err < 0) 
+      throw std::runtime_error("Could not close datatype for");
+
+    err = H5Dclose(dataset);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataset for");
+
+    err = H5Sclose(dataspace);
+    if (err < 0) 
+      throw std::runtime_error("Could not close dataspace for");
+
+  } catch (std::exception& err) {
+    std::ostringstream msg;
+    msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
+    throw std::runtime_error(msg.str());
+  } // try/catch
+} // writeAttribute
+
+// ----------------------------------------------------------------------
 // Read string attribute.
 std::string
 pylith::meshio::HDF5::readAttribute(const char* parent,

Modified: short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh	2011-05-29 03:13:35 UTC (rev 18486)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh	2011-05-29 03:15:17 UTC (rev 18487)
@@ -95,10 +95,10 @@
   /** Get names of datasets in group.
    *
    * @param names Names of datasets.
-   * @param group Name of group.
+   * @param parent Name of group.
    */
   void getGroupDatasets(string_vector* names,
-			const char* group);
+			const char* parent);
 
   /** Create group.
    *
@@ -120,6 +120,22 @@
 		      const void* value,
 		      hid_t datatype);
 
+  /** Set scalar attribute (used with external handle to HDF5 file,
+   * such as PetscHDF5Viewer).
+   *
+   * @param h5 HDF5 file.
+   * @param parent Full path of parent dataset for attribute.
+   * @param name Name of attribute.
+   * @param value Attribute value.
+   * @param datatype Datatype of scalar.
+   */
+  static
+  void writeAttribute(hid_t h5,
+		      const char* parent,
+		      const char* name,
+		      const void* value,
+		      hid_t datatype);
+
   /** Read scalar attribute.
    *
    * @param parent Full path of parent dataset for attribute.
@@ -142,6 +158,20 @@
 		      const char* name,
 		      const char* value);
 
+  /** Set string attribute (used with external handle to HDF5 file,
+   * such as PetscHDF5Viewer).
+   *
+   * @param h5 HDF5 file.
+   * @param parent Full path of parent dataset for attribute.
+   * @param name Name of attribute.
+   * @param value String value
+   */
+  static
+  void writeAttribute(hid_t h5,
+		      const char* parent,
+		      const char* name,
+		      const char* value);
+
   /** Read string attribute.
    *
    * @param parent Full path of parent dataset for attribute.

Modified: short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc	2011-05-29 03:13:35 UTC (rev 18486)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc	2011-05-29 03:15:17 UTC (rev 18487)
@@ -81,21 +81,23 @@
   assert(2 == ndims);
   numCells = dims[0];
   numCorners = dims[1];
-
-  if (1 == spaceDim && 2 == numCorners)
+  int cellDim = 0;
+  h5.readAttribute("/topology/cells", "cell_dim", (void*)&cellDim, 
+		  H5T_NATIVE_INT);
+  if (1 == cellDim && 2 == numCorners)
     cellType = "Polyline";
-  else if (2 == spaceDim && 3 == numCorners)
+  else if (2 == cellDim && 3 == numCorners)
     cellType = "Triangle";
-  else if (2 == spaceDim && 4 == numCorners)
+  else if (2 == cellDim && 4 == numCorners)
     cellType = "Quadrilateral";
-  else if (3 == spaceDim && 4 == numCorners)
+  else if (3 == cellDim && 4 == numCorners)
     cellType = "Tetrahedron";
-  else if (3 == spaceDim && 8 == numCorners)
+  else if (3 == cellDim && 8 == numCorners)
     cellType = "Hexahedron";
   else {
     std::ostringstream msg;
     msg << "Unknown cell type with " << numCorners
-	<< " vertices for dimension " << spaceDim << ".";
+	<< " vertices and dimension " << cellDim << ".";
     throw std::runtime_error(msg.str());
   } // else
 
@@ -103,7 +105,7 @@
   _getTimeStamps(&timeStamps, h5);
 
   // Fields metadata
-  _getFieldMetadata(&fieldsMetadata, h5);
+  _getFieldMetadata(&fieldsMetadata, h5, spaceDim);
 
   // Write Xdmf file.
   _file.open(filenameXdmf);
@@ -146,7 +148,7 @@
 
       for (int iField=0; iField < numFields; ++iField) {
 	if (2 == spaceDim && 
-	    std::string("vector") == fieldsMetadata[iField].vectorFieldType) {
+	    std::string("Vector") == fieldsMetadata[iField].vectorFieldType) {
 	  for (int component=0; component < spaceDim; ++component)
 	    _writeGridAttributeComponent(fieldsMetadata[iField],
 					 iTimeStep, component);
@@ -253,27 +255,77 @@
 // Get field metadata from HDF5 file.
 void
 pylith::meshio::Xdmf::_getFieldMetadata(std::vector<FieldMetadata>* metadata,
-					HDF5& h5)
+					HDF5& h5,
+					const int spaceDim)
 { // _getFieldMetadata
   assert(metadata);
 
   string_vector fieldNames;
+  hsize_t* dims = 0;
+  int ndims = 0;
 
   int fieldOffset = 0;
   if (h5.hasGroup("/vertex_fields")) {
-    h5.getGroupDatasets(&fieldNames, "/vertex_fields");
-    metadata->resize(fieldOffset+fieldNames.size());
+    const char* parent = "/vertex_fields";
+    h5.getGroupDatasets(&fieldNames, parent);
+    const int numFields = fieldNames.size();
 
-    // ADD STUFF HERE
+    metadata->resize(fieldOffset+numFields);
+    for (int i=0; i < numFields; ++i) {
+      h5.getDatasetDims(&dims, &ndims, parent, fieldNames[i].c_str());
+      (*metadata)[i].name = fieldNames[i];
+      (*metadata)[i].domain = "Node";
+      if (2 == ndims) {
+	(*metadata)[i].numTimeSteps = 1;
+	(*metadata)[i].numPoints = dims[0];
+	(*metadata)[i].fiberDim = dims[1];
+      } else {
+	assert(3 == ndims);
+	(*metadata)[i].numTimeSteps = dims[0];
+	(*metadata)[i].numPoints = dims[1];
+	(*metadata)[i].fiberDim = dims[2];
+      } // if/else
+    } // for
 
     fieldOffset += fieldNames.size();
   } // if
   if (h5.hasGroup("/cell_fields")) {
-    h5.getGroupDatasets(&fieldNames, "/cell_fields");
-    metadata->resize(fieldOffset+fieldNames.size());
+    const char* parent = "/cell_fields";
+    h5.getGroupDatasets(&fieldNames, parent);
+    const int numFields = fieldNames.size();
 
-    // ADD STUFF HERE
+    metadata->resize(fieldOffset+numFields);
+    for (int i=0; i < numFields; ++i) {
+      h5.getDatasetDims(&dims, &ndims, parent, fieldNames[i].c_str());
+      (*metadata)[i].name = fieldNames[i];
+      (*metadata)[i].domain = "Cell";
+      if (2 == ndims) {
+	(*metadata)[i].numTimeSteps = 1;
+	(*metadata)[i].numPoints = dims[0];
+	(*metadata)[i].fiberDim = dims[1];
+      } else {
+	assert(3 == ndims);
+	(*metadata)[i].numTimeSteps = dims[0];
+	(*metadata)[i].numPoints = dims[1];
+	(*metadata)[i].fiberDim = dims[2];
+      } // if/else
+    } // for
+
+    fieldOffset += fieldNames.size();
   } // if
+
+  const int numFields = metadata->size();
+  for (int i=0; i < numFields; ++i) {
+    const int fiberDim = (*metadata)[i].fiberDim;
+    if (1 == fiberDim)
+      (*metadata)[i].vectorFieldType = "Scalar";
+    else if (fiberDim == spaceDim)
+      (*metadata)[i].vectorFieldType = "Vector";
+    else if (6 == fiberDim)
+      (*metadata)[i].vectorFieldType = "Tensor";
+    else
+      (*metadata)[i].vectorFieldType = "Matrix";
+  } // for
 } // _getFieldMetadata
 
 // ----------------------------------------------------------------------
@@ -376,7 +428,7 @@
   assert(_file.is_open() && _file.good());
 
   std::string h5FullName = "";
-  if (std::string("Vertex") == metadata.domain) {
+  if (std::string("Node") == metadata.domain) {
     h5FullName = std::string("/vertex_fields/") + metadata.name;
   } else if (std::string("Cell") == metadata.domain) {
     h5FullName = std::string("/cell_fields/") + metadata.name;
@@ -425,7 +477,7 @@
   assert(_file.is_open() && _file.good());
 
   std::string h5FullName = "";
-  if (std::string("Vertex") == metadata.domain) {
+  if (std::string("Node") == metadata.domain) {
     h5FullName = std::string("/vertex_fields/") + metadata.name;
   } else if (std::string("Cell") == metadata.domain) {
     h5FullName = std::string("/cell_fields/") + metadata.name;
@@ -439,13 +491,13 @@
   std::string componentName = "unknown";
   switch (component) {
   case 0:
-    componentName = std::string("x-") + std::string(metadata.name);
+    componentName = std::string("x_") + std::string(metadata.name);
     break;
   case 1:
-    componentName = std::string("y-") + std::string(metadata.name);
+    componentName = std::string("y_") + std::string(metadata.name);
     break;
   case 2:
-    componentName = std::string("z-") + std::string(metadata.name);
+    componentName = std::string("z_") + std::string(metadata.name);
     break;
   default:
     { // default

Modified: short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh	2011-05-29 03:13:35 UTC (rev 18486)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh	2011-05-29 03:15:17 UTC (rev 18487)
@@ -95,9 +95,11 @@
    *
    * @param metadata Array of field metadata.
    * @param h5 HDF5 file.
+   * @param spaceDim Spatial dimension.
    */
   void _getFieldMetadata(std::vector<FieldMetadata>* metadata,
-			 HDF5& h5);
+			 HDF5& h5,
+			 const int spaceDim);
 
   /** Write domain cell information.
    *

Modified: short/3D/PyLith/trunk/unittests/libtests/meshio/data/tri3_vertex.h5
===================================================================
(Binary files differ)



More information about the CIG-COMMITS mailing list