[cig-commits] r18406 - in short/3D/PyLith/trunk: . libsrc libsrc/pylith libsrc/pylith/bc libsrc/pylith/faults libsrc/pylith/feassemble libsrc/pylith/friction libsrc/pylith/materials libsrc/pylith/meshio libsrc/pylith/problems libsrc/pylith/topology libsrc/pylith/utils modulesrc/bc modulesrc/faults modulesrc/feassemble modulesrc/friction modulesrc/materials modulesrc/meshio modulesrc/mpi modulesrc/problems modulesrc/topology modulesrc/utils
brad at geodynamics.org
brad at geodynamics.org
Sat May 21 06:46:31 PDT 2011
Author: brad
Date: 2011-05-21 06:46:30 -0700 (Sat, 21 May 2011)
New Revision: 18406
Added:
short/3D/PyLith/trunk/libsrc/pylith/
short/3D/PyLith/trunk/libsrc/pylith/Makefile.am
short/3D/PyLith/trunk/libsrc/pylith/bc/
short/3D/PyLith/trunk/libsrc/pylith/faults/
short/3D/PyLith/trunk/libsrc/pylith/feassemble/
short/3D/PyLith/trunk/libsrc/pylith/friction/
short/3D/PyLith/trunk/libsrc/pylith/materials/
short/3D/PyLith/trunk/libsrc/pylith/meshio/
short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc
short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc
short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh
short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc
short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh
short/3D/PyLith/trunk/libsrc/pylith/problems/
short/3D/PyLith/trunk/libsrc/pylith/topology/
short/3D/PyLith/trunk/libsrc/pylith/topology/Field.cc
short/3D/PyLith/trunk/libsrc/pylith/utils/
Removed:
short/3D/PyLith/trunk/libsrc/Makefile.am
short/3D/PyLith/trunk/libsrc/bc/
short/3D/PyLith/trunk/libsrc/faults/
short/3D/PyLith/trunk/libsrc/feassemble/
short/3D/PyLith/trunk/libsrc/friction/
short/3D/PyLith/trunk/libsrc/materials/
short/3D/PyLith/trunk/libsrc/meshio/
short/3D/PyLith/trunk/libsrc/problems/
short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc
short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc
short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh
short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc
short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh
short/3D/PyLith/trunk/libsrc/pylith/topology/Field.cc
short/3D/PyLith/trunk/libsrc/topology/
short/3D/PyLith/trunk/libsrc/utils/
Modified:
short/3D/PyLith/trunk/configure.ac
short/3D/PyLith/trunk/libsrc/pylith/bc/Makefile.am
short/3D/PyLith/trunk/libsrc/pylith/faults/Makefile.am
short/3D/PyLith/trunk/libsrc/pylith/feassemble/Makefile.am
short/3D/PyLith/trunk/libsrc/pylith/friction/Makefile.am
short/3D/PyLith/trunk/libsrc/pylith/materials/Makefile.am
short/3D/PyLith/trunk/libsrc/pylith/meshio/Makefile.am
short/3D/PyLith/trunk/libsrc/pylith/problems/Makefile.am
short/3D/PyLith/trunk/libsrc/pylith/topology/Makefile.am
short/3D/PyLith/trunk/libsrc/pylith/utils/Makefile.am
short/3D/PyLith/trunk/modulesrc/bc/Makefile.am
short/3D/PyLith/trunk/modulesrc/faults/Makefile.am
short/3D/PyLith/trunk/modulesrc/feassemble/Makefile.am
short/3D/PyLith/trunk/modulesrc/friction/Makefile.am
short/3D/PyLith/trunk/modulesrc/materials/Makefile.am
short/3D/PyLith/trunk/modulesrc/meshio/Makefile.am
short/3D/PyLith/trunk/modulesrc/mpi/Makefile.am
short/3D/PyLith/trunk/modulesrc/problems/Makefile.am
short/3D/PyLith/trunk/modulesrc/topology/Makefile.am
short/3D/PyLith/trunk/modulesrc/utils/Makefile.am
short/3D/PyLith/trunk/subpackage.am
Log:
Create libsrc/pylith to eliminate the need to copy header files to the include directory when building.
Modified: short/3D/PyLith/trunk/configure.ac
===================================================================
--- short/3D/PyLith/trunk/configure.ac 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/configure.ac 2011-05-21 13:46:30 UTC (rev 18406)
@@ -264,16 +264,17 @@
# ----------------------------------------------------------------------
AC_CONFIG_FILES([Makefile
pylith/Makefile
- libsrc/Makefile
- libsrc/bc/Makefile
- libsrc/feassemble/Makefile
- libsrc/faults/Makefile
- libsrc/friction/Makefile
- libsrc/materials/Makefile
- libsrc/meshio/Makefile
- libsrc/problems/Makefile
- libsrc/topology/Makefile
- libsrc/utils/Makefile
+ libsrc/Makefile
+ libsrc/pylith/Makefile
+ libsrc/pylith/bc/Makefile
+ libsrc/pylith/feassemble/Makefile
+ libsrc/pylith/faults/Makefile
+ libsrc/pylith/friction/Makefile
+ libsrc/pylith/materials/Makefile
+ libsrc/pylith/meshio/Makefile
+ libsrc/pylith/problems/Makefile
+ libsrc/pylith/topology/Makefile
+ libsrc/pylith/utils/Makefile
modulesrc/Makefile
modulesrc/include/Makefile
modulesrc/bc/Makefile
Deleted: short/3D/PyLith/trunk/libsrc/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/libsrc/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -1,195 +0,0 @@
-# -*- Makefile -*-
-#
-# ----------------------------------------------------------------------
-#
-# Brad T. Aagaard, U.S. Geological Survey
-# Charles A. Williams, GNS Science
-# Matthew G. Knepley, University of Chicago
-#
-# This code was developed as part of the Computational Infrastructure
-# for Geodynamics (http://geodynamics.org).
-#
-# Copyright (c) 2010 University of California, Davis
-#
-# See COPYING for license information.
-#
-# ----------------------------------------------------------------------
-#
-
-SUBDIRS = \
- utils \
- topology \
- meshio \
- materials \
- bc \
- feassemble \
- faults \
- friction \
- problems
-
-.cu.lo:
- $(LIBTOOL) --tag=CC --mode=compile nvcc --compile -m64 $(NVCC_FLAGS) -o ${@F} $<
-
-lib_LTLIBRARIES = libpylith.la
-
-libpylith_la_SOURCES = \
- bc/BoundaryCondition.cc \
- bc/BoundaryConditionPoints.cc \
- bc/BCIntegratorSubMesh.cc \
- bc/TimeDependent.cc \
- bc/TimeDependentPoints.cc \
- bc/DirichletBC.cc \
- bc/DirichletBoundary.cc \
- bc/Neumann.cc \
- bc/AbsorbingDampers.cc \
- bc/PointForce.cc \
- faults/Fault.cc \
- faults/SlipTimeFn.cc \
- faults/StepSlipFn.cc \
- faults/ConstRateSlipFn.cc \
- faults/BruneSlipFn.cc \
- faults/TimeHistorySlipFn.cc \
- faults/LiuCosSlipFn.cc \
- faults/EqKinSrc.cc \
- faults/TopologyOps.cc \
- faults/CohesiveTopology.cc \
- faults/FaultCohesive.cc \
- faults/FaultCohesiveLagrange.cc \
- faults/FaultCohesiveKin.cc \
- faults/FaultCohesiveDyn.cc \
- faults/FaultCohesiveTract.cc \
- feassemble/CellGeometry.cc \
- feassemble/Constraint.cc \
- feassemble/GeometryPoint1D.cc \
- feassemble/GeometryPoint2D.cc \
- feassemble/GeometryPoint3D.cc \
- feassemble/GeometryLine1D.cc \
- feassemble/GeometryLine2D.cc \
- feassemble/GeometryLine3D.cc \
- feassemble/GeometryTri2D.cc \
- feassemble/GeometryTri3D.cc \
- feassemble/GeometryTet3D.cc \
- feassemble/GeometryQuad2D.cc \
- feassemble/GeometryQuad3D.cc \
- feassemble/GeometryHex3D.cc \
- feassemble/QuadratureRefCell.cc \
- feassemble/QuadratureEngine.cc \
- feassemble/Quadrature0D.cc \
- feassemble/Quadrature1D.cc \
- feassemble/Quadrature1Din2D.cc \
- feassemble/Quadrature1Din3D.cc \
- feassemble/Quadrature2D.cc \
- feassemble/Quadrature2Din3D.cc \
- feassemble/Quadrature3D.cc \
- feassemble/IntegratorElasticity.cc \
- feassemble/ElasticityImplicit.cc \
- feassemble/ElasticityExplicit.cc \
- feassemble/ElasticityExplicitTri3.cc \
- feassemble/ElasticityExplicitTet4.cc \
- feassemble/IntegratorElasticityLgDeform.cc \
- feassemble/ElasticityImplicitLgDeform.cc \
- feassemble/ElasticityExplicitLgDeform.cc \
- friction/FrictionModel.cc \
- friction/StaticFriction.cc \
- friction/SlipWeakening.cc \
- friction/RateStateAgeing.cc \
- friction/TimeWeakening.cc \
- materials/Metadata.cc \
- materials/Material.cc \
- materials/ElasticMaterial.cc \
- materials/ElasticStrain1D.cc \
- materials/ElasticStress1D.cc \
- materials/ElasticPlaneStrain.cc \
- materials/ElasticPlaneStress.cc \
- materials/ElasticIsotropic3D.cc \
- materials/ViscoelasticMaxwell.cc \
- materials/GenMaxwellIsotropic3D.cc \
- materials/GenMaxwellQpQsIsotropic3D.cc \
- materials/MaxwellIsotropic3D.cc \
- materials/MaxwellPlaneStrain.cc \
- materials/PowerLaw3D.cc \
- materials/DruckerPrager3D.cc \
- meshio/BinaryIO.cc \
- meshio/GMVFile.cc \
- meshio/GMVFileAscii.cc \
- meshio/GMVFileBinary.cc \
- meshio/MeshBuilder.cc \
- meshio/MeshIO.cc \
- meshio/MeshIOAscii.cc \
- meshio/MeshIOLagrit.cc \
- meshio/MeshIOSieve.cc \
- meshio/PsetFile.cc \
- meshio/PsetFileAscii.cc \
- meshio/PsetFileBinary.cc \
- meshio/OutputSolnSubset.cc \
- meshio/UCDFaultFile.cc \
- problems/Formulation.cc \
- problems/Explicit.cc \
- problems/Implicit.cc \
- problems/Solver.cc \
- problems/SolverLinear.cc \
- problems/SolverNonlinear.cc \
- problems/SolverLumped.cc \
- topology/FieldBase.cc \
- topology/Jacobian.cc \
- topology/Mesh.cc \
- topology/MeshOps.cc \
- topology/SubMesh.cc \
- topology/SolutionFields.cc \
- topology/Distributor.cc \
- topology/RefineUniform.cc \
- topology/ReverseCuthillMcKee.cc \
- utils/EventLogger.cc \
- utils/TestArray.cc
-
-
-# TEMPORARY
-libpylith_la_SOURCES += \
- topology/RefineEdges2.cc \
- topology/CellRefinerTri3.cc \
- topology/CellRefinerTet4.cc \
- topology/RefineFace4Edges2.cc \
- topology/CellRefinerQuad4.cc \
- topology/RefineVol8Face4Edges2.cc \
- topology/CellRefinerHex8.cc \
- topology/MeshOrder.cc
-
-
-libpylith_la_LDFLAGS = $(AM_LDFLAGS) $(PYTHON_LA_LDFLAGS)
-libpylith_la_LIBADD = \
- -lspatialdata \
- $(PETSC_LIB)
-if NO_UNDEFINED
-libpylith_la_LIBADD += \
- $(PYTHON_BLDLIBRARY) $(PYTHON_LIBS) $(PYTHON_SYSLIBS)
-endif
-
-INCLUDES = -I$(top_builddir)/include
-INCLUDES += $(PETSC_CC_INCLUDES)
-
-AM_CPPFLAGS = $(PYTHON_EGG_CPPFLAGS) -I$(PYTHON_INCDIR) $(PETSC_SIEVE_FLAGS)
-
-if ENABLE_HDF5
- libpylith_la_SOURCES += \
- meshio/HDF5.cc \
- meshio/Xdmf.cc
- libpylith_la_LIBADD += -lhdf5
-endif
-
-if ENABLE_CUBIT
- libpylith_la_SOURCES += \
- meshio/MeshIOCubit.cc
- libpylith_la_LIBADD += -lnetcdf_c++ -lnetcdf
-endif
-
-if ENABLE_CUDA
- libpylith_la_SOURCES += \
- feassemble/ElasticityImplicitCUDA.cc \
- feassemble/ElasticityImplicitCUDAKernel.cu
- libpylith_la_LIBADD += -lcudart -lcuda
-# -L/usr/local/cuda/lib
-# INCLUDES += -I/usr/local/cuda/include
-endif
-
-
-# End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/Makefile.am (from rev 18400, short/3D/PyLith/trunk/libsrc/Makefile.am)
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/Makefile.am (rev 0)
+++ short/3D/PyLith/trunk/libsrc/pylith/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -0,0 +1,195 @@
+# -*- Makefile -*-
+#
+# ----------------------------------------------------------------------
+#
+# Brad T. Aagaard, U.S. Geological Survey
+# Charles A. Williams, GNS Science
+# Matthew G. Knepley, University of Chicago
+#
+# This code was developed as part of the Computational Infrastructure
+# for Geodynamics (http://geodynamics.org).
+#
+# Copyright (c) 2010 University of California, Davis
+#
+# See COPYING for license information.
+#
+# ----------------------------------------------------------------------
+#
+
+SUBDIRS = \
+ utils \
+ topology \
+ meshio \
+ materials \
+ bc \
+ feassemble \
+ faults \
+ friction \
+ problems
+
+.cu.lo:
+ $(LIBTOOL) --tag=CC --mode=compile nvcc --compile -m64 $(NVCC_FLAGS) -o ${@F} $<
+
+lib_LTLIBRARIES = libpylith.la
+
+libpylith_la_SOURCES = \
+ bc/BoundaryCondition.cc \
+ bc/BoundaryConditionPoints.cc \
+ bc/BCIntegratorSubMesh.cc \
+ bc/TimeDependent.cc \
+ bc/TimeDependentPoints.cc \
+ bc/DirichletBC.cc \
+ bc/DirichletBoundary.cc \
+ bc/Neumann.cc \
+ bc/AbsorbingDampers.cc \
+ bc/PointForce.cc \
+ faults/Fault.cc \
+ faults/SlipTimeFn.cc \
+ faults/StepSlipFn.cc \
+ faults/ConstRateSlipFn.cc \
+ faults/BruneSlipFn.cc \
+ faults/TimeHistorySlipFn.cc \
+ faults/LiuCosSlipFn.cc \
+ faults/EqKinSrc.cc \
+ faults/TopologyOps.cc \
+ faults/CohesiveTopology.cc \
+ faults/FaultCohesive.cc \
+ faults/FaultCohesiveLagrange.cc \
+ faults/FaultCohesiveKin.cc \
+ faults/FaultCohesiveDyn.cc \
+ faults/FaultCohesiveTract.cc \
+ feassemble/CellGeometry.cc \
+ feassemble/Constraint.cc \
+ feassemble/GeometryPoint1D.cc \
+ feassemble/GeometryPoint2D.cc \
+ feassemble/GeometryPoint3D.cc \
+ feassemble/GeometryLine1D.cc \
+ feassemble/GeometryLine2D.cc \
+ feassemble/GeometryLine3D.cc \
+ feassemble/GeometryTri2D.cc \
+ feassemble/GeometryTri3D.cc \
+ feassemble/GeometryTet3D.cc \
+ feassemble/GeometryQuad2D.cc \
+ feassemble/GeometryQuad3D.cc \
+ feassemble/GeometryHex3D.cc \
+ feassemble/QuadratureRefCell.cc \
+ feassemble/QuadratureEngine.cc \
+ feassemble/Quadrature0D.cc \
+ feassemble/Quadrature1D.cc \
+ feassemble/Quadrature1Din2D.cc \
+ feassemble/Quadrature1Din3D.cc \
+ feassemble/Quadrature2D.cc \
+ feassemble/Quadrature2Din3D.cc \
+ feassemble/Quadrature3D.cc \
+ feassemble/IntegratorElasticity.cc \
+ feassemble/ElasticityImplicit.cc \
+ feassemble/ElasticityExplicit.cc \
+ feassemble/ElasticityExplicitTri3.cc \
+ feassemble/ElasticityExplicitTet4.cc \
+ feassemble/IntegratorElasticityLgDeform.cc \
+ feassemble/ElasticityImplicitLgDeform.cc \
+ feassemble/ElasticityExplicitLgDeform.cc \
+ friction/FrictionModel.cc \
+ friction/StaticFriction.cc \
+ friction/SlipWeakening.cc \
+ friction/RateStateAgeing.cc \
+ friction/TimeWeakening.cc \
+ materials/Metadata.cc \
+ materials/Material.cc \
+ materials/ElasticMaterial.cc \
+ materials/ElasticStrain1D.cc \
+ materials/ElasticStress1D.cc \
+ materials/ElasticPlaneStrain.cc \
+ materials/ElasticPlaneStress.cc \
+ materials/ElasticIsotropic3D.cc \
+ materials/ViscoelasticMaxwell.cc \
+ materials/GenMaxwellIsotropic3D.cc \
+ materials/GenMaxwellQpQsIsotropic3D.cc \
+ materials/MaxwellIsotropic3D.cc \
+ materials/MaxwellPlaneStrain.cc \
+ materials/PowerLaw3D.cc \
+ materials/DruckerPrager3D.cc \
+ meshio/BinaryIO.cc \
+ meshio/GMVFile.cc \
+ meshio/GMVFileAscii.cc \
+ meshio/GMVFileBinary.cc \
+ meshio/MeshBuilder.cc \
+ meshio/MeshIO.cc \
+ meshio/MeshIOAscii.cc \
+ meshio/MeshIOLagrit.cc \
+ meshio/MeshIOSieve.cc \
+ meshio/PsetFile.cc \
+ meshio/PsetFileAscii.cc \
+ meshio/PsetFileBinary.cc \
+ meshio/OutputSolnSubset.cc \
+ meshio/UCDFaultFile.cc \
+ problems/Formulation.cc \
+ problems/Explicit.cc \
+ problems/Implicit.cc \
+ problems/Solver.cc \
+ problems/SolverLinear.cc \
+ problems/SolverNonlinear.cc \
+ problems/SolverLumped.cc \
+ topology/FieldBase.cc \
+ topology/Jacobian.cc \
+ topology/Mesh.cc \
+ topology/MeshOps.cc \
+ topology/SubMesh.cc \
+ topology/SolutionFields.cc \
+ topology/Distributor.cc \
+ topology/RefineUniform.cc \
+ topology/ReverseCuthillMcKee.cc \
+ utils/EventLogger.cc \
+ utils/TestArray.cc
+
+
+# TEMPORARY
+libpylith_la_SOURCES += \
+ topology/RefineEdges2.cc \
+ topology/CellRefinerTri3.cc \
+ topology/CellRefinerTet4.cc \
+ topology/RefineFace4Edges2.cc \
+ topology/CellRefinerQuad4.cc \
+ topology/RefineVol8Face4Edges2.cc \
+ topology/CellRefinerHex8.cc \
+ topology/MeshOrder.cc
+
+
+libpylith_la_LDFLAGS = $(AM_LDFLAGS) $(PYTHON_LA_LDFLAGS)
+libpylith_la_LIBADD = \
+ -lspatialdata \
+ $(PETSC_LIB)
+if NO_UNDEFINED
+libpylith_la_LIBADD += \
+ $(PYTHON_BLDLIBRARY) $(PYTHON_LIBS) $(PYTHON_SYSLIBS)
+endif
+
+INCLUDES = -I$(top_srcdir)/libsrc
+INCLUDES += $(PETSC_CC_INCLUDES)
+
+AM_CPPFLAGS = $(PYTHON_EGG_CPPFLAGS) -I$(PYTHON_INCDIR) $(PETSC_SIEVE_FLAGS)
+
+if ENABLE_HDF5
+ libpylith_la_SOURCES += \
+ meshio/HDF5.cc \
+ meshio/Xdmf.cc
+ libpylith_la_LIBADD += -lhdf5
+endif
+
+if ENABLE_CUBIT
+ libpylith_la_SOURCES += \
+ meshio/MeshIOCubit.cc
+ libpylith_la_LIBADD += -lnetcdf_c++ -lnetcdf
+endif
+
+if ENABLE_CUDA
+ libpylith_la_SOURCES += \
+ feassemble/ElasticityImplicitCUDA.cc \
+ feassemble/ElasticityImplicitCUDAKernel.cu
+ libpylith_la_LIBADD += -lcudart -lcuda
+# -L/usr/local/cuda/lib
+# INCLUDES += -I/usr/local/cuda/include
+endif
+
+
+# End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/bc (from rev 18400, short/3D/PyLith/trunk/libsrc/bc)
Modified: short/3D/PyLith/trunk/libsrc/pylith/bc/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/bc/Makefile.am 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/bc/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -43,10 +43,5 @@
noinst_HEADERS =
-# export
-clean-local: clean-subpkgincludeHEADERS
-BUILT_SOURCES = export-subpkgincludeHEADERS
-CLEANFILES = export-subpkgincludeHEADERS
-
# End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/faults (from rev 18400, short/3D/PyLith/trunk/libsrc/faults)
Modified: short/3D/PyLith/trunk/libsrc/pylith/faults/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/faults/Makefile.am 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/faults/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -48,10 +48,5 @@
TopologyVisitors.hh \
TopologyVisitors.cc
-# export
-clean-local: clean-subpkgincludeHEADERS
-BUILT_SOURCES = export-subpkgincludeHEADERS
-CLEANFILES = export-subpkgincludeHEADERS
-
# End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/feassemble (from rev 18400, short/3D/PyLith/trunk/libsrc/feassemble)
Modified: short/3D/PyLith/trunk/libsrc/pylith/feassemble/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/feassemble/Makefile.am 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/feassemble/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -81,10 +81,5 @@
tri3_elasticity.wxm \
tet4_elasticity.wxm
-# export
-clean-local: clean-subpkgincludeHEADERS
-BUILT_SOURCES = export-subpkgincludeHEADERS
-CLEANFILES = export-subpkgincludeHEADERS
-
# End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/friction (from rev 18400, short/3D/PyLith/trunk/libsrc/friction)
Modified: short/3D/PyLith/trunk/libsrc/pylith/friction/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/friction/Makefile.am 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/friction/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -31,10 +31,5 @@
noinst_HEADERS =
-# export
-clean-local: clean-subpkgincludeHEADERS
-BUILT_SOURCES = export-subpkgincludeHEADERS
-CLEANFILES = export-subpkgincludeHEADERS
-
# End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/materials (from rev 18400, short/3D/PyLith/trunk/libsrc/materials)
Modified: short/3D/PyLith/trunk/libsrc/pylith/materials/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/materials/Makefile.am 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/materials/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -52,10 +52,5 @@
noinst_HEADERS =
-# export
-clean-local: clean-subpkgincludeHEADERS
-BUILT_SOURCES = export-subpkgincludeHEADERS
-CLEANFILES = export-subpkgincludeHEADERS
-
# End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/meshio (from rev 18400, short/3D/PyLith/trunk/libsrc/meshio)
Deleted: short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc 2011-05-21 13:46:30 UTC (rev 18406)
@@ -1,386 +0,0 @@
-// -*- C++ -*-
-//
-// ======================================================================
-//
-// Brad T. Aagaard, U.S. Geological Survey
-// Charles A. Williams, GNS Science
-// Matthew G. Knepley, University of Chicago
-//
-// This code was developed as part of the Computational Infrastructure
-// for Geodynamics (http://geodynamics.org).
-//
-// Copyright (c) 2010 University of California, Davis
-//
-// See COPYING for license information.
-//
-// ======================================================================
-//
-
-#include <portinfo>
-
-#include <cassert> // USES assert()
-#include <sstream> // USES std::ostringstream
-#include <stdexcept> // USES std::runtime_error
-
-// ----------------------------------------------------------------------
-// Constructor
-template<typename mesh_type, typename field_type>
-pylith::meshio::DataWriterHDF5<mesh_type,field_type>::DataWriterHDF5(void) :
- _filename("output.h5"),
- _viewer(0)
-{ // constructor
-} // constructor
-
-// ----------------------------------------------------------------------
-// Destructor
-template<typename mesh_type, typename field_type>
-pylith::meshio::DataWriterHDF5<mesh_type,field_type>::~DataWriterHDF5(void)
-{ // destructor
- deallocate();
-} // destructor
-
-// ----------------------------------------------------------------------
-// Deallocate PETSc and local data structures.
-template<typename mesh_type, typename field_type>
-void
-pylith::meshio::DataWriterHDF5<mesh_type, field_type>::deallocate(void)
-{ // deallocate
- if (_viewer) {
- PetscErrorCode err = PetscViewerDestroy(&_viewer); CHECK_PETSC_ERROR(err);
- } // if
- _viewer = 0;
-} // deallocate
-
-// ----------------------------------------------------------------------
-// Copy constructor.
-template<typename mesh_type, typename field_type>
-pylith::meshio::DataWriterHDF5<mesh_type,field_type>::DataWriterHDF5(const DataWriterHDF5<mesh_type, field_type>& w) :
- DataWriter<mesh_type, field_type>(w),
- _filename(w._filename),
- _viewer(0)
-{ // copy constructor
-} // copy constructor
-
-// ----------------------------------------------------------------------
-// Prepare file for data at a new time step.
-template<typename mesh_type, typename field_type>
-void
-pylith::meshio::DataWriterHDF5<mesh_type,field_type>::open(const mesh_type& mesh,
- const int numTimeSteps,
- const char* label,
- const int labelId)
-{ // open
- typedef typename mesh_type::SieveMesh SieveMesh;
- typedef typename mesh_type::SieveMesh::label_sequence label_sequence;
- typedef typename mesh_type::SieveMesh::numbering_type numbering_type;
- typedef typename mesh_type::SieveMesh::sieve_type sieve_type;
-
- DataWriter<mesh_type, field_type>::open(mesh, numTimeSteps, label, labelId);
-
- try {
- PetscErrorCode err = 0;
-
- const std::string& filename = _hdf5Filename();
-
- _timesteps.clear();
-
- err = PetscViewerHDF5Open(mesh.comm(), filename.c_str(), FILE_MODE_WRITE,
- &_viewer);
- CHECK_PETSC_ERROR(err);
-
- const ALE::Obj<SieveMesh>& sieveMesh = mesh.sieveMesh();
- assert(!sieveMesh.isNull());
- const ALE::Obj<typename mesh_type::RealSection>& coordinatesSection =
- sieveMesh->hasRealSection("coordinates_dimensioned") ?
- sieveMesh->getRealSection("coordinates_dimensioned") :
- sieveMesh->getRealSection("coordinates");
- assert(!coordinatesSection.isNull());
- const spatialdata::geocoords::CoordSys* cs = mesh.coordsys();
- assert(cs);
- topology::FieldBase::Metadata metadata;
- // :KLUDGE: We would like to use field_type for the coordinates
- // field. However, the mesh coordinates are Field<mesh_type> and
- // field_type can be Field<Mesh> (e.g., displacement field over a
- // SubMesh).
- const char* context = DataWriter<mesh_type, field_type>::_context.c_str();
- topology::Field<mesh_type> coordinates(mesh, coordinatesSection, metadata);
- coordinates.label("vertices");
- ALE::Obj<numbering_type> vNumbering =
- sieveMesh->hasLabel("censored depth") ?
- sieveMesh->getFactory()->getNumbering(sieveMesh, "censored depth", 0) :
- sieveMesh->getFactory()->getNumbering(sieveMesh, 0);
- assert(!vNumbering.isNull());
- //vNumbering->view("VERTEX NUMBERING");
-
- coordinates.createScatterWithBC(vNumbering, context);
- coordinates.scatterSectionToVector(context);
- PetscVec coordinatesVector = coordinates.vector(context);
- assert(coordinatesVector);
- int blockSize = 1;
- err = VecGetBlockSize(coordinatesVector, &blockSize); // get block size
- CHECK_PETSC_ERROR(err);
- err = VecSetBlockSize(coordinatesVector, cs->spaceDim()); // bs for output
- CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5PushGroup(_viewer, "/geometry"); CHECK_PETSC_ERROR(err);
- err = VecView(coordinatesVector, _viewer);CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
- err = VecSetBlockSize(coordinatesVector, blockSize); // reset block size
- CHECK_PETSC_ERROR(err);
-
- // Account for censored cells
- const int cellDepth = (sieveMesh->depth() == -1) ? -1 : 1;
- const int depth = (0 == label) ? cellDepth : labelId;
- const std::string labelName = (0 == label) ?
- ((sieveMesh->hasLabel("censored depth")) ?
- "censored depth" : "depth") : label;
- assert(!sieveMesh->getFactory().isNull());
- const ALE::Obj<numbering_type>& cNumbering =
- sieveMesh->getFactory()->getNumbering(sieveMesh, labelName, depth);
- assert(!cNumbering.isNull());
- const ALE::Obj<label_sequence>& cells =
- sieveMesh->getLabelStratum(labelName, depth);
- assert(!cells.isNull());
- int numCornersLocal = 0;
- if (cells->size() > 0)
- numCornersLocal = sieveMesh->getNumCellCorners(*cells->begin());
- int numCorners = numCornersLocal;
- err = MPI_Allreduce(&numCornersLocal, &numCorners, 1, MPI_INT, MPI_MAX,
- sieveMesh->comm()); CHECK_PETSC_ERROR(err);
-
- const int ncells = cNumbering->getLocalSize();
- const int conesSize = ncells*numCorners;
- PetscScalar* tmpVertices = (conesSize > 0) ? new PetscScalar[conesSize] : 0;
-
- const Obj<sieve_type>& sieve = sieveMesh->getSieve();
- assert(!sieve.isNull());
- ALE::ISieveVisitor::NConeRetriever<sieve_type>
- ncV(*sieve, (size_t) pow((double) sieve->getMaxConeSize(),
- std::max(0, sieveMesh->depth())));
-
- int k = 0;
- const typename label_sequence::const_iterator cellsEnd = cells->end();
- for (typename label_sequence::iterator c_iter=cells->begin();
- c_iter != cellsEnd;
- ++c_iter)
- if (cNumbering->isLocal(*c_iter)) {
- ncV.clear();
- ALE::ISieveTraversal<sieve_type>::orientedClosure(*sieve, *c_iter, ncV);
- const typename ALE::ISieveVisitor::NConeRetriever<sieve_type>::oriented_point_type* cone =
- ncV.getOrientedPoints();
- const int coneSize = ncV.getOrientedSize();
- for (int c=0; c < coneSize; ++c)
- tmpVertices[k++] = vNumbering->getIndex(cone[c].first);
- } // if
-
- PetscVec elemVec;
- err = VecCreateMPIWithArray(sieveMesh->comm(), conesSize, PETSC_DETERMINE,
- tmpVertices, &elemVec); CHECK_PETSC_ERROR(err);
- err = PetscObjectSetName((PetscObject) elemVec,
- "cells");CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5PushGroup(_viewer, "/topology"); CHECK_PETSC_ERROR(err);
- err = VecSetBlockSize(elemVec, numCorners); CHECK_PETSC_ERROR(err);
- err = VecView(elemVec, _viewer); CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
- err = VecDestroy(&elemVec); CHECK_PETSC_ERROR(err);
- delete[] tmpVertices; tmpVertices = 0;
- } catch (const std::exception& err) {
- std::ostringstream msg;
- msg << "Error while opening HDF5 file "
- << _filename << ".\n" << err.what();
- throw std::runtime_error(msg.str());
- } catch (...) {
- std::ostringstream msg;
- msg << "Unknown error while opening HDF5 file "
- << _filename << ".\n";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // openTimeStep
-
-// ----------------------------------------------------------------------
-// Close output files.
-template<typename mesh_type, typename field_type>
-void
-pylith::meshio::DataWriterHDF5<mesh_type,field_type>::close(void)
-{ // close
- if (_viewer) {
- PetscViewerDestroy(&_viewer);
- } // if
- _viewer = 0;
- _timesteps.clear();
-} // close
-
-// ----------------------------------------------------------------------
-// Write field over vertices to file.
-template<typename mesh_type, typename field_type>
-void
-pylith::meshio::DataWriterHDF5<mesh_type,field_type>::writeVertexField(
- const double t,
- field_type& field,
- const mesh_type& mesh)
-{ // writeVertexField
- typedef typename mesh_type::SieveMesh::numbering_type numbering_type;
-
- assert(_viewer);
-
- try {
- const char* context = DataWriter<mesh_type, field_type>::_context.c_str();
-
- const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh = mesh.sieveMesh();
- assert(!sieveMesh.isNull());
- ALE::Obj<numbering_type> vNumbering =
- sieveMesh->hasLabel("censored depth") ?
- sieveMesh->getFactory()->getNumbering(sieveMesh, "censored depth", 0) :
- sieveMesh->getFactory()->getNumbering(sieveMesh, 0);
- assert(!vNumbering.isNull());
- field.createScatterWithBC(vNumbering, context);
- field.scatterSectionToVector(context);
- PetscVec vector = field.vector(context);
- assert(vector);
-
- const ALE::Obj<typename mesh_type::RealSection>& section = field.section();
- assert(!section.isNull());
- const std::string labelName =
- (sieveMesh->hasLabel("censored depth")) ? "censored depth" : "depth";
- assert(!sieveMesh->getLabelStratum(labelName, 0).isNull());
- int fiberDimLocal =
- (sieveMesh->getLabelStratum(labelName, 0)->size() > 0) ?
- section->getFiberDimension(*sieveMesh->getLabelStratum(labelName, 0)->begin()) : 0;
- int fiberDim = 0;
- MPI_Allreduce(&fiberDimLocal, &fiberDim, 1, MPI_INT, MPI_MAX,
- field.mesh().comm());
- assert(fiberDim > 0);
-
- PetscErrorCode err = 0;
-
- if (_timesteps.find(field.label()) == _timesteps.end())
- _timesteps[field.label()] = 0;
- else
- _timesteps[field.label()] += 1;
- const int istep = _timesteps[field.label()];
-
-#if 1
- field.view("writeVertexField");
- VecView(vector, PETSC_VIEWER_STDOUT_WORLD);
-#endif
-
- // Set temporary block size that matches fiber dimension for output.
- int blockSize = 0;
- err = VecGetBlockSize(vector, &blockSize); CHECK_PETSC_ERROR(err);
- err = VecSetBlockSize(vector, fiberDim); CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5PushGroup(_viewer, "/vertex_fields");
- CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5SetTimestep(_viewer, istep); CHECK_PETSC_ERROR(err);
- err = VecView(vector, _viewer); CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
- err = VecSetBlockSize(vector, blockSize); CHECK_PETSC_ERROR(err);
-
- } catch (const std::exception& err) {
- std::ostringstream msg;
- msg << "Error while writing field '" << field.label() << "' at time "
- << t << " to HDF5 file '" << _filename << "'.\n" << err.what();
- throw std::runtime_error(msg.str());
- } catch (...) {
- std::ostringstream msg;
- msg << "Error while writing field '" << field.label() << "' at time "
- << t << " to HDF5 file '" << _filename << "'.\n";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // writeVertexField
-
-// ----------------------------------------------------------------------
-// Write field over cells to file.
-template<typename mesh_type, typename field_type>
-void
-pylith::meshio::DataWriterHDF5<mesh_type,field_type>::writeCellField(
- const double t,
- field_type& field,
- const char* label,
- const int labelId)
-{ // writeCellField
- typedef typename mesh_type::SieveMesh::numbering_type numbering_type;
-
- assert(_viewer);
-
- try {
- const char* context = DataWriter<mesh_type, field_type>::_context.c_str();
-
- const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
- field.mesh().sieveMesh();
- assert(!sieveMesh.isNull());
- const int cellDepth = (sieveMesh->depth() == -1) ? -1 : 1;
- const int depth = (0 == label) ? cellDepth : labelId;
- const std::string labelName = (0 == label) ?
- ((sieveMesh->hasLabel("censored depth")) ?
- "censored depth" : "depth") : label;
- assert(!sieveMesh->getFactory().isNull());
- const ALE::Obj<typename mesh_type::SieveMesh::numbering_type>& numbering =
- sieveMesh->getFactory()->getNumbering(sieveMesh, labelName, depth);
- assert(!numbering.isNull());
- field.createScatterWithBC(numbering, context);
- field.scatterSectionToVector(context);
- PetscVec vector = field.vector(context);
- assert(vector);
-
- const ALE::Obj<typename mesh_type::RealSection>& section = field.section();
- assert(!section.isNull());
- assert(!sieveMesh->getLabelStratum(labelName, depth).isNull());
- int fiberDimLocal =
- (sieveMesh->getLabelStratum(labelName, depth)->size() > 0) ?
- section->getFiberDimension(*sieveMesh->getLabelStratum(labelName, depth)->begin()) : 0;
- int fiberDim = 0;
- MPI_Allreduce(&fiberDimLocal, &fiberDim, 1, MPI_INT, MPI_MAX,
- field.mesh().comm());
- assert(fiberDim > 0);
-
- PetscErrorCode err = 0;
-
- if (_timesteps.find(field.label()) == _timesteps.end())
- _timesteps[field.label()] = 0;
- else
- _timesteps[field.label()] += 1;
- const int istep = _timesteps[field.label()];
-
- // Set temporary block size that matches fiber dimension for output.
- int blockSize = 0;
- err = VecGetBlockSize(vector, &blockSize); CHECK_PETSC_ERROR(err);
- err = VecSetBlockSize(vector, fiberDim); CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5PushGroup(_viewer, "/cell_fields");
- CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5SetTimestep(_viewer, istep); CHECK_PETSC_ERROR(err);
- err = VecView(vector, _viewer); CHECK_PETSC_ERROR(err);
- err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
- err = VecSetBlockSize(vector, blockSize); CHECK_PETSC_ERROR(err);
-
- } catch (const std::exception& err) {
- std::ostringstream msg;
- msg << "Error while writing field '" << field.label() << "' at time "
- << t << " to HDF5 file '" << _filename << "'.\n" << err.what();
- throw std::runtime_error(msg.str());
- } catch (...) {
- std::ostringstream msg;
- msg << "Error while writing field '" << field.label() << "' at time "
- << t << " to HDF5 file '" << _filename << "'.\n";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // writeCellField
-
-// ----------------------------------------------------------------------
-// Generate filename for HDF5 file.
-template<typename mesh_type, typename field_type>
-std::string
-pylith::meshio::DataWriterHDF5<mesh_type,field_type>::_hdf5Filename(void) const
-{ // _hdf5Filename
- std::ostringstream filename;
- const int indexExt = _filename.find(".h5");
- const int numTimeSteps = DataWriter<mesh_type, field_type>::_numTimeSteps;
- if (0 == numTimeSteps) {
- filename << std::string(_filename, 0, indexExt) << "_info.h5";
- } else {
- filename << _filename;
- } // if/else
-
- return std::string(filename.str());
-} // _hdf5Filename
-
-
-// End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc (from rev 18402, short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc)
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc (rev 0)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/DataWriterHDF5.cc 2011-05-21 13:46:30 UTC (rev 18406)
@@ -0,0 +1,393 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+#include <portinfo>
+
+#include <cassert> // USES assert()
+#include <sstream> // USES std::ostringstream
+#include <stdexcept> // USES std::runtime_error
+
+// ----------------------------------------------------------------------
+// Constructor
+template<typename mesh_type, typename field_type>
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::DataWriterHDF5(void) :
+ _filename("output.h5"),
+ _viewer(0)
+{ // constructor
+} // constructor
+
+// ----------------------------------------------------------------------
+// Destructor
+template<typename mesh_type, typename field_type>
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::~DataWriterHDF5(void)
+{ // destructor
+ deallocate();
+} // destructor
+
+// ----------------------------------------------------------------------
+// Deallocate PETSc and local data structures.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type, field_type>::deallocate(void)
+{ // deallocate
+ if (_viewer) {
+ PetscErrorCode err = PetscViewerDestroy(&_viewer); CHECK_PETSC_ERROR(err);
+ } // if
+ _viewer = 0;
+} // deallocate
+
+// ----------------------------------------------------------------------
+// Copy constructor.
+template<typename mesh_type, typename field_type>
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::DataWriterHDF5(const DataWriterHDF5<mesh_type, field_type>& w) :
+ DataWriter<mesh_type, field_type>(w),
+ _filename(w._filename),
+ _viewer(0)
+{ // copy constructor
+} // copy constructor
+
+// ----------------------------------------------------------------------
+// Prepare file for data at a new time step.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::open(const mesh_type& mesh,
+ const int numTimeSteps,
+ const char* label,
+ const int labelId)
+{ // open
+ typedef typename mesh_type::SieveMesh SieveMesh;
+ typedef typename mesh_type::SieveMesh::label_sequence label_sequence;
+ typedef typename mesh_type::SieveMesh::numbering_type numbering_type;
+ typedef typename mesh_type::SieveMesh::sieve_type sieve_type;
+
+ DataWriter<mesh_type, field_type>::open(mesh, numTimeSteps, label, labelId);
+
+ try {
+ PetscErrorCode err = 0;
+
+ const std::string& filename = _hdf5Filename();
+
+ _timesteps.clear();
+
+ err = PetscViewerHDF5Open(mesh.comm(), filename.c_str(), FILE_MODE_WRITE,
+ &_viewer);
+ CHECK_PETSC_ERROR(err);
+
+ const ALE::Obj<SieveMesh>& sieveMesh = mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+ const ALE::Obj<typename mesh_type::RealSection>& coordinatesSection =
+ sieveMesh->hasRealSection("coordinates_dimensioned") ?
+ sieveMesh->getRealSection("coordinates_dimensioned") :
+ sieveMesh->getRealSection("coordinates");
+ assert(!coordinatesSection.isNull());
+ const spatialdata::geocoords::CoordSys* cs = mesh.coordsys();
+ assert(cs);
+ topology::FieldBase::Metadata metadata;
+ // :KLUDGE: We would like to use field_type for the coordinates
+ // field. However, the mesh coordinates are Field<mesh_type> and
+ // field_type can be Field<Mesh> (e.g., displacement field over a
+ // SubMesh).
+ const char* context = DataWriter<mesh_type, field_type>::_context.c_str();
+ topology::Field<mesh_type> coordinates(mesh, coordinatesSection, metadata);
+ coordinates.label("vertices");
+ ALE::Obj<numbering_type> vNumbering =
+ sieveMesh->hasLabel("censored depth") ?
+ sieveMesh->getFactory()->getNumbering(sieveMesh, "censored depth", 0) :
+ sieveMesh->getFactory()->getNumbering(sieveMesh, 0);
+ assert(!vNumbering.isNull());
+ //vNumbering->view("VERTEX NUMBERING");
+
+ coordinates.createScatterWithBC(vNumbering, context);
+ coordinates.scatterSectionToVector(context);
+ PetscVec coordinatesVector = coordinates.vector(context);
+ assert(coordinatesVector);
+ int blockSize = 1;
+ err = VecGetBlockSize(coordinatesVector, &blockSize); // get block size
+ CHECK_PETSC_ERROR(err);
+ err = VecSetBlockSize(coordinatesVector, cs->spaceDim()); // bs for output
+ CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5PushGroup(_viewer, "/geometry"); CHECK_PETSC_ERROR(err);
+ err = VecView(coordinatesVector, _viewer);CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
+ err = VecSetBlockSize(coordinatesVector, blockSize); // reset block size
+ CHECK_PETSC_ERROR(err);
+
+ // Account for censored cells
+ const int cellDepth = (sieveMesh->depth() == -1) ? -1 : 1;
+ const int depth = (0 == label) ? cellDepth : labelId;
+ const std::string labelName = (0 == label) ?
+ ((sieveMesh->hasLabel("censored depth")) ?
+ "censored depth" : "depth") : label;
+ assert(!sieveMesh->getFactory().isNull());
+ const ALE::Obj<numbering_type>& cNumbering =
+ sieveMesh->getFactory()->getNumbering(sieveMesh, labelName, depth);
+ assert(!cNumbering.isNull());
+ const ALE::Obj<label_sequence>& cells =
+ sieveMesh->getLabelStratum(labelName, depth);
+ assert(!cells.isNull());
+ int numCornersLocal = 0;
+ if (cells->size() > 0)
+ numCornersLocal = sieveMesh->getNumCellCorners(*cells->begin());
+ int numCorners = numCornersLocal;
+ err = MPI_Allreduce(&numCornersLocal, &numCorners, 1, MPI_INT, MPI_MAX,
+ sieveMesh->comm()); CHECK_PETSC_ERROR(err);
+
+ const int ncells = cNumbering->getLocalSize();
+ const int conesSize = ncells*numCorners;
+ PetscScalar* tmpVertices = (conesSize > 0) ? new PetscScalar[conesSize] : 0;
+
+ const Obj<sieve_type>& sieve = sieveMesh->getSieve();
+ assert(!sieve.isNull());
+ ALE::ISieveVisitor::NConeRetriever<sieve_type>
+ ncV(*sieve, (size_t) pow((double) sieve->getMaxConeSize(),
+ std::max(0, sieveMesh->depth())));
+
+ int k = 0;
+ const typename label_sequence::const_iterator cellsEnd = cells->end();
+ for (typename label_sequence::iterator c_iter=cells->begin();
+ c_iter != cellsEnd;
+ ++c_iter)
+ if (cNumbering->isLocal(*c_iter)) {
+ ncV.clear();
+ ALE::ISieveTraversal<sieve_type>::orientedClosure(*sieve, *c_iter, ncV);
+ const typename ALE::ISieveVisitor::NConeRetriever<sieve_type>::oriented_point_type* cone =
+ ncV.getOrientedPoints();
+ const int coneSize = ncV.getOrientedSize();
+ for (int c=0; c < coneSize; ++c)
+ tmpVertices[k++] = vNumbering->getIndex(cone[c].first);
+ } // if
+
+ PetscVec elemVec;
+ err = VecCreateMPIWithArray(sieveMesh->comm(), conesSize, PETSC_DETERMINE,
+ tmpVertices, &elemVec); CHECK_PETSC_ERROR(err);
+ err = PetscObjectSetName((PetscObject) elemVec,
+ "cells");CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5PushGroup(_viewer, "/topology"); CHECK_PETSC_ERROR(err);
+ err = VecSetBlockSize(elemVec, numCorners); CHECK_PETSC_ERROR(err);
+ err = VecView(elemVec, _viewer); CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
+ err = VecDestroy(&elemVec); CHECK_PETSC_ERROR(err);
+ delete[] tmpVertices; tmpVertices = 0;
+ } catch (const std::exception& err) {
+ std::ostringstream msg;
+ msg << "Error while opening HDF5 file "
+ << _filename << ".\n" << err.what();
+ throw std::runtime_error(msg.str());
+ } catch (...) {
+ std::ostringstream msg;
+ msg << "Unknown error while opening HDF5 file "
+ << _filename << ".\n";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // openTimeStep
+
+// ----------------------------------------------------------------------
+// Close output files.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::close(void)
+{ // close
+ if (_viewer) {
+ PetscViewerDestroy(&_viewer);
+ } // if
+ _viewer = 0;
+ _timesteps.clear();
+
+#if 0
+ Xdmf metafile;
+ const int indexExt = _filename.find(".h5");
+ std::string xdmfFilename = std::string(_filename, 0, indexExt) + ".xmf";
+ metadata.write(xdmfFilename, _hdfFilename());
+#endif
+} // close
+
+// ----------------------------------------------------------------------
+// Write field over vertices to file.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::writeVertexField(
+ const double t,
+ field_type& field,
+ const mesh_type& mesh)
+{ // writeVertexField
+ typedef typename mesh_type::SieveMesh::numbering_type numbering_type;
+
+ assert(_viewer);
+
+ try {
+ const char* context = DataWriter<mesh_type, field_type>::_context.c_str();
+
+ const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh = mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+ ALE::Obj<numbering_type> vNumbering =
+ sieveMesh->hasLabel("censored depth") ?
+ sieveMesh->getFactory()->getNumbering(sieveMesh, "censored depth", 0) :
+ sieveMesh->getFactory()->getNumbering(sieveMesh, 0);
+ assert(!vNumbering.isNull());
+ field.createScatterWithBC(vNumbering, context);
+ field.scatterSectionToVector(context);
+ PetscVec vector = field.vector(context);
+ assert(vector);
+
+ const ALE::Obj<typename mesh_type::RealSection>& section = field.section();
+ assert(!section.isNull());
+ const std::string labelName =
+ (sieveMesh->hasLabel("censored depth")) ? "censored depth" : "depth";
+ assert(!sieveMesh->getLabelStratum(labelName, 0).isNull());
+ int fiberDimLocal =
+ (sieveMesh->getLabelStratum(labelName, 0)->size() > 0) ?
+ section->getFiberDimension(*sieveMesh->getLabelStratum(labelName, 0)->begin()) : 0;
+ int fiberDim = 0;
+ MPI_Allreduce(&fiberDimLocal, &fiberDim, 1, MPI_INT, MPI_MAX,
+ field.mesh().comm());
+ assert(fiberDim > 0);
+
+ PetscErrorCode err = 0;
+
+ if (_timesteps.find(field.label()) == _timesteps.end())
+ _timesteps[field.label()] = 0;
+ else
+ _timesteps[field.label()] += 1;
+ const int istep = _timesteps[field.label()];
+
+#if 0 // debugging
+ field.view("writeVertexField");
+ VecView(vector, PETSC_VIEWER_STDOUT_WORLD);
+#endif
+
+ // Set temporary block size that matches fiber dimension for output.
+ int blockSize = 0;
+ err = VecGetBlockSize(vector, &blockSize); CHECK_PETSC_ERROR(err);
+ err = VecSetBlockSize(vector, fiberDim); CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5PushGroup(_viewer, "/vertex_fields");
+ CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5SetTimestep(_viewer, istep); CHECK_PETSC_ERROR(err);
+ err = VecView(vector, _viewer); CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
+ err = VecSetBlockSize(vector, blockSize); CHECK_PETSC_ERROR(err);
+
+ } catch (const std::exception& err) {
+ std::ostringstream msg;
+ msg << "Error while writing field '" << field.label() << "' at time "
+ << t << " to HDF5 file '" << _filename << "'.\n" << err.what();
+ throw std::runtime_error(msg.str());
+ } catch (...) {
+ std::ostringstream msg;
+ msg << "Error while writing field '" << field.label() << "' at time "
+ << t << " to HDF5 file '" << _filename << "'.\n";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // writeVertexField
+
+// ----------------------------------------------------------------------
+// Write field over cells to file.
+template<typename mesh_type, typename field_type>
+void
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::writeCellField(
+ const double t,
+ field_type& field,
+ const char* label,
+ const int labelId)
+{ // writeCellField
+ typedef typename mesh_type::SieveMesh::numbering_type numbering_type;
+
+ assert(_viewer);
+
+ try {
+ const char* context = DataWriter<mesh_type, field_type>::_context.c_str();
+
+ const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
+ field.mesh().sieveMesh();
+ assert(!sieveMesh.isNull());
+ const int cellDepth = (sieveMesh->depth() == -1) ? -1 : 1;
+ const int depth = (0 == label) ? cellDepth : labelId;
+ const std::string labelName = (0 == label) ?
+ ((sieveMesh->hasLabel("censored depth")) ?
+ "censored depth" : "depth") : label;
+ assert(!sieveMesh->getFactory().isNull());
+ const ALE::Obj<typename mesh_type::SieveMesh::numbering_type>& numbering =
+ sieveMesh->getFactory()->getNumbering(sieveMesh, labelName, depth);
+ assert(!numbering.isNull());
+ field.createScatterWithBC(numbering, context);
+ field.scatterSectionToVector(context);
+ PetscVec vector = field.vector(context);
+ assert(vector);
+
+ const ALE::Obj<typename mesh_type::RealSection>& section = field.section();
+ assert(!section.isNull());
+ assert(!sieveMesh->getLabelStratum(labelName, depth).isNull());
+ int fiberDimLocal =
+ (sieveMesh->getLabelStratum(labelName, depth)->size() > 0) ?
+ section->getFiberDimension(*sieveMesh->getLabelStratum(labelName, depth)->begin()) : 0;
+ int fiberDim = 0;
+ MPI_Allreduce(&fiberDimLocal, &fiberDim, 1, MPI_INT, MPI_MAX,
+ field.mesh().comm());
+ assert(fiberDim > 0);
+
+ PetscErrorCode err = 0;
+
+ if (_timesteps.find(field.label()) == _timesteps.end())
+ _timesteps[field.label()] = 0;
+ else
+ _timesteps[field.label()] += 1;
+ const int istep = _timesteps[field.label()];
+
+ // Set temporary block size that matches fiber dimension for output.
+ int blockSize = 0;
+ err = VecGetBlockSize(vector, &blockSize); CHECK_PETSC_ERROR(err);
+ err = VecSetBlockSize(vector, fiberDim); CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5PushGroup(_viewer, "/cell_fields");
+ CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5SetTimestep(_viewer, istep); CHECK_PETSC_ERROR(err);
+ err = VecView(vector, _viewer); CHECK_PETSC_ERROR(err);
+ err = PetscViewerHDF5PopGroup(_viewer); CHECK_PETSC_ERROR(err);
+ err = VecSetBlockSize(vector, blockSize); CHECK_PETSC_ERROR(err);
+
+ } catch (const std::exception& err) {
+ std::ostringstream msg;
+ msg << "Error while writing field '" << field.label() << "' at time "
+ << t << " to HDF5 file '" << _filename << "'.\n" << err.what();
+ throw std::runtime_error(msg.str());
+ } catch (...) {
+ std::ostringstream msg;
+ msg << "Error while writing field '" << field.label() << "' at time "
+ << t << " to HDF5 file '" << _filename << "'.\n";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // writeCellField
+
+// ----------------------------------------------------------------------
+// Generate filename for HDF5 file.
+template<typename mesh_type, typename field_type>
+std::string
+pylith::meshio::DataWriterHDF5<mesh_type,field_type>::_hdf5Filename(void) const
+{ // _hdf5Filename
+ std::ostringstream filename;
+ const int indexExt = _filename.find(".h5");
+ const int numTimeSteps = DataWriter<mesh_type, field_type>::_numTimeSteps;
+ if (0 == numTimeSteps) {
+ filename << std::string(_filename, 0, indexExt) << "_info.h5";
+ } else {
+ filename << _filename;
+ } // if/else
+
+ return std::string(filename.str());
+} // _hdf5Filename
+
+
+// End of file
Deleted: short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc 2011-05-21 13:46:30 UTC (rev 18406)
@@ -1,879 +0,0 @@
-// -*- C++ -*-
-//
-// ======================================================================
-//
-// Brad T. Aagaard, U.S. Geological Survey
-// Charles A. Williams, GNS Science
-// Matthew G. Knepley, University of Chicago
-//
-// This code was developed as part of the Computational Infrastructure
-// for Geodynamics (http://geodynamics.org).
-//
-// Copyright (c) 2010 University of California, Davis
-//
-// See COPYING for license information.
-//
-// ======================================================================
-//
-
-#include "HDF5.hh" // implementation of class methods
-
-#include <cstring> // USES strlen()
-#include <stdexcept> // USES std::runtime_error
-#include <sstream> // USES std::ostringstream
-#include <cassert> // USES assert()
-
-#if H5_VERS_MAJOR == 1 && H5_VERS_MINOR >= 8
-#define PYLITH_HDF5_USE_API_18
-#endif
-
-// ----------------------------------------------------------------------
-// Default constructor.
-pylith::meshio::HDF5::HDF5(void) :
- _file(-1)
-{ // constructor
-} // constructor
-
-// ----------------------------------------------------------------------
-// Constructor with filename and mode.
-pylith::meshio::HDF5::HDF5(const char* filename,
- hid_t mode)
-{ // constructor
- if (H5F_ACC_TRUNC == mode) {
- _file = H5Fcreate(filename, mode, H5P_DEFAULT, H5P_DEFAULT);
- if (_file < 0) {
- std::ostringstream msg;
- msg << "Could not create HDF5 file '" << filename << "'.";
- throw std::runtime_error(msg.str());
- } // if
-
- } else {
- _file = H5Fopen(filename, mode, H5P_DEFAULT);
- if (_file < 0) {
- std::ostringstream msg;
- msg << "Could not open existing HDF5 file '" << filename << "'.";
- throw std::runtime_error(msg.str());
- } // if
- } // if/else
-} // constructor
-
-// ----------------------------------------------------------------------
-// Destructor
-pylith::meshio::HDF5::~HDF5(void)
-{ // destructor
- close();
-} // destructor
-
-// ----------------------------------------------------------------------
-// Open HDF5 file.
-void
-pylith::meshio::HDF5::open(const char* filename,
- hid_t mode)
-{ // open
- assert(filename);
-
- if (_file >= 0) {
- throw std::runtime_error("HDF5 file already open.");
- } // if
-
- if (H5F_ACC_TRUNC == mode) {
- _file = H5Fcreate(filename, mode, H5P_DEFAULT, H5P_DEFAULT);
- if (_file < 0) {
- std::ostringstream msg;
- msg << "Could not create HDF5 file '" << filename << "'.";
- throw std::runtime_error(msg.str());
- } // if
-
- } else {
- _file = H5Fopen(filename, mode, H5P_DEFAULT);
- if (_file < 0) {
- std::ostringstream msg;
- msg << "Could not open existing HDF5 file '" << filename << "'.";
- throw std::runtime_error(msg.str());
- } // if
- } // if/else
-} // constructor
-
-// ----------------------------------------------------------------------
-// Close HDF5 file.
-void
-pylith::meshio::HDF5::close(void)
-{ // close
- if (_file >= 0) {
- herr_t err = H5Fclose(_file);
- if (err < 0)
- throw std::runtime_error("Could not close HDF5 file.");
- } // if
- _file = -1;
-} // close
-
-// ----------------------------------------------------------------------
-// Check if HDF5 file is open.
-bool
-pylith::meshio::HDF5::isOpen(void) const
-{ // isOpen
- return (_file == -1) ? false : true;
-} // isOpen
-
-// ----------------------------------------------------------------------
-// Check if HDF5 file has group.
-bool
-pylith::meshio::HDF5::hasGroup(const char* name)
-{ // hasGroup
- assert(isOpen());
- assert(name);
-
- bool exists = false;
- if (H5Lexists(_file, name, H5P_DEFAULT)) {
- hid_t obj = H5Oopen(_file, name, H5P_DEFAULT);
- assert(obj >= 0);
- H5O_info_t info;
- herr_t err = H5Oget_info(obj, &info);
- assert(err >= 0);
- if (H5O_TYPE_GROUP == info.type)
- exists = true;
- err = H5Oclose(obj);
- assert(err >= 0);
- } // if
-
- return exists;
-} // hasGroup
-
-// ----------------------------------------------------------------------
-// Check if HDF5 file has dataset.
-bool
-pylith::meshio::HDF5::hasDataset(const char* name)
-{ // hasDataset
- assert(isOpen());
- assert(name);
-
- bool exists = false;
- if (H5Lexists(_file, name, H5P_DEFAULT)) {
- hid_t obj = H5Oopen(_file, name, H5P_DEFAULT);
- assert(obj >= 0);
- H5O_info_t info;
- herr_t err = H5Oget_info(obj, &info);
- assert(err >= 0);
- if (H5O_TYPE_DATASET == info.type)
- exists = true;
- err = H5Oclose(obj);
- assert(err >= 0);
- } // if
-
- return exists;
-} // hasDataset
-
-// ----------------------------------------------------------------------
-// Create group.
-void
-pylith::meshio::HDF5::createGroup(const char* name)
-{ // createGroup
- assert(name);
-
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t group = H5Gcreate2(_file, name, 0, H5P_DEFAULT, H5P_DEFAULT);
-#else // depracated HDF5 1.6 API
- hid_t group = H5Gcreate(_file, name, 0);
-#endif
- if (group < 0) {
- std::ostringstream msg;
- msg << "Could not create group '" << name << "'.";
- throw std::runtime_error(msg.str());
- } // if
-
- herr_t err = H5Gclose(group);
- if (err < 0) {
- std::ostringstream msg;
- msg << "Could not close group '" << name << "'.";
- throw std::runtime_error(msg.str());
- } // if
-} // createGroup
-
-// ----------------------------------------------------------------------
-// Write scalar attribute.
-void
-pylith::meshio::HDF5::writeAttribute(const char* parent,
- const char* name,
- const void* value,
- hid_t datatype)
-{ // writeAttribute
- assert(parent);
- assert(name);
- assert(value);
-
- try {
- hid_t dataspace = H5Screate(H5S_SCALAR);
- if (dataspace < 0)
- throw std::runtime_error("Could not create dataspace for");
-
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
-#else
- hid_t dataset = H5Dopen(_file, parent);
-#endif
- if (dataset < 0)
- throw std::runtime_error("Could not open parent dataset for");
-
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t attribute = H5Acreate2(dataset, name,
- datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
-#else
- hid_t attribute = H5Acreate(dataset, name,
- datatype, dataspace, H5P_DEFAULT);
-#endif
- if (attribute < 0)
- throw std::runtime_error("Could not create");
-
- hid_t err = H5Awrite(attribute, datatype, value);
- if (err < 0)
- throw std::runtime_error("Could not write");
-
- err = H5Aclose(attribute);
- if (err < 0)
- throw std::runtime_error("Could not close");
-
- err = H5Dclose(dataset);
- if (err < 0)
- throw std::runtime_error("Could not close dataset for");
-
- err = H5Sclose(dataspace);
- if (err < 0)
- throw std::runtime_error("Could not close dataspace for");
-
- } catch (std::exception& err) {
- std::ostringstream msg;
- msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // writeAttribute
-
-// ----------------------------------------------------------------------
-// Read scalar attribute.
-void
-pylith::meshio::HDF5::readAttribute(const char* parent,
- const char* name,
- void* value,
- hid_t datatype)
-{ // readAttribute
- assert(parent);
- assert(name);
- assert(value);
-
- try {
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
-#else
- hid_t dataset = H5Dopen(_file, parent);
-#endif
- if (dataset < 0)
- throw std::runtime_error("Could not open parent dataset for");
-
- hid_t attribute = H5Aopen(dataset, name, H5P_DEFAULT);
- if (attribute < 0)
- throw std::runtime_error("Could not open");
-
- hid_t dtype = H5Aget_type(attribute);
- if (dtype < 0)
- throw std::runtime_error("Could not get datatype of");
-
- if (H5Tequal(dtype, datatype) <= 0)
- throw std::runtime_error("Wrong datatype specified for");
-
- hid_t err = H5Aread(attribute, dtype, value);
- if (err < 0)
- throw std::runtime_error("Could not read");
-
- err = H5Tclose(dtype);
- if (err < 0)
- throw std::runtime_error("Could not close datatype for");
-
- err = H5Aclose(attribute);
- if (err < 0)
- throw std::runtime_error("Could not close");
-
- err = H5Dclose(dataset);
- if (err < 0)
- throw std::runtime_error("Could not close dataset for");
-
- } catch (std::exception& err) {
- std::ostringstream msg;
- msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // readAttribute
-
-// ----------------------------------------------------------------------
-// Write string attribute.
-void
-pylith::meshio::HDF5::writeAttribute(const char* parent,
- const char* name,
- const char* value)
-{ // writeAttribute
- assert(parent);
- assert(name);
- assert(value);
-
- try {
- hid_t dataspace = H5Screate(H5S_SCALAR);
- if (dataspace < 0)
- throw std::runtime_error("Could not create dataspace for");
-
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
-#else
- hid_t dataset = H5Dopen(_file, parent);
-#endif
- if (dataset < 0)
- throw std::runtime_error("Could not open parent dataset for");
-
- hid_t datatype = H5Tcopy(H5T_C_S1);
- if (datatype < 0)
- throw std::runtime_error("Could not create datatype for");
-
- herr_t err = H5Tset_size(datatype, strlen(value)+1);
- if (err < 0)
- throw std::runtime_error("Could not set size of");
-
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t attribute = H5Acreate2(dataset, name,
- datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
-#else
- hid_t attribute = H5Acreate(dataset, name,
- datatype, dataspace, H5P_DEFAULT);
-#endif
- if (attribute < 0)
- throw std::runtime_error("Could not create");
-
- err = H5Awrite(attribute, datatype, value);
- if (err < 0)
- throw std::runtime_error("Could not write");
-
- err = H5Aclose(attribute);
- if (err < 0)
- throw std::runtime_error("Could not close");
-
- err = H5Tclose(datatype);
- if (err < 0)
- throw std::runtime_error("Could not close datatype for");
-
- err = H5Dclose(dataset);
- if (err < 0)
- throw std::runtime_error("Could not close dataset for");
-
- err = H5Sclose(dataspace);
- if (err < 0)
- throw std::runtime_error("Could not close dataspace for");
-
- } catch (std::exception& err) {
- std::ostringstream msg;
- msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // writeAttribute
-
-// ----------------------------------------------------------------------
-// Read string attribute.
-std::string
-pylith::meshio::HDF5::readAttribute(const char* parent,
- const char* name)
-{ // readAttribute
- assert(parent);
- assert(name);
-
- std::string value;
-
- try {
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
-#else
- hid_t dataset = H5Dopen(_file, parent);
-#endif
- if (dataset < 0)
- throw std::runtime_error("Could not open parent dataset for");
-
- hid_t attribute = H5Aopen(dataset, name, H5P_DEFAULT);
- if (attribute < 0)
- throw std::runtime_error("Could not open");
-
- hid_t datatype = H5Aget_type(attribute);
- if (datatype < 0)
- throw std::runtime_error("Could not get datatype of");
-
- // :TODO: Check that datatype is a string
-
- const int len = H5Tget_size(datatype);
- if (len <= 0)
- throw std::runtime_error("Nonpositive size for datatype of");
-
- char* buffer = (len > 0) ? new char[len] : 0;
-
- hid_t err = H5Aread(attribute, datatype, (void*)buffer);
- value = buffer;
- delete[] buffer; buffer = 0;
-
- if (err < 0)
- throw std::runtime_error("Could not read");
-
- err = H5Tclose(datatype);
- if (err < 0)
- throw std::runtime_error("Could not close datatype for");
-
- err = H5Aclose(attribute);
- if (err < 0)
- throw std::runtime_error("Could not close");
-
- err = H5Dclose(dataset);
- if (err < 0)
- throw std::runtime_error("Could not close dataset for");
-
- } catch (std::exception& err) {
- std::ostringstream msg;
- msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
- throw std::runtime_error(msg.str());
- } // try/catch
-
- return std::string(value);
-} // readAttribute
-
-// ----------------------------------------------------------------------
-// Create dataset.
-void
-pylith::meshio::HDF5::createDataset(const char* parent,
- const char* name,
- const hsize_t* dims,
- const hsize_t* dimsChunk,
- const int ndims,
- hid_t datatype)
-{ // createDataset
- assert(parent);
- assert(name);
- assert(dims);
-
- try {
- // Open group
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
-#else
- hid_t group = H5Gopen(_file, parent);
-#endif
- if (group < 0)
- throw std::runtime_error("Could not open group.");
-
- // Create the dataspace
- hid_t dataspace = H5Screate_simple(ndims, dims, 0);
- if (dataspace < 0)
- throw std::runtime_error("Could not create dataspace.");
-
- // Create chunked dataset
- hid_t property = H5Pcreate(H5P_DATASET_CREATE);
- if (property < 0)
- throw std::runtime_error("Could not create property for dataset.");
-
- herr_t err = H5Pset_chunk(property, ndims, dimsChunk);
- if (err < 0)
- throw std::runtime_error("Could not set chunk.");
-
- // Set gzip compression level for chunk.
- H5Pset_deflate(property, 6);
-
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t dataset = H5Dcreate2(group, name,
- datatype, dataspace, H5P_DEFAULT,
- property, H5P_DEFAULT);
-#else
- hid_t dataset = H5Dcreate(group, name,
- datatype, dataspace, property);
-#endif
- if (dataset < 0)
- throw std::runtime_error("Could not create dataset.");
-
- err = H5Dclose(dataset);
- if (err < 0)
- throw std::runtime_error("Could not close dataset.");
-
- err = H5Pclose(property);
- if (err < 0)
- throw std::runtime_error("Could not close property.");
-
- err = H5Sclose(dataspace);
- if (err < 0)
- throw std::runtime_error("Could not close dataspace.");
-
- err = H5Gclose(group);
- if (err < 0)
- throw std::runtime_error("Could not close group.");
-
- } catch (const std::exception& err) {
- std::ostringstream msg;
- msg << "Error occurred while creating dataset '"
- << parent << "/" << name << "':\n"
- << err.what();
- throw std::runtime_error(msg.str());
- } catch (...) {
- std::ostringstream msg;
- msg << "Unknown occurred while creating dataset '" << name << "'.";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // createDataset
-
-// ----------------------------------------------------------------------
-// Append slice to dataset.
-void
-pylith::meshio::HDF5::writeDatasetChunk(const char* parent,
- const char* name,
- const void* data,
- const hsize_t* dims,
- const hsize_t* dimsChunk,
- const int ndims,
- const int chunk,
- hid_t datatype)
-{ // writeDatasetSlice
- assert(parent);
- assert(name);
- assert(data);
- assert(dims);
- assert(_file > 0);
-
- try {
- // Select hyperslab in file
- hsize_t* count = (ndims > 0) ? new hsize_t[ndims] : 0;
- hsize_t* stride = (ndims > 0) ? new hsize_t[ndims] : 0;
- hsize_t* offset = (ndims > 0) ? new hsize_t[ndims] : 0;
- for (int i=0; i < ndims; ++i) {
- count[i] = 1;
- stride[i] = 1;
- offset[i] = 0;
- } // for
- offset[0] = chunk;
-
- // Open group
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
-#else
- hid_t group = H5Gopen(_file, parent);
-#endif
- if (group < 0)
- throw std::runtime_error("Could not open group.");
-
- // Open the dataset
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t dataset = H5Dopen2(group, name, H5P_DEFAULT);
-#else
- hid_t dataset = H5Dopen(group, name);
-#endif
- if (dataset < 0)
- throw std::runtime_error("Could not open dataset.");
-
- hid_t dataspace = H5Dget_space(dataset);
- if (dataspace < 0)
- throw std::runtime_error("Could not get dataspace.");
-
- hid_t chunkspace = H5Screate_simple(ndims, dimsChunk, 0);
- if (chunkspace < 0)
- throw std::runtime_error("Could not create chunk dataspace.");
-
- herr_t err = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET,
- offset, stride, count, dimsChunk);
- delete[] count; count = 0;
- delete[] stride; stride = 0;
- delete[] offset; offset = 0;
- if (err < 0)
- throw std::runtime_error("Could not select hyperslab.");
-
- err = H5Dwrite(dataset, datatype, chunkspace, dataspace,
- H5P_DEFAULT, data);
- if (err < 0)
- throw std::runtime_error("Could not write data.");
-
- err = H5Sclose(chunkspace);
- if (err < 0)
- throw std::runtime_error("Could not close chunk dataspace.");
-
- err = H5Sclose(dataspace);
- if (err < 0)
- throw std::runtime_error("Could not close dataspace.");
-
- err = H5Dclose(dataset);
- if (err < 0)
- throw std::runtime_error("Could not close dataset.");
-
- err = H5Gclose(group);
- if (err < 0)
- throw std::runtime_error("Could not close group.");
-
- } catch (const std::exception& err) {
- std::ostringstream msg;
- msg << "Error occurred while writing dataset '"
- << parent << "/" << name << "':\n"
- << err.what();
- throw std::runtime_error(msg.str());
- } catch (...) {
- std::ostringstream msg;
- msg << "Unknown occurred while writing dataset '"
- << parent << "/" << name << "'.";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // writeDatasetSlice
-
-// ----------------------------------------------------------------------
-// Read dataset slice.
-void
-pylith::meshio::HDF5::readDatasetChunk(const char* parent,
- const char* name,
- char** const data,
- hsize_t** const dims,
- int* const ndims,
- const int chunk,
- hid_t datatype)
-{ // readDatasetSlice
- assert(parent);
- assert(name);
- assert(data);
- assert(dims);
- assert(_file > 0);
-
- try {
- // Open group
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
-#else
- hid_t group = H5Gopen(_file, parent);
-#endif
- if (group < 0)
- throw std::runtime_error("Could not open group.");
-
- // Open the dataset
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t dataset = H5Dopen2(group, name, H5P_DEFAULT);
-#else
- hid_t dataset = H5Dopen(group, name);
-#endif
- if (dataset < 0)
- throw std::runtime_error("Could not open dataset.");
-
- hid_t dataspace = H5Dget_space(dataset);
- if (dataspace < 0)
- throw std::runtime_error("Could not get dataspace.");
-
- *ndims = H5Sget_simple_extent_ndims(dataspace);
- assert(*ndims > 0);
- delete[] *dims; *dims = (*ndims > 0) ? new hsize_t[*ndims] : 0;
- H5Sget_simple_extent_dims(dataspace, *dims, 0);
-
- // Select hyperslab in file
- hsize_t* dimsChunk = (*ndims > 0) ? new hsize_t[*ndims] : 0;
- hsize_t* count = (*ndims > 0) ? new hsize_t[*ndims] : 0;
- hsize_t* stride = (*ndims > 0) ? new hsize_t[*ndims] : 0;
- hsize_t* offset = (*ndims > 0) ? new hsize_t[*ndims] : 0;
-
- for (int i=0; i < *ndims; ++i) {
- dimsChunk[i] = (*dims)[i];
- count[i] = 1;
- stride[i] = 1;
- offset[i] = 0;
- } // for
- dimsChunk[0] = 1;
- offset[0] = chunk;
-
- hid_t chunkspace = H5Screate_simple(*ndims, dimsChunk, 0);
- if (chunkspace < 0)
- throw std::runtime_error("Could not create chunk dataspace.");
-
- herr_t err = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET,
- offset, stride, count, dimsChunk);
- delete[] count; count = 0;
- delete[] stride; stride = 0;
- delete[] offset; offset = 0;
- if (err < 0)
- throw std::runtime_error("Could not select hyperslab.");
-
- int sizeBytes = H5Tget_size(datatype);
- for (int i=0; i < *ndims; ++i)
- sizeBytes *= (dimsChunk)[i];
- delete[] *data; *data = (sizeBytes > 0) ? new char[sizeBytes] : 0;
- delete[] dimsChunk; dimsChunk = 0;
-
- err = H5Dread(dataset, datatype, chunkspace, dataspace,
- H5P_DEFAULT, (void*)*data);
- if (err < 0)
- throw std::runtime_error("Could not read data.");
-
- err = H5Sclose(chunkspace);
- if (err < 0)
- throw std::runtime_error("Could not close chunk dataspace.");
-
- err = H5Sclose(dataspace);
- if (err < 0)
- throw std::runtime_error("Could not close dataspace.");
-
- err = H5Dclose(dataset);
- if (err < 0)
- throw std::runtime_error("Could not close dataset.");
-
- err = H5Gclose(group);
- if (err < 0)
- throw std::runtime_error("Could not close group.");
-
- } catch (const std::exception& err) {
- std::ostringstream msg;
- msg << "Error occurred while reading dataset '"
- << parent << "/" << name << "':\n"
- << err.what();
- throw std::runtime_error(msg.str());
- } catch (...) {
- std::ostringstream msg;
- msg << "Unknown occurred while reading dataset '"
- << parent << "/" << name << "'.";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // readDatasetSlice
-
-// ----------------------------------------------------------------------
-// Create dataset associated with data stored in a raw external binary
-// file.
-void
-pylith::meshio::HDF5::createDatasetRawExternal(const char* parent,
- const char* name,
- const char* filename,
- const hsize_t* dims,
- const int ndims,
- hid_t datatype)
-{ // createDatasetRawExternal
- assert(parent);
- assert(name);
- assert(filename);
- assert(dims);
-
- try {
- // Open group
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
-#else
- hid_t group = H5Gopen(_file, parent);
-#endif
- if (group < 0)
- throw std::runtime_error("Could not open group.");
-
- // Create the dataspace
- hsize_t* maxDims = (ndims > 0) ? new hsize_t[ndims] : 0;
- maxDims[0] = H5S_UNLIMITED;
- for (int i=1; i < ndims; ++i)
- maxDims[i] = dims[i];
- hid_t dataspace = H5Screate_simple(ndims, dims, maxDims);
- if (dataspace < 0)
- throw std::runtime_error("Could not create dataspace.");
-
- // Create property for external dataset
- hid_t property = H5Pcreate(H5P_DATASET_CREATE);
- if (property < 0)
- throw std::runtime_error("Could not create property for dataset.");
-
- // Set external file
- const off_t offset = 0;
- herr_t err = H5Pset_external(property, filename, offset, H5F_UNLIMITED);
- if (err < 0)
- throw std::runtime_error("Could not set external file property.");
-
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t dataset = H5Dcreate2(group, name,
- datatype, dataspace, H5P_DEFAULT,
- property, H5P_DEFAULT);
-#else
- hid_t dataset = H5Dcreate(group, name,
- datatype, dataspace, property);
-#endif
- if (dataset < 0)
- throw std::runtime_error("Could not create dataset.");
-
- err = H5Dclose(dataset);
- if (err < 0)
- throw std::runtime_error("Could not close dataset.");
-
- err = H5Pclose(property);
- if (err < 0)
- throw std::runtime_error("Could not close property.");
-
- err = H5Sclose(dataspace);
- if (err < 0)
- throw std::runtime_error("Could not close dataspace.");
-
- err = H5Gclose(group);
- if (err < 0)
- throw std::runtime_error("Could not close group.");
-
- } catch (const std::exception& err) {
- std::ostringstream msg;
- msg << "Error occurred while creating dataset '"
- << parent << "/" << name << "':\n"
- << err.what();
- throw std::runtime_error(msg.str());
- } catch (...) {
- std::ostringstream msg;
- msg << "Unknown occurred while creating dataset '" << name << "'.";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // createDatasetRawExternal
-
-// ----------------------------------------------------------------------
-// Create dataset associated with data stored in a raw external binary
-// file.
-void
-pylith::meshio::HDF5::extendDatasetRawExternal(const char* parent,
- const char* name,
- const hsize_t* dims,
- const int ndims)
-{ // extendDatasetRawExternal
- assert(parent);
- assert(name);
- assert(dims);
-
- try {
- // Open group
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
-#else
- hid_t group = H5Gopen(_file, parent);
-#endif
- if (group < 0)
- throw std::runtime_error("Could not open group.");
-
- // Open dataset.
-#if defined(PYLITH_HDF5_USE_API_18)
- hid_t dataset = H5Dopen2(group, name, H5P_DEFAULT);
-#else
- hid_t dataset = H5Dopen(group, name);
-#endif
- if (dataset < 0)
- throw std::runtime_error("Could not open dataset.");
-
-#if defined(PYLITH_HDF5_USE_API_18)
- herr_t err = H5Dset_extent(dataset, dims);
-#else
- herr_t err = H5Dextend(dataset, dims);
-#endif
- if (err < 0)
- throw std::runtime_error("Could not set dataset extent.");
-
- err = H5Dclose(dataset);
- if (err < 0)
- throw std::runtime_error("Could not close dataset.");
-
- err = H5Gclose(group);
- if (err < 0)
- throw std::runtime_error("Could not close group.");
-
- } catch (const std::exception& err) {
- std::ostringstream msg;
- msg << "Error occurred while updating dataset '"
- << parent << "/" << name << "':\n"
- << err.what();
- throw std::runtime_error(msg.str());
- } catch (...) {
- std::ostringstream msg;
- msg << "Unknown occurred while updating dataset '" << name << "'.";
- throw std::runtime_error(msg.str());
- } // try/catch
-} // extendDatasetRawExternal
-
-
-// End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc (from rev 18405, short/3D/PyLith/trunk/libsrc/meshio/HDF5.cc)
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc (rev 0)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.cc 2011-05-21 13:46:30 UTC (rev 18406)
@@ -0,0 +1,903 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+#include "HDF5.hh" // implementation of class methods
+
+#include <cstring> // USES strlen()
+#include <stdexcept> // USES std::runtime_error
+#include <sstream> // USES std::ostringstream
+#include <cassert> // USES assert()
+
+#if H5_VERS_MAJOR == 1 && H5_VERS_MINOR >= 8
+#define PYLITH_HDF5_USE_API_18
+#endif
+
+// ----------------------------------------------------------------------
+// Default constructor.
+pylith::meshio::HDF5::HDF5(void) :
+ _file(-1)
+{ // constructor
+} // constructor
+
+// ----------------------------------------------------------------------
+// Constructor with filename and mode.
+pylith::meshio::HDF5::HDF5(const char* filename,
+ hid_t mode)
+{ // constructor
+ if (H5F_ACC_TRUNC == mode) {
+ _file = H5Fcreate(filename, mode, H5P_DEFAULT, H5P_DEFAULT);
+ if (_file < 0) {
+ std::ostringstream msg;
+ msg << "Could not create HDF5 file '" << filename << "'.";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ } else {
+ _file = H5Fopen(filename, mode, H5P_DEFAULT);
+ if (_file < 0) {
+ std::ostringstream msg;
+ msg << "Could not open existing HDF5 file '" << filename << "'.";
+ throw std::runtime_error(msg.str());
+ } // if
+ } // if/else
+} // constructor
+
+// ----------------------------------------------------------------------
+// Destructor
+pylith::meshio::HDF5::~HDF5(void)
+{ // destructor
+ close();
+} // destructor
+
+// ----------------------------------------------------------------------
+// Open HDF5 file.
+void
+pylith::meshio::HDF5::open(const char* filename,
+ hid_t mode)
+{ // open
+ assert(filename);
+
+ if (_file >= 0) {
+ throw std::runtime_error("HDF5 file already open.");
+ } // if
+
+ if (H5F_ACC_TRUNC == mode) {
+ _file = H5Fcreate(filename, mode, H5P_DEFAULT, H5P_DEFAULT);
+ if (_file < 0) {
+ std::ostringstream msg;
+ msg << "Could not create HDF5 file '" << filename << "'.";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ } else {
+ _file = H5Fopen(filename, mode, H5P_DEFAULT);
+ if (_file < 0) {
+ std::ostringstream msg;
+ msg << "Could not open existing HDF5 file '" << filename << "'.";
+ throw std::runtime_error(msg.str());
+ } // if
+ } // if/else
+} // constructor
+
+// ----------------------------------------------------------------------
+// Close HDF5 file.
+void
+pylith::meshio::HDF5::close(void)
+{ // close
+ if (_file >= 0) {
+ herr_t err = H5Fclose(_file);
+ if (err < 0)
+ throw std::runtime_error("Could not close HDF5 file.");
+ } // if
+ _file = -1;
+} // close
+
+// ----------------------------------------------------------------------
+// Check if HDF5 file is open.
+bool
+pylith::meshio::HDF5::isOpen(void) const
+{ // isOpen
+ return (_file == -1) ? false : true;
+} // isOpen
+
+// ----------------------------------------------------------------------
+// Check if HDF5 file has group.
+bool
+pylith::meshio::HDF5::hasGroup(const char* name)
+{ // hasGroup
+ assert(isOpen());
+ assert(name);
+
+ bool exists = false;
+ if (H5Lexists(_file, name, H5P_DEFAULT)) {
+ hid_t obj = H5Oopen(_file, name, H5P_DEFAULT);
+ assert(obj >= 0);
+ H5O_info_t info;
+ herr_t err = H5Oget_info(obj, &info);
+ assert(err >= 0);
+ if (H5O_TYPE_GROUP == info.type)
+ exists = true;
+ err = H5Oclose(obj);
+ assert(err >= 0);
+ } // if
+
+ return exists;
+} // hasGroup
+
+// ----------------------------------------------------------------------
+// Check if HDF5 file has dataset.
+bool
+pylith::meshio::HDF5::hasDataset(const char* name)
+{ // hasDataset
+ assert(isOpen());
+ assert(name);
+
+ bool exists = false;
+ if (H5Lexists(_file, name, H5P_DEFAULT)) {
+ hid_t obj = H5Oopen(_file, name, H5P_DEFAULT);
+ assert(obj >= 0);
+ H5O_info_t info;
+ herr_t err = H5Oget_info(obj, &info);
+ assert(err >= 0);
+ if (H5O_TYPE_DATASET == info.type)
+ exists = true;
+ err = H5Oclose(obj);
+ assert(err >= 0);
+ } // if
+
+ return exists;
+} // hasDataset
+
+// ----------------------------------------------------------------------
+// Get topology metadata.
+void
+pylith::meshio::HDF5::getTopologyMetadata(int* numCells,
+ int* numCorners,
+ std::string* cellType)
+{ // getTopologyMetadata
+} // getTopologyMetadata
+
+// ----------------------------------------------------------------------
+// Get geometry metadata.
+void
+pylith::meshio::HDF5::getGeometryMetadata(int* numVertices,
+ int* spaceDim)
+{ // getGeometryMetadata
+} // getGeometryMetadata
+
+// ----------------------------------------------------------------------
+// Get metadata for fields.
+void
+pylith::meshio::HDF5::getFieldsMetadata(std::vector<FieldMetadata>* metadata)
+{ // getFieldsMetadata
+} // getFieldsMetadata
+
+// ----------------------------------------------------------------------
+// Create group.
+void
+pylith::meshio::HDF5::createGroup(const char* name)
+{ // createGroup
+ assert(name);
+
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t group = H5Gcreate2(_file, name, 0, H5P_DEFAULT, H5P_DEFAULT);
+#else // depracated HDF5 1.6 API
+ hid_t group = H5Gcreate(_file, name, 0);
+#endif
+ if (group < 0) {
+ std::ostringstream msg;
+ msg << "Could not create group '" << name << "'.";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ herr_t err = H5Gclose(group);
+ if (err < 0) {
+ std::ostringstream msg;
+ msg << "Could not close group '" << name << "'.";
+ throw std::runtime_error(msg.str());
+ } // if
+} // createGroup
+
+// ----------------------------------------------------------------------
+// Write scalar attribute.
+void
+pylith::meshio::HDF5::writeAttribute(const char* parent,
+ const char* name,
+ const void* value,
+ hid_t datatype)
+{ // writeAttribute
+ assert(parent);
+ assert(name);
+ assert(value);
+
+ try {
+ hid_t dataspace = H5Screate(H5S_SCALAR);
+ if (dataspace < 0)
+ throw std::runtime_error("Could not create dataspace for");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
+#else
+ hid_t dataset = H5Dopen(_file, parent);
+#endif
+ if (dataset < 0)
+ throw std::runtime_error("Could not open parent dataset for");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t attribute = H5Acreate2(dataset, name,
+ datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+#else
+ hid_t attribute = H5Acreate(dataset, name,
+ datatype, dataspace, H5P_DEFAULT);
+#endif
+ if (attribute < 0)
+ throw std::runtime_error("Could not create");
+
+ hid_t err = H5Awrite(attribute, datatype, value);
+ if (err < 0)
+ throw std::runtime_error("Could not write");
+
+ err = H5Aclose(attribute);
+ if (err < 0)
+ throw std::runtime_error("Could not close");
+
+ err = H5Dclose(dataset);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataset for");
+
+ err = H5Sclose(dataspace);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataspace for");
+
+ } catch (std::exception& err) {
+ std::ostringstream msg;
+ msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // writeAttribute
+
+// ----------------------------------------------------------------------
+// Read scalar attribute.
+void
+pylith::meshio::HDF5::readAttribute(const char* parent,
+ const char* name,
+ void* value,
+ hid_t datatype)
+{ // readAttribute
+ assert(parent);
+ assert(name);
+ assert(value);
+
+ try {
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
+#else
+ hid_t dataset = H5Dopen(_file, parent);
+#endif
+ if (dataset < 0)
+ throw std::runtime_error("Could not open parent dataset for");
+
+ hid_t attribute = H5Aopen(dataset, name, H5P_DEFAULT);
+ if (attribute < 0)
+ throw std::runtime_error("Could not open");
+
+ hid_t dtype = H5Aget_type(attribute);
+ if (dtype < 0)
+ throw std::runtime_error("Could not get datatype of");
+
+ if (H5Tequal(dtype, datatype) <= 0)
+ throw std::runtime_error("Wrong datatype specified for");
+
+ hid_t err = H5Aread(attribute, dtype, value);
+ if (err < 0)
+ throw std::runtime_error("Could not read");
+
+ err = H5Tclose(dtype);
+ if (err < 0)
+ throw std::runtime_error("Could not close datatype for");
+
+ err = H5Aclose(attribute);
+ if (err < 0)
+ throw std::runtime_error("Could not close");
+
+ err = H5Dclose(dataset);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataset for");
+
+ } catch (std::exception& err) {
+ std::ostringstream msg;
+ msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // readAttribute
+
+// ----------------------------------------------------------------------
+// Write string attribute.
+void
+pylith::meshio::HDF5::writeAttribute(const char* parent,
+ const char* name,
+ const char* value)
+{ // writeAttribute
+ assert(parent);
+ assert(name);
+ assert(value);
+
+ try {
+ hid_t dataspace = H5Screate(H5S_SCALAR);
+ if (dataspace < 0)
+ throw std::runtime_error("Could not create dataspace for");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
+#else
+ hid_t dataset = H5Dopen(_file, parent);
+#endif
+ if (dataset < 0)
+ throw std::runtime_error("Could not open parent dataset for");
+
+ hid_t datatype = H5Tcopy(H5T_C_S1);
+ if (datatype < 0)
+ throw std::runtime_error("Could not create datatype for");
+
+ herr_t err = H5Tset_size(datatype, strlen(value)+1);
+ if (err < 0)
+ throw std::runtime_error("Could not set size of");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t attribute = H5Acreate2(dataset, name,
+ datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+#else
+ hid_t attribute = H5Acreate(dataset, name,
+ datatype, dataspace, H5P_DEFAULT);
+#endif
+ if (attribute < 0)
+ throw std::runtime_error("Could not create");
+
+ err = H5Awrite(attribute, datatype, value);
+ if (err < 0)
+ throw std::runtime_error("Could not write");
+
+ err = H5Aclose(attribute);
+ if (err < 0)
+ throw std::runtime_error("Could not close");
+
+ err = H5Tclose(datatype);
+ if (err < 0)
+ throw std::runtime_error("Could not close datatype for");
+
+ err = H5Dclose(dataset);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataset for");
+
+ err = H5Sclose(dataspace);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataspace for");
+
+ } catch (std::exception& err) {
+ std::ostringstream msg;
+ msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // writeAttribute
+
+// ----------------------------------------------------------------------
+// Read string attribute.
+std::string
+pylith::meshio::HDF5::readAttribute(const char* parent,
+ const char* name)
+{ // readAttribute
+ assert(parent);
+ assert(name);
+
+ std::string value;
+
+ try {
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t dataset = H5Dopen2(_file, parent, H5P_DEFAULT);
+#else
+ hid_t dataset = H5Dopen(_file, parent);
+#endif
+ if (dataset < 0)
+ throw std::runtime_error("Could not open parent dataset for");
+
+ hid_t attribute = H5Aopen(dataset, name, H5P_DEFAULT);
+ if (attribute < 0)
+ throw std::runtime_error("Could not open");
+
+ hid_t datatype = H5Aget_type(attribute);
+ if (datatype < 0)
+ throw std::runtime_error("Could not get datatype of");
+
+ // :TODO: Check that datatype is a string
+
+ const int len = H5Tget_size(datatype);
+ if (len <= 0)
+ throw std::runtime_error("Nonpositive size for datatype of");
+
+ char* buffer = (len > 0) ? new char[len] : 0;
+
+ hid_t err = H5Aread(attribute, datatype, (void*)buffer);
+ value = buffer;
+ delete[] buffer; buffer = 0;
+
+ if (err < 0)
+ throw std::runtime_error("Could not read");
+
+ err = H5Tclose(datatype);
+ if (err < 0)
+ throw std::runtime_error("Could not close datatype for");
+
+ err = H5Aclose(attribute);
+ if (err < 0)
+ throw std::runtime_error("Could not close");
+
+ err = H5Dclose(dataset);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataset for");
+
+ } catch (std::exception& err) {
+ std::ostringstream msg;
+ msg << err.what() << " attribute '" << name << "' of '" << parent << "'.";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+
+ return std::string(value);
+} // readAttribute
+
+// ----------------------------------------------------------------------
+// Create dataset.
+void
+pylith::meshio::HDF5::createDataset(const char* parent,
+ const char* name,
+ const hsize_t* dims,
+ const hsize_t* dimsChunk,
+ const int ndims,
+ hid_t datatype)
+{ // createDataset
+ assert(parent);
+ assert(name);
+ assert(dims);
+
+ try {
+ // Open group
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
+#else
+ hid_t group = H5Gopen(_file, parent);
+#endif
+ if (group < 0)
+ throw std::runtime_error("Could not open group.");
+
+ // Create the dataspace
+ hid_t dataspace = H5Screate_simple(ndims, dims, 0);
+ if (dataspace < 0)
+ throw std::runtime_error("Could not create dataspace.");
+
+ // Create chunked dataset
+ hid_t property = H5Pcreate(H5P_DATASET_CREATE);
+ if (property < 0)
+ throw std::runtime_error("Could not create property for dataset.");
+
+ herr_t err = H5Pset_chunk(property, ndims, dimsChunk);
+ if (err < 0)
+ throw std::runtime_error("Could not set chunk.");
+
+ // Set gzip compression level for chunk.
+ H5Pset_deflate(property, 6);
+
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t dataset = H5Dcreate2(group, name,
+ datatype, dataspace, H5P_DEFAULT,
+ property, H5P_DEFAULT);
+#else
+ hid_t dataset = H5Dcreate(group, name,
+ datatype, dataspace, property);
+#endif
+ if (dataset < 0)
+ throw std::runtime_error("Could not create dataset.");
+
+ err = H5Dclose(dataset);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataset.");
+
+ err = H5Pclose(property);
+ if (err < 0)
+ throw std::runtime_error("Could not close property.");
+
+ err = H5Sclose(dataspace);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataspace.");
+
+ err = H5Gclose(group);
+ if (err < 0)
+ throw std::runtime_error("Could not close group.");
+
+ } catch (const std::exception& err) {
+ std::ostringstream msg;
+ msg << "Error occurred while creating dataset '"
+ << parent << "/" << name << "':\n"
+ << err.what();
+ throw std::runtime_error(msg.str());
+ } catch (...) {
+ std::ostringstream msg;
+ msg << "Unknown occurred while creating dataset '" << name << "'.";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // createDataset
+
+// ----------------------------------------------------------------------
+// Append slice to dataset.
+void
+pylith::meshio::HDF5::writeDatasetChunk(const char* parent,
+ const char* name,
+ const void* data,
+ const hsize_t* dims,
+ const hsize_t* dimsChunk,
+ const int ndims,
+ const int chunk,
+ hid_t datatype)
+{ // writeDatasetSlice
+ assert(parent);
+ assert(name);
+ assert(data);
+ assert(dims);
+ assert(_file > 0);
+
+ try {
+ // Select hyperslab in file
+ hsize_t* count = (ndims > 0) ? new hsize_t[ndims] : 0;
+ hsize_t* stride = (ndims > 0) ? new hsize_t[ndims] : 0;
+ hsize_t* offset = (ndims > 0) ? new hsize_t[ndims] : 0;
+ for (int i=0; i < ndims; ++i) {
+ count[i] = 1;
+ stride[i] = 1;
+ offset[i] = 0;
+ } // for
+ offset[0] = chunk;
+
+ // Open group
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
+#else
+ hid_t group = H5Gopen(_file, parent);
+#endif
+ if (group < 0)
+ throw std::runtime_error("Could not open group.");
+
+ // Open the dataset
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t dataset = H5Dopen2(group, name, H5P_DEFAULT);
+#else
+ hid_t dataset = H5Dopen(group, name);
+#endif
+ if (dataset < 0)
+ throw std::runtime_error("Could not open dataset.");
+
+ hid_t dataspace = H5Dget_space(dataset);
+ if (dataspace < 0)
+ throw std::runtime_error("Could not get dataspace.");
+
+ hid_t chunkspace = H5Screate_simple(ndims, dimsChunk, 0);
+ if (chunkspace < 0)
+ throw std::runtime_error("Could not create chunk dataspace.");
+
+ herr_t err = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET,
+ offset, stride, count, dimsChunk);
+ delete[] count; count = 0;
+ delete[] stride; stride = 0;
+ delete[] offset; offset = 0;
+ if (err < 0)
+ throw std::runtime_error("Could not select hyperslab.");
+
+ err = H5Dwrite(dataset, datatype, chunkspace, dataspace,
+ H5P_DEFAULT, data);
+ if (err < 0)
+ throw std::runtime_error("Could not write data.");
+
+ err = H5Sclose(chunkspace);
+ if (err < 0)
+ throw std::runtime_error("Could not close chunk dataspace.");
+
+ err = H5Sclose(dataspace);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataspace.");
+
+ err = H5Dclose(dataset);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataset.");
+
+ err = H5Gclose(group);
+ if (err < 0)
+ throw std::runtime_error("Could not close group.");
+
+ } catch (const std::exception& err) {
+ std::ostringstream msg;
+ msg << "Error occurred while writing dataset '"
+ << parent << "/" << name << "':\n"
+ << err.what();
+ throw std::runtime_error(msg.str());
+ } catch (...) {
+ std::ostringstream msg;
+ msg << "Unknown occurred while writing dataset '"
+ << parent << "/" << name << "'.";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // writeDatasetSlice
+
+// ----------------------------------------------------------------------
+// Read dataset slice.
+void
+pylith::meshio::HDF5::readDatasetChunk(const char* parent,
+ const char* name,
+ char** const data,
+ hsize_t** const dims,
+ int* const ndims,
+ const int chunk,
+ hid_t datatype)
+{ // readDatasetSlice
+ assert(parent);
+ assert(name);
+ assert(data);
+ assert(dims);
+ assert(_file > 0);
+
+ try {
+ // Open group
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
+#else
+ hid_t group = H5Gopen(_file, parent);
+#endif
+ if (group < 0)
+ throw std::runtime_error("Could not open group.");
+
+ // Open the dataset
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t dataset = H5Dopen2(group, name, H5P_DEFAULT);
+#else
+ hid_t dataset = H5Dopen(group, name);
+#endif
+ if (dataset < 0)
+ throw std::runtime_error("Could not open dataset.");
+
+ hid_t dataspace = H5Dget_space(dataset);
+ if (dataspace < 0)
+ throw std::runtime_error("Could not get dataspace.");
+
+ *ndims = H5Sget_simple_extent_ndims(dataspace);
+ assert(*ndims > 0);
+ delete[] *dims; *dims = (*ndims > 0) ? new hsize_t[*ndims] : 0;
+ H5Sget_simple_extent_dims(dataspace, *dims, 0);
+
+ // Select hyperslab in file
+ hsize_t* dimsChunk = (*ndims > 0) ? new hsize_t[*ndims] : 0;
+ hsize_t* count = (*ndims > 0) ? new hsize_t[*ndims] : 0;
+ hsize_t* stride = (*ndims > 0) ? new hsize_t[*ndims] : 0;
+ hsize_t* offset = (*ndims > 0) ? new hsize_t[*ndims] : 0;
+
+ for (int i=0; i < *ndims; ++i) {
+ dimsChunk[i] = (*dims)[i];
+ count[i] = 1;
+ stride[i] = 1;
+ offset[i] = 0;
+ } // for
+ dimsChunk[0] = 1;
+ offset[0] = chunk;
+
+ hid_t chunkspace = H5Screate_simple(*ndims, dimsChunk, 0);
+ if (chunkspace < 0)
+ throw std::runtime_error("Could not create chunk dataspace.");
+
+ herr_t err = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET,
+ offset, stride, count, dimsChunk);
+ delete[] count; count = 0;
+ delete[] stride; stride = 0;
+ delete[] offset; offset = 0;
+ if (err < 0)
+ throw std::runtime_error("Could not select hyperslab.");
+
+ int sizeBytes = H5Tget_size(datatype);
+ for (int i=0; i < *ndims; ++i)
+ sizeBytes *= (dimsChunk)[i];
+ delete[] *data; *data = (sizeBytes > 0) ? new char[sizeBytes] : 0;
+ delete[] dimsChunk; dimsChunk = 0;
+
+ err = H5Dread(dataset, datatype, chunkspace, dataspace,
+ H5P_DEFAULT, (void*)*data);
+ if (err < 0)
+ throw std::runtime_error("Could not read data.");
+
+ err = H5Sclose(chunkspace);
+ if (err < 0)
+ throw std::runtime_error("Could not close chunk dataspace.");
+
+ err = H5Sclose(dataspace);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataspace.");
+
+ err = H5Dclose(dataset);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataset.");
+
+ err = H5Gclose(group);
+ if (err < 0)
+ throw std::runtime_error("Could not close group.");
+
+ } catch (const std::exception& err) {
+ std::ostringstream msg;
+ msg << "Error occurred while reading dataset '"
+ << parent << "/" << name << "':\n"
+ << err.what();
+ throw std::runtime_error(msg.str());
+ } catch (...) {
+ std::ostringstream msg;
+ msg << "Unknown occurred while reading dataset '"
+ << parent << "/" << name << "'.";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // readDatasetSlice
+
+// ----------------------------------------------------------------------
+// Create dataset associated with data stored in a raw external binary
+// file.
+void
+pylith::meshio::HDF5::createDatasetRawExternal(const char* parent,
+ const char* name,
+ const char* filename,
+ const hsize_t* dims,
+ const int ndims,
+ hid_t datatype)
+{ // createDatasetRawExternal
+ assert(parent);
+ assert(name);
+ assert(filename);
+ assert(dims);
+
+ try {
+ // Open group
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
+#else
+ hid_t group = H5Gopen(_file, parent);
+#endif
+ if (group < 0)
+ throw std::runtime_error("Could not open group.");
+
+ // Create the dataspace
+ hsize_t* maxDims = (ndims > 0) ? new hsize_t[ndims] : 0;
+ maxDims[0] = H5S_UNLIMITED;
+ for (int i=1; i < ndims; ++i)
+ maxDims[i] = dims[i];
+ hid_t dataspace = H5Screate_simple(ndims, dims, maxDims);
+ if (dataspace < 0)
+ throw std::runtime_error("Could not create dataspace.");
+
+ // Create property for external dataset
+ hid_t property = H5Pcreate(H5P_DATASET_CREATE);
+ if (property < 0)
+ throw std::runtime_error("Could not create property for dataset.");
+
+ // Set external file
+ const off_t offset = 0;
+ herr_t err = H5Pset_external(property, filename, offset, H5F_UNLIMITED);
+ if (err < 0)
+ throw std::runtime_error("Could not set external file property.");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t dataset = H5Dcreate2(group, name,
+ datatype, dataspace, H5P_DEFAULT,
+ property, H5P_DEFAULT);
+#else
+ hid_t dataset = H5Dcreate(group, name,
+ datatype, dataspace, property);
+#endif
+ if (dataset < 0)
+ throw std::runtime_error("Could not create dataset.");
+
+ err = H5Dclose(dataset);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataset.");
+
+ err = H5Pclose(property);
+ if (err < 0)
+ throw std::runtime_error("Could not close property.");
+
+ err = H5Sclose(dataspace);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataspace.");
+
+ err = H5Gclose(group);
+ if (err < 0)
+ throw std::runtime_error("Could not close group.");
+
+ } catch (const std::exception& err) {
+ std::ostringstream msg;
+ msg << "Error occurred while creating dataset '"
+ << parent << "/" << name << "':\n"
+ << err.what();
+ throw std::runtime_error(msg.str());
+ } catch (...) {
+ std::ostringstream msg;
+ msg << "Unknown occurred while creating dataset '" << name << "'.";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // createDatasetRawExternal
+
+// ----------------------------------------------------------------------
+// Create dataset associated with data stored in a raw external binary
+// file.
+void
+pylith::meshio::HDF5::extendDatasetRawExternal(const char* parent,
+ const char* name,
+ const hsize_t* dims,
+ const int ndims)
+{ // extendDatasetRawExternal
+ assert(parent);
+ assert(name);
+ assert(dims);
+
+ try {
+ // Open group
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t group = H5Gopen2(_file, parent, H5P_DEFAULT);
+#else
+ hid_t group = H5Gopen(_file, parent);
+#endif
+ if (group < 0)
+ throw std::runtime_error("Could not open group.");
+
+ // Open dataset.
+#if defined(PYLITH_HDF5_USE_API_18)
+ hid_t dataset = H5Dopen2(group, name, H5P_DEFAULT);
+#else
+ hid_t dataset = H5Dopen(group, name);
+#endif
+ if (dataset < 0)
+ throw std::runtime_error("Could not open dataset.");
+
+#if defined(PYLITH_HDF5_USE_API_18)
+ herr_t err = H5Dset_extent(dataset, dims);
+#else
+ herr_t err = H5Dextend(dataset, dims);
+#endif
+ if (err < 0)
+ throw std::runtime_error("Could not set dataset extent.");
+
+ err = H5Dclose(dataset);
+ if (err < 0)
+ throw std::runtime_error("Could not close dataset.");
+
+ err = H5Gclose(group);
+ if (err < 0)
+ throw std::runtime_error("Could not close group.");
+
+ } catch (const std::exception& err) {
+ std::ostringstream msg;
+ msg << "Error occurred while updating dataset '"
+ << parent << "/" << name << "':\n"
+ << err.what();
+ throw std::runtime_error(msg.str());
+ } catch (...) {
+ std::ostringstream msg;
+ msg << "Unknown occurred while updating dataset '" << name << "'.";
+ throw std::runtime_error(msg.str());
+ } // try/catch
+} // extendDatasetRawExternal
+
+
+// End of file
Deleted: short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh 2011-05-21 13:46:30 UTC (rev 18406)
@@ -1,229 +0,0 @@
-// -*- C++ -*-
-//
-// ======================================================================
-//
-// Brad T. Aagaard, U.S. Geological Survey
-// Charles A. Williams, GNS Science
-// Matthew G. Knepley, University of Chicago
-//
-// This code was developed as part of the Computational Infrastructure
-// for Geodynamics (http://geodynamics.org).
-//
-// Copyright (c) 2010 University of California, Davis
-//
-// See COPYING for license information.
-//
-// ======================================================================
-//
-
-#if !defined(pylith_meshio_hdf5_hh)
-#define pylith_meshio_hdf5_hh
-
-// Include directives ---------------------------------------------------
-#include "meshiofwd.hh" // forward declarations
-
-#include <hdf5.h> // USES hid_t
-
-#include <string> // USES std::string
-
-// HDF5 -----------------------------------------------------------------
-/// High-level interface for HDF5 operations.
-class pylith::meshio::HDF5
-{ // HDF5
- friend class TestHDF5; // Unit testing
-
-// PUBLIC METHODS -------------------------------------------------------
-public :
-
- /// Default constructor.
- HDF5(void);
-
- /** Constructor with filename and mode.
- *
- * @param filename Name of HDF5 file
- * @param mode Mode for HDF5 file
- */
- HDF5(const char* filename,
- hid_t mode);
-
- /// Destructor
- ~HDF5(void);
-
- /** Open HDF5 file.
- *
- * @param filename Name of HDF5 file
- * @param mode Mode for HDF5 file
- */
- void open(const char* filename,
- hid_t mode);
-
- /// Close HDF5 file.
- void close(void);
-
- /** Check if HDF5 file is open.
- *
- * @returns True if HDF5 file is open, false otherwise.
- */
- bool isOpen(void) const;
-
- /** Check if HDF5 file has group.
- *
- * @param name Full name of group.
- * @returns True if group exists, false otherwise.
- */
- bool hasGroup(const char* name);
-
- /** Check if HDF5 file has dataset.
- *
- * @param name Full name of dataset.
- * @returns True if dataset exists, false otherwise.
- */
- bool hasDataset(const char* name);
-
- /** Create group.
- *
- * Create group and leave group open for further operations.
- *
- * @param name Name of group (with absolute path).
- */
- void createGroup(const char* name);
-
- /** Set scalar attribute.
- *
- * @param parent Full path of parent dataset for attribute.
- * @param name Name of attribute.
- * @param value Attribute value.
- * @param datatype Datatype of scalar.
- */
- void writeAttribute(const char* parent,
- const char* name,
- const void* value,
- hid_t datatype);
-
- /** Read scalar attribute.
- *
- * @param parent Full path of parent dataset for attribute.
- * @param name Name of attribute.
- * @param datatype Datatype of scalar.
- * @param value Attribute value.
- */
- void readAttribute(const char* parent,
- const char* name,
- void* value,
- hid_t datatype);
-
- /** Set string attribute.
- *
- * @param parent Full path of parent dataset for attribute.
- * @param name Name of attribute.
- * @param value String value
- */
- void writeAttribute(const char* parent,
- const char* name,
- const char* value);
-
- /** Read string attribute.
- *
- * @param parent Full path of parent dataset for attribute.
- * @param name Name of attribute.
- * @param value String value
- */
- std::string readAttribute(const char* parent,
- const char* name);
-
- /** Create dataset.
- *
- * @param parent Full path of parent group for dataset.
- * @param name Name of dataset.
- * @param dims Dimensions of data.
- * @param dimsChunk Dimensions of data chunks.
- * @param ndims Number of dimensions of data.
- * @param datatype Type of data.
- */
- void createDataset(const char* parent,
- const char* name,
- const hsize_t* dims,
- const hsize_t* dimsChunk,
- const int ndims,
- hid_t datatype);
-
- /** Append chunk to dataset.
- *
- * @param parent Full path of parent group for dataset.
- * @param name Name of dataset.
- * @param data Data.
- * @param dims Dimensions of data.
- * @param dimsChunk Dimensions of data chunks.
- * @param ndims Number of dimensions of data.
- * @param chunk Index of data chunk.
- * @param datatype Type of data.
- */
- void writeDatasetChunk(const char* parent,
- const char* name,
- const void* data,
- const hsize_t* dims,
- const hsize_t* dimsChunk,
- const int ndims,
- const int chunk,
- hid_t datatype);
-
- /** Read dataset chunk.
- *
- * Currently this method assumes the chunk size (slice along dim=0).
- *
- * @param parent Full path of parent group for dataset.
- * @param name Name of dataset.
- * @param data Data.
- * @param dims Dimensions of data.
- * @param ndims Number of dimensions of data.
- * @param islice Index of data slice.
- * @param datatype Type of data.
- */
- void readDatasetChunk(const char* parent,
- const char* name,
- char** const data,
- hsize_t** const dims,
- int* const ndims,
- const int chunk,
- hid_t datatype);
-
- /** Create dataset associated with data stored in a raw external
- * binary file.
- *
- * @param parent Full path of parent group for dataset.
- * @param name Name of dataset.
- * @param filename Name of external raw data file.
- * @param dims Dimensions of data.
- * @param ndims Number of dimensions of data.
- * @param datatype Type of data.
- */
- void createDatasetRawExternal(const char* parent,
- const char* name,
- const char* filename,
- const hsize_t* dims,
- const int ndims,
- hid_t datatype);
-
- /** Update the properties of a dataset associated with data stored
- * in a raw external binary file.
- *
- * @param parent Full path of parent group for dataset.
- * @param name Name of dataset.
- * @param dims Dimensions of data.
- * @param ndims Number of dimensions of data.
- */
- void extendDatasetRawExternal(const char* parent,
- const char* name,
- const hsize_t* dims,
- const int ndims);
-
-// PRIVATE MEMBERS ------------------------------------------------------
-private :
-
- hid_t _file; ///< HDF5 file
-
-}; // HDF5
-
-#endif // pylith_meshio_hdf5_hh
-
-// End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh (from rev 18405, short/3D/PyLith/trunk/libsrc/meshio/HDF5.hh)
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh (rev 0)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/HDF5.hh 2011-05-21 13:46:30 UTC (rev 18406)
@@ -0,0 +1,267 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+#if !defined(pylith_meshio_hdf5_hh)
+#define pylith_meshio_hdf5_hh
+
+// Include directives ---------------------------------------------------
+#include "meshiofwd.hh" // forward declarations
+
+#include <hdf5.h> // USES hid_t
+
+#include <vector> // USES std::vector
+#include <string> // USES std::string
+
+// HDF5 -----------------------------------------------------------------
+/// High-level interface for HDF5 operations.
+class pylith::meshio::HDF5
+{ // HDF5
+ friend class TestHDF5; // Unit testing
+
+// PUBLIC STRUCTS -------------------------------------------------------
+public :
+
+ /// Metadata associated with fields.
+ struct FieldMetadata {
+ std::string name; ///< Name of field.
+ std::string vectorFieldType; ///< Type of field.
+ std::string domain; ///< Domain over which field is given.
+ int numPoints; ///< Number of points in field.
+ int fiberDim; ///< Number of values in field at each point, time.
+ int numTimeSteps; ///< Number of time steps for field.
+ }; // FieldMetadata
+
+// PUBLIC METHODS -------------------------------------------------------
+public :
+
+ /// Default constructor.
+ HDF5(void);
+
+ /** Constructor with filename and mode.
+ *
+ * @param filename Name of HDF5 file
+ * @param mode Mode for HDF5 file
+ */
+ HDF5(const char* filename,
+ hid_t mode);
+
+ /// Destructor
+ ~HDF5(void);
+
+ /** Open HDF5 file.
+ *
+ * @param filename Name of HDF5 file
+ * @param mode Mode for HDF5 file
+ */
+ void open(const char* filename,
+ hid_t mode);
+
+ /// Close HDF5 file.
+ void close(void);
+
+ /** Check if HDF5 file is open.
+ *
+ * @returns True if HDF5 file is open, false otherwise.
+ */
+ bool isOpen(void) const;
+
+ /** Check if HDF5 file has group.
+ *
+ * @param name Full name of group.
+ * @returns True if group exists, false otherwise.
+ */
+ bool hasGroup(const char* name);
+
+ /** Check if HDF5 file has dataset.
+ *
+ * @param name Full name of dataset.
+ * @returns True if dataset exists, false otherwise.
+ */
+ bool hasDataset(const char* name);
+
+ /** Get topology metadata.
+ *
+ * @param numCells Number of cells [output]
+ * @param numCorners Number of corners [output]
+ * @param cellType Type of cell [output]
+ */
+ void getTopologyMetadata(int* numCells,
+ int* numCorners,
+ std::string* cellType);
+
+ /** Get geometry metadata.
+ *
+ * @param numVertices Number of vertices [output].
+ * @param spaceDim Spatial dimension [output].
+ */
+ void getGeometryMetadata(int* numVertices,
+ int* spaceDim);
+
+ /** Get metadata for fields.
+ *
+ * @param metadata Array of metadata for fields.
+ */
+ void getFieldsMetadata(std::vector<FieldMetadata>* metadata);
+
+ /** Create group.
+ *
+ * Create group and leave group open for further operations.
+ *
+ * @param name Name of group (with absolute path).
+ */
+ void createGroup(const char* name);
+
+ /** Set scalar attribute.
+ *
+ * @param parent Full path of parent dataset for attribute.
+ * @param name Name of attribute.
+ * @param value Attribute value.
+ * @param datatype Datatype of scalar.
+ */
+ void writeAttribute(const char* parent,
+ const char* name,
+ const void* value,
+ hid_t datatype);
+
+ /** Read scalar attribute.
+ *
+ * @param parent Full path of parent dataset for attribute.
+ * @param name Name of attribute.
+ * @param datatype Datatype of scalar.
+ * @param value Attribute value.
+ */
+ void readAttribute(const char* parent,
+ const char* name,
+ void* value,
+ hid_t datatype);
+
+ /** Set string attribute.
+ *
+ * @param parent Full path of parent dataset for attribute.
+ * @param name Name of attribute.
+ * @param value String value
+ */
+ void writeAttribute(const char* parent,
+ const char* name,
+ const char* value);
+
+ /** Read string attribute.
+ *
+ * @param parent Full path of parent dataset for attribute.
+ * @param name Name of attribute.
+ * @param value String value
+ */
+ std::string readAttribute(const char* parent,
+ const char* name);
+
+ /** Create dataset.
+ *
+ * @param parent Full path of parent group for dataset.
+ * @param name Name of dataset.
+ * @param dims Dimensions of data.
+ * @param dimsChunk Dimensions of data chunks.
+ * @param ndims Number of dimensions of data.
+ * @param datatype Type of data.
+ */
+ void createDataset(const char* parent,
+ const char* name,
+ const hsize_t* dims,
+ const hsize_t* dimsChunk,
+ const int ndims,
+ hid_t datatype);
+
+ /** Append chunk to dataset.
+ *
+ * @param parent Full path of parent group for dataset.
+ * @param name Name of dataset.
+ * @param data Data.
+ * @param dims Dimensions of data.
+ * @param dimsChunk Dimensions of data chunks.
+ * @param ndims Number of dimensions of data.
+ * @param chunk Index of data chunk.
+ * @param datatype Type of data.
+ */
+ void writeDatasetChunk(const char* parent,
+ const char* name,
+ const void* data,
+ const hsize_t* dims,
+ const hsize_t* dimsChunk,
+ const int ndims,
+ const int chunk,
+ hid_t datatype);
+
+ /** Read dataset chunk.
+ *
+ * Currently this method assumes the chunk size (slice along dim=0).
+ *
+ * @param parent Full path of parent group for dataset.
+ * @param name Name of dataset.
+ * @param data Data.
+ * @param dims Dimensions of data.
+ * @param ndims Number of dimensions of data.
+ * @param islice Index of data slice.
+ * @param datatype Type of data.
+ */
+ void readDatasetChunk(const char* parent,
+ const char* name,
+ char** const data,
+ hsize_t** const dims,
+ int* const ndims,
+ const int chunk,
+ hid_t datatype);
+
+ /** Create dataset associated with data stored in a raw external
+ * binary file.
+ *
+ * @param parent Full path of parent group for dataset.
+ * @param name Name of dataset.
+ * @param filename Name of external raw data file.
+ * @param dims Dimensions of data.
+ * @param ndims Number of dimensions of data.
+ * @param datatype Type of data.
+ */
+ void createDatasetRawExternal(const char* parent,
+ const char* name,
+ const char* filename,
+ const hsize_t* dims,
+ const int ndims,
+ hid_t datatype);
+
+ /** Update the properties of a dataset associated with data stored
+ * in a raw external binary file.
+ *
+ * @param parent Full path of parent group for dataset.
+ * @param name Name of dataset.
+ * @param dims Dimensions of data.
+ * @param ndims Number of dimensions of data.
+ */
+ void extendDatasetRawExternal(const char* parent,
+ const char* name,
+ const hsize_t* dims,
+ const int ndims);
+
+// PRIVATE MEMBERS ------------------------------------------------------
+private :
+
+ hid_t _file; ///< HDF5 file
+
+}; // HDF5
+
+#endif // pylith_meshio_hdf5_hh
+
+// End of file
Modified: short/3D/PyLith/trunk/libsrc/pylith/meshio/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/Makefile.am 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -77,10 +77,5 @@
PsetFileAscii.icc \
PsetFileBinary.hh
-# export
-clean-local: clean-subpkgincludeHEADERS
-BUILT_SOURCES = export-subpkgincludeHEADERS
-CLEANFILES = export-subpkgincludeHEADERS
-
# End of file
Deleted: short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/Xdmf.cc 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc 2011-05-21 13:46:30 UTC (rev 18406)
@@ -1,290 +0,0 @@
-// -*- C++ -*-
-//
-// ======================================================================
-//
-// Brad T. Aagaard, U.S. Geological Survey
-// Charles A. Williams, GNS Science
-// Matthew G. Knepley, University of Chicago
-//
-// This code was developed as part of the Computational Infrastructure
-// for Geodynamics (http://geodynamics.org).
-//
-// Copyright (c) 2010 University of California, Davis
-//
-// See COPYING for license information.
-//
-// ======================================================================
-//
-
-#include "Xdmf.hh" // implementation of class methods
-
-#include "pylith/utils/array.hh" // USES int_array, string_vector
-
-#include <string> // USES std::string
-#include <stdexcept> // USES std::runtime_error
-#include <iostream> // USES std::cout
-#include <sstream> // USES std::ostringstream
-#include <cassert> // USES assert()
-
-// ----------------------------------------------------------------------
-// Default constructor.
-pylith::meshio::Xdmf::Xdmf(void)
-{ // constructor
-} // constructor
-
-// ----------------------------------------------------------------------
-// Destructor
-pylith::meshio::Xdmf::~Xdmf(void)
-{ // destructor
-} // destructor
-
-// ----------------------------------------------------------------------
-// Write Xdmf file associated with HDF5 file.
-void
-pylith::meshio::Xdmf::write(const char* filenameXdfm,
- const char* filenameHDF5)
-{ // write
- std::string cellType = 0;
- int numCells = 0;
- int numCorners = 0;
- int numVertices = 0;
- int spaceDim = 0;
- int numTimeSteps = 0;
- double* timeStamps = 0;
- int numFields = 0;
- string_vector fieldName;
- string_vector fieldVectorFieldType;
- string_vector fieldType;
- int_array fieldNumPoints;
- int_array fieldFiberDim;
-
- if (spaceDim == 1) {
- std::cout
- << "WARNING: Xdmf grids not defined for 1-D domains.\n"
- << "Skipping creation of Xdmf file associated with HDF5 file '"
- << filenameHDF5 << "'" << std::endl;
- return;
- } // if
-
- _file
- << "<?xml version=\"1.0\" ?>\n"
- << "<!DOCTYPE Xdmf SYSTEM \"Xdmf.dtd\" [\n"
- << "<!ENTITY HeavyData \"" << filenameHDF5 << "\">\n"
- << "]>\n"
- << "\n"
- << "<Xdmf>\n"
- << " <Domain Name=\"domain\">\n";
-
- _writeDomainCells(numCells, numCorners);
- _writeDomainVertices(numVertices, spaceDim);
-
- _file
- << "<!-- ============================================================ -->\n";
-
- _writeTimeStamps(timeStamps, numTimeSteps);
-
- _file
- << "<Grid Name=\"domain\" GridType=\"Uniform\">\n";
- for (int iTimeStep=0; iTimeStep < numTimeSteps; ++iTimeStep) {
- _writeGridTopology(cellType.c_str(), numCells);
- _writeGridGeometry(spaceDim);
- for (int iField=0; iField < numFields; ++iField) {
- if (2 == spaceDim &&
- std::string("vector") == fieldVectorFieldType[iField]) {
- for (int component=0; component < spaceDim; ++component)
- _writeGridAttributeComponent(fieldName[iField].c_str(),
- fieldVectorFieldType[iField].c_str(),
- fieldType[iField].c_str(),
- numTimeSteps, fieldNumPoints[iField],
- fieldFiberDim[iField],
- iTimeStep, component);
- } else {
- _writeGridAttribute(fieldName[iField].c_str(),
- fieldVectorFieldType[iField].c_str(),
- fieldType[iField].c_str(), numTimeSteps,
- fieldNumPoints[iField], fieldFiberDim[iField],
- iTimeStep);
- } // if/else
- } // for
- } // for
-
- _file
- << " </Grid>\n"
- << " </Grid>\n"
- << " </Domain>\n"
- << "</Xdmf>\n";
- _file.close();
-} // write
-
-// ----------------------------------------------------------------------
-// Write domain cell information.
-void
-pylith::meshio::Xdmf::_writeDomainCells(const int numCells,
- const int numCorners)
-{ // _writeDomainCells
- _file
- << " <DataItem Name=\"cells\"\n"
- << " ItemType=\"Uniform\"\n"
- << " Format=\"HDF\"\n"
- << " NumberType=\"Float\" Precision=\"8\"\n"
- << " Dimensions=\"" << numCells << " " << numCorners << "\">\n"
- << " &HeavyData;:/topology/cells\n"
- << " </DataItem>\n";
-
-} // _writeDomainCells
-
-// ----------------------------------------------------------------------
-// Write domain vertices information.
-void
-pylith::meshio::Xdmf::_writeDomainVertices(const int numVertices,
- const int spaceDim)
-{ // _writeDomainVertices
- _file
- << " <DataItem Name=\"vertices\"\n"
- << " Format=\"HDF\"\n"
- << " Dimensions=\"" << numVertices << " " << spaceDim << "\">\n"
- << " &HeavyData;:/geometry/vertices\n"
- << " </DataItem>\n";
-} // _writeDomainVertices
-
-// ----------------------------------------------------------------------
-// Write time stamps.
-void
-pylith::meshio::Xdmf::_writeTimeStamps(const double* timeStamps,
- const int numTimeSteps)
-{ // _writeTimeStamps
- if (numTimeSteps > 0) {
- assert(timeStamps);
- } // if
-} // _writeTimeStamps
-
-// ----------------------------------------------------------------------
-// Write grid topology information.
-void
-pylith::meshio::Xdmf::_writeGridTopology(const char* cellType,
- const int numCells)
-{ // _writeGridTopology
- _file
- << " <Topology\n"
- << " TopologyType=\"Triangle\"\n"
- << " NumberOfElements=\"" << numCells << "\">\n"
- << " <DataItem Reference=\"XML\">\n"
- << " /Xdmf/Domain/DataItem[@Name=\"cells\"]\n"
- << " </DataItem>\n"
- << " </Topology>\n";
-} // _writeGridTopology
-
-// ----------------------------------------------------------------------
-// Write Grid geometry.
-void
-pylith::meshio::Xdmf::_writeGridGeometry(const int spaceDim)
-{ // _writeGridGeometry
- assert(2 == spaceDim || 3 == spaceDim);
-
- const char* geomType = (spaceDim == 3) ? "XYZ" : "XY";
-
- _file
- << " <Geometry GeometryType=\"" << geomType << "\">\n"
- << " <DataItem Reference=\"XML\">\n"
- << " /Xdmf/Domain/DataItem[@Name=\"vertices\"]\n"
- << " </DataItem>\n"
- << " </Geometry>\n";
-} // _writeGridGeometry
-
-// ----------------------------------------------------------------------
-// Write grid attribute.
-void
-pylith::meshio::Xdmf::_writeGridAttributeComponent(const char* name,
- const char* vectorFieldType,
- const char* center,
- const int numTimeSteps,
- const int numPoints,
- const int fiberDim,
- const int iTime,
- const int component)
-{ // _writeGridAttribute
- std::string componentName = "unknown";
- switch (component) {
- case 0:
- componentName = std::string("x-") + std::string(name);
- break;
- case 1:
- componentName = std::string("y-") + std::string(name);
- break;
- case 2:
- componentName = std::string("z-") + std::string(name);
- break;
- default:
- { // default
- std::ostringstream msg;
- msg << "Unknown component " << component << " while writing Xdmf file.";
- std::cerr << msg.str() << std::endl;
- assert(0);
- throw std::logic_error(msg.str());
- } // default
- } // switch
- _file
- << " <Attribute\n"
- << " Name=\"" << componentName << "\"\n"
- << " Type=\""<< vectorFieldType << "\"\n"
- << " Center=\"" << center << "\">\n"
- << " <DataItem ItemType=\"HyperSlab\"\n"
- << " Dimensions=\"1 " << numPoints << " 1\"\n"
- << " Type=\"HyperSlab\">\n"
- << " <DataItem\n"
- << " Dimensions=\"3 3\"\n"
- << " Format=\"XML\">\n"
- << " 0 0 "<< component << "\n"
- << " 1 1 1\n"
- << " 1 " << numPoints << " 1\n"
- << " </DataItem>\n"
- << " <DataItem\n"
- << " DataType=\"Float\" Precision=\"8\"\n"
- << " Dimensions=\""
- << numTimeSteps << " " << numPoints << " " << fiberDim << "\"\n"
- << " Format=\"HDF\">\n"
- << " &HeavyData;:/vertex_fields/" << name << "\n"
- << " </DataItem>\n"
- << " </DataItem>\n"
- << " </Attribute>\n";
-} // _writeGridAttribute
-
-// ----------------------------------------------------------------------
-// Write grid attribute.
-void
-pylith::meshio::Xdmf::_writeGridAttribute(const char* name,
- const char* vectorFieldType,
- const char* center,
- const int numTimeSteps,
- const int numPoints,
- const int fiberDim,
- const int iTime)
-{ // _writeGridAttribute
- _file
- << " <Attribute\n"
- << " Name=\"" << name << "\"\n"
- << " Type=\"" << vectorFieldType << "\"\n"
- << " Center=\"" << center << "\">\n"
- << " <DataItem ItemType=\"HyperSlab\"\n"
- << " Dimensions=\"1 " << numPoints << " 1\"\n"
- << " Type=\"HyperSlab\">\n"
- << " <DataItem\n"
- << " Dimensions=\"3 3\"\n"
- << " Format=\"XML\">\n"
- << " 0 0 0\n"
- << " 1 1 1\n"
- << " 1 " << numPoints << " 1\n"
- << " </DataItem>\n"
- << " <DataItem\n"
- << " DataType=\"Float\" Precision=\"8\"\n"
- << " Dimensions=\""
- << numTimeSteps << " " << numPoints << " " << fiberDim << "\"\n"
- << " Format=\"HDF\">\n"
- << " &HeavyData;:/vertex_fields/" << name << "\n"
- << " </DataItem>\n"
- << " </DataItem>\n"
- << " </Attribute>\n";
-} // _writeGridAttribute
-
-
-// End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc (from rev 18405, short/3D/PyLith/trunk/libsrc/meshio/Xdmf.cc)
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc (rev 0)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.cc 2011-05-21 13:46:30 UTC (rev 18406)
@@ -0,0 +1,339 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+#include "Xdmf.hh" // implementation of class methods
+
+#include "pylith/utils/array.hh" // USES int_array, string_vector
+
+#include <string> // USES std::string
+#include <stdexcept> // USES std::runtime_error
+#include <iostream> // USES std::cout
+#include <sstream> // USES std::ostringstream
+#include <cassert> // USES assert()
+
+// ----------------------------------------------------------------------
+// Default constructor.
+pylith::meshio::Xdmf::Xdmf(void)
+{ // constructor
+} // constructor
+
+// ----------------------------------------------------------------------
+// Destructor
+pylith::meshio::Xdmf::~Xdmf(void)
+{ // destructor
+} // destructor
+
+// ----------------------------------------------------------------------
+// Write Xdmf file associated with HDF5 file.
+void
+pylith::meshio::Xdmf::write(const char* filenameXdmf,
+ const char* filenameHDF5)
+{ // write
+ assert(filenameXdmf);
+ assert(filenameHDF5);
+
+ int numCells = 0;
+ int numCorners = 0;
+ std::string cellType;
+ int numVertices = 0;
+ int spaceDim = 0;
+ int numTimeSteps = 0;
+ double* timeStamps = 0;
+ std::vector<HDF5::FieldMetadata> fieldsMetadata;
+
+ HDF5 h5(filenameHDF5, H5F_ACC_RDONLY);
+ h5.getTopologyMetadata(&numCells, &numCorners, &cellType);
+ h5.getGeometryMetadata(&numVertices, &spaceDim);
+ h5.getFieldsMetadata(&fieldsMetadata);
+ const int numFields = fieldsMetadata.size();
+ for (int iField=0; iField < numFields; ++iField)
+ if ("vertex_field" == fieldsMetadata[iField].domain) {
+ fieldsMetadata[iField].domain = "Node";
+ } else if ("cell_field" == fieldsMetadata[iField].domain) {
+ fieldsMetadata[iField].domain = "Cell";
+ } else {
+ std::ostringstream msg;
+ msg << "Unknown field type '" << fieldsMetadata[iField].domain
+ << "' for field '"
+	<< fieldsMetadata[iField].name << "'" << std::endl;
+ throw std::runtime_error(msg.str());
+ } // if/else
+
+ if (1 == spaceDim) {
+ std::cout
+ << "WARNING: Xdmf grids not defined for 1-D domains.\n"
+ << "Skipping creation of Xdmf file associated with HDF5 file '"
+ << filenameHDF5 << "'" << std::endl;
+ return;
+ } // if
+
+ _file.open(filenameXdmf);
+ if (!_file.is_open() || !_file.good()) {
+ std::ostringstream msg;
+ msg << "Could not open Xdmf file '" << filenameXdmf
+	<< "' for writing metadata for HDF5 file '"
+ << filenameHDF5 << "'.\n";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ _file
+ << "<?xml version=\"1.0\" ?>\n"
+ << "<!DOCTYPE Xdmf SYSTEM \"Xdmf.dtd\" [\n"
+ << "<!ENTITY HeavyData \"" << filenameHDF5 << "\">\n"
+ << "]>\n"
+ << "\n"
+ << "<Xdmf>\n"
+ << " <Domain Name=\"domain\">\n";
+
+ _writeDomainCells(numCells, numCorners);
+ _writeDomainVertices(numVertices, spaceDim);
+
+ _file
+ << "<!-- ============================================================ -->\n";
+
+ _writeTimeStamps(timeStamps, numTimeSteps);
+
+ _file
+ << " <Grid Name=\"domain\" GridType=\"Uniform\">\n";
+ for (int iTimeStep=0; iTimeStep < numTimeSteps; ++iTimeStep) {
+ _writeGridTopology(cellType.c_str(), numCells);
+ _writeGridGeometry(spaceDim);
+ for (int iField=0; iField < numFields; ++iField) {
+ if (2 == spaceDim &&
+ std::string("vector") == fieldsMetadata[iField].vectorFieldType) {
+ for (int component=0; component < spaceDim; ++component)
+ _writeGridAttributeComponent(fieldsMetadata[iField],
+ iTimeStep, component);
+ } else {
+ _writeGridAttribute(fieldsMetadata[iField],
+ iTimeStep);
+ } // if/else
+ } // for
+ _file << " </Grid>\n";
+ } // for
+
+ _file
+ << " </Grid>\n"
+ << " </Domain>\n"
+ << "</Xdmf>\n";
+ _file.close();
+} // write
+
+// ----------------------------------------------------------------------
+// Write domain cell information.
+void
+pylith::meshio::Xdmf::_writeDomainCells(const int numCells,
+ const int numCorners)
+{ // _writeDomainCells
+ assert(_file.is_open() && _file.good());
+ _file
+ << " <DataItem Name=\"cells\"\n"
+ << " ItemType=\"Uniform\"\n"
+ << " Format=\"HDF\"\n"
+ << " NumberType=\"Float\" Precision=\"8\"\n"
+ << " Dimensions=\"" << numCells << " " << numCorners << "\">\n"
+ << " &HeavyData;:/topology/cells\n"
+ << " </DataItem>\n";
+
+} // _writeDomainCells
+
+// ----------------------------------------------------------------------
+// Write domain vertices information.
+void
+pylith::meshio::Xdmf::_writeDomainVertices(const int numVertices,
+ const int spaceDim)
+{ // _writeDomainVertices
+ assert(_file.is_open() && _file.good());
+
+ _file
+ << " <DataItem Name=\"vertices\"\n"
+ << " Format=\"HDF\"\n"
+ << " Dimensions=\"" << numVertices << " " << spaceDim << "\">\n"
+ << " &HeavyData;:/geometry/vertices\n"
+ << " </DataItem>\n";
+} // _writeDomainVertices
+
+// ----------------------------------------------------------------------
+// Write time stamps.
+void
+pylith::meshio::Xdmf::_writeTimeStamps(const double* timeStamps,
+ const int numTimeSteps)
+{ // _writeTimeStamps
+ assert(_file.is_open() && _file.good());
+
+ if (numTimeSteps > 0) {
+ assert(timeStamps);
+ } // if
+} // _writeTimeStamps
+
+// ----------------------------------------------------------------------
+// Write grid topology information.
+void
+pylith::meshio::Xdmf::_writeGridTopology(const char* cellType,
+ const int numCells)
+{ // _writeGridTopology
+ assert(_file.is_open() && _file.good());
+
+ _file
+ << " <Topology\n"
+    << "	TopologyType=\"" << cellType << "\"\n"
+ << " NumberOfElements=\"" << numCells << "\">\n"
+ << " <DataItem Reference=\"XML\">\n"
+ << " /Xdmf/Domain/DataItem[@Name=\"cells\"]\n"
+ << " </DataItem>\n"
+ << " </Topology>\n";
+} // _writeGridTopology
+
+// ----------------------------------------------------------------------
+// Write Grid geometry.
+void
+pylith::meshio::Xdmf::_writeGridGeometry(const int spaceDim)
+{ // _writeGridGeometry
+ assert(_file.is_open() && _file.good());
+ assert(2 == spaceDim || 3 == spaceDim);
+
+ const char* geomType = (spaceDim == 3) ? "XYZ" : "XY";
+
+ _file
+ << " <Geometry GeometryType=\"" << geomType << "\">\n"
+ << " <DataItem Reference=\"XML\">\n"
+ << " /Xdmf/Domain/DataItem[@Name=\"vertices\"]\n"
+ << " </DataItem>\n"
+ << " </Geometry>\n";
+} // _writeGridGeometry
+
+// ----------------------------------------------------------------------
+// Write grid attribute.
+void
+pylith::meshio::Xdmf::_writeGridAttribute(const HDF5::FieldMetadata& metadata,
+ const int iTime)
+{ // _writeGridAttribute
+ assert(_file.is_open() && _file.good());
+
+ std::string h5FullName = "";
+  if (std::string("Node") == metadata.domain) {
+ h5FullName = std::string("/vertex_fields/") + metadata.name;
+ } else if (std::string("Cell") == metadata.domain) {
+ h5FullName = std::string("/cell_fields/") + metadata.name;
+ } else {
+ std::ostringstream msg;
+ msg << "Unknown domain '" << metadata.domain << "' for field '"
+ << metadata.name << "'." << std::endl;
+ throw std::runtime_error(msg.str());
+ } // if/else
+
+ _file
+ << " <Attribute\n"
+ << " Name=\"" << metadata.name << "\"\n"
+ << " Type=\"" << metadata.vectorFieldType << "\"\n"
+ << " Center=\"" << metadata.domain << "\">\n"
+ << " <DataItem ItemType=\"HyperSlab\"\n"
+ << " Dimensions=\"1 " << metadata.numPoints << " 1\"\n"
+ << " Type=\"HyperSlab\">\n"
+ << " <DataItem\n"
+ << " Dimensions=\"3 3\"\n"
+ << " Format=\"XML\">\n"
+ << " 0 0 0\n"
+ << " 1 1 1\n"
+ << " 1 " << metadata.numPoints << " 1\n"
+ << " </DataItem>\n"
+ << " <DataItem\n"
+ << " DataType=\"Float\" Precision=\"8\"\n"
+ << " Dimensions=\""
+ << metadata.numTimeSteps
+ << " " << metadata.numPoints
+ << " " << metadata.fiberDim << "\"\n"
+ << " Format=\"HDF\">\n"
+ << " &HeavyData;:" << h5FullName << "\n"
+ << " </DataItem>\n"
+ << " </DataItem>\n"
+ << " </Attribute>\n";
+} // _writeGridAttribute
+
+// ----------------------------------------------------------------------
+// Write grid attribute.
+void
+pylith::meshio::Xdmf::_writeGridAttributeComponent(const HDF5::FieldMetadata& metadata,
+ const int iTime,
+ const int component)
+{ // _writeGridAttribute
+ assert(_file.is_open() && _file.good());
+
+ std::string h5FullName = "";
+  if (std::string("Node") == metadata.domain) {
+ h5FullName = std::string("/vertex_fields/") + metadata.name;
+ } else if (std::string("Cell") == metadata.domain) {
+ h5FullName = std::string("/cell_fields/") + metadata.name;
+ } else {
+ std::ostringstream msg;
+ msg << "Unknown domain '" << metadata.domain << "' for field '"
+ << metadata.name << "'." << std::endl;
+ throw std::runtime_error(msg.str());
+ } // if/else
+
+ std::string componentName = "unknown";
+ switch (component) {
+ case 0:
+ componentName = std::string("x-") + std::string(metadata.name);
+ break;
+ case 1:
+ componentName = std::string("y-") + std::string(metadata.name);
+ break;
+ case 2:
+ componentName = std::string("z-") + std::string(metadata.name);
+ break;
+ default:
+ { // default
+ std::ostringstream msg;
+ msg << "Unknown component " << component << " while writing Xdmf file.";
+ std::cerr << msg.str() << std::endl;
+ assert(0);
+ throw std::logic_error(msg.str());
+ } // default
+ } // switch
+
+ _file
+ << " <Attribute\n"
+ << " Name=\"" << componentName << "\"\n"
+ << " Type=\""<< metadata.vectorFieldType << "\"\n"
+ << " Center=\"" << metadata.domain << "\">\n"
+ << " <DataItem ItemType=\"HyperSlab\"\n"
+ << " Dimensions=\"1 " << metadata.numPoints << " 1\"\n"
+ << " Type=\"HyperSlab\">\n"
+ << " <DataItem\n"
+ << " Dimensions=\"3 3\"\n"
+ << " Format=\"XML\">\n"
+ << " 0 0 "<< component << "\n"
+ << " 1 1 1\n"
+ << " 1 " << metadata.numPoints << " 1\n"
+ << " </DataItem>\n"
+ << " <DataItem\n"
+ << " DataType=\"Float\" Precision=\"8\"\n"
+ << " Dimensions=\""
+ << metadata.numTimeSteps
+ << " " << metadata.numPoints
+ << " " << metadata.fiberDim << "\"\n"
+ << " Format=\"HDF\">\n"
+ << " &HeavyData;:" << h5FullName << "\n"
+ << " </DataItem>\n"
+ << " </DataItem>\n"
+ << " </Attribute>\n";
+} // _writeGridAttribute
+
+
+// End of file
Deleted: short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/Xdmf.hh 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh 2011-05-21 13:46:30 UTC (rev 18406)
@@ -1,156 +0,0 @@
-// -*- C++ -*-
-//
-// ======================================================================
-//
-// Brad T. Aagaard, U.S. Geological Survey
-// Charles A. Williams, GNS Science
-// Matthew G. Knepley, University of Chicago
-//
-// This code was developed as part of the Computational Infrastructure
-// for Geodynamics (http://geodynamics.org).
-//
-// Copyright (c) 2010 University of California, Davis
-//
-// See COPYING for license information.
-//
-// ======================================================================
-//
-
-/*
- domain/cells (/topology/cells) [ncells, ncorners]
- domain/vertices (/geometry/vertices) [nvertices, spacedim]
-
- time (uniform time step)
- start, step, ntimesteps
-
- Repeat for each time slice
- topology (cell type, ncells, /Xdmf/Domain/DataItem[@Name="cells"])
- geometry (type, /Xdmf/Domain/DataItem[@Name="vertices"]
- Repeat for each field
- attribute (name, scalar, center, ntimesteps, npts, fiberdim)
- if 2-D vector, break up into components
-
-*/
-
-#if !defined(pylith_meshio_xdmf_hh)
-#define pylith_meshio_xdmf_hh
-
-// Include directives ---------------------------------------------------
-#include "meshiofwd.hh" // forward declarations
-
-#include <fstream> // HASA std::ofstream
-#include <string> // USES std::string
-
-// Xdmf -----------------------------------------------------------------
-/// Write Xdmf file associated with HDF5 file for use with VTK.
-class pylith::meshio::Xdmf
-{ // HDF5
- friend class TestXdmf; // Unit testing
-
-// PUBLIC METHODS -------------------------------------------------------
-public :
-
- /// Default constructor.
- Xdmf(void);
-
- /// Destructor
- ~Xdmf(void);
-
- /** Write Xdmf file associated with HDF5 file.
- *
- * @param filenameXdmf Name of Xdmf file.
- * @param filenameHDF5 Name of HDF5 file.
- */
- void write(const char* filenameXdfm,
- const char* filenameHDF5);
-
-// PRIVATE METHODS ------------------------------------------------------
-private :
-
- /** Write domain cell information.
- *
- * @param numCells Number of cells.
- * @param numCorners Number of vertices in a cell.
- */
- void _writeDomainCells(const int numCells,
- const int numCorners);
-
- /** Write domain vertices information.
- *
- * @param numVertices Number of vertices.
- * @param spaceDim Spatial dimension.
- */
- void _writeDomainVertices(const int numVertices,
- const int spaceDim);
-
- /** Write time stamps associated with fields.
- *
- * @param timeStamps Array of time stamps.
- * @param numTimeStamps Number of time stamps.
- */
- void _writeTimeStamps(const double* timeStamps,
- const int numTimeSteps);
-
- /** Write grid topology information.
- *
- * @param cellType Name for cell type.
- * @param numCells Number of cells.
- */
- void _writeGridTopology(const char* cellType,
- const int numCells);
-
- /** Write Grid geometry.
- *
- * @param spaceDim Spatial dimension.
- */
- void _writeGridGeometry(const int spaceDim);
-
- /** Write grid attribute.
- *
- * @param name Name of attribute.
- * @param vectorFieldType Type of vector field.
- * @param center Vertex/Cell center.
- * @param numTimeSteps Number of time steps.
- * @param numPoints Number of vertices or cells.
- * @param fiberDim Fiber dimension for attribute.
- * @param iTime Index of time step.
- */
- void _writeGridAttribute(const char* name,
- const char* vectorFieldType,
- const char* center,
- const int numTimeSteps,
- const int numPoints,
- const int fiberDim,
- const int iTime);
-
- /** Write grid attribute as single component (for 2-D vector).
- *
- * @param name Name of attribute.
- * @param vectorFieldType Type of vector field.
- * @param center Vertex/Cell center.
- * @param numTimeSteps Number of time steps.
- * @param numPoints Number of vertices or cells.
- * @param fiberDim Fiber dimension for attribute.
- * @param iTime Index of time step.
- * @param component Index of component.
- */
- void _writeGridAttributeComponent(const char* name,
- const char* vectorFieldType,
- const char* center,
- const int numTimeSteps,
- const int numPoints,
- const int fiberDim,
- const int iTime,
- const int component);
-
-// PRIVATE MEMBERS ------------------------------------------------------
-private :
-
- std::ofstream _file; ///< Xdmf file
-
-}; // Xdmf
-
-#endif // pylith_meshio_xdmf_hh
-
-
-// End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh (from rev 18405, short/3D/PyLith/trunk/libsrc/meshio/Xdmf.hh)
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh (rev 0)
+++ short/3D/PyLith/trunk/libsrc/pylith/meshio/Xdmf.hh 2011-05-21 13:46:30 UTC (rev 18406)
@@ -0,0 +1,139 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+/*
+ domain/cells (/topology/cells) [ncells, ncorners]
+ domain/vertices (/geometry/vertices) [nvertices, spacedim]
+
+ time (uniform time step)
+ start, step, ntimesteps
+
+ Repeat for each time slice
+ topology (cell type, ncells, /Xdmf/Domain/DataItem[@Name="cells"])
+ geometry (type, /Xdmf/Domain/DataItem[@Name="vertices"]
+ Repeat for each field
+ attribute (name, scalar, center, ntimesteps, npts, fiberdim)
+ if 2-D vector, break up into components
+
+*/
+
+#if !defined(pylith_meshio_xdmf_hh)
+#define pylith_meshio_xdmf_hh
+
+// Include directives ---------------------------------------------------
+#include "meshiofwd.hh" // forward declarations
+
+#include "HDF5.hh" // USES HDF5::FieldMetadata
+
+#include <vector> // USES std::vector
+#include <fstream> // HASA std::ofstream
+#include <string> // USES std::string
+
+// Xdmf -----------------------------------------------------------------
+/// Write Xdmf file associated with HDF5 file for use with VTK.
+class pylith::meshio::Xdmf
+{ // HDF5
+ friend class TestXdmf; // Unit testing
+
+// PUBLIC METHODS -------------------------------------------------------
+public :
+
+ /// Default constructor.
+ Xdmf(void);
+
+ /// Destructor
+ ~Xdmf(void);
+
+ /** Write Xdmf file associated with HDF5 file.
+ *
+ * @param filenameXdmf Name of Xdmf file.
+ * @param filenameHDF5 Name of HDF5 file.
+ */
+ void write(const char* filenameXdmf,
+ const char* filenameHDF5);
+
+// PRIVATE METHODS ------------------------------------------------------
+private :
+
+ /** Write domain cell information.
+ *
+ * @param numCells Number of cells.
+ * @param numCorners Number of vertices in a cell.
+ */
+ void _writeDomainCells(const int numCells,
+ const int numCorners);
+
+ /** Write domain vertices information.
+ *
+ * @param numVertices Number of vertices.
+ * @param spaceDim Spatial dimension.
+ */
+ void _writeDomainVertices(const int numVertices,
+ const int spaceDim);
+
+ /** Write time stamps associated with fields.
+ *
+ * @param timeStamps Array of time stamps.
+ * @param numTimeStamps Number of time stamps.
+ */
+ void _writeTimeStamps(const double* timeStamps,
+ const int numTimeSteps);
+
+ /** Write grid topology information.
+ *
+ * @param cellType Name for cell type.
+ * @param numCells Number of cells.
+ */
+ void _writeGridTopology(const char* cellType,
+ const int numCells);
+
+ /** Write Grid geometry.
+ *
+ * @param spaceDim Spatial dimension.
+ */
+ void _writeGridGeometry(const int spaceDim);
+
+ /** Write grid attribute.
+ *
+ * @param metadata Metadata for field.
+ * @param iTime Index of time step.
+ */
+ void _writeGridAttribute(const HDF5::FieldMetadata& metadata,
+ const int iTime);
+
+ /** Write grid attribute as single component (for 2-D vector).
+ *
+ * @param metadata Metadata for field.
+ * @param iTime Index of time step.
+ * @param component Index of component.
+ */
+ void _writeGridAttributeComponent(const HDF5::FieldMetadata& metadata,
+ const int iTime,
+ const int component);
+
+// PRIVATE MEMBERS ------------------------------------------------------
+private :
+
+ std::ofstream _file; ///< Xdmf file
+
+}; // Xdmf
+
+#endif // pylith_meshio_xdmf_hh
+
+
+// End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/problems (from rev 18400, short/3D/PyLith/trunk/libsrc/problems)
Modified: short/3D/PyLith/trunk/libsrc/pylith/problems/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/problems/Makefile.am 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/problems/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -31,10 +31,5 @@
noinst_HEADERS =
-# export
-clean-local: clean-subpkgincludeHEADERS
-BUILT_SOURCES = export-subpkgincludeHEADERS
-CLEANFILES = export-subpkgincludeHEADERS
-
# End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/topology (from rev 18400, short/3D/PyLith/trunk/libsrc/topology)
Deleted: short/3D/PyLith/trunk/libsrc/pylith/topology/Field.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/topology/Field.cc 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/topology/Field.cc 2011-05-21 13:46:30 UTC (rev 18406)
@@ -1,1182 +0,0 @@
-// -*- C++ -*-
-//
-// ======================================================================
-//
-// Brad T. Aagaard, U.S. Geological Survey
-// Charles A. Williams, GNS Science
-// Matthew G. Knepley, University of Chicago
-//
-// This code was developed as part of the Computational Infrastructure
-// for Geodynamics (http://geodynamics.org).
-//
-// Copyright (c) 2010 University of California, Davis
-//
-// See COPYING for license information.
-//
-// ======================================================================
-//
-
-#include <portinfo>
-
-#include "Field.hh" // implementation of class methods
-
-#include "pylith/utils/array.hh" // USES double_array
-
-#include "spatialdata/geocoords/CoordSys.hh" // USES CoordSys
-#include "spatialdata/units/Nondimensional.hh" // USES Nondimensional
-
-#include "pylith/utils/petscerror.h" // USES CHECK_PETSC_ERROR
-#include <stdexcept> // USES std::runtime_error
-#include <sstream> // USES std::ostringstream
-#include <cassert> // USES assert()
-
-// ----------------------------------------------------------------------
-// Default constructor.
-template<typename mesh_type, typename section_type>
-pylith::topology::Field<mesh_type, section_type>::Field(const mesh_type& mesh) :
- _mesh(mesh)
-{ // constructor
- _metadata.label = "unknown";
- _metadata.vectorFieldType = OTHER;
- _metadata.scale = 1.0;
- _metadata.dimsOkay = false;
-} // constructor
-
-// ----------------------------------------------------------------------
-// Constructor with mesh, section, and metadata.
-template<typename mesh_type, typename section_type>
-pylith::topology::Field<mesh_type, section_type>::Field(const mesh_type& mesh,
- const ALE::Obj<section_type>& section,
- const Metadata& metadata) :
- _metadata(metadata),
- _mesh(mesh),
- _section(section)
-{ // constructor
- assert(!section.isNull());
-} // constructor
-
-// ----------------------------------------------------------------------
-// Destructor.
-template<typename mesh_type, typename section_type>
-pylith::topology::Field<mesh_type, section_type>::~Field(void)
-{ // destructor
- deallocate();
-} // destructor
-
-// ----------------------------------------------------------------------
-// Deallocate PETSc and local data structures.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::deallocate(void)
-{ // deallocate
- PetscErrorCode err = 0;
-
- const typename scatter_map_type::const_iterator scattersEnd = _scatters.end();
- for (typename scatter_map_type::iterator s_iter=_scatters.begin();
- s_iter != scattersEnd;
- ++s_iter) {
- if (s_iter->second.vector) {
- err = VecDestroy(&s_iter->second.vector);CHECK_PETSC_ERROR(err);
- } // if
-
- if (s_iter->second.scatter) {
- err = VecScatterDestroy(&s_iter->second.scatter);CHECK_PETSC_ERROR(err);
- } // if
-
- if (s_iter->second.scatterVec) {
- err = VecDestroy(&s_iter->second.scatterVec);CHECK_PETSC_ERROR(err);
- } // if
- } // for
-} // deallocate
-
-// ----------------------------------------------------------------------
-// Set label for field.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::label(const char* value)
-{ // label
- _metadata.label = value;
- if (!_section.isNull()) {
- _section->setName(value);
- } // if
-
- const typename scatter_map_type::const_iterator scattersEnd = _scatters.end();
- for (typename scatter_map_type::const_iterator s_iter=_scatters.begin();
- s_iter != scattersEnd;
- ++s_iter) {
- if (s_iter->second.vector) {
- PetscErrorCode err =
- PetscObjectSetName((PetscObject)s_iter->second.vector, value);
- CHECK_PETSC_ERROR(err);
- } // if
- } // for
-} // label
-
-// ----------------------------------------------------------------------
-// Get spatial dimension of domain.
-template<typename mesh_type, typename section_type>
-int
-pylith::topology::Field<mesh_type, section_type>::spaceDim(void) const
-{ // spaceDim
- const spatialdata::geocoords::CoordSys* cs = _mesh.coordsys();
- return (cs) ? cs->spaceDim() : 0;
-} // spaceDim
-
-// ----------------------------------------------------------------------
-// Get the chart size.
-template<typename mesh_type, typename section_type>
-int
-pylith::topology::Field<mesh_type, section_type>::chartSize(void) const
-{ // chartSize
- return _section.isNull() ? 0 : _section->getChart().size();
-} // chartSize
-
-// ----------------------------------------------------------------------
-// Get the number of degrees of freedom.
-template<typename mesh_type, typename section_type>
-int
-pylith::topology::Field<mesh_type, section_type>::sectionSize(void) const
-{ // sectionSize
- return _section.isNull() ? 0 : _section->size();
-} // sectionSize
-
-// ----------------------------------------------------------------------
-// Create seive section.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::newSection(void)
-{ // newSection
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("Field");
-
- _section = new section_type(_mesh.comm(), _mesh.debug());
- assert(!_section.isNull());
- _section->setName(_metadata.label);
-
- logger.stagePop();
-} // newSection
-
-// ----------------------------------------------------------------------
-// Create sieve section and set chart and fiber dimesion for a
-// sequence of points.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::newSection(
- const ALE::Obj<label_sequence>& points,
- const int fiberDim)
-{ // newSection
- typedef typename mesh_type::SieveMesh::point_type point_type;
-
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("Field");
- if (fiberDim < 0) {
- std::ostringstream msg;
- msg << "Fiber dimension (" << fiberDim << ") for field '" << _metadata.label
- << "' must be nonnegative.";
- throw std::runtime_error(msg.str());
- } // if
-
- _section = new section_type(_mesh.comm(), _mesh.debug());
- assert(!_section.isNull());
- _section->setName(_metadata.label);
-
- if (points->size() > 0) {
- const point_type pointMin =
- *std::min_element(points->begin(), points->end());
- const point_type pointMax =
- *std::max_element(points->begin(), points->end());
- _section->setChart(chart_type(pointMin, pointMax+1));
- _section->setFiberDimension(points, fiberDim);
- } else // Create empty chart
- _section->setChart(chart_type(0, 0));
-
- logger.stagePop();
-} // newSection
-
-// ----------------------------------------------------------------------
-// Create sieve section and set chart and fiber dimesion for a list of
-// points.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::newSection(const int_array& points,
- const int fiberDim)
-{ // newSection
- typedef typename mesh_type::SieveMesh::point_type point_type;
-
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("Field");
- if (fiberDim < 0) {
- std::ostringstream msg;
- msg << "Fiber dimension (" << fiberDim << ") for field '" << _metadata.label
- << "' must be nonnegative.";
- throw std::runtime_error(msg.str());
- } // if
-
- _section = new section_type(_mesh.comm(), _mesh.debug());
- assert(!_section.isNull());
- _section->setName(_metadata.label);
-
- const int npts = points.size();
- if (npts > 0) {
- const point_type pointMin = points.min();
- const point_type pointMax = points.max();
- _section->setChart(chart_type(pointMin, pointMax+1));
- for (int i=0; i < npts; ++i)
- _section->setFiberDimension(points[i], fiberDim);
- } else // create empty chart
- _section->setChart(chart_type(0, 0));
-
- logger.stagePop();
-} // newSection
-
-// ----------------------------------------------------------------------
-// Create sieve section and set chart and fiber dimesion.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::newSection(const DomainEnum domain,
- const int fiberDim,
- const int stratum)
-{ // newSection
- const ALE::Obj<SieveMesh>& sieveMesh = _mesh.sieveMesh();
- assert(!sieveMesh.isNull());
-
- ALE::Obj<label_sequence> points;
- if (VERTICES_FIELD == domain)
- points = sieveMesh->depthStratum(stratum);
- else if (CELLS_FIELD == domain)
- points = sieveMesh->heightStratum(stratum);
- else {
- std::cerr << "Unknown value for DomainEnum: " << domain << std::endl;
- assert(0);
- throw std::logic_error("Bad domain enum in Field.");
- } // else
-
- newSection(points, fiberDim);
-} // newSection
-
-// ----------------------------------------------------------------------
-// Create section given chart.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::newSection(const Field& src,
- const int fiberDim)
-{ // newSection
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("Field");
-
- if (_section.isNull()) {
- logger.stagePop();
- newSection();
- logger.stagePush("Field");
- } // if
- if (fiberDim < 0) {
- std::ostringstream msg;
- msg << "Fiber dimension (" << fiberDim << ") for field '" << _metadata.label
- << "' must be nonnegative.";
- throw std::runtime_error(msg.str());
- } // if
-
- const ALE::Obj<section_type>& srcSection = src.section();
- if (!srcSection.isNull()) {
- _section->setChart(srcSection->getChart());
- const chart_type& chart = _section->getChart();
- const typename chart_type::const_iterator chartBegin = chart.begin();
- const typename chart_type::const_iterator chartEnd = chart.end();
-
- for (typename chart_type::const_iterator c_iter = chartBegin;
- c_iter != chartEnd;
- ++c_iter)
- if (srcSection->getFiberDimension(*c_iter) > 0)
- _section->setFiberDimension(*c_iter, fiberDim);
- } // if
-
- logger.stagePop();
-} // newSection
-
-// ----------------------------------------------------------------------
-// Create section with same layout as another section.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::cloneSection(const Field& src)
-{ // cloneSection
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("Field");
-
- deallocate();
- std::string origLabel = _metadata.label;
- _metadata = src._metadata;
- label(origLabel.c_str());
-
- const ALE::Obj<section_type>& srcSection = src.section();
- if (!srcSection.isNull() && _section.isNull()) {
- logger.stagePop();
- newSection();
- logger.stagePush("Field");
- }
-
- if (!_section.isNull()) {
- if (!srcSection->sharedStorage()) {
- _section->setAtlas(srcSection->getAtlas());
- _section->allocateStorage();
- _section->setBC(srcSection->getBC());
- _section->copySpaces(srcSection);
- } else {
- _section->setChart(srcSection->getChart());
- const chart_type& chart = _section->getChart();
- const typename chart_type::const_iterator chartBegin = chart.begin();
- const typename chart_type::const_iterator chartEnd = chart.end();
- for (typename chart_type::const_iterator c_iter = chartBegin;
- c_iter != chartEnd;
- ++c_iter) {
- const int fiberDim = srcSection->getFiberDimension(*c_iter);
- if (fiberDim > 0)
- _section->setFiberDimension(*c_iter, fiberDim);
- } // for
- const ALE::Obj<SieveMesh>& sieveMesh = _mesh.sieveMesh();
- assert(!sieveMesh.isNull());
- sieveMesh->allocate(_section);
- _section->setBC(srcSection->getBC());
- _section->copySpaces(srcSection); // :BUG: NEED TO REBUILD SPACES
- } // if/else
-
- // Reuse scatters in clone
- PetscErrorCode err = 0;
- if (src._scatters.size() > 0) {
- const typename scatter_map_type::const_iterator scattersEnd = src._scatters.end();
- for (typename scatter_map_type::const_iterator s_iter=src._scatters.begin();
- s_iter != scattersEnd;
- ++s_iter) {
- ScatterInfo sinfo;
- sinfo.vector = 0;
- sinfo.scatterVec = 0;
-
- // Copy scatter
- sinfo.scatter = s_iter->second.scatter;
- err = PetscObjectReference((PetscObject) sinfo.scatter);
- CHECK_PETSC_ERROR(err);
-
- // Create scatter Vec
- if (_section->sizeWithBC() > 0) {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF,
- _section->getStorageSize(),
- _section->restrictSpace(),
- &sinfo.scatterVec);
- CHECK_PETSC_ERROR(err);
- } else {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
- &sinfo.scatterVec);
- CHECK_PETSC_ERROR(err);
- } // else
-
- // Create vector using sizes from source section
- int vecLocalSize = 0;
- int vecGlobalSize = 0;
- err = VecGetLocalSize(s_iter->second.vector, &vecLocalSize);
- CHECK_PETSC_ERROR(err);
- err = VecGetSize(s_iter->second.vector, &vecGlobalSize); CHECK_PETSC_ERROR(err);
-
- err = VecCreate(_mesh.comm(), &sinfo.vector); CHECK_PETSC_ERROR(err);
- err = PetscObjectSetName((PetscObject)sinfo.vector, _metadata.label.c_str());
- CHECK_PETSC_ERROR(err);
- err = VecSetSizes(sinfo.vector, vecLocalSize, vecGlobalSize);
- CHECK_PETSC_ERROR(err);
- err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
-
- _scatters[s_iter->first] = sinfo;
- } // for
- } // if
- } // if
- logger.stagePop();
-} // cloneSection
-
-// ----------------------------------------------------------------------
-// Clear variables associated with section.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::clear(void)
-{ // clear
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("Field");
-
- deallocate();
- if (!_section.isNull())
- _section->clear();
-
- _metadata.scale = 1.0;
- _metadata.vectorFieldType = OTHER;
- _metadata.dimsOkay = false;
-
- logger.stagePop();
-} // clear
-
-// ----------------------------------------------------------------------
-// Allocate Sieve section.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::allocate(void)
-{ // allocate
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("Field");
-
- assert(!_section.isNull());
-
- const ALE::Obj<SieveMesh>& sieveMesh = _mesh.sieveMesh();
- assert(!sieveMesh.isNull());
- sieveMesh->allocate(_section);
-
- logger.stagePop();
-} // allocate
-
-// ----------------------------------------------------------------------
-// Zero section values (excluding constrained DOF).
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::zero(void)
-{ // zero
- if (!_section.isNull())
- _section->zero(); // Does not zero BC.
-} // zero
-
-// ----------------------------------------------------------------------
-// Zero section values (including constrained DOF).
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::zeroAll(void)
-{ // zeroAll
- if (!_section.isNull()) {
- const chart_type& chart = _section->getChart();
- const typename chart_type::const_iterator chartBegin = chart.begin();
- const typename chart_type::const_iterator chartEnd = chart.end();
-
- // Assume fiber dimension is uniform
- const int fiberDim = (chart.size() > 0) ?
- _section->getFiberDimension(*chartBegin) : 0;
- double_array values(fiberDim);
- values *= 0.0;
-
- for (typename chart_type::const_iterator c_iter = chartBegin;
- c_iter != chartEnd;
- ++c_iter) {
- if (_section->getFiberDimension(*c_iter) > 0) {
- assert(fiberDim == _section->getFiberDimension(*c_iter));
- _section->updatePointAll(*c_iter, &values[0]);
- } // if
- } // for
- } // if
-} // zeroAll
-
-// ----------------------------------------------------------------------
-// Complete section by assembling across processors.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::complete(void)
-{ // complete
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("Completion");
-
- const ALE::Obj<SieveMesh>& sieveMesh = _mesh.sieveMesh();
- assert(!sieveMesh.isNull());
-
- if (!_section.isNull())
- ALE::Completion::completeSectionAdd(sieveMesh->getSendOverlap(),
- sieveMesh->getRecvOverlap(),
- _section, _section);
-
- logger.stagePop();
-} // complete
-
-// ----------------------------------------------------------------------
-// Copy field values and metadata.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::copy(const Field& field)
-{ // copy
- // Check compatibility of sections
- const int srcSize = field.chartSize();
- const int dstSize = chartSize();
- if (field.spaceDim() != spaceDim() ||
- field._metadata.vectorFieldType != _metadata.vectorFieldType ||
- srcSize != dstSize) {
- std::ostringstream msg;
-
- msg << "Cannot copy values from section '" << field._metadata.label
- << "' to section '" << _metadata.label
- << "'. Sections are incompatible.\n"
- << " Source section:\n"
- << " space dim: " << field.spaceDim() << "\n"
- << " vector field type: " << field._metadata.vectorFieldType << "\n"
- << " scale: " << field._metadata.scale << "\n"
- << " size: " << srcSize << "\n"
- << " Destination section:\n"
- << " space dim: " << spaceDim() << "\n"
- << " vector field type: " << _metadata.vectorFieldType << "\n"
- << " scale: " << _metadata.scale << "\n"
- << " size: " << dstSize;
- throw std::runtime_error(msg.str());
- } // if
- assert( (_section.isNull() && field._section.isNull()) ||
- (!_section.isNull() && !field._section.isNull()) );
-
- if (!_section.isNull()) {
- // Copy values from field
- const chart_type& chart = _section->getChart();
- const typename chart_type::const_iterator chartBegin = chart.begin();
- const typename chart_type::const_iterator chartEnd = chart.end();
-
- for (typename chart_type::const_iterator c_iter = chartBegin;
- c_iter != chartEnd;
- ++c_iter) {
- assert(field._section->getFiberDimension(*c_iter) ==
- _section->getFiberDimension(*c_iter));
- if (_section->getFiberDimension(*c_iter) > 0)
- _section->updatePointAll(*c_iter,
- field._section->restrictPoint(*c_iter));
- } // for
- } // if
-
- label(field._metadata.label.c_str()); // Update label
- _metadata.scale = field._metadata.scale;
-} // copy
-
-// ----------------------------------------------------------------------
-// Copy field values.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::copy(const ALE::Obj<section_type>& osection)
-{ // copy
- // Check compatibility of sections
- const int srcSize = osection->getChart().size();
- const int dstSize = chartSize();
- if (srcSize != dstSize) {
- std::ostringstream msg;
-
- msg << "Cannot copy values from Sieve section "
- << _metadata.label << "'. Sections are incompatible.\n"
- << " Source section:\n"
- << " size: " << srcSize << "\n"
- << " Destination section:\n"
- << " space dim: " << spaceDim() << "\n"
- << " vector field type: " << _metadata.vectorFieldType << "\n"
- << " scale: " << _metadata.scale << "\n"
- << " size: " << dstSize;
- throw std::runtime_error(msg.str());
- } // if
- assert( (_section.isNull() && osection.isNull()) ||
- (!_section.isNull() && !osection.isNull()) );
-
- if (!_section.isNull()) {
- // Copy values from field
- const chart_type& chart = _section->getChart();
- const typename chart_type::const_iterator chartEnd = chart.end();
-
- for (typename chart_type::const_iterator c_iter = chart.begin();
- c_iter != chartEnd;
- ++c_iter) {
- assert(osection->getFiberDimension(*c_iter) ==
- _section->getFiberDimension(*c_iter));
- if (osection->getFiberDimension(*c_iter))
- _section->updatePoint(*c_iter, osection->restrictPoint(*c_iter));
- } // for
- } // if
-} // copy
-
-// ----------------------------------------------------------------------
-// Add two fields, storing the result in one of the fields.
-template<typename mesh_type, typename section_type>
-pylith::topology::Field<mesh_type, section_type>&
-pylith::topology::Field<mesh_type, section_type>::operator+=(const Field& field)
-{ // operator+=
- // Check compatibility of sections
- const int srcSize = field.chartSize();
- const int dstSize = chartSize();
- if (field.spaceDim() != spaceDim() ||
- field._metadata.vectorFieldType != _metadata.vectorFieldType ||
- field._metadata.scale != _metadata.scale ||
- srcSize != dstSize) {
- std::ostringstream msg;
-
- msg << "Cannot add values from section '" << field._metadata.label
- << "' to section '" << _metadata.label
- << "'. Sections are incompatible.\n"
- << " Source section:\n"
- << " space dim: " << field.spaceDim() << "\n"
- << " vector field type: " << field._metadata.vectorFieldType << "\n"
- << " scale: " << field._metadata.scale << "\n"
- << " size: " << srcSize << "\n"
- << " Destination section:\n"
- << " space dim: " << spaceDim() << "\n"
- << " vector field type: " << _metadata.vectorFieldType << "\n"
- << " scale: " << _metadata.scale << "\n"
- << " size: " << dstSize;
- throw std::runtime_error(msg.str());
- } // if
- assert( (_section.isNull() && field._section.isNull()) ||
- (!_section.isNull() && !field._section.isNull()) );
-
- if (!_section.isNull()) {
- // Add values from field
- const chart_type& chart = _section->getChart();
- const typename chart_type::const_iterator chartBegin = chart.begin();
- const typename chart_type::const_iterator chartEnd = chart.end();
-
- // Assume fiber dimension is uniform
- const int fiberDim = (chart.size() > 0) ?
- _section->getFiberDimension(*chartBegin) : 0;
- double_array values(fiberDim);
-
- for (typename chart_type::const_iterator c_iter = chartBegin;
- c_iter != chartEnd;
- ++c_iter) {
- if (field._section->getFiberDimension(*c_iter) > 0) {
- assert(fiberDim == field._section->getFiberDimension(*c_iter));
- assert(fiberDim == _section->getFiberDimension(*c_iter));
- field._section->restrictPoint(*c_iter, &values[0], values.size());
- _section->updatePointAllAdd(*c_iter, &values[0]);
- } // if
- } // for
- } // if
-
- return *this;
-} // operator+=
-
-// ----------------------------------------------------------------------
-// Dimensionalize field.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::dimensionalize(void) const
-{ // dimensionalize
- if (!_metadata.dimsOkay) {
- std::ostringstream msg;
- msg << "Cannot dimensionalize field '" << _metadata.label
- << "' because the flag "
- << "has been set to keep field nondimensional.";
- throw std::runtime_error(msg.str());
- } // if
-
- if (!_section.isNull()) {
- const chart_type& chart = _section->getChart();
- const typename chart_type::const_iterator chartEnd = chart.end();
-
- // Assume fiber dimension is uniform
- const int fiberDim = (chart.size() > 0) ?
- _section->getFiberDimension(*chart.begin()) : 0;
- double_array values(fiberDim);
-
- spatialdata::units::Nondimensional normalizer;
-
- for (typename chart_type::const_iterator c_iter = chart.begin();
- c_iter != chartEnd;
- ++c_iter)
- if (_section->getFiberDimension(*c_iter) > 0) {
- assert(fiberDim == _section->getFiberDimension(*c_iter));
-
- _section->restrictPoint(*c_iter, &values[0], values.size());
- normalizer.dimensionalize(&values[0], values.size(), _metadata.scale);
- _section->updatePointAll(*c_iter, &values[0]);
- } // if
- } // if
-} // dimensionalize
-
-// ----------------------------------------------------------------------
-// Print field to standard out.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::view(const char* label) const
-{ // view
- std::string vecFieldString;
- switch(_metadata.vectorFieldType)
- { // switch
- case SCALAR:
- vecFieldString = "scalar";
- break;
- case VECTOR:
- vecFieldString = "vector";
- break;
- case TENSOR:
- vecFieldString = "tensor";
- break;
- case OTHER:
- vecFieldString = "other";
- break;
- case MULTI_SCALAR:
- vecFieldString = "multiple scalars";
- break;
- case MULTI_VECTOR:
- vecFieldString = "multiple vectors";
- break;
- case MULTI_TENSOR:
- vecFieldString = "multiple tensors";
- break;
- case MULTI_OTHER:
- vecFieldString = "multiple other values";
- break;
- default :
- std::cerr << "Unknown vector field value '" << _metadata.vectorFieldType
- << "'." << std::endl;
- assert(0);
- throw std::logic_error("Bad vector field type in Field.");
- } // switch
-
- std::cout << "Viewing field '" << _metadata.label << "' "<< label << ".\n"
- << " vector field type: " << vecFieldString << "\n"
- << " scale: " << _metadata.scale << "\n"
- << " dimensionalize flag: " << _metadata.dimsOkay << std::endl;
- if (!_section.isNull())
- _section->view(label);
-} // view
-
-// ----------------------------------------------------------------------
-// Create PETSc vector scatter for field. This is used to transfer
-// information from the "global" PETSc vector view to the "local"
-// Sieve section view.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::createScatter(const char* context)
-{ // createScatter
- assert(context);
- assert(!_section.isNull());
- assert(!_mesh.sieveMesh().isNull());
-
- PetscErrorCode err = 0;
- const bool createScatterOk = true;
- ScatterInfo& sinfo = _getScatter(context, createScatterOk);
- if (sinfo.scatter) {
- assert(sinfo.scatterVec);
- assert(sinfo.vector);
- return;
- } // if
-
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("GlobalOrder");
-
- // Get global order (create if necessary).
- const std::string& orderLabel = _section->getName();
- const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
- _mesh.sieveMesh();
- assert(!sieveMesh.isNull());
- const ALE::Obj<typename mesh_type::SieveMesh::order_type>& order =
- sieveMesh->getFactory()->getGlobalOrder(sieveMesh, orderLabel,
- _section);
- assert(!order.isNull());
-
- // Create scatter
- err = DMMeshCreateGlobalScatter(_mesh.sieveMesh(), _section, order, false, &sinfo.scatter);
- CHECK_PETSC_ERROR(err);
-
- // Create scatterVec
- if (_section->sizeWithBC() > 0) {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF, _section->getStorageSize(),
- _section->restrictSpace(),
- &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
- } else {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
- &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
- } // if/else
-
- // Create vector
- err = VecCreate(_mesh.comm(), &sinfo.vector);
- CHECK_PETSC_ERROR(err);
- err = PetscObjectSetName((PetscObject)sinfo.vector,
- _metadata.label.c_str());
- CHECK_PETSC_ERROR(err);
- err = VecSetSizes(sinfo.vector,
- order->getLocalSize(), order->getGlobalSize());
- CHECK_PETSC_ERROR(err);
- err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
-
- logger.stagePop();
-} // createScatter
-
-// ----------------------------------------------------------------------
-// Create PETSc vector scatter for field. This is used to transfer
-// information from the "global" PETSc vector view to the "local"
-// Sieve section view. The PETSc vector does not contain constrained
-// DOF. Use createScatterWithBC() to include the constrained DOF in
-// the PETSc vector.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::createScatter(const typename ALE::Obj<typename SieveMesh::numbering_type> numbering,
- const char* context)
-{ // createScatter
- assert(!numbering.isNull());
- assert(context);
- assert(!_section.isNull());
- assert(!_mesh.sieveMesh().isNull());
-
- PetscErrorCode err = 0;
- const bool createScatterOk = true;
- ScatterInfo& sinfo = _getScatter(context, createScatterOk);
-
- // Only create if scatter and scatterVec do not alreay exist.
- if (sinfo.scatter) {
- assert(sinfo.scatterVec);
- assert(sinfo.vector);
- return;
- } // if
-
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("GlobalOrder");
-
- // Get global order (create if necessary).
- const std::string& orderLabel =
- (strlen(context) > 0) ?
- _section->getName() + std::string("_") + std::string(context) :
- _section->getName();
- const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
- _mesh.sieveMesh();
- assert(!sieveMesh.isNull());
- const ALE::Obj<typename mesh_type::SieveMesh::order_type>& order =
- sieveMesh->getFactory()->getGlobalOrder(sieveMesh, orderLabel,
- numbering->getChart().begin(),
- numbering->getChart().end(),
- _section);
- assert(!order.isNull());
-
- // Create scatter
- err = DMMeshCreateGlobalScatter(_mesh.sieveMesh(), _section, order, false, &sinfo.scatter);
- CHECK_PETSC_ERROR(err);
-
- // Create scatterVec
- if (_section->sizeWithBC() > 0) {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF, _section->getStorageSize(),
- _section->restrictSpace(),
- &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
- } else {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
- &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
- } // if/else
-
- // Create vector
- err = VecCreate(_mesh.comm(), &sinfo.vector);
- CHECK_PETSC_ERROR(err);
- err = PetscObjectSetName((PetscObject)sinfo.vector,
- _metadata.label.c_str());
- CHECK_PETSC_ERROR(err);
- err = VecSetSizes(sinfo.vector,
- order->getLocalSize(), order->getGlobalSize());
- CHECK_PETSC_ERROR(err);
- err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
-
-#if 0
- std::cout << "CONTEXT: " << context
- << ", orderLabel: " << orderLabel
- << ", section size w/BC: " << _section->sizeWithBC()
- << ", section size: " << _section->size()
- << ", global numbering size: " << numbering->getGlobalSize()
- << ", global size: " << order->getGlobalSize()
- << std::endl;
-#endif
-
- logger.stagePop();
-} // createScatter
-
-// ----------------------------------------------------------------------
-// Create PETSc vector scatter for field. This is used to transfer
-// information from the "global" PETSc vector view to the "local"
-// Sieve section view. The PETSc vector does not contain constrained
-// DOF. Use createScatterWithBC() to include the constrained DOF in
-// the PETSc vector.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::createScatterWithBC(const char* context)
-{ // createScatterWithBC
- assert(context);
- assert(!_section.isNull());
- assert(!_mesh.sieveMesh().isNull());
-
-
- PetscErrorCode err = 0;
- const bool createScatterOk = true;
- ScatterInfo& sinfo = _getScatter(context, createScatterOk);
- if (sinfo.scatter) {
- assert(sinfo.scatterVec);
- assert(sinfo.vector);
- return;
- } // if
-
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("GlobalOrder");
-
- // Get global order (create if necessary).
- const std::string& orderLabel = _section->getName();
- const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
- _mesh.sieveMesh();
- assert(!sieveMesh.isNull());
- const ALE::Obj<typename mesh_type::SieveMesh::order_type>& order =
- sieveMesh->getFactory()->getGlobalOrderWithBC(sieveMesh, orderLabel,
- _section);
- assert(!order.isNull());
-
- // Create scatter
- err = DMMeshCreateGlobalScatter(_mesh.sieveMesh(), _section, order, true, &sinfo.scatter);
- CHECK_PETSC_ERROR(err);
-
- // Create scatterVec
- if (_section->sizeWithBC() > 0) {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF, _section->getStorageSize(),
- _section->restrictSpace(),
- &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
- } else {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
- &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
- } // if/else
-
- // Create vector
- err = VecCreate(_mesh.comm(), &sinfo.vector);
- CHECK_PETSC_ERROR(err);
- err = PetscObjectSetName((PetscObject)sinfo.vector,
- _metadata.label.c_str());
- CHECK_PETSC_ERROR(err);
- err = VecSetSizes(sinfo.vector,
- order->getLocalSize(), order->getGlobalSize());
- CHECK_PETSC_ERROR(err);
- err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
-
- logger.stagePop();
-} // createScatterWithBC
-
-// ----------------------------------------------------------------------
-// Create PETSc vector scatter for field. This is used to transfer
-// information from the "global" PETSc vector view to the "local"
-// Sieve section view. The PETSc vector includes constrained DOF. Use
-// createScatter() if constrained DOF should be omitted from the PETSc
-// vector.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::createScatterWithBC(const typename ALE::Obj<typename SieveMesh::numbering_type> numbering,
- const char* context)
-{ // createScatterWithBC
- assert(!numbering.isNull());
- assert(context);
- assert(!_section.isNull());
-
- PetscErrorCode err = 0;
- const bool createScatterOk = true;
- ScatterInfo& sinfo = _getScatter(context, createScatterOk);
-
- // Only create if scatter and scatterVec do not alreay exist.
- if (sinfo.scatter) {
- assert(sinfo.scatterVec);
- assert(sinfo.vector);
- return;
- } // if
-
- ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
- logger.stagePush("GlobalOrder");
-
- // Get global order (create if necessary).
- const std::string& orderLabel =
- (strlen(context) > 0) ?
- _section->getName() + std::string("_") + std::string(context) :
- _section->getName();
- const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
- _mesh.sieveMesh();
- assert(!sieveMesh.isNull());
- const ALE::Obj<typename mesh_type::SieveMesh::order_type>& order =
- sieveMesh->getFactory()->getGlobalOrderWithBC(sieveMesh, orderLabel,
- numbering->getChart().begin(),
- numbering->getChart().end(),
- _section);
- assert(!order.isNull());
-
- order->view("GLOBAL ORDER");
-
- // Create scatter
- err = DMMeshCreateGlobalScatter(_mesh.sieveMesh(), _section, order, true, &sinfo.scatter);
- CHECK_PETSC_ERROR(err);
-
- // Create scatterVec
- if (_section->sizeWithBC() > 0) {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF, _section->getStorageSize(),
- _section->restrictSpace(),
- &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
- } else {
- err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
- &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
- } // if/else
-
- // Create vector
- err = VecCreate(_mesh.comm(), &sinfo.vector);
- CHECK_PETSC_ERROR(err);
- err = PetscObjectSetName((PetscObject)sinfo.vector,
- _metadata.label.c_str());
- CHECK_PETSC_ERROR(err);
- err = VecSetSizes(sinfo.vector,
- order->getLocalSize(), order->getGlobalSize());
- CHECK_PETSC_ERROR(err);
- err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
-
-#if 0
- std::cout << "CONTEXT: " << context
- << ", orderLabel: " << orderLabel
- << ", section size w/BC: " << _section->sizeWithBC()
- << ", section size: " << _section->size()
- << ", section storage size: " << _section->getStorageSize()
- << ", global numbering size: " << numbering->getGlobalSize()
- << ", global size: " << order->getGlobalSize()
- << ", scatter from size: " << sinfo.scatter->from_n
- << std::endl;
-#endif
-
- logger.stagePop();
-} // createScatterWithBC
-
-// ----------------------------------------------------------------------
-// Get PETSc vector associated with field.
-template<typename mesh_type, typename section_type>
-PetscVec
-pylith::topology::Field<mesh_type, section_type>::vector(const char* context)
-{ // vector
- ScatterInfo& sinfo = _getScatter(context);
- return sinfo.vector;
-} // vector
-
-// ----------------------------------------------------------------------
-// Get PETSc vector associated with field.
-template<typename mesh_type, typename section_type>
-const PetscVec
-pylith::topology::Field<mesh_type, section_type>::vector(const char* context) const
-{ // vector
- const ScatterInfo& sinfo = _getScatter(context);
- return sinfo.vector;
-} // vector
-
-// ----------------------------------------------------------------------
-// Scatter section information across processors to update the
-// PETSc vector view of the field.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::scatterSectionToVector(const char* context) const
-{ // scatterSectionToVector
- assert(context);
-
- const ScatterInfo& sinfo = _getScatter(context);
- scatterSectionToVector(sinfo.vector, context);
-} // scatterSectionToVector
-
-// ----------------------------------------------------------------------
-// Scatter section information across processors to update the
-// PETSc vector view of the field.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::scatterSectionToVector(const PetscVec vector,
- const char* context) const
-{ // scatterSectionToVector
- assert(vector);
- assert(context);
- assert(!_section.isNull());
-
- PetscErrorCode err = 0;
- const ScatterInfo& sinfo = _getScatter(context);
- err = VecScatterBegin(sinfo.scatter, sinfo.scatterVec, vector,
- INSERT_VALUES, SCATTER_FORWARD); CHECK_PETSC_ERROR(err);
- err = VecScatterEnd(sinfo.scatter, sinfo.scatterVec, vector,
- INSERT_VALUES, SCATTER_FORWARD); CHECK_PETSC_ERROR(err);
-} // scatterSectionToVector
-
-// ----------------------------------------------------------------------
-// Scatter PETSc vector information across processors to update the
-// section view of the field.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::scatterVectorToSection(const char* context) const
-{ // scatterVectorToSection
- assert(context);
-
- const ScatterInfo& sinfo = _getScatter(context);
- scatterVectorToSection(sinfo.vector, context);
-} // scatterVectorToSection
-
-// ----------------------------------------------------------------------
-// Scatter PETSc vector information across processors to update the
-// section view of the field.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::scatterVectorToSection(const PetscVec vector,
- const char* context) const
-{ // scatterVectorToSection
- assert(vector);
- assert(context);
- assert(!_section.isNull());
-
- PetscErrorCode err = 0;
- const ScatterInfo& sinfo = _getScatter(context);
- err = VecScatterBegin(sinfo.scatter, vector, sinfo.scatterVec,
- INSERT_VALUES, SCATTER_REVERSE); CHECK_PETSC_ERROR(err);
- err = VecScatterEnd(sinfo.scatter, vector, sinfo.scatterVec,
- INSERT_VALUES, SCATTER_REVERSE); CHECK_PETSC_ERROR(err);
-} // scatterVectorToSection
-
-// ----------------------------------------------------------------------
-// Setup split field with all one space per spatial dimension.
-template<typename mesh_type, typename section_type>
-void
-pylith::topology::Field<mesh_type, section_type>::splitDefault(void)
-{ // splitDefault
- assert(!_section.isNull());
- const int spaceDim = _mesh.dimension();
- for (int iDim=0; iDim < spaceDim; ++iDim)
- _section->addSpace(); // displacements
-
- const chart_type& chart = _section->getChart();
-
- const typename chart_type::const_iterator chartBegin = chart.begin();
- const typename chart_type::const_iterator chartEnd = chart.end();
- for (int fibration=0; fibration < spaceDim; ++fibration)
- for (typename chart_type::const_iterator c_iter = chart.begin();
- c_iter != chartEnd;
- ++c_iter) {
- assert(spaceDim == _section->getFiberDimension(*c_iter));
- _section->setFiberDimension(*c_iter, 1, fibration);
- } // for
-} // splitDefault
-
-// ----------------------------------------------------------------------
-// Get scatter for given context.
-template<typename mesh_type, typename section_type>
-typename pylith::topology::Field<mesh_type, section_type>::ScatterInfo&
-pylith::topology::Field<mesh_type, section_type>::_getScatter(const char* context,
- const bool createOk)
-{ // _getScatter
- assert(context);
-
- const bool isNewScatter = _scatters.find(context) == _scatters.end();
-
- if (isNewScatter && !createOk) {
- std::ostringstream msg;
- msg << "Scatter for context '" << context << "' does not exist.";
- throw std::runtime_error(msg.str());
- } // if
-
- ScatterInfo& sinfo = _scatters[context];
- if (isNewScatter) {
- sinfo.vector = 0;
- sinfo.scatter = 0;
- sinfo.scatterVec = 0;
- } // if
- assert(_scatters.find(context) != _scatters.end());
-
- return sinfo;
-} // _getScatter
-
-// ----------------------------------------------------------------------
-// Get scatter for given context.
-template<typename mesh_type, typename section_type>
-const typename pylith::topology::Field<mesh_type, section_type>::ScatterInfo&
-pylith::topology::Field<mesh_type, section_type>::_getScatter(const char* context) const
-{ // _getScatter
- assert(context);
-
- const typename scatter_map_type::const_iterator s_iter =
- _scatters.find(context);
- if (s_iter == _scatters.end()) {
- std::ostringstream msg;
- msg << "Scatter for context '" << context << "' does not exist.";
- throw std::runtime_error(msg.str());
- } // if
-
- return s_iter->second;
-} // _getScatter
-
-
-// End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/topology/Field.cc (from rev 18402, short/3D/PyLith/trunk/libsrc/topology/Field.cc)
===================================================================
--- short/3D/PyLith/trunk/libsrc/pylith/topology/Field.cc (rev 0)
+++ short/3D/PyLith/trunk/libsrc/pylith/topology/Field.cc 2011-05-21 13:46:30 UTC (rev 18406)
@@ -0,0 +1,1181 @@
+// -*- C++ -*-
+//
+// ======================================================================
+//
+// Brad T. Aagaard, U.S. Geological Survey
+// Charles A. Williams, GNS Science
+// Matthew G. Knepley, University of Chicago
+//
+// This code was developed as part of the Computational Infrastructure
+// for Geodynamics (http://geodynamics.org).
+//
+// Copyright (c) 2010 University of California, Davis
+//
+// See COPYING for license information.
+//
+// ======================================================================
+//
+
+#include <portinfo>
+
+#include "Field.hh" // implementation of class methods
+
+#include "pylith/utils/array.hh" // USES double_array
+
+#include "spatialdata/geocoords/CoordSys.hh" // USES CoordSys
+#include "spatialdata/units/Nondimensional.hh" // USES Nondimensional
+
+#include "pylith/utils/petscerror.h" // USES CHECK_PETSC_ERROR
+#include <stdexcept> // USES std::runtime_error
+#include <sstream> // USES std::ostringstream
+#include <cassert> // USES assert()
+
+// ----------------------------------------------------------------------
+// Default constructor.
+template<typename mesh_type, typename section_type>
+pylith::topology::Field<mesh_type, section_type>::Field(const mesh_type& mesh) :
+ _mesh(mesh)
+{ // constructor
+ _metadata.label = "unknown";
+ _metadata.vectorFieldType = OTHER;
+ _metadata.scale = 1.0;
+ _metadata.dimsOkay = false;
+} // constructor
+
+// ----------------------------------------------------------------------
+// Constructor with mesh, section, and metadata.
+template<typename mesh_type, typename section_type>
+pylith::topology::Field<mesh_type, section_type>::Field(const mesh_type& mesh,
+ const ALE::Obj<section_type>& section,
+ const Metadata& metadata) :
+ _metadata(metadata),
+ _mesh(mesh),
+ _section(section)
+{ // constructor
+ assert(!section.isNull());
+} // constructor
+
+// ----------------------------------------------------------------------
+// Destructor.
+template<typename mesh_type, typename section_type>
+pylith::topology::Field<mesh_type, section_type>::~Field(void)
+{ // destructor
+ deallocate();
+} // destructor
+
+// ----------------------------------------------------------------------
+// Deallocate PETSc and local data structures.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::deallocate(void)
+{ // deallocate
+ PetscErrorCode err = 0;
+
+ const typename scatter_map_type::const_iterator scattersEnd = _scatters.end();
+ for (typename scatter_map_type::iterator s_iter=_scatters.begin();
+ s_iter != scattersEnd;
+ ++s_iter) {
+ if (s_iter->second.vector) {
+ err = VecDestroy(&s_iter->second.vector);CHECK_PETSC_ERROR(err);
+ } // if
+
+ if (s_iter->second.scatter) {
+ err = VecScatterDestroy(&s_iter->second.scatter);CHECK_PETSC_ERROR(err);
+ } // if
+
+ if (s_iter->second.scatterVec) {
+ err = VecDestroy(&s_iter->second.scatterVec);CHECK_PETSC_ERROR(err);
+ } // if
+ } // for
+} // deallocate
+
+// ----------------------------------------------------------------------
+// Set label for field.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::label(const char* value)
+{ // label
+ _metadata.label = value;
+ if (!_section.isNull()) {
+ _section->setName(value);
+ } // if
+
+ const typename scatter_map_type::const_iterator scattersEnd = _scatters.end();
+ for (typename scatter_map_type::const_iterator s_iter=_scatters.begin();
+ s_iter != scattersEnd;
+ ++s_iter) {
+ if (s_iter->second.vector) {
+ PetscErrorCode err =
+ PetscObjectSetName((PetscObject)s_iter->second.vector, value);
+ CHECK_PETSC_ERROR(err);
+ } // if
+ } // for
+} // label
+
+// ----------------------------------------------------------------------
+// Get spatial dimension of domain.
+template<typename mesh_type, typename section_type>
+int
+pylith::topology::Field<mesh_type, section_type>::spaceDim(void) const
+{ // spaceDim
+ const spatialdata::geocoords::CoordSys* cs = _mesh.coordsys();
+ return (cs) ? cs->spaceDim() : 0;
+} // spaceDim
+
+// ----------------------------------------------------------------------
+// Get the chart size.
+template<typename mesh_type, typename section_type>
+int
+pylith::topology::Field<mesh_type, section_type>::chartSize(void) const
+{ // chartSize
+ return _section.isNull() ? 0 : _section->getChart().size();
+} // chartSize
+
+// ----------------------------------------------------------------------
+// Get the number of degrees of freedom.
+template<typename mesh_type, typename section_type>
+int
+pylith::topology::Field<mesh_type, section_type>::sectionSize(void) const
+{ // sectionSize
+ return _section.isNull() ? 0 : _section->size();
+} // sectionSize
+
+// ----------------------------------------------------------------------
+// Create sieve section.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::newSection(void)
+{ // newSection
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("Field");
+
+ _section = new section_type(_mesh.comm(), _mesh.debug());
+ assert(!_section.isNull());
+ _section->setName(_metadata.label);
+
+ logger.stagePop();
+} // newSection
+
+// ----------------------------------------------------------------------
+// Create sieve section and set chart and fiber dimension for a
+// sequence of points.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::newSection(
+ const ALE::Obj<label_sequence>& points,
+ const int fiberDim)
+{ // newSection
+ typedef typename mesh_type::SieveMesh::point_type point_type;
+
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("Field");
+ if (fiberDim < 0) {
+ std::ostringstream msg;
+ msg << "Fiber dimension (" << fiberDim << ") for field '" << _metadata.label
+ << "' must be nonnegative.";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ _section = new section_type(_mesh.comm(), _mesh.debug());
+ assert(!_section.isNull());
+ _section->setName(_metadata.label);
+
+ if (points->size() > 0) {
+ const point_type pointMin =
+ *std::min_element(points->begin(), points->end());
+ const point_type pointMax =
+ *std::max_element(points->begin(), points->end());
+ _section->setChart(chart_type(pointMin, pointMax+1));
+ _section->setFiberDimension(points, fiberDim);
+ } else // Create empty chart
+ _section->setChart(chart_type(0, 0));
+
+ logger.stagePop();
+} // newSection
+
+// ----------------------------------------------------------------------
+// Create sieve section and set chart and fiber dimension for a list of
+// points.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::newSection(const int_array& points,
+ const int fiberDim)
+{ // newSection
+ typedef typename mesh_type::SieveMesh::point_type point_type;
+
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("Field");
+ if (fiberDim < 0) {
+ std::ostringstream msg;
+ msg << "Fiber dimension (" << fiberDim << ") for field '" << _metadata.label
+ << "' must be nonnegative.";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ _section = new section_type(_mesh.comm(), _mesh.debug());
+ assert(!_section.isNull());
+ _section->setName(_metadata.label);
+
+ const int npts = points.size();
+ if (npts > 0) {
+ const point_type pointMin = points.min();
+ const point_type pointMax = points.max();
+ _section->setChart(chart_type(pointMin, pointMax+1));
+ for (int i=0; i < npts; ++i)
+ _section->setFiberDimension(points[i], fiberDim);
+ } else // create empty chart
+ _section->setChart(chart_type(0, 0));
+
+ logger.stagePop();
+} // newSection
+
+// ----------------------------------------------------------------------
+// Create sieve section and set chart and fiber dimension.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::newSection(const DomainEnum domain,
+ const int fiberDim,
+ const int stratum)
+{ // newSection
+ const ALE::Obj<SieveMesh>& sieveMesh = _mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+
+ ALE::Obj<label_sequence> points;
+ if (VERTICES_FIELD == domain)
+ points = sieveMesh->depthStratum(stratum);
+ else if (CELLS_FIELD == domain)
+ points = sieveMesh->heightStratum(stratum);
+ else {
+ std::cerr << "Unknown value for DomainEnum: " << domain << std::endl;
+ assert(0);
+ throw std::logic_error("Bad domain enum in Field.");
+ } // else
+
+ newSection(points, fiberDim);
+} // newSection
+
+// ----------------------------------------------------------------------
+// Create section given chart.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::newSection(const Field& src,
+ const int fiberDim)
+{ // newSection
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("Field");
+
+ if (_section.isNull()) {
+ logger.stagePop();
+ newSection();
+ logger.stagePush("Field");
+ } // if
+ if (fiberDim < 0) {
+ std::ostringstream msg;
+ msg << "Fiber dimension (" << fiberDim << ") for field '" << _metadata.label
+ << "' must be nonnegative.";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ const ALE::Obj<section_type>& srcSection = src.section();
+ if (!srcSection.isNull()) {
+ _section->setChart(srcSection->getChart());
+ const chart_type& chart = _section->getChart();
+ const typename chart_type::const_iterator chartBegin = chart.begin();
+ const typename chart_type::const_iterator chartEnd = chart.end();
+
+ for (typename chart_type::const_iterator c_iter = chartBegin;
+ c_iter != chartEnd;
+ ++c_iter)
+ if (srcSection->getFiberDimension(*c_iter) > 0)
+ _section->setFiberDimension(*c_iter, fiberDim);
+ } // if
+
+ logger.stagePop();
+} // newSection
+
+// ----------------------------------------------------------------------
+// Create section with same layout as another section.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::cloneSection(const Field& src)
+{ // cloneSection
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("Field");
+
+ deallocate();
+ std::string origLabel = _metadata.label;
+ _metadata = src._metadata;
+ label(origLabel.c_str());
+
+ const ALE::Obj<section_type>& srcSection = src.section();
+ if (!srcSection.isNull() && _section.isNull()) {
+ logger.stagePop();
+ newSection();
+ logger.stagePush("Field");
+ }
+
+ if (!_section.isNull()) {
+ if (!srcSection->sharedStorage()) {
+ _section->setAtlas(srcSection->getAtlas());
+ _section->allocateStorage();
+ _section->setBC(srcSection->getBC());
+ _section->copySpaces(srcSection);
+ } else {
+ _section->setChart(srcSection->getChart());
+ const chart_type& chart = _section->getChart();
+ const typename chart_type::const_iterator chartBegin = chart.begin();
+ const typename chart_type::const_iterator chartEnd = chart.end();
+ for (typename chart_type::const_iterator c_iter = chartBegin;
+ c_iter != chartEnd;
+ ++c_iter) {
+ const int fiberDim = srcSection->getFiberDimension(*c_iter);
+ if (fiberDim > 0)
+ _section->setFiberDimension(*c_iter, fiberDim);
+ } // for
+ const ALE::Obj<SieveMesh>& sieveMesh = _mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+ sieveMesh->allocate(_section);
+ _section->setBC(srcSection->getBC());
+ _section->copySpaces(srcSection); // :BUG: NEED TO REBUILD SPACES
+ } // if/else
+
+ // Reuse scatters in clone
+ PetscErrorCode err = 0;
+ if (src._scatters.size() > 0) {
+ const typename scatter_map_type::const_iterator scattersEnd = src._scatters.end();
+ for (typename scatter_map_type::const_iterator s_iter=src._scatters.begin();
+ s_iter != scattersEnd;
+ ++s_iter) {
+ ScatterInfo sinfo;
+ sinfo.vector = 0;
+ sinfo.scatterVec = 0;
+
+ // Copy scatter
+ sinfo.scatter = s_iter->second.scatter;
+ err = PetscObjectReference((PetscObject) sinfo.scatter);
+ CHECK_PETSC_ERROR(err);
+
+ // Create scatter Vec
+ if (_section->sizeWithBC() > 0) {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF,
+ _section->getStorageSize(),
+ _section->restrictSpace(),
+ &sinfo.scatterVec);
+ CHECK_PETSC_ERROR(err);
+ } else {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
+ &sinfo.scatterVec);
+ CHECK_PETSC_ERROR(err);
+ } // else
+
+ // Create vector using sizes from source section
+ int vecLocalSize = 0;
+ int vecGlobalSize = 0;
+ err = VecGetLocalSize(s_iter->second.vector, &vecLocalSize);
+ CHECK_PETSC_ERROR(err);
+ err = VecGetSize(s_iter->second.vector, &vecGlobalSize); CHECK_PETSC_ERROR(err);
+
+ err = VecCreate(_mesh.comm(), &sinfo.vector); CHECK_PETSC_ERROR(err);
+ err = PetscObjectSetName((PetscObject)sinfo.vector, _metadata.label.c_str());
+ CHECK_PETSC_ERROR(err);
+ err = VecSetSizes(sinfo.vector, vecLocalSize, vecGlobalSize);
+ CHECK_PETSC_ERROR(err);
+ err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
+
+ _scatters[s_iter->first] = sinfo;
+ } // for
+ } // if
+ } // if
+ logger.stagePop();
+} // cloneSection
+
+// ----------------------------------------------------------------------
+// Clear variables associated with section.
+// Releases resources via deallocate(), clears the underlying section if
+// one exists, and resets the metadata to its default nondimensional
+// state. The field label is intentionally left unchanged.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::clear(void)
+{ // clear
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("Field");
+
+ deallocate();
+ if (!_section.isNull())
+ _section->clear();
+
+ // Reset metadata defaults (label is preserved).
+ _metadata.scale = 1.0;
+ _metadata.vectorFieldType = OTHER;
+ _metadata.dimsOkay = false;
+
+ logger.stagePop();
+} // clear
+
+// ----------------------------------------------------------------------
+// Allocate Sieve section.
+// Requires the section to already exist (asserted); storage is allocated
+// by the Sieve mesh based on the fiber dimensions previously set.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::allocate(void)
+{ // allocate
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("Field");
+
+ assert(!_section.isNull());
+
+ const ALE::Obj<SieveMesh>& sieveMesh = _mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+ sieveMesh->allocate(_section);
+
+ logger.stagePop();
+} // allocate
+
+// ----------------------------------------------------------------------
+// Zero section values (excluding constrained DOF).
+// No-op if the section has not been created. Use zeroAll() to also zero
+// constrained degrees of freedom.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::zero(void)
+{ // zero
+ if (!_section.isNull())
+ _section->zero(); // Does not zero BC.
+} // zero
+
+// ----------------------------------------------------------------------
+// Zero section values (including constrained DOF).
+// Writes an explicit zero array into every point with a nonzero fiber
+// dimension via updatePointAll(), so constrained DOF are zeroed too.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::zeroAll(void)
+{ // zeroAll
+ if (!_section.isNull()) {
+ const chart_type& chart = _section->getChart();
+ const typename chart_type::const_iterator chartBegin = chart.begin();
+ const typename chart_type::const_iterator chartEnd = chart.end();
+
+ // Assume fiber dimension is uniform (asserted per point below).
+ const int fiberDim = (chart.size() > 0) ?
+ _section->getFiberDimension(*chartBegin) : 0;
+ double_array values(fiberDim);
+ values *= 0.0;
+
+ for (typename chart_type::const_iterator c_iter = chartBegin;
+ c_iter != chartEnd;
+ ++c_iter) {
+ if (_section->getFiberDimension(*c_iter) > 0) {
+ assert(fiberDim == _section->getFiberDimension(*c_iter));
+ _section->updatePointAll(*c_iter, &values[0]);
+ } // if
+ } // for
+ } // if
+} // zeroAll
+
+// ----------------------------------------------------------------------
+// Complete section by assembling across processors.
+// Adds contributions from the mesh's send/recv overlaps into this
+// section (ALE::Completion::completeSectionAdd). No-op without a section.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::complete(void)
+{ // complete
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("Completion");
+
+ const ALE::Obj<SieveMesh>& sieveMesh = _mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+
+ if (!_section.isNull())
+ ALE::Completion::completeSectionAdd(sieveMesh->getSendOverlap(),
+ sieveMesh->getRecvOverlap(),
+ _section, _section);
+
+ logger.stagePop();
+} // complete
+
+// ----------------------------------------------------------------------
+// Copy field values and metadata.
+// Sections must be compatible: same spatial dimension, same vector field
+// type, and same chart size; otherwise throws std::runtime_error.
+// Copies values point-by-point (including constrained DOF via
+// updatePointAll), then updates the label and scale from the source.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::copy(const Field& field)
+{ // copy
+ // Check compatibility of sections
+ const int srcSize = field.chartSize();
+ const int dstSize = chartSize();
+ if (field.spaceDim() != spaceDim() ||
+ field._metadata.vectorFieldType != _metadata.vectorFieldType ||
+ srcSize != dstSize) {
+ std::ostringstream msg;
+
+ msg << "Cannot copy values from section '" << field._metadata.label
+ << "' to section '" << _metadata.label
+ << "'. Sections are incompatible.\n"
+ << " Source section:\n"
+ << " space dim: " << field.spaceDim() << "\n"
+ << " vector field type: " << field._metadata.vectorFieldType << "\n"
+ << " scale: " << field._metadata.scale << "\n"
+ << " size: " << srcSize << "\n"
+ << " Destination section:\n"
+ << " space dim: " << spaceDim() << "\n"
+ << " vector field type: " << _metadata.vectorFieldType << "\n"
+ << " scale: " << _metadata.scale << "\n"
+ << " size: " << dstSize;
+ throw std::runtime_error(msg.str());
+ } // if
+ // Either both sections exist or neither does.
+ assert( (_section.isNull() && field._section.isNull()) ||
+ (!_section.isNull() && !field._section.isNull()) );
+
+ if (!_section.isNull()) {
+ // Copy values from field
+ const chart_type& chart = _section->getChart();
+ const typename chart_type::const_iterator chartBegin = chart.begin();
+ const typename chart_type::const_iterator chartEnd = chart.end();
+
+ for (typename chart_type::const_iterator c_iter = chartBegin;
+ c_iter != chartEnd;
+ ++c_iter) {
+ assert(field._section->getFiberDimension(*c_iter) ==
+ _section->getFiberDimension(*c_iter));
+ if (_section->getFiberDimension(*c_iter) > 0)
+ _section->updatePointAll(*c_iter,
+ field._section->restrictPoint(*c_iter));
+ } // for
+ } // if
+
+ label(field._metadata.label.c_str()); // Update label
+ _metadata.scale = field._metadata.scale;
+} // copy
+
+// ----------------------------------------------------------------------
+// Copy field values.
+// Copies values point-by-point from an external Sieve section into this
+// field's section. The charts must have the same size (otherwise throws
+// std::runtime_error); fiber dimensions must match (asserted per point).
+// Metadata (label, scale) is NOT updated, unlike copy(const Field&).
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::copy(const ALE::Obj<section_type>& osection)
+{ // copy
+ // Check compatibility of sections
+ const int srcSize = osection->getChart().size();
+ const int dstSize = chartSize();
+ if (srcSize != dstSize) {
+ std::ostringstream msg;
+
+ // :FIX: message previously read "... Sieve section <label>'." with a
+ // mismatched quote and implied the destination label was the source.
+ msg << "Cannot copy values from Sieve section to section '"
+ << _metadata.label << "'. Sections are incompatible.\n"
+ << " Source section:\n"
+ << " size: " << srcSize << "\n"
+ << " Destination section:\n"
+ << " space dim: " << spaceDim() << "\n"
+ << " vector field type: " << _metadata.vectorFieldType << "\n"
+ << " scale: " << _metadata.scale << "\n"
+ << " size: " << dstSize;
+ throw std::runtime_error(msg.str());
+ } // if
+ // Either both sections exist or neither does.
+ assert( (_section.isNull() && osection.isNull()) ||
+ (!_section.isNull() && !osection.isNull()) );
+
+ if (!_section.isNull()) {
+ // Copy values from field
+ const chart_type& chart = _section->getChart();
+ const typename chart_type::const_iterator chartEnd = chart.end();
+
+ for (typename chart_type::const_iterator c_iter = chart.begin();
+ c_iter != chartEnd;
+ ++c_iter) {
+ assert(osection->getFiberDimension(*c_iter) ==
+ _section->getFiberDimension(*c_iter));
+ if (osection->getFiberDimension(*c_iter))
+ _section->updatePoint(*c_iter, osection->restrictPoint(*c_iter));
+ } // for
+ } // if
+} // copy
+
+// ----------------------------------------------------------------------
+// Add two fields, storing the result in one of the fields.
+// Compatibility is stricter than copy(): spatial dimension, vector field
+// type, scale, AND chart size must all match, else std::runtime_error.
+// Values are accumulated per point with updatePointAllAdd (includes
+// constrained DOF). Returns *this for chaining.
+template<typename mesh_type, typename section_type>
+pylith::topology::Field<mesh_type, section_type>&
+pylith::topology::Field<mesh_type, section_type>::operator+=(const Field& field)
+{ // operator+=
+ // Check compatibility of sections
+ const int srcSize = field.chartSize();
+ const int dstSize = chartSize();
+ if (field.spaceDim() != spaceDim() ||
+ field._metadata.vectorFieldType != _metadata.vectorFieldType ||
+ field._metadata.scale != _metadata.scale ||
+ srcSize != dstSize) {
+ std::ostringstream msg;
+
+ msg << "Cannot add values from section '" << field._metadata.label
+ << "' to section '" << _metadata.label
+ << "'. Sections are incompatible.\n"
+ << " Source section:\n"
+ << " space dim: " << field.spaceDim() << "\n"
+ << " vector field type: " << field._metadata.vectorFieldType << "\n"
+ << " scale: " << field._metadata.scale << "\n"
+ << " size: " << srcSize << "\n"
+ << " Destination section:\n"
+ << " space dim: " << spaceDim() << "\n"
+ << " vector field type: " << _metadata.vectorFieldType << "\n"
+ << " scale: " << _metadata.scale << "\n"
+ << " size: " << dstSize;
+ throw std::runtime_error(msg.str());
+ } // if
+ // Either both sections exist or neither does.
+ assert( (_section.isNull() && field._section.isNull()) ||
+ (!_section.isNull() && !field._section.isNull()) );
+
+ if (!_section.isNull()) {
+ // Add values from field
+ const chart_type& chart = _section->getChart();
+ const typename chart_type::const_iterator chartBegin = chart.begin();
+ const typename chart_type::const_iterator chartEnd = chart.end();
+
+ // Assume fiber dimension is uniform (asserted per point below).
+ const int fiberDim = (chart.size() > 0) ?
+ _section->getFiberDimension(*chartBegin) : 0;
+ double_array values(fiberDim);
+
+ for (typename chart_type::const_iterator c_iter = chartBegin;
+ c_iter != chartEnd;
+ ++c_iter) {
+ if (field._section->getFiberDimension(*c_iter) > 0) {
+ assert(fiberDim == field._section->getFiberDimension(*c_iter));
+ assert(fiberDim == _section->getFiberDimension(*c_iter));
+ field._section->restrictPoint(*c_iter, &values[0], values.size());
+ _section->updatePointAllAdd(*c_iter, &values[0]);
+ } // if
+ } // for
+ } // if
+
+ return *this;
+} // operator+=
+
+// ----------------------------------------------------------------------
+// Dimensionalize field.
+// Multiplies every value by the metadata scale via
+// spatialdata::units::Nondimensional. Throws std::runtime_error if the
+// dimsOkay flag has not been set. NOTE(review): declared const but
+// updates values stored in the section through the section handle.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::dimensionalize(void) const
+{ // dimensionalize
+ if (!_metadata.dimsOkay) {
+ std::ostringstream msg;
+ msg << "Cannot dimensionalize field '" << _metadata.label
+ << "' because the flag "
+ << "has been set to keep field nondimensional.";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ if (!_section.isNull()) {
+ const chart_type& chart = _section->getChart();
+ const typename chart_type::const_iterator chartEnd = chart.end();
+
+ // Assume fiber dimension is uniform (asserted per point below).
+ const int fiberDim = (chart.size() > 0) ?
+ _section->getFiberDimension(*chart.begin()) : 0;
+ double_array values(fiberDim);
+
+ spatialdata::units::Nondimensional normalizer;
+
+ for (typename chart_type::const_iterator c_iter = chart.begin();
+ c_iter != chartEnd;
+ ++c_iter)
+ if (_section->getFiberDimension(*c_iter) > 0) {
+ assert(fiberDim == _section->getFiberDimension(*c_iter));
+
+ _section->restrictPoint(*c_iter, &values[0], values.size());
+ normalizer.dimensionalize(&values[0], values.size(), _metadata.scale);
+ _section->updatePointAll(*c_iter, &values[0]);
+ } // if
+ } // if
+} // dimensionalize
+
+// ----------------------------------------------------------------------
+// Print field to standard out.
+// Writes metadata (vector field type, scale, dimensionalize flag) to
+// std::cout, then delegates to the section's own view(). An unknown
+// vector field type is a programming error: assert + std::logic_error.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::view(const char* label) const
+{ // view
+ std::string vecFieldString;
+ switch(_metadata.vectorFieldType)
+ { // switch
+ case SCALAR:
+ vecFieldString = "scalar";
+ break;
+ case VECTOR:
+ vecFieldString = "vector";
+ break;
+ case TENSOR:
+ vecFieldString = "tensor";
+ break;
+ case OTHER:
+ vecFieldString = "other";
+ break;
+ case MULTI_SCALAR:
+ vecFieldString = "multiple scalars";
+ break;
+ case MULTI_VECTOR:
+ vecFieldString = "multiple vectors";
+ break;
+ case MULTI_TENSOR:
+ vecFieldString = "multiple tensors";
+ break;
+ case MULTI_OTHER:
+ vecFieldString = "multiple other values";
+ break;
+ default :
+ std::cerr << "Unknown vector field value '" << _metadata.vectorFieldType
+ << "'." << std::endl;
+ assert(0);
+ throw std::logic_error("Bad vector field type in Field.");
+ } // switch
+
+ std::cout << "Viewing field '" << _metadata.label << "' "<< label << ".\n"
+ << " vector field type: " << vecFieldString << "\n"
+ << " scale: " << _metadata.scale << "\n"
+ << " dimensionalize flag: " << _metadata.dimsOkay << std::endl;
+ if (!_section.isNull())
+ _section->view(label);
+} // view
+
+// ----------------------------------------------------------------------
+// Create PETSc vector scatter for field. This is used to transfer
+// information from the "global" PETSc vector view to the "local"
+// Sieve section view. The PETSc vector does not contain constrained
+// DOF (global order created without BC; scatter created with
+// includeConstraints=false). Use createScatterWithBC() to include the
+// constrained DOF in the PETSc vector. Idempotent per context: returns
+// early if a scatter already exists for this context.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::createScatter(const char* context)
+{ // createScatter
+ assert(context);
+ assert(!_section.isNull());
+ assert(!_mesh.sieveMesh().isNull());
+
+ PetscErrorCode err = 0;
+ const bool createScatterOk = true;
+ ScatterInfo& sinfo = _getScatter(context, createScatterOk);
+ if (sinfo.scatter) {
+ assert(sinfo.scatterVec);
+ assert(sinfo.vector);
+ return;
+ } // if
+
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("GlobalOrder");
+
+ // Get global order (create if necessary).
+ const std::string& orderLabel = _section->getName();
+ const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
+ _mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+ const ALE::Obj<typename mesh_type::SieveMesh::order_type>& order =
+ sieveMesh->getFactory()->getGlobalOrder(sieveMesh, orderLabel,
+ _section);
+ assert(!order.isNull());
+
+ // Create scatter
+ err = DMMeshCreateGlobalScatter(_mesh.sieveMesh(), _section, order, false, &sinfo.scatter);
+ CHECK_PETSC_ERROR(err);
+
+ // Create scatterVec (local Vec wrapping the section's storage array)
+ if (_section->sizeWithBC() > 0) {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF, _section->getStorageSize(),
+ _section->restrictSpace(),
+ &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
+ } else {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
+ &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
+ } // if/else
+
+ // Create vector (global view, sized from the global order)
+ err = VecCreate(_mesh.comm(), &sinfo.vector);
+ CHECK_PETSC_ERROR(err);
+ err = PetscObjectSetName((PetscObject)sinfo.vector,
+ _metadata.label.c_str());
+ CHECK_PETSC_ERROR(err);
+ err = VecSetSizes(sinfo.vector,
+ order->getLocalSize(), order->getGlobalSize());
+ CHECK_PETSC_ERROR(err);
+ err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
+
+ logger.stagePop();
+} // createScatter
+
+// ----------------------------------------------------------------------
+// Create PETSc vector scatter for field. This is used to transfer
+// information from the "global" PETSc vector view to the "local"
+// Sieve section view. The PETSc vector does not contain constrained
+// DOF. Use createScatterWithBC() to include the constrained DOF in
+// the PETSc vector. This overload restricts the global order to the
+// chart of the given numbering; a non-empty context is appended to the
+// order label so different contexts get distinct global orders.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::createScatter(const typename ALE::Obj<typename SieveMesh::numbering_type> numbering,
+ const char* context)
+{ // createScatter
+ assert(!numbering.isNull());
+ assert(context);
+ assert(!_section.isNull());
+ assert(!_mesh.sieveMesh().isNull());
+
+ PetscErrorCode err = 0;
+ const bool createScatterOk = true;
+ ScatterInfo& sinfo = _getScatter(context, createScatterOk);
+
+ // Only create if scatter and scatterVec do not already exist.
+ if (sinfo.scatter) {
+ assert(sinfo.scatterVec);
+ assert(sinfo.vector);
+ return;
+ } // if
+
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("GlobalOrder");
+
+ // Get global order (create if necessary).
+ const std::string& orderLabel =
+ (strlen(context) > 0) ?
+ _section->getName() + std::string("_") + std::string(context) :
+ _section->getName();
+ const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
+ _mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+ const ALE::Obj<typename mesh_type::SieveMesh::order_type>& order =
+ sieveMesh->getFactory()->getGlobalOrder(sieveMesh, orderLabel,
+ numbering->getChart().begin(),
+ numbering->getChart().end(),
+ _section);
+ assert(!order.isNull());
+
+ // Create scatter
+ err = DMMeshCreateGlobalScatter(_mesh.sieveMesh(), _section, order, false, &sinfo.scatter);
+ CHECK_PETSC_ERROR(err);
+
+ // Create scatterVec (local Vec wrapping the section's storage array)
+ if (_section->sizeWithBC() > 0) {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF, _section->getStorageSize(),
+ _section->restrictSpace(),
+ &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
+ } else {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
+ &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
+ } // if/else
+
+ // Create vector (global view, sized from the global order)
+ err = VecCreate(_mesh.comm(), &sinfo.vector);
+ CHECK_PETSC_ERROR(err);
+ err = PetscObjectSetName((PetscObject)sinfo.vector,
+ _metadata.label.c_str());
+ CHECK_PETSC_ERROR(err);
+ err = VecSetSizes(sinfo.vector,
+ order->getLocalSize(), order->getGlobalSize());
+ CHECK_PETSC_ERROR(err);
+ err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
+
+#if 0
+ std::cout << "CONTEXT: " << context
+ << ", orderLabel: " << orderLabel
+ << ", section size w/BC: " << _section->sizeWithBC()
+ << ", section size: " << _section->size()
+ << ", global numbering size: " << numbering->getGlobalSize()
+ << ", global size: " << order->getGlobalSize()
+ << std::endl;
+#endif
+
+ logger.stagePop();
+} // createScatter
+
+// ----------------------------------------------------------------------
+// Create PETSc vector scatter for field. This is used to transfer
+// information from the "global" PETSc vector view to the "local"
+// Sieve section view. The PETSc vector INCLUDES constrained DOF (global
+// order created with BC; scatter created with includeConstraints=true).
+// Use createScatter() if constrained DOF should be omitted from the
+// PETSc vector. (Previous comment wrongly said the opposite.)
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::createScatterWithBC(const char* context)
+{ // createScatterWithBC
+ assert(context);
+ assert(!_section.isNull());
+ assert(!_mesh.sieveMesh().isNull());
+
+
+ PetscErrorCode err = 0;
+ const bool createScatterOk = true;
+ ScatterInfo& sinfo = _getScatter(context, createScatterOk);
+ if (sinfo.scatter) {
+ assert(sinfo.scatterVec);
+ assert(sinfo.vector);
+ return;
+ } // if
+
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("GlobalOrder");
+
+ // Get global order (create if necessary).
+ const std::string& orderLabel = _section->getName();
+ const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
+ _mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+ const ALE::Obj<typename mesh_type::SieveMesh::order_type>& order =
+ sieveMesh->getFactory()->getGlobalOrderWithBC(sieveMesh, orderLabel,
+ _section);
+ assert(!order.isNull());
+
+ // Create scatter
+ err = DMMeshCreateGlobalScatter(_mesh.sieveMesh(), _section, order, true, &sinfo.scatter);
+ CHECK_PETSC_ERROR(err);
+
+ // Create scatterVec (local Vec wrapping the section's storage array)
+ if (_section->sizeWithBC() > 0) {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF, _section->getStorageSize(),
+ _section->restrictSpace(),
+ &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
+ } else {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
+ &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
+ } // if/else
+
+ // Create vector (global view, sized from the global order)
+ err = VecCreate(_mesh.comm(), &sinfo.vector);
+ CHECK_PETSC_ERROR(err);
+ err = PetscObjectSetName((PetscObject)sinfo.vector,
+ _metadata.label.c_str());
+ CHECK_PETSC_ERROR(err);
+ err = VecSetSizes(sinfo.vector,
+ order->getLocalSize(), order->getGlobalSize());
+ CHECK_PETSC_ERROR(err);
+ err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
+
+ logger.stagePop();
+} // createScatterWithBC
+
+// ----------------------------------------------------------------------
+// Create PETSc vector scatter for field. This is used to transfer
+// information from the "global" PETSc vector view to the "local"
+// Sieve section view. The PETSc vector includes constrained DOF. Use
+// createScatter() if constrained DOF should be omitted from the PETSc
+// vector. This overload restricts the global order to the chart of the
+// given numbering; a non-empty context is appended to the order label.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::createScatterWithBC(const typename ALE::Obj<typename SieveMesh::numbering_type> numbering,
+ const char* context)
+{ // createScatterWithBC
+ assert(!numbering.isNull());
+ assert(context);
+ assert(!_section.isNull());
+
+ PetscErrorCode err = 0;
+ const bool createScatterOk = true;
+ ScatterInfo& sinfo = _getScatter(context, createScatterOk);
+
+ // Only create if scatter and scatterVec do not already exist.
+ if (sinfo.scatter) {
+ assert(sinfo.scatterVec);
+ assert(sinfo.vector);
+ return;
+ } // if
+
+ ALE::MemoryLogger& logger = ALE::MemoryLogger::singleton();
+ logger.stagePush("GlobalOrder");
+
+ // Get global order (create if necessary).
+ const std::string& orderLabel =
+ (strlen(context) > 0) ?
+ _section->getName() + std::string("_") + std::string(context) :
+ _section->getName();
+ const ALE::Obj<typename mesh_type::SieveMesh>& sieveMesh =
+ _mesh.sieveMesh();
+ assert(!sieveMesh.isNull());
+ const ALE::Obj<typename mesh_type::SieveMesh::order_type>& order =
+ sieveMesh->getFactory()->getGlobalOrderWithBC(sieveMesh, orderLabel,
+ numbering->getChart().begin(),
+ numbering->getChart().end(),
+ _section);
+ assert(!order.isNull());
+ //order->view("GLOBAL ORDER"); // DEBUG
+
+ // Create scatter
+ err = DMMeshCreateGlobalScatter(_mesh.sieveMesh(), _section, order, true, &sinfo.scatter);
+ CHECK_PETSC_ERROR(err);
+
+ // Create scatterVec (local Vec wrapping the section's storage array)
+ if (_section->sizeWithBC() > 0) {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF, _section->getStorageSize(),
+ _section->restrictSpace(),
+ &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
+ } else {
+ err = VecCreateSeqWithArray(PETSC_COMM_SELF, 0, 0,
+ &sinfo.scatterVec); CHECK_PETSC_ERROR(err);
+ } // if/else
+
+ // Create vector (global view, sized from the global order)
+ err = VecCreate(_mesh.comm(), &sinfo.vector);
+ CHECK_PETSC_ERROR(err);
+ err = PetscObjectSetName((PetscObject)sinfo.vector,
+ _metadata.label.c_str());
+ CHECK_PETSC_ERROR(err);
+ err = VecSetSizes(sinfo.vector,
+ order->getLocalSize(), order->getGlobalSize());
+ CHECK_PETSC_ERROR(err);
+ err = VecSetFromOptions(sinfo.vector); CHECK_PETSC_ERROR(err);
+
+#if 0
+ std::cout << "CONTEXT: " << context
+ << ", orderLabel: " << orderLabel
+ << ", section size w/BC: " << _section->sizeWithBC()
+ << ", section size: " << _section->size()
+ << ", section storage size: " << _section->getStorageSize()
+ << ", global numbering size: " << numbering->getGlobalSize()
+ << ", global size: " << order->getGlobalSize()
+ << ", scatter from size: " << sinfo.scatter->from_n
+ << std::endl;
+#endif
+
+ logger.stagePop();
+} // createScatterWithBC
+
+// ----------------------------------------------------------------------
+// Get PETSc vector associated with field.
+// Throws std::runtime_error (via _getScatter) if no scatter has been
+// created for the given context.
+template<typename mesh_type, typename section_type>
+PetscVec
+pylith::topology::Field<mesh_type, section_type>::vector(const char* context)
+{ // vector
+ ScatterInfo& sinfo = _getScatter(context);
+ return sinfo.vector;
+} // vector
+
+// ----------------------------------------------------------------------
+// Get PETSc vector associated with field (const version).
+// Throws std::runtime_error (via _getScatter) if no scatter has been
+// created for the given context.
+template<typename mesh_type, typename section_type>
+const PetscVec
+pylith::topology::Field<mesh_type, section_type>::vector(const char* context) const
+{ // vector
+ const ScatterInfo& sinfo = _getScatter(context);
+ return sinfo.vector;
+} // vector
+
+// ----------------------------------------------------------------------
+// Scatter section information across processors to update the
+// PETSc vector view of the field.
+// Convenience overload: targets the vector stored for this context.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::scatterSectionToVector(const char* context) const
+{ // scatterSectionToVector
+ assert(context);
+
+ const ScatterInfo& sinfo = _getScatter(context);
+ scatterSectionToVector(sinfo.vector, context);
+} // scatterSectionToVector
+
+// ----------------------------------------------------------------------
+// Scatter section information across processors to update the
+// PETSc vector view of the field.
+// Forward scatter: section storage (scatterVec) -> given global vector,
+// inserting values. Requires a scatter created for this context.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::scatterSectionToVector(const PetscVec vector,
+ const char* context) const
+{ // scatterSectionToVector
+ assert(vector);
+ assert(context);
+ assert(!_section.isNull());
+
+ PetscErrorCode err = 0;
+ const ScatterInfo& sinfo = _getScatter(context);
+ err = VecScatterBegin(sinfo.scatter, sinfo.scatterVec, vector,
+ INSERT_VALUES, SCATTER_FORWARD); CHECK_PETSC_ERROR(err);
+ err = VecScatterEnd(sinfo.scatter, sinfo.scatterVec, vector,
+ INSERT_VALUES, SCATTER_FORWARD); CHECK_PETSC_ERROR(err);
+} // scatterSectionToVector
+
+// ----------------------------------------------------------------------
+// Scatter PETSc vector information across processors to update the
+// section view of the field.
+// Convenience overload: reads from the vector stored for this context.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::scatterVectorToSection(const char* context) const
+{ // scatterVectorToSection
+ assert(context);
+
+ const ScatterInfo& sinfo = _getScatter(context);
+ scatterVectorToSection(sinfo.vector, context);
+} // scatterVectorToSection
+
+// ----------------------------------------------------------------------
+// Scatter PETSc vector information across processors to update the
+// section view of the field.
+// Reverse scatter: given global vector -> section storage (scatterVec),
+// inserting values. Requires a scatter created for this context.
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::scatterVectorToSection(const PetscVec vector,
+ const char* context) const
+{ // scatterVectorToSection
+ assert(vector);
+ assert(context);
+ assert(!_section.isNull());
+
+ PetscErrorCode err = 0;
+ const ScatterInfo& sinfo = _getScatter(context);
+ err = VecScatterBegin(sinfo.scatter, vector, sinfo.scatterVec,
+ INSERT_VALUES, SCATTER_REVERSE); CHECK_PETSC_ERROR(err);
+ err = VecScatterEnd(sinfo.scatter, vector, sinfo.scatterVec,
+ INSERT_VALUES, SCATTER_REVERSE); CHECK_PETSC_ERROR(err);
+} // scatterVectorToSection
+
+// ----------------------------------------------------------------------
+// Set up split field with one space per spatial dimension.
+// Adds spaceDim fibrations to the section and assigns each point a
+// fiber dimension of 1 in every fibration. Every point in the chart is
+// assumed to have fiber dimension spaceDim (asserted).
+template<typename mesh_type, typename section_type>
+void
+pylith::topology::Field<mesh_type, section_type>::splitDefault(void)
+{ // splitDefault
+ assert(!_section.isNull());
+ const int spaceDim = _mesh.dimension();
+ for (int iDim=0; iDim < spaceDim; ++iDim)
+ _section->addSpace(); // displacements
+
+ const chart_type& chart = _section->getChart();
+
+ // :FIX: removed unused local iterator (chartBegin).
+ const typename chart_type::const_iterator chartEnd = chart.end();
+ for (int fibration=0; fibration < spaceDim; ++fibration)
+ for (typename chart_type::const_iterator c_iter = chart.begin();
+ c_iter != chartEnd;
+ ++c_iter) {
+ assert(spaceDim == _section->getFiberDimension(*c_iter));
+ _section->setFiberDimension(*c_iter, 1, fibration);
+ } // for
+} // splitDefault
+
+// ----------------------------------------------------------------------
+// Get scatter for given context.
+// If no scatter exists for the context: throws std::runtime_error when
+// createOk is false, otherwise inserts a new zero-initialized
+// ScatterInfo (via map operator[]) and returns a reference to it.
+template<typename mesh_type, typename section_type>
+typename pylith::topology::Field<mesh_type, section_type>::ScatterInfo&
+pylith::topology::Field<mesh_type, section_type>::_getScatter(const char* context,
+ const bool createOk)
+{ // _getScatter
+ assert(context);
+
+ const bool isNewScatter = _scatters.find(context) == _scatters.end();
+
+ if (isNewScatter && !createOk) {
+ std::ostringstream msg;
+ msg << "Scatter for context '" << context << "' does not exist.";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ // operator[] inserts the entry when new; zero the handles explicitly.
+ ScatterInfo& sinfo = _scatters[context];
+ if (isNewScatter) {
+ sinfo.vector = 0;
+ sinfo.scatter = 0;
+ sinfo.scatterVec = 0;
+ } // if
+ assert(_scatters.find(context) != _scatters.end());
+
+ return sinfo;
+} // _getScatter
+
+// ----------------------------------------------------------------------
+// Get scatter for given context (const version).
+// Never creates: throws std::runtime_error if the context is unknown.
+template<typename mesh_type, typename section_type>
+const typename pylith::topology::Field<mesh_type, section_type>::ScatterInfo&
+pylith::topology::Field<mesh_type, section_type>::_getScatter(const char* context) const
+{ // _getScatter
+ assert(context);
+
+ const typename scatter_map_type::const_iterator s_iter =
+ _scatters.find(context);
+ if (s_iter == _scatters.end()) {
+ std::ostringstream msg;
+ msg << "Scatter for context '" << context << "' does not exist.";
+ throw std::runtime_error(msg.str());
+ } // if
+
+ return s_iter->second;
+} // _getScatter
+
+
+// End of file
Modified: short/3D/PyLith/trunk/libsrc/pylith/topology/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/topology/Makefile.am 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/topology/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -60,10 +60,4 @@
MeshOrder.hh
-# export
-clean-local: clean-subpkgincludeHEADERS
-BUILT_SOURCES = export-subpkgincludeHEADERS
-CLEANFILES = export-subpkgincludeHEADERS
-
-
# End of file
Copied: short/3D/PyLith/trunk/libsrc/pylith/utils (from rev 18400, short/3D/PyLith/trunk/libsrc/utils)
Modified: short/3D/PyLith/trunk/libsrc/pylith/utils/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/libsrc/utils/Makefile.am 2011-05-20 19:01:26 UTC (rev 18400)
+++ short/3D/PyLith/trunk/libsrc/pylith/utils/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -36,9 +36,5 @@
noinst_HEADERS =
-# export
-clean-local: clean-subpkgincludeHEADERS
-BUILT_SOURCES = export-subpkgincludeHEADERS
-CLEANFILES = export-subpkgincludeHEADERS
# End of file
Modified: short/3D/PyLith/trunk/modulesrc/bc/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/bc/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/bc/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -49,7 +49,7 @@
dist__bcmodule_la_SOURCES = $(swig_sources) $(swig_generated)
_bcmodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIBS)
if ENABLE_CUBIT
Modified: short/3D/PyLith/trunk/modulesrc/faults/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/faults/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/faults/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -52,7 +52,7 @@
dist__faultsmodule_la_SOURCES = $(swig_sources) $(swig_generated)
_faultsmodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIBS)
if ENABLE_CUBIT
Modified: short/3D/PyLith/trunk/modulesrc/feassemble/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/feassemble/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/feassemble/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -61,7 +61,7 @@
dist__feassemblemodule_la_SOURCES = $(swig_sources) $(swig_generated)
_feassemblemodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIBS)
if ENABLE_CUBIT
Modified: short/3D/PyLith/trunk/modulesrc/friction/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/friction/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/friction/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -45,7 +45,7 @@
dist__frictionmodule_la_SOURCES = $(swig_sources) $(swig_generated)
_frictionmodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIBS)
if ENABLE_CUBIT
Modified: short/3D/PyLith/trunk/modulesrc/materials/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/materials/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/materials/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -54,7 +54,7 @@
dist__materialsmodule_la_SOURCES = $(swig_sources) $(swig_generated)
_materialsmodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIBS)
if ENABLE_CUBIT
Modified: short/3D/PyLith/trunk/modulesrc/meshio/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/meshio/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/meshio/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -51,7 +51,7 @@
dist__meshiomodule_la_SOURCES = $(swig_sources) $(swig_generated)
_meshiomodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIBS)
if ENABLE_CUBIT
Modified: short/3D/PyLith/trunk/modulesrc/mpi/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/mpi/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/mpi/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -38,7 +38,7 @@
dist__mpimodule_la_SOURCES = $(swig_sources) $(swig_generated)
_mpimodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIBS)
if NO_UNDEFINED
Modified: short/3D/PyLith/trunk/modulesrc/problems/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/problems/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/problems/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -44,7 +44,7 @@
dist__problemsmodule_la_SOURCES = $(swig_sources) $(swig_generated)
_problemsmodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIBS)
if ENABLE_CUBIT
Modified: short/3D/PyLith/trunk/modulesrc/topology/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/topology/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/topology/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -48,7 +48,7 @@
dist__topologymodule_la_SOURCES = $(swig_sources) $(swig_generated)
_topologymodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIBS)
if NO_UNDEFINED
Modified: short/3D/PyLith/trunk/modulesrc/utils/Makefile.am
===================================================================
--- short/3D/PyLith/trunk/modulesrc/utils/Makefile.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/modulesrc/utils/Makefile.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -57,7 +57,7 @@
dist__petscmodule_la_SOURCES = $(petsc_swig_sources) $(petsc_swig_generated)
_petscmodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIB)
if NO_UNDEFINED
@@ -72,7 +72,7 @@
dist__utilsmodule_la_SOURCES = $(utils_swig_sources) $(utils_swig_generated)
_utilsmodule_la_LIBADD = \
- $(top_builddir)/libsrc/libpylith.la \
+ $(top_builddir)/libsrc/pylith/libpylith.la \
-lspatialdata \
$(PETSC_LIB)
if NO_UNDEFINED
Modified: short/3D/PyLith/trunk/subpackage.am
===================================================================
--- short/3D/PyLith/trunk/subpackage.am 2011-05-21 00:07:27 UTC (rev 18405)
+++ short/3D/PyLith/trunk/subpackage.am 2011-05-21 13:46:30 UTC (rev 18406)
@@ -39,7 +39,7 @@
clean-nobase_subpkgincludeHEADERS:
$(MAKE) $(AM_MAKEFLAGS) pkgincludedir=$(export_incdir) uninstall-nobase_subpkgincludeHEADERS
-INCLUDES = -I$(top_builddir)/include
+INCLUDES = -I$(top_srcdir)/libsrc
# End of file
More information about the CIG-COMMITS
mailing list