[cig-commits] r17269 - short/3D/PyLith/trunk/libsrc/meshio
knepley at geodynamics.org
Thu Oct 14 06:26:16 PDT 2010
Author: knepley
Date: 2010-10-14 06:26:16 -0700 (Thu, 14 Oct 2010)
New Revision: 17269
Modified:
short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc
Log:
Still not working because I do not understand the field manager
Modified: short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc
===================================================================
--- short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc 2010-10-14 05:06:23 UTC (rev 17268)
+++ short/3D/PyLith/trunk/libsrc/meshio/DataWriterHDF5.cc 2010-10-14 13:26:16 UTC (rev 17269)
@@ -102,19 +102,27 @@
err = VecView(coordinates.vector(), _viewer);CHECK_PETSC_ERROR(err);
Vec elemVec;
- PetscInt numElements, numCorners, *vertices;
PetscScalar *tmpVertices;
PetscBool columnMajor = PETSC_FALSE;
- ALE::PCICE::Builder::outputElementsLocal(sieveMesh, &numElements, &numCorners, &vertices, columnMajor);
+ ///ALE::PCICE::Builder::outputElementsLocal(sieveMesh, &numElements, &numCorners, &vertices, columnMajor);
+ typedef ALE::OrientedConeSectionV<typename mesh_type::SieveMesh::sieve_type> oriented_cones_wrapper_type;
+ Obj<oriented_cones_wrapper_type> cones = new oriented_cones_wrapper_type(sieveMesh->getSieve());
+
// Hack right now, move to HDF5 Section viewer
- err = PetscMalloc(sizeof(PetscScalar)*numElements*numCorners, &tmpVertices);CHECK_PETSC_ERROR(err);
- for(PetscInt i = 0; i < numElements*numCorners; ++i) {tmpVertices[i] = vertices[i];}
- err = VecCreateMPIWithArray(sieveMesh->comm(), numElements*numCorners, PETSC_DETERMINE, tmpVertices, &elemVec);CHECK_PETSC_ERROR(err);
+ err = PetscMalloc(sizeof(PetscScalar)*cones->size(), &tmpVertices);CHECK_PETSC_ERROR(err);
+ for(int p = sieveMesh->getSieve()->getChart().min(), i = 0; p < sieveMesh->getSieve()->getChart().max(); ++p) {
+ const int coneSize = cones->getFiberDimension(p);
+ const typename oriented_cones_wrapper_type::value_type *vertices = cones->restrictPoint(p);
+
+ for(int c = 0; c < coneSize; ++c, ++i) {
+ tmpVertices[i] = vertices[c].first;
+ }
+ }
+ err = VecCreateMPIWithArray(sieveMesh->comm(), cones->size(), PETSC_DETERMINE, tmpVertices, &elemVec);CHECK_PETSC_ERROR(err);
err = VecView(elemVec, _viewer);CHECK_PETSC_ERROR(err);
err = VecDestroy(elemVec);CHECK_PETSC_ERROR(err);
err = PetscFree(tmpVertices);CHECK_PETSC_ERROR(err);
- err = PetscFree(vertices);CHECK_PETSC_ERROR(err);
} catch (const std::exception& err) {
std::ostringstream msg;
msg << "Error while preparing for writing data to HDF5 file "
@@ -155,11 +163,13 @@
// We will try the simplest thing, using the embedded vector. If this is not
// general enough, due to ordering, etc., we can construct an auxiliary vector.
- const PetscVec vector = field.vector();
+ PetscVec vector = field.vector();
if (vector == PETSC_NULL) {
field.createVector();
+ vector = field.vector();
}
// TODO: Create scatter if necessary
+ field.createScatter();
field.scatterSectionToVector();
PetscErrorCode err = 0;
@@ -192,11 +202,13 @@
// We will try the simplest thing, using the embedded vector. If this is not
// general enough, due to ordering, etc., we can construct an auxiliary vector.
- const PetscVec vector = field.vector();
+ PetscVec vector = field.vector();
if (vector == PETSC_NULL) {
field.createVector();
+ vector = field.vector();
}
// TODO: Create scatter if necessary
+ field.createScatter();
field.scatterSectionToVector();
PetscErrorCode err = 0;
More information about the CIG-COMMITS mailing list