[cig-commits] commit:
Mercurial
hg at geodynamics.org
Mon Nov 24 11:58:35 PST 2008
changeset: 58:4825c205bdb8
user: LukeHodkinson
date: Sun Mar 16 07:22:21 2008 +0000
files: Swarm/src/FileParticleLayout.c Swarm/src/FileParticleLayout.h Swarm/src/Swarm.h Swarm/src/SwarmClass.c Swarm/src/SwarmDump.c Swarm/src/SwarmDump.h
description:
Finishing off adding HDF5 to particle dumping. This
gives us two major advantages; the first being that
we can now use compression to reduce file size, and
the second being that we should be able to store files
larger than a machine's address space (fixing that
bug we were having with huge checkpoint file sizes).
diff -r 65081410b863 -r 4825c205bdb8 Swarm/src/FileParticleLayout.c
--- a/Swarm/src/FileParticleLayout.c Fri Mar 14 04:09:43 2008 +0000
+++ b/Swarm/src/FileParticleLayout.c Sun Mar 16 07:22:21 2008 +0000
@@ -27,6 +27,10 @@
** $Id: FileParticleLayout.c 4081 2007-04-27 06:20:07Z LukeHodkinson $
**
**~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/
+
+#ifdef HAVE_HDF5
+#include <hdf5.h>
+#endif
#include <mpi.h>
#include <StGermain/StGermain.h>
@@ -213,6 +217,8 @@ void _FileParticleLayout_SetInitialCount
MPI_Offset bytesCount;
SizeT particleSize = swarm->particleExtensionMgr->finalSize;
div_t division;
+ hid_t file, fileData;
+ int size[2];
Journal_DPrintf( self->debug, "In %s(): for ParticleLayout \"%s\", of type %s\n",
__func__, self->name, self->type );
@@ -221,6 +227,20 @@ void _FileParticleLayout_SetInitialCount
Journal_DPrintf( self->debug, "Finding number of bytes in checkpoint file \"%s\":\n",
self->filename );
+#ifdef HAVE_HDF5
+ /* Read in data size. */
+ file = H5Fopen( filename, H5F_ACC_RDONLY, H5P_DEFAULT );
+ fileData = H5Dopen( file, "/size" );
+ H5Dread( fileData, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, size );
+ H5Dclose( fileData );
+ H5Fclose( file );
+
+ /* Make sure particle sizes are the same. */
+ assert( size[1] == swarm->particleExtensionMgr->finalSize );
+
+ /* Store number of particles. */
+ self->totalInitialParticles = size[0];
+#else
openResult = MPI_File_open( swarm->comm, filename, MPI_MODE_RDONLY, MPI_INFO_NULL, &mpiFile );
Journal_Firewall(
@@ -257,6 +277,7 @@ void _FileParticleLayout_SetInitialCount
bytesCount,
(unsigned int)particleSize,
division.rem );
+#endif
Journal_DPrintf( self->debug, "calling parent func to set cell counts:\n", bytesCount );
_GlobalParticleLayout_SetInitialCounts( self, swarm );
@@ -268,7 +289,34 @@ void _FileParticleLayout_SetInitialCount
void _FileParticleLayout_InitialiseParticles( void* particleLayout, void* _swarm ) {
FileParticleLayout* self = (FileParticleLayout*)particleLayout;
-
+ Swarm *swarm = (Swarm*)_swarm;
+#ifdef HAVE_HDF5
+ hid_t file;
+ hsize_t size[2];
+
+ /* Open the file and data set. */
+ file = H5Fopen( self->filename, H5F_ACC_RDONLY, H5P_DEFAULT );
+ self->fileData = H5Dopen( file, "/data" );
+ self->fileSpace = H5Dget_space( self->fileData );
+
+ /* Need a memory space for extracting to. */
+ size[0] = 1;
+ size[1] = swarm->particleExtensionMgr->finalSize;
+ self->memSpace = H5Screate_simple( 1, size + 1, NULL );
+ H5Sselect_all( self->memSpace );
+
+ /* Prepare a hyperslab for extracting file data one particle at a time. */
+ self->start[0] = 0; self->start[1] = 0;
+ self->count[0] = 1; self->count[1] = size[1];
+ H5Sselect_hyperslab( self->fileSpace, H5S_SELECT_SET, self->start, NULL, self->count, NULL );
+
+ _GlobalParticleLayout_InitialiseParticles( self, _swarm );
+
+ H5Sclose( self->memSpace );
+ H5Sclose( self->fileSpace );
+ H5Dclose( self->fileData );
+ H5Fclose( file );
+#else
self->file = fopen( self->filename, "rb" );
Journal_Firewall(
self->file != NULL,
@@ -283,6 +331,7 @@ void _FileParticleLayout_InitialiseParti
fclose( self->file );
self->file = NULL;
+#endif
}
void _FileParticleLayout_InitialiseParticle(
@@ -296,6 +345,15 @@ void _FileParticleLayout_InitialiseParti
SizeT particleSize = swarm->particleExtensionMgr->finalSize;
int result;
+#ifdef HAVE_HDF5
+ /* Update the hyperslab. */
+ self->start[0] = newParticle_I;
+ H5Sselect_hyperslab( self->fileSpace, H5S_SELECT_SET, self->start, NULL, self->count, NULL );
+
+ /* Read particle data. */
+ H5Dread( self->fileData, H5T_NATIVE_CHAR, self->memSpace,
+ self->fileSpace, H5P_DEFAULT, particle );
+#else
result = fread( particle, particleSize, 1, self->file );
Journal_Firewall(
@@ -307,6 +365,7 @@ void _FileParticleLayout_InitialiseParti
self->type,
self->name,
newParticle_I );
+#endif
}
diff -r 65081410b863 -r 4825c205bdb8 Swarm/src/FileParticleLayout.h
--- a/Swarm/src/FileParticleLayout.h Fri Mar 14 04:09:43 2008 +0000
+++ b/Swarm/src/FileParticleLayout.h Sun Mar 16 07:22:21 2008 +0000
@@ -46,12 +46,26 @@
extern const Type FileParticleLayout_Type;
/* FileParticleLayout information */
+#ifdef HAVE_HDF5
+ #define __FileParticleLayout \
+ __GlobalParticleLayout \
+ \
+ Name filename; \
+ FILE* file; \
+ Stream* errorStream; \
+ hid_t fileData; \
+ hid_t fileSpace; \
+ hid_t memSpace; \
+ hsize_t start[2]; \
+ hsize_t count[2];
+#else
#define __FileParticleLayout \
__GlobalParticleLayout \
\
Name filename; \
FILE* file; \
Stream* errorStream;
+#endif
struct FileParticleLayout { __FileParticleLayout };
diff -r 65081410b863 -r 4825c205bdb8 Swarm/src/Swarm.h
--- a/Swarm/src/Swarm.h Fri Mar 14 04:09:43 2008 +0000
+++ b/Swarm/src/Swarm.h Sun Mar 16 07:22:21 2008 +0000
@@ -41,7 +41,11 @@
#ifndef __Domain_Swarm_h__
#define __Domain_Swarm_h__
-
+
+#ifdef HAVE_HDF5
+#include <hdf5.h>
+#endif
+
#include "types.h"
#include "shortcuts.h"
diff -r 65081410b863 -r 4825c205bdb8 Swarm/src/SwarmClass.c
--- a/Swarm/src/SwarmClass.c Fri Mar 14 04:09:43 2008 +0000
+++ b/Swarm/src/SwarmClass.c Sun Mar 16 07:22:21 2008 +0000
@@ -27,6 +27,10 @@
** $Id: SwarmClass.c 4191 2007-10-05 06:23:59Z DavidLee $
**
**~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/
+
+#ifdef HAVE_HDF5
+#include <hdf5.h>
+#endif
#include <mpi.h>
#include <StGermain/StGermain.h>
diff -r 65081410b863 -r 4825c205bdb8 Swarm/src/SwarmDump.c
--- a/Swarm/src/SwarmDump.c Fri Mar 14 04:09:43 2008 +0000
+++ b/Swarm/src/SwarmDump.c Sun Mar 16 07:22:21 2008 +0000
@@ -44,6 +44,10 @@
#include <assert.h>
#include <string.h>
+
+#ifdef HAVE_HDF5
+#include <hdf5.h>
+#endif
const Type SwarmDump_Type = "SwarmDump";
@@ -277,11 +281,14 @@ void _SwarmDump_Execute( void* swarmDump
MPI_Barrier( swarm->comm );
}
+#ifdef HAVE_HDF5
+ SwarmDump_DumpToHDF5( self, swarm, filename );
+#else
Stream_RedirectFile( stream, filename );
-
MPIStream_WriteAllProcessors( stream, swarm->particles, particleSize, (SizeT) particleLocalCount, swarm->comm );
-
Stream_CloseFile( stream );
+#endif
+
Memory_Free( filename );
}
Stream_UnIndent( info );
@@ -297,3 +304,48 @@ void SwarmDump_Execute( void* swarmDump,
self->_execute( self, context );
}
+
+#ifdef HAVE_HDF5
+void SwarmDump_DumpToHDF5( SwarmDump* self, Swarm* swarm, const char* filename ) {
+ hid_t file, fileSpace, fileData;
+ hid_t props;
+ hsize_t size[2];
+ int intSize[2];
+
+ /* Open the HDF5 output file. */
+ file = H5Fcreate( filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
+ assert( file );
+
+ /* Dump the size so we don't have to do any divisions later on. */
+ size[0] = (hsize_t)2;
+ fileSpace = H5Screate_simple( 1, size, NULL );
+ fileData = H5Dcreate( file, "/size", H5T_NATIVE_INT, fileSpace, H5P_DEFAULT );
+ intSize[0] = swarm->particleLocalCount;
+ intSize[1] = swarm->particleExtensionMgr->finalSize;
+ H5Dwrite( fileData, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, intSize );
+ H5Dclose( fileData );
+ H5Sclose( fileSpace );
+
+ /* Set our data chunk size so we can compress each chunk individually. */
+ size[0] = (swarm->particleLocalCount > 10000) ? 10000 : swarm->particleLocalCount;
+ size[1] = (hsize_t)swarm->particleExtensionMgr->finalSize;
+ props = H5Pcreate( H5P_DATASET_CREATE );
+ H5Pset_chunk( props, 2, size );
+ H5Pset_deflate( props, 6 ); /* Turn on compression. */
+
+ /* Create our output space and data objects. */
+ size[0] = swarm->particleLocalCount;
+ fileSpace = H5Screate_simple( 2, size, NULL );
+ fileData = H5Dcreate( file, "/data", H5T_NATIVE_CHAR, fileSpace, props );
+ H5Sselect_all( fileSpace ); /* Set the file to receive everything. */
+
+ /* Dump all data. */
+ H5Dwrite( fileData, H5T_NATIVE_CHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, swarm->particles );
+
+ /* Close off all our handles. */
+ H5Dclose( fileData );
+ H5Sclose( fileSpace );
+ H5Pclose( props );
+ H5Fclose( file );
+}
+#endif
diff -r 65081410b863 -r 4825c205bdb8 Swarm/src/SwarmDump.h
--- a/Swarm/src/SwarmDump.h Fri Mar 14 04:09:43 2008 +0000
+++ b/Swarm/src/SwarmDump.h Sun Mar 16 07:22:21 2008 +0000
@@ -101,5 +101,8 @@
/** Virtual Function Wrappers */
void SwarmDump_Execute( void* swarmDump, void* context ) ;
+#ifdef HAVE_HDF5
+ void SwarmDump_DumpToHDF5( SwarmDump* self, Swarm* swarm, const char* filename );
+#endif
#endif /* __Domain_Swarm_SwarmDump_h__ */
More information about the CIG-COMMITS
mailing list