Peano 4
Loading...
Searching...
No Matches
FileReaderHDF5.cpp
Go to the documentation of this file.
1#include "FileReaderHDF5.h"
2
3
4#include <stdio.h>
5#include <string.h>
6#include <numeric>
7
8
// Class-wide log device; the string is the trace identifier under which all
// FileReaderHDF5 messages appear in the log output.
tarch::logging::Log toolbox::particles::FileReaderHDF5::_log( "toolbox::particles::FileReaderHDF5" );
10
11
// Read a SWIFT-style HDF5 initial-conditions file and populate this reader's
// particle containers (_Coordinates, _Velocity, _Mass, _SmoothingLength,
// _InternalEnergy, _ParticleIDs, _ParticleIndex). Only the SPH particle
// group /PartType0 is read.
//
// If Peano has been compiled without HDF5 support (UseHDF5 not defined),
// the call logs an error and terminates the program via exit(-1).
void toolbox::particles::FileReaderHDF5::readHDF5File( const std::string& filename ) {

#ifdef UseHDF5

  logInfo( "readHDF5File()", "Entering filereader..." );

  const H5std_string FILE_NAME ( filename.c_str() );

  /* Group of SPH particles */
  const H5std_string GROUP_NAME ( "/PartType0" );

  /*
   * Datasets under the PartType group.
   * Only PartType0 (SPH particles) is used for now.
   */
  const H5std_string DATASET_NAME1( "/Coordinates" );
  const H5std_string DATASET_NAME2( "/Velocities" );
  const H5std_string DATASET_NAME3( "/Masses" );
  const H5std_string DATASET_NAME4( "/SmoothingLength" );
  const H5std_string DATASET_NAME5( "/InternalEnergy" );
  const H5std_string DATASET_NAME6( "/ParticleIDs" );

  /*
   * Open the specified file (read-only)
   */
  H5::H5File file( FILE_NAME, H5F_ACC_RDONLY );
  logInfo( "readHDF5File()", "File: " << FILE_NAME << " opened." ) ;

  // @TODO read header attributes (contains box size, part number, hydro dimensions, ...)

  /*
   * Now read the various datasets for this particle type.
   */

  // Read positions (one vector per particle)
  _Coordinates = toolbox::particles::FileReaderHDF5::readDoubleArray( file, GROUP_NAME, DATASET_NAME1 );

  // Read velocities (one vector per particle)
  _Velocity = toolbox::particles::FileReaderHDF5::readFloatArray( file, GROUP_NAME, DATASET_NAME2 );

  // Read masses (one scalar per particle)
  _Mass = toolbox::particles::FileReaderHDF5::readDoubleScalar( file, GROUP_NAME, DATASET_NAME3 );

  // Read SmoothingLength (h)
  _SmoothingLength = toolbox::particles::FileReaderHDF5::readDoubleScalar( file, GROUP_NAME, DATASET_NAME4 );

  // Read Internal Energy (U)
  _InternalEnergy = toolbox::particles::FileReaderHDF5::readDoubleScalar( file, GROUP_NAME, DATASET_NAME5 );

  // Keep track of the particles' index in the vectors: 0..N-1 with N the
  // number of particles just read. getParticlesWithinVoxel() uses these
  // indices to look up a particle's scalar fields after it has been removed
  // from the coordinate list.
  _ParticleIndex = std::list <int> (_SmoothingLength.size());
  std::iota(_ParticleIndex.begin(), _ParticleIndex.end(), 0);

  // Read ParticleIDs
  _ParticleIDs = toolbox::particles::FileReaderHDF5::readIntScalar( file, GROUP_NAME, DATASET_NAME6 );

  logInfo( "readHDF5File()", "Reading file: " << FILE_NAME << " complete." ) ;

#else
  logError( "readHDF5File()", "tried to use Peano's HDF5 reader though code has been compiled without --with-hdf5 (autotools)" );
  exit(-1);
#endif

}
76
77#ifdef UseHDF5
78std::vector< int > toolbox::particles::FileReaderHDF5::readIntScalar(
79 const H5::H5File& file,
80 const H5std_string& groupName,
81 const H5std_string& datasetName
82){
83
84 std::vector< int > result;
85
86 /*
87 * Open the specified group from file
88 */
89 H5::Group group = file.openGroup ( groupName );
90 logInfo( "readIntScalar()", "Group: " << groupName << " opened." ) ;
91
92 /*
93 * Open the specified dataset
94 */
95 H5::DataSet dataset = file.openDataSet( groupName + datasetName );
96 logInfo( "readIntScalar()", "Dataset: " << groupName + datasetName << " opened." ) ;
97
98 /*
99 * Get dataspace (i.e. shape) of the dataset.
100 */
101 H5::DataSpace dataspace = dataset.getSpace();
102
103 /*
104 * Get the number of dimensions in the dataspace.
105 */
106 int rank = dataspace.getSimpleExtentNdims();
107
108 /*
109 * Get the dimension size of each dimension in the dataspace and
110 * display them.
111 */
112 hsize_t dims_out[1];
113 int ndims = dataspace.getSimpleExtentDims( dims_out, NULL );
114
115 logInfo( "readIntScalar()",
116 "Dataspace dimensions: rank " << rank <<
117 ", dimensions " <<
118 (unsigned long)(dims_out[0]) )
119
120 /*
121 * Length of the input data that we want to read. By default we read all.
122 */
123 const int LengthInputDataX = dims_out[0];
124
125 // Output buffer initialization
126 const int LengthDataBufferX = LengthInputDataX;
127 const int RankDataBuffer = rank; // rank=1 for this case (scalar)
128
129 int i;
130 int DataBuffer[LengthDataBufferX];
131 for (i = 0; i < LengthDataBufferX; i++)
132 {
133 DataBuffer[i] = 0;
134 }
135
136 dataspace.selectAll();
137 H5::DataSpace memspace( 1 , dims_out );
138
139 // Now read and store into data buffer
140 dataset.read( DataBuffer, H5::PredType::NATIVE_INT, memspace, dataspace );
141 dataspace.close();
142
143 /* Copy data from buffer into output list or vector container */
144 for (i = 0; i < LengthDataBufferX; i++)
145 {
146 result.push_back( DataBuffer[i] );
147 logDebug( "readIntScalar()", "entry value: " << DataBuffer[i] );
148 }
149
150 logInfo( "readIntScalar()", "read " << result.size() << " entries. " );
151
152 return result;
153}
154
155
156std::vector< double > toolbox::particles::FileReaderHDF5::readDoubleScalar(
157 const H5::H5File& file,
158 const H5std_string& groupName,
159 const H5std_string& datasetName
160){
161
162 std::vector< double > result;
163
164 /*
165 * Open the specified group from file
166 */
167 H5::Group group = file.openGroup ( groupName );
168 logInfo( "readDoubleScalar()", "Group: " << groupName << " opened." ) ;
169
170 /*
171 * Open the specified dataset
172 */
173 H5::DataSet dataset = file.openDataSet( groupName + datasetName );
174 logInfo( "readDoubleScalar()", "Dataset: " << groupName + datasetName << " opened." ) ;
175
176 /*
177 * Get dataspace (i.e. shape) of the dataset.
178 */
179 H5::DataSpace dataspace = dataset.getSpace();
180
181 /*
182 * Get the number of dimensions in the dataspace.
183 */
184 int rank = dataspace.getSimpleExtentNdims();
185
186 /*
187 * Get the dimension size of each dimension in the dataspace .
188 */
189 hsize_t dims_out[1];
190 int ndims = dataspace.getSimpleExtentDims( dims_out, NULL );
191
192 logInfo( "readDoubleScalar()",
193 "Dataspace dimensions: rank " << rank <<
194 ", dimensions " <<
195 (unsigned long)(dims_out[0]) )
196
197 /*
198 * Hyperslab dimensions. If needed, this can be a part of the data only, e.g.
199 * for optimization.
200 */
201 const int LengthInputDataX = dims_out[0];
202 const int RankDataBuffer = rank; // rank=1 for this case.
203
204 // Output buffer dimensions
205 const int LengthDataBufferX = LengthInputDataX;
206
207 int i;
208 float DataBuffer[LengthDataBufferX];
209 for (i = 0; i < LengthDataBufferX; i++)
210 {
211 DataBuffer[i] = 0;
212 }
213
214 dataspace.selectAll();
215
216 /* Define memory space to do the copy */
217 H5::DataSpace memspace( RankDataBuffer , dims_out );
218
219 /* Now read and store into data buffer */
220 dataset.read( DataBuffer, H5::PredType::NATIVE_FLOAT, memspace, dataspace );
221 dataspace.close();
222
223 /* Copy data from buffer into output list or vector container */
224 for (i = 0; i < LengthDataBufferX; i++)
225 {
226 result.push_back( DataBuffer[i] );
227 logDebug( "readDoubleScalar()", "entry value: " << DataBuffer[i] );
228 }
229
230 logInfo( "readDoubleScalar()", "read " << result.size() << " entries. " );
231
232 return result;
233
234}
235
236
237std::list< tarch::la::Vector<Dimensions,double> > toolbox::particles::FileReaderHDF5::readDoubleArray(
238 const H5::H5File& file,
239 const H5std_string& groupName,
240 const H5std_string& datasetName
241){
242
243 std::list< tarch::la::Vector<Dimensions,double> > result;
244
245 /*
246 * Open the specified group
247 */
248 H5::Group group = file.openGroup ( groupName );
249 logInfo( "readHDF5File()", "Group: " << groupName << " opened." ) ;
250
251 /*
252 * Open the specified dataset
253 */
254 H5::DataSet dataset = file.openDataSet( groupName+datasetName );
255 logInfo( "readHDF5File()", "Dataset: " << groupName + datasetName << " opened." ) ;
256
257 /*
258 * Get dataspace (i.e. shape) of the dataset.
259 */
260 H5::DataSpace dataspace = dataset.getSpace();
261
262 /*
263 * Get the number of dimensions in the dataspace.
264 */
265 int rank = dataspace.getSimpleExtentNdims();
266
267 /*
268 * Get the dimension size of each dimension in the dataspace.
269 */
270 hsize_t dims_out[2];
271 int ndims = dataspace.getSimpleExtentDims( dims_out, NULL );
272
273 logInfo( "readHDF5File()",
274 "Dataspace dimensions: rank " << rank <<
275 ", dimensions " <<
276 (unsigned long)(dims_out[0]) << " x " <<
277 (unsigned long)(dims_out[1]) ) ;
278
279 /*
280 * Hyperslab dimensions. If needed, this can be a part of the data only, e.g.
281 * for optimization.
282 */
283 const int LengthInputDataX = dims_out[0];
284 const int LengthInputDataY = dims_out[1];
285 const int RankDataBuffer = rank; // the data will be stored in a table-like format
286
287 // Output buffer dimensions
288 const int LengthDataBufferX = LengthInputDataX;
289 const int LengthDataBufferY = LengthInputDataY;
290
291 /*
292 * Output buffer initialization.
293 */
294 int i, j;
295 double dataBuffer[LengthDataBufferX][LengthDataBufferY];
296 for (i = 0; i < LengthDataBufferX; i++)
297 {
298 for (j = 0; j < LengthDataBufferY; j++)
299 {
300 dataBuffer[i][j] = 0; // @TODO Initialize array to sensible, default values
301 }
302 }
303
304 /*
305 * Define hyperslab in the dataset; implicitly giving strike and
306 * block NULL.
307 */
308 hsize_t offset[2]; // hyperslab offset in the file
309 hsize_t count[2]; // size of the hyperslab in the file
310 offset[0] = 0;
311 offset[1] = 0;
312 count[0] = LengthInputDataX;
313 count[1] = LengthInputDataY;
314
315 dataspace.selectHyperslab( H5S_SELECT_SET, count, offset );
316
317 /*
318 * Define the memory dataspace.
319 */
320 hsize_t dimsm[2]; /* memory space dimensions */
321 dimsm[0] = LengthDataBufferX;
322 dimsm[1] = LengthDataBufferY;
323
324 H5::DataSpace memspace( RankDataBuffer, dimsm );
325
326 /*
327 * Define memory hyperslab.
328 */
329 hsize_t offset_out[2]; // hyperslab offset in memory
330 hsize_t count_out[2]; // size of the hyperslab in memory
331 offset_out[0] = 0;
332 offset_out[1] = 0;
333 count_out[0] = LengthInputDataX;
334 count_out[1] = LengthInputDataY;
335
336 memspace.selectHyperslab( H5S_SELECT_SET, count_out, offset_out );
337
338 // Now read and store into data buffer
339 H5::DataType Datatype;
340 Datatype = H5::PredType::NATIVE_DOUBLE;
341
342 dataset.read( dataBuffer, Datatype, memspace, dataspace );
343 dataspace.close();
344
345 /* Copy the data buffer into the output list or vector container.
346 * These support adding and removing elements, which is useful to identify and
347 * keep particles within a cell while discarding the rest. That will be done
348 * in a later step.
349 */
350
351 /* First construct a single row of the list */
353
354 for (i = 0; i < LengthDataBufferX; i++)
355 {
356 for (j = 0; j < Dimensions; j++)
357 {
358 tmpDataArray[j] = dataBuffer[i][j];
359 }
360 /* Now append this row to the final list */
361 result.push_back( tmpDataArray );
362
363 logDebug( "readDoubleArray()", "tmpDataArray " << tmpDataArray << ", i = " << i );
364 }
365
366 logInfo( "readHDF5File()", "read " << result.size() << " entries. " );
367 return result;
368
369}
370
371
372std::vector< tarch::la::Vector<Dimensions,double> > toolbox::particles::FileReaderHDF5::readFloatArray(
373 const H5::H5File& file,
374 const H5std_string& groupName,
375 const H5std_string& datasetName
376){
377
378 std::vector< tarch::la::Vector<Dimensions,double> > result;
379
380 /*
381 * Open the specified group
382 */
383 H5::Group group = file.openGroup ( groupName );
384 logInfo( "readHDF5File()", "Group: " << groupName << " opened." ) ;
385
386 /*
387 * Open the specified dataset
388 */
389 H5::DataSet dataset = file.openDataSet( groupName+datasetName );
390 logInfo( "readHDF5File()", "Dataset: " << groupName + datasetName << " opened." ) ;
391
392 /*
393 * Get dataspace (i.e. shape) of the dataset.
394 */
395 H5::DataSpace dataspace = dataset.getSpace();
396
397 /*
398 * Get the number of dimensions in the dataspace.
399 */
400 int rank = dataspace.getSimpleExtentNdims();
401
402 /*
403 * Get the dimension size of each dimension in the dataspace.
404 */
405 hsize_t dims_out[2];
406 int ndims = dataspace.getSimpleExtentDims( dims_out, NULL );
407
408 logInfo( "readHDF5File()",
409 "Dataspace dimensions: rank " << rank <<
410 ", dimensions " <<
411 (unsigned long)(dims_out[0]) << " x " <<
412 (unsigned long)(dims_out[1]) ) ;
413
414 /*
415 * Hyperslab dimensions. If needed, this can be a part of the data only, e.g.
416 * for optimization.
417 */
418 const int LengthInputDataX = dims_out[0];
419 const int LengthInputDataY = dims_out[1];
420 const int RankDataBuffer = rank; // the data will be stored in a table-like format
421
422 // Output buffer dimensions
423 const int LengthDataBufferX = LengthInputDataX;
424 const int LengthDataBufferY = LengthInputDataY;
425
426 /*
427 * Output buffer initialization.
428 */
429 int i, j;
430 double dataBuffer[LengthDataBufferX][LengthDataBufferY];
431 for (i = 0; i < LengthDataBufferX; i++)
432 {
433 for (j = 0; j < LengthDataBufferY; j++)
434 {
435 dataBuffer[i][j] = 0; // @TODO Initialize array to sensible, default values
436 }
437 }
438
439 /*
440 * Define hyperslab in the dataset; implicitly giving strike and
441 * block NULL.
442 */
443 hsize_t offset[2]; // hyperslab offset in the file
444 hsize_t count[2]; // size of the hyperslab in the file
445 offset[0] = 0;
446 offset[1] = 0;
447 count[0] = LengthInputDataX;
448 count[1] = LengthInputDataY;
449
450 dataspace.selectHyperslab( H5S_SELECT_SET, count, offset );
451
452 /*
453 * Define the memory dataspace.
454 */
455 hsize_t dimsm[2]; /* memory space dimensions */
456 dimsm[0] = LengthDataBufferX;
457 dimsm[1] = LengthDataBufferY;
458
459 H5::DataSpace memspace( RankDataBuffer, dimsm );
460
461 /*
462 * Define memory hyperslab.
463 */
464 hsize_t offset_out[2]; // hyperslab offset in memory
465 hsize_t count_out[2]; // size of the hyperslab in memory
466 offset_out[0] = 0;
467 offset_out[1] = 0;
468 count_out[0] = LengthInputDataX;
469 count_out[1] = LengthInputDataY;
470
471 memspace.selectHyperslab( H5S_SELECT_SET, count_out, offset_out );
472
473 // Now read and store into data buffer
474 H5::DataType Datatype = H5::PredType::NATIVE_FLOAT;
475
476 dataset.read( dataBuffer, Datatype, memspace, dataspace );
477 dataspace.close();
478
479 /* Copy the data buffer into the output list or vector container.
480 * These support adding and removing elements, which is useful to identify and
481 * keep particles within a cell while discarding the rest. That will be done
482 * in a later step.
483 */
484
485 /* First construct a single row of the list */
487
488 for (i = 0; i < LengthDataBufferX; i++)
489 {
490 for (j = 0; j < Dimensions; j++)
491 {
492 tmpDataArray[j] = dataBuffer[i][j];
493 }
494 /* Now append this row to the final list */
495 result.push_back( tmpDataArray );
496
497 logDebug( "readFloatArray()", "tmpDataArray " << tmpDataArray << ", i = " << i );
498 }
499
500 logInfo( "readHDF5File()", "read " << result.size() << " entries. " );
501 return result;
502
503}
504
505#endif
506
508 _VelocityWithinVoxel.clear();
509 _MassWithinVoxel.clear();
510 _SmoothingLengthWithinVoxel.clear();
511 _InternalEnergyWithinVoxel.clear();
512 _ParticleIDsWithinVoxel.clear();
513}
514
516 return (_VelocityWithinVoxel.empty() &&
517 _MassWithinVoxel.empty() &&
518 _SmoothingLengthWithinVoxel.empty() &&
519 _InternalEnergyWithinVoxel.empty() &&
520 _ParticleIDsWithinVoxel.empty());
521}
522
523std::list< tarch::la::Vector<Dimensions,double> > toolbox::particles::FileReaderHDF5::getParticlesWithinVoxel(
526 bool remove
527) {
528 std::list< tarch::la::Vector<Dimensions,double> > result;
529
530 std::list< tarch::la::Vector<Dimensions,double> >::iterator p = _Coordinates.begin();
531 std::list<int>::iterator pind = _ParticleIndex.begin();
532
533 // Make sure we're starting with a clean slate.
534 if (!isVoxelEmpty()) clearVoxel();
535
536 /* Loop over all coordinates that have been read in. When you find a
537 * particle that belongs in this voxel, remove it from the list of particles
538 * to loop over so they don't get checked again in other voxels. */
539 while ( p!=_Coordinates.end() ) {
540
541 bool overlaps = true;
542
543 for (int d=0; d<Dimensions; d++) {
544 overlaps &= tarch::la::smallerEquals( x(d) - h(d) / 2.0, (*p)(d) );
545 overlaps &= tarch::la::greaterEquals( x(d) + h(d) / 2.0, (*p)(d) );
546 }
547
548 if (overlaps) {
549 /* Store coordinates */
550 result.push_back(*p);
551
552 /* Also store other fields. */
553 _VelocityWithinVoxel.push_back ( _Velocity [*pind] );
554 _MassWithinVoxel.push_back ( _Mass [*pind] );
555 _SmoothingLengthWithinVoxel.push_back( _SmoothingLength[*pind] );
556 _InternalEnergyWithinVoxel.push_back ( _InternalEnergy [*pind] );
557 _ParticleIDsWithinVoxel.push_back ( _ParticleIDs [*pind] );
558
559 if (remove) {
560 p = _Coordinates.erase(p);
561 pind = _ParticleIndex.erase(pind);
562 }
563 else {
564 p++;
565 pind++;
566 }
567 } // overlaps == true
568 else {
569 p++;
570 pind++;
571 }
572 } // loop over particle coordinates
573
574 return result;
575}
576
577
579 _Coordinates.clear();
580 _Mass.clear();
581 _SmoothingLength.clear();
582 _InternalEnergy.clear();
583 _ParticleIDs.clear();
584 _ParticleIndex.clear();
585}
586
587
589 return _Coordinates.empty();
590}
591
592
594 return _Coordinates.size();
595}
596
597
// Returns a copy of the velocities of the particles collected by the most
// recent getParticlesWithinVoxel() call, in the same order as the
// coordinates returned there.
std::vector < tarch::la::Vector<Dimensions,double> > toolbox::particles::FileReaderHDF5::getVelocityWithinVoxel() const {
  return _VelocityWithinVoxel;
}
601
602
604 return _MassWithinVoxel;
605}
606
607
609 return _SmoothingLengthWithinVoxel;
610}
611
612
614 return _InternalEnergyWithinVoxel;
615}
616
617
619 return _ParticleIDsWithinVoxel;
620}
621
622
And from this we can write down \f$ \int \nabla \phi_i \nabla \phi_i \, dx \f$, but since we are constructing a matrix, let us investigate the \f$ j \f$
#define logError(methodName, logMacroMessageStream)
Wrapper macro around tarch::logging::Log to improve logging.
Definition Log.h:464
#define logDebug(methodName, logMacroMessageStream)
Definition Log.h:50
#define logInfo(methodName, logMacroMessageStream)
Wrapper macro around tarch::logging::Log to improve logging.
Definition Log.h:411
Log Device.
Definition Log.h:516
std::vector< double > _InternalEnergy
std::vector< double > getMassWithinVoxel() const
bool isVoxelEmpty() const
Check whether voxel data carried by this instance of FileReaderHDF5 is empty.
void readHDF5File(const std::string &filename)
Read HDF5 file in SWIFT format.
std::vector< double > getSmoothingLengthWithinVoxel() const
std::vector< double > _SmoothingLength
std::vector< double > getInternalEnergyWithinVoxel() const
void clearVoxel()
Clear data of a voxel carried by this instance of FileReaderHDF5.
std::list< tarch::la::Vector< Dimensions, double > > getParticlesWithinVoxel(const tarch::la::Vector< Dimensions, double > &x, const tarch::la::Vector< Dimensions, double > &h, bool remove)
Take particles from database which fall into given voxel.
std::vector< tarch::la::Vector< Dimensions, double > > getVelocityWithinVoxel() const
std::list< tarch::la::Vector< Dimensions, double > > _Coordinates
static tarch::logging::Log _log
std::vector< int > getParticleIDsWithinVoxel() const
std::vector< tarch::la::Vector< Dimensions, double > > _Velocity
int i
Definition makeIC.py:51
bool greaterEquals(double lhs, double rhs, double tolerance=NUMERICAL_ZERO_DIFFERENCE)
bool smallerEquals(double lhs, double rhs, double tolerance=NUMERICAL_ZERO_DIFFERENCE)
Simple vector class.
Definition Vector.h:134