/*
 MDAL - Mesh Data Abstraction Library (MIT License)
 Copyright (C) 2018 Lutra Consulting Limited
*/

#ifndef MDAL_HDF5_HPP
#define MDAL_HDF5_HPP

/** A simple C++ wrapper around HDF5 library API */

// for compatibility (older hdf5 version in Travis)
#define H5Gopen_vers 1

#include "mdal_utils.hpp"
#include "hdf5.h"

typedef unsigned char uchar;

#include <memory>
#include <vector>
#include <string>
#include <numeric>
#include <assert.h>
#include "stdlib.h"

#define HDF_MAX_NAME 1024
struct HdfString
{
  char data [HDF_MAX_NAME];
};

template <int TYPE> inline void hdfClose( hid_t id ) { MDAL_UNUSED( id ); assert( false ); }
template <> inline void hdfClose<H5I_FILE>( hid_t id ) { H5Fclose( id ); }
template <> inline void hdfClose<H5I_GROUP>( hid_t id ) { H5Gclose( id ); }
template <> inline void hdfClose<H5I_DATASET>( hid_t id ) { H5Dclose( id ); }
template <> inline void hdfClose<H5I_ATTR>( hid_t id ) { H5Aclose( id ); }
template <> inline void hdfClose<H5I_DATASPACE>( hid_t id ) { H5Sclose( id ); }
template <> inline void hdfClose<H5I_DATATYPE>( hid_t id ) { H5Tclose( id ); }

//! RAII holder of an HDF5 identifier; releases it with the matching H5*close call
template <int TYPE>
class HdfH
{
  public:
    HdfH( hid_t hid ) : id( hid ) {}
    HdfH( const HdfH &other ) : id( other.id ) { }
    ~HdfH() { if ( id >= 0 ) hdfClose<TYPE>( id ); }

    hid_t id;
};

class HdfGroup;
class HdfDataset;
class HdfAttribute;
class HdfDataspace;
class HdfDataType;

class HdfFile
{
  public:
    enum Mode
    {
      ReadOnly,
      ReadWrite,
      Create
    };

    typedef HdfH<H5I_FILE> Handle;

    HdfFile( const std::string &path, HdfFile::Mode mode );
    ~HdfFile();
    bool isValid() const;
    hid_t id() const;

    inline std::vector<std::string> groups() const;

    inline HdfGroup group( const std::string &path ) const;
    inline HdfDataset dataset( const std::string &path ) const;
    inline HdfAttribute attribute( const std::string &attr_name ) const;
    inline bool pathExists( const std::string &path ) const;
    std::string filePath() const;

  protected:
    std::shared_ptr<Handle> d;
    std::string mPath;
};

class HdfDataType
{
  public:
    typedef HdfH<H5I_DATATYPE> Handle;

    HdfDataType();
    HdfDataType( hid_t type, bool isNativeType = true );
    ~HdfDataType();

    //! Creates new string type with size, use HDF_MAX_NAME for maximum length
    static HdfDataType createString( int size = HDF_MAX_NAME );

    bool isValid() const;
    hid_t id() const;

  protected:
    std::shared_ptr<Handle> d;
    hid_t mNativeId = -1;
};

class HdfGroup
{
  public:
    typedef HdfH<H5I_GROUP> Handle;

    static HdfGroup create( hid_t file, const std::string &path );

    HdfGroup( hid_t file, const std::string &path );
    HdfGroup( std::shared_ptr<Handle> handle );

    bool isValid() const;
    hid_t id() const;
    hid_t file_id() const;

    std::string name() const;

    std::vector<std::string> groups() const;
    std::vector<std::string> datasets() const;
    std::vector<std::string> objects() const;

    std::string childPath( const std::string &childName ) const;

    inline HdfGroup group( const std::string &groupName ) const;
    inline HdfDataset dataset( const std::string &dsName ) const;
    inline HdfAttribute attribute( const std::string &attr_name ) const;
    inline bool pathExists( const std::string &path ) const;

  protected:
    std::vector<std::string> objects( H5G_obj_t type ) const;

  protected:
    std::shared_ptr<Handle> d;
};

class HdfAttribute
{
  public:
    typedef HdfH<H5I_ATTR> Handle;

    //! Create new attribute for writing for 1 item
    HdfAttribute( hid_t obj_id, const std::string &attr_name, HdfDataType type );
    //! Open existing attribute for reading
    HdfAttribute( hid_t obj_id, const std::string &attr_name );
    ~HdfAttribute();

    bool isValid() const;
    hid_t id() const;

    std::string readString() const;

    void write( const std::string &value );
    void write( int value );

  protected:
    std::shared_ptr<Handle> d;
    hid_t m_objId;
    std::string m_name;

    HdfDataType mType; // when in write mode
};
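/*
  Usage sketch (illustrative only, not part of the library): how the wrappers
  above are typically combined to read metadata from an existing file. The
  file path, group name and attribute name below are hypothetical.

    HdfFile file( "results.h5", HdfFile::ReadOnly );
    if ( !file.isValid() )
      return;                                    // nothing opened, nothing to close

    if ( file.pathExists( "/Results" ) )
    {
      HdfGroup results = file.group( "/Results" );
      std::string units = results.attribute( "units" ).readString();
    }
    // every HDF5 identifier is released by ~HdfH<> once the last
    // shared_ptr<Handle> owner goes out of scope
*/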
class HdfDataspace
{
  public:
    typedef HdfH<H5I_DATASPACE> Handle;

    //! memory dataspace for simple N-D array
    HdfDataspace( const std::vector<hsize_t> &dims );
    //! dataspace of the dataset
    HdfDataspace( hid_t dataset = -1 );
    ~HdfDataspace( );

    //! select from 1D array
    void selectHyperslab( hsize_t start, hsize_t count );
    //! select from N-D array
    void selectHyperslab( const std::vector<hsize_t> offsets,
                          const std::vector<hsize_t> counts );

    bool isValid() const;
    hid_t id() const;

  protected:
    std::shared_ptr<Handle> d;
};

class HdfDataset
{
  public:
    typedef HdfH<H5I_DATASET> Handle;

    //! Create new, simple 1-dimensional dataset
    HdfDataset( hid_t file, const std::string &path, HdfDataType dtype, size_t nItems = 1 );
    //! Create new dataset with custom dimensions
    HdfDataset( hid_t file, const std::string &path, HdfDataType dtype, HdfDataspace dataspace );
    //! Opens dataset for reading
    HdfDataset( hid_t file, const std::string &path );
    ~HdfDataset();

    bool isValid() const;
    hid_t id() const;

    std::vector<hsize_t> dims() const;
    hsize_t elementCount() const;
    H5T_class_t type() const;

    //! Reads full array into vector.
    //! Array can have any number of dimensions
    //! and it is fully read into 1D vector
    std::vector<uchar> readArrayUint8() const;
    std::vector<float> readArray() const;
    std::vector<double> readArrayDouble() const;
    std::vector<int> readArrayInt() const;
    std::vector<std::string> readArrayString() const;

    //! Reads part of the N-D array into vector,
    //! for each dimension specified by offset and count.
    //! Size of offsets and counts must be the same as rank (number of dims) of the dataset;
    //! the resulting array is 1D
    std::vector<uchar> readArrayUint8( const std::vector<hsize_t> offsets, const std::vector<hsize_t> counts ) const;
    std::vector<float> readArray( const std::vector<hsize_t> offsets, const std::vector<hsize_t> counts ) const;
    std::vector<double> readArrayDouble( const std::vector<hsize_t> offsets, const std::vector<hsize_t> counts ) const;
    std::vector<int> readArrayInt( const std::vector<hsize_t> offsets, const std::vector<hsize_t> counts ) const;

    inline bool hasAttribute( const std::string &attr_name ) const;
    inline HdfAttribute attribute( const std::string &attr_name ) const;

    template <typename T> std::vector<T> readArray( hid_t mem_type_id ) const
    {
      hsize_t cnt = elementCount();
      std::vector<T> data( cnt );
      herr_t status = H5Dread( d->id, mem_type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, data.data() );
      if ( status < 0 )
      {
        MDAL::debug( "Failed to read data!" );
        return std::vector<T>();
      }
      return data;
    }

    template <typename T> std::vector<T> readArray( hid_t mem_type_id, const std::vector<hsize_t> offsets, const std::vector<hsize_t> counts ) const
    {
      // file-side dataspace restricted to the requested hyperslab
      HdfDataspace dataspace( d->id );
      dataspace.selectHyperslab( offsets, counts );

      hsize_t totalItems = 1;
      for ( auto it = counts.begin(); it != counts.end(); ++it )
        totalItems *= *it;

      // memory-side dataspace: a flat 1D buffer holding all selected items
      std::vector<hsize_t> dims = {totalItems};
      HdfDataspace memspace( dims );
      memspace.selectHyperslab( 0, totalItems );

      std::vector<T> data( totalItems );
      herr_t status = H5Dread( d->id, mem_type_id, memspace.id(), dataspace.id(), H5P_DEFAULT, data.data() );
      if ( status < 0 )
      {
        MDAL::debug( "Failed to read data!" );
        return std::vector<T>();
      }
      return data;
    }

    //! Reads float value
    float readFloat() const;

    //! Reads string value
    std::string readString() const;

    //! Writes string dataset with single entry
    void write( const std::string &value );

    //! Writes array of float data
    void write( std::vector<float> &value );

    void write( float value );

    //! Writes array of double data
    void write( std::vector<double> &value );

  protected:
    std::shared_ptr<Handle> d;
    hid_t m_fileId;
    std::string m_path;

    HdfDataType mType; // when in write mode
};
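/*
  Usage sketch (illustrative only): reading one row of a hypothetical 2D
  dataset "/Results/values" with the partial-read overloads above. For a
  dataset of rank 2, both offsets and counts need two entries.

    HdfDataset values = file.dataset( "/Results/values" );   // "file" is an open HdfFile
    std::vector<hsize_t> dims = values.dims();                // e.g. {nTimesteps, nValues}
    std::vector<double> row =
      values.readArrayDouble( {0, 0}, {1, dims[1]} );         // first timestep, all values
*/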
inline std::vector<std::string> HdfFile::groups() const { return group( "/" ).groups(); }

inline HdfGroup HdfFile::group( const std::string &path ) const { return HdfGroup( d->id, path ); }

inline HdfDataset HdfFile::dataset( const std::string &path ) const { return HdfDataset( d->id, path ); }

inline HdfGroup HdfGroup::group( const std::string &groupName ) const { return HdfGroup( file_id(), childPath( groupName ) ); }

inline HdfDataset HdfGroup::dataset( const std::string &dsName ) const { return HdfDataset( file_id(), childPath( dsName ) ); }

inline bool HdfDataset::hasAttribute( const std::string &attr_name ) const
{
  htri_t res = H5Aexists( d->id, attr_name.c_str() );
  return res > 0;
}

inline HdfAttribute HdfFile::attribute( const std::string &attr_name ) const { return HdfAttribute( d->id, attr_name ); }

inline HdfAttribute HdfGroup::attribute( const std::string &attr_name ) const { return HdfAttribute( d->id, attr_name ); }

inline HdfAttribute HdfDataset::attribute( const std::string &attr_name ) const { return HdfAttribute( d->id, attr_name ); }

inline bool HdfFile::pathExists( const std::string &path ) const { return H5Lexists( d->id, path.c_str(), H5P_DEFAULT ) > 0; }

inline bool HdfGroup::pathExists( const std::string &path ) const { return H5Lexists( d->id, path.c_str(), H5P_DEFAULT ) > 0; }

#endif // MDAL_HDF5_HPP
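/*
  Usage sketch (illustrative only): creating a file and writing a small
  dataset and attribute through the write-mode constructors declared above.
  Paths, names and values are hypothetical.

    HdfFile out( "output.h5", HdfFile::Create );
    HdfGroup grp = HdfGroup::create( out.id(), "/Results" );

    HdfDataset ds( out.id(), "/Results/max", HdfDataType( H5T_NATIVE_FLOAT ), 1 );
    ds.write( 12.5f );

    HdfAttribute attr( ds.id(), "version", HdfDataType::createString() );
    attr.write( "1.0" );
*/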