/*
 *  Copyright (c), 2017, Adrien Devresse <adrien.devresse@epfl.ch>
 *
 *  Distributed under the Boost Software License, Version 1.0.
 *    (See accompanying file LICENSE_1_0.txt or copy at
 *          http://www.boost.org/LICENSE_1_0.txt)
 *
 */
#pragma once

#include "../H5Easy.hpp"
#include "H5Easy_misc.hpp"

namespace H5Easy {

namespace detail {

/*
Base template for partial specialization: the fallback if specialized templates don't match.
Used e.g. for scalars.
*/
template <typename T, typename = void>
struct io_impl {
    // Write a scalar "data" to a DataSet at "path" and return the DataSet.
    inline static DataSet dump(File& file,
                               const std::string& path,
                               const T& data,
                               const DumpOptions& options) {
        DataSet dataset = initScalarDataset(file, path, data, options);
        dataset.write(data);
        if (options.flush()) {
            file.flush();
        }
        return dataset;
    }

    // Read a scalar from the DataSet at "path".
    inline static T load(const File& file, const std::string& path) {
        DataSet dataset = file.getDataSet(path);
        T data;
        dataset.read(data);
        return data;
    }

    // Write a scalar "data" as Attribute "key" of the DataSet at "path".
    inline static Attribute dumpAttribute(File& file,
                                          const std::string& path,
                                          const std::string& key,
                                          const T& data,
                                          const DumpOptions& options) {
        Attribute attribute = initScalarAttribute(file, path, key, data, options);
        attribute.write(data);
        if (options.flush()) {
            file.flush();
        }
        return attribute;
    }

    // Read the scalar Attribute "key" of the DataSet at "path".
    inline static T loadAttribute(const File& file,
                                  const std::string& path,
                                  const std::string& key) {
        DataSet dataset = file.getDataSet(path);
        Attribute attribute = dataset.getAttribute(key);
        T data;
        attribute.read(data);
        return data;
    }

    // Write a scalar "data" at index "idx" of an extendible DataSet at "path",
    // creating or resizing the DataSet as needed.
    inline static DataSet dump_extend(File& file,
                                      const std::string& path,
                                      const T& data,
                                      const std::vector<size_t>& idx,
                                      const DumpOptions& options) {
        std::vector<size_t> ones(idx.size(), 1);

        if (file.exist(path)) {
            // Existing DataSet: grow each dimension so that "idx" fits, then write.
            DataSet dataset = file.getDataSet(path);
            std::vector<size_t> dims = dataset.getDimensions();
            std::vector<size_t> shape = dims;
            if (dims.size() != idx.size()) {
                throw detail::error(
                    file,
                    path,
                    "H5Easy::dump: Dimension of the index and the existing field do not match");
            }
            for (size_t i = 0; i < dims.size(); ++i) {
                shape[i] = std::max(dims[i], idx[i] + 1);
            }
            if (shape != dims) {
                dataset.resize(shape);
            }
            dataset.select(idx, ones).write(data);
            if (options.flush()) {
                file.flush();
            }
            return dataset;
        }

        // New DataSet: create it chunked, with unlimited dimensions so it can grow later.
        std::vector<size_t> shape = idx;
        const size_t unlim = DataSpace::UNLIMITED;
        std::vector<size_t> unlim_shape(idx.size(), unlim);
        std::vector<hsize_t> chunks(idx.size(), 10);
        if (options.isChunked()) {
            chunks = options.getChunkSize();
            if (chunks.size() != idx.size()) {
                throw error(file, path, "H5Easy::dump: Incorrect dimension ChunkSize");
            }
        }
        // The initial extent must contain "idx": size = idx + 1 along each dimension.
        for (size_t& i: shape) {
            i++;
        }
        DataSpace dataspace = DataSpace(shape, unlim_shape);
        DataSetCreateProps props;
        props.add(Chunking(chunks));
        DataSet dataset = file.createDataSet(path, dataspace, AtomicType<T>(), props, {}, true);
        dataset.select(idx, ones).write(data);
        if (options.flush()) {
            file.flush();
        }
        return dataset;
    }

    // Read the single element at index "idx" of the DataSet at "path".
    inline static T load_part(const File& file,
                              const std::string& path,
                              const std::vector<size_t>& idx) {
        std::vector<size_t> ones(idx.size(), 1);
        DataSet dataset = file.getDataSet(path);
        T data;
        dataset.select(idx, ones).read(data);
        return data;
    }
};

}  // namespace detail
}  // namespace H5Easy
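
Usage sketch (not part of the header above): the io_impl fallback is reached through the H5Easy front-end functions declared in H5Easy.hpp. A minimal, illustrative example follows; the file name "example.h5", the dataset paths, and the include path <highfive/H5Easy.hpp> are assumptions for the sketch, not anything mandated by this header.

// Illustrative sketch of the front-end calls that dispatch to io_impl for scalars.
// Assumes the standard HighFive install layout; paths and names are made up.
#include <highfive/H5Easy.hpp>

int main() {
    H5Easy::File file("example.h5", H5Easy::File::Overwrite);

    // Scalar dump/load: handled by io_impl::dump / io_impl::load.
    H5Easy::dump(file, "/measurement/temperature", 273.15);
    double T = H5Easy::load<double>(file, "/measurement/temperature");

    // Scalar attribute: io_impl::dumpAttribute / io_impl::loadAttribute.
    H5Easy::dumpAttribute(file, "/measurement/temperature", "unit_is_kelvin", 1);
    int k = H5Easy::loadAttribute<int>(file, "/measurement/temperature", "unit_is_kelvin");

    // Extendible dataset written one element at a time: io_impl::dump_extend / load_part.
    for (size_t i = 0; i < 3; ++i) {
        H5Easy::dump(file, "/measurement/series", static_cast<double>(i), {i});
    }
    double x1 = H5Easy::load<double>(file, "/measurement/series", {1});

    return (T > 0.0 && k == 1 && x1 == 1.0) ? 0 : 1;
}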