/*
    Copyright (c) 2005-2022 Intel Corporation

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
*/

#ifndef __TBB_cache_aligned_allocator_H
#define __TBB_cache_aligned_allocator_H

#include "detail/_utils.h"
#include "detail/_namespace_injection.h"
#include <cstdlib>
#include <utility>

#if __TBB_CPP17_MEMORY_RESOURCE_PRESENT
#include <memory_resource>
#endif

namespace tbb {
namespace detail {

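// Entry points into the TBB runtime library; they are only declared here, so a
// translation unit using this allocator must link against the TBB binary.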
namespace r1 {
TBB_EXPORT void*       __TBB_EXPORTED_FUNC cache_aligned_allocate(std::size_t size);
TBB_EXPORT void        __TBB_EXPORTED_FUNC cache_aligned_deallocate(void* p);
TBB_EXPORT std::size_t __TBB_EXPORTED_FUNC cache_line_size();
}

namespace d1 {

template<typename T>
class cache_aligned_allocator {
public:
    using value_type = T;
    using propagate_on_container_move_assignment = std::true_type;

    //! Always defined for TBB containers (supported since C++17 for std containers)
    using is_always_equal = std::true_type;

    cache_aligned_allocator() = default;
    template<typename U> cache_aligned_allocator(const cache_aligned_allocator<U>&) noexcept {}

    //! Allocate space for n objects, starting on a cache/sector line.
    __TBB_nodiscard T* allocate(std::size_t n) {
        return static_cast<T*>(r1::cache_aligned_allocate(n * sizeof(value_type)));
    }

    //! Free block of memory that starts on a cache line
    void deallocate(T* p, std::size_t) {
        r1::cache_aligned_deallocate(p);
    }

    //! Largest value for which method allocate might succeed.
    std::size_t max_size() const noexcept {
        return (~std::size_t(0) - r1::cache_line_size()) / sizeof(value_type);
    }

#if TBB_ALLOCATOR_TRAITS_BROKEN
    using pointer = value_type*;
    using const_pointer = const value_type*;
    using reference = value_type&;
    using const_reference = const value_type&;
    using difference_type = std::ptrdiff_t;
    using size_type = std::size_t;
    template<typename U> struct rebind {
        using other = cache_aligned_allocator<U>;
    };
    template<typename U, typename... Args>
    void construct(U *p, Args&&... args)
        { ::new (p) U(std::forward<Args>(args)...); }
    void destroy(pointer p) { p->~value_type(); }
    pointer address(reference x) const { return &x; }
    const_pointer address(const_reference x) const { return &x; }
#endif // TBB_ALLOCATOR_TRAITS_BROKEN
};

#if TBB_ALLOCATOR_TRAITS_BROKEN
    template<>
    class cache_aligned_allocator<void> {
    public:
        using pointer = void*;
        using const_pointer = const void*;
        using value_type = void;
        template<typename U> struct rebind {
            using other = cache_aligned_allocator<U>;
        };
    };
#endif

template<typename T, typename U>
bool operator==(const cache_aligned_allocator<T>&, const cache_aligned_allocator<U>&) noexcept { return true; }

#if !__TBB_CPP20_COMPARISONS_PRESENT
template<typename T, typename U>
bool operator!=(const cache_aligned_allocator<T>&, const cache_aligned_allocator<U>&) noexcept { return false; }
#endif
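
// Usage sketch: cache_aligned_allocator is a drop-in allocator for standard
// containers. It is stateless (is_always_equal), so all instances compare
// equal. The sketch below assumes the program links against the TBB runtime
// that implements r1::cache_aligned_allocate / r1::cache_aligned_deallocate.
//
//     #include "oneapi/tbb/cache_aligned_allocator.h"
//     #include <vector>
//
//     // The vector's storage starts on a cache-line boundary, which helps
//     // avoid false sharing with data touched by other threads.
//     std::vector<double, tbb::cache_aligned_allocator<double>> data;
//     data.resize(1024);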

#if __TBB_CPP17_MEMORY_RESOURCE_PRESENT

//! C++17 memory resource wrapper to ensure cache line size alignment
class cache_aligned_resource : public std::pmr::memory_resource {
public:
    cache_aligned_resource() : cache_aligned_resource(std::pmr::get_default_resource()) {}
    explicit cache_aligned_resource(std::pmr::memory_resource* upstream) : m_upstream(upstream) {}

    std::pmr::memory_resource* upstream_resource() const {
        return m_upstream;
    }

private:
    //! The upstream memory resource is unknown, so use padding to guarantee cache-line alignment
    void* do_allocate(std::size_t bytes, std::size_t alignment) override {
        // TODO: make it common with tbb_allocator.cpp
        std::size_t cache_line_alignment = correct_alignment(alignment);
        std::size_t space = correct_size(bytes) + cache_line_alignment;
        std::uintptr_t base = reinterpret_cast<std::uintptr_t>(m_upstream->allocate(space));
        __TBB_ASSERT(base != 0, "Upstream resource returned nullptr.");

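        // Layout sketch: the upstream block is [base, base + space). 'result'
        // below is 'base' rounded up to the next multiple of
        // cache_line_alignment; the padding in between holds a copy of 'base'
        // (written at result[-1]) so do_deallocate() can return the original
        // pointer to the upstream resource.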
        // Round up to the next cache line (align the base address)
        std::uintptr_t result = (base + cache_line_alignment) & ~(cache_line_alignment - 1);
        __TBB_ASSERT((result - base) >= sizeof(std::uintptr_t), "Can't store the base pointer in the header");
        __TBB_ASSERT(space - (result - base) >= bytes, "Not enough space for the storage");

        // Record where block actually starts.
        (reinterpret_cast<std::uintptr_t*>(result))[-1] = base;
        return reinterpret_cast<void*>(result);
    }

    void do_deallocate(void* ptr, std::size_t bytes, std::size_t alignment) override {
        if (ptr) {
            // Recover where block actually starts
            std::uintptr_t base = (reinterpret_cast<std::uintptr_t*>(ptr))[-1];
            m_upstream->deallocate(reinterpret_cast<void*>(base), correct_size(bytes) + correct_alignment(alignment));
        }
    }

    bool do_is_equal(const std::pmr::memory_resource& other) const noexcept override {
        if (this == &other) { return true; }
#if __TBB_USE_OPTIONAL_RTTI
        const cache_aligned_resource* other_res = dynamic_cast<const cache_aligned_resource*>(&other);
        return other_res && (upstream_resource() == other_res->upstream_resource());
#else
        return false;
#endif
    }

    std::size_t correct_alignment(std::size_t alignment) {
        __TBB_ASSERT(tbb::detail::is_power_of_two(alignment), "Alignment is not a power of 2");
#if __TBB_CPP17_HW_INTERFERENCE_SIZE_PRESENT
        std::size_t cache_line_size = std::hardware_destructive_interference_size;
#else
        std::size_t cache_line_size = r1::cache_line_size();
#endif
        return alignment < cache_line_size ? cache_line_size : alignment;
    }

    std::size_t correct_size(std::size_t bytes) {
        // Handle the case when a small size is requested: there might not be
        // enough space to store the original pointer.
        return bytes < sizeof(std::uintptr_t) ? sizeof(std::uintptr_t) : bytes;
    }

    std::pmr::memory_resource* m_upstream;
};
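
// Usage sketch: cache_aligned_resource wraps any upstream memory resource and
// pads each allocation so it starts on a cache line. A minimal example,
// assuming a C++17 <memory_resource> implementation and the TBB runtime:
//
//     #include "oneapi/tbb/cache_aligned_allocator.h"
//     #include <memory_resource>
//     #include <vector>
//
//     tbb::cache_aligned_resource aligned(std::pmr::new_delete_resource());
//     std::pmr::vector<int> values(&aligned);   // storage is cache-line aligned
//     values.resize(256);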

#endif // __TBB_CPP17_MEMORY_RESOURCE_PRESENT

} // namespace d1
} // namespace detail

inline namespace v1 {
using detail::d1::cache_aligned_allocator;
#if __TBB_CPP17_MEMORY_RESOURCE_PRESENT
using detail::d1::cache_aligned_resource;
#endif
} // namespace v1
} // namespace tbb

#endif /* __TBB_cache_aligned_allocator_H */