Back to home page

EIC code displayed by LXR

 
 

    


File indexing completed on 2025-01-31 10:12:24

0001 // Protocol Buffers - Google's data interchange format
0002 // Copyright 2008 Google Inc.  All rights reserved.
0003 //
0004 // Use of this source code is governed by a BSD-style
0005 // license that can be found in the LICENSE file or at
0006 // https://developers.google.com/open-source/licenses/bsd
0007 
0008 // A common header that is included across all protobuf headers.  We do our best
0009 // to avoid #defining any macros here; instead we generally put macros in
0010 // port_def.inc and port_undef.inc so they are not visible from outside of
0011 // protobuf.
0012 
0013 #ifndef GOOGLE_PROTOBUF_PORT_H__
0014 #define GOOGLE_PROTOBUF_PORT_H__
0015 
0016 #include <atomic>
0017 #include <cassert>
0018 #include <cstddef>
0019 #include <cstdint>
0020 #include <new>
0021 #include <string>
0022 #include <type_traits>
0023 #include <typeinfo>
0024 
0025 
0026 #include "absl/base/config.h"
0027 #include "absl/base/prefetch.h"
0028 #include "absl/meta/type_traits.h"
0029 #include "absl/strings/string_view.h"
0030 #include "absl/types/optional.h"
0031 
0032 // must be last
0033 #include "google/protobuf/port_def.inc"
0034 
0035 
0036 namespace google {
0037 namespace protobuf {
0038 
0039 class MessageLite;
0040 
0041 namespace internal {
0042 
// Forces the compiler/linker to keep a live reference to `var`'s symbol
// without emitting an actual memory access at runtime.
template <typename T>
inline PROTOBUF_ALWAYS_INLINE void StrongPointer(T* var) {
#if defined(__GNUC__)
  // Empty asm that claims to consume `var` in a register: the address must
  // be materialized, but no instructions are generated for the asm itself.
  asm("" : : "r"(var));
#else
  // Portable fallback: a volatile local prevents the compiler from
  // optimizing the reference away.
  auto volatile unused = var;
  (void)&unused;  // Use address to avoid an extra load of "unused".
#endif
}
0052 
// Compile-time variants of StrongPointer: the pointer is a non-type template
// parameter, so on supported toolchains we can emit only a relocation.
#if defined(__x86_64__) && defined(__linux__) && !defined(__APPLE__) && \
    !defined(__ANDROID__) && defined(__clang__) && __clang_major__ >= 19
// Optimized implementation for clang where we can generate a relocation without
// adding runtime instructions.
template <typename T, T ptr>
inline PROTOBUF_ALWAYS_INLINE void StrongPointer() {
  // This injects a relocation in the code path without having to run code, but
  // we can only do it with a newer clang.
  asm(".reloc ., BFD_RELOC_NONE, %p0" ::"Ws"(ptr));
}

// Emits a strong reference to the type `T` via the pointer returned by
// T::GetStrongPointerForType<T>().
template <typename T>
inline PROTOBUF_ALWAYS_INLINE void StrongReferenceToType() {
  static constexpr auto ptr = T::template GetStrongPointerForType<T>();
  // This is identical to the implementation of StrongPointer() above, but it
  // has to be explicitly inlined here or else Clang 19 will raise an error in
  // some configurations.
  asm(".reloc ., BFD_RELOC_NONE, %p0" ::"Ws"(ptr));
}
#else   // .reloc
// Portable fallback. It usually generates a single LEA instruction or
// equivalent.
template <typename T, T ptr>
inline PROTOBUF_ALWAYS_INLINE void StrongPointer() {
  StrongPointer(ptr);
}

template <typename T>
inline PROTOBUF_ALWAYS_INLINE void StrongReferenceToType() {
  return StrongPointer(T::template GetStrongPointerForType<T>());
}
#endif  // .reloc
0085 
0086 
// See comments on `AllocateAtLeast` for information on size returning new.
// Pairs a heap pointer with the usable size of the block, which may be
// larger than the size originally requested.
struct SizedPtr {
  void* p;   // Start of the allocation.
  size_t n;  // Usable size of the allocation, in bytes.
};
0092 
// Debug hook allowing setting up test scenarios for AllocateAtLeast usage.
// Receives the requested size and the user-supplied context pointer.
using AllocateAtLeastHookFn = SizedPtr (*)(size_t, void*);

// `AllocAtLeastHook` API
// Whether hook support is compiled into this build (see #if below).
constexpr bool HaveAllocateAtLeastHook();
// Installs `fn` as the current thread's hook; no-op when hooks are
// compiled out.
void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context = nullptr);
0099 
// Hooks exist only in debug builds on toolchains with thread_local and
// C++17 inline variables.
#if !defined(NDEBUG) && defined(ABSL_HAVE_THREAD_LOCAL) && \
    defined(__cpp_inline_variables)

// Hook data for current thread. These vars must not be accessed directly, use
// the 'HaveAllocateAtLeastHook()` and `SetAllocateAtLeastHook()` API instead.
inline thread_local AllocateAtLeastHookFn allocate_at_least_hook = nullptr;
inline thread_local void* allocate_at_least_hook_context = nullptr;

constexpr bool HaveAllocateAtLeastHook() { return true; }
inline void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context) {
  allocate_at_least_hook = fn;
  allocate_at_least_hook_context = context;
}

#else  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables

// Hooks compiled out: installation is a silent no-op.
constexpr bool HaveAllocateAtLeastHook() { return false; }
inline void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context) {}

#endif  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables
0120 
// Allocates at least `size` bytes. This function follows the c++ language
// proposal from D0901R10 (http://wg21.link/D0901R10) and will be implemented
// in terms of the new operator new semantics when available. The allocated
// memory should be released by a call to `SizedDelete` or `::operator delete`.
inline SizedPtr AllocateAtLeast(size_t size) {
#if !defined(NDEBUG) && defined(ABSL_HAVE_THREAD_LOCAL) && \
    defined(__cpp_inline_variables)
  // Debug builds may redirect allocation through a per-thread test hook.
  if (allocate_at_least_hook != nullptr) {
    return allocate_at_least_hook(size, allocate_at_least_hook_context);
  }
#endif  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables
  // Default: plain operator new; report exactly the requested size.
  return {::operator new(size), size};
}
0134 
// Releases a block obtained from `AllocateAtLeast`/`::operator new`.
// Uses the sized global deallocation function when the compiler provides
// it, which lets the allocator skip a size lookup; otherwise falls back to
// the unsized form.
inline void SizedDelete(void* p, size_t size) {
#if defined(__cpp_sized_deallocation)
  ::operator delete(p, size);
#else
  static_cast<void>(size);  // Silence -Wunused-parameter.
  ::operator delete(p);
#endif
}
// Array counterpart of `SizedDelete`: releases memory obtained from
// `::operator new[]`, forwarding the size when sized deallocation is
// available.
inline void SizedArrayDelete(void* p, size_t size) {
#if defined(__cpp_sized_deallocation)
  ::operator delete[](p, size);
#else
  static_cast<void>(size);  // Silence -Wunused-parameter.
  ::operator delete[](p);
#endif
}
0153 
// Tag type used to invoke the constinit constructor overload of classes
// such as ArenaStringPtr and MapFieldBase. Such constructors are internal
// implementation details of the library.
// `explicit` prevents accidental selection via `{}` aggregate syntax.
struct ConstantInitialized {
  explicit ConstantInitialized() = default;
};
0160 
// Tag type used to invoke the arena constructor overload of classes such
// as ExtensionSet and MapFieldLite in aggregate initialization. These
// classes typically don't have move/copy constructors, which rules out
// explicit initialization in pre-C++17.
struct ArenaInitialized {
  explicit ArenaInitialized() = default;
};
0168 
// Debug-validates that `from` may be down-cast to `To*`:
//  - compile time: `To` must derive from `From`;
//  - run time (RTTI builds only): a non-null `from` must actually point to
//    a `To`.
template <typename To, typename From>
void AssertDownCast(From* from) {
  static_assert(std::is_base_of<From, To>::value, "illegal DownCast");

#if defined(__cpp_concepts)
  // Check that this function is not used to downcast message types.
  // For those we should use {Down,Dynamic}CastTo{Message,Generated}.
  static_assert(!requires {
    std::derived_from<std::remove_pointer_t<To>,
                      typename std::remove_pointer_t<To>::MessageLite>;
  });
#endif

#if PROTOBUF_RTTI
  // RTTI: debug mode only!
  assert(from == nullptr || dynamic_cast<To*>(from) != nullptr);
#endif
}
0187 
// Checked static down-cast for pointers: asserts validity in debug/RTTI
// builds, then performs a plain static_cast. `To` is the pointer type.
template <typename To, typename From>
inline To DownCast(From* f) {
  AssertDownCast<std::remove_pointer_t<To>>(f);
  return static_cast<To>(f);
}
0193 
// Reference overload of `DownCast`: same checked static down-cast, with
// `ToRef` being the reference type to cast to.
template <typename ToRef, typename From>
inline ToRef DownCast(From& f) {
  AssertDownCast<std::remove_reference_t<ToRef>>(&f);
  return static_cast<ToRef>(f);
}
0199 
// Looks up the name of `T` via RTTI, if RTTI is available.
// The returned string is the implementation-defined (possibly mangled)
// result of `typeid(T).name()`; returns nullopt in non-RTTI builds.
template <typename T>
inline absl::optional<absl::string_view> RttiTypeName() {
#if PROTOBUF_RTTI
  return typeid(T).name();
#else
  return absl::nullopt;
#endif
}
0209 
// Helpers for identifying our supported types.

// True for the integral types protobuf fields may use:
// (u)int32, (u)int64, and bool.
template <typename T>
struct is_supported_integral_type
    : absl::disjunction<std::is_same<T, int32_t>, std::is_same<T, uint32_t>,
                        std::is_same<T, int64_t>, std::is_same<T, uint64_t>,
                        std::is_same<T, bool>> {};
0216 
// True for the floating-point types protobuf fields may use: float, double.
template <typename T>
struct is_supported_floating_point_type
    : absl::disjunction<std::is_same<T, float>, std::is_same<T, double>> {};
0220 
// True for the string representations protobuf fields may use (currently
// only std::string).
template <typename T>
struct is_supported_string_type
    : absl::disjunction<std::is_same<T, std::string>> {};
0224 
// Union of the integral, floating-point and string traits above: any
// non-message scalar field type.
template <typename T>
struct is_supported_scalar_type
    : absl::disjunction<is_supported_integral_type<T>,
                        is_supported_floating_point_type<T>,
                        is_supported_string_type<T>> {};
0230 
// True when `T` derives from MessageLite. The sizeof() member forces `T`
// to be a complete type at the point of instantiation, so a forward
// declaration is rejected at compile time.
template <typename T>
struct is_supported_message_type
    : absl::disjunction<std::is_base_of<MessageLite, T>> {
  static constexpr auto force_complete_type = sizeof(T);
};
0236 
// To prevent sharing cache lines between threads. With C++17 aligned
// operator new we can over-align to a typical cache line; otherwise the
// strictest fundamental alignment is the best we can portably request.
#ifdef __cpp_aligned_new
enum { kCacheAlignment = 64 };
#else
// NOTE: <cstddef> only guarantees `max_align_t` in namespace std; the
// unqualified spelling relied on <stddef.h> leaking it into the global
// namespace, so qualify it explicitly.
enum { kCacheAlignment = alignof(std::max_align_t) };  // do the best we can
#endif
0243 
// The maximum byte alignment we support for message objects.
enum { kMaxMessageAlignment = 8 };
0246 
// Returns true if debug string hardening is required, i.e. when the build
// defines PROTOBUF_FORCE_COPY_DEFAULT_STRING.
inline constexpr bool DebugHardenStringValues() {
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  return true;
#else
  return false;
#endif
}
0255 
// Returns true if debug hardening for clearing oneof message on arenas is
// enabled. Enabled in all non-NDEBUG (debug) builds.
inline constexpr bool DebugHardenClearOneofMessageOnArena() {
#ifdef NDEBUG
  return false;
#else
  return true;
#endif
}
0265 
// True when pointers are aligned to at least 8 bytes, meaning the three
// least-significant bits of any pointer value are guaranteed to be zero
// and are therefore available for tag bits.
inline constexpr bool PtrIsAtLeast8BAligned() {
  constexpr size_t kPointerAlignment = alignof(void*);
  return kPointerAlignment >= 8;
}
0269 
// Prefetch 5 64-byte cache lines starting from 7 cache-lines ahead.
// Constants are somewhat arbitrary and pretty aggressive, but were
// chosen to give better benchmark results. E.g. this is ~20%
// faster, single cache line prefetch is ~12% faster, increasing
// decreasing distance makes results 2-4% worse. Important note,
// prefetch doesn't require a valid address, so it is ok to prefetch
// past the end of message/valid memory, however we are doing this
// inside inline asm block, since computing the invalid pointer
// is a potential UB. Only insert prefetch once per function.
inline PROTOBUF_ALWAYS_INLINE void Prefetch5LinesFrom7Lines(const void* ptr) {
  // Offsets 448..704 cover cache lines 7 through 11 beyond `ptr`.
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 448);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 512);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 576);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 640);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 704);
}
0286 
// Marks code paths that must never execute. In optimized builds this is a
// pure optimizer hint; in debug builds it reports the failure (with file
// and line where the compiler can supply them).
#if defined(NDEBUG) && ABSL_HAVE_BUILTIN(__builtin_unreachable)
// Release build: undefined behavior if reached; lets the optimizer drop
// the path entirely.
[[noreturn]] ABSL_ATTRIBUTE_COLD PROTOBUF_ALWAYS_INLINE inline void
Unreachable() {
  __builtin_unreachable();
}
#elif ABSL_HAVE_BUILTIN(__builtin_FILE) && ABSL_HAVE_BUILTIN(__builtin_LINE)
// Debug build with caller-location builtins: report the caller's
// file/line.
[[noreturn]] ABSL_ATTRIBUTE_COLD inline void Unreachable(
    const char* file = __builtin_FILE(), int line = __builtin_LINE()) {
  protobuf_assumption_failed("Unreachable", file, line);
}
#else
// Debug build without location builtins: report without file/line info.
[[noreturn]] ABSL_ATTRIBUTE_COLD inline void Unreachable() {
  protobuf_assumption_failed("Unreachable", "", 0);
}
#endif
0302 
#ifdef PROTOBUF_TSAN
// TODO: it would be preferable to use __tsan_external_read/
// __tsan_external_write, but they can cause dlopen issues.
//
// Performs a byte read of `impl->_tsan_detect_race` so ThreadSanitizer
// observes a read on the object; the asm keeps the load from being
// optimized away.
template <typename T>
inline PROTOBUF_ALWAYS_INLINE void TSanRead(const T* impl) {
  char protobuf_tsan_dummy =
      *reinterpret_cast<const char*>(&impl->_tsan_detect_race);
  asm volatile("" : "+r"(protobuf_tsan_dummy));
}

// We currently use a dedicated member for TSan checking so the value of this
// member is not important. We can unconditionally write to it without affecting
// correctness of the rest of the class.
template <typename T>
inline PROTOBUF_ALWAYS_INLINE void TSanWrite(T* impl) {
  *reinterpret_cast<char*>(&impl->_tsan_detect_race) = 0;
}
#else
// Non-TSAN builds: both checks compile to nothing.
inline PROTOBUF_ALWAYS_INLINE void TSanRead(const void*) {}
inline PROTOBUF_ALWAYS_INLINE void TSanWrite(const void*) {}
#endif
0324 
// This trampoline allows calling from codegen without needing a #include to
// absl. It simplifies IWYU and deps. Simply forwards to
// absl::PrefetchToLocalCache.
inline void PrefetchToLocalCache(const void* ptr) {
  absl::PrefetchToLocalCache(ptr);
}
0330 
// Identifies this as the open-source build of the library (always true
// in this source tree).
constexpr bool IsOss() { return true; }
0332 
// Counter library for debugging internal protobuf logic.
// It allows instrumenting code that has different options (eg fast vs slow
// path) to get visibility into how much we are hitting each path.
// When compiled with -DPROTOBUF_INTERNAL_ENABLE_DEBUG_COUNTERS, the counters
// register an atexit handler to dump the table. Otherwise, they are a noop and
// have no runtime cost.
//
// Usage:
//
// if (do_fast) {
//   PROTOBUF_DEBUG_COUNTER("Foo.Fast").Inc();
//   ...
// } else {
//   PROTOBUF_DEBUG_COUNTER("Foo.Slow").Inc();
//   ...
// }
class PROTOBUF_EXPORT RealDebugCounter {
 public:
  explicit RealDebugCounter(absl::string_view name) { Register(name); }
  // Lossy increment: load + store (not an atomic RMW), so concurrent
  // increments may be dropped. Acceptable for debug statistics.
  void Inc() { counter_.store(value() + 1, std::memory_order_relaxed); }
  size_t value() const { return counter_.load(std::memory_order_relaxed); }

 private:
  // Registers this counter under `name` for the atexit dump table
  // (defined out of line).
  void Register(absl::string_view name);
  std::atomic<size_t> counter_{};
};
0360 
// When the feature is not enabled, the type is a noop: same call surface
// as RealDebugCounter::Inc(), but compiles to nothing.
class NoopDebugCounter {
 public:
  explicit constexpr NoopDebugCounter() = default;
  constexpr void Inc() {}
};
0367 
0368 }  // namespace internal
0369 }  // namespace protobuf
0370 }  // namespace google
0371 
0372 #include "google/protobuf/port_undef.inc"
0373 
0374 #endif  // GOOGLE_PROTOBUF_PORT_H__