File indexing completed on 2025-01-31 10:12:24
0001
0002
0003
0004
0005
0006
0007
0008 #ifndef GOOGLE_PROTOBUF_PARSE_CONTEXT_H__
0009 #define GOOGLE_PROTOBUF_PARSE_CONTEXT_H__
0010
#include <climits>
#include <cstdint>
#include <cstring>
#include <limits>
#include <string>
#include <type_traits>
#include <utility>

#include "absl/base/config.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/strings/cord.h"
#include "absl/strings/internal/resize_uninitialized.h"
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "google/protobuf/arena.h"
#include "google/protobuf/arenastring.h"
#include "google/protobuf/endian.h"
#include "google/protobuf/inlined_string_field.h"
#include "google/protobuf/io/coded_stream.h"
#include "google/protobuf/io/zero_copy_stream.h"
#include "google/protobuf/metadata_lite.h"
#include "google/protobuf/port.h"
#include "google/protobuf/repeated_field.h"
#include "google/protobuf/wire_format_lite.h"
0034
0035
0036
0037 #include "google/protobuf/port_def.inc"
0038
0039
0040 namespace google {
0041 namespace protobuf {
0042
0043 class UnknownFieldSet;
0044 class DescriptorPool;
0045 class MessageFactory;
0046
0047 namespace internal {
0048
0049
// Helpers for recording an unparsed field into an "unknown field" container:
// either a std::string holding raw wire bytes, or an UnknownFieldSet (the
// inline overloads are defined where UnknownFieldSet is complete).
PROTOBUF_EXPORT void WriteVarint(uint32_t num, uint64_t val, std::string* s);
PROTOBUF_EXPORT void WriteLengthDelimited(uint32_t num, absl::string_view val,
                                          std::string* s);

inline void WriteVarint(uint32_t num, uint64_t val, UnknownFieldSet* s);
inline void WriteLengthDelimited(uint32_t num, absl::string_view val,
                                 UnknownFieldSet* s);
0057
0058
0059
0060
0061
0062
0063
0064
0065
0066
0067
0068
0069
0070
0071
0072
0073
0074
0075
0076
0077
0078
0079
0080
0081
0082
0083
0084
0085
0086
0087
0088
0089
0090
0091
0092
0093
0094
0095
// EpsCopyInputStream is the buffering layer underneath protobuf parsing.  It
// presents the input as chunks that are always readable for at least
// kSlopBytes past `buffer_end_`, so the hot parsing loops can load small
// fixed-size quantities (tags, varints, fixed32/64) without per-byte bounds
// checks.  Near a chunk boundary the remaining bytes are copied into
// `patch_buffer_` together with the start of the next chunk to preserve that
// guarantee ("eps copy" = only epsilon bytes are ever copied).
class PROTOBUF_EXPORT EpsCopyInputStream {
 public:
  // Cord payloads up to this size are copied inline by ReadCord; larger ones
  // go through ReadCordFallback.
  enum { kMaxCordBytesToCopy = 512 };
  explicit EpsCopyInputStream(bool enable_aliasing)
      : aliasing_(enable_aliasing ? kOnPatch : kNoAliasing) {}

  // Returns all bytes from `ptr` onward to the underlying stream, so a
  // subsequent reader observes them again.
  void BackUp(const char* ptr) {
    ABSL_DCHECK(ptr <= buffer_end_ + kSlopBytes);
    int count;
    if (next_chunk_ == patch_buffer_) {
      // Currently reading out of (or just before) the patched region: only
      // the bytes past `ptr` up to the end of the slop belong to the stream.
      count = static_cast<int>(buffer_end_ + kSlopBytes - ptr);
    } else {
      // The next chunk (size_ bytes) was already obtained from the stream
      // and is entirely unread, in addition to what is left in this chunk.
      count = size_ + static_cast<int>(buffer_end_ - ptr);
    }
    if (count > 0) StreamBackUp(count);
  }

  // Move-only wrapper for the int delta returned by PushLimit.  Under
  // sanitizers the stored value is poisoned whenever it is logically absent,
  // so double-consumption or use of an empty token is caught.
  class LimitToken {
   public:
    LimitToken() { PROTOBUF_POISON_MEMORY_REGION(&token_, sizeof(token_)); }

    explicit LimitToken(int token) : token_(token) {
      PROTOBUF_UNPOISON_MEMORY_REGION(&token_, sizeof(token_));
    }

    LimitToken(const LimitToken&) = delete;
    LimitToken& operator=(const LimitToken&) = delete;

    LimitToken(LimitToken&& other) { *this = std::move(other); }

    LimitToken& operator=(LimitToken&& other) {
      PROTOBUF_UNPOISON_MEMORY_REGION(&token_, sizeof(token_));
      token_ = other.token_;
      PROTOBUF_POISON_MEMORY_REGION(&other.token_, sizeof(token_));
      return *this;
    }

    ~LimitToken() { PROTOBUF_UNPOISON_MEMORY_REGION(&token_, sizeof(token_)); }

    // Consumes the token (rvalue-qualified: one-shot) and re-poisons it.
    int token() && {
      int t = token_;
      PROTOBUF_POISON_MEMORY_REGION(&token_, sizeof(token_));
      return t;
    }

   private:
    int token_;
  };

  // Installs a new parse limit `limit` bytes beyond `ptr` (the body of a
  // length-delimited field).  Returns the delta to the previous limit, which
  // must be handed back to PopLimit when the region ends.
  PROTOBUF_NODISCARD LimitToken PushLimit(const char* ptr, int limit) {
    ABSL_DCHECK(limit >= 0 && limit <= INT_MAX - kSlopBytes);
    // limit_ is stored relative to buffer_end_; rebase the caller's
    // ptr-relative limit accordingly.
    limit += static_cast<int>(ptr - buffer_end_);
    limit_end_ = buffer_end_ + (std::min)(0, limit);
    auto old_limit = limit_;
    limit_ = limit;
    return LimitToken(old_limit - limit);
  }

  // Restores the limit saved by the matching PushLimit.  Returns false if the
  // nested parse terminated for any reason other than exactly reaching the
  // pushed limit (e.g. a stray end-group tag).
  PROTOBUF_NODISCARD bool PopLimit(LimitToken delta) {
    limit_ = limit_ + std::move(delta).token();
    if (PROTOBUF_PREDICT_FALSE(!EndedAtLimit())) return false;
    // Recompute the effective end-of-readable-data pointer for the restored
    // limit.
    limit_end_ = buffer_end_ + (std::min)(0, limit_);
    return true;
  }

  // Skips `size` bytes; fast path when they are entirely inside the current
  // chunk (incl. slop), fallback otherwise.  Returns nullptr on failure.
  PROTOBUF_NODISCARD const char* Skip(const char* ptr, int size) {
    if (size <= buffer_end_ + kSlopBytes - ptr) {
      return ptr + size;
    }
    return SkipFallback(ptr, size);
  }
  // Reads `size` bytes into *s (replacing its contents).
  PROTOBUF_NODISCARD const char* ReadString(const char* ptr, int size,
                                            std::string* s) {
    if (size <= buffer_end_ + kSlopBytes - ptr) {
      // Resize without zero-initialization: the bytes are overwritten by the
      // memcpy immediately below.
      absl::strings_internal::STLStringResizeUninitialized(s, size);
      char* z = &(*s)[0];
      memcpy(z, ptr, size);
      return ptr + size;
    }
    return ReadStringFallback(ptr, size, s);
  }
  // Appends `size` bytes to *s.
  PROTOBUF_NODISCARD const char* AppendString(const char* ptr, int size,
                                              std::string* s) {
    if (size <= buffer_end_ + kSlopBytes - ptr) {
      s->append(ptr, size);
      return ptr + size;
    }
    return AppendStringFallback(ptr, size, s);
  }

  // Reads a length-prefixed string into an ArenaStringPtr (defined out of
  // line).
  PROTOBUF_NODISCARD const char* ReadArenaString(const char* ptr,
                                                 ArenaStringPtr* s,
                                                 Arena* arena);

  // Reads `size` bytes into *cord.  Small payloads that fit in the current
  // chunk are copied; everything else goes through the fallback (which may
  // share memory with the input).
  PROTOBUF_NODISCARD const char* ReadCord(const char* ptr, int size,
                                          ::absl::Cord* cord) {
    if (size <= std::min<int>(static_cast<int>(buffer_end_ + kSlopBytes - ptr),
                              kMaxCordBytesToCopy)) {
      *cord = absl::string_view(ptr, size);
      return ptr + size;
    }
    return ReadCordFallback(ptr, size, cord);
  }

  // Reads a run of fixed-size values that all carry `expected_tag`.
  template <typename Tag, typename T>
  PROTOBUF_NODISCARD const char* ReadRepeatedFixed(const char* ptr,
                                                   Tag expected_tag,
                                                   RepeatedField<T>* out);

  // Reads a packed run of `size` bytes of fixed-size values.
  template <typename T>
  PROTOBUF_NODISCARD const char* ReadPackedFixed(const char* ptr, int size,
                                                 RepeatedField<T>* out);
  // Reads a packed run of varints, invoking add(value) per element.
  template <typename Add>
  PROTOBUF_NODISCARD const char* ReadPackedVarint(const char* ptr, Add add) {
    return ReadPackedVarint(ptr, add, [](int) {});
  }
  // As above; size_callback is invoked with the byte length of the run.
  template <typename Add, typename SizeCb>
  PROTOBUF_NODISCARD const char* ReadPackedVarint(const char* ptr, Add add,
                                                  SizeCb size_callback);

  // The tag that terminated the last parse loop.  Stored off by one so that 0
  // means "ended on a limit" and 1 means "ended at end of stream"; an
  // end-group tag (start_tag + 1 on the wire) is stored as exactly start_tag.
  uint32_t LastTag() const { return last_tag_minus_1_ + 1; }
  // Returns true (and clears the state) iff the loop ended on the end-group
  // tag matching `start_tag`.
  bool ConsumeEndGroup(uint32_t start_tag) {
    bool res = last_tag_minus_1_ == start_tag;
    last_tag_minus_1_ = 0;
    return res;
  }
  bool EndedAtLimit() const { return last_tag_minus_1_ == 0; }
  bool EndedAtEndOfStream() const { return last_tag_minus_1_ == 1; }
  void SetLastTag(uint32_t tag) { last_tag_minus_1_ = tag - 1; }
  void SetEndOfStream() { last_tag_minus_1_ = 1; }
  // True when `ptr` has run past the data permitted by the current limit.
  bool IsExceedingLimit(const char* ptr) {
    return ptr > limit_end_ &&
           (next_chunk_ == nullptr || ptr - buffer_end_ > limit_);
  }
  bool AliasingEnabled() const { return aliasing_ != kNoAliasing; }
  int BytesUntilLimit(const char* ptr) const {
    return limit_ + static_cast<int>(buffer_end_ - ptr);
  }
  // Maximum number of sequential bytes parsable from `ptr` without another
  // Done/fallback check (includes the slop region).
  int MaximumReadSize(const char* ptr) const {
    return static_cast<int>(limit_end_ - ptr) + kSlopBytes;
  }

  // True if at least one byte is available before the current limit.
  bool DataAvailable(const char* ptr) { return ptr < limit_end_; }

 protected:
  // Returns true when the parse loop must stop (limit or end of stream).  On
  // an error, *ptr is set to nullptr.  `d` is the group depth bookkeeping
  // value (see ParseContext::group_depth_).
  bool DoneWithCheck(const char** ptr, int d) {
    ABSL_DCHECK(*ptr);
    if (PROTOBUF_PREDICT_TRUE(*ptr < limit_end_)) return false;
    int overrun = static_cast<int>(*ptr - buffer_end_);
    ABSL_DCHECK_LE(overrun, kSlopBytes);  // parsers must stay within the slop
    if (overrun == limit_) {
      // Ended exactly on the active limit; no buffer flip needed.  If we also
      // ran into the slop with no further chunk, the position is not a real
      // stream position, so signal an error through *ptr.
      if (overrun > 0 && next_chunk_ == nullptr) *ptr = nullptr;
      return true;
    }
    auto res = DoneFallback(overrun, d);
    *ptr = res.first;
    return res.second;
  }

  // Initializes the stream over a flat array.  Large inputs parse in place
  // with the final kSlopBytes served from patch_buffer_; small inputs are
  // copied wholly into patch_buffer_.
  const char* InitFrom(absl::string_view flat) {
    overall_limit_ = 0;
    if (flat.size() > kSlopBytes) {
      limit_ = kSlopBytes;
      limit_end_ = buffer_end_ = flat.data() + flat.size() - kSlopBytes;
      next_chunk_ = patch_buffer_;
      if (aliasing_ == kOnPatch) aliasing_ = kNoDelta;
      return flat.data();
    } else {
      if (!flat.empty()) {
        std::memcpy(patch_buffer_, flat.data(), flat.size());
      }
      limit_ = 0;
      limit_end_ = buffer_end_ = patch_buffer_ + flat.size();
      next_chunk_ = nullptr;
      if (aliasing_ == kOnPatch) {
        // Record the offset from the patch buffer back to the original data
        // so aliased views can be rebased onto the caller's buffer.
        aliasing_ = reinterpret_cast<std::uintptr_t>(flat.data()) -
                    reinterpret_cast<std::uintptr_t>(patch_buffer_);
      }
      return patch_buffer_;
    }
  }

  const char* InitFrom(io::ZeroCopyInputStream* zcis);

  // As above but reads at most `limit` bytes from the stream (-1 = no limit).
  const char* InitFrom(io::ZeroCopyInputStream* zcis, int limit) {
    if (limit == -1) return InitFrom(zcis);
    overall_limit_ = limit;
    auto res = InitFrom(zcis);
    limit_ = limit - static_cast<int>(buffer_end_ - res);
    limit_end_ = buffer_end_ + (std::min)(0, limit_);
    return res;
  }

 private:
  // Parsers may read up to kSlopBytes beyond buffer_end_; the patch buffer
  // must fit the previous chunk's slop plus the next chunk's first slop.
  enum { kSlopBytes = 16, kPatchBufferSize = 32 };
  static_assert(kPatchBufferSize >= kSlopBytes * 2,
                "Patch buffer needs to be at least large enough to hold all "
                "the slop bytes from the previous buffer, plus the first "
                "kSlopBytes from the next buffer.");

  const char* limit_end_;   // == buffer_end_ + min(limit_, 0)
  const char* buffer_end_;  // end of current chunk, excluding the slop region
  const char* next_chunk_;  // next chunk (patch_buffer_ when patched); null at end
  int size_;                // size of the next chunk obtained from the stream
  int limit_;               // active limit, relative to buffer_end_
  io::ZeroCopyInputStream* zcis_ = nullptr;
  char patch_buffer_[kPatchBufferSize] = {};
  // kNoAliasing: never alias input memory.  kOnPatch: aliasing requested.
  // kNoDelta / any other value: alias directly; the value is the delta from
  // the patch buffer back to the original flat buffer (see InitFrom).
  enum { kNoAliasing = 0, kOnPatch = 1, kNoDelta = 2 };
  std::uintptr_t aliasing_ = kNoAliasing;
  // See LastTag() for the off-by-one encoding.
  uint32_t last_tag_minus_1_ = 0;
  // Total bytes still permitted from the underlying stream, independent of
  // pushed parse limits (set by InitFrom(zcis, limit)).
  int overall_limit_ = INT_MAX;

  // Upper bound used by the string-reading fallbacks — presumably a sanity
  // cap against maliciously large length prefixes; the uses live in the .cc
  // (confirm there).
  enum { kSafeStringSize = 50000000 };

  // Out-of-line continuation of DoneWithCheck: flips buffers / applies limits
  // when the parse position crosses buffer_end_.  Returns the (possibly
  // patched) new position and whether the loop is done.
  std::pair<const char*, bool> DoneFallback(int overrun, int depth);

  // Advances to the next chunk.  Per its uses below, returns a pointer P such
  // that P + kSlopBytes is the continuation position, or nullptr at end of
  // stream.
  const char* Next();

  inline const char* NextBuffer(int overrun, int depth);
  const char* SkipFallback(const char* ptr, int size);
  const char* AppendStringFallback(const char* ptr, int size, std::string* str);
  const char* ReadStringFallback(const char* ptr, int size, std::string* str);
  const char* ReadCordFallback(const char* ptr, int size, absl::Cord* cord);
  // Heuristic used by the stream logic; defined out of line.
  static bool ParseEndsInSlopRegion(const char* begin, int overrun, int depth);
  // Pulls the next buffer from the ZeroCopyInputStream, maintaining
  // overall_limit_ accounting.
  bool StreamNext(const void** data) {
    bool res = zcis_->Next(data, &size_);
    if (res) overall_limit_ -= size_;
    return res;
  }
  void StreamBackUp(int count) {
    zcis_->BackUp(count);
    overall_limit_ += count;
  }

  // Streams exactly `size` bytes (spanning chunks) through append(ptr, n).
  // Returns the end position, or nullptr on end of stream / limit breach.
  template <typename A>
  const char* AppendSize(const char* ptr, int size, const A& append) {
    int chunk_size = static_cast<int>(buffer_end_ + kSlopBytes - ptr);
    do {
      ABSL_DCHECK(size > chunk_size);
      if (next_chunk_ == nullptr) return nullptr;
      append(ptr, chunk_size);
      ptr += chunk_size;
      size -= chunk_size;
      // A limit inside the slop region means the requested size cannot be
      // satisfied from this stream position.
      if (limit_ <= kSlopBytes) return nullptr;
      ptr = Next();
      if (ptr == nullptr) return nullptr;
      ptr += kSlopBytes;  // Next() returns the patched region's start
      chunk_size = static_cast<int>(buffer_end_ + kSlopBytes - ptr);
    } while (size > chunk_size);
    append(ptr, size);
    return ptr + size;
  }

  // Streams all remaining bytes up to the active limit through append.
  template <typename A>
  const char* AppendUntilEnd(const char* ptr, const A& append) {
    if (ptr - buffer_end_ > limit_) return nullptr;
    while (limit_ > kSlopBytes) {
      size_t chunk_size = buffer_end_ + kSlopBytes - ptr;
      append(ptr, chunk_size);
      ptr = Next();
      if (ptr == nullptr) return limit_end_;
      ptr += kSlopBytes;
    }
    auto end = buffer_end_ + limit_;
    ABSL_DCHECK(end >= ptr);
    append(ptr, end - ptr);
    return end;
  }

  PROTOBUF_NODISCARD const char* AppendString(const char* ptr,
                                              std::string* str) {
    return AppendUntilEnd(
        ptr, [str](const char* p, ptrdiff_t s) { str->append(p, s); });
  }
  friend class ImplicitWeakMessage;

  // ReadSizeFallback (defined out of line) needs access to the private limit
  // state.
  friend PROTOBUF_EXPORT std::pair<const char*, int32_t> ReadSizeFallback(
      const char* p, uint32_t res);
};
0437
// Function-pointer type used for eager verification of lazy fields (per its
// name; the concrete semantics are defined at the registration/use sites).
using LazyEagerVerifyFnType = const char* (*)(const char* ptr,
                                              ParseContext* ctx);
using LazyEagerVerifyFnRef = std::remove_pointer<LazyEagerVerifyFnType>::type&;
0441
0442
0443
0444
0445
// ParseContext layers parsing state on top of EpsCopyInputStream: the
// recursion-depth budget, group-nesting bookkeeping, and the descriptor
// pool / message factory used while parsing.
class PROTOBUF_EXPORT ParseContext : public EpsCopyInputStream {
 public:
  struct Data {
    const DescriptorPool* pool = nullptr;
    MessageFactory* factory = nullptr;
  };

  template <typename... T>
  ParseContext(int depth, bool aliasing, const char** start, T&&... args)
      : EpsCopyInputStream(aliasing), depth_(depth) {
    *start = InitFrom(std::forward<T>(args)...);
  }

  // Tag type selecting the "spawn" constructor below.
  struct Spawn {};
  static constexpr Spawn kSpawn = {};

  // Creates a context over a different input while inheriting the remaining
  // depth budget and Data from `ctx`.  Aliasing is disabled for the spawned
  // input — NOTE(review): presumably because its lifetime is independent of
  // the parent's buffers; confirm against callers.
  template <typename... T>
  ParseContext(Spawn, const ParseContext& ctx, const char** start, T&&... args)
      : EpsCopyInputStream(false),
        depth_(ctx.depth_),
        data_(ctx.data_)
  {
    *start = InitFrom(std::forward<T>(args)...);
  }

  // Non-copyable / non-movable: the base stream holds self-referential
  // pointers (patch buffer).
  ParseContext(ParseContext&&) = delete;
  ParseContext& operator=(ParseContext&&) = delete;
  ParseContext& operator=(const ParseContext&) = delete;

  // Enables detection of truncated groups: group_depth_ switches from
  // INT_MIN ("not tracking") to a running count of open groups.
  void TrackCorrectEnding() { group_depth_ = 0; }

  // Returns true when the parse loop should stop; see DoneWithCheck.
  bool Done(const char** ptr) { return DoneWithCheck(ptr, group_depth_); }

  int depth() const { return depth_; }

  Data& data() { return data_; }
  const Data& data() const { return data_; }

  const char* ParseMessage(MessageLite* msg, const char* ptr);

  // Reads a length prefix, pushes the resulting limit and depth, runs `func`
  // over the delimited region, then pops the limit.  Returns nullptr on
  // malformed input.
  template <typename Func>
  PROTOBUF_NODISCARD const char* ParseLengthDelimitedInlined(const char*,
                                                             const Func& func);

  // Runs `func` as the body of a group delimited by `start_tag`, maintaining
  // depth/group accounting and validating the end-group tag.
  template <typename Func>
  PROTOBUF_NODISCARD const char* ParseGroupInlined(const char* ptr,
                                                   uint32_t start_tag,
                                                   const Func& func);

  // Table-driven (tail-call parser) entry points.
  template <typename Parser = TcParser>
  PROTOBUF_ALWAYS_INLINE const char* ParseMessage(
      MessageLite* msg, const TcParseTableBase* tc_table, const char* ptr) {
    return ParseLengthDelimitedInlined(ptr, [&](const char* ptr) {
      return Parser::ParseLoop(msg, ptr, this, tc_table);
    });
  }
  template <typename Parser = TcParser>
  PROTOBUF_ALWAYS_INLINE const char* ParseGroup(
      MessageLite* msg, const TcParseTableBase* tc_table, const char* ptr,
      uint32_t start_tag) {
    return ParseGroupInlined(ptr, start_tag, [&](const char* ptr) {
      return Parser::ParseLoop(msg, ptr, this, tc_table);
    });
  }

  // Group parsing via the message's virtual _InternalParse.
  PROTOBUF_NODISCARD PROTOBUF_NDEBUG_INLINE const char* ParseGroup(
      MessageLite* msg, const char* ptr, uint32_t tag) {
    if (--depth_ < 0) return nullptr;  // recursion budget exhausted
    group_depth_++;
    auto old_depth = depth_;
    auto old_group_depth = group_depth_;
    ptr = msg->_InternalParse(ptr, this);
    if (ptr != nullptr) {
      // A successful nested parse must have restored both counters.
      ABSL_DCHECK_EQ(old_depth, depth_);
      ABSL_DCHECK_EQ(old_group_depth, group_depth_);
    }
    group_depth_--;
    depth_++;
    if (PROTOBUF_PREDICT_FALSE(!ConsumeEndGroup(tag))) return nullptr;
    return ptr;
  }

 private:
  // Reads a length prefix, pushes it as a limit (stored in *old_limit) and
  // decrements depth_.  Returns nullptr on error.  Out-of-line flavor.
  PROTOBUF_NODISCARD const char* ReadSizeAndPushLimitAndDepth(
      const char* ptr, LimitToken* old_limit);

  // Force-inlined flavor of the above, for the hot table-driven path.
  PROTOBUF_NODISCARD PROTOBUF_ALWAYS_INLINE const char*
  ReadSizeAndPushLimitAndDepthInlined(const char* ptr, LimitToken* old_limit);

  // Remaining recursion budget.
  int depth_;
  // Count of currently open groups once TrackCorrectEnding() was called;
  // INT_MIN otherwise (tracking disabled).
  int group_depth_ = INT_MIN;
  Data data_;
};
0572
0573 template <int>
0574 struct EndianHelper;
0575
0576 template <>
0577 struct EndianHelper<1> {
0578 static uint8_t Load(const void* p) { return *static_cast<const uint8_t*>(p); }
0579 };
0580
0581 template <>
0582 struct EndianHelper<2> {
0583 static uint16_t Load(const void* p) {
0584 uint16_t tmp;
0585 std::memcpy(&tmp, p, 2);
0586 return little_endian::ToHost(tmp);
0587 }
0588 };
0589
0590 template <>
0591 struct EndianHelper<4> {
0592 static uint32_t Load(const void* p) {
0593 uint32_t tmp;
0594 std::memcpy(&tmp, p, 4);
0595 return little_endian::ToHost(tmp);
0596 }
0597 };
0598
0599 template <>
0600 struct EndianHelper<8> {
0601 static uint64_t Load(const void* p) {
0602 uint64_t tmp;
0603 std::memcpy(&tmp, p, 8);
0604 return little_endian::ToHost(tmp);
0605 }
0606 };
0607
0608 template <typename T>
0609 T UnalignedLoad(const char* p) {
0610 auto tmp = EndianHelper<sizeof(T)>::Load(p);
0611 T res;
0612 memcpy(&res, &tmp, sizeof(T));
0613 return res;
0614 }
0615 template <typename T, typename Void,
0616 typename = std::enable_if_t<std::is_same<Void, void>::value>>
0617 T UnalignedLoad(const Void* p) {
0618 return UnalignedLoad<T>(reinterpret_cast<const char*>(p));
0619 }
0620
// Out-of-line slow paths for VarintParse (varints longer than one byte);
// `res` is the partially-accumulated first byte.  They return the advanced
// pointer (nullptr on malformed input) and the decoded value.
PROTOBUF_EXPORT
std::pair<const char*, uint32_t> VarintParseSlow32(const char* p, uint32_t res);
PROTOBUF_EXPORT
std::pair<const char*, uint64_t> VarintParseSlow64(const char* p, uint32_t res);
0625
0626 inline const char* VarintParseSlow(const char* p, uint32_t res, uint32_t* out) {
0627 auto tmp = VarintParseSlow32(p, res);
0628 *out = tmp.second;
0629 return tmp.first;
0630 }
0631
0632 inline const char* VarintParseSlow(const char* p, uint32_t res, uint64_t* out) {
0633 auto tmp = VarintParseSlow64(p, res);
0634 *out = tmp.second;
0635 return tmp.first;
0636 }
0637
0638 #ifdef __aarch64__
0639
0640
0641
0642
0643
0644
0645
0646
0647
0648
0649
0650
0651
0652
0653
0654
0655
0656
0657
0658
0659
0660
0661
0662
0663
0664
0665
0666
0667
0668
0669
0670
0671
0672
0673
0674
0675
0676
0677
0678
0679
0680
0681
0682
0683
0684
0685
0686
0687
0688
0689
0690
0691
0692
0693
0694
0695
0696
0697
0698
0699
0700
0701
0702
0703
0704
0705
0706
0707
0708
0709
0710
0711
0712
0713
0714
0715
0716
0717
0718
0719 template <typename V1Type>
0720 PROTOBUF_ALWAYS_INLINE inline V1Type ValueBarrier(V1Type value1) {
0721 asm("" : "+r"(value1));
0722 return value1;
0723 }
0724
0725 template <typename V1Type, typename V2Type>
0726 PROTOBUF_ALWAYS_INLINE inline V1Type ValueBarrier(V1Type value1,
0727 V2Type value2) {
0728 asm("" : "+r"(value1) : "r"(value2));
0729 return value1;
0730 }
0731
0732
// Extracts the 7-bit varint payload chunk starting at bit `start` — named
// after the aarch64 UBFX (unsigned bit-field extract) instruction the barrier
// encourages the compiler to emit.
static PROTOBUF_ALWAYS_INLINE inline uint64_t Ubfx7(uint64_t data,
                                                    uint64_t start) {
  return ValueBarrier((data >> start) & 0x7f);
}
0737
// Extracts the two adjacent 7-bit payload chunks whose first byte index is
// `first_byte` and merges them into a 14-bit value (low chunk in bits 0-6).
PROTOBUF_ALWAYS_INLINE inline uint64_t ExtractAndMergeTwoChunks(
    uint64_t data, uint64_t first_byte) {
  ABSL_DCHECK_LE(first_byte, 6U);  // byte first_byte+1 must also be in `data`
  uint64_t first = Ubfx7(data, first_byte * 8);
  uint64_t second = Ubfx7(data, (first_byte + 1) * 8);
  return ValueBarrier(first | (second << 7));
}
0745
// Intermediate state produced by ComputeLengthAndUpdateP for the arm slow
// paths.
struct SlowPathEncodedInfo {
  const char* p;              // input pointer advanced past the varint
  uint64_t last8;             // bytes [p+2, p+10) of the original input
  uint64_t valid_bits;        // bit index of first clear continuation bit in last8
  uint64_t valid_chunk_bits;  // number of valid payload bits in last8
  uint64_t masked_cont_bits;  // complemented continuation bits of last8
};
0753
0754
0755
0756
// Determines the length of the varint starting at `p` (which is known to be
// at least 3 bytes long — the callers have already handled 1- and 2-byte
// varints) and computes the derived masks described on SlowPathEncodedInfo.
PROTOBUF_ALWAYS_INLINE inline SlowPathEncodedInfo ComputeLengthAndUpdateP(
    const char* p) {
  SlowPathEncodedInfo result;
  // Load bytes 2..9 of the encoding; bytes 0-1 were consumed by the caller.
  std::memcpy(&result.last8, p + 2, sizeof(result.last8));
  uint64_t mask = ValueBarrier(0x8080808080808080);
  // Invert so that only *cleared* continuation bits remain set.
  result.masked_cont_bits = ValueBarrier(mask & ~result.last8);
  // Position of the first cleared continuation bit = end of the varint.
  // countr_zero is undefined for 0 (unterminated varint); callers detect that
  // case via masked_cont_bits == 0.
  result.valid_bits = absl::countr_zero(result.masked_cont_bits);
  // Number of fully-set continuation bytes within last8; the total length is
  // that plus the 2 caller-consumed bytes plus the terminating byte.
  uint64_t set_continuation_bits = result.valid_bits >> 3;
  result.p = p + set_continuation_bits + 3;
  // Payload bits contributed by last8: strip one continuation bit per byte.
  result.valid_chunk_bits = result.valid_bits - set_continuation_bits;
  return result;
}
0781
// aarch64 slow path for 64-bit varints of 3+ bytes.  `first8` holds the first
// eight input bytes.  Returns {nullptr, 0} on an unterminated encoding.
inline PROTOBUF_ALWAYS_INLINE std::pair<const char*, uint64_t>
VarintParseSlowArm64(const char* p, uint64_t first8) {
  // Mask clearing everything above the first two chunks; shifted left by the
  // number of valid payload bits to drop garbage chunks from the result.
  constexpr uint64_t kResultMaskUnshifted = 0xffffffffffffc000ULL;
  constexpr uint64_t kFirstResultBitChunk2 = 2 * 7;
  constexpr uint64_t kFirstResultBitChunk4 = 4 * 7;
  constexpr uint64_t kFirstResultBitChunk6 = 6 * 7;
  constexpr uint64_t kFirstResultBitChunk8 = 8 * 7;

  SlowPathEncodedInfo info = ComputeLengthAndUpdateP(p);

  // Merge chunk pairs (0,1), (2,3), (4,5) from the first eight bytes.
  uint64_t merged_01 = ExtractAndMergeTwoChunks(first8, 0);
  uint64_t merged_23 = ExtractAndMergeTwoChunks(first8, 2);
  uint64_t merged_45 = ExtractAndMergeTwoChunks(first8, 4);

  uint64_t result = merged_01 | (merged_23 << kFirstResultBitChunk2) |
                    (merged_45 << kFirstResultBitChunk4);

  uint64_t result_mask = kResultMaskUnshifted << info.valid_chunk_bits;

  // No cleared continuation bit found within range: malformed varint.
  if (PROTOBUF_PREDICT_FALSE(info.masked_cont_bits == 0)) {
    return {nullptr, 0};
  }

  // Encodings longer than 5 bytes (valid_bits has bit 5 set, i.e. >= 32):
  // fold in chunks 6-9, which partially come from the second load.
  if (PROTOBUF_PREDICT_FALSE((info.valid_bits & 0x20) != 0)) {
    uint64_t merged_67 = ExtractAndMergeTwoChunks(first8, 6);
    // Chunks 8 and 9 live in bytes 8-9 of the input == bytes 6-7 of last8.
    uint64_t merged_89 =
        ExtractAndMergeTwoChunks(info.last8, 6);
    result |= merged_67 << kFirstResultBitChunk6;
    result |= merged_89 << kFirstResultBitChunk8;
  }

  // Drop chunks past the varint's terminating byte.
  result &= ~result_mask;
  return {info.p, result};
}
0821
0822
0823
// aarch64 slow path for 32-bit varints of 3+ bytes.  Like the 64-bit version
// but only chunks 0-4 can contribute to the 32-bit result; the ValueBarrier
// calls pin the instruction schedule the hand-tuning relies on.
inline PROTOBUF_ALWAYS_INLINE std::pair<const char*, uint32_t>
VarintParseSlowArm32(const char* p, uint64_t first8) {
  constexpr uint64_t kResultMaskUnshifted = 0xffffffffffffc000ULL;
  constexpr uint64_t kFirstResultBitChunk1 = 1 * 7;
  constexpr uint64_t kFirstResultBitChunk3 = 3 * 7;

  SlowPathEncodedInfo info = ComputeLengthAndUpdateP(p);

  uint64_t merged_12 = ExtractAndMergeTwoChunks(first8, 1);
  uint64_t merged_34 = ExtractAndMergeTwoChunks(first8, 3);
  // Artificial dependency on `p` delays this extract until after the merges.
  first8 = ValueBarrier(first8, p);
  uint64_t result = Ubfx7(first8, 0);
  result = ValueBarrier(result | merged_12 << kFirstResultBitChunk1);
  result = ValueBarrier(result | merged_34 << kFirstResultBitChunk3);
  uint64_t result_mask = kResultMaskUnshifted << info.valid_chunk_bits;
  result &= ~result_mask;
  // Tie the malformed-input check to `result` so it is not hoisted before the
  // value computation.
  info.masked_cont_bits = ValueBarrier(info.masked_cont_bits, result);
  if (PROTOBUF_PREDICT_FALSE(info.masked_cont_bits == 0)) {
    return {nullptr, 0};
  }
  return {info.p, result};
}
0850
0851 static const char* VarintParseSlowArm(const char* p, uint32_t* out,
0852 uint64_t first8) {
0853 auto tmp = VarintParseSlowArm32(p, first8);
0854 *out = tmp.second;
0855 return tmp.first;
0856 }
0857
0858 static const char* VarintParseSlowArm(const char* p, uint64_t* out,
0859 uint64_t first8) {
0860 auto tmp = VarintParseSlowArm64(p, first8);
0861 *out = tmp.second;
0862 return tmp.first;
0863 }
0864 #endif
0865
0866
// Parses a varint into *out and returns the advanced pointer, or nullptr on
// malformed input.  Relies on the EpsCopyInputStream guarantee that enough
// slop bytes past `p` are addressable (the aarch64 path does an 8-byte load).
template <typename T>
PROTOBUF_NODISCARD const char* VarintParse(const char* p, T* out) {
#if defined(__aarch64__) && defined(ABSL_IS_LITTLE_ENDIAN)
  // Load eight bytes at once and special-case the very common 1- and 2-byte
  // encodings without a byte-at-a-time loop.
  uint64_t first8;
  std::memcpy(&first8, p, sizeof(first8));
  if (PROTOBUF_PREDICT_TRUE((first8 & 0x80) == 0)) {
    // Continuation bit of byte 0 clear: single-byte varint.
    *out = static_cast<uint8_t>(first8);
    return p + 1;
  }
  if (PROTOBUF_PREDICT_TRUE((first8 & 0x8000) == 0)) {
    // Continuation bit of byte 1 clear: two-byte varint.
    uint64_t chunk1;
    uint64_t chunk2;
    chunk1 = Ubfx7(first8, 0);
    chunk2 = Ubfx7(first8, 8);
    *out = chunk1 | (chunk2 << 7);
    return p + 2;
  }
  return VarintParseSlowArm(p, out, first8);
#else
  // Generic path: inline only the single-byte case.
  auto ptr = reinterpret_cast<const uint8_t*>(p);
  uint32_t res = ptr[0];
  if ((res & 0x80) == 0) {
    *out = res;
    return p + 1;
  }
  return VarintParseSlow(p, res, out);
#endif
}
0897
0898
0899
0900
0901 PROTOBUF_EXPORT
0902 std::pair<const char*, uint32_t> ReadTagFallback(const char* p, uint32_t res);
0903
0904
0905 inline const char* ReadTag(const char* p, uint32_t* out,
0906 uint32_t = 0) {
0907 uint32_t res = static_cast<uint8_t>(p[0]);
0908 if (res < 128) {
0909 *out = res;
0910 return p + 1;
0911 }
0912 uint32_t second = static_cast<uint8_t>(p[1]);
0913 res += (second - 1) << 7;
0914 if (second < 128) {
0915 *out = res;
0916 return p + 2;
0917 }
0918 auto tmp = ReadTagFallback(p, res);
0919 *out = tmp.second;
0920 return tmp.first;
0921 }
0922
0923
0924
0925
0926
0927
0928 template <class T>
0929 PROTOBUF_NODISCARD PROTOBUF_ALWAYS_INLINE constexpr T RotateLeft(
0930 T x, int s) noexcept {
0931 return static_cast<T>(x << (s & (std::numeric_limits<T>::digits - 1))) |
0932 static_cast<T>(x >> ((-s) & (std::numeric_limits<T>::digits - 1)));
0933 }
0934
// Rotates `res` right by 7 bits and replaces its low byte with `byte`.
// Building block of ReadTagInlined: each step shifts the accumulated payload
// down while splicing in the next input byte.
PROTOBUF_NODISCARD inline PROTOBUF_ALWAYS_INLINE uint64_t
RotRight7AndReplaceLowByte(uint64_t res, const char& byte) {
#if defined(__x86_64__) && defined(__GNUC__)
  // Hand-written ror + movb: the compiler does not reliably fuse the rotate
  // and byte-insert into this two-instruction sequence on its own.
  asm("ror $7,%0\n\t"
      "movb %1,%b0"
      : "+r"(res)
      : "m"(byte));
#else
  // Portable equivalent of the asm above.
  res = RotateLeft(res, -7);
  res = res & ~0xFF;
  res |= 0xFF & byte;
#endif
  return res;
}
0957
// Fully inlined tag decoder: consumes 1-5 bytes, accumulating 7-bit payload
// chunks via RotRight7AndReplaceLowByte and un-rotating with RotateLeft at
// the end (rotation count = 7 * extra bytes consumed).  Returns nullptr and
// *out == 0 if byte 5 still has its continuation bit set (tags are 32-bit,
// so valid encodings never exceed 5 bytes).
inline PROTOBUF_ALWAYS_INLINE const char* ReadTagInlined(const char* ptr,
                                                         uint32_t* out) {
  uint64_t res = 0xFF & ptr[0];
  if (PROTOBUF_PREDICT_FALSE(res >= 128)) {
    res = RotRight7AndReplaceLowByte(res, ptr[1]);
    if (PROTOBUF_PREDICT_FALSE(res & 0x80)) {
      res = RotRight7AndReplaceLowByte(res, ptr[2]);
      if (PROTOBUF_PREDICT_FALSE(res & 0x80)) {
        res = RotRight7AndReplaceLowByte(res, ptr[3]);
        if (PROTOBUF_PREDICT_FALSE(res & 0x80)) {
          // 5th and final permissible byte for a 32-bit tag.
          res = RotRight7AndReplaceLowByte(res, ptr[4]);
          if (PROTOBUF_PREDICT_FALSE(res & 0x80)) {
            // Continuation bit still set after 5 bytes: malformed tag.
            *out = 0;
            return nullptr;
          }
          *out = static_cast<uint32_t>(RotateLeft(res, 28));
#if defined(__GNUC__)
          // Barrier on ptr: keeps the compiler from collapsing the distinct
          // return paths into slower branchy code.
          asm("" : "+r"(ptr));
#endif
          return ptr + 5;
        }
        *out = static_cast<uint32_t>(RotateLeft(res, 21));
        return ptr + 4;
      }
      *out = static_cast<uint32_t>(RotateLeft(res, 14));
      return ptr + 3;
    }
    *out = static_cast<uint32_t>(RotateLeft(res, 7));
    return ptr + 2;
  }
  *out = static_cast<uint32_t>(res);
  return ptr + 1;
}
0999
1000
1001
1002
1003
1004
1005
1006
1007
1008
// Decodes two consecutive varint bytes at *ptr, advancing *ptr by 2 when the
// first byte's continuation bit is set and by 1 otherwise.  The merged
// payload is returned shifted left by one; bit 15 of the result being set
// means both continuation bits were set and the varint continues (see
// ParseBigVarint).  Branch-free except for the pointer bump.
inline uint32_t DecodeTwoBytes(const char** ptr) {
  uint32_t value = UnalignedLoad<uint16_t>(*ptr);
  // Sign-extend the low byte: x is 0xFFFFFFxx when its continuation bit is
  // set, 0x000000xx otherwise.
  uint32_t x = static_cast<int8_t>(value);
  // If the continuation bit was clear, this masks away the (unconsumed)
  // second byte; if set, it leaves both bytes intact.
  value &= x;
  // Adds the second byte's payload shifted into place (the wraparound from
  // the sign-extension performs the merge); doubles the value when only one
  // byte was consumed.
  value += x;
  // value < x (unsigned wrap) exactly when the first continuation bit was
  // set, i.e. when two bytes were consumed.
  *ptr += value < x ? 2 : 1;
  return value;
}
1023
1024
// Varint parser optimized for long (up to 10-byte) varints, consuming two
// bytes per step via DecodeTwoBytes.  Returns nullptr if no terminating byte
// is found within 10 bytes.
inline const char* ParseBigVarint(const char* p, uint64_t* out) {
  auto pnew = p;
  auto tmp = DecodeTwoBytes(&pnew);
  uint64_t res = tmp >> 1;  // undo the left-by-one encoding of DecodeTwoBytes
  // Sign bit (bit 15) clear means the varint terminated within these bytes.
  if (PROTOBUF_PREDICT_TRUE(static_cast<std::int16_t>(tmp) >= 0)) {
    *out = res;
    return pnew;
  }
  for (std::uint32_t i = 1; i < 5; i++) {
    pnew = p + 2 * i;
    tmp = DecodeTwoBytes(&pnew);
    // Fold in the next 14 payload bits; the "- 2" cancels the continuation
    // artifacts left in the previous step's low bits.
    res += (static_cast<std::uint64_t>(tmp) - 2) << (14 * i - 1);
    if (PROTOBUF_PREDICT_TRUE(static_cast<std::int16_t>(tmp) >= 0)) {
      *out = res;
      return pnew;
    }
  }
  return nullptr;
}
1044
// Out-of-line slow path for ReadSize (length prefixes of 2+ bytes); returns
// {nullptr, _} on malformed or out-of-range sizes.
PROTOBUF_EXPORT
std::pair<const char*, int32_t> ReadSizeFallback(const char* p, uint32_t first);
1047
1048
1049
1050 inline uint32_t ReadSize(const char** pp) {
1051 auto p = *pp;
1052 uint32_t res = static_cast<uint8_t>(p[0]);
1053 if (res < 128) {
1054 *pp = p + 1;
1055 return res;
1056 }
1057 auto x = ReadSizeFallback(p, res);
1058 *pp = x.first;
1059 return x.second;
1060 }
1061
1062
1063
1064
1065
1066
1067 inline uint64_t ReadVarint64(const char** p) {
1068 uint64_t tmp;
1069 *p = VarintParse(*p, &tmp);
1070 return tmp;
1071 }
1072
1073 inline uint32_t ReadVarint32(const char** p) {
1074 uint32_t tmp;
1075 *p = VarintParse(*p, &tmp);
1076 return tmp;
1077 }
1078
1079 inline int64_t ReadVarintZigZag64(const char** p) {
1080 uint64_t tmp;
1081 *p = VarintParse(*p, &tmp);
1082 return WireFormatLite::ZigZagDecode64(tmp);
1083 }
1084
1085 inline int32_t ReadVarintZigZag32(const char** p) {
1086 uint64_t tmp;
1087 *p = VarintParse(*p, &tmp);
1088 return WireFormatLite::ZigZagDecode32(static_cast<uint32_t>(tmp));
1089 }
1090
// Reads a length prefix, pushes the limit and depth, runs `func` over the
// delimited region, then restores both.  Returns nullptr on any failure
// (malformed size, depth exhausted, func failed, or region not consumed
// exactly).
template <typename Func>
PROTOBUF_NODISCARD inline PROTOBUF_ALWAYS_INLINE const char*
ParseContext::ParseLengthDelimitedInlined(const char* ptr, const Func& func) {
  LimitToken old;
  ptr = ReadSizeAndPushLimitAndDepthInlined(ptr, &old);
  if (ptr == nullptr) return ptr;
  auto old_depth = depth_;
  PROTOBUF_ALWAYS_INLINE_CALL ptr = func(ptr);
  // A successful nested parse must leave depth_ as it found it.
  if (ptr != nullptr) ABSL_DCHECK_EQ(old_depth, depth_);
  depth_++;  // undo the decrement done by ReadSizeAndPushLimitAndDepthInlined
  if (!PopLimit(std::move(old))) return nullptr;
  return ptr;
}
1104
// Runs `func` as the body of a group opened by `start_tag`, maintaining the
// depth/group counters and verifying the matching end-group tag was seen.
template <typename Func>
PROTOBUF_NODISCARD inline PROTOBUF_ALWAYS_INLINE const char*
ParseContext::ParseGroupInlined(const char* ptr, uint32_t start_tag,
                                const Func& func) {
  if (--depth_ < 0) return nullptr;  // recursion budget exhausted
  group_depth_++;
  auto old_depth = depth_;
  auto old_group_depth = group_depth_;
  PROTOBUF_ALWAYS_INLINE_CALL ptr = func(ptr);
  if (ptr != nullptr) {
    // A successful nested parse must have restored both counters.
    ABSL_DCHECK_EQ(old_depth, depth_);
    ABSL_DCHECK_EQ(old_group_depth, group_depth_);
  }
  group_depth_--;
  depth_++;
  if (PROTOBUF_PREDICT_FALSE(!ConsumeEndGroup(start_tag))) return nullptr;
  return ptr;
}
1123
// Reads a length prefix, installs it as a parse limit (saving the previous
// limit into *old_limit) and consumes one unit of recursion depth.  Returns
// nullptr on malformed size or exhausted depth — in which case no limit is
// pushed and depth_ is unchanged.
inline const char* ParseContext::ReadSizeAndPushLimitAndDepthInlined(
    const char* ptr, LimitToken* old_limit) {
  int size = ReadSize(&ptr);
  if (PROTOBUF_PREDICT_FALSE(!ptr) || depth_ <= 0) {
    return nullptr;
  }
  *old_limit = PushLimit(ptr, size);
  --depth_;
  return ptr;
}
1134
// Reads consecutive fixed-size values while each is preceded by
// `expected_tag`, stopping at the chunk/limit boundary or the first
// different tag.  The caller has already consumed the first tag.
template <typename Tag, typename T>
const char* EpsCopyInputStream::ReadRepeatedFixed(const char* ptr,
                                                  Tag expected_tag,
                                                  RepeatedField<T>* out) {
  do {
    out->Add(UnalignedLoad<T>(ptr));
    ptr += sizeof(T);
    if (PROTOBUF_PREDICT_FALSE(ptr >= limit_end_)) return ptr;
    // `(ptr += sizeof(Tag))` is a non-null pointer, hence always truthy: it
    // only executes — advancing past the tag — when the tag matched.
  } while (UnalignedLoad<Tag>(ptr) == expected_tag && (ptr += sizeof(Tag)));
  return ptr;
}
1146
1147
1148
// Returns `ret` from the enclosing function when `predicate` is false —
// shorthand for the pervasive "bail out of the parser on bad input" pattern.
#define GOOGLE_PROTOBUF_ASSERT_RETURN(predicate, ret) \
  if (!(predicate)) {                                 \
    /* parser precondition failed: abort the parse */ \
    return ret;                                       \
  }

// Parser flavor: a failed predicate returns nullptr (the parse-error value).
#define GOOGLE_PROTOBUF_PARSER_ASSERT(predicate) \
  GOOGLE_PROTOBUF_ASSERT_RETURN(predicate, nullptr)
1158
// Reads a packed run of `size` bytes of fixed-width values into *out,
// crossing chunk boundaries as needed.  Returns nullptr on malformed input
// (size not a multiple of sizeof(T), or data ends early).
template <typename T>
const char* EpsCopyInputStream::ReadPackedFixed(const char* ptr, int size,
                                                RepeatedField<T>* out) {
  GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
  int nbytes = static_cast<int>(buffer_end_ + kSlopBytes - ptr);
  while (size > nbytes) {
    // Consume everything available in this chunk (whole elements only).
    int num = nbytes / sizeof(T);
    int old_entries = out->size();
    out->Reserve(old_entries + num);
    int block_size = num * sizeof(T);
    auto dst = out->AddNAlreadyReserved(num);
#ifdef ABSL_IS_LITTLE_ENDIAN
    std::memcpy(dst, ptr, block_size);
#else
    for (int i = 0; i < num; i++)
      dst[i] = UnalignedLoad<T>(ptr + i * sizeof(T));
#endif
    size -= block_size;
    if (limit_ <= kSlopBytes) return nullptr;
    ptr = Next();
    if (ptr == nullptr) return nullptr;
    // Rewind over the element bytes that straddled the boundary (they are
    // present again in the patched region).
    ptr += kSlopBytes - (nbytes - block_size);
    nbytes = static_cast<int>(buffer_end_ + kSlopBytes - ptr);
  }
  int num = size / sizeof(T);
  int block_size = num * sizeof(T);
  if (num == 0) return size == block_size ? ptr : nullptr;
  int old_entries = out->size();
  out->Reserve(old_entries + num);
  auto dst = out->AddNAlreadyReserved(num);
#ifdef ABSL_IS_LITTLE_ENDIAN
  ABSL_CHECK(dst != nullptr) << out << "," << num;
  std::memcpy(dst, ptr, block_size);
#else
  for (int i = 0; i < num; i++) dst[i] = UnalignedLoad<T>(ptr + i * sizeof(T));
#endif
  ptr += block_size;
  // A trailing partial element means the declared size was inconsistent.
  if (size != block_size) return nullptr;
  return ptr;
}
1199
1200 template <typename Add>
1201 const char* ReadPackedVarintArray(const char* ptr, const char* end, Add add) {
1202 while (ptr < end) {
1203 uint64_t varint;
1204 ptr = VarintParse(ptr, &varint);
1205 if (ptr == nullptr) return nullptr;
1206 add(varint);
1207 }
1208 return ptr;
1209 }
1210
// Reads a packed varint run: length prefix first, then varints until the
// declared byte count is consumed, crossing chunk boundaries as needed.
// Returns nullptr on malformed input.
template <typename Add, typename SizeCb>
const char* EpsCopyInputStream::ReadPackedVarint(const char* ptr, Add add,
                                                 SizeCb size_callback) {
  int size = ReadSize(&ptr);
  size_callback(size);

  GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
  int chunk_size = static_cast<int>(buffer_end_ - ptr);
  while (size > chunk_size) {
    // Consume this chunk up to buffer_end_; the final varint may spill into
    // the slop region, producing a positive overrun.
    ptr = ReadPackedVarintArray(ptr, buffer_end_, add);
    if (ptr == nullptr) return nullptr;
    int overrun = static_cast<int>(ptr - buffer_end_);
    ABSL_DCHECK(overrun >= 0 && overrun <= kSlopBytes);
    if (size - chunk_size <= kSlopBytes) {
      // The run ends inside the slop region.  Copy the slop into a local
      // buffer padded by 10 bytes (one maximal varint) so parsing can never
      // read past initialized memory, then translate the result back.
      char buf[kSlopBytes + 10] = {};
      std::memcpy(buf, buffer_end_, kSlopBytes);
      ABSL_CHECK_LE(size - chunk_size, kSlopBytes);
      auto end = buf + (size - chunk_size);
      auto res = ReadPackedVarintArray(buf + overrun, end, add);
      // Must land exactly on the declared end of the run.
      if (res == nullptr || res != end) return nullptr;
      return buffer_end_ + (res - buf);
    }
    size -= overrun + chunk_size;
    ABSL_DCHECK_GT(size, 0);
    // More data than the current limit allows: corrupt length prefix.
    if (limit_ <= kSlopBytes) return nullptr;
    ptr = Next();
    if (ptr == nullptr) return nullptr;
    ptr += overrun;  // skip bytes already parsed out of the slop copy
    chunk_size = static_cast<int>(buffer_end_ - ptr);
  }
  auto end = ptr + size;
  ptr = ReadPackedVarintArray(ptr, end, add);
  // The run must end exactly at the declared size.
  return end == ptr ? ptr : nullptr;
}
1249
1250
1251 PROTOBUF_EXPORT
1252 bool VerifyUTF8(absl::string_view s, const char* field_name);
1253
1254 inline bool VerifyUTF8(const std::string* s, const char* field_name) {
1255 return VerifyUTF8(*s, field_name);
1256 }
1257
1258
1259 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* InlineGreedyStringParser(
1260 std::string* s, const char* ptr, ParseContext* ctx);
1261
1262 PROTOBUF_NODISCARD inline const char* InlineCordParser(::absl::Cord* cord,
1263 const char* ptr,
1264 ParseContext* ctx) {
1265 int size = ReadSize(&ptr);
1266 if (!ptr) return nullptr;
1267 return ctx->ReadCord(ptr, size, cord);
1268 }
1269
1270
// Dispatches a single field, whose wire tag has already been read, to
// `field_parser`. `tag` is the full tag (field number << 3 | wire type);
// the payload is delivered through the parser's AddVarint/AddFixed64/
// ParseLengthDelimited/ParseGroup/AddFixed32 callbacks. Returns the
// position just past the field, or nullptr on malformed input.
template <typename T>
PROTOBUF_NODISCARD const char* FieldParser(uint64_t tag, T& field_parser,
                                           const char* ptr, ParseContext* ctx) {
  uint32_t number = tag >> 3;
  // Field number 0 is never valid on the wire.
  GOOGLE_PROTOBUF_PARSER_ASSERT(number != 0);
  using WireType = internal::WireFormatLite::WireType;
  switch (tag & 7) {
    case WireType::WIRETYPE_VARINT: {
      uint64_t value;
      ptr = VarintParse(ptr, &value);
      GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
      field_parser.AddVarint(number, value);
      break;
    }
    case WireType::WIRETYPE_FIXED64: {
      uint64_t value = UnalignedLoad<uint64_t>(ptr);
      ptr += 8;
      field_parser.AddFixed64(number, value);
      break;
    }
    case WireType::WIRETYPE_LENGTH_DELIMITED: {
      ptr = field_parser.ParseLengthDelimited(number, ptr, ctx);
      GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
      break;
    }
    case WireType::WIRETYPE_START_GROUP: {
      ptr = field_parser.ParseGroup(number, ptr, ctx);
      GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
      break;
    }
    case WireType::WIRETYPE_END_GROUP: {
      // Callers (e.g. WireFormatParser) intercept END_GROUP tags before
      // dispatching here, so this branch is unreachable.
      ABSL_LOG(FATAL) << "Can't happen";
      break;
    }
    case WireType::WIRETYPE_FIXED32: {
      uint32_t value = UnalignedLoad<uint32_t>(ptr);
      ptr += 4;
      field_parser.AddFixed32(number, value);
      break;
    }
    default:
      // Wire types 6 and 7 are not defined by the protobuf wire format.
      return nullptr;
  }
  return ptr;
}
1316
1317 template <typename T>
1318 PROTOBUF_NODISCARD const char* WireFormatParser(T& field_parser,
1319 const char* ptr,
1320 ParseContext* ctx) {
1321 while (!ctx->Done(&ptr)) {
1322 uint32_t tag;
1323 ptr = ReadTag(ptr, &tag);
1324 GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr);
1325 if (tag == 0 || (tag & 7) == 4) {
1326 ctx->SetLastTag(tag);
1327 return ptr;
1328 }
1329 ptr = FieldParser(tag, field_parser, ptr, ctx);
1330 GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr);
1331 }
1332 return ptr;
1333 }
1334
1335
1336
1337
1338
1339 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedInt32Parser(
1340 void* object, const char* ptr, ParseContext* ctx);
1341 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedUInt32Parser(
1342 void* object, const char* ptr, ParseContext* ctx);
1343 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedInt64Parser(
1344 void* object, const char* ptr, ParseContext* ctx);
1345 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedUInt64Parser(
1346 void* object, const char* ptr, ParseContext* ctx);
1347 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedSInt32Parser(
1348 void* object, const char* ptr, ParseContext* ctx);
1349 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedSInt64Parser(
1350 void* object, const char* ptr, ParseContext* ctx);
1351 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedEnumParser(
1352 void* object, const char* ptr, ParseContext* ctx);
1353
1354 template <typename T>
1355 PROTOBUF_NODISCARD const char* PackedEnumParser(void* object, const char* ptr,
1356 ParseContext* ctx,
1357 bool (*is_valid)(int),
1358 InternalMetadata* metadata,
1359 int field_num) {
1360 return ctx->ReadPackedVarint(
1361 ptr, [object, is_valid, metadata, field_num](int32_t val) {
1362 if (is_valid(val)) {
1363 static_cast<RepeatedField<int>*>(object)->Add(val);
1364 } else {
1365 WriteVarint(field_num, val, metadata->mutable_unknown_fields<T>());
1366 }
1367 });
1368 }
1369
1370 template <typename T>
1371 PROTOBUF_NODISCARD const char* PackedEnumParserArg(
1372 void* object, const char* ptr, ParseContext* ctx,
1373 bool (*is_valid)(const void*, int), const void* data,
1374 InternalMetadata* metadata, int field_num) {
1375 return ctx->ReadPackedVarint(
1376 ptr, [object, is_valid, data, metadata, field_num](int32_t val) {
1377 if (is_valid(data, val)) {
1378 static_cast<RepeatedField<int>*>(object)->Add(val);
1379 } else {
1380 WriteVarint(field_num, val, metadata->mutable_unknown_fields<T>());
1381 }
1382 });
1383 }
1384
1385 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedBoolParser(
1386 void* object, const char* ptr, ParseContext* ctx);
1387 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedFixed32Parser(
1388 void* object, const char* ptr, ParseContext* ctx);
1389 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedSFixed32Parser(
1390 void* object, const char* ptr, ParseContext* ctx);
1391 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedFixed64Parser(
1392 void* object, const char* ptr, ParseContext* ctx);
1393 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedSFixed64Parser(
1394 void* object, const char* ptr, ParseContext* ctx);
1395 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedFloatParser(
1396 void* object, const char* ptr, ParseContext* ctx);
1397 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* PackedDoubleParser(
1398 void* object, const char* ptr, ParseContext* ctx);
1399
1400
1401 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* UnknownGroupLiteParse(
1402 std::string* unknown, const char* ptr, ParseContext* ctx);
1403
1404
1405
1406 PROTOBUF_NODISCARD PROTOBUF_EXPORT const char* UnknownFieldParse(
1407 uint32_t tag, std::string* unknown, const char* ptr, ParseContext* ctx);
1408
1409 }
1410 }
1411 }
1412
1413 #include "google/protobuf/port_undef.inc"
1414
1415 #endif