File indexing completed on 2024-05-18 08:30:30
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
0044
0045
0046
0047
0048
0049
0050
0051
0052
0053
0054
0055
0056
0057
0058
0059
0060
0061
0062
0063
0064
0065
0066
0067
0068
0069
0070
0071
0072
0073
0074
0075
0076
0077
0078
0079 #if defined (__cplusplus)
0080 extern "C" {
0081 #endif
0082
0083
0084
0085
0086
0087
0088
0089
0090
0091
0092
0093
0094
0095
0096
0097
0098
0099
0100
0101
0102 #if (defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)) \
0103 && !defined(XXH_INLINE_ALL_31684351384)
0104
0105 # define XXH_INLINE_ALL_31684351384
0106
0107 # undef XXH_STATIC_LINKING_ONLY
0108 # define XXH_STATIC_LINKING_ONLY
0109
0110 # undef XXH_PUBLIC_API
0111 # if defined(__GNUC__)
0112 # define XXH_PUBLIC_API static __inline __attribute__((unused))
0113 # elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) )
0114 # define XXH_PUBLIC_API static inline
0115 # elif defined(_MSC_VER)
0116 # define XXH_PUBLIC_API static __inline
0117 # else
0118
0119 # define XXH_PUBLIC_API static
0120 # endif
0121
0122
0123
0124
0125
0126
0127
0128
0129
0130
0131
0132
0133
0134
0135 # undef XXH_versionNumber
0136
0137 # undef XXH32
0138 # undef XXH32_createState
0139 # undef XXH32_freeState
0140 # undef XXH32_reset
0141 # undef XXH32_update
0142 # undef XXH32_digest
0143 # undef XXH32_copyState
0144 # undef XXH32_canonicalFromHash
0145 # undef XXH32_hashFromCanonical
0146
0147 # undef XXH64
0148 # undef XXH64_createState
0149 # undef XXH64_freeState
0150 # undef XXH64_reset
0151 # undef XXH64_update
0152 # undef XXH64_digest
0153 # undef XXH64_copyState
0154 # undef XXH64_canonicalFromHash
0155 # undef XXH64_hashFromCanonical
0156
0157 # undef XXH3_64bits
0158 # undef XXH3_64bits_withSecret
0159 # undef XXH3_64bits_withSeed
0160 # undef XXH3_64bits_withSecretandSeed
0161 # undef XXH3_createState
0162 # undef XXH3_freeState
0163 # undef XXH3_copyState
0164 # undef XXH3_64bits_reset
0165 # undef XXH3_64bits_reset_withSeed
0166 # undef XXH3_64bits_reset_withSecret
0167 # undef XXH3_64bits_update
0168 # undef XXH3_64bits_digest
0169 # undef XXH3_generateSecret
0170
0171 # undef XXH128
0172 # undef XXH3_128bits
0173 # undef XXH3_128bits_withSeed
0174 # undef XXH3_128bits_withSecret
0175 # undef XXH3_128bits_reset
0176 # undef XXH3_128bits_reset_withSeed
0177 # undef XXH3_128bits_reset_withSecret
0178 # undef XXH3_128bits_reset_withSecretandSeed
0179 # undef XXH3_128bits_update
0180 # undef XXH3_128bits_digest
0181 # undef XXH128_isEqual
0182 # undef XXH128_cmp
0183 # undef XXH128_canonicalFromHash
0184 # undef XXH128_hashFromCanonical
0185
0186 # undef XXH_NAMESPACE
0187
0188
0189 # define XXH_NAMESPACE XXH_INLINE_
0190
0191
0192
0193
0194
0195
0196
0197 # define XXH_IPREF(Id) XXH_NAMESPACE ## Id
0198 # define XXH_OK XXH_IPREF(XXH_OK)
0199 # define XXH_ERROR XXH_IPREF(XXH_ERROR)
0200 # define XXH_errorcode XXH_IPREF(XXH_errorcode)
0201 # define XXH32_canonical_t XXH_IPREF(XXH32_canonical_t)
0202 # define XXH64_canonical_t XXH_IPREF(XXH64_canonical_t)
0203 # define XXH128_canonical_t XXH_IPREF(XXH128_canonical_t)
0204 # define XXH32_state_s XXH_IPREF(XXH32_state_s)
0205 # define XXH32_state_t XXH_IPREF(XXH32_state_t)
0206 # define XXH64_state_s XXH_IPREF(XXH64_state_s)
0207 # define XXH64_state_t XXH_IPREF(XXH64_state_t)
0208 # define XXH3_state_s XXH_IPREF(XXH3_state_s)
0209 # define XXH3_state_t XXH_IPREF(XXH3_state_t)
0210 # define XXH128_hash_t XXH_IPREF(XXH128_hash_t)
0211
0212 # undef XXHASH_H_5627135585666179
0213 # undef XXHASH_H_STATIC_13879238742
0214 #endif
0215
0216
0217
0218
0219
0220
0221 #ifndef XXHASH_H_5627135585666179
0222 #define XXHASH_H_5627135585666179 1
0223
0224
0225
0226
0227
0228
0229
0230
0231 #if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API)
0232 # if defined(WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT))
0233 # ifdef XXH_EXPORT
0234 # define XXH_PUBLIC_API __declspec(dllexport)
0235 # elif XXH_IMPORT
0236 # define XXH_PUBLIC_API __declspec(dllimport)
0237 # endif
0238 # else
0239 # define XXH_PUBLIC_API
0240 # endif
0241 #endif
0242
0243 #ifdef XXH_DOXYGEN
0244
0245
0246
0247
0248
0249
0250
0251
0252
0253
0254
0255
0256
0257 # define XXH_NAMESPACE
0258 # undef XXH_NAMESPACE
0259 #endif
0260
0261 #ifdef XXH_NAMESPACE
0262 # define XXH_CAT(A,B) A##B
0263 # define XXH_NAME2(A,B) XXH_CAT(A,B)
0264 # define XXH_versionNumber XXH_NAME2(XXH_NAMESPACE, XXH_versionNumber)
0265
0266 # define XXH32 XXH_NAME2(XXH_NAMESPACE, XXH32)
0267 # define XXH32_createState XXH_NAME2(XXH_NAMESPACE, XXH32_createState)
0268 # define XXH32_freeState XXH_NAME2(XXH_NAMESPACE, XXH32_freeState)
0269 # define XXH32_reset XXH_NAME2(XXH_NAMESPACE, XXH32_reset)
0270 # define XXH32_update XXH_NAME2(XXH_NAMESPACE, XXH32_update)
0271 # define XXH32_digest XXH_NAME2(XXH_NAMESPACE, XXH32_digest)
0272 # define XXH32_copyState XXH_NAME2(XXH_NAMESPACE, XXH32_copyState)
0273 # define XXH32_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH32_canonicalFromHash)
0274 # define XXH32_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH32_hashFromCanonical)
0275
0276 # define XXH64 XXH_NAME2(XXH_NAMESPACE, XXH64)
0277 # define XXH64_createState XXH_NAME2(XXH_NAMESPACE, XXH64_createState)
0278 # define XXH64_freeState XXH_NAME2(XXH_NAMESPACE, XXH64_freeState)
0279 # define XXH64_reset XXH_NAME2(XXH_NAMESPACE, XXH64_reset)
0280 # define XXH64_update XXH_NAME2(XXH_NAMESPACE, XXH64_update)
0281 # define XXH64_digest XXH_NAME2(XXH_NAMESPACE, XXH64_digest)
0282 # define XXH64_copyState XXH_NAME2(XXH_NAMESPACE, XXH64_copyState)
0283 # define XXH64_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH64_canonicalFromHash)
0284 # define XXH64_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH64_hashFromCanonical)
0285
0286 # define XXH3_64bits XXH_NAME2(XXH_NAMESPACE, XXH3_64bits)
0287 # define XXH3_64bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecret)
0288 # define XXH3_64bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSeed)
0289 # define XXH3_64bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecretandSeed)
0290 # define XXH3_createState XXH_NAME2(XXH_NAMESPACE, XXH3_createState)
0291 # define XXH3_freeState XXH_NAME2(XXH_NAMESPACE, XXH3_freeState)
0292 # define XXH3_copyState XXH_NAME2(XXH_NAMESPACE, XXH3_copyState)
0293 # define XXH3_64bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset)
0294 # define XXH3_64bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSeed)
0295 # define XXH3_64bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecret)
0296 # define XXH3_64bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecretandSeed)
0297 # define XXH3_64bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_update)
0298 # define XXH3_64bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_digest)
0299 # define XXH3_generateSecret XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret)
0300 # define XXH3_generateSecret_fromSeed XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret_fromSeed)
0301
0302 # define XXH128 XXH_NAME2(XXH_NAMESPACE, XXH128)
0303 # define XXH3_128bits XXH_NAME2(XXH_NAMESPACE, XXH3_128bits)
0304 # define XXH3_128bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSeed)
0305 # define XXH3_128bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecret)
0306 # define XXH3_128bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecretandSeed)
0307 # define XXH3_128bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset)
0308 # define XXH3_128bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSeed)
0309 # define XXH3_128bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecret)
0310 # define XXH3_128bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecretandSeed)
0311 # define XXH3_128bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_update)
0312 # define XXH3_128bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_digest)
0313 # define XXH128_isEqual XXH_NAME2(XXH_NAMESPACE, XXH128_isEqual)
0314 # define XXH128_cmp XXH_NAME2(XXH_NAMESPACE, XXH128_cmp)
0315 # define XXH128_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH128_canonicalFromHash)
0316 # define XXH128_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH128_hashFromCanonical)
0317 #endif
0318
0319
0320
0321
0322
0323 #define XXH_VERSION_MAJOR 0
0324 #define XXH_VERSION_MINOR 8
0325 #define XXH_VERSION_RELEASE 1
0326 #define XXH_VERSION_NUMBER (XXH_VERSION_MAJOR *100*100 + XXH_VERSION_MINOR *100 + XXH_VERSION_RELEASE)
0327
0328
0329
0330
0331
0332
0333
0334
0335
0336 XXH_PUBLIC_API unsigned XXH_versionNumber (void);
0337
0338
0339
0340
0341
0342 #include <stddef.h> /* size_t */
0343 typedef enum { XXH_OK=0, XXH_ERROR } XXH_errorcode;
0344
0345
0346
0347
0348
0349 #if defined(XXH_DOXYGEN)
0350
0351
0352
0353
0354
0355 typedef uint32_t XXH32_hash_t;
0356
0357 #elif !defined (__VMS) \
0358 && (defined (__cplusplus) \
0359 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
0360 # include <stdint.h>
0361 typedef uint32_t XXH32_hash_t;
0362
0363 #else
0364 # include <limits.h>
0365 # if UINT_MAX == 0xFFFFFFFFUL
0366 typedef unsigned int XXH32_hash_t;
0367 # else
0368 # if ULONG_MAX == 0xFFFFFFFFUL
0369 typedef unsigned long XXH32_hash_t;
0370 # else
0371 # error "unsupported platform: need a 32-bit type"
0372 # endif
0373 # endif
0374 #endif
0375
0376
0377
0378
0379
0380
0381
0382
0383
0384
0385
0386
0387
0388
0389
0390
0391
0392
0393
0394
0395
0396
0397
0398
0399
0400
0401
0402
0403
0404
0405
0406
0407
0408
0409
0410
0411
0412
0413
0414
0415 XXH_PUBLIC_API XXH32_hash_t XXH32 (const void* input, size_t length, XXH32_hash_t seed);
0416
0417
0418
0419
0420
0421
0422
0423
0424
0425
0426
0427
0428
0429
0430
0431
0432
0433
0434
0435
0436
0437
0438
0439
0440
0441
0442
0443
0444
0445
0446
0447
0448
0449
0450
0451
0452
0453
0454
0455
0456
0457
0458
0459
0460
0461
0462
0463
0464
0465
0466
0467
0468
0469
0470
0471
0472
0473 typedef struct XXH32_state_s XXH32_state_t;
0474
0475
0476
0477
0478
0479
0480
0481 XXH_PUBLIC_API XXH32_state_t* XXH32_createState(void);
0482
0483
0484
0485
0486
0487
0488
0489 XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t* statePtr);
0490
0491
0492
0493
0494
0495
0496
0497
0498 XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t* dst_state, const XXH32_state_t* src_state);
0499
0500
0501
0502
0503
0504
0505
0506
0507
0508
0509
0510
0511
0512
0513 XXH_PUBLIC_API XXH_errorcode XXH32_reset (XXH32_state_t* statePtr, XXH32_hash_t seed);
0514
0515
0516
0517
0518
0519
0520
0521
0522
0523
0524
0525
0526
0527
0528
0529
0530
0531
0532
0533 XXH_PUBLIC_API XXH_errorcode XXH32_update (XXH32_state_t* statePtr, const void* input, size_t length);
0534
0535
0536
0537
0538
0539
0540
0541
0542
0543
0544
0545
0546
0547
0548
0549 XXH_PUBLIC_API XXH32_hash_t XXH32_digest (const XXH32_state_t* statePtr);
0550
0551
0552
0553
0554
0555
0556
0557
0558
0559
0560
0561
0562
0563
0564
0565
0566
0567
0568
0569
0570
0571
0572
0573
0574
0575 typedef struct {
0576 unsigned char digest[4];
0577 } XXH32_canonical_t;
0578
0579
0580
0581
0582
0583
0584
0585
0586
0587
0588 XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t* dst, XXH32_hash_t hash);
0589
0590
0591
0592
0593
0594
0595
0596
0597
0598
0599
0600 XXH_PUBLIC_API XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t* src);
0601
0602
0603 #ifdef __has_attribute
0604 # define XXH_HAS_ATTRIBUTE(x) __has_attribute(x)
0605 #else
0606 # define XXH_HAS_ATTRIBUTE(x) 0
0607 #endif
0608
0609
0610 #if defined(__STDC_VERSION__) && (__STDC_VERSION__ > 201710L) && defined(__has_c_attribute)
0611 # define XXH_HAS_C_ATTRIBUTE(x) __has_c_attribute(x)
0612 #else
0613 # define XXH_HAS_C_ATTRIBUTE(x) 0
0614 #endif
0615
0616 #if defined(__cplusplus) && defined(__has_cpp_attribute)
0617 # define XXH_HAS_CPP_ATTRIBUTE(x) __has_cpp_attribute(x)
0618 #else
0619 # define XXH_HAS_CPP_ATTRIBUTE(x) 0
0620 #endif
0621
0622
0623
0624
0625
0626
0627
0628 #if XXH_HAS_C_ATTRIBUTE(x)
0629 # define XXH_FALLTHROUGH [[fallthrough]]
0630 #elif XXH_HAS_CPP_ATTRIBUTE(x)
0631 # define XXH_FALLTHROUGH [[fallthrough]]
0632 #elif XXH_HAS_ATTRIBUTE(__fallthrough__)
0633 # define XXH_FALLTHROUGH __attribute__ ((fallthrough))
0634 #else
0635 # define XXH_FALLTHROUGH
0636 #endif
0637
0638
0639
0640
0641
0642
0643
0644 #ifndef XXH_NO_LONG_LONG
0645
0646
0647
0648 #if defined(XXH_DOXYGEN)
0649
0650
0651
0652
0653
0654 typedef uint64_t XXH64_hash_t;
0655 #elif !defined (__VMS) \
0656 && (defined (__cplusplus) \
0657 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
0658 # include <stdint.h>
0659 typedef uint64_t XXH64_hash_t;
0660 #else
0661 # include <limits.h>
0662 # if defined(__LP64__) && ULONG_MAX == 0xFFFFFFFFFFFFFFFFULL
0663
0664 typedef unsigned long XXH64_hash_t;
0665 # else
0666
0667 typedef unsigned long long XXH64_hash_t;
0668 # endif
0669 #endif
0670
0671
0672
0673
0674
0675
0676
0677
0678
0679
0680
0681
0682
0683
0684
0685
0686
0687
0688
0689
0690
0691
0692
0693
0694
0695
0696
0697
0698
0699
0700
0701
0702
0703
0704
0705
0706
0707
0708
0709 XXH_PUBLIC_API XXH64_hash_t XXH64(const void* input, size_t length, XXH64_hash_t seed);
0710
0711
0712
0713
0714
0715
0716
0717 typedef struct XXH64_state_s XXH64_state_t;
0718 XXH_PUBLIC_API XXH64_state_t* XXH64_createState(void);
0719 XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t* statePtr);
0720 XXH_PUBLIC_API void XXH64_copyState(XXH64_state_t* dst_state, const XXH64_state_t* src_state);
0721
0722 XXH_PUBLIC_API XXH_errorcode XXH64_reset (XXH64_state_t* statePtr, XXH64_hash_t seed);
0723 XXH_PUBLIC_API XXH_errorcode XXH64_update (XXH64_state_t* statePtr, const void* input, size_t length);
0724 XXH_PUBLIC_API XXH64_hash_t XXH64_digest (const XXH64_state_t* statePtr);
0725
0726
0727 typedef struct { unsigned char digest[sizeof(XXH64_hash_t)]; } XXH64_canonical_t;
0728 XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH64_canonical_t* dst, XXH64_hash_t hash);
0729 XXH_PUBLIC_API XXH64_hash_t XXH64_hashFromCanonical(const XXH64_canonical_t* src);
0730
0731
0732
0733
0734
0735
0736
0737
0738
0739
0740
0741
0742
0743
0744
0745
0746
0747
0748
0749
0750
0751
0752
0753
0754
0755
0756
0757
0758
0759
0760
0761
0762
0763
0764
0765
0766
0767
0768
0769
0770
0771
0772
0773
0774
0775
0776
0777
0778
0779
0780
0781
0782
0783 XXH_PUBLIC_API XXH64_hash_t XXH3_64bits(const void* data, size_t len);
0784
0785
0786
0787
0788
0789
0790
0791
0792 XXH_PUBLIC_API XXH64_hash_t XXH3_64bits_withSeed(const void* data, size_t len, XXH64_hash_t seed);
0793
0794
0795
0796
0797
0798
0799
0800
0801 #define XXH3_SECRET_SIZE_MIN 136
0802
0803
0804
0805
0806
0807
0808
0809
0810
0811
0812
0813
0814
0815
0816
0817
0818
0819
0820 XXH_PUBLIC_API XXH64_hash_t XXH3_64bits_withSecret(const void* data, size_t len, const void* secret, size_t secretSize);
0821
0822
0823
0824
0825
0826
0827
0828
0829
0830
0831
0832
0833
0834
0835
0836 typedef struct XXH3_state_s XXH3_state_t;
0837 XXH_PUBLIC_API XXH3_state_t* XXH3_createState(void);
0838 XXH_PUBLIC_API XXH_errorcode XXH3_freeState(XXH3_state_t* statePtr);
0839 XXH_PUBLIC_API void XXH3_copyState(XXH3_state_t* dst_state, const XXH3_state_t* src_state);
0840
0841
0842
0843
0844
0845
0846 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset(XXH3_state_t* statePtr);
0847
0848
0849
0850
0851
0852 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset_withSeed(XXH3_state_t* statePtr, XXH64_hash_t seed);
0853
0854
0855
0856
0857
0858
0859
0860
0861
0862 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset_withSecret(XXH3_state_t* statePtr, const void* secret, size_t secretSize);
0863
0864 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_update (XXH3_state_t* statePtr, const void* input, size_t length);
0865 XXH_PUBLIC_API XXH64_hash_t XXH3_64bits_digest (const XXH3_state_t* statePtr);
0866
0867
0868
0869
0870
0871
0872
0873
0874
0875
0876
0877
0878
0879
0880
0881 typedef struct {
0882 XXH64_hash_t low64;
0883 XXH64_hash_t high64;
0884 } XXH128_hash_t;
0885
0886 XXH_PUBLIC_API XXH128_hash_t XXH3_128bits(const void* data, size_t len);
0887 XXH_PUBLIC_API XXH128_hash_t XXH3_128bits_withSeed(const void* data, size_t len, XXH64_hash_t seed);
0888 XXH_PUBLIC_API XXH128_hash_t XXH3_128bits_withSecret(const void* data, size_t len, const void* secret, size_t secretSize);
0889
0890
0891
0892
0893
0894
0895
0896
0897
0898
0899
0900
0901
0902
0903 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset(XXH3_state_t* statePtr);
0904 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset_withSeed(XXH3_state_t* statePtr, XXH64_hash_t seed);
0905 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset_withSecret(XXH3_state_t* statePtr, const void* secret, size_t secretSize);
0906
0907 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_update (XXH3_state_t* statePtr, const void* input, size_t length);
0908 XXH_PUBLIC_API XXH128_hash_t XXH3_128bits_digest (const XXH3_state_t* statePtr);
0909
0910
0911
0912
0913
0914
0915
0916
0917
0918 XXH_PUBLIC_API int XXH128_isEqual(XXH128_hash_t h1, XXH128_hash_t h2);
0919
0920
0921
0922
0923
0924
0925
0926
0927
0928
0929 XXH_PUBLIC_API int XXH128_cmp(const void* h128_1, const void* h128_2);
0930
0931
0932
0933 typedef struct { unsigned char digest[sizeof(XXH128_hash_t)]; } XXH128_canonical_t;
0934 XXH_PUBLIC_API void XXH128_canonicalFromHash(XXH128_canonical_t* dst, XXH128_hash_t hash);
0935 XXH_PUBLIC_API XXH128_hash_t XXH128_hashFromCanonical(const XXH128_canonical_t* src);
0936
0937
0938 #endif
0939
0940
0941
0942
0943 #endif
0944
0945
0946
0947 #if defined(XXH_STATIC_LINKING_ONLY) && !defined(XXHASH_H_STATIC_13879238742)
0948 #define XXHASH_H_STATIC_13879238742
0949
0950
0951
0952
0953
0954
0955
0956
0957
0958
0959
0960
0961
0962
0963
0964
0965
0966
0967
0968
0969
0970
0971
0972
0973
0974
0975 struct XXH32_state_s {
0976 XXH32_hash_t total_len_32;
0977 XXH32_hash_t large_len;
0978 XXH32_hash_t v[4];
0979 XXH32_hash_t mem32[4];
0980 XXH32_hash_t memsize;
0981 XXH32_hash_t reserved;
0982 };
0983
0984
0985 #ifndef XXH_NO_LONG_LONG
0986
0987
0988
0989
0990
0991
0992
0993
0994
0995
0996
0997
0998
0999 struct XXH64_state_s {
1000 XXH64_hash_t total_len;
1001 XXH64_hash_t v[4];
1002 XXH64_hash_t mem64[4];
1003 XXH32_hash_t memsize;
1004 XXH32_hash_t reserved32;
1005 XXH64_hash_t reserved64;
1006 };
1007
1008 #if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
1009 # include <stdalign.h>
1010 # define XXH_ALIGN(n) alignas(n)
1011 #elif defined(__cplusplus) && (__cplusplus >= 201103L)
1012
1013 # define XXH_ALIGN(n) alignas(n)
1014 #elif defined(__GNUC__)
1015 # define XXH_ALIGN(n) __attribute__ ((aligned(n)))
1016 #elif defined(_MSC_VER)
1017 # define XXH_ALIGN(n) __declspec(align(n))
1018 #else
1019 # define XXH_ALIGN(n)
1020 #endif
1021
1022
1023 #if !(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) \
1024 && ! (defined(__cplusplus) && (__cplusplus >= 201103L)) \
1025 && defined(__GNUC__)
1026 # define XXH_ALIGN_MEMBER(align, type) type XXH_ALIGN(align)
1027 #else
1028 # define XXH_ALIGN_MEMBER(align, type) XXH_ALIGN(align) type
1029 #endif
1030
1031
1032
1033
1034
1035
1036
1037
1038 #define XXH3_INTERNALBUFFER_SIZE 256
1039
1040
1041
1042
1043
1044
1045
1046
1047 #define XXH3_SECRET_DEFAULT_SIZE 192
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071 struct XXH3_state_s {
1072 XXH_ALIGN_MEMBER(64, XXH64_hash_t acc[8]);
1073
1074 XXH_ALIGN_MEMBER(64, unsigned char customSecret[XXH3_SECRET_DEFAULT_SIZE]);
1075
1076 XXH_ALIGN_MEMBER(64, unsigned char buffer[XXH3_INTERNALBUFFER_SIZE]);
1077
1078 XXH32_hash_t bufferedSize;
1079
1080 XXH32_hash_t useSeed;
1081
1082 size_t nbStripesSoFar;
1083
1084 XXH64_hash_t totalLen;
1085
1086 size_t nbStripesPerBlock;
1087
1088 size_t secretLimit;
1089
1090 XXH64_hash_t seed;
1091
1092 XXH64_hash_t reserved64;
1093
1094 const unsigned char* extSecret;
1095
1096
1097
1098 };
1099
1100 #undef XXH_ALIGN_MEMBER
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113 #define XXH3_INITSTATE(XXH3_state_ptr) { (XXH3_state_ptr)->seed = 0; }
1114
1115
1116
1117
1118
1119 XXH_PUBLIC_API XXH128_hash_t XXH128(const void* data, size_t len, XXH64_hash_t seed);
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153 XXH_PUBLIC_API XXH_errorcode XXH3_generateSecret(void* secretBuffer, size_t secretSize, const void* customSeed, size_t customSeedSize);
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169 XXH_PUBLIC_API void XXH3_generateSecret_fromSeed(void* secretBuffer, XXH64_hash_t seed);
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198 XXH_PUBLIC_API XXH64_hash_t
1199 XXH3_64bits_withSecretandSeed(const void* data, size_t len,
1200 const void* secret, size_t secretSize,
1201 XXH64_hash_t seed);
1202
1203 XXH_PUBLIC_API XXH128_hash_t
1204 XXH3_128bits_withSecretandSeed(const void* data, size_t len,
1205 const void* secret, size_t secretSize,
1206 XXH64_hash_t seed64);
1207
1208 XXH_PUBLIC_API XXH_errorcode
1209 XXH3_64bits_reset_withSecretandSeed(XXH3_state_t* statePtr,
1210 const void* secret, size_t secretSize,
1211 XXH64_hash_t seed64);
1212
1213 XXH_PUBLIC_API XXH_errorcode
1214 XXH3_128bits_reset_withSecretandSeed(XXH3_state_t* statePtr,
1215 const void* secret, size_t secretSize,
1216 XXH64_hash_t seed64);
1217
1218
1219 #endif
1220 #if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)
1221 # define XXH_IMPLEMENTATION
1222 #endif
1223
1224 #endif
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254 #if ( defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API) \
1255 || defined(XXH_IMPLEMENTATION) ) && !defined(XXH_IMPLEM_13a8737387)
1256 # define XXH_IMPLEM_13a8737387
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268 #ifdef XXH_DOXYGEN
1269
1270
1271
1272
1273
1274 # define XXH_NO_LONG_LONG
1275 # undef XXH_NO_LONG_LONG
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326 # define XXH_FORCE_MEMORY_ACCESS 0
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354 # define XXH_FORCE_ALIGN_CHECK 0
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376 # define XXH_NO_INLINE_HINTS 0
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388 # define XXH32_ENDJMP 0
1389
1390
1391
1392
1393
1394
1395
1396
1397 # define XXH_OLD_NAMES
1398 # undef XXH_OLD_NAMES
1399 #endif
1400
1401
1402
1403
1404 #ifndef XXH_FORCE_MEMORY_ACCESS
1405
1406 # if !defined(__clang__) && \
1407 ( \
1408 (defined(__INTEL_COMPILER) && !defined(_WIN32)) || \
1409 ( \
1410 defined(__GNUC__) && ( \
1411 (defined(__ARM_ARCH) && __ARM_ARCH >= 7) || \
1412 ( \
1413 defined(__mips__) && \
1414 (__mips <= 5 || __mips_isa_rev < 6) && \
1415 (!defined(__mips16) || defined(__mips_mips16e2)) \
1416 ) \
1417 ) \
1418 ) \
1419 )
1420 # define XXH_FORCE_MEMORY_ACCESS 1
1421 # endif
1422 #endif
1423
1424 #ifndef XXH_FORCE_ALIGN_CHECK
1425 # if defined(__i386) || defined(__x86_64__) || defined(__aarch64__) \
1426 || defined(_M_IX86) || defined(_M_X64) || defined(_M_ARM64)
1427 # define XXH_FORCE_ALIGN_CHECK 0
1428 # else
1429 # define XXH_FORCE_ALIGN_CHECK 1
1430 # endif
1431 #endif
1432
1433 #ifndef XXH_NO_INLINE_HINTS
1434 # if defined(__OPTIMIZE_SIZE__) \
1435 || defined(__NO_INLINE__)
1436 # define XXH_NO_INLINE_HINTS 1
1437 # else
1438 # define XXH_NO_INLINE_HINTS 0
1439 # endif
1440 #endif
1441
1442 #ifndef XXH32_ENDJMP
1443
1444 # define XXH32_ENDJMP 0
1445 #endif
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460 #include <stdlib.h>
1461
1462
1463
1464
1465
1466 static void* XXH_malloc(size_t s) { return malloc(s); }
1467
1468
1469
1470
1471
1472 static void XXH_free(void* p) { free(p); }
1473
1474 #include <string.h>
1475
1476
1477
1478
1479
1480 static void* XXH_memcpy(void* dest, const void* src, size_t size)
1481 {
1482 return memcpy(dest,src,size);
1483 }
1484
1485 #include <limits.h> /* ULLONG_MAX */
1486
1487
1488
1489
1490
1491 #ifdef _MSC_VER
1492 # pragma warning(disable : 4127)
1493 #endif
1494
1495 #if XXH_NO_INLINE_HINTS
1496 # if defined(__GNUC__) || defined(__clang__)
1497 # define XXH_FORCE_INLINE static __attribute__((unused))
1498 # else
1499 # define XXH_FORCE_INLINE static
1500 # endif
1501 # define XXH_NO_INLINE static
1502
1503 #elif defined(__GNUC__) || defined(__clang__)
1504 # define XXH_FORCE_INLINE static __inline__ __attribute__((always_inline, unused))
1505 # define XXH_NO_INLINE static __attribute__((noinline))
1506 #elif defined(_MSC_VER)
1507 # define XXH_FORCE_INLINE static __forceinline
1508 # define XXH_NO_INLINE static __declspec(noinline)
1509 #elif defined (__cplusplus) \
1510 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L))
1511 # define XXH_FORCE_INLINE static inline
1512 # define XXH_NO_INLINE static
1513 #else
1514 # define XXH_FORCE_INLINE static
1515 # define XXH_NO_INLINE static
1516 #endif
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531 #ifndef XXH_DEBUGLEVEL
1532 # ifdef DEBUGLEVEL
1533 # define XXH_DEBUGLEVEL DEBUGLEVEL
1534 # else
1535 # define XXH_DEBUGLEVEL 0
1536 # endif
1537 #endif
1538
1539 #if (XXH_DEBUGLEVEL>=1)
1540 # include <assert.h> /* note: can still be disabled with NDEBUG */
1541 # define XXH_ASSERT(c) assert(c)
1542 #else
1543 # define XXH_ASSERT(c) ((void)0)
1544 #endif
1545
1546
1547 #ifndef XXH_STATIC_ASSERT
1548 # if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
1549 # include <assert.h>
1550 # define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { static_assert((c),m); } while(0)
1551 # elif defined(__cplusplus) && (__cplusplus >= 201103L)
1552 # define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { static_assert((c),m); } while(0)
1553 # else
1554 # define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { struct xxh_sa { char x[(c) ? 1 : -1]; }; } while(0)
1555 # endif
1556 # define XXH_STATIC_ASSERT(c) XXH_STATIC_ASSERT_WITH_MESSAGE((c),#c)
1557 #endif
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575 #if defined(__GNUC__) || defined(__clang__)
1576 # define XXH_COMPILER_GUARD(var) __asm__ __volatile__("" : "+r" (var))
1577 #else
1578 # define XXH_COMPILER_GUARD(var) ((void)0)
1579 #endif
1580
1581
1582
1583
1584 #if !defined (__VMS) \
1585 && (defined (__cplusplus) \
1586 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
1587 # include <stdint.h>
1588 typedef uint8_t xxh_u8;
1589 #else
1590 typedef unsigned char xxh_u8;
1591 #endif
1592 typedef XXH32_hash_t xxh_u32;
1593
1594 #ifdef XXH_OLD_NAMES
1595 # define BYTE xxh_u8
1596 # define U8 xxh_u8
1597 # define U32 xxh_u32
1598 #endif
1599
1600
1601
1602
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
1653
1654
1655
1656
1657 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
1658
1659
1660
1661
1662
1663 static xxh_u32 XXH_read32(const void* memPtr) { return *(const xxh_u32*) memPtr; }
1664
1665 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
1666
1667
1668
1669
1670
1671
1672
1673 #ifdef XXH_OLD_NAMES
1674 typedef union { xxh_u32 u32; } __attribute__((packed)) unalign;
1675 #endif
1676 static xxh_u32 XXH_read32(const void* ptr)
1677 {
1678 typedef union { xxh_u32 u32; } __attribute__((packed)) xxh_unalign;
1679 return ((const xxh_unalign*)ptr)->u32;
1680 }
1681
1682 #else
1683
1684
1685
1686
1687
1688 static xxh_u32 XXH_read32(const void* memPtr)
1689 {
1690 xxh_u32 val;
1691 XXH_memcpy(&val, memPtr, sizeof(val));
1692 return val;
1693 }
1694
1695 #endif
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
1709
1710
1711
1712
1713
1714
1715
1716 #ifndef XXH_CPU_LITTLE_ENDIAN
1717
1718
1719
1720
1721 # if defined(_WIN32) \
1722 || defined(__LITTLE_ENDIAN__) \
1723 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
1724 # define XXH_CPU_LITTLE_ENDIAN 1
1725 # elif defined(__BIG_ENDIAN__) \
1726 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
1727 # define XXH_CPU_LITTLE_ENDIAN 0
1728 # else
1729
1730
1731
1732
1733
1734
1735 static int XXH_isLittleEndian(void)
1736 {
1737
1738
1739
1740
1741 const union { xxh_u32 u; xxh_u8 c[4]; } one = { 1 };
1742 return one.c[0];
1743 }
1744 # define XXH_CPU_LITTLE_ENDIAN XXH_isLittleEndian()
1745 # endif
1746 #endif
1747
1748
1749
1750
1751
1752
1753
1754 #define XXH_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
1755
1756 #ifdef __has_builtin
1757 # define XXH_HAS_BUILTIN(x) __has_builtin(x)
1758 #else
1759 # define XXH_HAS_BUILTIN(x) 0
1760 #endif
1761
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774
1775 #if !defined(NO_CLANG_BUILTIN) && XXH_HAS_BUILTIN(__builtin_rotateleft32) \
1776 && XXH_HAS_BUILTIN(__builtin_rotateleft64)
1777 # define XXH_rotl32 __builtin_rotateleft32
1778 # define XXH_rotl64 __builtin_rotateleft64
1779
1780 #elif defined(_MSC_VER)
1781 # define XXH_rotl32(x,r) _rotl(x,r)
1782 # define XXH_rotl64(x,r) _rotl64(x,r)
1783 #else
1784 # define XXH_rotl32(x,r) (((x) << (r)) | ((x) >> (32 - (r))))
1785 # define XXH_rotl64(x,r) (((x) << (r)) | ((x) >> (64 - (r))))
1786 #endif
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796 #if defined(_MSC_VER)
1797 # define XXH_swap32 _byteswap_ulong
1798 #elif XXH_GCC_VERSION >= 403
1799 # define XXH_swap32 __builtin_bswap32
1800 #else
1801 static xxh_u32 XXH_swap32 (xxh_u32 x)
1802 {
1803 return ((x << 24) & 0xff000000 ) |
1804 ((x << 8) & 0x00ff0000 ) |
1805 ((x >> 8) & 0x0000ff00 ) |
1806 ((x >> 24) & 0x000000ff );
1807 }
1808 #endif
1809
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819 typedef enum {
1820 XXH_aligned,
1821 XXH_unaligned
1822 } XXH_alignment;
1823
1824
1825
1826
1827
1828
1829 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
1830
1831 XXH_FORCE_INLINE xxh_u32 XXH_readLE32(const void* memPtr)
1832 {
1833 const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
1834 return bytePtr[0]
1835 | ((xxh_u32)bytePtr[1] << 8)
1836 | ((xxh_u32)bytePtr[2] << 16)
1837 | ((xxh_u32)bytePtr[3] << 24);
1838 }
1839
1840 XXH_FORCE_INLINE xxh_u32 XXH_readBE32(const void* memPtr)
1841 {
1842 const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
1843 return bytePtr[3]
1844 | ((xxh_u32)bytePtr[2] << 8)
1845 | ((xxh_u32)bytePtr[1] << 16)
1846 | ((xxh_u32)bytePtr[0] << 24);
1847 }
1848
1849 #else
1850 XXH_FORCE_INLINE xxh_u32 XXH_readLE32(const void* ptr)
1851 {
1852 return XXH_CPU_LITTLE_ENDIAN ? XXH_read32(ptr) : XXH_swap32(XXH_read32(ptr));
1853 }
1854
1855 static xxh_u32 XXH_readBE32(const void* ptr)
1856 {
1857 return XXH_CPU_LITTLE_ENDIAN ? XXH_swap32(XXH_read32(ptr)) : XXH_read32(ptr);
1858 }
1859 #endif
1860
1861 XXH_FORCE_INLINE xxh_u32
1862 XXH_readLE32_align(const void* ptr, XXH_alignment align)
1863 {
1864 if (align==XXH_unaligned) {
1865 return XXH_readLE32(ptr);
1866 } else {
1867 return XXH_CPU_LITTLE_ENDIAN ? *(const xxh_u32*)ptr : XXH_swap32(*(const xxh_u32*)ptr);
1868 }
1869 }
1870
1871
1872
1873
1874
1875
1876 XXH_PUBLIC_API unsigned XXH_versionNumber (void) { return XXH_VERSION_NUMBER; }
1877
1878
1879
1880
1881
1882
1883
1884
1885
1886
1887
1888
1889 #define XXH_PRIME32_1 0x9E3779B1U
1890 #define XXH_PRIME32_2 0x85EBCA77U
1891 #define XXH_PRIME32_3 0xC2B2AE3DU
1892 #define XXH_PRIME32_4 0x27D4EB2FU
1893 #define XXH_PRIME32_5 0x165667B1U
1894
1895 #ifdef XXH_OLD_NAMES
1896 # define PRIME32_1 XXH_PRIME32_1
1897 # define PRIME32_2 XXH_PRIME32_2
1898 # define PRIME32_3 XXH_PRIME32_3
1899 # define PRIME32_4 XXH_PRIME32_4
1900 # define PRIME32_5 XXH_PRIME32_5
1901 #endif
1902
1903
1904
1905
1906
1907
1908
1909
1910
1911
1912
1913
1914 static xxh_u32 XXH32_round(xxh_u32 acc, xxh_u32 input)
1915 {
1916 acc += input * XXH_PRIME32_2;
1917 acc = XXH_rotl32(acc, 13);
1918 acc *= XXH_PRIME32_1;
1919 #if (defined(__SSE4_1__) || defined(__aarch64__)) && !defined(XXH_ENABLE_AUTOVECTORIZE)
1920
1921
1922
1923
1924
1925
1926
1927
1928
1929
1930
1931
1932
1933
1934
1935
1936
1937
1938
1939
1940
1941
1942
1943
1944
1945
1946
1947
1948
1949
1950
1951
1952
1953 XXH_COMPILER_GUARD(acc);
1954 #endif
1955 return acc;
1956 }
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968 static xxh_u32 XXH32_avalanche(xxh_u32 h32)
1969 {
1970 h32 ^= h32 >> 15;
1971 h32 *= XXH_PRIME32_2;
1972 h32 ^= h32 >> 13;
1973 h32 *= XXH_PRIME32_3;
1974 h32 ^= h32 >> 16;
1975 return(h32);
1976 }
1977
1978 #define XXH_get32bits(p) XXH_readLE32_align(p, align)
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990
1991
1992
1993
1994 static xxh_u32
1995 XXH32_finalize(xxh_u32 h32, const xxh_u8* ptr, size_t len, XXH_alignment align)
1996 {
1997 #define XXH_PROCESS1 do { \
1998 h32 += (*ptr++) * XXH_PRIME32_5; \
1999 h32 = XXH_rotl32(h32, 11) * XXH_PRIME32_1; \
2000 } while (0)
2001
2002 #define XXH_PROCESS4 do { \
2003 h32 += XXH_get32bits(ptr) * XXH_PRIME32_3; \
2004 ptr += 4; \
2005 h32 = XXH_rotl32(h32, 17) * XXH_PRIME32_4; \
2006 } while (0)
2007
2008 if (ptr==NULL) XXH_ASSERT(len == 0);
2009
2010
2011 if (!XXH32_ENDJMP) {
2012 len &= 15;
2013 while (len >= 4) {
2014 XXH_PROCESS4;
2015 len -= 4;
2016 }
2017 while (len > 0) {
2018 XXH_PROCESS1;
2019 --len;
2020 }
2021 return XXH32_avalanche(h32);
2022 } else {
2023 switch(len&15) {
2024 case 12: XXH_PROCESS4;
2025 XXH_FALLTHROUGH;
2026 case 8: XXH_PROCESS4;
2027 XXH_FALLTHROUGH;
2028 case 4: XXH_PROCESS4;
2029 return XXH32_avalanche(h32);
2030
2031 case 13: XXH_PROCESS4;
2032 XXH_FALLTHROUGH;
2033 case 9: XXH_PROCESS4;
2034 XXH_FALLTHROUGH;
2035 case 5: XXH_PROCESS4;
2036 XXH_PROCESS1;
2037 return XXH32_avalanche(h32);
2038
2039 case 14: XXH_PROCESS4;
2040 XXH_FALLTHROUGH;
2041 case 10: XXH_PROCESS4;
2042 XXH_FALLTHROUGH;
2043 case 6: XXH_PROCESS4;
2044 XXH_PROCESS1;
2045 XXH_PROCESS1;
2046 return XXH32_avalanche(h32);
2047
2048 case 15: XXH_PROCESS4;
2049 XXH_FALLTHROUGH;
2050 case 11: XXH_PROCESS4;
2051 XXH_FALLTHROUGH;
2052 case 7: XXH_PROCESS4;
2053 XXH_FALLTHROUGH;
2054 case 3: XXH_PROCESS1;
2055 XXH_FALLTHROUGH;
2056 case 2: XXH_PROCESS1;
2057 XXH_FALLTHROUGH;
2058 case 1: XXH_PROCESS1;
2059 XXH_FALLTHROUGH;
2060 case 0: return XXH32_avalanche(h32);
2061 }
2062 XXH_ASSERT(0);
2063 return h32;
2064 }
2065 }
2066
2067 #ifdef XXH_OLD_NAMES
2068 # define PROCESS1 XXH_PROCESS1
2069 # define PROCESS4 XXH_PROCESS4
2070 #else
2071 # undef XXH_PROCESS1
2072 # undef XXH_PROCESS4
2073 #endif
2074
2075
2076
2077
2078
2079
2080
2081
2082
2083 XXH_FORCE_INLINE xxh_u32
2084 XXH32_endian_align(const xxh_u8* input, size_t len, xxh_u32 seed, XXH_alignment align)
2085 {
2086 xxh_u32 h32;
2087
2088 if (input==NULL) XXH_ASSERT(len == 0);
2089
2090 if (len>=16) {
2091 const xxh_u8* const bEnd = input + len;
2092 const xxh_u8* const limit = bEnd - 15;
2093 xxh_u32 v1 = seed + XXH_PRIME32_1 + XXH_PRIME32_2;
2094 xxh_u32 v2 = seed + XXH_PRIME32_2;
2095 xxh_u32 v3 = seed + 0;
2096 xxh_u32 v4 = seed - XXH_PRIME32_1;
2097
2098 do {
2099 v1 = XXH32_round(v1, XXH_get32bits(input)); input += 4;
2100 v2 = XXH32_round(v2, XXH_get32bits(input)); input += 4;
2101 v3 = XXH32_round(v3, XXH_get32bits(input)); input += 4;
2102 v4 = XXH32_round(v4, XXH_get32bits(input)); input += 4;
2103 } while (input < limit);
2104
2105 h32 = XXH_rotl32(v1, 1) + XXH_rotl32(v2, 7)
2106 + XXH_rotl32(v3, 12) + XXH_rotl32(v4, 18);
2107 } else {
2108 h32 = seed + XXH_PRIME32_5;
2109 }
2110
2111 h32 += (xxh_u32)len;
2112
2113 return XXH32_finalize(h32, input, len&15, align);
2114 }
2115
2116
2117 XXH_PUBLIC_API XXH32_hash_t XXH32 (const void* input, size_t len, XXH32_hash_t seed)
2118 {
2119 #if 0
2120
2121 XXH32_state_t state;
2122 XXH32_reset(&state, seed);
2123 XXH32_update(&state, (const xxh_u8*)input, len);
2124 return XXH32_digest(&state);
2125 #else
2126 if (XXH_FORCE_ALIGN_CHECK) {
2127 if ((((size_t)input) & 3) == 0) {
2128 return XXH32_endian_align((const xxh_u8*)input, len, seed, XXH_aligned);
2129 } }
2130
2131 return XXH32_endian_align((const xxh_u8*)input, len, seed, XXH_unaligned);
2132 #endif
2133 }
2134
2135
2136
2137
2138
2139
2140
2141 XXH_PUBLIC_API XXH32_state_t* XXH32_createState(void)
2142 {
2143 return (XXH32_state_t*)XXH_malloc(sizeof(XXH32_state_t));
2144 }
2145
2146 XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t* statePtr)
2147 {
2148 XXH_free(statePtr);
2149 return XXH_OK;
2150 }
2151
2152
2153 XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t* dstState, const XXH32_state_t* srcState)
2154 {
2155 XXH_memcpy(dstState, srcState, sizeof(*dstState));
2156 }
2157
2158
2159 XXH_PUBLIC_API XXH_errorcode XXH32_reset(XXH32_state_t* statePtr, XXH32_hash_t seed)
2160 {
2161 XXH32_state_t state;
2162 memset(&state, 0, sizeof(state));
2163 state.v[0] = seed + XXH_PRIME32_1 + XXH_PRIME32_2;
2164 state.v[1] = seed + XXH_PRIME32_2;
2165 state.v[2] = seed + 0;
2166 state.v[3] = seed - XXH_PRIME32_1;
2167
2168 XXH_memcpy(statePtr, &state, sizeof(state) - sizeof(state.reserved));
2169 return XXH_OK;
2170 }
2171
2172
2173
2174 XXH_PUBLIC_API XXH_errorcode
2175 XXH32_update(XXH32_state_t* state, const void* input, size_t len)
2176 {
2177 if (input==NULL) {
2178 XXH_ASSERT(len == 0);
2179 return XXH_OK;
2180 }
2181
2182 { const xxh_u8* p = (const xxh_u8*)input;
2183 const xxh_u8* const bEnd = p + len;
2184
2185 state->total_len_32 += (XXH32_hash_t)len;
2186 state->large_len |= (XXH32_hash_t)((len>=16) | (state->total_len_32>=16));
2187
2188 if (state->memsize + len < 16) {
2189 XXH_memcpy((xxh_u8*)(state->mem32) + state->memsize, input, len);
2190 state->memsize += (XXH32_hash_t)len;
2191 return XXH_OK;
2192 }
2193
2194 if (state->memsize) {
2195 XXH_memcpy((xxh_u8*)(state->mem32) + state->memsize, input, 16-state->memsize);
2196 { const xxh_u32* p32 = state->mem32;
2197 state->v[0] = XXH32_round(state->v[0], XXH_readLE32(p32)); p32++;
2198 state->v[1] = XXH32_round(state->v[1], XXH_readLE32(p32)); p32++;
2199 state->v[2] = XXH32_round(state->v[2], XXH_readLE32(p32)); p32++;
2200 state->v[3] = XXH32_round(state->v[3], XXH_readLE32(p32));
2201 }
2202 p += 16-state->memsize;
2203 state->memsize = 0;
2204 }
2205
2206 if (p <= bEnd-16) {
2207 const xxh_u8* const limit = bEnd - 16;
2208
2209 do {
2210 state->v[0] = XXH32_round(state->v[0], XXH_readLE32(p)); p+=4;
2211 state->v[1] = XXH32_round(state->v[1], XXH_readLE32(p)); p+=4;
2212 state->v[2] = XXH32_round(state->v[2], XXH_readLE32(p)); p+=4;
2213 state->v[3] = XXH32_round(state->v[3], XXH_readLE32(p)); p+=4;
2214 } while (p<=limit);
2215
2216 }
2217
2218 if (p < bEnd) {
2219 XXH_memcpy(state->mem32, p, (size_t)(bEnd-p));
2220 state->memsize = (unsigned)(bEnd-p);
2221 }
2222 }
2223
2224 return XXH_OK;
2225 }
2226
2227
2228
2229 XXH_PUBLIC_API XXH32_hash_t XXH32_digest(const XXH32_state_t* state)
2230 {
2231 xxh_u32 h32;
2232
2233 if (state->large_len) {
2234 h32 = XXH_rotl32(state->v[0], 1)
2235 + XXH_rotl32(state->v[1], 7)
2236 + XXH_rotl32(state->v[2], 12)
2237 + XXH_rotl32(state->v[3], 18);
2238 } else {
2239 h32 = state->v[2] + XXH_PRIME32_5;
2240 }
2241
2242 h32 += state->total_len_32;
2243
2244 return XXH32_finalize(h32, (const xxh_u8*)state->mem32, state->memsize, XXH_aligned);
2245 }
2246
2247
2248
2249
2250
2251
2252
2253
2254
2255
2256
2257
2258
2259
2260
2261
2262
2263
2264 XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t* dst, XXH32_hash_t hash)
2265 {
2266 XXH_STATIC_ASSERT(sizeof(XXH32_canonical_t) == sizeof(XXH32_hash_t));
2267 if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap32(hash);
2268 XXH_memcpy(dst, &hash, sizeof(*dst));
2269 }
2270
2271 XXH_PUBLIC_API XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t* src)
2272 {
2273 return XXH_readBE32(src);
2274 }
2275
2276
2277 #ifndef XXH_NO_LONG_LONG
2278
2279
2280
2281
2282
2283
2284
2285
2286
2287
2288
2289 typedef XXH64_hash_t xxh_u64;
2290
2291 #ifdef XXH_OLD_NAMES
2292 # define U64 xxh_u64
2293 #endif
2294
2295 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2296
2297
2298
2299
2300 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
2301
2302
2303 static xxh_u64 XXH_read64(const void* memPtr)
2304 {
2305 return *(const xxh_u64*) memPtr;
2306 }
2307
2308 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
2309
2310
2311
2312
2313
2314
2315
2316 #ifdef XXH_OLD_NAMES
2317 typedef union { xxh_u32 u32; xxh_u64 u64; } __attribute__((packed)) unalign64;
2318 #endif
2319 static xxh_u64 XXH_read64(const void* ptr)
2320 {
2321 typedef union { xxh_u32 u32; xxh_u64 u64; } __attribute__((packed)) xxh_unalign64;
2322 return ((const xxh_unalign64*)ptr)->u64;
2323 }
2324
2325 #else
2326
2327
2328
2329
2330
2331 static xxh_u64 XXH_read64(const void* memPtr)
2332 {
2333 xxh_u64 val;
2334 XXH_memcpy(&val, memPtr, sizeof(val));
2335 return val;
2336 }
2337
2338 #endif
2339
2340 #if defined(_MSC_VER)
2341 # define XXH_swap64 _byteswap_uint64
2342 #elif XXH_GCC_VERSION >= 403
2343 # define XXH_swap64 __builtin_bswap64
2344 #else
2345 static xxh_u64 XXH_swap64(xxh_u64 x)
2346 {
2347 return ((x << 56) & 0xff00000000000000ULL) |
2348 ((x << 40) & 0x00ff000000000000ULL) |
2349 ((x << 24) & 0x0000ff0000000000ULL) |
2350 ((x << 8) & 0x000000ff00000000ULL) |
2351 ((x >> 8) & 0x00000000ff000000ULL) |
2352 ((x >> 24) & 0x0000000000ff0000ULL) |
2353 ((x >> 40) & 0x000000000000ff00ULL) |
2354 ((x >> 56) & 0x00000000000000ffULL);
2355 }
2356 #endif
2357
2358
2359
2360 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2361
2362 XXH_FORCE_INLINE xxh_u64 XXH_readLE64(const void* memPtr)
2363 {
2364 const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
2365 return bytePtr[0]
2366 | ((xxh_u64)bytePtr[1] << 8)
2367 | ((xxh_u64)bytePtr[2] << 16)
2368 | ((xxh_u64)bytePtr[3] << 24)
2369 | ((xxh_u64)bytePtr[4] << 32)
2370 | ((xxh_u64)bytePtr[5] << 40)
2371 | ((xxh_u64)bytePtr[6] << 48)
2372 | ((xxh_u64)bytePtr[7] << 56);
2373 }
2374
2375 XXH_FORCE_INLINE xxh_u64 XXH_readBE64(const void* memPtr)
2376 {
2377 const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
2378 return bytePtr[7]
2379 | ((xxh_u64)bytePtr[6] << 8)
2380 | ((xxh_u64)bytePtr[5] << 16)
2381 | ((xxh_u64)bytePtr[4] << 24)
2382 | ((xxh_u64)bytePtr[3] << 32)
2383 | ((xxh_u64)bytePtr[2] << 40)
2384 | ((xxh_u64)bytePtr[1] << 48)
2385 | ((xxh_u64)bytePtr[0] << 56);
2386 }
2387
2388 #else
2389 XXH_FORCE_INLINE xxh_u64 XXH_readLE64(const void* ptr)
2390 {
2391 return XXH_CPU_LITTLE_ENDIAN ? XXH_read64(ptr) : XXH_swap64(XXH_read64(ptr));
2392 }
2393
2394 static xxh_u64 XXH_readBE64(const void* ptr)
2395 {
2396 return XXH_CPU_LITTLE_ENDIAN ? XXH_swap64(XXH_read64(ptr)) : XXH_read64(ptr);
2397 }
2398 #endif
2399
2400 XXH_FORCE_INLINE xxh_u64
2401 XXH_readLE64_align(const void* ptr, XXH_alignment align)
2402 {
2403 if (align==XXH_unaligned)
2404 return XXH_readLE64(ptr);
2405 else
2406 return XXH_CPU_LITTLE_ENDIAN ? *(const xxh_u64*)ptr : XXH_swap64(*(const xxh_u64*)ptr);
2407 }
2408
2409
2410
2411
2412
2413
2414
2415
2416
2417
2418 #define XXH_PRIME64_1 0x9E3779B185EBCA87ULL
2419 #define XXH_PRIME64_2 0xC2B2AE3D27D4EB4FULL
2420 #define XXH_PRIME64_3 0x165667B19E3779F9ULL
2421 #define XXH_PRIME64_4 0x85EBCA77C2B2AE63ULL
2422 #define XXH_PRIME64_5 0x27D4EB2F165667C5ULL
2423
2424 #ifdef XXH_OLD_NAMES
2425 # define PRIME64_1 XXH_PRIME64_1
2426 # define PRIME64_2 XXH_PRIME64_2
2427 # define PRIME64_3 XXH_PRIME64_3
2428 # define PRIME64_4 XXH_PRIME64_4
2429 # define PRIME64_5 XXH_PRIME64_5
2430 #endif
2431
2432 static xxh_u64 XXH64_round(xxh_u64 acc, xxh_u64 input)
2433 {
2434 acc += input * XXH_PRIME64_2;
2435 acc = XXH_rotl64(acc, 31);
2436 acc *= XXH_PRIME64_1;
2437 return acc;
2438 }
2439
2440 static xxh_u64 XXH64_mergeRound(xxh_u64 acc, xxh_u64 val)
2441 {
2442 val = XXH64_round(0, val);
2443 acc ^= val;
2444 acc = acc * XXH_PRIME64_1 + XXH_PRIME64_4;
2445 return acc;
2446 }
2447
2448 static xxh_u64 XXH64_avalanche(xxh_u64 h64)
2449 {
2450 h64 ^= h64 >> 33;
2451 h64 *= XXH_PRIME64_2;
2452 h64 ^= h64 >> 29;
2453 h64 *= XXH_PRIME64_3;
2454 h64 ^= h64 >> 32;
2455 return h64;
2456 }
2457
2458
2459 #define XXH_get64bits(p) XXH_readLE64_align(p, align)
2460
2461 static xxh_u64
2462 XXH64_finalize(xxh_u64 h64, const xxh_u8* ptr, size_t len, XXH_alignment align)
2463 {
2464 if (ptr==NULL) XXH_ASSERT(len == 0);
2465 len &= 31;
2466 while (len >= 8) {
2467 xxh_u64 const k1 = XXH64_round(0, XXH_get64bits(ptr));
2468 ptr += 8;
2469 h64 ^= k1;
2470 h64 = XXH_rotl64(h64,27) * XXH_PRIME64_1 + XXH_PRIME64_4;
2471 len -= 8;
2472 }
2473 if (len >= 4) {
2474 h64 ^= (xxh_u64)(XXH_get32bits(ptr)) * XXH_PRIME64_1;
2475 ptr += 4;
2476 h64 = XXH_rotl64(h64, 23) * XXH_PRIME64_2 + XXH_PRIME64_3;
2477 len -= 4;
2478 }
2479 while (len > 0) {
2480 h64 ^= (*ptr++) * XXH_PRIME64_5;
2481 h64 = XXH_rotl64(h64, 11) * XXH_PRIME64_1;
2482 --len;
2483 }
2484 return XXH64_avalanche(h64);
2485 }
2486
2487 #ifdef XXH_OLD_NAMES
2488 # define PROCESS1_64 XXH_PROCESS1_64
2489 # define PROCESS4_64 XXH_PROCESS4_64
2490 # define PROCESS8_64 XXH_PROCESS8_64
2491 #else
2492 # undef XXH_PROCESS1_64
2493 # undef XXH_PROCESS4_64
2494 # undef XXH_PROCESS8_64
2495 #endif
2496
2497 XXH_FORCE_INLINE xxh_u64
2498 XXH64_endian_align(const xxh_u8* input, size_t len, xxh_u64 seed, XXH_alignment align)
2499 {
2500 xxh_u64 h64;
2501 if (input==NULL) XXH_ASSERT(len == 0);
2502
2503 if (len>=32) {
2504 const xxh_u8* const bEnd = input + len;
2505 const xxh_u8* const limit = bEnd - 31;
2506 xxh_u64 v1 = seed + XXH_PRIME64_1 + XXH_PRIME64_2;
2507 xxh_u64 v2 = seed + XXH_PRIME64_2;
2508 xxh_u64 v3 = seed + 0;
2509 xxh_u64 v4 = seed - XXH_PRIME64_1;
2510
2511 do {
2512 v1 = XXH64_round(v1, XXH_get64bits(input)); input+=8;
2513 v2 = XXH64_round(v2, XXH_get64bits(input)); input+=8;
2514 v3 = XXH64_round(v3, XXH_get64bits(input)); input+=8;
2515 v4 = XXH64_round(v4, XXH_get64bits(input)); input+=8;
2516 } while (input<limit);
2517
2518 h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
2519 h64 = XXH64_mergeRound(h64, v1);
2520 h64 = XXH64_mergeRound(h64, v2);
2521 h64 = XXH64_mergeRound(h64, v3);
2522 h64 = XXH64_mergeRound(h64, v4);
2523
2524 } else {
2525 h64 = seed + XXH_PRIME64_5;
2526 }
2527
2528 h64 += (xxh_u64) len;
2529
2530 return XXH64_finalize(h64, input, len, align);
2531 }
2532
2533
2534
2535 XXH_PUBLIC_API XXH64_hash_t XXH64 (const void* input, size_t len, XXH64_hash_t seed)
2536 {
2537 #if 0
2538
2539 XXH64_state_t state;
2540 XXH64_reset(&state, seed);
2541 XXH64_update(&state, (const xxh_u8*)input, len);
2542 return XXH64_digest(&state);
2543 #else
2544 if (XXH_FORCE_ALIGN_CHECK) {
2545 if ((((size_t)input) & 7)==0) {
2546 return XXH64_endian_align((const xxh_u8*)input, len, seed, XXH_aligned);
2547 } }
2548
2549 return XXH64_endian_align((const xxh_u8*)input, len, seed, XXH_unaligned);
2550
2551 #endif
2552 }
2553
2554
2555
2556
2557 XXH_PUBLIC_API XXH64_state_t* XXH64_createState(void)
2558 {
2559 return (XXH64_state_t*)XXH_malloc(sizeof(XXH64_state_t));
2560 }
2561
2562 XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t* statePtr)
2563 {
2564 XXH_free(statePtr);
2565 return XXH_OK;
2566 }
2567
2568
2569 XXH_PUBLIC_API void XXH64_copyState(XXH64_state_t* dstState, const XXH64_state_t* srcState)
2570 {
2571 XXH_memcpy(dstState, srcState, sizeof(*dstState));
2572 }
2573
2574
2575 XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH64_state_t* statePtr, XXH64_hash_t seed)
2576 {
2577 XXH64_state_t state;
2578 memset(&state, 0, sizeof(state));
2579 state.v[0] = seed + XXH_PRIME64_1 + XXH_PRIME64_2;
2580 state.v[1] = seed + XXH_PRIME64_2;
2581 state.v[2] = seed + 0;
2582 state.v[3] = seed - XXH_PRIME64_1;
2583
2584 XXH_memcpy(statePtr, &state, sizeof(state) - sizeof(state.reserved64));
2585 return XXH_OK;
2586 }
2587
2588
2589 XXH_PUBLIC_API XXH_errorcode
2590 XXH64_update (XXH64_state_t* state, const void* input, size_t len)
2591 {
2592 if (input==NULL) {
2593 XXH_ASSERT(len == 0);
2594 return XXH_OK;
2595 }
2596
2597 { const xxh_u8* p = (const xxh_u8*)input;
2598 const xxh_u8* const bEnd = p + len;
2599
2600 state->total_len += len;
2601
2602 if (state->memsize + len < 32) {
2603 XXH_memcpy(((xxh_u8*)state->mem64) + state->memsize, input, len);
2604 state->memsize += (xxh_u32)len;
2605 return XXH_OK;
2606 }
2607
2608 if (state->memsize) {
2609 XXH_memcpy(((xxh_u8*)state->mem64) + state->memsize, input, 32-state->memsize);
2610 state->v[0] = XXH64_round(state->v[0], XXH_readLE64(state->mem64+0));
2611 state->v[1] = XXH64_round(state->v[1], XXH_readLE64(state->mem64+1));
2612 state->v[2] = XXH64_round(state->v[2], XXH_readLE64(state->mem64+2));
2613 state->v[3] = XXH64_round(state->v[3], XXH_readLE64(state->mem64+3));
2614 p += 32 - state->memsize;
2615 state->memsize = 0;
2616 }
2617
2618 if (p+32 <= bEnd) {
2619 const xxh_u8* const limit = bEnd - 32;
2620
2621 do {
2622 state->v[0] = XXH64_round(state->v[0], XXH_readLE64(p)); p+=8;
2623 state->v[1] = XXH64_round(state->v[1], XXH_readLE64(p)); p+=8;
2624 state->v[2] = XXH64_round(state->v[2], XXH_readLE64(p)); p+=8;
2625 state->v[3] = XXH64_round(state->v[3], XXH_readLE64(p)); p+=8;
2626 } while (p<=limit);
2627
2628 }
2629
2630 if (p < bEnd) {
2631 XXH_memcpy(state->mem64, p, (size_t)(bEnd-p));
2632 state->memsize = (unsigned)(bEnd-p);
2633 }
2634 }
2635
2636 return XXH_OK;
2637 }
2638
2639
2640
2641 XXH_PUBLIC_API XXH64_hash_t XXH64_digest(const XXH64_state_t* state)
2642 {
2643 xxh_u64 h64;
2644
2645 if (state->total_len >= 32) {
2646 h64 = XXH_rotl64(state->v[0], 1) + XXH_rotl64(state->v[1], 7) + XXH_rotl64(state->v[2], 12) + XXH_rotl64(state->v[3], 18);
2647 h64 = XXH64_mergeRound(h64, state->v[0]);
2648 h64 = XXH64_mergeRound(h64, state->v[1]);
2649 h64 = XXH64_mergeRound(h64, state->v[2]);
2650 h64 = XXH64_mergeRound(h64, state->v[3]);
2651 } else {
2652 h64 = state->v[2] + XXH_PRIME64_5;
2653 }
2654
2655 h64 += (xxh_u64) state->total_len;
2656
2657 return XXH64_finalize(h64, (const xxh_u8*)state->mem64, (size_t)state->total_len, XXH_aligned);
2658 }
2659
2660
2661
2662
2663
2664 XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH64_canonical_t* dst, XXH64_hash_t hash)
2665 {
2666 XXH_STATIC_ASSERT(sizeof(XXH64_canonical_t) == sizeof(XXH64_hash_t));
2667 if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap64(hash);
2668 XXH_memcpy(dst, &hash, sizeof(*dst));
2669 }
2670
2671
2672 XXH_PUBLIC_API XXH64_hash_t XXH64_hashFromCanonical(const XXH64_canonical_t* src)
2673 {
2674 return XXH_readBE64(src);
2675 }
2676
2677 #ifndef XXH_NO_XXH3
2678
2679
2680
2681
2682
2683
2684
2685
2686
2687
2688
2689
2690
2691
2692 #if ((defined(sun) || defined(__sun)) && __cplusplus)
2693 # define XXH_RESTRICT
2694 #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
2695 # define XXH_RESTRICT restrict
2696 #else
2697
2698 # define XXH_RESTRICT
2699 #endif
2700
2701 #if (defined(__GNUC__) && (__GNUC__ >= 3)) \
2702 || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) \
2703 || defined(__clang__)
2704 # define XXH_likely(x) __builtin_expect(x, 1)
2705 # define XXH_unlikely(x) __builtin_expect(x, 0)
2706 #else
2707 # define XXH_likely(x) (x)
2708 # define XXH_unlikely(x) (x)
2709 #endif
2710
2711 #if defined(__GNUC__)
2712 # if defined(__AVX2__)
2713 # include <immintrin.h>
2714 # elif defined(__SSE2__)
2715 # include <emmintrin.h>
2716 # elif defined(__ARM_NEON__) || defined(__ARM_NEON)
2717 # define inline __inline__
2718 # include <arm_neon.h>
2719 # undef inline
2720 # endif
2721 #elif defined(_MSC_VER)
2722 # include <intrin.h>
2723 #endif
2724
2725
2726
2727
2728
2729
2730
2731
2732
2733
2734
2735
2736
2737
2738
2739
2740
2741
2742
2743
2744
2745
2746
2747
2748
2749
2750
2751
2752
2753
2754
2755
2756
2757
2758
2759
2760
2761
2762
2763
2764
2765
2766
2767
2768
2769
2770
2771
2772
2773
2774
2775
2776
2777
2778
2779
2780
2781
2782
2783
2784
2785
2786
2787
2788
2789
2790
2791
2792
2793
2794 #if defined(__thumb__) && !defined(__thumb2__) && defined(__ARM_ARCH_ISA_ARM)
2795 # warning "XXH3 is highly inefficient without ARM or Thumb-2."
2796 #endif
2797
2798
2799
2800
2801
2802 #ifdef XXH_DOXYGEN
2803
2804
2805
2806
2807
2808
2809
2810
2811
2812
2813 # define XXH_VECTOR XXH_SCALAR
2814
2815
2816
2817
2818
2819
2820
2821
2822
2823 enum XXH_VECTOR_TYPE {
2824 XXH_SCALAR = 0,
2825 XXH_SSE2 = 1,
2826
2827
2828
2829
2830
2831 XXH_AVX2 = 2,
2832 XXH_AVX512 = 3,
2833 XXH_NEON = 4,
2834 XXH_VSX = 5,
2835 };
2836
2837
2838
2839
2840
2841
2842
2843
2844
2845 # define XXH_ACC_ALIGN 8
2846 #endif
2847
2848
2849 #ifndef XXH_DOXYGEN
2850 # define XXH_SCALAR 0
2851 # define XXH_SSE2 1
2852 # define XXH_AVX2 2
2853 # define XXH_AVX512 3
2854 # define XXH_NEON 4
2855 # define XXH_VSX 5
2856 #endif
2857
2858 #ifndef XXH_VECTOR
2859 # if defined(__AVX512F__)
2860 # define XXH_VECTOR XXH_AVX512
2861 # elif defined(__AVX2__)
2862 # define XXH_VECTOR XXH_AVX2
2863 # elif defined(__SSE2__) || defined(_M_AMD64) || defined(_M_X64) || (defined(_M_IX86_FP) && (_M_IX86_FP == 2))
2864 # define XXH_VECTOR XXH_SSE2
2865 # elif ( \
2866 defined(__ARM_NEON__) || defined(__ARM_NEON) \
2867 || defined(_M_ARM64) || defined(_M_ARM_ARMV7VE) \
2868 ) && ( \
2869 defined(_WIN32) || defined(__LITTLE_ENDIAN__) \
2870 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) \
2871 )
2872 # define XXH_VECTOR XXH_NEON
2873 # elif (defined(__PPC64__) && defined(__POWER8_VECTOR__)) \
2874 || (defined(__s390x__) && defined(__VEC__)) \
2875 && defined(__GNUC__)
2876 # define XXH_VECTOR XXH_VSX
2877 # else
2878 # define XXH_VECTOR XXH_SCALAR
2879 # endif
2880 #endif
2881
2882
2883
2884
2885
2886 #ifndef XXH_ACC_ALIGN
2887 # if defined(XXH_X86DISPATCH)
2888 # define XXH_ACC_ALIGN 64
2889 # elif XXH_VECTOR == XXH_SCALAR
2890 # define XXH_ACC_ALIGN 8
2891 # elif XXH_VECTOR == XXH_SSE2
2892 # define XXH_ACC_ALIGN 16
2893 # elif XXH_VECTOR == XXH_AVX2
2894 # define XXH_ACC_ALIGN 32
2895 # elif XXH_VECTOR == XXH_NEON
2896 # define XXH_ACC_ALIGN 16
2897 # elif XXH_VECTOR == XXH_VSX
2898 # define XXH_ACC_ALIGN 16
2899 # elif XXH_VECTOR == XXH_AVX512
2900 # define XXH_ACC_ALIGN 64
2901 # endif
2902 #endif
2903
2904 #if defined(XXH_X86DISPATCH) || XXH_VECTOR == XXH_SSE2 \
2905 || XXH_VECTOR == XXH_AVX2 || XXH_VECTOR == XXH_AVX512
2906 # define XXH_SEC_ALIGN XXH_ACC_ALIGN
2907 #else
2908 # define XXH_SEC_ALIGN 8
2909 #endif
2910
2911
2912
2913
2914
2915
2916
2917
2918
2919
2920
2921
2922
2923
2924
2925
2926
2927
2928
2929
2930
2931
2932 #if XXH_VECTOR == XXH_AVX2 \
2933 && defined(__GNUC__) && !defined(__clang__) \
2934 && defined(__OPTIMIZE__) && !defined(__OPTIMIZE_SIZE__)
2935 # pragma GCC push_options
2936 # pragma GCC optimize("-O2")
2937 #endif
2938
2939
2940 #if XXH_VECTOR == XXH_NEON
2941
2942
2943
2944
2945
2946
2947
2948
2949
2950
2951
2952
2953
2954
2955
2956
2957
2958
2959
2960
2961
2962
2963
2964
2965
2966
2967
2968
2969
2970
2971
2972
2973
2974
2975
2976
2977
2978
2979
2980
2981
2982
2983
2984
2985
2986
2987
2988
2989
2990
2991
2992
2993
2994
2995
2996
2997
2998
2999
3000
3001
3002
3003
3004
3005
3006
3007
3008
3009
3010
3011
3012
3013
3014
3015
3016
3017
3018
3019
3020
3021 # if !defined(XXH_NO_VZIP_HACK) \
3022 && defined(__GNUC__) \
3023 && !defined(__aarch64__) && !defined(__arm64__) && !defined(_M_ARM64)
3024 # define XXH_SPLIT_IN_PLACE(in, outLo, outHi) \
3025 do { \
3026 \
3027 \
3028 \
3029 __asm__("vzip.32 %e0, %f0" : "+w" (in)); \
3030 (outLo) = vget_low_u32 (vreinterpretq_u32_u64(in)); \
3031 (outHi) = vget_high_u32(vreinterpretq_u32_u64(in)); \
3032 } while (0)
3033 # else
3034 # define XXH_SPLIT_IN_PLACE(in, outLo, outHi) \
3035 do { \
3036 (outLo) = vmovn_u64 (in); \
3037 (outHi) = vshrn_n_u64 ((in), 32); \
3038 } while (0)
3039 # endif
3040 #endif
3041
3042
3043
3044
3045
3046
3047
3048
3049
3050 #if XXH_VECTOR == XXH_VSX
3051 # if defined(__s390x__)
3052 # include <s390intrin.h>
3053 # else
3054
3055
3056
3057
3058
3059
3060
3061
3062 # if defined(__GNUC__) && !defined(__APPLE_ALTIVEC__)
3063 # define __APPLE_ALTIVEC__
3064 # endif
3065 # include <altivec.h>
3066 # endif
3067
3068 typedef __vector unsigned long long xxh_u64x2;
3069 typedef __vector unsigned char xxh_u8x16;
3070 typedef __vector unsigned xxh_u32x4;
3071
3072 # ifndef XXH_VSX_BE
3073 # if defined(__BIG_ENDIAN__) \
3074 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
3075 # define XXH_VSX_BE 1
3076 # elif defined(__VEC_ELEMENT_REG_ORDER__) && __VEC_ELEMENT_REG_ORDER__ == __ORDER_BIG_ENDIAN__
3077 # warning "-maltivec=be is not recommended. Please use native endianness."
3078 # define XXH_VSX_BE 1
3079 # else
3080 # define XXH_VSX_BE 0
3081 # endif
3082 # endif
3083
3084 # if XXH_VSX_BE
3085 # if defined(__POWER9_VECTOR__) || (defined(__clang__) && defined(__s390x__))
3086 # define XXH_vec_revb vec_revb
3087 # else
3088
3089
3090
3091 XXH_FORCE_INLINE xxh_u64x2 XXH_vec_revb(xxh_u64x2 val)
3092 {
3093 xxh_u8x16 const vByteSwap = { 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01, 0x00,
3094 0x0F, 0x0E, 0x0D, 0x0C, 0x0B, 0x0A, 0x09, 0x08 };
3095 return vec_perm(val, val, vByteSwap);
3096 }
3097 # endif
3098 # endif
3099
3100
3101
3102
3103 XXH_FORCE_INLINE xxh_u64x2 XXH_vec_loadu(const void *ptr)
3104 {
3105 xxh_u64x2 ret;
3106 XXH_memcpy(&ret, ptr, sizeof(xxh_u64x2));
3107 # if XXH_VSX_BE
3108 ret = XXH_vec_revb(ret);
3109 # endif
3110 return ret;
3111 }
3112
3113
3114
3115
3116
3117
3118
3119 # if defined(__s390x__)
3120
3121 # define XXH_vec_mulo vec_mulo
3122 # define XXH_vec_mule vec_mule
3123 # elif defined(__clang__) && XXH_HAS_BUILTIN(__builtin_altivec_vmuleuw)
3124
3125 # define XXH_vec_mulo __builtin_altivec_vmulouw
3126 # define XXH_vec_mule __builtin_altivec_vmuleuw
3127 # else
3128
3129
3130 XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mulo(xxh_u32x4 a, xxh_u32x4 b)
3131 {
3132 xxh_u64x2 result;
3133 __asm__("vmulouw %0, %1, %2" : "=v" (result) : "v" (a), "v" (b));
3134 return result;
3135 }
3136 XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mule(xxh_u32x4 a, xxh_u32x4 b)
3137 {
3138 xxh_u64x2 result;
3139 __asm__("vmuleuw %0, %1, %2" : "=v" (result) : "v" (a), "v" (b));
3140 return result;
3141 }
3142 # endif
3143 #endif
3144
3145
3146
3147
3148 #if defined(XXH_NO_PREFETCH)
3149 # define XXH_PREFETCH(ptr) (void)(ptr)
3150 #else
3151 # if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))
3152 # include <mmintrin.h> /* https://msdn.microsoft.com/fr-fr/library/84szxsww(v=vs.90).aspx */
3153 # define XXH_PREFETCH(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T0)
3154 # elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) )
3155 # define XXH_PREFETCH(ptr) __builtin_prefetch((ptr), 0 , 3 )
3156 # else
3157 # define XXH_PREFETCH(ptr) (void)(ptr)
3158 # endif
3159 #endif
3160
3161
3162
3163
3164
3165
3166 #define XXH_SECRET_DEFAULT_SIZE 192
3167
3168 #if (XXH_SECRET_DEFAULT_SIZE < XXH3_SECRET_SIZE_MIN)
3169 # error "default keyset is not large enough"
3170 #endif
3171
3172
3173 XXH_ALIGN(64) static const xxh_u8 XXH3_kSecret[XXH_SECRET_DEFAULT_SIZE] = {
3174 0xb8, 0xfe, 0x6c, 0x39, 0x23, 0xa4, 0x4b, 0xbe, 0x7c, 0x01, 0x81, 0x2c, 0xf7, 0x21, 0xad, 0x1c,
3175 0xde, 0xd4, 0x6d, 0xe9, 0x83, 0x90, 0x97, 0xdb, 0x72, 0x40, 0xa4, 0xa4, 0xb7, 0xb3, 0x67, 0x1f,
3176 0xcb, 0x79, 0xe6, 0x4e, 0xcc, 0xc0, 0xe5, 0x78, 0x82, 0x5a, 0xd0, 0x7d, 0xcc, 0xff, 0x72, 0x21,
3177 0xb8, 0x08, 0x46, 0x74, 0xf7, 0x43, 0x24, 0x8e, 0xe0, 0x35, 0x90, 0xe6, 0x81, 0x3a, 0x26, 0x4c,
3178 0x3c, 0x28, 0x52, 0xbb, 0x91, 0xc3, 0x00, 0xcb, 0x88, 0xd0, 0x65, 0x8b, 0x1b, 0x53, 0x2e, 0xa3,
3179 0x71, 0x64, 0x48, 0x97, 0xa2, 0x0d, 0xf9, 0x4e, 0x38, 0x19, 0xef, 0x46, 0xa9, 0xde, 0xac, 0xd8,
3180 0xa8, 0xfa, 0x76, 0x3f, 0xe3, 0x9c, 0x34, 0x3f, 0xf9, 0xdc, 0xbb, 0xc7, 0xc7, 0x0b, 0x4f, 0x1d,
3181 0x8a, 0x51, 0xe0, 0x4b, 0xcd, 0xb4, 0x59, 0x31, 0xc8, 0x9f, 0x7e, 0xc9, 0xd9, 0x78, 0x73, 0x64,
3182 0xea, 0xc5, 0xac, 0x83, 0x34, 0xd3, 0xeb, 0xc3, 0xc5, 0x81, 0xa0, 0xff, 0xfa, 0x13, 0x63, 0xeb,
3183 0x17, 0x0d, 0xdd, 0x51, 0xb7, 0xf0, 0xda, 0x49, 0xd3, 0x16, 0x55, 0x26, 0x29, 0xd4, 0x68, 0x9e,
3184 0x2b, 0x16, 0xbe, 0x58, 0x7d, 0x47, 0xa1, 0xfc, 0x8f, 0xf8, 0xb8, 0xd1, 0x7a, 0xd0, 0x31, 0xce,
3185 0x45, 0xcb, 0x3a, 0x8f, 0x95, 0x16, 0x04, 0x28, 0xaf, 0xd7, 0xfb, 0xca, 0xbb, 0x4b, 0x40, 0x7e,
3186 };
3187
3188
3189 #ifdef XXH_OLD_NAMES
3190 # define kSecret XXH3_kSecret
3191 #endif
3192
3193 #ifdef XXH_DOXYGEN
3194
3195
3196
3197
3198
3199
3200
3201
3202
3203
3204
3205
3206
3207
3208
3209
3210 XXH_FORCE_INLINE xxh_u64
3211 XXH_mult32to64(xxh_u64 x, xxh_u64 y)
3212 {
3213 return (x & 0xFFFFFFFF) * (y & 0xFFFFFFFF);
3214 }
3215 #elif defined(_MSC_VER) && defined(_M_IX86)
3216 # include <intrin.h>
3217 # define XXH_mult32to64(x, y) __emulu((unsigned)(x), (unsigned)(y))
3218 #else
3219
3220
3221
3222
3223
3224
3225
3226 # define XXH_mult32to64(x, y) ((xxh_u64)(xxh_u32)(x) * (xxh_u64)(xxh_u32)(y))
3227 #endif
3228
3229
3230
3231
3232
3233
3234
3235
3236
3237
3238 static XXH128_hash_t
3239 XXH_mult64to128(xxh_u64 lhs, xxh_u64 rhs)
3240 {
3241
3242
3243
3244
3245
3246
3247
3248
3249
3250
3251
3252
3253
3254
3255
3256 #if defined(__GNUC__) && !defined(__wasm__) \
3257 && defined(__SIZEOF_INT128__) \
3258 || (defined(_INTEGRAL_MAX_BITS) && _INTEGRAL_MAX_BITS >= 128)
3259
3260 __uint128_t const product = (__uint128_t)lhs * (__uint128_t)rhs;
3261 XXH128_hash_t r128;
3262 r128.low64 = (xxh_u64)(product);
3263 r128.high64 = (xxh_u64)(product >> 64);
3264 return r128;
3265
3266
3267
3268
3269
3270
3271
3272
3273 #elif defined(_M_X64) || defined(_M_IA64)
3274
3275 #ifndef _MSC_VER
3276 # pragma intrinsic(_umul128)
3277 #endif
3278 xxh_u64 product_high;
3279 xxh_u64 const product_low = _umul128(lhs, rhs, &product_high);
3280 XXH128_hash_t r128;
3281 r128.low64 = product_low;
3282 r128.high64 = product_high;
3283 return r128;
3284
3285
3286
3287
3288
3289
3290 #elif defined(_M_ARM64)
3291
3292 #ifndef _MSC_VER
3293 # pragma intrinsic(__umulh)
3294 #endif
3295 XXH128_hash_t r128;
3296 r128.low64 = lhs * rhs;
3297 r128.high64 = __umulh(lhs, rhs);
3298 return r128;
3299
3300 #else
3301
3302
3303
3304
3305
3306
3307
3308
3309
3310
3311
3312
3313
3314
3315
3316
3317
3318
3319
3320
3321
3322
3323
3324
3325
3326
3327
3328
3329
3330
3331
3332
3333
3334
3335
3336
3337
3338
3339
3340
3341
3342
3343
3344
3345 xxh_u64 const lo_lo = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs & 0xFFFFFFFF);
3346 xxh_u64 const hi_lo = XXH_mult32to64(lhs >> 32, rhs & 0xFFFFFFFF);
3347 xxh_u64 const lo_hi = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs >> 32);
3348 xxh_u64 const hi_hi = XXH_mult32to64(lhs >> 32, rhs >> 32);
3349
3350
3351 xxh_u64 const cross = (lo_lo >> 32) + (hi_lo & 0xFFFFFFFF) + lo_hi;
3352 xxh_u64 const upper = (hi_lo >> 32) + (cross >> 32) + hi_hi;
3353 xxh_u64 const lower = (cross << 32) | (lo_lo & 0xFFFFFFFF);
3354
3355 XXH128_hash_t r128;
3356 r128.low64 = lower;
3357 r128.high64 = upper;
3358 return r128;
3359 #endif
3360 }
3361
3362
3363
3364
3365
3366
3367
3368
3369
3370
3371
3372 static xxh_u64
3373 XXH3_mul128_fold64(xxh_u64 lhs, xxh_u64 rhs)
3374 {
3375 XXH128_hash_t product = XXH_mult64to128(lhs, rhs);
3376 return product.low64 ^ product.high64;
3377 }
3378
3379
3380 XXH_FORCE_INLINE xxh_u64 XXH_xorshift64(xxh_u64 v64, int shift)
3381 {
3382 XXH_ASSERT(0 <= shift && shift < 64);
3383 return v64 ^ (v64 >> shift);
3384 }
3385
3386
3387
3388
3389
3390 static XXH64_hash_t XXH3_avalanche(xxh_u64 h64)
3391 {
3392 h64 = XXH_xorshift64(h64, 37);
3393 h64 *= 0x165667919E3779F9ULL;
3394 h64 = XXH_xorshift64(h64, 32);
3395 return h64;
3396 }
3397
3398
3399
3400
3401
3402
3403 static XXH64_hash_t XXH3_rrmxmx(xxh_u64 h64, xxh_u64 len)
3404 {
3405
3406 h64 ^= XXH_rotl64(h64, 49) ^ XXH_rotl64(h64, 24);
3407 h64 *= 0x9FB21C651E98DF25ULL;
3408 h64 ^= (h64 >> 35) + len ;
3409 h64 *= 0x9FB21C651E98DF25ULL;
3410 return XXH_xorshift64(h64, 28);
3411 }
3412
3413
3414
3415
3416
3417
3418
3419
3420
3421
3422
3423
3424
3425
3426
3427
3428
3429
3430
3431
3432
3433
3434
3435
3436
3437
3438
3439
3440
3441
3442
3443
3444
3445
3446
3447 XXH_FORCE_INLINE XXH64_hash_t
3448 XXH3_len_1to3_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
3449 {
3450 XXH_ASSERT(input != NULL);
3451 XXH_ASSERT(1 <= len && len <= 3);
3452 XXH_ASSERT(secret != NULL);
3453
3454
3455
3456
3457
3458 { xxh_u8 const c1 = input[0];
3459 xxh_u8 const c2 = input[len >> 1];
3460 xxh_u8 const c3 = input[len - 1];
3461 xxh_u32 const combined = ((xxh_u32)c1 << 16) | ((xxh_u32)c2 << 24)
3462 | ((xxh_u32)c3 << 0) | ((xxh_u32)len << 8);
3463 xxh_u64 const bitflip = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
3464 xxh_u64 const keyed = (xxh_u64)combined ^ bitflip;
3465 return XXH64_avalanche(keyed);
3466 }
3467 }
3468
3469 XXH_FORCE_INLINE XXH64_hash_t
3470 XXH3_len_4to8_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
3471 {
3472 XXH_ASSERT(input != NULL);
3473 XXH_ASSERT(secret != NULL);
3474 XXH_ASSERT(4 <= len && len <= 8);
3475 seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
3476 { xxh_u32 const input1 = XXH_readLE32(input);
3477 xxh_u32 const input2 = XXH_readLE32(input + len - 4);
3478 xxh_u64 const bitflip = (XXH_readLE64(secret+8) ^ XXH_readLE64(secret+16)) - seed;
3479 xxh_u64 const input64 = input2 + (((xxh_u64)input1) << 32);
3480 xxh_u64 const keyed = input64 ^ bitflip;
3481 return XXH3_rrmxmx(keyed, len);
3482 }
3483 }
3484
3485 XXH_FORCE_INLINE XXH64_hash_t
3486 XXH3_len_9to16_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
3487 {
3488 XXH_ASSERT(input != NULL);
3489 XXH_ASSERT(secret != NULL);
3490 XXH_ASSERT(9 <= len && len <= 16);
3491 { xxh_u64 const bitflip1 = (XXH_readLE64(secret+24) ^ XXH_readLE64(secret+32)) + seed;
3492 xxh_u64 const bitflip2 = (XXH_readLE64(secret+40) ^ XXH_readLE64(secret+48)) - seed;
3493 xxh_u64 const input_lo = XXH_readLE64(input) ^ bitflip1;
3494 xxh_u64 const input_hi = XXH_readLE64(input + len - 8) ^ bitflip2;
3495 xxh_u64 const acc = len
3496 + XXH_swap64(input_lo) + input_hi
3497 + XXH3_mul128_fold64(input_lo, input_hi);
3498 return XXH3_avalanche(acc);
3499 }
3500 }
3501
3502 XXH_FORCE_INLINE XXH64_hash_t
3503 XXH3_len_0to16_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
3504 {
3505 XXH_ASSERT(len <= 16);
3506 { if (XXH_likely(len > 8)) return XXH3_len_9to16_64b(input, len, secret, seed);
3507 if (XXH_likely(len >= 4)) return XXH3_len_4to8_64b(input, len, secret, seed);
3508 if (len) return XXH3_len_1to3_64b(input, len, secret, seed);
3509 return XXH64_avalanche(seed ^ (XXH_readLE64(secret+56) ^ XXH_readLE64(secret+64)));
3510 }
3511 }
3512
3513
3514
3515
3516
3517
3518
3519
3520
3521
3522
3523
3524
3525
3526
3527
3528
3529
3530
3531
3532
3533
3534
3535
3536
3537
3538
3539 XXH_FORCE_INLINE xxh_u64 XXH3_mix16B(const xxh_u8* XXH_RESTRICT input,
3540 const xxh_u8* XXH_RESTRICT secret, xxh_u64 seed64)
3541 {
3542 #if defined(__GNUC__) && !defined(__clang__) \
3543 && defined(__i386__) && defined(__SSE2__) \
3544 && !defined(XXH_ENABLE_AUTOVECTORIZE)
3545
3546
3547
3548
3549
3550
3551
3552
3553
3554
3555
3556
3557
3558
3559
3560 XXH_COMPILER_GUARD(seed64);
3561 #endif
3562 { xxh_u64 const input_lo = XXH_readLE64(input);
3563 xxh_u64 const input_hi = XXH_readLE64(input+8);
3564 return XXH3_mul128_fold64(
3565 input_lo ^ (XXH_readLE64(secret) + seed64),
3566 input_hi ^ (XXH_readLE64(secret+8) - seed64)
3567 );
3568 }
3569 }
3570
3571
3572 XXH_FORCE_INLINE XXH64_hash_t
3573 XXH3_len_17to128_64b(const xxh_u8* XXH_RESTRICT input, size_t len,
3574 const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
3575 XXH64_hash_t seed)
3576 {
3577 XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
3578 XXH_ASSERT(16 < len && len <= 128);
3579
3580 { xxh_u64 acc = len * XXH_PRIME64_1;
3581 if (len > 32) {
3582 if (len > 64) {
3583 if (len > 96) {
3584 acc += XXH3_mix16B(input+48, secret+96, seed);
3585 acc += XXH3_mix16B(input+len-64, secret+112, seed);
3586 }
3587 acc += XXH3_mix16B(input+32, secret+64, seed);
3588 acc += XXH3_mix16B(input+len-48, secret+80, seed);
3589 }
3590 acc += XXH3_mix16B(input+16, secret+32, seed);
3591 acc += XXH3_mix16B(input+len-32, secret+48, seed);
3592 }
3593 acc += XXH3_mix16B(input+0, secret+0, seed);
3594 acc += XXH3_mix16B(input+len-16, secret+16, seed);
3595
3596 return XXH3_avalanche(acc);
3597 }
3598 }
3599
3600 #define XXH3_MIDSIZE_MAX 240
3601
3602 XXH_NO_INLINE XXH64_hash_t
3603 XXH3_len_129to240_64b(const xxh_u8* XXH_RESTRICT input, size_t len,
3604 const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
3605 XXH64_hash_t seed)
3606 {
3607 XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
3608 XXH_ASSERT(128 < len && len <= XXH3_MIDSIZE_MAX);
3609
3610 #define XXH3_MIDSIZE_STARTOFFSET 3
3611 #define XXH3_MIDSIZE_LASTOFFSET 17
3612
3613 { xxh_u64 acc = len * XXH_PRIME64_1;
3614 int const nbRounds = (int)len / 16;
3615 int i;
3616 for (i=0; i<8; i++) {
3617 acc += XXH3_mix16B(input+(16*i), secret+(16*i), seed);
3618 }
3619 acc = XXH3_avalanche(acc);
3620 XXH_ASSERT(nbRounds >= 8);
3621 #if defined(__clang__) \
3622 && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
3623 && !defined(XXH_ENABLE_AUTOVECTORIZE)
3624
3625
3626
3627
3628
3629
3630
3631
3632
3633
3634
3635
3636
3637
3638
3639
3640
3641
3642
3643
3644 #pragma clang loop vectorize(disable)
3645 #endif
3646 for (i=8 ; i < nbRounds; i++) {
3647 acc += XXH3_mix16B(input+(16*i), secret+(16*(i-8)) + XXH3_MIDSIZE_STARTOFFSET, seed);
3648 }
3649
3650 acc += XXH3_mix16B(input + len - 16, secret + XXH3_SECRET_SIZE_MIN - XXH3_MIDSIZE_LASTOFFSET, seed);
3651 return XXH3_avalanche(acc);
3652 }
3653 }
3654
3655
3656
3657
3658 #define XXH_STRIPE_LEN 64
3659 #define XXH_SECRET_CONSUME_RATE 8
3660 #define XXH_ACC_NB (XXH_STRIPE_LEN / sizeof(xxh_u64))
3661
3662 #ifdef XXH_OLD_NAMES
3663 # define STRIPE_LEN XXH_STRIPE_LEN
3664 # define ACC_NB XXH_ACC_NB
3665 #endif
3666
3667 XXH_FORCE_INLINE void XXH_writeLE64(void* dst, xxh_u64 v64)
3668 {
3669 if (!XXH_CPU_LITTLE_ENDIAN) v64 = XXH_swap64(v64);
3670 XXH_memcpy(dst, &v64, sizeof(v64));
3671 }
3672
3673
3674
3675
3676
3677
3678 #if !defined (__VMS) \
3679 && (defined (__cplusplus) \
3680 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
3681 typedef int64_t xxh_i64;
3682 #else
3683
3684 typedef long long xxh_i64;
3685 #endif
3686
3687
3688
3689
3690
3691
3692
3693
3694
3695
3696
3697
3698
3699
3700
3701
3702
3703
3704
3705
3706
3707
3708
3709
3710 #if (XXH_VECTOR == XXH_AVX512) \
3711 || (defined(XXH_DISPATCH_AVX512) && XXH_DISPATCH_AVX512 != 0)
3712
3713 #ifndef XXH_TARGET_AVX512
3714 # define XXH_TARGET_AVX512
3715 #endif
3716
3717 XXH_FORCE_INLINE XXH_TARGET_AVX512 void
3718 XXH3_accumulate_512_avx512(void* XXH_RESTRICT acc,
3719 const void* XXH_RESTRICT input,
3720 const void* XXH_RESTRICT secret)
3721 {
3722 __m512i* const xacc = (__m512i *) acc;
3723 XXH_ASSERT((((size_t)acc) & 63) == 0);
3724 XXH_STATIC_ASSERT(XXH_STRIPE_LEN == sizeof(__m512i));
3725
3726 {
3727
3728 __m512i const data_vec = _mm512_loadu_si512 (input);
3729
3730 __m512i const key_vec = _mm512_loadu_si512 (secret);
3731
3732 __m512i const data_key = _mm512_xor_si512 (data_vec, key_vec);
3733
3734 __m512i const data_key_lo = _mm512_shuffle_epi32 (data_key, (_MM_PERM_ENUM)_MM_SHUFFLE(0, 3, 0, 1));
3735
3736 __m512i const product = _mm512_mul_epu32 (data_key, data_key_lo);
3737
3738 __m512i const data_swap = _mm512_shuffle_epi32(data_vec, (_MM_PERM_ENUM)_MM_SHUFFLE(1, 0, 3, 2));
3739 __m512i const sum = _mm512_add_epi64(*xacc, data_swap);
3740
3741 *xacc = _mm512_add_epi64(product, sum);
3742 }
3743 }
3744
3745
3746
3747
3748
3749
3750
3751
3752
3753
3754
3755
3756
3757
3758
3759
3760
3761
3762
3763
3764
3765
3766 XXH_FORCE_INLINE XXH_TARGET_AVX512 void
3767 XXH3_scrambleAcc_avx512(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
3768 {
3769 XXH_ASSERT((((size_t)acc) & 63) == 0);
3770 XXH_STATIC_ASSERT(XXH_STRIPE_LEN == sizeof(__m512i));
3771 { __m512i* const xacc = (__m512i*) acc;
3772 const __m512i prime32 = _mm512_set1_epi32((int)XXH_PRIME32_1);
3773
3774
3775 __m512i const acc_vec = *xacc;
3776 __m512i const shifted = _mm512_srli_epi64 (acc_vec, 47);
3777 __m512i const data_vec = _mm512_xor_si512 (acc_vec, shifted);
3778
3779 __m512i const key_vec = _mm512_loadu_si512 (secret);
3780 __m512i const data_key = _mm512_xor_si512 (data_vec, key_vec);
3781
3782
3783 __m512i const data_key_hi = _mm512_shuffle_epi32 (data_key, (_MM_PERM_ENUM)_MM_SHUFFLE(0, 3, 0, 1));
3784 __m512i const prod_lo = _mm512_mul_epu32 (data_key, prime32);
3785 __m512i const prod_hi = _mm512_mul_epu32 (data_key_hi, prime32);
3786 *xacc = _mm512_add_epi64(prod_lo, _mm512_slli_epi64(prod_hi, 32));
3787 }
3788 }
3789
3790 XXH_FORCE_INLINE XXH_TARGET_AVX512 void
3791 XXH3_initCustomSecret_avx512(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
3792 {
3793 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 63) == 0);
3794 XXH_STATIC_ASSERT(XXH_SEC_ALIGN == 64);
3795 XXH_ASSERT(((size_t)customSecret & 63) == 0);
3796 (void)(&XXH_writeLE64);
3797 { int const nbRounds = XXH_SECRET_DEFAULT_SIZE / sizeof(__m512i);
3798 __m512i const seed = _mm512_mask_set1_epi64(_mm512_set1_epi64((xxh_i64)seed64), 0xAA, (xxh_i64)(0U - seed64));
3799
3800 const __m512i* const src = (const __m512i*) ((const void*) XXH3_kSecret);
3801 __m512i* const dest = ( __m512i*) customSecret;
3802 int i;
3803 XXH_ASSERT(((size_t)src & 63) == 0);
3804 XXH_ASSERT(((size_t)dest & 63) == 0);
3805 for (i=0; i < nbRounds; ++i) {
3806
3807
3808 union {
3809 const __m512i* cp;
3810 void* p;
3811 } remote_const_void;
3812 remote_const_void.cp = src + i;
3813 dest[i] = _mm512_add_epi64(_mm512_stream_load_si512(remote_const_void.p), seed);
3814 } }
3815 }
3816
3817 #endif
3818
3819 #if (XXH_VECTOR == XXH_AVX2) \
3820 || (defined(XXH_DISPATCH_AVX2) && XXH_DISPATCH_AVX2 != 0)
3821
3822 #ifndef XXH_TARGET_AVX2
3823 # define XXH_TARGET_AVX2
3824 #endif
3825
3826 XXH_FORCE_INLINE XXH_TARGET_AVX2 void
3827 XXH3_accumulate_512_avx2( void* XXH_RESTRICT acc,
3828 const void* XXH_RESTRICT input,
3829 const void* XXH_RESTRICT secret)
3830 {
3831 XXH_ASSERT((((size_t)acc) & 31) == 0);
3832 { __m256i* const xacc = (__m256i *) acc;
3833
3834
3835 const __m256i* const xinput = (const __m256i *) input;
3836
3837
3838 const __m256i* const xsecret = (const __m256i *) secret;
3839
3840 size_t i;
3841 for (i=0; i < XXH_STRIPE_LEN/sizeof(__m256i); i++) {
3842
3843 __m256i const data_vec = _mm256_loadu_si256 (xinput+i);
3844
3845 __m256i const key_vec = _mm256_loadu_si256 (xsecret+i);
3846
3847 __m256i const data_key = _mm256_xor_si256 (data_vec, key_vec);
3848
3849 __m256i const data_key_lo = _mm256_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
3850
3851 __m256i const product = _mm256_mul_epu32 (data_key, data_key_lo);
3852
3853 __m256i const data_swap = _mm256_shuffle_epi32(data_vec, _MM_SHUFFLE(1, 0, 3, 2));
3854 __m256i const sum = _mm256_add_epi64(xacc[i], data_swap);
3855
3856 xacc[i] = _mm256_add_epi64(product, sum);
3857 } }
3858 }
3859
3860 XXH_FORCE_INLINE XXH_TARGET_AVX2 void
3861 XXH3_scrambleAcc_avx2(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
3862 {
3863 XXH_ASSERT((((size_t)acc) & 31) == 0);
3864 { __m256i* const xacc = (__m256i*) acc;
3865
3866
3867 const __m256i* const xsecret = (const __m256i *) secret;
3868 const __m256i prime32 = _mm256_set1_epi32((int)XXH_PRIME32_1);
3869
3870 size_t i;
3871 for (i=0; i < XXH_STRIPE_LEN/sizeof(__m256i); i++) {
3872
3873 __m256i const acc_vec = xacc[i];
3874 __m256i const shifted = _mm256_srli_epi64 (acc_vec, 47);
3875 __m256i const data_vec = _mm256_xor_si256 (acc_vec, shifted);
3876
3877 __m256i const key_vec = _mm256_loadu_si256 (xsecret+i);
3878 __m256i const data_key = _mm256_xor_si256 (data_vec, key_vec);
3879
3880
3881 __m256i const data_key_hi = _mm256_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
3882 __m256i const prod_lo = _mm256_mul_epu32 (data_key, prime32);
3883 __m256i const prod_hi = _mm256_mul_epu32 (data_key_hi, prime32);
3884 xacc[i] = _mm256_add_epi64(prod_lo, _mm256_slli_epi64(prod_hi, 32));
3885 }
3886 }
3887 }
3888
3889 XXH_FORCE_INLINE XXH_TARGET_AVX2 void XXH3_initCustomSecret_avx2(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
3890 {
3891 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 31) == 0);
3892 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE / sizeof(__m256i)) == 6);
3893 XXH_STATIC_ASSERT(XXH_SEC_ALIGN <= 64);
3894 (void)(&XXH_writeLE64);
3895 XXH_PREFETCH(customSecret);
3896 { __m256i const seed = _mm256_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64, (xxh_i64)(0U - seed64), (xxh_i64)seed64);
3897
3898 const __m256i* const src = (const __m256i*) ((const void*) XXH3_kSecret);
3899 __m256i* dest = ( __m256i*) customSecret;
3900
3901 # if defined(__GNUC__) || defined(__clang__)
3902
3903
3904
3905
3906
3907 XXH_COMPILER_GUARD(dest);
3908 # endif
3909 XXH_ASSERT(((size_t)src & 31) == 0);
3910 XXH_ASSERT(((size_t)dest & 31) == 0);
3911
3912
3913 dest[0] = _mm256_add_epi64(_mm256_stream_load_si256(src+0), seed);
3914 dest[1] = _mm256_add_epi64(_mm256_stream_load_si256(src+1), seed);
3915 dest[2] = _mm256_add_epi64(_mm256_stream_load_si256(src+2), seed);
3916 dest[3] = _mm256_add_epi64(_mm256_stream_load_si256(src+3), seed);
3917 dest[4] = _mm256_add_epi64(_mm256_stream_load_si256(src+4), seed);
3918 dest[5] = _mm256_add_epi64(_mm256_stream_load_si256(src+5), seed);
3919 }
3920 }
3921
3922 #endif
3923
3924
3925 #if (XXH_VECTOR == XXH_SSE2) || defined(XXH_X86DISPATCH)
3926
3927 #ifndef XXH_TARGET_SSE2
3928 # define XXH_TARGET_SSE2
3929 #endif
3930
3931 XXH_FORCE_INLINE XXH_TARGET_SSE2 void
3932 XXH3_accumulate_512_sse2( void* XXH_RESTRICT acc,
3933 const void* XXH_RESTRICT input,
3934 const void* XXH_RESTRICT secret)
3935 {
3936
3937 XXH_ASSERT((((size_t)acc) & 15) == 0);
3938 { __m128i* const xacc = (__m128i *) acc;
3939
3940
3941 const __m128i* const xinput = (const __m128i *) input;
3942
3943
3944 const __m128i* const xsecret = (const __m128i *) secret;
3945
3946 size_t i;
3947 for (i=0; i < XXH_STRIPE_LEN/sizeof(__m128i); i++) {
3948
3949 __m128i const data_vec = _mm_loadu_si128 (xinput+i);
3950
3951 __m128i const key_vec = _mm_loadu_si128 (xsecret+i);
3952
3953 __m128i const data_key = _mm_xor_si128 (data_vec, key_vec);
3954
3955 __m128i const data_key_lo = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
3956
3957 __m128i const product = _mm_mul_epu32 (data_key, data_key_lo);
3958
3959 __m128i const data_swap = _mm_shuffle_epi32(data_vec, _MM_SHUFFLE(1,0,3,2));
3960 __m128i const sum = _mm_add_epi64(xacc[i], data_swap);
3961
3962 xacc[i] = _mm_add_epi64(product, sum);
3963 } }
3964 }
3965
3966 XXH_FORCE_INLINE XXH_TARGET_SSE2 void
3967 XXH3_scrambleAcc_sse2(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
3968 {
3969 XXH_ASSERT((((size_t)acc) & 15) == 0);
3970 { __m128i* const xacc = (__m128i*) acc;
3971
3972
3973 const __m128i* const xsecret = (const __m128i *) secret;
3974 const __m128i prime32 = _mm_set1_epi32((int)XXH_PRIME32_1);
3975
3976 size_t i;
3977 for (i=0; i < XXH_STRIPE_LEN/sizeof(__m128i); i++) {
3978
3979 __m128i const acc_vec = xacc[i];
3980 __m128i const shifted = _mm_srli_epi64 (acc_vec, 47);
3981 __m128i const data_vec = _mm_xor_si128 (acc_vec, shifted);
3982
3983 __m128i const key_vec = _mm_loadu_si128 (xsecret+i);
3984 __m128i const data_key = _mm_xor_si128 (data_vec, key_vec);
3985
3986
3987 __m128i const data_key_hi = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
3988 __m128i const prod_lo = _mm_mul_epu32 (data_key, prime32);
3989 __m128i const prod_hi = _mm_mul_epu32 (data_key_hi, prime32);
3990 xacc[i] = _mm_add_epi64(prod_lo, _mm_slli_epi64(prod_hi, 32));
3991 }
3992 }
3993 }
3994
3995 XXH_FORCE_INLINE XXH_TARGET_SSE2 void XXH3_initCustomSecret_sse2(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
3996 {
3997 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);
3998 (void)(&XXH_writeLE64);
3999 { int const nbRounds = XXH_SECRET_DEFAULT_SIZE / sizeof(__m128i);
4000
4001 # if defined(_MSC_VER) && defined(_M_IX86) && _MSC_VER < 1900
4002
4003 XXH_ALIGN(16) const xxh_i64 seed64x2[2] = { (xxh_i64)seed64, (xxh_i64)(0U - seed64) };
4004 __m128i const seed = _mm_load_si128((__m128i const*)seed64x2);
4005 # else
4006 __m128i const seed = _mm_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64);
4007 # endif
4008 int i;
4009
4010 const void* const src16 = XXH3_kSecret;
4011 __m128i* dst16 = (__m128i*) customSecret;
4012 # if defined(__GNUC__) || defined(__clang__)
4013
4014
4015
4016
4017
4018 XXH_COMPILER_GUARD(dst16);
4019 # endif
4020 XXH_ASSERT(((size_t)src16 & 15) == 0);
4021 XXH_ASSERT(((size_t)dst16 & 15) == 0);
4022
4023 for (i=0; i < nbRounds; ++i) {
4024 dst16[i] = _mm_add_epi64(_mm_load_si128((const __m128i *)src16+i), seed);
4025 } }
4026 }
4027
4028 #endif
4029
4030 #if (XXH_VECTOR == XXH_NEON)
4031
4032 XXH_FORCE_INLINE void
4033 XXH3_accumulate_512_neon( void* XXH_RESTRICT acc,
4034 const void* XXH_RESTRICT input,
4035 const void* XXH_RESTRICT secret)
4036 {
4037 XXH_ASSERT((((size_t)acc) & 15) == 0);
4038 {
4039 uint64x2_t* const xacc = (uint64x2_t *) acc;
4040
4041 uint8_t const* const xinput = (const uint8_t *) input;
4042 uint8_t const* const xsecret = (const uint8_t *) secret;
4043
4044 size_t i;
4045 for (i=0; i < XXH_STRIPE_LEN / sizeof(uint64x2_t); i++) {
4046
4047 uint8x16_t data_vec = vld1q_u8(xinput + (i * 16));
4048
4049 uint8x16_t key_vec = vld1q_u8(xsecret + (i * 16));
4050 uint64x2_t data_key;
4051 uint32x2_t data_key_lo, data_key_hi;
4052
4053 uint64x2_t const data64 = vreinterpretq_u64_u8(data_vec);
4054 uint64x2_t const swapped = vextq_u64(data64, data64, 1);
4055 xacc[i] = vaddq_u64 (xacc[i], swapped);
4056
4057 data_key = vreinterpretq_u64_u8(veorq_u8(data_vec, key_vec));
4058
4059
4060
4061 XXH_SPLIT_IN_PLACE(data_key, data_key_lo, data_key_hi);
4062
4063 xacc[i] = vmlal_u32 (xacc[i], data_key_lo, data_key_hi);
4064
4065 }
4066 }
4067 }
4068
4069 XXH_FORCE_INLINE void
4070 XXH3_scrambleAcc_neon(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
4071 {
4072 XXH_ASSERT((((size_t)acc) & 15) == 0);
4073
4074 { uint64x2_t* xacc = (uint64x2_t*) acc;
4075 uint8_t const* xsecret = (uint8_t const*) secret;
4076 uint32x2_t prime = vdup_n_u32 (XXH_PRIME32_1);
4077
4078 size_t i;
4079 for (i=0; i < XXH_STRIPE_LEN/sizeof(uint64x2_t); i++) {
4080
4081 uint64x2_t acc_vec = xacc[i];
4082 uint64x2_t shifted = vshrq_n_u64 (acc_vec, 47);
4083 uint64x2_t data_vec = veorq_u64 (acc_vec, shifted);
4084
4085
4086 uint8x16_t key_vec = vld1q_u8 (xsecret + (i * 16));
4087 uint64x2_t data_key = veorq_u64 (data_vec, vreinterpretq_u64_u8(key_vec));
4088
4089
4090 uint32x2_t data_key_lo, data_key_hi;
4091
4092
4093
4094 XXH_SPLIT_IN_PLACE(data_key, data_key_lo, data_key_hi);
4095 {
4096
4097
4098
4099
4100
4101
4102
4103
4104
4105
4106
4107
4108
4109
4110
4111
4112
4113 uint64x2_t prod_hi = vmull_u32 (data_key_hi, prime);
4114
4115 xacc[i] = vshlq_n_u64(prod_hi, 32);
4116
4117 xacc[i] = vmlal_u32(xacc[i], data_key_lo, prime);
4118 }
4119 } }
4120 }
4121
4122 #endif
4123
4124 #if (XXH_VECTOR == XXH_VSX)
4125
4126 XXH_FORCE_INLINE void
4127 XXH3_accumulate_512_vsx( void* XXH_RESTRICT acc,
4128 const void* XXH_RESTRICT input,
4129 const void* XXH_RESTRICT secret)
4130 {
4131
4132 unsigned long long* const xacc = (unsigned long long*) acc;
4133 xxh_u64x2 const* const xinput = (xxh_u64x2 const*) input;
4134 xxh_u64x2 const* const xsecret = (xxh_u64x2 const*) secret;
4135 xxh_u64x2 const v32 = { 32, 32 };
4136 size_t i;
4137 for (i = 0; i < XXH_STRIPE_LEN / sizeof(xxh_u64x2); i++) {
4138
4139 xxh_u64x2 const data_vec = XXH_vec_loadu(xinput + i);
4140
4141 xxh_u64x2 const key_vec = XXH_vec_loadu(xsecret + i);
4142 xxh_u64x2 const data_key = data_vec ^ key_vec;
4143
4144 xxh_u32x4 const shuffled = (xxh_u32x4)vec_rl(data_key, v32);
4145
4146 xxh_u64x2 const product = XXH_vec_mulo((xxh_u32x4)data_key, shuffled);
4147
4148 xxh_u64x2 acc_vec = vec_xl(0, xacc + 2 * i);
4149 acc_vec += product;
4150
4151
4152 #ifdef __s390x__
4153 acc_vec += vec_permi(data_vec, data_vec, 2);
4154 #else
4155 acc_vec += vec_xxpermdi(data_vec, data_vec, 2);
4156 #endif
4157
4158 vec_xst(acc_vec, 0, xacc + 2 * i);
4159 }
4160 }
4161
4162 XXH_FORCE_INLINE void
4163 XXH3_scrambleAcc_vsx(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
4164 {
4165 XXH_ASSERT((((size_t)acc) & 15) == 0);
4166
4167 { xxh_u64x2* const xacc = (xxh_u64x2*) acc;
4168 const xxh_u64x2* const xsecret = (const xxh_u64x2*) secret;
4169
4170 xxh_u64x2 const v32 = { 32, 32 };
4171 xxh_u64x2 const v47 = { 47, 47 };
4172 xxh_u32x4 const prime = { XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1 };
4173 size_t i;
4174 for (i = 0; i < XXH_STRIPE_LEN / sizeof(xxh_u64x2); i++) {
4175
4176 xxh_u64x2 const acc_vec = xacc[i];
4177 xxh_u64x2 const data_vec = acc_vec ^ (acc_vec >> v47);
4178
4179
4180 xxh_u64x2 const key_vec = XXH_vec_loadu(xsecret + i);
4181 xxh_u64x2 const data_key = data_vec ^ key_vec;
4182
4183
4184
4185 xxh_u64x2 const prod_even = XXH_vec_mule((xxh_u32x4)data_key, prime);
4186
4187 xxh_u64x2 const prod_odd = XXH_vec_mulo((xxh_u32x4)data_key, prime);
4188 xacc[i] = prod_odd + (prod_even << v32);
4189 } }
4190 }
4191
4192 #endif
4193
4194
4195
4196 XXH_FORCE_INLINE void
4197 XXH3_accumulate_512_scalar(void* XXH_RESTRICT acc,
4198 const void* XXH_RESTRICT input,
4199 const void* XXH_RESTRICT secret)
4200 {
4201 xxh_u64* const xacc = (xxh_u64*) acc;
4202 const xxh_u8* const xinput = (const xxh_u8*) input;
4203 const xxh_u8* const xsecret = (const xxh_u8*) secret;
4204 size_t i;
4205 XXH_ASSERT(((size_t)acc & (XXH_ACC_ALIGN-1)) == 0);
4206 for (i=0; i < XXH_ACC_NB; i++) {
4207 xxh_u64 const data_val = XXH_readLE64(xinput + 8*i);
4208 xxh_u64 const data_key = data_val ^ XXH_readLE64(xsecret + i*8);
4209 xacc[i ^ 1] += data_val;
4210 xacc[i] += XXH_mult32to64(data_key & 0xFFFFFFFF, data_key >> 32);
4211 }
4212 }
4213
4214 XXH_FORCE_INLINE void
4215 XXH3_scrambleAcc_scalar(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
4216 {
4217 xxh_u64* const xacc = (xxh_u64*) acc;
4218 const xxh_u8* const xsecret = (const xxh_u8*) secret;
4219 size_t i;
4220 XXH_ASSERT((((size_t)acc) & (XXH_ACC_ALIGN-1)) == 0);
4221 for (i=0; i < XXH_ACC_NB; i++) {
4222 xxh_u64 const key64 = XXH_readLE64(xsecret + 8*i);
4223 xxh_u64 acc64 = xacc[i];
4224 acc64 = XXH_xorshift64(acc64, 47);
4225 acc64 ^= key64;
4226 acc64 *= XXH_PRIME32_1;
4227 xacc[i] = acc64;
4228 }
4229 }
4230
4231 XXH_FORCE_INLINE void
4232 XXH3_initCustomSecret_scalar(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
4233 {
4234
4235
4236
4237
4238
4239 const xxh_u8* kSecretPtr = XXH3_kSecret;
4240 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);
4241
4242 #if defined(__clang__) && defined(__aarch64__)
4243
4244
4245
4246
4247
4248
4249
4250
4251
4252
4253
4254
4255
4256
4257
4258
4259
4260
4261
4262
4263
4264
4265
4266
4267
4268
4269
4270
4271 XXH_COMPILER_GUARD(kSecretPtr);
4272 #endif
4273
4274
4275
4276
4277 XXH_ASSERT(kSecretPtr == XXH3_kSecret);
4278
4279 { int const nbRounds = XXH_SECRET_DEFAULT_SIZE / 16;
4280 int i;
4281 for (i=0; i < nbRounds; i++) {
4282
4283
4284
4285
4286
4287
4288 xxh_u64 lo = XXH_readLE64(kSecretPtr + 16*i) + seed64;
4289 xxh_u64 hi = XXH_readLE64(kSecretPtr + 16*i + 8) - seed64;
4290 XXH_writeLE64((xxh_u8*)customSecret + 16*i, lo);
4291 XXH_writeLE64((xxh_u8*)customSecret + 16*i + 8, hi);
4292 } }
4293 }
4294
4295
4296 typedef void (*XXH3_f_accumulate_512)(void* XXH_RESTRICT, const void*, const void*);
4297 typedef void (*XXH3_f_scrambleAcc)(void* XXH_RESTRICT, const void*);
4298 typedef void (*XXH3_f_initCustomSecret)(void* XXH_RESTRICT, xxh_u64);
4299
4300
4301 #if (XXH_VECTOR == XXH_AVX512)
4302
4303 #define XXH3_accumulate_512 XXH3_accumulate_512_avx512
4304 #define XXH3_scrambleAcc XXH3_scrambleAcc_avx512
4305 #define XXH3_initCustomSecret XXH3_initCustomSecret_avx512
4306
4307 #elif (XXH_VECTOR == XXH_AVX2)
4308
4309 #define XXH3_accumulate_512 XXH3_accumulate_512_avx2
4310 #define XXH3_scrambleAcc XXH3_scrambleAcc_avx2
4311 #define XXH3_initCustomSecret XXH3_initCustomSecret_avx2
4312
4313 #elif (XXH_VECTOR == XXH_SSE2)
4314
4315 #define XXH3_accumulate_512 XXH3_accumulate_512_sse2
4316 #define XXH3_scrambleAcc XXH3_scrambleAcc_sse2
4317 #define XXH3_initCustomSecret XXH3_initCustomSecret_sse2
4318
4319 #elif (XXH_VECTOR == XXH_NEON)
4320
4321 #define XXH3_accumulate_512 XXH3_accumulate_512_neon
4322 #define XXH3_scrambleAcc XXH3_scrambleAcc_neon
4323 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
4324
4325 #elif (XXH_VECTOR == XXH_VSX)
4326
4327 #define XXH3_accumulate_512 XXH3_accumulate_512_vsx
4328 #define XXH3_scrambleAcc XXH3_scrambleAcc_vsx
4329 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
4330
4331 #else
4332
4333 #define XXH3_accumulate_512 XXH3_accumulate_512_scalar
4334 #define XXH3_scrambleAcc XXH3_scrambleAcc_scalar
4335 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
4336
4337 #endif
4338
4339
4340
4341 #ifndef XXH_PREFETCH_DIST
4342 # ifdef __clang__
4343 # define XXH_PREFETCH_DIST 320
4344 # else
4345 # if (XXH_VECTOR == XXH_AVX512)
4346 # define XXH_PREFETCH_DIST 512
4347 # else
4348 # define XXH_PREFETCH_DIST 384
4349 # endif
4350 # endif
4351 #endif
4352
4353
4354
4355
4356
4357
4358 XXH_FORCE_INLINE void
4359 XXH3_accumulate( xxh_u64* XXH_RESTRICT acc,
4360 const xxh_u8* XXH_RESTRICT input,
4361 const xxh_u8* XXH_RESTRICT secret,
4362 size_t nbStripes,
4363 XXH3_f_accumulate_512 f_acc512)
4364 {
4365 size_t n;
4366 for (n = 0; n < nbStripes; n++ ) {
4367 const xxh_u8* const in = input + n*XXH_STRIPE_LEN;
4368 XXH_PREFETCH(in + XXH_PREFETCH_DIST);
4369 f_acc512(acc,
4370 in,
4371 secret + n*XXH_SECRET_CONSUME_RATE);
4372 }
4373 }
4374
4375 XXH_FORCE_INLINE void
4376 XXH3_hashLong_internal_loop(xxh_u64* XXH_RESTRICT acc,
4377 const xxh_u8* XXH_RESTRICT input, size_t len,
4378 const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
4379 XXH3_f_accumulate_512 f_acc512,
4380 XXH3_f_scrambleAcc f_scramble)
4381 {
4382 size_t const nbStripesPerBlock = (secretSize - XXH_STRIPE_LEN) / XXH_SECRET_CONSUME_RATE;
4383 size_t const block_len = XXH_STRIPE_LEN * nbStripesPerBlock;
4384 size_t const nb_blocks = (len - 1) / block_len;
4385
4386 size_t n;
4387
4388 XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN);
4389
4390 for (n = 0; n < nb_blocks; n++) {
4391 XXH3_accumulate(acc, input + n*block_len, secret, nbStripesPerBlock, f_acc512);
4392 f_scramble(acc, secret + secretSize - XXH_STRIPE_LEN);
4393 }
4394
4395
4396 XXH_ASSERT(len > XXH_STRIPE_LEN);
4397 { size_t const nbStripes = ((len - 1) - (block_len * nb_blocks)) / XXH_STRIPE_LEN;
4398 XXH_ASSERT(nbStripes <= (secretSize / XXH_SECRET_CONSUME_RATE));
4399 XXH3_accumulate(acc, input + nb_blocks*block_len, secret, nbStripes, f_acc512);
4400
4401
4402 { const xxh_u8* const p = input + len - XXH_STRIPE_LEN;
4403 #define XXH_SECRET_LASTACC_START 7
4404 f_acc512(acc, p, secret + secretSize - XXH_STRIPE_LEN - XXH_SECRET_LASTACC_START);
4405 } }
4406 }
4407
4408 XXH_FORCE_INLINE xxh_u64
4409 XXH3_mix2Accs(const xxh_u64* XXH_RESTRICT acc, const xxh_u8* XXH_RESTRICT secret)
4410 {
4411 return XXH3_mul128_fold64(
4412 acc[0] ^ XXH_readLE64(secret),
4413 acc[1] ^ XXH_readLE64(secret+8) );
4414 }
4415
4416 static XXH64_hash_t
4417 XXH3_mergeAccs(const xxh_u64* XXH_RESTRICT acc, const xxh_u8* XXH_RESTRICT secret, xxh_u64 start)
4418 {
4419 xxh_u64 result64 = start;
4420 size_t i = 0;
4421
4422 for (i = 0; i < 4; i++) {
4423 result64 += XXH3_mix2Accs(acc+2*i, secret + 16*i);
4424 #if defined(__clang__) \
4425 && (defined(__arm__) || defined(__thumb__)) \
4426 && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
4427 && !defined(XXH_ENABLE_AUTOVECTORIZE)
4428
4429
4430
4431
4432
4433
4434
4435
4436 XXH_COMPILER_GUARD(result64);
4437 #endif
4438 }
4439
4440 return XXH3_avalanche(result64);
4441 }
4442
4443 #define XXH3_INIT_ACC { XXH_PRIME32_3, XXH_PRIME64_1, XXH_PRIME64_2, XXH_PRIME64_3, \
4444 XXH_PRIME64_4, XXH_PRIME32_2, XXH_PRIME64_5, XXH_PRIME32_1 }
4445
4446 XXH_FORCE_INLINE XXH64_hash_t
4447 XXH3_hashLong_64b_internal(const void* XXH_RESTRICT input, size_t len,
4448 const void* XXH_RESTRICT secret, size_t secretSize,
4449 XXH3_f_accumulate_512 f_acc512,
4450 XXH3_f_scrambleAcc f_scramble)
4451 {
4452 XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;
4453
4454 XXH3_hashLong_internal_loop(acc, (const xxh_u8*)input, len, (const xxh_u8*)secret, secretSize, f_acc512, f_scramble);
4455
4456
4457 XXH_STATIC_ASSERT(sizeof(acc) == 64);
4458
4459 #define XXH_SECRET_MERGEACCS_START 11
4460 XXH_ASSERT(secretSize >= sizeof(acc) + XXH_SECRET_MERGEACCS_START);
4461 return XXH3_mergeAccs(acc, (const xxh_u8*)secret + XXH_SECRET_MERGEACCS_START, (xxh_u64)len * XXH_PRIME64_1);
4462 }
4463
4464
4465
4466
4467
4468
4469 XXH_FORCE_INLINE XXH64_hash_t
4470 XXH3_hashLong_64b_withSecret(const void* XXH_RESTRICT input, size_t len,
4471 XXH64_hash_t seed64, const xxh_u8* XXH_RESTRICT secret, size_t secretLen)
4472 {
4473 (void)seed64;
4474 return XXH3_hashLong_64b_internal(input, len, secret, secretLen, XXH3_accumulate_512, XXH3_scrambleAcc);
4475 }
4476
4477
4478
4479
4480
4481
4482
4483 XXH_NO_INLINE XXH64_hash_t
4484 XXH3_hashLong_64b_default(const void* XXH_RESTRICT input, size_t len,
4485 XXH64_hash_t seed64, const xxh_u8* XXH_RESTRICT secret, size_t secretLen)
4486 {
4487 (void)seed64; (void)secret; (void)secretLen;
4488 return XXH3_hashLong_64b_internal(input, len, XXH3_kSecret, sizeof(XXH3_kSecret), XXH3_accumulate_512, XXH3_scrambleAcc);
4489 }
4490
4491
4492
4493
4494
4495
4496
4497
4498
4499
4500
4501
4502 XXH_FORCE_INLINE XXH64_hash_t
4503 XXH3_hashLong_64b_withSeed_internal(const void* input, size_t len,
4504 XXH64_hash_t seed,
4505 XXH3_f_accumulate_512 f_acc512,
4506 XXH3_f_scrambleAcc f_scramble,
4507 XXH3_f_initCustomSecret f_initSec)
4508 {
4509 if (seed == 0)
4510 return XXH3_hashLong_64b_internal(input, len,
4511 XXH3_kSecret, sizeof(XXH3_kSecret),
4512 f_acc512, f_scramble);
4513 { XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
4514 f_initSec(secret, seed);
4515 return XXH3_hashLong_64b_internal(input, len, secret, sizeof(secret),
4516 f_acc512, f_scramble);
4517 }
4518 }
4519
4520
4521
4522
4523 XXH_NO_INLINE XXH64_hash_t
4524 XXH3_hashLong_64b_withSeed(const void* input, size_t len,
4525 XXH64_hash_t seed, const xxh_u8* secret, size_t secretLen)
4526 {
4527 (void)secret; (void)secretLen;
4528 return XXH3_hashLong_64b_withSeed_internal(input, len, seed,
4529 XXH3_accumulate_512, XXH3_scrambleAcc, XXH3_initCustomSecret);
4530 }
4531
4532
4533 typedef XXH64_hash_t (*XXH3_hashLong64_f)(const void* XXH_RESTRICT, size_t,
4534 XXH64_hash_t, const xxh_u8* XXH_RESTRICT, size_t);
4535
4536 XXH_FORCE_INLINE XXH64_hash_t
4537 XXH3_64bits_internal(const void* XXH_RESTRICT input, size_t len,
4538 XXH64_hash_t seed64, const void* XXH_RESTRICT secret, size_t secretLen,
4539 XXH3_hashLong64_f f_hashLong)
4540 {
4541 XXH_ASSERT(secretLen >= XXH3_SECRET_SIZE_MIN);
4542
4543
4544
4545
4546
4547
4548
4549 if (len <= 16)
4550 return XXH3_len_0to16_64b((const xxh_u8*)input, len, (const xxh_u8*)secret, seed64);
4551 if (len <= 128)
4552 return XXH3_len_17to128_64b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
4553 if (len <= XXH3_MIDSIZE_MAX)
4554 return XXH3_len_129to240_64b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
4555 return f_hashLong(input, len, seed64, (const xxh_u8*)secret, secretLen);
4556 }
4557
4558
4559
4560
4561
4562 XXH_PUBLIC_API XXH64_hash_t XXH3_64bits(const void* input, size_t len)
4563 {
4564 return XXH3_64bits_internal(input, len, 0, XXH3_kSecret, sizeof(XXH3_kSecret), XXH3_hashLong_64b_default);
4565 }
4566
4567
4568 XXH_PUBLIC_API XXH64_hash_t
4569 XXH3_64bits_withSecret(const void* input, size_t len, const void* secret, size_t secretSize)
4570 {
4571 return XXH3_64bits_internal(input, len, 0, secret, secretSize, XXH3_hashLong_64b_withSecret);
4572 }
4573
4574
4575 XXH_PUBLIC_API XXH64_hash_t
4576 XXH3_64bits_withSeed(const void* input, size_t len, XXH64_hash_t seed)
4577 {
4578 return XXH3_64bits_internal(input, len, seed, XXH3_kSecret, sizeof(XXH3_kSecret), XXH3_hashLong_64b_withSeed);
4579 }
4580
4581 XXH_PUBLIC_API XXH64_hash_t
4582 XXH3_64bits_withSecretandSeed(const void* input, size_t len, const void* secret, size_t secretSize, XXH64_hash_t seed)
4583 {
4584 if (len <= XXH3_MIDSIZE_MAX)
4585 return XXH3_64bits_internal(input, len, seed, XXH3_kSecret, sizeof(XXH3_kSecret), NULL);
4586 return XXH3_hashLong_64b_withSecret(input, len, seed, (const xxh_u8*)secret, secretSize);
4587 }
4588
4589
4590
4591
4592
4593
4594
4595
4596
4597
4598
4599
4600
4601
4602
4603
4604
4605
4606
4607
4608
4609
4610
4611
4612
4613
4614
4615 static void* XXH_alignedMalloc(size_t s, size_t align)
4616 {
4617 XXH_ASSERT(align <= 128 && align >= 8);
4618 XXH_ASSERT((align & (align-1)) == 0);
4619 XXH_ASSERT(s != 0 && s < (s + align));
4620 {
4621 xxh_u8* base = (xxh_u8*)XXH_malloc(s + align);
4622 if (base != NULL) {
4623
4624
4625
4626
4627
4628
4629 size_t offset = align - ((size_t)base & (align - 1));
4630
4631 xxh_u8* ptr = base + offset;
4632
4633 XXH_ASSERT((size_t)ptr % align == 0);
4634
4635
4636 ptr[-1] = (xxh_u8)offset;
4637 return ptr;
4638 }
4639 return NULL;
4640 }
4641 }
4642
4643
4644
4645
4646 static void XXH_alignedFree(void* p)
4647 {
4648 if (p != NULL) {
4649 xxh_u8* ptr = (xxh_u8*)p;
4650
4651 xxh_u8 offset = ptr[-1];
4652
4653 xxh_u8* base = ptr - offset;
4654 XXH_free(base);
4655 }
4656 }
4657
4658 XXH_PUBLIC_API XXH3_state_t* XXH3_createState(void)
4659 {
4660 XXH3_state_t* const state = (XXH3_state_t*)XXH_alignedMalloc(sizeof(XXH3_state_t), 64);
4661 if (state==NULL) return NULL;
4662 XXH3_INITSTATE(state);
4663 return state;
4664 }
4665
4666
4667 XXH_PUBLIC_API XXH_errorcode XXH3_freeState(XXH3_state_t* statePtr)
4668 {
4669 XXH_alignedFree(statePtr);
4670 return XXH_OK;
4671 }
4672
4673
4674 XXH_PUBLIC_API void
4675 XXH3_copyState(XXH3_state_t* dst_state, const XXH3_state_t* src_state)
4676 {
4677 XXH_memcpy(dst_state, src_state, sizeof(*dst_state));
4678 }
4679
4680 static void
4681 XXH3_reset_internal(XXH3_state_t* statePtr,
4682 XXH64_hash_t seed,
4683 const void* secret, size_t secretSize)
4684 {
4685 size_t const initStart = offsetof(XXH3_state_t, bufferedSize);
4686 size_t const initLength = offsetof(XXH3_state_t, nbStripesPerBlock) - initStart;
4687 XXH_ASSERT(offsetof(XXH3_state_t, nbStripesPerBlock) > initStart);
4688 XXH_ASSERT(statePtr != NULL);
4689
4690 memset((char*)statePtr + initStart, 0, initLength);
4691 statePtr->acc[0] = XXH_PRIME32_3;
4692 statePtr->acc[1] = XXH_PRIME64_1;
4693 statePtr->acc[2] = XXH_PRIME64_2;
4694 statePtr->acc[3] = XXH_PRIME64_3;
4695 statePtr->acc[4] = XXH_PRIME64_4;
4696 statePtr->acc[5] = XXH_PRIME32_2;
4697 statePtr->acc[6] = XXH_PRIME64_5;
4698 statePtr->acc[7] = XXH_PRIME32_1;
4699 statePtr->seed = seed;
4700 statePtr->useSeed = (seed != 0);
4701 statePtr->extSecret = (const unsigned char*)secret;
4702 XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN);
4703 statePtr->secretLimit = secretSize - XXH_STRIPE_LEN;
4704 statePtr->nbStripesPerBlock = statePtr->secretLimit / XXH_SECRET_CONSUME_RATE;
4705 }
4706
4707
4708 XXH_PUBLIC_API XXH_errorcode
4709 XXH3_64bits_reset(XXH3_state_t* statePtr)
4710 {
4711 if (statePtr == NULL) return XXH_ERROR;
4712 XXH3_reset_internal(statePtr, 0, XXH3_kSecret, XXH_SECRET_DEFAULT_SIZE);
4713 return XXH_OK;
4714 }
4715
4716
4717 XXH_PUBLIC_API XXH_errorcode
4718 XXH3_64bits_reset_withSecret(XXH3_state_t* statePtr, const void* secret, size_t secretSize)
4719 {
4720 if (statePtr == NULL) return XXH_ERROR;
4721 XXH3_reset_internal(statePtr, 0, secret, secretSize);
4722 if (secret == NULL) return XXH_ERROR;
4723 if (secretSize < XXH3_SECRET_SIZE_MIN) return XXH_ERROR;
4724 return XXH_OK;
4725 }
4726
4727
4728 XXH_PUBLIC_API XXH_errorcode
4729 XXH3_64bits_reset_withSeed(XXH3_state_t* statePtr, XXH64_hash_t seed)
4730 {
4731 if (statePtr == NULL) return XXH_ERROR;
4732 if (seed==0) return XXH3_64bits_reset(statePtr);
4733 if ((seed != statePtr->seed) || (statePtr->extSecret != NULL))
4734 XXH3_initCustomSecret(statePtr->customSecret, seed);
4735 XXH3_reset_internal(statePtr, seed, NULL, XXH_SECRET_DEFAULT_SIZE);
4736 return XXH_OK;
4737 }
4738
4739
4740 XXH_PUBLIC_API XXH_errorcode
4741 XXH3_64bits_reset_withSecretandSeed(XXH3_state_t* statePtr, const void* secret, size_t secretSize, XXH64_hash_t seed64)
4742 {
4743 if (statePtr == NULL) return XXH_ERROR;
4744 if (secret == NULL) return XXH_ERROR;
4745 if (secretSize < XXH3_SECRET_SIZE_MIN) return XXH_ERROR;
4746 XXH3_reset_internal(statePtr, seed64, secret, secretSize);
4747 statePtr->useSeed = 1;
4748 return XXH_OK;
4749 }
4750
4751
4752
4753
4754 XXH_FORCE_INLINE void
4755 XXH3_consumeStripes(xxh_u64* XXH_RESTRICT acc,
4756 size_t* XXH_RESTRICT nbStripesSoFarPtr, size_t nbStripesPerBlock,
4757 const xxh_u8* XXH_RESTRICT input, size_t nbStripes,
4758 const xxh_u8* XXH_RESTRICT secret, size_t secretLimit,
4759 XXH3_f_accumulate_512 f_acc512,
4760 XXH3_f_scrambleAcc f_scramble)
4761 {
4762 XXH_ASSERT(nbStripes <= nbStripesPerBlock);
4763 XXH_ASSERT(*nbStripesSoFarPtr < nbStripesPerBlock);
4764 if (nbStripesPerBlock - *nbStripesSoFarPtr <= nbStripes) {
4765
4766 size_t const nbStripesToEndofBlock = nbStripesPerBlock - *nbStripesSoFarPtr;
4767 size_t const nbStripesAfterBlock = nbStripes - nbStripesToEndofBlock;
4768 XXH3_accumulate(acc, input, secret + nbStripesSoFarPtr[0] * XXH_SECRET_CONSUME_RATE, nbStripesToEndofBlock, f_acc512);
4769 f_scramble(acc, secret + secretLimit);
4770 XXH3_accumulate(acc, input + nbStripesToEndofBlock * XXH_STRIPE_LEN, secret, nbStripesAfterBlock, f_acc512);
4771 *nbStripesSoFarPtr = nbStripesAfterBlock;
4772 } else {
4773 XXH3_accumulate(acc, input, secret + nbStripesSoFarPtr[0] * XXH_SECRET_CONSUME_RATE, nbStripes, f_acc512);
4774 *nbStripesSoFarPtr += nbStripes;
4775 }
4776 }
4777
4778 #ifndef XXH3_STREAM_USE_STACK
4779 # ifndef __clang__
4780 # define XXH3_STREAM_USE_STACK 1
4781 # endif
4782 #endif
4783
4784
4785
4786 XXH_FORCE_INLINE XXH_errorcode
4787 XXH3_update(XXH3_state_t* XXH_RESTRICT const state,
4788 const xxh_u8* XXH_RESTRICT input, size_t len,
4789 XXH3_f_accumulate_512 f_acc512,
4790 XXH3_f_scrambleAcc f_scramble)
4791 {
4792 if (input==NULL) {
4793 XXH_ASSERT(len == 0);
4794 return XXH_OK;
4795 }
4796
4797 XXH_ASSERT(state != NULL);
4798 { const xxh_u8* const bEnd = input + len;
4799 const unsigned char* const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
4800 #if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
4801
4802
4803
4804
4805 XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[8]; memcpy(acc, state->acc, sizeof(acc));
4806 #else
4807 xxh_u64* XXH_RESTRICT const acc = state->acc;
4808 #endif
4809 state->totalLen += len;
4810 XXH_ASSERT(state->bufferedSize <= XXH3_INTERNALBUFFER_SIZE);
4811
4812
4813 if (state->bufferedSize + len <= XXH3_INTERNALBUFFER_SIZE) {
4814 XXH_memcpy(state->buffer + state->bufferedSize, input, len);
4815 state->bufferedSize += (XXH32_hash_t)len;
4816 return XXH_OK;
4817 }
4818
4819
4820 #define XXH3_INTERNALBUFFER_STRIPES (XXH3_INTERNALBUFFER_SIZE / XXH_STRIPE_LEN)
4821 XXH_STATIC_ASSERT(XXH3_INTERNALBUFFER_SIZE % XXH_STRIPE_LEN == 0);
4822
4823
4824
4825
4826
4827 if (state->bufferedSize) {
4828 size_t const loadSize = XXH3_INTERNALBUFFER_SIZE - state->bufferedSize;
4829 XXH_memcpy(state->buffer + state->bufferedSize, input, loadSize);
4830 input += loadSize;
4831 XXH3_consumeStripes(acc,
4832 &state->nbStripesSoFar, state->nbStripesPerBlock,
4833 state->buffer, XXH3_INTERNALBUFFER_STRIPES,
4834 secret, state->secretLimit,
4835 f_acc512, f_scramble);
4836 state->bufferedSize = 0;
4837 }
4838 XXH_ASSERT(input < bEnd);
4839
4840
4841 if ((size_t)(bEnd - input) > state->nbStripesPerBlock * XXH_STRIPE_LEN) {
4842 size_t nbStripes = (size_t)(bEnd - 1 - input) / XXH_STRIPE_LEN;
4843 XXH_ASSERT(state->nbStripesPerBlock >= state->nbStripesSoFar);
4844
4845 { size_t const nbStripesToEnd = state->nbStripesPerBlock - state->nbStripesSoFar;
4846 XXH_ASSERT(nbStripes <= nbStripes);
4847 XXH3_accumulate(acc, input, secret + state->nbStripesSoFar * XXH_SECRET_CONSUME_RATE, nbStripesToEnd, f_acc512);
4848 f_scramble(acc, secret + state->secretLimit);
4849 state->nbStripesSoFar = 0;
4850 input += nbStripesToEnd * XXH_STRIPE_LEN;
4851 nbStripes -= nbStripesToEnd;
4852 }
4853
4854 while(nbStripes >= state->nbStripesPerBlock) {
4855 XXH3_accumulate(acc, input, secret, state->nbStripesPerBlock, f_acc512);
4856 f_scramble(acc, secret + state->secretLimit);
4857 input += state->nbStripesPerBlock * XXH_STRIPE_LEN;
4858 nbStripes -= state->nbStripesPerBlock;
4859 }
4860
4861 XXH3_accumulate(acc, input, secret, nbStripes, f_acc512);
4862 input += nbStripes * XXH_STRIPE_LEN;
4863 XXH_ASSERT(input < bEnd);
4864 state->nbStripesSoFar = nbStripes;
4865
4866 XXH_memcpy(state->buffer + sizeof(state->buffer) - XXH_STRIPE_LEN, input - XXH_STRIPE_LEN, XXH_STRIPE_LEN);
4867 XXH_ASSERT(bEnd - input <= XXH_STRIPE_LEN);
4868 } else {
4869
4870
4871 if (bEnd - input > XXH3_INTERNALBUFFER_SIZE) {
4872 const xxh_u8* const limit = bEnd - XXH3_INTERNALBUFFER_SIZE;
4873 do {
4874 XXH3_consumeStripes(acc,
4875 &state->nbStripesSoFar, state->nbStripesPerBlock,
4876 input, XXH3_INTERNALBUFFER_STRIPES,
4877 secret, state->secretLimit,
4878 f_acc512, f_scramble);
4879 input += XXH3_INTERNALBUFFER_SIZE;
4880 } while (input<limit);
4881
4882 XXH_memcpy(state->buffer + sizeof(state->buffer) - XXH_STRIPE_LEN, input - XXH_STRIPE_LEN, XXH_STRIPE_LEN);
4883 }
4884 }
4885
4886
4887 XXH_ASSERT(input < bEnd);
4888 XXH_ASSERT(bEnd - input <= XXH3_INTERNALBUFFER_SIZE);
4889 XXH_ASSERT(state->bufferedSize == 0);
4890 XXH_memcpy(state->buffer, input, (size_t)(bEnd-input));
4891 state->bufferedSize = (XXH32_hash_t)(bEnd-input);
4892 #if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
4893
4894 memcpy(state->acc, acc, sizeof(acc));
4895 #endif
4896 }
4897
4898 return XXH_OK;
4899 }
4900
4901
4902 XXH_PUBLIC_API XXH_errorcode
4903 XXH3_64bits_update(XXH3_state_t* state, const void* input, size_t len)
4904 {
4905 return XXH3_update(state, (const xxh_u8*)input, len,
4906 XXH3_accumulate_512, XXH3_scrambleAcc);
4907 }
4908
4909
4910 XXH_FORCE_INLINE void
4911 XXH3_digest_long (XXH64_hash_t* acc,
4912 const XXH3_state_t* state,
4913 const unsigned char* secret)
4914 {
4915
4916
4917
4918
4919 XXH_memcpy(acc, state->acc, sizeof(state->acc));
4920 if (state->bufferedSize >= XXH_STRIPE_LEN) {
4921 size_t const nbStripes = (state->bufferedSize - 1) / XXH_STRIPE_LEN;
4922 size_t nbStripesSoFar = state->nbStripesSoFar;
4923 XXH3_consumeStripes(acc,
4924 &nbStripesSoFar, state->nbStripesPerBlock,
4925 state->buffer, nbStripes,
4926 secret, state->secretLimit,
4927 XXH3_accumulate_512, XXH3_scrambleAcc);
4928
4929 XXH3_accumulate_512(acc,
4930 state->buffer + state->bufferedSize - XXH_STRIPE_LEN,
4931 secret + state->secretLimit - XXH_SECRET_LASTACC_START);
4932 } else {
4933 xxh_u8 lastStripe[XXH_STRIPE_LEN];
4934 size_t const catchupSize = XXH_STRIPE_LEN - state->bufferedSize;
4935 XXH_ASSERT(state->bufferedSize > 0);
4936 XXH_memcpy(lastStripe, state->buffer + sizeof(state->buffer) - catchupSize, catchupSize);
4937 XXH_memcpy(lastStripe + catchupSize, state->buffer, state->bufferedSize);
4938 XXH3_accumulate_512(acc,
4939 lastStripe,
4940 secret + state->secretLimit - XXH_SECRET_LASTACC_START);
4941 }
4942 }
4943
4944
4945 XXH_PUBLIC_API XXH64_hash_t XXH3_64bits_digest (const XXH3_state_t* state)
4946 {
4947 const unsigned char* const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
4948 if (state->totalLen > XXH3_MIDSIZE_MAX) {
4949 XXH_ALIGN(XXH_ACC_ALIGN) XXH64_hash_t acc[XXH_ACC_NB];
4950 XXH3_digest_long(acc, state, secret);
4951 return XXH3_mergeAccs(acc,
4952 secret + XXH_SECRET_MERGEACCS_START,
4953 (xxh_u64)state->totalLen * XXH_PRIME64_1);
4954 }
4955
4956 if (state->useSeed)
4957 return XXH3_64bits_withSeed(state->buffer, (size_t)state->totalLen, state->seed);
4958 return XXH3_64bits_withSecret(state->buffer, (size_t)(state->totalLen),
4959 secret, state->secretLimit + XXH_STRIPE_LEN);
4960 }
4961
4962
4963
4964
4965
4966
4967
4968
4969
4970
4971
4972
4973
4974
4975
4976
4977
4978
4979
4980
4981 XXH_FORCE_INLINE XXH128_hash_t
4982 XXH3_len_1to3_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
4983 {
4984
4985 XXH_ASSERT(input != NULL);
4986 XXH_ASSERT(1 <= len && len <= 3);
4987 XXH_ASSERT(secret != NULL);
4988
4989
4990
4991
4992
4993 { xxh_u8 const c1 = input[0];
4994 xxh_u8 const c2 = input[len >> 1];
4995 xxh_u8 const c3 = input[len - 1];
4996 xxh_u32 const combinedl = ((xxh_u32)c1 <<16) | ((xxh_u32)c2 << 24)
4997 | ((xxh_u32)c3 << 0) | ((xxh_u32)len << 8);
4998 xxh_u32 const combinedh = XXH_rotl32(XXH_swap32(combinedl), 13);
4999 xxh_u64 const bitflipl = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
5000 xxh_u64 const bitfliph = (XXH_readLE32(secret+8) ^ XXH_readLE32(secret+12)) - seed;
5001 xxh_u64 const keyed_lo = (xxh_u64)combinedl ^ bitflipl;
5002 xxh_u64 const keyed_hi = (xxh_u64)combinedh ^ bitfliph;
5003 XXH128_hash_t h128;
5004 h128.low64 = XXH64_avalanche(keyed_lo);
5005 h128.high64 = XXH64_avalanche(keyed_hi);
5006 return h128;
5007 }
5008 }
5009
5010 XXH_FORCE_INLINE XXH128_hash_t
5011 XXH3_len_4to8_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
5012 {
5013 XXH_ASSERT(input != NULL);
5014 XXH_ASSERT(secret != NULL);
5015 XXH_ASSERT(4 <= len && len <= 8);
5016 seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
5017 { xxh_u32 const input_lo = XXH_readLE32(input);
5018 xxh_u32 const input_hi = XXH_readLE32(input + len - 4);
5019 xxh_u64 const input_64 = input_lo + ((xxh_u64)input_hi << 32);
5020 xxh_u64 const bitflip = (XXH_readLE64(secret+16) ^ XXH_readLE64(secret+24)) + seed;
5021 xxh_u64 const keyed = input_64 ^ bitflip;
5022
5023
5024 XXH128_hash_t m128 = XXH_mult64to128(keyed, XXH_PRIME64_1 + (len << 2));
5025
5026 m128.high64 += (m128.low64 << 1);
5027 m128.low64 ^= (m128.high64 >> 3);
5028
5029 m128.low64 = XXH_xorshift64(m128.low64, 35);
5030 m128.low64 *= 0x9FB21C651E98DF25ULL;
5031 m128.low64 = XXH_xorshift64(m128.low64, 28);
5032 m128.high64 = XXH3_avalanche(m128.high64);
5033 return m128;
5034 }
5035 }
5036
5037 XXH_FORCE_INLINE XXH128_hash_t
5038 XXH3_len_9to16_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
5039 {
5040 XXH_ASSERT(input != NULL);
5041 XXH_ASSERT(secret != NULL);
5042 XXH_ASSERT(9 <= len && len <= 16);
5043 { xxh_u64 const bitflipl = (XXH_readLE64(secret+32) ^ XXH_readLE64(secret+40)) - seed;
5044 xxh_u64 const bitfliph = (XXH_readLE64(secret+48) ^ XXH_readLE64(secret+56)) + seed;
5045 xxh_u64 const input_lo = XXH_readLE64(input);
5046 xxh_u64 input_hi = XXH_readLE64(input + len - 8);
5047 XXH128_hash_t m128 = XXH_mult64to128(input_lo ^ input_hi ^ bitflipl, XXH_PRIME64_1);
5048
5049
5050
5051
5052 m128.low64 += (xxh_u64)(len - 1) << 54;
5053 input_hi ^= bitfliph;
5054
5055
5056
5057
5058
5059
5060
5061 if (sizeof(void *) < sizeof(xxh_u64)) {
5062
5063
5064
5065
5066
5067
5068 m128.high64 += (input_hi & 0xFFFFFFFF00000000ULL) + XXH_mult32to64((xxh_u32)input_hi, XXH_PRIME32_2);
5069 } else {
5070
5071
5072
5073
5074
5075
5076
5077
5078
5079
5080
5081
5082
5083
5084
5085
5086
5087
5088
5089
5090
5091
5092
5093
5094 m128.high64 += input_hi + XXH_mult32to64((xxh_u32)input_hi, XXH_PRIME32_2 - 1);
5095 }
5096
5097 m128.low64 ^= XXH_swap64(m128.high64);
5098
5099 {
5100 XXH128_hash_t h128 = XXH_mult64to128(m128.low64, XXH_PRIME64_2);
5101 h128.high64 += m128.high64 * XXH_PRIME64_2;
5102
5103 h128.low64 = XXH3_avalanche(h128.low64);
5104 h128.high64 = XXH3_avalanche(h128.high64);
5105 return h128;
5106 } }
5107 }
5108
5109
5110
5111
5112 XXH_FORCE_INLINE XXH128_hash_t
5113 XXH3_len_0to16_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
5114 {
5115 XXH_ASSERT(len <= 16);
5116 { if (len > 8) return XXH3_len_9to16_128b(input, len, secret, seed);
5117 if (len >= 4) return XXH3_len_4to8_128b(input, len, secret, seed);
5118 if (len) return XXH3_len_1to3_128b(input, len, secret, seed);
5119 { XXH128_hash_t h128;
5120 xxh_u64 const bitflipl = XXH_readLE64(secret+64) ^ XXH_readLE64(secret+72);
5121 xxh_u64 const bitfliph = XXH_readLE64(secret+80) ^ XXH_readLE64(secret+88);
5122 h128.low64 = XXH64_avalanche(seed ^ bitflipl);
5123 h128.high64 = XXH64_avalanche( seed ^ bitfliph);
5124 return h128;
5125 } }
5126 }
5127
5128
5129
5130
5131 XXH_FORCE_INLINE XXH128_hash_t
5132 XXH128_mix32B(XXH128_hash_t acc, const xxh_u8* input_1, const xxh_u8* input_2,
5133 const xxh_u8* secret, XXH64_hash_t seed)
5134 {
5135 acc.low64 += XXH3_mix16B (input_1, secret+0, seed);
5136 acc.low64 ^= XXH_readLE64(input_2) + XXH_readLE64(input_2 + 8);
5137 acc.high64 += XXH3_mix16B (input_2, secret+16, seed);
5138 acc.high64 ^= XXH_readLE64(input_1) + XXH_readLE64(input_1 + 8);
5139 return acc;
5140 }
5141
5142
5143 XXH_FORCE_INLINE XXH128_hash_t
5144 XXH3_len_17to128_128b(const xxh_u8* XXH_RESTRICT input, size_t len,
5145 const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
5146 XXH64_hash_t seed)
5147 {
5148 XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
5149 XXH_ASSERT(16 < len && len <= 128);
5150
5151 { XXH128_hash_t acc;
5152 acc.low64 = len * XXH_PRIME64_1;
5153 acc.high64 = 0;
5154 if (len > 32) {
5155 if (len > 64) {
5156 if (len > 96) {
5157 acc = XXH128_mix32B(acc, input+48, input+len-64, secret+96, seed);
5158 }
5159 acc = XXH128_mix32B(acc, input+32, input+len-48, secret+64, seed);
5160 }
5161 acc = XXH128_mix32B(acc, input+16, input+len-32, secret+32, seed);
5162 }
5163 acc = XXH128_mix32B(acc, input, input+len-16, secret, seed);
5164 { XXH128_hash_t h128;
5165 h128.low64 = acc.low64 + acc.high64;
5166 h128.high64 = (acc.low64 * XXH_PRIME64_1)
5167 + (acc.high64 * XXH_PRIME64_4)
5168 + ((len - seed) * XXH_PRIME64_2);
5169 h128.low64 = XXH3_avalanche(h128.low64);
5170 h128.high64 = (XXH64_hash_t)0 - XXH3_avalanche(h128.high64);
5171 return h128;
5172 }
5173 }
5174 }
5175
5176 XXH_NO_INLINE XXH128_hash_t
5177 XXH3_len_129to240_128b(const xxh_u8* XXH_RESTRICT input, size_t len,
5178 const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
5179 XXH64_hash_t seed)
5180 {
5181 XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
5182 XXH_ASSERT(128 < len && len <= XXH3_MIDSIZE_MAX);
5183
5184 { XXH128_hash_t acc;
5185 int const nbRounds = (int)len / 32;
5186 int i;
5187 acc.low64 = len * XXH_PRIME64_1;
5188 acc.high64 = 0;
5189 for (i=0; i<4; i++) {
5190 acc = XXH128_mix32B(acc,
5191 input + (32 * i),
5192 input + (32 * i) + 16,
5193 secret + (32 * i),
5194 seed);
5195 }
5196 acc.low64 = XXH3_avalanche(acc.low64);
5197 acc.high64 = XXH3_avalanche(acc.high64);
5198 XXH_ASSERT(nbRounds >= 4);
5199 for (i=4 ; i < nbRounds; i++) {
5200 acc = XXH128_mix32B(acc,
5201 input + (32 * i),
5202 input + (32 * i) + 16,
5203 secret + XXH3_MIDSIZE_STARTOFFSET + (32 * (i - 4)),
5204 seed);
5205 }
5206
5207 acc = XXH128_mix32B(acc,
5208 input + len - 16,
5209 input + len - 32,
5210 secret + XXH3_SECRET_SIZE_MIN - XXH3_MIDSIZE_LASTOFFSET - 16,
5211 0ULL - seed);
5212
5213 { XXH128_hash_t h128;
5214 h128.low64 = acc.low64 + acc.high64;
5215 h128.high64 = (acc.low64 * XXH_PRIME64_1)
5216 + (acc.high64 * XXH_PRIME64_4)
5217 + ((len - seed) * XXH_PRIME64_2);
5218 h128.low64 = XXH3_avalanche(h128.low64);
5219 h128.high64 = (XXH64_hash_t)0 - XXH3_avalanche(h128.high64);
5220 return h128;
5221 }
5222 }
5223 }
5224
5225 XXH_FORCE_INLINE XXH128_hash_t
5226 XXH3_hashLong_128b_internal(const void* XXH_RESTRICT input, size_t len,
5227 const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
5228 XXH3_f_accumulate_512 f_acc512,
5229 XXH3_f_scrambleAcc f_scramble)
5230 {
5231 XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;
5232
5233 XXH3_hashLong_internal_loop(acc, (const xxh_u8*)input, len, secret, secretSize, f_acc512, f_scramble);
5234
5235
5236 XXH_STATIC_ASSERT(sizeof(acc) == 64);
5237 XXH_ASSERT(secretSize >= sizeof(acc) + XXH_SECRET_MERGEACCS_START);
5238 { XXH128_hash_t h128;
5239 h128.low64 = XXH3_mergeAccs(acc,
5240 secret + XXH_SECRET_MERGEACCS_START,
5241 (xxh_u64)len * XXH_PRIME64_1);
5242 h128.high64 = XXH3_mergeAccs(acc,
5243 secret + secretSize
5244 - sizeof(acc) - XXH_SECRET_MERGEACCS_START,
5245 ~((xxh_u64)len * XXH_PRIME64_2));
5246 return h128;
5247 }
5248 }
5249
5250
5251
5252
5253 XXH_NO_INLINE XXH128_hash_t
5254 XXH3_hashLong_128b_default(const void* XXH_RESTRICT input, size_t len,
5255 XXH64_hash_t seed64,
5256 const void* XXH_RESTRICT secret, size_t secretLen)
5257 {
5258 (void)seed64; (void)secret; (void)secretLen;
5259 return XXH3_hashLong_128b_internal(input, len, XXH3_kSecret, sizeof(XXH3_kSecret),
5260 XXH3_accumulate_512, XXH3_scrambleAcc);
5261 }
5262
5263
5264
5265
5266
5267 XXH_FORCE_INLINE XXH128_hash_t
5268 XXH3_hashLong_128b_withSecret(const void* XXH_RESTRICT input, size_t len,
5269 XXH64_hash_t seed64,
5270 const void* XXH_RESTRICT secret, size_t secretLen)
5271 {
5272 (void)seed64;
5273 return XXH3_hashLong_128b_internal(input, len, (const xxh_u8*)secret, secretLen,
5274 XXH3_accumulate_512, XXH3_scrambleAcc);
5275 }
5276
5277 XXH_FORCE_INLINE XXH128_hash_t
5278 XXH3_hashLong_128b_withSeed_internal(const void* XXH_RESTRICT input, size_t len,
5279 XXH64_hash_t seed64,
5280 XXH3_f_accumulate_512 f_acc512,
5281 XXH3_f_scrambleAcc f_scramble,
5282 XXH3_f_initCustomSecret f_initSec)
5283 {
5284 if (seed64 == 0)
5285 return XXH3_hashLong_128b_internal(input, len,
5286 XXH3_kSecret, sizeof(XXH3_kSecret),
5287 f_acc512, f_scramble);
5288 { XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
5289 f_initSec(secret, seed64);
5290 return XXH3_hashLong_128b_internal(input, len, (const xxh_u8*)secret, sizeof(secret),
5291 f_acc512, f_scramble);
5292 }
5293 }
5294
5295
5296
5297
5298 XXH_NO_INLINE XXH128_hash_t
5299 XXH3_hashLong_128b_withSeed(const void* input, size_t len,
5300 XXH64_hash_t seed64, const void* XXH_RESTRICT secret, size_t secretLen)
5301 {
5302 (void)secret; (void)secretLen;
5303 return XXH3_hashLong_128b_withSeed_internal(input, len, seed64,
5304 XXH3_accumulate_512, XXH3_scrambleAcc, XXH3_initCustomSecret);
5305 }
5306
5307 typedef XXH128_hash_t (*XXH3_hashLong128_f)(const void* XXH_RESTRICT, size_t,
5308 XXH64_hash_t, const void* XXH_RESTRICT, size_t);
5309
5310 XXH_FORCE_INLINE XXH128_hash_t
5311 XXH3_128bits_internal(const void* input, size_t len,
5312 XXH64_hash_t seed64, const void* XXH_RESTRICT secret, size_t secretLen,
5313 XXH3_hashLong128_f f_hl128)
5314 {
5315 XXH_ASSERT(secretLen >= XXH3_SECRET_SIZE_MIN);
5316
5317
5318
5319
5320
5321
5322 if (len <= 16)
5323 return XXH3_len_0to16_128b((const xxh_u8*)input, len, (const xxh_u8*)secret, seed64);
5324 if (len <= 128)
5325 return XXH3_len_17to128_128b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
5326 if (len <= XXH3_MIDSIZE_MAX)
5327 return XXH3_len_129to240_128b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
5328 return f_hl128(input, len, seed64, secret, secretLen);
5329 }
5330
5331
5332
5333
5334
5335 XXH_PUBLIC_API XXH128_hash_t XXH3_128bits(const void* input, size_t len)
5336 {
5337 return XXH3_128bits_internal(input, len, 0,
5338 XXH3_kSecret, sizeof(XXH3_kSecret),
5339 XXH3_hashLong_128b_default);
5340 }
5341
5342
5343 XXH_PUBLIC_API XXH128_hash_t
5344 XXH3_128bits_withSecret(const void* input, size_t len, const void* secret, size_t secretSize)
5345 {
5346 return XXH3_128bits_internal(input, len, 0,
5347 (const xxh_u8*)secret, secretSize,
5348 XXH3_hashLong_128b_withSecret);
5349 }
5350
5351
5352 XXH_PUBLIC_API XXH128_hash_t
5353 XXH3_128bits_withSeed(const void* input, size_t len, XXH64_hash_t seed)
5354 {
5355 return XXH3_128bits_internal(input, len, seed,
5356 XXH3_kSecret, sizeof(XXH3_kSecret),
5357 XXH3_hashLong_128b_withSeed);
5358 }
5359
5360
5361 XXH_PUBLIC_API XXH128_hash_t
5362 XXH3_128bits_withSecretandSeed(const void* input, size_t len, const void* secret, size_t secretSize, XXH64_hash_t seed)
5363 {
5364 if (len <= XXH3_MIDSIZE_MAX)
5365 return XXH3_128bits_internal(input, len, seed, XXH3_kSecret, sizeof(XXH3_kSecret), NULL);
5366 return XXH3_hashLong_128b_withSecret(input, len, seed, secret, secretSize);
5367 }
5368
5369
5370 XXH_PUBLIC_API XXH128_hash_t
5371 XXH128(const void* input, size_t len, XXH64_hash_t seed)
5372 {
5373 return XXH3_128bits_withSeed(input, len, seed);
5374 }
5375
5376
5377
5378
5379
5380
5381
5382
5383
5384
5385 XXH_PUBLIC_API XXH_errorcode
5386 XXH3_128bits_reset(XXH3_state_t* statePtr)
5387 {
5388 return XXH3_64bits_reset(statePtr);
5389 }
5390
5391
5392 XXH_PUBLIC_API XXH_errorcode
5393 XXH3_128bits_reset_withSecret(XXH3_state_t* statePtr, const void* secret, size_t secretSize)
5394 {
5395 return XXH3_64bits_reset_withSecret(statePtr, secret, secretSize);
5396 }
5397
5398
5399 XXH_PUBLIC_API XXH_errorcode
5400 XXH3_128bits_reset_withSeed(XXH3_state_t* statePtr, XXH64_hash_t seed)
5401 {
5402 return XXH3_64bits_reset_withSeed(statePtr, seed);
5403 }
5404
5405
5406 XXH_PUBLIC_API XXH_errorcode
5407 XXH3_128bits_reset_withSecretandSeed(XXH3_state_t* statePtr, const void* secret, size_t secretSize, XXH64_hash_t seed)
5408 {
5409 return XXH3_64bits_reset_withSecretandSeed(statePtr, secret, secretSize, seed);
5410 }
5411
5412
5413 XXH_PUBLIC_API XXH_errorcode
5414 XXH3_128bits_update(XXH3_state_t* state, const void* input, size_t len)
5415 {
5416 return XXH3_update(state, (const xxh_u8*)input, len,
5417 XXH3_accumulate_512, XXH3_scrambleAcc);
5418 }
5419
5420
5421 XXH_PUBLIC_API XXH128_hash_t XXH3_128bits_digest (const XXH3_state_t* state)
5422 {
5423 const unsigned char* const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
5424 if (state->totalLen > XXH3_MIDSIZE_MAX) {
5425 XXH_ALIGN(XXH_ACC_ALIGN) XXH64_hash_t acc[XXH_ACC_NB];
5426 XXH3_digest_long(acc, state, secret);
5427 XXH_ASSERT(state->secretLimit + XXH_STRIPE_LEN >= sizeof(acc) + XXH_SECRET_MERGEACCS_START);
5428 { XXH128_hash_t h128;
5429 h128.low64 = XXH3_mergeAccs(acc,
5430 secret + XXH_SECRET_MERGEACCS_START,
5431 (xxh_u64)state->totalLen * XXH_PRIME64_1);
5432 h128.high64 = XXH3_mergeAccs(acc,
5433 secret + state->secretLimit + XXH_STRIPE_LEN
5434 - sizeof(acc) - XXH_SECRET_MERGEACCS_START,
5435 ~((xxh_u64)state->totalLen * XXH_PRIME64_2));
5436 return h128;
5437 }
5438 }
5439
5440 if (state->seed)
5441 return XXH3_128bits_withSeed(state->buffer, (size_t)state->totalLen, state->seed);
5442 return XXH3_128bits_withSecret(state->buffer, (size_t)(state->totalLen),
5443 secret, state->secretLimit + XXH_STRIPE_LEN);
5444 }
5445
5446
5447
5448 #include <string.h> /* memcmp, memcpy */
5449
5450
5451
5452 XXH_PUBLIC_API int XXH128_isEqual(XXH128_hash_t h1, XXH128_hash_t h2)
5453 {
5454
5455 return !(memcmp(&h1, &h2, sizeof(h1)));
5456 }
5457
5458
5459
5460
5461
5462
5463 XXH_PUBLIC_API int XXH128_cmp(const void* h128_1, const void* h128_2)
5464 {
5465 XXH128_hash_t const h1 = *(const XXH128_hash_t*)h128_1;
5466 XXH128_hash_t const h2 = *(const XXH128_hash_t*)h128_2;
5467 int const hcmp = (h1.high64 > h2.high64) - (h2.high64 > h1.high64);
5468
5469 if (hcmp) return hcmp;
5470 return (h1.low64 > h2.low64) - (h2.low64 > h1.low64);
5471 }
5472
5473
5474
5475
5476 XXH_PUBLIC_API void
5477 XXH128_canonicalFromHash(XXH128_canonical_t* dst, XXH128_hash_t hash)
5478 {
5479 XXH_STATIC_ASSERT(sizeof(XXH128_canonical_t) == sizeof(XXH128_hash_t));
5480 if (XXH_CPU_LITTLE_ENDIAN) {
5481 hash.high64 = XXH_swap64(hash.high64);
5482 hash.low64 = XXH_swap64(hash.low64);
5483 }
5484 XXH_memcpy(dst, &hash.high64, sizeof(hash.high64));
5485 XXH_memcpy((char*)dst + sizeof(hash.high64), &hash.low64, sizeof(hash.low64));
5486 }
5487
5488
5489 XXH_PUBLIC_API XXH128_hash_t
5490 XXH128_hashFromCanonical(const XXH128_canonical_t* src)
5491 {
5492 XXH128_hash_t h;
5493 h.high64 = XXH_readBE64(src);
5494 h.low64 = XXH_readBE64(src->digest + 8);
5495 return h;
5496 }
5497
5498
5499
5500
5501
5502
5503
5504 #define XXH_MIN(x, y) (((x) > (y)) ? (y) : (x))
5505
5506 static void XXH3_combine16(void* dst, XXH128_hash_t h128)
5507 {
5508 XXH_writeLE64( dst, XXH_readLE64(dst) ^ h128.low64 );
5509 XXH_writeLE64( (char*)dst+8, XXH_readLE64((char*)dst+8) ^ h128.high64 );
5510 }
5511
5512
5513 XXH_PUBLIC_API XXH_errorcode
5514 XXH3_generateSecret(void* secretBuffer, size_t secretSize, const void* customSeed, size_t customSeedSize)
5515 {
5516 XXH_ASSERT(secretBuffer != NULL);
5517 if (secretBuffer == NULL) return XXH_ERROR;
5518 XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN);
5519 if (secretSize < XXH3_SECRET_SIZE_MIN) return XXH_ERROR;
5520 if (customSeedSize == 0) {
5521 customSeed = XXH3_kSecret;
5522 customSeedSize = XXH_SECRET_DEFAULT_SIZE;
5523 }
5524 XXH_ASSERT(customSeed != NULL);
5525 if (customSeed == NULL) return XXH_ERROR;
5526
5527
5528 { size_t pos = 0;
5529 while (pos < secretSize) {
5530 size_t const toCopy = XXH_MIN((secretSize - pos), customSeedSize);
5531 memcpy((char*)secretBuffer + pos, customSeed, toCopy);
5532 pos += toCopy;
5533 } }
5534
5535 { size_t const nbSeg16 = secretSize / 16;
5536 size_t n;
5537 XXH128_canonical_t scrambler;
5538 XXH128_canonicalFromHash(&scrambler, XXH128(customSeed, customSeedSize, 0));
5539 for (n=0; n<nbSeg16; n++) {
5540 XXH128_hash_t const h128 = XXH128(&scrambler, sizeof(scrambler), n);
5541 XXH3_combine16((char*)secretBuffer + n*16, h128);
5542 }
5543
5544 XXH3_combine16((char*)secretBuffer + secretSize - 16, XXH128_hashFromCanonical(&scrambler));
5545 }
5546 return XXH_OK;
5547 }
5548
5549
5550 XXH_PUBLIC_API void
5551 XXH3_generateSecret_fromSeed(void* secretBuffer, XXH64_hash_t seed)
5552 {
5553 XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
5554 XXH3_initCustomSecret(secret, seed);
5555 XXH_ASSERT(secretBuffer != NULL);
5556 memcpy(secretBuffer, secret, XXH_SECRET_DEFAULT_SIZE);
5557 }
5558
5559
5560
5561
5562 #if XXH_VECTOR == XXH_AVX2 \
5563 && defined(__GNUC__) && !defined(__clang__) \
5564 && defined(__OPTIMIZE__) && !defined(__OPTIMIZE_SIZE__)
5565 # pragma GCC pop_options
5566 #endif
5567
5568 #endif
5569
5570 #endif
5571
5572
5573
5574
5575 #endif
5576
5577
5578 #if defined (__cplusplus)
5579 }
5580 #endif