/* xxHash - Extremely Fast Hash algorithm: public API declarations (xxhash.h). */
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
0044
0045
0046
0047
0048
0049
0050
0051
0052
0053
0054
0055
0056
0057
0058
0059
0060
0061
0062
0063
0064
0065
0066
0067
0068
0069
0070
0071
0072
0073
0074
0075
0076
0077
0078
0079
0080
0081
0082
0083
0084
0085
0086
0087
0088
0089
0090
0091
0092
0093
0094
0095
0096
0097
0098
0099
0100
0101
0102
0103
0104
0105
0106
0107
0108
0109
0110
0111
0112
0113
0114
0115
0116
0117
0118
0119
0120
0121
0122
0123
0124
0125
0126
0127
0128
0129
0130
0131
0132
0133
0134
0135
0136
0137
0138
0139
0140
0141
0142
0143
0144
0145
0146
0147
0148
0149
0150
0151
0152
0153
0154
0155
0156
0157
0158
0159
0160
0161
0162
0163
0164
0165
0166
0167
0168
0169
0170
0171
0172 #if defined (__cplusplus)
0173 extern "C" {
0174 #endif
0175
0176
0177
0178
0179
0180
0181
0182
0183
0184 #ifdef XXH_DOXYGEN
0185
0186
0187
0188
0189
0190
0191
0192
0193
0194
0195
0196 # define XXH_STATIC_LINKING_ONLY
0197
0198
0199
0200
0201
0202
0203
0204
0205
0206
0207
0208
0209 # define XXH_IMPLEMENTATION
0210
0211
0212
0213
0214
0215
0216
0217
0218
0219
0220
0221
0222
0223
0224
0225
0226
0227
0228
0229
0230 # define XXH_INLINE_ALL
0231 # undef XXH_INLINE_ALL
0232
0233
0234
0235 # define XXH_PRIVATE_API
0236 # undef XXH_PRIVATE_API
0237
0238
0239
0240
0241
0242
0243
0244
0245
0246
0247
0248
0249
0250 # define XXH_NAMESPACE
0251 # undef XXH_NAMESPACE
0252 #endif
0253
0254 #if (defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)) \
0255 && !defined(XXH_INLINE_ALL_31684351384)
0256
0257 # define XXH_INLINE_ALL_31684351384
0258
0259 # undef XXH_STATIC_LINKING_ONLY
0260 # define XXH_STATIC_LINKING_ONLY
0261
0262 # undef XXH_PUBLIC_API
0263 # if defined(__GNUC__)
0264 # define XXH_PUBLIC_API static __inline __attribute__((unused))
0265 # elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) )
0266 # define XXH_PUBLIC_API static inline
0267 # elif defined(_MSC_VER)
0268 # define XXH_PUBLIC_API static __inline
0269 # else
0270
0271 # define XXH_PUBLIC_API static
0272 # endif
0273
0274
0275
0276
0277
0278
0279
0280
0281
0282
0283
0284
0285
0286
0287 # undef XXH_versionNumber
0288
0289 # undef XXH32
0290 # undef XXH32_createState
0291 # undef XXH32_freeState
0292 # undef XXH32_reset
0293 # undef XXH32_update
0294 # undef XXH32_digest
0295 # undef XXH32_copyState
0296 # undef XXH32_canonicalFromHash
0297 # undef XXH32_hashFromCanonical
0298
0299 # undef XXH64
0300 # undef XXH64_createState
0301 # undef XXH64_freeState
0302 # undef XXH64_reset
0303 # undef XXH64_update
0304 # undef XXH64_digest
0305 # undef XXH64_copyState
0306 # undef XXH64_canonicalFromHash
0307 # undef XXH64_hashFromCanonical
0308
0309 # undef XXH3_64bits
0310 # undef XXH3_64bits_withSecret
0311 # undef XXH3_64bits_withSeed
0312 # undef XXH3_64bits_withSecretandSeed
0313 # undef XXH3_createState
0314 # undef XXH3_freeState
0315 # undef XXH3_copyState
0316 # undef XXH3_64bits_reset
0317 # undef XXH3_64bits_reset_withSeed
0318 # undef XXH3_64bits_reset_withSecret
0319 # undef XXH3_64bits_update
0320 # undef XXH3_64bits_digest
0321 # undef XXH3_generateSecret
0322
0323 # undef XXH128
0324 # undef XXH3_128bits
0325 # undef XXH3_128bits_withSeed
0326 # undef XXH3_128bits_withSecret
0327 # undef XXH3_128bits_reset
0328 # undef XXH3_128bits_reset_withSeed
0329 # undef XXH3_128bits_reset_withSecret
0330 # undef XXH3_128bits_reset_withSecretandSeed
0331 # undef XXH3_128bits_update
0332 # undef XXH3_128bits_digest
0333 # undef XXH128_isEqual
0334 # undef XXH128_cmp
0335 # undef XXH128_canonicalFromHash
0336 # undef XXH128_hashFromCanonical
0337
0338 # undef XXH_NAMESPACE
0339
0340
0341 # define XXH_NAMESPACE XXH_INLINE_
0342
0343
0344
0345
0346
0347
0348
0349 # define XXH_IPREF(Id) XXH_NAMESPACE ## Id
0350 # define XXH_OK XXH_IPREF(XXH_OK)
0351 # define XXH_ERROR XXH_IPREF(XXH_ERROR)
0352 # define XXH_errorcode XXH_IPREF(XXH_errorcode)
0353 # define XXH32_canonical_t XXH_IPREF(XXH32_canonical_t)
0354 # define XXH64_canonical_t XXH_IPREF(XXH64_canonical_t)
0355 # define XXH128_canonical_t XXH_IPREF(XXH128_canonical_t)
0356 # define XXH32_state_s XXH_IPREF(XXH32_state_s)
0357 # define XXH32_state_t XXH_IPREF(XXH32_state_t)
0358 # define XXH64_state_s XXH_IPREF(XXH64_state_s)
0359 # define XXH64_state_t XXH_IPREF(XXH64_state_t)
0360 # define XXH3_state_s XXH_IPREF(XXH3_state_s)
0361 # define XXH3_state_t XXH_IPREF(XXH3_state_t)
0362 # define XXH128_hash_t XXH_IPREF(XXH128_hash_t)
0363
0364 # undef XXHASH_H_5627135585666179
0365 # undef XXHASH_H_STATIC_13879238742
0366 #endif
0367
0368
0369
0370
0371 #ifndef XXHASH_H_5627135585666179
0372 #define XXHASH_H_5627135585666179 1
0373
0374
0375 #if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API)
0376 # if defined(WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT))
0377 # ifdef XXH_EXPORT
0378 # define XXH_PUBLIC_API __declspec(dllexport)
0379 # elif XXH_IMPORT
0380 # define XXH_PUBLIC_API __declspec(dllimport)
0381 # endif
0382 # else
0383 # define XXH_PUBLIC_API
0384 # endif
0385 #endif
0386
0387 #ifdef XXH_NAMESPACE
0388 # define XXH_CAT(A,B) A##B
0389 # define XXH_NAME2(A,B) XXH_CAT(A,B)
0390 # define XXH_versionNumber XXH_NAME2(XXH_NAMESPACE, XXH_versionNumber)
0391
0392 # define XXH32 XXH_NAME2(XXH_NAMESPACE, XXH32)
0393 # define XXH32_createState XXH_NAME2(XXH_NAMESPACE, XXH32_createState)
0394 # define XXH32_freeState XXH_NAME2(XXH_NAMESPACE, XXH32_freeState)
0395 # define XXH32_reset XXH_NAME2(XXH_NAMESPACE, XXH32_reset)
0396 # define XXH32_update XXH_NAME2(XXH_NAMESPACE, XXH32_update)
0397 # define XXH32_digest XXH_NAME2(XXH_NAMESPACE, XXH32_digest)
0398 # define XXH32_copyState XXH_NAME2(XXH_NAMESPACE, XXH32_copyState)
0399 # define XXH32_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH32_canonicalFromHash)
0400 # define XXH32_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH32_hashFromCanonical)
0401
0402 # define XXH64 XXH_NAME2(XXH_NAMESPACE, XXH64)
0403 # define XXH64_createState XXH_NAME2(XXH_NAMESPACE, XXH64_createState)
0404 # define XXH64_freeState XXH_NAME2(XXH_NAMESPACE, XXH64_freeState)
0405 # define XXH64_reset XXH_NAME2(XXH_NAMESPACE, XXH64_reset)
0406 # define XXH64_update XXH_NAME2(XXH_NAMESPACE, XXH64_update)
0407 # define XXH64_digest XXH_NAME2(XXH_NAMESPACE, XXH64_digest)
0408 # define XXH64_copyState XXH_NAME2(XXH_NAMESPACE, XXH64_copyState)
0409 # define XXH64_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH64_canonicalFromHash)
0410 # define XXH64_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH64_hashFromCanonical)
0411
0412 # define XXH3_64bits XXH_NAME2(XXH_NAMESPACE, XXH3_64bits)
0413 # define XXH3_64bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecret)
0414 # define XXH3_64bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSeed)
0415 # define XXH3_64bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecretandSeed)
0416 # define XXH3_createState XXH_NAME2(XXH_NAMESPACE, XXH3_createState)
0417 # define XXH3_freeState XXH_NAME2(XXH_NAMESPACE, XXH3_freeState)
0418 # define XXH3_copyState XXH_NAME2(XXH_NAMESPACE, XXH3_copyState)
0419 # define XXH3_64bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset)
0420 # define XXH3_64bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSeed)
0421 # define XXH3_64bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecret)
0422 # define XXH3_64bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecretandSeed)
0423 # define XXH3_64bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_update)
0424 # define XXH3_64bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_digest)
0425 # define XXH3_generateSecret XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret)
0426 # define XXH3_generateSecret_fromSeed XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret_fromSeed)
0427
0428 # define XXH128 XXH_NAME2(XXH_NAMESPACE, XXH128)
0429 # define XXH3_128bits XXH_NAME2(XXH_NAMESPACE, XXH3_128bits)
0430 # define XXH3_128bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSeed)
0431 # define XXH3_128bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecret)
0432 # define XXH3_128bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecretandSeed)
0433 # define XXH3_128bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset)
0434 # define XXH3_128bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSeed)
0435 # define XXH3_128bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecret)
0436 # define XXH3_128bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecretandSeed)
0437 # define XXH3_128bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_update)
0438 # define XXH3_128bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_digest)
0439 # define XXH128_isEqual XXH_NAME2(XXH_NAMESPACE, XXH128_isEqual)
0440 # define XXH128_cmp XXH_NAME2(XXH_NAMESPACE, XXH128_cmp)
0441 # define XXH128_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH128_canonicalFromHash)
0442 # define XXH128_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH128_hashFromCanonical)
0443 #endif
0444
0445
0446
0447
0448
0449
0450
0451 #if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API)
0452 # if defined(WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT))
0453 # ifdef XXH_EXPORT
0454 # define XXH_PUBLIC_API __declspec(dllexport)
0455 # elif XXH_IMPORT
0456 # define XXH_PUBLIC_API __declspec(dllimport)
0457 # endif
0458 # else
0459 # define XXH_PUBLIC_API
0460 # endif
0461 #endif
0462
0463 #if defined (__GNUC__)
0464 # define XXH_CONSTF __attribute__((const))
0465 # define XXH_PUREF __attribute__((pure))
0466 # define XXH_MALLOCF __attribute__((malloc))
0467 #else
0468 # define XXH_CONSTF
0469 # define XXH_PUREF
0470 # define XXH_MALLOCF
0471 #endif
0472
0473
0474
0475
0476 #define XXH_VERSION_MAJOR 0
0477 #define XXH_VERSION_MINOR 8
0478 #define XXH_VERSION_RELEASE 2
0479
0480 #define XXH_VERSION_NUMBER (XXH_VERSION_MAJOR *100*100 + XXH_VERSION_MINOR *100 + XXH_VERSION_RELEASE)
0481
0482
0483
0484
0485
0486
0487
0488
0489
0490 XXH_PUBLIC_API XXH_CONSTF unsigned XXH_versionNumber (void);
0491
0492
0493
0494
0495
0496 #include <stddef.h> /* size_t */
0497
0498
0499
0500 typedef enum {
0501 XXH_OK = 0,
0502 XXH_ERROR
0503 } XXH_errorcode;
0504
0505
0506
0507
0508
0509 #if defined(XXH_DOXYGEN)
0510
0511
0512
0513
0514
0515 typedef uint32_t XXH32_hash_t;
0516
0517 #elif !defined (__VMS) \
0518 && (defined (__cplusplus) \
0519 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
0520 # include <stdint.h>
0521 typedef uint32_t XXH32_hash_t;
0522
0523 #else
0524 # include <limits.h>
0525 # if UINT_MAX == 0xFFFFFFFFUL
0526 typedef unsigned int XXH32_hash_t;
0527 # elif ULONG_MAX == 0xFFFFFFFFUL
0528 typedef unsigned long XXH32_hash_t;
0529 # else
0530 # error "unsupported platform: need a 32-bit type"
0531 # endif
0532 #endif
0533
0534
0535
0536
0537
0538
0539
0540
0541
0542
0543
0544
0545
0546
0547
0548
0549
0550
0551
0552
0553
0554
0555
0556
0557
0558
0559
0560
0561
0562
0563
0564
0565
0566
0567
0568
0569
0570
0571
0572
0573
0574
0575 XXH_PUBLIC_API XXH_PUREF XXH32_hash_t XXH32 (const void* input, size_t length, XXH32_hash_t seed);
0576
0577 #ifndef XXH_NO_STREAM
0578
0579
0580
0581
0582
0583
0584
0585
0586
0587
0588
0589
0590
0591
0592
0593
0594
0595
0596
0597
0598
0599
0600
0601
0602
0603
0604
0605
0606
0607
0608
0609 typedef struct XXH32_state_s XXH32_state_t;
0610
0611
0612
0613
0614
0615
0616
0617 XXH_PUBLIC_API XXH_MALLOCF XXH32_state_t* XXH32_createState(void);
0618
0619
0620
0621
0622
0623
0624
0625 XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t* statePtr);
0626
0627
0628
0629
0630
0631
0632
0633
0634 XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t* dst_state, const XXH32_state_t* src_state);
0635
0636
0637
0638
0639
0640
0641
0642
0643
0644
0645
0646
0647
0648
0649 XXH_PUBLIC_API XXH_errorcode XXH32_reset (XXH32_state_t* statePtr, XXH32_hash_t seed);
0650
0651
0652
0653
0654
0655
0656
0657
0658
0659
0660
0661
0662
0663
0664
0665
0666
0667
0668
0669 XXH_PUBLIC_API XXH_errorcode XXH32_update (XXH32_state_t* statePtr, const void* input, size_t length);
0670
0671
0672
0673
0674
0675
0676
0677
0678
0679
0680
0681
0682
0683
0684
0685 XXH_PUBLIC_API XXH_PUREF XXH32_hash_t XXH32_digest (const XXH32_state_t* statePtr);
0686 #endif
0687
0688
0689
0690
0691
0692
0693
0694
0695
0696
0697
0698
0699
0700
0701
0702
0703
0704
0705
0706
0707
0708
0709
0710
0711
0712 typedef struct {
0713 unsigned char digest[4];
0714 } XXH32_canonical_t;
0715
0716
0717
0718
0719
0720
0721
0722
0723
0724
0725 XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t* dst, XXH32_hash_t hash);
0726
0727
0728
0729
0730
0731
0732
0733
0734
0735
0736
0737 XXH_PUBLIC_API XXH_PUREF XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t* src);
0738
0739
0740
0741 #ifdef __has_attribute
0742 # define XXH_HAS_ATTRIBUTE(x) __has_attribute(x)
0743 #else
0744 # define XXH_HAS_ATTRIBUTE(x) 0
0745 #endif
0746
0747
0748
0749
0750
0751
0752
0753
0754 #define XXH_C23_VN 201711L
0755
0756
0757
0758
0759 #if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= XXH_C23_VN) && defined(__has_c_attribute)
0760 # define XXH_HAS_C_ATTRIBUTE(x) __has_c_attribute(x)
0761 #else
0762 # define XXH_HAS_C_ATTRIBUTE(x) 0
0763 #endif
0764
0765
0766
0767 #if defined(__cplusplus) && defined(__has_cpp_attribute)
0768 # define XXH_HAS_CPP_ATTRIBUTE(x) __has_cpp_attribute(x)
0769 #else
0770 # define XXH_HAS_CPP_ATTRIBUTE(x) 0
0771 #endif
0772
0773
0774
0775
0776
0777
0778
0779
0780
0781 #if XXH_HAS_C_ATTRIBUTE(fallthrough) || XXH_HAS_CPP_ATTRIBUTE(fallthrough)
0782 # define XXH_FALLTHROUGH [[fallthrough]]
0783 #elif XXH_HAS_ATTRIBUTE(__fallthrough__)
0784 # define XXH_FALLTHROUGH __attribute__ ((__fallthrough__))
0785 #else
0786 # define XXH_FALLTHROUGH
0787 #endif
0788
0789
0790
0791
0792
0793
0794
0795
0796 #if XXH_HAS_ATTRIBUTE(noescape)
0797 # define XXH_NOESCAPE __attribute__((noescape))
0798 #else
0799 # define XXH_NOESCAPE
0800 #endif
0801
0802
0803
0804
0805
0806
0807
0808
0809
0810 #ifndef XXH_NO_LONG_LONG
0811
0812
0813
0814 #if defined(XXH_DOXYGEN)
0815
0816
0817
0818
0819
0820 typedef uint64_t XXH64_hash_t;
0821 #elif !defined (__VMS) \
0822 && (defined (__cplusplus) \
0823 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
0824 # include <stdint.h>
0825 typedef uint64_t XXH64_hash_t;
0826 #else
0827 # include <limits.h>
0828 # if defined(__LP64__) && ULONG_MAX == 0xFFFFFFFFFFFFFFFFULL
0829
0830 typedef unsigned long XXH64_hash_t;
0831 # else
0832
0833 typedef unsigned long long XXH64_hash_t;
0834 # endif
0835 #endif
0836
0837
0838
0839
0840
0841
0842
0843
0844
0845
0846
0847
0848
0849
0850
0851
0852
0853
0854
0855
0856
0857
0858
0859
0860
0861
0862
0863
0864
0865
0866
0867
0868
0869
0870
0871
0872
0873
0874 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH64(XXH_NOESCAPE const void* input, size_t length, XXH64_hash_t seed);
0875
0876
0877 #ifndef XXH_NO_STREAM
0878
0879
0880
0881
0882
0883 typedef struct XXH64_state_s XXH64_state_t;
0884
0885
0886
0887
0888
0889
0890
0891 XXH_PUBLIC_API XXH_MALLOCF XXH64_state_t* XXH64_createState(void);
0892
0893
0894
0895
0896
0897
0898
0899
0900 XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t* statePtr);
0901
0902
0903
0904
0905
0906
0907
0908
0909
0910 XXH_PUBLIC_API void XXH64_copyState(XXH_NOESCAPE XXH64_state_t* dst_state, const XXH64_state_t* src_state);
0911
0912
0913
0914
0915
0916
0917
0918
0919
0920
0921
0922
0923
0924
0925 XXH_PUBLIC_API XXH_errorcode XXH64_reset (XXH_NOESCAPE XXH64_state_t* statePtr, XXH64_hash_t seed);
0926
0927
0928
0929
0930
0931
0932
0933
0934
0935
0936
0937
0938
0939
0940
0941
0942
0943
0944
0945 XXH_PUBLIC_API XXH_errorcode XXH64_update (XXH_NOESCAPE XXH64_state_t* statePtr, XXH_NOESCAPE const void* input, size_t length);
0946
0947
0948
0949
0950
0951
0952
0953
0954
0955
0956
0957
0958
0959
0960
0961 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH64_digest (XXH_NOESCAPE const XXH64_state_t* statePtr);
0962 #endif
0963
0964
0965
0966
0967
0968 typedef struct { unsigned char digest[sizeof(XXH64_hash_t)]; } XXH64_canonical_t;
0969
0970
0971
0972
0973
0974
0975
0976
0977
0978
0979 XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH_NOESCAPE XXH64_canonical_t* dst, XXH64_hash_t hash);
0980
0981
0982
0983
0984
0985
0986
0987
0988
0989
0990
0991 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH64_hashFromCanonical(XXH_NOESCAPE const XXH64_canonical_t* src);
0992
0993 #ifndef XXH_NO_XXH3
0994
0995
0996
0997
0998
0999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH3_64bits(XXH_NOESCAPE const void* input, size_t length);
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH3_64bits_withSeed(XXH_NOESCAPE const void* input, size_t length, XXH64_hash_t seed);
1085
1086
1087
1088
1089
1090
1091
1092
1093 #define XXH3_SECRET_SIZE_MIN 136
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH3_64bits_withSecret(XXH_NOESCAPE const void* data, size_t len, XXH_NOESCAPE const void* secret, size_t secretSize);
1114
1115
1116
1117 #ifndef XXH_NO_STREAM
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130 typedef struct XXH3_state_s XXH3_state_t;
1131 XXH_PUBLIC_API XXH_MALLOCF XXH3_state_t* XXH3_createState(void);
1132 XXH_PUBLIC_API XXH_errorcode XXH3_freeState(XXH3_state_t* statePtr);
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142 XXH_PUBLIC_API void XXH3_copyState(XXH_NOESCAPE XXH3_state_t* dst_state, XXH_NOESCAPE const XXH3_state_t* src_state);
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset(XXH_NOESCAPE XXH3_state_t* statePtr);
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset_withSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH64_hash_t seed);
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset_withSecret(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize);
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_update (XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* input, size_t length);
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH3_64bits_digest (XXH_NOESCAPE const XXH3_state_t* statePtr);
1223 #endif
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239 typedef struct {
1240 XXH64_hash_t low64;
1241 XXH64_hash_t high64;
1242 } XXH128_hash_t;
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH3_128bits(XXH_NOESCAPE const void* data, size_t len);
1262
1263 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH3_128bits_withSeed(XXH_NOESCAPE const void* data, size_t len, XXH64_hash_t seed);
1264
1265 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH3_128bits_withSecret(XXH_NOESCAPE const void* data, size_t len, XXH_NOESCAPE const void* secret, size_t secretSize);
1266
1267
1268 #ifndef XXH_NO_STREAM
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset(XXH_NOESCAPE XXH3_state_t* statePtr);
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset_withSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH64_hash_t seed);
1313
1314 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset_withSecret(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize);
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_update (XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* input, size_t length);
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH3_128bits_digest (XXH_NOESCAPE const XXH3_state_t* statePtr);
1351 #endif
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361 XXH_PUBLIC_API XXH_PUREF int XXH128_isEqual(XXH128_hash_t h1, XXH128_hash_t h2);
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371 XXH_PUBLIC_API XXH_PUREF int XXH128_cmp(XXH_NOESCAPE const void* h128_1, XXH_NOESCAPE const void* h128_2);
1372
1373
1374
1375 typedef struct { unsigned char digest[sizeof(XXH128_hash_t)]; } XXH128_canonical_t;
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387 XXH_PUBLIC_API void XXH128_canonicalFromHash(XXH_NOESCAPE XXH128_canonical_t* dst, XXH128_hash_t hash);
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH128_hashFromCanonical(XXH_NOESCAPE const XXH128_canonical_t* src);
1400
1401
1402 #endif
1403 #endif
1404
1405
1406
1407
1408 #endif
1409
1410
1411
1412 #if defined(XXH_STATIC_LINKING_ONLY) && !defined(XXHASH_H_STATIC_13879238742)
1413 #define XXHASH_H_STATIC_13879238742
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440 struct XXH32_state_s {
1441 XXH32_hash_t total_len_32;
1442 XXH32_hash_t large_len;
1443 XXH32_hash_t v[4];
1444 XXH32_hash_t mem32[4];
1445 XXH32_hash_t memsize;
1446 XXH32_hash_t reserved;
1447 };
1448
1449
1450 #ifndef XXH_NO_LONG_LONG
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464 struct XXH64_state_s {
1465 XXH64_hash_t total_len;
1466 XXH64_hash_t v[4];
1467 XXH64_hash_t mem64[4];
1468 XXH32_hash_t memsize;
1469 XXH32_hash_t reserved32;
1470 XXH64_hash_t reserved64;
1471 };
1472
1473 #ifndef XXH_NO_XXH3
1474
1475 #if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
1476 # include <stdalign.h>
1477 # define XXH_ALIGN(n) alignas(n)
1478 #elif defined(__cplusplus) && (__cplusplus >= 201103L)
1479
1480 # define XXH_ALIGN(n) alignas(n)
1481 #elif defined(__GNUC__)
1482 # define XXH_ALIGN(n) __attribute__ ((aligned(n)))
1483 #elif defined(_MSC_VER)
1484 # define XXH_ALIGN(n) __declspec(align(n))
1485 #else
1486 # define XXH_ALIGN(n)
1487 #endif
1488
1489
1490 #if !(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) \
1491 && ! (defined(__cplusplus) && (__cplusplus >= 201103L)) \
1492 && defined(__GNUC__)
1493 # define XXH_ALIGN_MEMBER(align, type) type XXH_ALIGN(align)
1494 #else
1495 # define XXH_ALIGN_MEMBER(align, type) XXH_ALIGN(align) type
1496 #endif
1497
1498
1499
1500
1501
1502
1503
1504
1505 #define XXH3_INTERNALBUFFER_SIZE 256
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515 #define XXH3_SECRET_DEFAULT_SIZE 192
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539 struct XXH3_state_s {
1540 XXH_ALIGN_MEMBER(64, XXH64_hash_t acc[8]);
1541
1542 XXH_ALIGN_MEMBER(64, unsigned char customSecret[XXH3_SECRET_DEFAULT_SIZE]);
1543
1544 XXH_ALIGN_MEMBER(64, unsigned char buffer[XXH3_INTERNALBUFFER_SIZE]);
1545
1546 XXH32_hash_t bufferedSize;
1547
1548 XXH32_hash_t useSeed;
1549
1550 size_t nbStripesSoFar;
1551
1552 XXH64_hash_t totalLen;
1553
1554 size_t nbStripesPerBlock;
1555
1556 size_t secretLimit;
1557
1558 XXH64_hash_t seed;
1559
1560 XXH64_hash_t reserved64;
1561
1562 const unsigned char* extSecret;
1563
1564
1565
1566 };
1567
1568 #undef XXH_ALIGN_MEMBER
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581 #define XXH3_INITSTATE(XXH3_state_ptr) \
1582 do { \
1583 XXH3_state_t* tmp_xxh3_state_ptr = (XXH3_state_ptr); \
1584 tmp_xxh3_state_ptr->seed = 0; \
1585 tmp_xxh3_state_ptr->extSecret = NULL; \
1586 } while(0)
1587
1588
1589
1590
1591
1592 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH128(XXH_NOESCAPE const void* data, size_t len, XXH64_hash_t seed);
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649 XXH_PUBLIC_API XXH_errorcode XXH3_generateSecret(XXH_NOESCAPE void* secretBuffer, size_t secretSize, XXH_NOESCAPE const void* customSeed, size_t customSeedSize);
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
1664
1665
1666
1667
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688 XXH_PUBLIC_API void XXH3_generateSecret_fromSeed(XXH_NOESCAPE void* secretBuffer, XXH64_hash_t seed);
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
1709
1710
1711
1712
1713
1714
1715
1716 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t
1717 XXH3_64bits_withSecretandSeed(XXH_NOESCAPE const void* data, size_t len,
1718 XXH_NOESCAPE const void* secret, size_t secretSize,
1719 XXH64_hash_t seed);
1720
1721 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t
1722 XXH3_128bits_withSecretandSeed(XXH_NOESCAPE const void* input, size_t length,
1723 XXH_NOESCAPE const void* secret, size_t secretSize,
1724 XXH64_hash_t seed64);
1725 #ifndef XXH_NO_STREAM
1726
1727 XXH_PUBLIC_API XXH_errorcode
1728 XXH3_64bits_reset_withSecretandSeed(XXH_NOESCAPE XXH3_state_t* statePtr,
1729 XXH_NOESCAPE const void* secret, size_t secretSize,
1730 XXH64_hash_t seed64);
1731
1732 XXH_PUBLIC_API XXH_errorcode
1733 XXH3_128bits_reset_withSecretandSeed(XXH_NOESCAPE XXH3_state_t* statePtr,
1734 XXH_NOESCAPE const void* secret, size_t secretSize,
1735 XXH64_hash_t seed64);
1736 #endif
1737
1738 #endif
1739 #endif
1740 #if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)
1741 # define XXH_IMPLEMENTATION
1742 #endif
1743
1744 #endif
1745
1746
1747
1748
1749
1750
1751
1752
1753
1754
1755
1756
1757
1758
1759
1760
1761
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774 #if ( defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API) \
1775 || defined(XXH_IMPLEMENTATION) ) && !defined(XXH_IMPLEM_13a8737387)
1776 # define XXH_IMPLEM_13a8737387
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788 #ifdef XXH_DOXYGEN
1789
1790
1791
1792
1793
1794 # define XXH_NO_LONG_LONG
1795 # undef XXH_NO_LONG_LONG
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
1822
1823
1824
1825
1826
1827
1828
1829
1830
1831
1832
1833
1834
1835
1836
1837
1838
1839
1840
1841
1842
1843
1844
1845
1846 # define XXH_FORCE_MEMORY_ACCESS 0
1847
1848
1849
1850
1851
1852
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862
1863
1864
1865
1866
1867
1868
1869
1870
1871
1872
1873
1874 # define XXH_SIZE_OPT 0
1875
1876
1877
1878
1879
1880
1881
1882
1883
1884
1885
1886
1887
1888
1889
1890
1891
1892
1893
1894
1895
1896
1897
1898
1899
1900
1901
1902
1903
1904 # define XXH_FORCE_ALIGN_CHECK 0
1905
1906
1907
1908
1909
1910
1911
1912
1913
1914
1915
1916
1917
1918
1919
1920
1921
1922
1923
1924
1925
1926 # define XXH_NO_INLINE_HINTS 0
1927
1928
1929
1930
1931
1932
1933
1934
1935
1936
1937
1938
1939
1940
1941
1942
1943 # define XXH3_INLINE_SECRET 0
1944
1945
1946
1947
1948
1949
1950
1951
1952
1953
1954
1955 # define XXH32_ENDJMP 0
1956
1957
1958
1959
1960
1961
1962
1963
1964 # define XXH_OLD_NAMES
1965 # undef XXH_OLD_NAMES
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975 # define XXH_NO_STREAM
1976 # undef XXH_NO_STREAM
1977 #endif
1978
1979
1980
1981
1982 #ifndef XXH_FORCE_MEMORY_ACCESS
1983
1984
1985
1986 # if defined(__GNUC__) && !(defined(__ARM_ARCH) && __ARM_ARCH < 7 && defined(__ARM_FEATURE_UNALIGNED))
1987 # define XXH_FORCE_MEMORY_ACCESS 1
1988 # endif
1989 #endif
1990
1991 #ifndef XXH_SIZE_OPT
1992
1993 # if (defined(__GNUC__) || defined(__clang__)) && defined(__OPTIMIZE_SIZE__)
1994 # define XXH_SIZE_OPT 1
1995 # else
1996 # define XXH_SIZE_OPT 0
1997 # endif
1998 #endif
1999
2000 #ifndef XXH_FORCE_ALIGN_CHECK
2001
2002 # if XXH_SIZE_OPT >= 1 || \
2003 defined(__i386) || defined(__x86_64__) || defined(__aarch64__) || defined(__ARM_FEATURE_UNALIGNED) \
2004 || defined(_M_IX86) || defined(_M_X64) || defined(_M_ARM64) || defined(_M_ARM)
2005 # define XXH_FORCE_ALIGN_CHECK 0
2006 # else
2007 # define XXH_FORCE_ALIGN_CHECK 1
2008 # endif
2009 #endif
2010
2011 #ifndef XXH_NO_INLINE_HINTS
2012 # if XXH_SIZE_OPT >= 1 || defined(__NO_INLINE__)
2013 # define XXH_NO_INLINE_HINTS 1
2014 # else
2015 # define XXH_NO_INLINE_HINTS 0
2016 # endif
2017 #endif
2018
2019 #ifndef XXH3_INLINE_SECRET
2020 # if (defined(__GNUC__) && !defined(__clang__) && __GNUC__ >= 12) \
2021 || !defined(XXH_INLINE_ALL)
2022 # define XXH3_INLINE_SECRET 0
2023 # else
2024 # define XXH3_INLINE_SECRET 1
2025 # endif
2026 #endif
2027
2028 #ifndef XXH32_ENDJMP
2029
2030 # define XXH32_ENDJMP 0
2031 #endif
2032
2033
2034
2035
2036
2037
2038
2039
2040
2041
2042 #if defined(XXH_NO_STREAM)
2043
2044 #elif defined(XXH_NO_STDLIB)
2045
2046
2047
2048
2049
2050
2051
2052
2053
2054
2055 static XXH_CONSTF void* XXH_malloc(size_t s) { (void)s; return NULL; }
2056 static void XXH_free(void* p) { (void)p; }
2057
2058 #else
2059
2060
2061
2062
2063
2064 #include <stdlib.h>
2065
2066
2067
2068
2069
2070 static XXH_MALLOCF void* XXH_malloc(size_t s) { return malloc(s); }
2071
2072
2073
2074
2075
2076 static void XXH_free(void* p) { free(p); }
2077
2078 #endif
2079
2080 #include <string.h>
2081
2082
2083
2084
2085
2086 static void* XXH_memcpy(void* dest, const void* src, size_t size)
2087 {
2088 return memcpy(dest,src,size);
2089 }
2090
2091 #include <limits.h> /* ULLONG_MAX */
2092
2093
2094
2095
2096
2097 #ifdef _MSC_VER
2098 # pragma warning(disable : 4127)
2099 #endif
2100
2101 #if XXH_NO_INLINE_HINTS
2102 # if defined(__GNUC__) || defined(__clang__)
2103 # define XXH_FORCE_INLINE static __attribute__((unused))
2104 # else
2105 # define XXH_FORCE_INLINE static
2106 # endif
2107 # define XXH_NO_INLINE static
2108
2109 #elif defined(__GNUC__) || defined(__clang__)
2110 # define XXH_FORCE_INLINE static __inline__ __attribute__((always_inline, unused))
2111 # define XXH_NO_INLINE static __attribute__((noinline))
2112 #elif defined(_MSC_VER)
2113 # define XXH_FORCE_INLINE static __forceinline
2114 # define XXH_NO_INLINE static __declspec(noinline)
2115 #elif defined (__cplusplus) \
2116 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L))
2117 # define XXH_FORCE_INLINE static inline
2118 # define XXH_NO_INLINE static
2119 #else
2120 # define XXH_FORCE_INLINE static
2121 # define XXH_NO_INLINE static
2122 #endif
2123
2124 #if XXH3_INLINE_SECRET
2125 # define XXH3_WITH_SECRET_INLINE XXH_FORCE_INLINE
2126 #else
2127 # define XXH3_WITH_SECRET_INLINE XXH_NO_INLINE
2128 #endif
2129
2130
2131
2132
2133
2134
2135
2136
2137
2138
2139
2140
2141
2142 #ifndef XXH_DEBUGLEVEL
2143 # ifdef DEBUGLEVEL
2144 # define XXH_DEBUGLEVEL DEBUGLEVEL
2145 # else
2146 # define XXH_DEBUGLEVEL 0
2147 # endif
2148 #endif
2149
2150 #if (XXH_DEBUGLEVEL>=1)
2151 # include <assert.h> /* note: can still be disabled with NDEBUG */
2152 # define XXH_ASSERT(c) assert(c)
2153 #else
2154 # if defined(__INTEL_COMPILER)
2155 # define XXH_ASSERT(c) XXH_ASSUME((unsigned char) (c))
2156 # else
2157 # define XXH_ASSERT(c) XXH_ASSUME(c)
2158 # endif
2159 #endif
2160
2161
2162 #ifndef XXH_STATIC_ASSERT
2163 # if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
2164 # define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { _Static_assert((c),m); } while(0)
2165 # elif defined(__cplusplus) && (__cplusplus >= 201103L)
2166 # define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { static_assert((c),m); } while(0)
2167 # else
2168 # define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { struct xxh_sa { char x[(c) ? 1 : -1]; }; } while(0)
2169 # endif
2170 # define XXH_STATIC_ASSERT(c) XXH_STATIC_ASSERT_WITH_MESSAGE((c),#c)
2171 #endif
2172
2173
2174
2175
2176
2177
2178
2179
2180
2181
2182
2183
2184
2185
2186
2187
2188
2189 #if defined(__GNUC__) || defined(__clang__)
2190 # define XXH_COMPILER_GUARD(var) __asm__("" : "+r" (var))
2191 #else
2192 # define XXH_COMPILER_GUARD(var) ((void)0)
2193 #endif
2194
2195
2196
2197 #if defined(__clang__) && defined(__ARM_ARCH) && !defined(__wasm__)
2198 # define XXH_COMPILER_GUARD_CLANG_NEON(var) __asm__("" : "+w" (var))
2199 #else
2200 # define XXH_COMPILER_GUARD_CLANG_NEON(var) ((void)0)
2201 #endif
2202
2203
2204
2205
2206 #if !defined (__VMS) \
2207 && (defined (__cplusplus) \
2208 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
2209 # include <stdint.h>
2210 typedef uint8_t xxh_u8;
2211 #else
2212 typedef unsigned char xxh_u8;
2213 #endif
2214 typedef XXH32_hash_t xxh_u32;
2215
2216 #ifdef XXH_OLD_NAMES
2217 # warning "XXH_OLD_NAMES is planned to be removed starting v0.9. If the program depends on it, consider moving away from it by employing newer type names directly"
2218 # define BYTE xxh_u8
2219 # define U8 xxh_u8
2220 # define U32 xxh_u32
2221 #endif
2222
2223
2224
2225
2226
2227
2228
2229
2230
2231
2232
2233
2234
2235
2236
2237
2238
2239
2240
2241
2242
2243
2244
2245
2246
2247
2248
2249
2250
2251
2252
2253
2254
2255
2256
2257
2258
2259
2260
2261
2262
2263
2264
2265
2266
2267
2268
2269
2270
2271
2272
2273
2274
2275 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2276
2277
2278
2279
2280 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
2281
2282
2283
2284
2285
/* Direct cast read. Fast, but an unaligned/aliasing-unsafe access in the
 * general case; only selected when the user explicitly sets
 * XXH_FORCE_MEMORY_ACCESS==2. */
static xxh_u32 XXH_read32(const void* memPtr) { return *(const xxh_u32*) memPtr; }
2287
2288 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
2289
2290
2291
2292
2293
2294
2295
2296
2297 #ifdef XXH_OLD_NAMES
2298 typedef union { xxh_u32 u32; } __attribute__((packed)) unalign;
2299 #endif
static xxh_u32 XXH_read32(const void* ptr)
{
    /* aligned(1) tells GCC/Clang the pointee may sit at any address,
     * so the compiler emits an unaligned-safe load. */
    typedef __attribute__((aligned(1))) xxh_u32 xxh_unalign32;
    return *((const xxh_unalign32*)ptr);
}
2305
2306 #else
2307
2308
2309
2310
2311
2312 static xxh_u32 XXH_read32(const void* memPtr)
2313 {
2314 xxh_u32 val;
2315 XXH_memcpy(&val, memPtr, sizeof(val));
2316 return val;
2317 }
2318
2319 #endif
2320
2321
2322
2323
2324
2325
2326
2327
2328
2329
2330
2331
2332
2333
2334
2335
2336
2337
2338
2339
2340 #ifndef XXH_CPU_LITTLE_ENDIAN
2341
2342
2343
2344
2345 # if defined(_WIN32) \
2346 || defined(__LITTLE_ENDIAN__) \
2347 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
2348 # define XXH_CPU_LITTLE_ENDIAN 1
2349 # elif defined(__BIG_ENDIAN__) \
2350 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
2351 # define XXH_CPU_LITTLE_ENDIAN 0
2352 # else
2353
2354
2355
2356
2357
2358
2359 static int XXH_isLittleEndian(void)
2360 {
2361
2362
2363
2364
2365 const union { xxh_u32 u; xxh_u8 c[4]; } one = { 1 };
2366 return one.c[0];
2367 }
2368 # define XXH_CPU_LITTLE_ENDIAN XXH_isLittleEndian()
2369 # endif
2370 #endif
2371
2372
2373
2374
2375
2376
2377
2378 #define XXH_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
2379
2380 #ifdef __has_builtin
2381 # define XXH_HAS_BUILTIN(x) __has_builtin(x)
2382 #else
2383 # define XXH_HAS_BUILTIN(x) 0
2384 #endif
2385
2386
2387
2388
2389
2390
2391
2392
2393
2394
2395
2396
2397
2398
2399
2400
2401
2402
2403
2404
2405
2406
2407
2408
2409
2410
2411
2412
2413
2414
2415 #if XXH_HAS_BUILTIN(__builtin_unreachable)
2416 # define XXH_UNREACHABLE() __builtin_unreachable()
2417
2418 #elif defined(_MSC_VER)
2419 # define XXH_UNREACHABLE() __assume(0)
2420
2421 #else
2422 # define XXH_UNREACHABLE()
2423 #endif
2424
2425 #if XXH_HAS_BUILTIN(__builtin_assume)
2426 # define XXH_ASSUME(c) __builtin_assume(c)
2427 #else
2428 # define XXH_ASSUME(c) if (!(c)) { XXH_UNREACHABLE(); }
2429 #endif
2430
2431
2432
2433
2434
2435
2436
2437
2438
2439
2440
2441
2442
2443
2444 #if !defined(NO_CLANG_BUILTIN) && XXH_HAS_BUILTIN(__builtin_rotateleft32) \
2445 && XXH_HAS_BUILTIN(__builtin_rotateleft64)
2446 # define XXH_rotl32 __builtin_rotateleft32
2447 # define XXH_rotl64 __builtin_rotateleft64
2448
2449 #elif defined(_MSC_VER)
2450 # define XXH_rotl32(x,r) _rotl(x,r)
2451 # define XXH_rotl64(x,r) _rotl64(x,r)
2452 #else
2453 # define XXH_rotl32(x,r) (((x) << (r)) | ((x) >> (32 - (r))))
2454 # define XXH_rotl64(x,r) (((x) << (r)) | ((x) >> (64 - (r))))
2455 #endif
2456
2457
2458
2459
2460
2461
2462
2463
2464
2465 #if defined(_MSC_VER)
2466 # define XXH_swap32 _byteswap_ulong
2467 #elif XXH_GCC_VERSION >= 403
2468 # define XXH_swap32 __builtin_bswap32
2469 #else
2470 static xxh_u32 XXH_swap32 (xxh_u32 x)
2471 {
2472 return ((x << 24) & 0xff000000 ) |
2473 ((x << 8) & 0x00ff0000 ) |
2474 ((x >> 8) & 0x0000ff00 ) |
2475 ((x >> 24) & 0x000000ff );
2476 }
2477 #endif
2478
2479
2480
2481
2482
2483
2484
2485
2486
2487
2488 typedef enum {
2489 XXH_aligned,
2490 XXH_unaligned
2491 } XXH_alignment;
2492
2493
2494
2495
2496
2497
2498 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2499
/* Byte-by-byte little-endian read: independent of host endianness and
 * alignment (XXH_FORCE_MEMORY_ACCESS==3 path). */
XXH_FORCE_INLINE xxh_u32 XXH_readLE32(const void* memPtr)
{
    const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
    return bytePtr[0]
         | ((xxh_u32)bytePtr[1] << 8)
         | ((xxh_u32)bytePtr[2] << 16)
         | ((xxh_u32)bytePtr[3] << 24);
}
2508
/* Byte-by-byte big-endian read; mirror of XXH_readLE32 with bytes reversed. */
XXH_FORCE_INLINE xxh_u32 XXH_readBE32(const void* memPtr)
{
    const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
    return bytePtr[3]
         | ((xxh_u32)bytePtr[2] << 8)
         | ((xxh_u32)bytePtr[1] << 16)
         | ((xxh_u32)bytePtr[0] << 24);
}
2517
2518 #else
/* Little-endian read on top of the native read: swap only on big-endian hosts. */
XXH_FORCE_INLINE xxh_u32 XXH_readLE32(const void* ptr)
{
    return XXH_CPU_LITTLE_ENDIAN ? XXH_read32(ptr) : XXH_swap32(XXH_read32(ptr));
}
2523
/* Big-endian read on top of the native read: swap only on little-endian hosts. */
static xxh_u32 XXH_readBE32(const void* ptr)
{
    return XXH_CPU_LITTLE_ENDIAN ? XXH_swap32(XXH_read32(ptr)) : XXH_read32(ptr);
}
2528 #endif
2529
/* Little-endian read with a caller-supplied alignment promise: the
 * XXH_aligned branch dereferences directly, which is only valid because the
 * caller guarantees 32-bit alignment of ptr. */
XXH_FORCE_INLINE xxh_u32
XXH_readLE32_align(const void* ptr, XXH_alignment align)
{
    if (align==XXH_unaligned) {
        return XXH_readLE32(ptr);
    } else {
        return XXH_CPU_LITTLE_ENDIAN ? *(const xxh_u32*)ptr : XXH_swap32(*(const xxh_u32*)ptr);
    }
}
2539
2540
2541
2542
2543
2544
/*! Returns the library version, encoded as the single integer XXH_VERSION_NUMBER. */
XXH_PUBLIC_API unsigned XXH_versionNumber (void) { return XXH_VERSION_NUMBER; }
2546
2547
2548
2549
2550
2551
2552
2553
2554
2555
2556
2557
2558
2559
2560 #define XXH_PRIME32_1 0x9E3779B1U
2561 #define XXH_PRIME32_2 0x85EBCA77U
2562 #define XXH_PRIME32_3 0xC2B2AE3DU
2563 #define XXH_PRIME32_4 0x27D4EB2FU
2564 #define XXH_PRIME32_5 0x165667B1U
2565
2566 #ifdef XXH_OLD_NAMES
2567 # define PRIME32_1 XXH_PRIME32_1
2568 # define PRIME32_2 XXH_PRIME32_2
2569 # define PRIME32_3 XXH_PRIME32_3
2570 # define PRIME32_4 XXH_PRIME32_4
2571 # define PRIME32_5 XXH_PRIME32_5
2572 #endif
2573
2574
2575
2576
2577
2578
2579
2580
2581
2582
2583
2584
/*!
 * Core XXH32 round: mixes one 32-bit input lane into an accumulator.
 * multiply - rotate(13) - multiply.
 */
static xxh_u32 XXH32_round(xxh_u32 acc, xxh_u32 input)
{
    acc += input * XXH_PRIME32_2;
    acc = XXH_rotl32(acc, 13);
    acc *= XXH_PRIME32_1;
#if (defined(__SSE4_1__) || defined(__aarch64__) || defined(__wasm_simd128__)) && !defined(XXH_ENABLE_AUTOVECTORIZE)
    /* Empty inline-asm barrier on the accumulator: prevents the compiler
     * from auto-vectorizing this scalar loop on targets where that was
     * found counter-productive. Opt out with XXH_ENABLE_AUTOVECTORIZE. */
    XXH_COMPILER_GUARD(acc);
#endif
    return acc;
}
2631
2632
2633
2634
2635
2636
2637
2638
2639
2640
2641
2642 static xxh_u32 XXH32_avalanche(xxh_u32 hash)
2643 {
2644 hash ^= hash >> 15;
2645 hash *= XXH_PRIME32_2;
2646 hash ^= hash >> 13;
2647 hash *= XXH_PRIME32_3;
2648 hash ^= hash >> 16;
2649 return hash;
2650 }
2651
2652 #define XXH_get32bits(p) XXH_readLE32_align(p, align)
2653
2654
2655
2656
2657
2658
2659
2660
2661
2662
2663
2664
2665
2666
2667
2668
/*!
 * Consumes the final 0..15 bytes of input, then applies the avalanche.
 * @param hash  accumulator after the main loop (length already added)
 * @param ptr   remaining bytes; may be NULL only when len==0
 * @param len   remaining length; only len&15 is used
 * @param align alignment promise forwarded to XXH_get32bits()
 */
static XXH_PUREF xxh_u32
XXH32_finalize(xxh_u32 hash, const xxh_u8* ptr, size_t len, XXH_alignment align)
{
/* Mix one byte into the hash. */
#define XXH_PROCESS1 do { \
    hash += (*ptr++) * XXH_PRIME32_5; \
    hash = XXH_rotl32(hash, 11) * XXH_PRIME32_1; \
} while (0)

/* Mix one 32-bit little-endian word into the hash. */
#define XXH_PROCESS4 do { \
    hash += XXH_get32bits(ptr) * XXH_PRIME32_3; \
    ptr += 4; \
    hash = XXH_rotl32(hash, 17) * XXH_PRIME32_4; \
} while (0)

    if (ptr==NULL) XXH_ASSERT(len == 0);

    /* Compact loop form (default): smaller code, data-dependent branches. */
    if (!XXH32_ENDJMP) {
        len &= 15;
        while (len >= 4) {
            XXH_PROCESS4;
            len -= 4;
        }
        while (len > 0) {
            XXH_PROCESS1;
            --len;
        }
        return XXH32_avalanche(hash);
    } else {
        /* Switch form: a single jump on len&15, fallthroughs do the rest. */
        switch(len&15) {
          case 12:      XXH_PROCESS4;
                        XXH_FALLTHROUGH;
          case 8:       XXH_PROCESS4;
                        XXH_FALLTHROUGH;
          case 4:       XXH_PROCESS4;
                        return XXH32_avalanche(hash);

          case 13:      XXH_PROCESS4;
                        XXH_FALLTHROUGH;
          case 9:       XXH_PROCESS4;
                        XXH_FALLTHROUGH;
          case 5:       XXH_PROCESS4;
                        XXH_PROCESS1;
                        return XXH32_avalanche(hash);

          case 14:      XXH_PROCESS4;
                        XXH_FALLTHROUGH;
          case 10:      XXH_PROCESS4;
                        XXH_FALLTHROUGH;
          case 6:       XXH_PROCESS4;
                        XXH_PROCESS1;
                        XXH_PROCESS1;
                        return XXH32_avalanche(hash);

          case 15:      XXH_PROCESS4;
                        XXH_FALLTHROUGH;
          case 11:      XXH_PROCESS4;
                        XXH_FALLTHROUGH;
          case 7:       XXH_PROCESS4;
                        XXH_FALLTHROUGH;
          case 3:       XXH_PROCESS1;
                        XXH_FALLTHROUGH;
          case 2:       XXH_PROCESS1;
                        XXH_FALLTHROUGH;
          case 1:       XXH_PROCESS1;
                        XXH_FALLTHROUGH;
          case 0:       return XXH32_avalanche(hash);
        }
        XXH_ASSERT(0);
        return hash;   /* unreachable; keeps compilers quiet */
    }
}
2741
2742 #ifdef XXH_OLD_NAMES
2743 # define PROCESS1 XXH_PROCESS1
2744 # define PROCESS4 XXH_PROCESS4
2745 #else
2746 # undef XXH_PROCESS1
2747 # undef XXH_PROCESS4
2748 #endif
2749
2750
2751
2752
2753
2754
2755
2756
2757
/*!
 * One-shot XXH32 core. Processes 16-byte stripes through four parallel
 * accumulators, then hands the tail to XXH32_finalize().
 * @param align alignment promise; XXH_get32bits() reads it implicitly.
 */
XXH_FORCE_INLINE XXH_PUREF xxh_u32
XXH32_endian_align(const xxh_u8* input, size_t len, xxh_u32 seed, XXH_alignment align)
{
    xxh_u32 h32;

    if (input==NULL) XXH_ASSERT(len == 0);

    if (len>=16) {
        const xxh_u8* const bEnd = input + len;
        const xxh_u8* const limit = bEnd - 15;
        /* Four independent lanes, seeded per the XXH32 specification. */
        xxh_u32 v1 = seed + XXH_PRIME32_1 + XXH_PRIME32_2;
        xxh_u32 v2 = seed + XXH_PRIME32_2;
        xxh_u32 v3 = seed + 0;
        xxh_u32 v4 = seed - XXH_PRIME32_1;

        do {
            v1 = XXH32_round(v1, XXH_get32bits(input)); input += 4;
            v2 = XXH32_round(v2, XXH_get32bits(input)); input += 4;
            v3 = XXH32_round(v3, XXH_get32bits(input)); input += 4;
            v4 = XXH32_round(v4, XXH_get32bits(input)); input += 4;
        } while (input < limit);

        /* Merge lanes with distinct rotations. */
        h32 = XXH_rotl32(v1, 1)  + XXH_rotl32(v2, 7)
            + XXH_rotl32(v3, 12) + XXH_rotl32(v4, 18);
    } else {
        /* Short input: skip the lanes entirely. */
        h32  = seed + XXH_PRIME32_5;
    }

    h32 += (xxh_u32)len;

    return XXH32_finalize(h32, input, len&15, align);
}
2790
2791
/*! Public one-shot XXH32 entry point. */
XXH_PUBLIC_API XXH32_hash_t XXH32 (const void* input, size_t len, XXH32_hash_t seed)
{
#if !defined(XXH_NO_STREAM) && XXH_SIZE_OPT >= 2
    /* Size-optimized build: reuse the streaming code path on a local state. */
    XXH32_state_t state;
    XXH32_reset(&state, seed);
    XXH32_update(&state, (const xxh_u8*)input, len);
    return XXH32_digest(&state);
#else
    if (XXH_FORCE_ALIGN_CHECK) {
        /* Take the faster aligned-read path when the pointer allows it. */
        if ((((size_t)input) & 3) == 0) {
            return XXH32_endian_align((const xxh_u8*)input, len, seed, XXH_aligned);
    }   }

    return XXH32_endian_align((const xxh_u8*)input, len, seed, XXH_unaligned);
#endif
}
2809
2810
2811
2812
2813 #ifndef XXH_NO_STREAM
2814
2815 XXH_PUBLIC_API XXH32_state_t* XXH32_createState(void)
2816 {
2817 return (XXH32_state_t*)XXH_malloc(sizeof(XXH32_state_t));
2818 }
2819
/*! Releases a state obtained from XXH32_createState(). Always returns XXH_OK. */
XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t* statePtr)
{
    XXH_free(statePtr);
    return XXH_OK;
}
2825
2826
/*! Copies the whole streaming state (plain memberwise copy). */
XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t* dstState, const XXH32_state_t* srcState)
{
    XXH_memcpy(dstState, srcState, sizeof(*dstState));
}
2831
2832
/*! Re-initializes a streaming state with @p seed; always returns XXH_OK. */
XXH_PUBLIC_API XXH_errorcode XXH32_reset(XXH32_state_t* statePtr, XXH32_hash_t seed)
{
    XXH_ASSERT(statePtr != NULL);
    memset(statePtr, 0, sizeof(*statePtr));
    /* Lane seeding matches XXH32_endian_align(). */
    statePtr->v[0] = seed + XXH_PRIME32_1 + XXH_PRIME32_2;
    statePtr->v[1] = seed + XXH_PRIME32_2;
    statePtr->v[2] = seed + 0;
    statePtr->v[3] = seed - XXH_PRIME32_1;
    return XXH_OK;
}
2843
2844
2845
2846 XXH_PUBLIC_API XXH_errorcode
2847 XXH32_update(XXH32_state_t* state, const void* input, size_t len)
2848 {
2849 if (input==NULL) {
2850 XXH_ASSERT(len == 0);
2851 return XXH_OK;
2852 }
2853
2854 { const xxh_u8* p = (const xxh_u8*)input;
2855 const xxh_u8* const bEnd = p + len;
2856
2857 state->total_len_32 += (XXH32_hash_t)len;
2858 state->large_len |= (XXH32_hash_t)((len>=16) | (state->total_len_32>=16));
2859
2860 if (state->memsize + len < 16) {
2861 XXH_memcpy((xxh_u8*)(state->mem32) + state->memsize, input, len);
2862 state->memsize += (XXH32_hash_t)len;
2863 return XXH_OK;
2864 }
2865
2866 if (state->memsize) {
2867 XXH_memcpy((xxh_u8*)(state->mem32) + state->memsize, input, 16-state->memsize);
2868 { const xxh_u32* p32 = state->mem32;
2869 state->v[0] = XXH32_round(state->v[0], XXH_readLE32(p32)); p32++;
2870 state->v[1] = XXH32_round(state->v[1], XXH_readLE32(p32)); p32++;
2871 state->v[2] = XXH32_round(state->v[2], XXH_readLE32(p32)); p32++;
2872 state->v[3] = XXH32_round(state->v[3], XXH_readLE32(p32));
2873 }
2874 p += 16-state->memsize;
2875 state->memsize = 0;
2876 }
2877
2878 if (p <= bEnd-16) {
2879 const xxh_u8* const limit = bEnd - 16;
2880
2881 do {
2882 state->v[0] = XXH32_round(state->v[0], XXH_readLE32(p)); p+=4;
2883 state->v[1] = XXH32_round(state->v[1], XXH_readLE32(p)); p+=4;
2884 state->v[2] = XXH32_round(state->v[2], XXH_readLE32(p)); p+=4;
2885 state->v[3] = XXH32_round(state->v[3], XXH_readLE32(p)); p+=4;
2886 } while (p<=limit);
2887
2888 }
2889
2890 if (p < bEnd) {
2891 XXH_memcpy(state->mem32, p, (size_t)(bEnd-p));
2892 state->memsize = (unsigned)(bEnd-p);
2893 }
2894 }
2895
2896 return XXH_OK;
2897 }
2898
2899
2900
/*!
 * Produces the current hash of all bytes fed so far; the state remains
 * usable for further updates. mem32 is 32-bit aligned state storage, so the
 * finalizer may take the XXH_aligned path.
 */
XXH_PUBLIC_API XXH32_hash_t XXH32_digest(const XXH32_state_t* state)
{
    xxh_u32 h32;

    if (state->large_len) {
        /* >=16 bytes total: merge the four lanes. */
        h32 = XXH_rotl32(state->v[0], 1)
            + XXH_rotl32(state->v[1], 7)
            + XXH_rotl32(state->v[2], 12)
            + XXH_rotl32(state->v[3], 18);
    } else {
        /* Short input: v[2] holds the plain seed (see XXH32_reset). */
        h32 = state->v[2] + XXH_PRIME32_5;
    }

    h32 += state->total_len_32;

    return XXH32_finalize(h32, (const xxh_u8*)state->mem32, state->memsize, XXH_aligned);
}
2918 #endif
2919
2920
2921
2922
2923
2924
2925
2926
2927
2928
2929
2930
2931
2932
2933
2934
2935
/*! Writes @p hash into @p dst in canonical (big-endian) byte order. */
XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t* dst, XXH32_hash_t hash)
{
    XXH_STATIC_ASSERT(sizeof(XXH32_canonical_t) == sizeof(XXH32_hash_t));
    if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap32(hash);
    XXH_memcpy(dst, &hash, sizeof(*dst));
}
2942
/*! Reads a hash back from its canonical (big-endian) representation. */
XXH_PUBLIC_API XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t* src)
{
    return XXH_readBE32(src);
}
2947
2948
2949 #ifndef XXH_NO_LONG_LONG
2950
2951
2952
2953
2954
2955
2956
2957
2958
2959
2960
2961 typedef XXH64_hash_t xxh_u64;
2962
2963 #ifdef XXH_OLD_NAMES
2964 # define U64 xxh_u64
2965 #endif
2966
2967 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2968
2969
2970
2971
2972 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
2973
2974
/* Direct cast read (XXH_FORCE_MEMORY_ACCESS==2): fast but aliasing/alignment
 * unsafe in general; opt-in only. 64-bit twin of XXH_read32. */
static xxh_u64 XXH_read64(const void* memPtr)
{
    return *(const xxh_u64*) memPtr;
}
2979
2980 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
2981
2982
2983
2984
2985
2986
2987
2988
2989 #ifdef XXH_OLD_NAMES
2990 typedef union { xxh_u32 u32; xxh_u64 u64; } __attribute__((packed)) unalign64;
2991 #endif
static xxh_u64 XXH_read64(const void* ptr)
{
    /* aligned(1) lets GCC/Clang emit an unaligned-safe 64-bit load. */
    typedef __attribute__((aligned(1))) xxh_u64 xxh_unalign64;
    return *((const xxh_unalign64*)ptr);
}
2997
2998 #else
2999
3000
3001
3002
3003
3004 static xxh_u64 XXH_read64(const void* memPtr)
3005 {
3006 xxh_u64 val;
3007 XXH_memcpy(&val, memPtr, sizeof(val));
3008 return val;
3009 }
3010
3011 #endif
3012
3013 #if defined(_MSC_VER)
3014 # define XXH_swap64 _byteswap_uint64
3015 #elif XXH_GCC_VERSION >= 403
3016 # define XXH_swap64 __builtin_bswap64
3017 #else
3018 static xxh_u64 XXH_swap64(xxh_u64 x)
3019 {
3020 return ((x << 56) & 0xff00000000000000ULL) |
3021 ((x << 40) & 0x00ff000000000000ULL) |
3022 ((x << 24) & 0x0000ff0000000000ULL) |
3023 ((x << 8) & 0x000000ff00000000ULL) |
3024 ((x >> 8) & 0x00000000ff000000ULL) |
3025 ((x >> 24) & 0x0000000000ff0000ULL) |
3026 ((x >> 40) & 0x000000000000ff00ULL) |
3027 ((x >> 56) & 0x00000000000000ffULL);
3028 }
3029 #endif
3030
3031
3032
3033 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
3034
/* Byte-by-byte little-endian 64-bit read: endianness- and alignment-free. */
XXH_FORCE_INLINE xxh_u64 XXH_readLE64(const void* memPtr)
{
    const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
    return bytePtr[0]
         | ((xxh_u64)bytePtr[1] << 8)
         | ((xxh_u64)bytePtr[2] << 16)
         | ((xxh_u64)bytePtr[3] << 24)
         | ((xxh_u64)bytePtr[4] << 32)
         | ((xxh_u64)bytePtr[5] << 40)
         | ((xxh_u64)bytePtr[6] << 48)
         | ((xxh_u64)bytePtr[7] << 56);
}
3047
/* Byte-by-byte big-endian 64-bit read; mirror of XXH_readLE64. */
XXH_FORCE_INLINE xxh_u64 XXH_readBE64(const void* memPtr)
{
    const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
    return bytePtr[7]
         | ((xxh_u64)bytePtr[6] << 8)
         | ((xxh_u64)bytePtr[5] << 16)
         | ((xxh_u64)bytePtr[4] << 24)
         | ((xxh_u64)bytePtr[3] << 32)
         | ((xxh_u64)bytePtr[2] << 40)
         | ((xxh_u64)bytePtr[1] << 48)
         | ((xxh_u64)bytePtr[0] << 56);
}
3060
3061 #else
/* Little-endian 64-bit read: swap only on big-endian hosts. */
XXH_FORCE_INLINE xxh_u64 XXH_readLE64(const void* ptr)
{
    return XXH_CPU_LITTLE_ENDIAN ? XXH_read64(ptr) : XXH_swap64(XXH_read64(ptr));
}
3066
/* Big-endian 64-bit read: swap only on little-endian hosts. */
static xxh_u64 XXH_readBE64(const void* ptr)
{
    return XXH_CPU_LITTLE_ENDIAN ? XXH_swap64(XXH_read64(ptr)) : XXH_read64(ptr);
}
3071 #endif
3072
/* Little-endian 64-bit read with a caller-supplied alignment promise;
 * the aligned branch dereferences directly (valid only if the promise holds). */
XXH_FORCE_INLINE xxh_u64
XXH_readLE64_align(const void* ptr, XXH_alignment align)
{
    if (align==XXH_unaligned)
        return XXH_readLE64(ptr);
    else
        return XXH_CPU_LITTLE_ENDIAN ? *(const xxh_u64*)ptr : XXH_swap64(*(const xxh_u64*)ptr);
}
3081
3082
3083
3084
3085
3086
3087
3088
3089
3090
3091
3092
3093 #define XXH_PRIME64_1 0x9E3779B185EBCA87ULL
3094 #define XXH_PRIME64_2 0xC2B2AE3D27D4EB4FULL
3095 #define XXH_PRIME64_3 0x165667B19E3779F9ULL
3096 #define XXH_PRIME64_4 0x85EBCA77C2B2AE63ULL
3097 #define XXH_PRIME64_5 0x27D4EB2F165667C5ULL
3098
3099 #ifdef XXH_OLD_NAMES
3100 # define PRIME64_1 XXH_PRIME64_1
3101 # define PRIME64_2 XXH_PRIME64_2
3102 # define PRIME64_3 XXH_PRIME64_3
3103 # define PRIME64_4 XXH_PRIME64_4
3104 # define PRIME64_5 XXH_PRIME64_5
3105 #endif
3106
3107
3108 static xxh_u64 XXH64_round(xxh_u64 acc, xxh_u64 input)
3109 {
3110 acc += input * XXH_PRIME64_2;
3111 acc = XXH_rotl64(acc, 31);
3112 acc *= XXH_PRIME64_1;
3113 return acc;
3114 }
3115
3116 static xxh_u64 XXH64_mergeRound(xxh_u64 acc, xxh_u64 val)
3117 {
3118 val = XXH64_round(0, val);
3119 acc ^= val;
3120 acc = acc * XXH_PRIME64_1 + XXH_PRIME64_4;
3121 return acc;
3122 }
3123
3124
3125 static xxh_u64 XXH64_avalanche(xxh_u64 hash)
3126 {
3127 hash ^= hash >> 33;
3128 hash *= XXH_PRIME64_2;
3129 hash ^= hash >> 29;
3130 hash *= XXH_PRIME64_3;
3131 hash ^= hash >> 32;
3132 return hash;
3133 }
3134
3135
3136 #define XXH_get64bits(p) XXH_readLE64_align(p, align)
3137
3138
3139
3140
3141
3142
3143
3144
3145
3146
3147
3148
3149
3150
3151
3152
/*!
 * Consumes the final 0..31 bytes of input (8-, 4-, then 1-byte steps),
 * then applies the avalanche.
 * @param ptr may be NULL only when len==0; @param align forwarded to the
 * XXH_get64bits()/XXH_get32bits() readers.
 */
static XXH_PUREF xxh_u64
XXH64_finalize(xxh_u64 hash, const xxh_u8* ptr, size_t len, XXH_alignment align)
{
    if (ptr==NULL) XXH_ASSERT(len == 0);
    len &= 31;
    while (len >= 8) {
        xxh_u64 const k1 = XXH64_round(0, XXH_get64bits(ptr));
        ptr += 8;
        hash ^= k1;
        hash  = XXH_rotl64(hash,27) * XXH_PRIME64_1 + XXH_PRIME64_4;
        len -= 8;
    }
    if (len >= 4) {
        hash ^= (xxh_u64)(XXH_get32bits(ptr)) * XXH_PRIME64_1;
        ptr += 4;
        hash = XXH_rotl64(hash, 23) * XXH_PRIME64_2 + XXH_PRIME64_3;
        len -= 4;
    }
    while (len > 0) {
        hash ^= (*ptr++) * XXH_PRIME64_5;
        hash = XXH_rotl64(hash, 11) * XXH_PRIME64_1;
        --len;
    }
    return  XXH64_avalanche(hash);
}
3178
3179 #ifdef XXH_OLD_NAMES
3180 # define PROCESS1_64 XXH_PROCESS1_64
3181 # define PROCESS4_64 XXH_PROCESS4_64
3182 # define PROCESS8_64 XXH_PROCESS8_64
3183 #else
3184 # undef XXH_PROCESS1_64
3185 # undef XXH_PROCESS4_64
3186 # undef XXH_PROCESS8_64
3187 #endif
3188
3189
3190
3191
3192
3193
3194
3195
3196
/*!
 * One-shot XXH64 core. Processes 32-byte stripes through four parallel
 * 64-bit accumulators, merges them, then hands the tail to XXH64_finalize().
 */
XXH_FORCE_INLINE XXH_PUREF xxh_u64
XXH64_endian_align(const xxh_u8* input, size_t len, xxh_u64 seed, XXH_alignment align)
{
    xxh_u64 h64;
    if (input==NULL) XXH_ASSERT(len == 0);

    if (len>=32) {
        const xxh_u8* const bEnd = input + len;
        const xxh_u8* const limit = bEnd - 31;
        /* Four independent lanes, seeded per the XXH64 specification. */
        xxh_u64 v1 = seed + XXH_PRIME64_1 + XXH_PRIME64_2;
        xxh_u64 v2 = seed + XXH_PRIME64_2;
        xxh_u64 v3 = seed + 0;
        xxh_u64 v4 = seed - XXH_PRIME64_1;

        do {
            v1 = XXH64_round(v1, XXH_get64bits(input)); input+=8;
            v2 = XXH64_round(v2, XXH_get64bits(input)); input+=8;
            v3 = XXH64_round(v3, XXH_get64bits(input)); input+=8;
            v4 = XXH64_round(v4, XXH_get64bits(input)); input+=8;
        } while (input<limit);

        /* Merge lanes; unlike XXH32, each lane is folded back in. */
        h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
        h64 = XXH64_mergeRound(h64, v1);
        h64 = XXH64_mergeRound(h64, v2);
        h64 = XXH64_mergeRound(h64, v3);
        h64 = XXH64_mergeRound(h64, v4);

    } else {
        /* Short input: skip the lanes entirely. */
        h64  = seed + XXH_PRIME64_5;
    }

    h64 += (xxh_u64) len;

    return XXH64_finalize(h64, input, len, align);
}
3232
3233
3234
/*! Public one-shot XXH64 entry point. */
XXH_PUBLIC_API XXH64_hash_t XXH64 (XXH_NOESCAPE const void* input, size_t len, XXH64_hash_t seed)
{
#if !defined(XXH_NO_STREAM) && XXH_SIZE_OPT >= 2
    /* Size-optimized build: reuse the streaming code path on a local state. */
    XXH64_state_t state;
    XXH64_reset(&state, seed);
    XXH64_update(&state, (const xxh_u8*)input, len);
    return XXH64_digest(&state);
#else
    if (XXH_FORCE_ALIGN_CHECK) {
        /* Take the faster aligned-read path when the pointer allows it. */
        if ((((size_t)input) & 7)==0) {
            return XXH64_endian_align((const xxh_u8*)input, len, seed, XXH_aligned);
    }   }

    return XXH64_endian_align((const xxh_u8*)input, len, seed, XXH_unaligned);

#endif
}
3253
3254
3255 #ifndef XXH_NO_STREAM
3256
3257 XXH_PUBLIC_API XXH64_state_t* XXH64_createState(void)
3258 {
3259 return (XXH64_state_t*)XXH_malloc(sizeof(XXH64_state_t));
3260 }
3261
/*! Releases a state obtained from XXH64_createState(). Always returns XXH_OK. */
XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t* statePtr)
{
    XXH_free(statePtr);
    return XXH_OK;
}
3267
3268
/*! Copies the whole streaming state (plain memberwise copy). */
XXH_PUBLIC_API void XXH64_copyState(XXH_NOESCAPE XXH64_state_t* dstState, const XXH64_state_t* srcState)
{
    XXH_memcpy(dstState, srcState, sizeof(*dstState));
}
3273
3274
/*! Re-initializes a streaming state with @p seed; always returns XXH_OK. */
XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH_NOESCAPE XXH64_state_t* statePtr, XXH64_hash_t seed)
{
    XXH_ASSERT(statePtr != NULL);
    memset(statePtr, 0, sizeof(*statePtr));
    /* Lane seeding matches XXH64_endian_align(). */
    statePtr->v[0] = seed + XXH_PRIME64_1 + XXH_PRIME64_2;
    statePtr->v[1] = seed + XXH_PRIME64_2;
    statePtr->v[2] = seed + 0;
    statePtr->v[3] = seed - XXH_PRIME64_1;
    return XXH_OK;
}
3285
3286
3287 XXH_PUBLIC_API XXH_errorcode
3288 XXH64_update (XXH_NOESCAPE XXH64_state_t* state, XXH_NOESCAPE const void* input, size_t len)
3289 {
3290 if (input==NULL) {
3291 XXH_ASSERT(len == 0);
3292 return XXH_OK;
3293 }
3294
3295 { const xxh_u8* p = (const xxh_u8*)input;
3296 const xxh_u8* const bEnd = p + len;
3297
3298 state->total_len += len;
3299
3300 if (state->memsize + len < 32) {
3301 XXH_memcpy(((xxh_u8*)state->mem64) + state->memsize, input, len);
3302 state->memsize += (xxh_u32)len;
3303 return XXH_OK;
3304 }
3305
3306 if (state->memsize) {
3307 XXH_memcpy(((xxh_u8*)state->mem64) + state->memsize, input, 32-state->memsize);
3308 state->v[0] = XXH64_round(state->v[0], XXH_readLE64(state->mem64+0));
3309 state->v[1] = XXH64_round(state->v[1], XXH_readLE64(state->mem64+1));
3310 state->v[2] = XXH64_round(state->v[2], XXH_readLE64(state->mem64+2));
3311 state->v[3] = XXH64_round(state->v[3], XXH_readLE64(state->mem64+3));
3312 p += 32 - state->memsize;
3313 state->memsize = 0;
3314 }
3315
3316 if (p+32 <= bEnd) {
3317 const xxh_u8* const limit = bEnd - 32;
3318
3319 do {
3320 state->v[0] = XXH64_round(state->v[0], XXH_readLE64(p)); p+=8;
3321 state->v[1] = XXH64_round(state->v[1], XXH_readLE64(p)); p+=8;
3322 state->v[2] = XXH64_round(state->v[2], XXH_readLE64(p)); p+=8;
3323 state->v[3] = XXH64_round(state->v[3], XXH_readLE64(p)); p+=8;
3324 } while (p<=limit);
3325
3326 }
3327
3328 if (p < bEnd) {
3329 XXH_memcpy(state->mem64, p, (size_t)(bEnd-p));
3330 state->memsize = (unsigned)(bEnd-p);
3331 }
3332 }
3333
3334 return XXH_OK;
3335 }
3336
3337
3338
/*!
 * Produces the current hash of all bytes fed so far; the state remains
 * usable for further updates. mem64 is 64-bit aligned state storage, so the
 * finalizer may take the XXH_aligned path.
 */
XXH_PUBLIC_API XXH64_hash_t XXH64_digest(XXH_NOESCAPE const XXH64_state_t* state)
{
    xxh_u64 h64;

    if (state->total_len >= 32) {
        /* >=32 bytes total: merge and fold the four lanes. */
        h64 = XXH_rotl64(state->v[0], 1) + XXH_rotl64(state->v[1], 7) + XXH_rotl64(state->v[2], 12) + XXH_rotl64(state->v[3], 18);
        h64 = XXH64_mergeRound(h64, state->v[0]);
        h64 = XXH64_mergeRound(h64, state->v[1]);
        h64 = XXH64_mergeRound(h64, state->v[2]);
        h64 = XXH64_mergeRound(h64, state->v[3]);
    } else {
        /* Short input: v[2] holds the plain seed (see XXH64_reset). */
        h64  = state->v[2] /*seed*/ + XXH_PRIME64_5;
    }

    h64 += (xxh_u64) state->total_len;

    return XXH64_finalize(h64, (const xxh_u8*)state->mem64, (size_t)state->total_len, XXH_aligned);
}
3357 #endif
3358
3359
3360
3361
/*! Writes @p hash into @p dst in canonical (big-endian) byte order. */
XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH_NOESCAPE XXH64_canonical_t* dst, XXH64_hash_t hash)
{
    XXH_STATIC_ASSERT(sizeof(XXH64_canonical_t) == sizeof(XXH64_hash_t));
    if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap64(hash);
    XXH_memcpy(dst, &hash, sizeof(*dst));
}
3368
3369
/*! Reads a hash back from its canonical (big-endian) representation. */
XXH_PUBLIC_API XXH64_hash_t XXH64_hashFromCanonical(XXH_NOESCAPE const XXH64_canonical_t* src)
{
    return XXH_readBE64(src);
}
3374
3375 #ifndef XXH_NO_XXH3
3376
3377
3378
3379
3380
3381
3382
3383
3384
3385
3386
3387
3388
3389
3390 #if ((defined(sun) || defined(__sun)) && __cplusplus)
3391 # define XXH_RESTRICT
3392 #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
3393 # define XXH_RESTRICT restrict
3394 #elif (defined (__GNUC__) && ((__GNUC__ > 3) || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))) \
3395 || (defined (__clang__)) \
3396 || (defined (_MSC_VER) && (_MSC_VER >= 1400)) \
3397 || (defined (__INTEL_COMPILER) && (__INTEL_COMPILER >= 1300))
3398
3399
3400
3401
3402 # define XXH_RESTRICT __restrict
3403 #else
3404 # define XXH_RESTRICT
3405 #endif
3406
3407 #if (defined(__GNUC__) && (__GNUC__ >= 3)) \
3408 || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) \
3409 || defined(__clang__)
3410 # define XXH_likely(x) __builtin_expect(x, 1)
3411 # define XXH_unlikely(x) __builtin_expect(x, 0)
3412 #else
3413 # define XXH_likely(x) (x)
3414 # define XXH_unlikely(x) (x)
3415 #endif
3416
3417 #ifndef XXH_HAS_INCLUDE
3418 # ifdef __has_include
3419 # define XXH_HAS_INCLUDE(x) __has_include(x)
3420 # else
3421 # define XXH_HAS_INCLUDE(x) 0
3422 # endif
3423 #endif
3424
3425 #if defined(__GNUC__) || defined(__clang__)
3426 # if defined(__ARM_FEATURE_SVE)
3427 # include <arm_sve.h>
3428 # endif
3429 # if defined(__ARM_NEON__) || defined(__ARM_NEON) \
3430 || (defined(_M_ARM) && _M_ARM >= 7) \
3431 || defined(_M_ARM64) || defined(_M_ARM64EC) \
3432 || (defined(__wasm_simd128__) && XXH_HAS_INCLUDE(<arm_neon.h>))
3433 # define inline __inline__
3434 # include <arm_neon.h>
3435 # undef inline
3436 # elif defined(__AVX2__)
3437 # include <immintrin.h>
3438 # elif defined(__SSE2__)
3439 # include <emmintrin.h>
3440 # endif
3441 #endif
3442
3443 #if defined(_MSC_VER)
3444 # include <intrin.h>
3445 #endif
3446
3447
3448
3449
3450
3451
3452
3453
3454
3455
3456
3457
3458
3459
3460
3461
3462
3463
3464
3465
3466
3467
3468
3469
3470
3471
3472
3473
3474
3475
3476
3477
3478
3479
3480
3481
3482
3483
3484
3485
3486
3487
3488
3489
3490
3491
3492
3493
3494
3495
3496
3497
3498
3499
3500
3501
3502
3503
3504
3505
3506
3507
3508
3509
3510
3511
3512
3513
3514
3515
3516 #if defined(__thumb__) && !defined(__thumb2__) && defined(__ARM_ARCH_ISA_ARM)
3517 # warning "XXH3 is highly inefficient without ARM or Thumb-2."
3518 #endif
3519
3520
3521
3522
3523
3524 #ifdef XXH_DOXYGEN
3525
3526
3527
3528
3529
3530
3531
3532
3533
3534
3535 # define XXH_VECTOR XXH_SCALAR
3536
3537
3538
3539
3540
3541
3542
3543
3544
3545 enum XXH_VECTOR_TYPE {
3546 XXH_SCALAR = 0,
3547 XXH_SSE2 = 1,
3548
3549
3550
3551
3552
3553 XXH_AVX2 = 2,
3554 XXH_AVX512 = 3,
3555 XXH_NEON = 4,
3556
3557
3558
3559
3560 XXH_VSX = 5,
3561 XXH_SVE = 6,
3562 };
3563
3564
3565
3566
3567
3568
3569
3570
3571
3572 # define XXH_ACC_ALIGN 8
3573 #endif
3574
3575
3576 #ifndef XXH_DOXYGEN
3577 # define XXH_SCALAR 0
3578 # define XXH_SSE2 1
3579 # define XXH_AVX2 2
3580 # define XXH_AVX512 3
3581 # define XXH_NEON 4
3582 # define XXH_VSX 5
3583 # define XXH_SVE 6
3584 #endif
3585
3586 #ifndef XXH_VECTOR
3587 # if defined(__ARM_FEATURE_SVE)
3588 # define XXH_VECTOR XXH_SVE
3589 # elif ( \
3590 defined(__ARM_NEON__) || defined(__ARM_NEON) \
3591 || defined(_M_ARM) || defined(_M_ARM64) || defined(_M_ARM64EC) \
3592 || (defined(__wasm_simd128__) && XXH_HAS_INCLUDE(<arm_neon.h>)) \
3593 ) && ( \
3594 defined(_WIN32) || defined(__LITTLE_ENDIAN__) \
3595 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) \
3596 )
3597 # define XXH_VECTOR XXH_NEON
3598 # elif defined(__AVX512F__)
3599 # define XXH_VECTOR XXH_AVX512
3600 # elif defined(__AVX2__)
3601 # define XXH_VECTOR XXH_AVX2
3602 # elif defined(__SSE2__) || defined(_M_AMD64) || defined(_M_X64) || (defined(_M_IX86_FP) && (_M_IX86_FP == 2))
3603 # define XXH_VECTOR XXH_SSE2
3604 # elif (defined(__PPC64__) && defined(__POWER8_VECTOR__)) \
3605 || (defined(__s390x__) && defined(__VEC__)) \
3606 && defined(__GNUC__)
3607 # define XXH_VECTOR XXH_VSX
3608 # else
3609 # define XXH_VECTOR XXH_SCALAR
3610 # endif
3611 #endif
3612
3613
3614 #if (XXH_VECTOR == XXH_SVE) && !defined(__ARM_FEATURE_SVE)
3615 # ifdef _MSC_VER
3616 # pragma warning(once : 4606)
3617 # else
3618 # warning "__ARM_FEATURE_SVE isn't supported. Use SCALAR instead."
3619 # endif
3620 # undef XXH_VECTOR
3621 # define XXH_VECTOR XXH_SCALAR
3622 #endif
3623
3624
3625
3626
3627
3628 #ifndef XXH_ACC_ALIGN
3629 # if defined(XXH_X86DISPATCH)
3630 # define XXH_ACC_ALIGN 64
3631 # elif XXH_VECTOR == XXH_SCALAR
3632 # define XXH_ACC_ALIGN 8
3633 # elif XXH_VECTOR == XXH_SSE2
3634 # define XXH_ACC_ALIGN 16
3635 # elif XXH_VECTOR == XXH_AVX2
3636 # define XXH_ACC_ALIGN 32
3637 # elif XXH_VECTOR == XXH_NEON
3638 # define XXH_ACC_ALIGN 16
3639 # elif XXH_VECTOR == XXH_VSX
3640 # define XXH_ACC_ALIGN 16
3641 # elif XXH_VECTOR == XXH_AVX512
3642 # define XXH_ACC_ALIGN 64
3643 # elif XXH_VECTOR == XXH_SVE
3644 # define XXH_ACC_ALIGN 64
3645 # endif
3646 #endif
3647
3648 #if defined(XXH_X86DISPATCH) || XXH_VECTOR == XXH_SSE2 \
3649 || XXH_VECTOR == XXH_AVX2 || XXH_VECTOR == XXH_AVX512
3650 # define XXH_SEC_ALIGN XXH_ACC_ALIGN
3651 #elif XXH_VECTOR == XXH_SVE
3652 # define XXH_SEC_ALIGN XXH_ACC_ALIGN
3653 #else
3654 # define XXH_SEC_ALIGN 8
3655 #endif
3656
3657 #if defined(__GNUC__) || defined(__clang__)
3658 # define XXH_ALIASING __attribute__((may_alias))
3659 #else
3660 # define XXH_ALIASING
3661 #endif
3662
3663
3664
3665
3666
3667
3668
3669
3670
3671
3672
3673
3674
3675
3676
3677
3678
3679
3680
3681
3682
3683
3684 #if XXH_VECTOR == XXH_AVX2 \
3685 && defined(__GNUC__) && !defined(__clang__) \
3686 && defined(__OPTIMIZE__) && XXH_SIZE_OPT <= 0
3687 # pragma GCC push_options
3688 # pragma GCC optimize("-O2")
3689 #endif
3690
3691 #if XXH_VECTOR == XXH_NEON
3692
3693
3694
3695
3696
3697
3698
3699
/* uint64x2_t tagged XXH_ALIASING (may_alias on GCC/Clang) so the accumulator
 * can be accessed through casted pointers without strict-aliasing issues. */
typedef uint64x2_t xxh_aliasing_uint64x2_t XXH_ALIASING;
3701
3702
3703
3704
3705
3706
3707
3708
3709
3710
3711
3712
3713
3714
/*
 * XXH_vld1q_u64: unaligned 128-bit load of two 64-bit lanes.
 * On aarch64 GCC, a plain dereference of the may_alias vector type is used;
 * other compilers go through vld1q_u8 + reinterpret, which tolerates any
 * alignment.
 */
#if defined(__aarch64__) && defined(__GNUC__) && !defined(__clang__)
XXH_FORCE_INLINE uint64x2_t XXH_vld1q_u64(void const* ptr)
{
    return *(xxh_aliasing_uint64x2_t const *)ptr;
}
#else
XXH_FORCE_INLINE uint64x2_t XXH_vld1q_u64(void const* ptr)
{
    return vreinterpretq_u64_u8(vld1q_u8((uint8_t const*)ptr));
}
#endif
3726
3727
3728
3729
3730
3731
3732
3733
3734
/*
 * XXH_vmlal_low_u32 / XXH_vmlal_high_u32:
 *   acc += (uint64) low-or-high 32-bit halves of lhs * same halves of rhs.
 * On aarch64 GCC < 11 the low variant is written in inline asm (umlal),
 * presumably to work around poor code generation — TODO confirm against
 * upstream history. Other compilers use the portable vmlal_u32 forms.
 */
#if defined(__aarch64__) && defined(__GNUC__) && !defined(__clang__) && __GNUC__ < 11
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_low_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    /* Multiply-accumulate the low 32-bit halves via a single umlal. */
    __asm__("umlal %0.2d, %1.2s, %2.2s" : "+w" (acc) : "w" (lhs), "w" (rhs));
    return acc;
}
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_high_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    /* The intrinsic is fine for the high halves on these compilers. */
    return vmlal_high_u32(acc, lhs, rhs);
}
#else
/* Portable versions: extract the relevant halves, then widen-multiply-add. */
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_low_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    return vmlal_u32(acc, vget_low_u32(lhs), vget_low_u32(rhs));
}


XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_high_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    return vmlal_u32(acc, vget_high_u32(lhs), vget_high_u32(rhs));
}
#endif
3764
3765
3766
3767
3768
3769
3770
3771
3772
3773
3774
3775
3776
3777
3778
3779
3780
3781
3782
3783
3784
3785
3786
3787
3788
3789
3790
3791
3792
3793
3794
3795
3796
3797
3798
3799
3800
3801
3802
/*
 * XXH3_NEON_LANES: number of accumulator lanes handled with NEON; the rest
 * (XXH_ACC_NB - XXH3_NEON_LANES) are handled by the scalar rounds.
 * 6 on most aarch64 targets; all lanes elsewhere (and on Apple cores,
 * where the full-NEON split is used instead).
 */
# ifndef XXH3_NEON_LANES
# if (defined(__aarch64__) || defined(__arm64__) || defined(_M_ARM64) || defined(_M_ARM64EC)) \
 && !defined(__APPLE__) && XXH_SIZE_OPT <= 0
# define XXH3_NEON_LANES 6
# else
# define XXH3_NEON_LANES XXH_ACC_NB
# endif
# endif
3811 #endif
3812
3813
3814
3815
3816
3817
3818
3819
3820
#if XXH_VECTOR == XXH_VSX
/*
 * POWER VSX / s390x zVector helpers.
 *
 * <altivec.h> unconditionally defines `bool`, `vector` and `pixel` as
 * macros, clobbering standard identifiers.  Save them with push_macro,
 * undefine around the include, then restore.
 */
# pragma push_macro("bool")
# pragma push_macro("vector")
# pragma push_macro("pixel")

# undef bool
# undef vector
# undef pixel

# if defined(__s390x__)
# include <s390intrin.h>
# else
# include <altivec.h>
# endif

/* Restore the user-visible macros in reverse order. */
# pragma pop_macro("pixel")
# pragma pop_macro("vector")
# pragma pop_macro("bool")

typedef __vector unsigned long long xxh_u64x2;
typedef __vector unsigned char xxh_u8x16;
typedef __vector unsigned xxh_u32x4;

/* may_alias variant of xxh_u64x2, for accumulator access through casts. */
typedef xxh_u64x2 xxh_aliasing_u64x2 XXH_ALIASING;

/* XXH_VSX_BE: 1 when lane bytes need swapping to read little-endian data. */
# ifndef XXH_VSX_BE
# if defined(__BIG_ENDIAN__) \
 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
# define XXH_VSX_BE 1
# elif defined(__VEC_ELEMENT_REG_ORDER__) && __VEC_ELEMENT_REG_ORDER__ == __ORDER_BIG_ENDIAN__
# warning "-maltivec=be is not recommended. Please use native endianness."
# define XXH_VSX_BE 1
# else
# define XXH_VSX_BE 0
# endif
# endif

# if XXH_VSX_BE
# if defined(__POWER9_VECTOR__) || (defined(__clang__) && defined(__s390x__))
# define XXH_vec_revb vec_revb
# else
/*
 * XXH_vec_revb: byte-swap each 64-bit lane.  Emulated with vec_perm when
 * no native vec_revb is available.
 */
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_revb(xxh_u64x2 val)
{
    /* Permute pattern reversing bytes 0-7 and 8-15 independently. */
    xxh_u8x16 const vByteSwap = { 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01, 0x00,
                                  0x0F, 0x0E, 0x0D, 0x0C, 0x0B, 0x0A, 0x09, 0x08 };
    return vec_perm(val, val, vByteSwap);
}
# endif
# endif

/*
 * XXH_vec_loadu: unaligned load of 16 bytes, returned as two little-endian
 * 64-bit lanes (byte-swapped on big-endian targets).
 */
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_loadu(const void *ptr)
{
    xxh_u64x2 ret;
    XXH_memcpy(&ret, ptr, sizeof(xxh_u64x2));
# if XXH_VSX_BE
    ret = XXH_vec_revb(ret);
# endif
    return ret;
}

/*
 * XXH_vec_mulo / XXH_vec_mule: odd/even 32x32 -> 64-bit widening multiplies.
 * s390x has direct builtins; clang exposes the altivec builtins; otherwise
 * fall back to inline asm (vmulouw/vmuleuw).
 */
# if defined(__s390x__)
/* s390x maps these to the standard vec_mulo/vec_mule. */
# define XXH_vec_mulo vec_mulo
# define XXH_vec_mule vec_mule
# elif defined(__clang__) && XXH_HAS_BUILTIN(__builtin_altivec_vmuleuw) && !defined(__ibmxl__)
/* Clang's altivec builtins (not available on ibmxl). */
# define XXH_vec_mulo __builtin_altivec_vmulouw
# define XXH_vec_mule __builtin_altivec_vmuleuw
# else
/* Inline-asm fallback for compilers without the builtins. */
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mulo(xxh_u32x4 a, xxh_u32x4 b)
{
    xxh_u64x2 result;
    __asm__("vmulouw %0, %1, %2" : "=v" (result) : "v" (a), "v" (b));
    return result;
}
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mule(xxh_u32x4 a, xxh_u32x4 b)
{
    xxh_u64x2 result;
    __asm__("vmuleuw %0, %1, %2" : "=v" (result) : "v" (a), "v" (b));
    return result;
}
# endif
#endif
3931
#if XXH_VECTOR == XXH_SVE
/*
 * ACCRND: one SVE accumulation round.
 *   mixed   = input ^ secret
 *   mul     = (low 32 bits of mixed) * (high 32 bits of mixed) + swapped input
 *   acc    += mul
 * `mask`, `xinput`, `xsecret` and `kSwap` are expected in the caller's scope.
 */
#define ACCRND(acc, offset) \
do { \
 svuint64_t input_vec = svld1_u64(mask, xinput + offset); \
 svuint64_t secret_vec = svld1_u64(mask, xsecret + offset); \
 svuint64_t mixed = sveor_u64_x(mask, secret_vec, input_vec); \
 svuint64_t swapped = svtbl_u64(input_vec, kSwap); \
 svuint64_t mixed_lo = svextw_u64_x(mask, mixed); \
 svuint64_t mixed_hi = svlsr_n_u64_x(mask, mixed, 32); \
 svuint64_t mul = svmad_u64_x(mask, mixed_lo, mixed_hi, swapped); \
 acc = svadd_u64_x(mask, acc, mul); \
} while (0)
#endif
3945
3946
3947
/*
 * XXH_PREFETCH(ptr): best-effort read prefetch hint; a no-op when disabled,
 * when optimizing for size, or when no intrinsic is available.
 */
#if defined(XXH_NO_PREFETCH)
# define XXH_PREFETCH(ptr) (void)(ptr)
#else
# if XXH_SIZE_OPT >= 1
# define XXH_PREFETCH(ptr) (void)(ptr)
# elif defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))
# include <mmintrin.h> /* https://msdn.microsoft.com/fr-fr/library/84szxsww(v=vs.90).aspx */
# define XXH_PREFETCH(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T0)
# elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) )
# define XXH_PREFETCH(ptr) __builtin_prefetch((ptr), 0 , 3 )
# else
# define XXH_PREFETCH(ptr) (void)(ptr)
# endif
#endif
3962
3963
3964
3965
3966
3967
/* Size in bytes of the built-in secret below. */
#define XXH_SECRET_DEFAULT_SIZE 192

#if (XXH_SECRET_DEFAULT_SIZE < XXH3_SECRET_SIZE_MIN)
# error "default keyset is not large enough"
#endif

/* Built-in default secret key; 64-byte aligned for aligned vector loads. */
XXH_ALIGN(64) static const xxh_u8 XXH3_kSecret[XXH_SECRET_DEFAULT_SIZE] = {
 0xb8, 0xfe, 0x6c, 0x39, 0x23, 0xa4, 0x4b, 0xbe, 0x7c, 0x01, 0x81, 0x2c, 0xf7, 0x21, 0xad, 0x1c,
 0xde, 0xd4, 0x6d, 0xe9, 0x83, 0x90, 0x97, 0xdb, 0x72, 0x40, 0xa4, 0xa4, 0xb7, 0xb3, 0x67, 0x1f,
 0xcb, 0x79, 0xe6, 0x4e, 0xcc, 0xc0, 0xe5, 0x78, 0x82, 0x5a, 0xd0, 0x7d, 0xcc, 0xff, 0x72, 0x21,
 0xb8, 0x08, 0x46, 0x74, 0xf7, 0x43, 0x24, 0x8e, 0xe0, 0x35, 0x90, 0xe6, 0x81, 0x3a, 0x26, 0x4c,
 0x3c, 0x28, 0x52, 0xbb, 0x91, 0xc3, 0x00, 0xcb, 0x88, 0xd0, 0x65, 0x8b, 0x1b, 0x53, 0x2e, 0xa3,
 0x71, 0x64, 0x48, 0x97, 0xa2, 0x0d, 0xf9, 0x4e, 0x38, 0x19, 0xef, 0x46, 0xa9, 0xde, 0xac, 0xd8,
 0xa8, 0xfa, 0x76, 0x3f, 0xe3, 0x9c, 0x34, 0x3f, 0xf9, 0xdc, 0xbb, 0xc7, 0xc7, 0x0b, 0x4f, 0x1d,
 0x8a, 0x51, 0xe0, 0x4b, 0xcd, 0xb4, 0x59, 0x31, 0xc8, 0x9f, 0x7e, 0xc9, 0xd9, 0x78, 0x73, 0x64,
 0xea, 0xc5, 0xac, 0x83, 0x34, 0xd3, 0xeb, 0xc3, 0xc5, 0x81, 0xa0, 0xff, 0xfa, 0x13, 0x63, 0xeb,
 0x17, 0x0d, 0xdd, 0x51, 0xb7, 0xf0, 0xda, 0x49, 0xd3, 0x16, 0x55, 0x26, 0x29, 0xd4, 0x68, 0x9e,
 0x2b, 0x16, 0xbe, 0x58, 0x7d, 0x47, 0xa1, 0xfc, 0x8f, 0xf8, 0xb8, 0xd1, 0x7a, 0xd0, 0x31, 0xce,
 0x45, 0xcb, 0x3a, 0x8f, 0x95, 0x16, 0x04, 0x28, 0xaf, 0xd7, 0xfb, 0xca, 0xbb, 0x4b, 0x40, 0x7e,
};

/* Multiplicative mixing constants used by XXH3_avalanche / XXH3_rrmxmx. */
static const xxh_u64 PRIME_MX1 = 0x165667919E3779F9ULL;
static const xxh_u64 PRIME_MX2 = 0x9FB21C651E98DF25ULL;

#ifdef XXH_OLD_NAMES
# define kSecret XXH3_kSecret
#endif
3996
3997 #ifdef XXH_DOXYGEN
3998
3999
4000
4001
4002
4003
4004
4005
4006
4007
4008
4009
4010
4011
4012
4013
/*
 * XXH_mult32to64(x, y): full 64-bit product of the low 32 bits of x and y.
 * (This function form exists for documentation; real builds use one of the
 * macro definitions below.)
 */
XXH_FORCE_INLINE xxh_u64
XXH_mult32to64(xxh_u64 x, xxh_u64 y)
{
    return (x & 0xFFFFFFFF) * (y & 0xFFFFFFFF);
}
#elif defined(_MSC_VER) && defined(_M_IX86)
/* MSVC x86: __emulu performs the unsigned 32x32 -> 64 multiply directly. */
# define XXH_mult32to64(x, y) __emulu((unsigned)(x), (unsigned)(y))
#else
/*
 * Portable form: cast through xxh_u32 to truncate to 32 bits, then widen
 * to xxh_u64 before multiplying so the full 64-bit product is kept.
 */
# define XXH_mult32to64(x, y) ((xxh_u64)(xxh_u32)(x) * (xxh_u64)(xxh_u32)(y))
#endif
4031
4032
4033
4034
4035
4036
4037
4038
4039
4040
/*
 * XXH_mult64to128: full unsigned 64x64 -> 128-bit multiply.
 * Chooses the cheapest available primitive: native __uint128_t,
 * _umul128 (x64/IA64), __umulh (ARM64), or a portable schoolbook
 * long multiplication from four 32x32 partial products.
 *
 * @return the product as {low64, high64}.
 */
static XXH128_hash_t
XXH_mult64to128(xxh_u64 lhs, xxh_u64 rhs)
{
#if (defined(__GNUC__) || defined(__clang__)) && !defined(__wasm__) \
 && defined(__SIZEOF_INT128__) \
 || (defined(_INTEGRAL_MAX_BITS) && _INTEGRAL_MAX_BITS >= 128)
    /* Native 128-bit integers: one widening multiply, then split. */
    __uint128_t const product = (__uint128_t)lhs * (__uint128_t)rhs;
    XXH128_hash_t r128;
    r128.low64 = (xxh_u64)(product);
    r128.high64 = (xxh_u64)(product >> 64);
    return r128;

#elif (defined(_M_X64) || defined(_M_IA64)) && !defined(_M_ARM64EC)
    /* x64/IA64: _umul128 returns the low half and stores the high half. */
    /* NOTE(review): `#ifndef _MSC_VER` guarding an MSVC-style pragma looks
     * inverted at first glance — confirm intent before changing. */
#ifndef _MSC_VER
# pragma intrinsic(_umul128)
#endif
    xxh_u64 product_high;
    xxh_u64 const product_low = _umul128(lhs, rhs, &product_high);
    XXH128_hash_t r128;
    r128.low64 = product_low;
    r128.high64 = product_high;
    return r128;

#elif defined(_M_ARM64) || defined(_M_ARM64EC)
    /* ARM64: low half is a plain multiply, high half via __umulh. */
#ifndef _MSC_VER
# pragma intrinsic(__umulh)
#endif
    XXH128_hash_t r128;
    r128.low64 = lhs * rhs;
    r128.high64 = __umulh(lhs, rhs);
    return r128;

#else
    /*
     * Portable schoolbook multiplication.
     * Split each operand into 32-bit halves and form the four partial
     * products; `cross` gathers the two middle terms plus the carry out of
     * lo_lo, and cannot overflow 64 bits because each term is < 2^64/3.
     */
    xxh_u64 const lo_lo = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs & 0xFFFFFFFF);
    xxh_u64 const hi_lo = XXH_mult32to64(lhs >> 32, rhs & 0xFFFFFFFF);
    xxh_u64 const lo_hi = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs >> 32);
    xxh_u64 const hi_hi = XXH_mult32to64(lhs >> 32, rhs >> 32);

    /* Recombine: carries from `cross` propagate into the upper half. */
    xxh_u64 const cross = (lo_lo >> 32) + (hi_lo & 0xFFFFFFFF) + lo_hi;
    xxh_u64 const upper = (hi_lo >> 32) + (cross >> 32) + hi_hi;
    xxh_u64 const lower = (cross << 32) | (lo_lo & 0xFFFFFFFF);

    XXH128_hash_t r128;
    r128.low64 = lower;
    r128.high64 = upper;
    return r128;
#endif
}
4164
4165
4166
4167
4168
4169
4170
4171
4172
4173
4174
4175 static xxh_u64
4176 XXH3_mul128_fold64(xxh_u64 lhs, xxh_u64 rhs)
4177 {
4178 XXH128_hash_t product = XXH_mult64to128(lhs, rhs);
4179 return product.low64 ^ product.high64;
4180 }
4181
4182
4183 XXH_FORCE_INLINE XXH_CONSTF xxh_u64 XXH_xorshift64(xxh_u64 v64, int shift)
4184 {
4185 XXH_ASSERT(0 <= shift && shift < 64);
4186 return v64 ^ (v64 >> shift);
4187 }
4188
4189
4190
4191
4192
4193 static XXH64_hash_t XXH3_avalanche(xxh_u64 h64)
4194 {
4195 h64 = XXH_xorshift64(h64, 37);
4196 h64 *= PRIME_MX1;
4197 h64 = XXH_xorshift64(h64, 32);
4198 return h64;
4199 }
4200
4201
4202
4203
4204
4205
/*
 * "rrmxmx" mixer (rotate-rotate, multiply, xorshift(+len), multiply,
 * xorshift) used by the 4-8 byte path; folds the input length into the
 * state between the two multiplies.
 */
static XXH64_hash_t XXH3_rrmxmx(xxh_u64 h64, xxh_u64 len)
{
    /* Two rotations combined so distant bit groups interact before the multiply. */
    h64 ^= XXH_rotl64(h64, 49) ^ XXH_rotl64(h64, 24);
    h64 *= PRIME_MX2;
    h64 ^= (h64 >> 35) + len ;
    h64 *= PRIME_MX2;
    return XXH_xorshift64(h64, 28);
}
4215
4216
4217
4218
4219
4220
4221
4222
4223
4224
4225
4226
4227
4228
4229
4230
4231
4232
4233
4234
4235
4236
4237
4238
4239
4240
4241
4242
4243
4244
4245
4246
4247
4248
4249
/*
 * 64-bit hash of a 1..3 byte input.
 * Packs first/middle/last byte and the length into one 32-bit word (each
 * length yields a distinct packing), XORs it with a seed-adjusted fold of
 * the secret, and avalanches.
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_1to3_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(1 <= len && len <= 3);
    XXH_ASSERT(secret != NULL);
    /* c1/c2/c3 cover every byte of any 1-3 byte input (possibly overlapping). */
    { xxh_u8 const c1 = input[0];
      xxh_u8 const c2 = input[len >> 1];
      xxh_u8 const c3 = input[len - 1];
      xxh_u32 const combined = ((xxh_u32)c1 << 16) | ((xxh_u32)c2 << 24)
                             | ((xxh_u32)c3 << 0) | ((xxh_u32)len << 8);
      xxh_u64 const bitflip = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
      xxh_u64 const keyed = (xxh_u64)combined ^ bitflip;
      return XXH64_avalanche(keyed);
    }
}
4271
/*
 * 64-bit hash of a 4..8 byte input.
 * Reads the first and last 4 bytes (overlapping when len < 8), packs them
 * into 64 bits, XORs with a seed-adjusted secret fold, and finishes with
 * the rrmxmx mixer (which also absorbs len).
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_4to8_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(secret != NULL);
    XXH_ASSERT(4 <= len && len <= 8);
    /* Mix the swapped low 32 bits of the seed into its high half. */
    seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
    { xxh_u32 const input1 = XXH_readLE32(input);
      xxh_u32 const input2 = XXH_readLE32(input + len - 4);
      xxh_u64 const bitflip = (XXH_readLE64(secret+8) ^ XXH_readLE64(secret+16)) - seed;
      xxh_u64 const input64 = input2 + (((xxh_u64)input1) << 32);
      xxh_u64 const keyed = input64 ^ bitflip;
      return XXH3_rrmxmx(keyed, len);
    }
}
4287
/*
 * 64-bit hash of a 9..16 byte input.
 * Reads the first and last 8 bytes (overlapping when len < 16), XORs each
 * with a seed-adjusted secret fold, then combines len, a byte-swap of one
 * half, and a folded 128-bit cross-multiply before avalanching.
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_9to16_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(secret != NULL);
    XXH_ASSERT(9 <= len && len <= 16);
    { xxh_u64 const bitflip1 = (XXH_readLE64(secret+24) ^ XXH_readLE64(secret+32)) + seed;
      xxh_u64 const bitflip2 = (XXH_readLE64(secret+40) ^ XXH_readLE64(secret+48)) - seed;
      xxh_u64 const input_lo = XXH_readLE64(input) ^ bitflip1;
      xxh_u64 const input_hi = XXH_readLE64(input + len - 8) ^ bitflip2;
      xxh_u64 const acc = len
                        + XXH_swap64(input_lo) + input_hi
                        + XXH3_mul128_fold64(input_lo, input_hi);
      return XXH3_avalanche(acc);
    }
}
4304
4305 XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
4306 XXH3_len_0to16_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
4307 {
4308 XXH_ASSERT(len <= 16);
4309 { if (XXH_likely(len > 8)) return XXH3_len_9to16_64b(input, len, secret, seed);
4310 if (XXH_likely(len >= 4)) return XXH3_len_4to8_64b(input, len, secret, seed);
4311 if (len) return XXH3_len_1to3_64b(input, len, secret, seed);
4312 return XXH64_avalanche(seed ^ (XXH_readLE64(secret+56) ^ XXH_readLE64(secret+64)));
4313 }
4314 }
4315
4316
4317
4318
4319
4320
4321
4322
4323
4324
4325
4326
4327
4328
4329
4330
4331
4332
4333
4334
4335
4336
4337
4338
4339
4340
4341
/*
 * Mixes 16 input bytes with 16 secret bytes into 64 bits:
 * each 8-byte half is XORed with a seed-adjusted secret word, then the two
 * halves are multiplied to 128 bits and folded.
 */
XXH_FORCE_INLINE xxh_u64 XXH3_mix16B(const xxh_u8* XXH_RESTRICT input,
                                     const xxh_u8* XXH_RESTRICT secret, xxh_u64 seed64)
{
#if defined(__GNUC__) && !defined(__clang__) \
 && defined(__i386__) && defined(__SSE2__) \
 && !defined(XXH_ENABLE_AUTOVECTORIZE)
    /*
     * NOTE(review): opaque barrier on seed64 for GCC/i386 with SSE2 —
     * presumably blocks an unprofitable autovectorization of this helper;
     * confirm against upstream history before removing.
     */
    XXH_COMPILER_GUARD(seed64);
#endif
    { xxh_u64 const input_lo = XXH_readLE64(input);
      xxh_u64 const input_hi = XXH_readLE64(input+8);
      return XXH3_mul128_fold64(
          input_lo ^ (XXH_readLE64(secret) + seed64),
          input_hi ^ (XXH_readLE64(secret+8) - seed64)
      );
    }
}
4373
4374
/*
 * 64-bit hash of a 17..128 byte input.
 * Accumulates pairs of XXH3_mix16B calls working inward from both ends of
 * the input; the size-optimized build uses a loop, the default build an
 * unrolled ladder keyed on len.
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_17to128_64b(const xxh_u8* XXH_RESTRICT input, size_t len,
                     const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                     XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(16 < len && len <= 128);

    { xxh_u64 acc = len * XXH_PRIME64_1;
#if XXH_SIZE_OPT >= 1
    /* Compact loop: i counts the 32-byte pairs, from outermost inward. */
    unsigned int i = (unsigned int)(len - 1) / 32;
    do {
        acc += XXH3_mix16B(input+16 * i, secret+32*i, seed);
        acc += XXH3_mix16B(input+len-16*(i+1), secret+32*i+16, seed);
    } while (i-- != 0);
#else
    /* Unrolled: each len threshold adds one pair from each end. */
    if (len > 32) {
        if (len > 64) {
            if (len > 96) {
                acc += XXH3_mix16B(input+48, secret+96, seed);
                acc += XXH3_mix16B(input+len-64, secret+112, seed);
            }
            acc += XXH3_mix16B(input+32, secret+64, seed);
            acc += XXH3_mix16B(input+len-48, secret+80, seed);
        }
        acc += XXH3_mix16B(input+16, secret+32, seed);
        acc += XXH3_mix16B(input+len-32, secret+48, seed);
    }
    acc += XXH3_mix16B(input+0, secret+0, seed);
    acc += XXH3_mix16B(input+len-16, secret+16, seed);
#endif
    return XXH3_avalanche(acc);
    }
}
4410
/* Upper bound of the "mid-size" path; longer inputs use the striped loop. */
#define XXH3_MIDSIZE_MAX 240

/*
 * 64-bit hash of a 129..240 byte input.
 * First 128 bytes: eight XXH3_mix16B rounds against the start of the
 * secret, then an intermediate avalanche.  Remaining 16-byte blocks use the
 * secret from XXH3_MIDSIZE_STARTOFFSET; the final 16 bytes always use the
 * secret ending XXH3_MIDSIZE_LASTOFFSET before XXH3_SECRET_SIZE_MIN.
 */
XXH_NO_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_129to240_64b(const xxh_u8* XXH_RESTRICT input, size_t len,
                      const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                      XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(128 < len && len <= XXH3_MIDSIZE_MAX);

#define XXH3_MIDSIZE_STARTOFFSET 3
#define XXH3_MIDSIZE_LASTOFFSET 17

    { xxh_u64 acc = len * XXH_PRIME64_1;
      xxh_u64 acc_end;
      unsigned int const nbRounds = (unsigned int)len / 16;
      unsigned int i;
      XXH_ASSERT(128 < len && len <= XXH3_MIDSIZE_MAX);
      for (i=0; i<8; i++) {
          acc += XXH3_mix16B(input+(16*i), secret+(16*i), seed);
      }
      /* The last 16 bytes are always mixed, regardless of nbRounds. */
      acc_end = XXH3_mix16B(input + len - 16, secret + XXH3_SECRET_SIZE_MIN - XXH3_MIDSIZE_LASTOFFSET, seed);
      XXH_ASSERT(nbRounds >= 8);
      acc = XXH3_avalanche(acc);
#if defined(__clang__) \
 && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
 && !defined(XXH_ENABLE_AUTOVECTORIZE)
      /* NOTE(review): vectorizing this tail loop was presumably a loss on
       * clang/NEON — confirm before removing the pragma. */
      #pragma clang loop vectorize(disable)
#endif
      for (i=8 ; i < nbRounds; i++) {
          /* Opaque barrier on acc, keeping the two accumulators independent
           * in the eyes of the optimizer. */
          XXH_COMPILER_GUARD(acc);
          acc_end += XXH3_mix16B(input+(16*i), secret+(16*(i-8)) + XXH3_MIDSIZE_STARTOFFSET, seed);
      }
      return XXH3_avalanche(acc + acc_end);
    }
}
4471
4472
4473
4474
/* One stripe = 64 input bytes = 8 accumulator lanes; the secret pointer
 * advances XXH_SECRET_CONSUME_RATE bytes per stripe. */
#define XXH_STRIPE_LEN 64
#define XXH_SECRET_CONSUME_RATE 8
#define XXH_ACC_NB (XXH_STRIPE_LEN / sizeof(xxh_u64))

#ifdef XXH_OLD_NAMES
# define STRIPE_LEN XXH_STRIPE_LEN
# define ACC_NB XXH_ACC_NB
#endif

/* Prefetch distance (bytes ahead of the current stripe), per compiler/ISA. */
#ifndef XXH_PREFETCH_DIST
# ifdef __clang__
# define XXH_PREFETCH_DIST 320
# else
# if (XXH_VECTOR == XXH_AVX512)
# define XXH_PREFETCH_DIST 512
# else
# define XXH_PREFETCH_DIST 384
# endif
# endif
#endif
4495
4496
4497
4498
4499
4500
4501
4502
4503
4504
4505
/*
 * XXH3_ACCUMULATE_TEMPLATE(name): expands to XXH3_accumulate_##name(),
 * the per-ISA stripe loop.  For each of nbStripes stripes it prefetches
 * XXH_PREFETCH_DIST bytes ahead and calls XXH3_accumulate_512_##name with
 * the secret advanced by XXH_SECRET_CONSUME_RATE bytes per stripe.
 */
#define XXH3_ACCUMULATE_TEMPLATE(name) \
void \
XXH3_accumulate_##name(xxh_u64* XXH_RESTRICT acc, \
 const xxh_u8* XXH_RESTRICT input, \
 const xxh_u8* XXH_RESTRICT secret, \
 size_t nbStripes) \
{ \
 size_t n; \
 for (n = 0; n < nbStripes; n++ ) { \
 const xxh_u8* const in = input + n*XXH_STRIPE_LEN; \
 XXH_PREFETCH(in + XXH_PREFETCH_DIST); \
 XXH3_accumulate_512_##name( \
 acc, \
 in, \
 secret + n*XXH_SECRET_CONSUME_RATE); \
 } \
}
4523
4524
4525 XXH_FORCE_INLINE void XXH_writeLE64(void* dst, xxh_u64 v64)
4526 {
4527 if (!XXH_CPU_LITTLE_ENDIAN) v64 = XXH_swap64(v64);
4528 XXH_memcpy(dst, &v64, sizeof(v64));
4529 }
4530
4531
4532
4533
4534
4535
/*
 * xxh_i64: signed 64-bit type used for the *_set_epi64x-style intrinsic
 * arguments.  int64_t when C99/C++ guarantees <stdint.h>; `long long`
 * otherwise (e.g. VMS).
 */
#if !defined (__VMS) \
 && (defined (__cplusplus) \
 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
 typedef int64_t xxh_i64;
#else
 /* Pre-C99 fallback. */
 typedef long long xxh_i64;
#endif
4544
4545
4546
4547
4548
4549
4550
4551
4552
4553
4554
4555
4556
4557
4558
4559
4560
4561
4562
4563
4564
4565
4566
4567
4568
/* AVX-512 path: one 512-bit register holds the whole 64-byte accumulator. */
#if (XXH_VECTOR == XXH_AVX512) \
 || (defined(XXH_DISPATCH_AVX512) && XXH_DISPATCH_AVX512 != 0)

#ifndef XXH_TARGET_AVX512
# define XXH_TARGET_AVX512
#endif

/*
 * One 64-byte stripe: acc += swap64(data) + (u32)(data^key) * ((data^key)>>32)
 * computed lane-wise across all eight 64-bit lanes at once.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX512 void
XXH3_accumulate_512_avx512(void* XXH_RESTRICT acc,
 const void* XXH_RESTRICT input,
 const void* XXH_RESTRICT secret)
{
    __m512i* const xacc = (__m512i *) acc;
    XXH_ASSERT((((size_t)acc) & 63) == 0);
    XXH_STATIC_ASSERT(XXH_STRIPE_LEN == sizeof(__m512i));

    {
        /* data_vec = input[0:64], key_vec = secret[0:64] (unaligned loads) */
        __m512i const data_vec = _mm512_loadu_si512 (input);
        __m512i const key_vec = _mm512_loadu_si512 (secret);
        /* data_key = data ^ key */
        __m512i const data_key = _mm512_xor_si512 (data_vec, key_vec);
        /* product = (low 32 bits of data_key) * (high 32 bits of data_key) */
        __m512i const data_key_lo = _mm512_srli_epi64 (data_key, 32);
        __m512i const product = _mm512_mul_epu32 (data_key, data_key_lo);
        /* data_swap: 64-bit lanes of data with their 32-bit halves swapped */
        __m512i const data_swap = _mm512_shuffle_epi32(data_vec, (_MM_PERM_ENUM)_MM_SHUFFLE(1, 0, 3, 2));
        __m512i const sum = _mm512_add_epi64(*xacc, data_swap);
        /* acc += data_swap + product */
        *xacc = _mm512_add_epi64(product, sum);
    }
}
/* Instantiate the stripe-loop wrapper: XXH3_accumulate_avx512(). */
XXH_FORCE_INLINE XXH_TARGET_AVX512 XXH3_ACCUMULATE_TEMPLATE(avx512)
4604
4605
4606
4607
4608
4609
4610
4611
4612
4613
4614
4615
4616
4617
4618
4619
4620
4621
4622
4623
4624
4625
/*
 * Accumulator scramble (AVX-512): per 64-bit lane,
 *   acc = ((acc ^ (acc >> 47)) ^ secret) * XXH_PRIME32_1
 * The three-way XOR is done in one instruction via ternarylogic 0x96.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX512 void
XXH3_scrambleAcc_avx512(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 63) == 0);
    XXH_STATIC_ASSERT(XXH_STRIPE_LEN == sizeof(__m512i));
    { __m512i* const xacc = (__m512i*) acc;
      const __m512i prime32 = _mm512_set1_epi32((int)XXH_PRIME32_1);

      /* xorshift: acc ^ (acc >> 47) */
      __m512i const acc_vec = *xacc;
      __m512i const shifted = _mm512_srli_epi64 (acc_vec, 47);
      /* 0x96 = A ^ B ^ C: key_vec ^ acc_vec ^ shifted in one op */
      __m512i const key_vec = _mm512_loadu_si512 (secret);
      __m512i const data_key = _mm512_ternarylogic_epi32(key_vec, acc_vec, shifted, 0x96 );

      /* 64x32 multiply by prime32, built from two 32x32 partial products */
      __m512i const data_key_hi = _mm512_srli_epi64 (data_key, 32);
      __m512i const prod_lo = _mm512_mul_epu32 (data_key, prime32);
      __m512i const prod_hi = _mm512_mul_epu32 (data_key_hi, prime32);
      *xacc = _mm512_add_epi64(prod_lo, _mm512_slli_epi64(prod_hi, 32));
    }
}
4648
/*
 * Derives a custom secret from XXH3_kSecret and seed64 (AVX-512):
 * adds seed64 to even 64-bit lanes and subtracts it from odd lanes
 * (mask 0xAA selects the odd lanes for negation).
 */
XXH_FORCE_INLINE XXH_TARGET_AVX512 void
XXH3_initCustomSecret_avx512(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 63) == 0);
    XXH_STATIC_ASSERT(XXH_SEC_ALIGN == 64);
    XXH_ASSERT(((size_t)customSecret & 63) == 0);
    (void)(&XXH_writeLE64);  /* suppress unused-function warning */
    { int const nbRounds = XXH_SECRET_DEFAULT_SIZE / sizeof(__m512i);
      __m512i const seed_pos = _mm512_set1_epi64((xxh_i64)seed64);
      /* odd lanes become 0 - seed64, even lanes stay +seed64 */
      __m512i const seed = _mm512_mask_sub_epi64(seed_pos, 0xAA, _mm512_set1_epi8(0), seed_pos);

      const __m512i* const src = (const __m512i*) ((const void*) XXH3_kSecret);
      __m512i* const dest = ( __m512i*) customSecret;
      int i;
      XXH_ASSERT(((size_t)src & 63) == 0);
      XXH_ASSERT(((size_t)dest & 63) == 0);
      for (i=0; i < nbRounds; ++i) {
          dest[i] = _mm512_add_epi64(_mm512_load_si512(src + i), seed);
    } }
}
4669
4670 #endif
4671
/* AVX2 path: the 64-byte stripe is processed as two 256-bit registers. */
#if (XXH_VECTOR == XXH_AVX2) \
 || (defined(XXH_DISPATCH_AVX2) && XXH_DISPATCH_AVX2 != 0)

#ifndef XXH_TARGET_AVX2
# define XXH_TARGET_AVX2
#endif

/*
 * One 64-byte stripe: per 64-bit lane,
 *   acc += swap32halves(data) + (u32)(data^key) * ((data^key) >> 32)
 */
XXH_FORCE_INLINE XXH_TARGET_AVX2 void
XXH3_accumulate_512_avx2( void* XXH_RESTRICT acc,
 const void* XXH_RESTRICT input,
 const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 31) == 0);
    { __m256i* const xacc = (__m256i *) acc;
      /* input/secret may be unaligned: loaded with loadu below */
      const __m256i* const xinput = (const __m256i *) input;
      const __m256i* const xsecret = (const __m256i *) secret;

      size_t i;
      for (i=0; i < XXH_STRIPE_LEN/sizeof(__m256i); i++) {
          /* data_vec = input half, key_vec = secret half */
          __m256i const data_vec = _mm256_loadu_si256 (xinput+i);
          __m256i const key_vec = _mm256_loadu_si256 (xsecret+i);
          /* data_key = data ^ key */
          __m256i const data_key = _mm256_xor_si256 (data_vec, key_vec);
          /* product of the two 32-bit halves of each 64-bit lane */
          __m256i const data_key_lo = _mm256_srli_epi64 (data_key, 32);
          __m256i const product = _mm256_mul_epu32 (data_key, data_key_lo);
          /* data with 32-bit halves swapped, added to the accumulator */
          __m256i const data_swap = _mm256_shuffle_epi32(data_vec, _MM_SHUFFLE(1, 0, 3, 2));
          __m256i const sum = _mm256_add_epi64(xacc[i], data_swap);
          xacc[i] = _mm256_add_epi64(product, sum);
    } }
}
/* Instantiate the stripe-loop wrapper: XXH3_accumulate_avx2(). */
XXH_FORCE_INLINE XXH_TARGET_AVX2 XXH3_ACCUMULATE_TEMPLATE(avx2)
4713
/*
 * Accumulator scramble (AVX2): per 64-bit lane,
 *   acc = ((acc ^ (acc >> 47)) ^ secret) * XXH_PRIME32_1
 */
XXH_FORCE_INLINE XXH_TARGET_AVX2 void
XXH3_scrambleAcc_avx2(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 31) == 0);
    { __m256i* const xacc = (__m256i*) acc;
      /* secret may be unaligned: loaded with loadu below */
      const __m256i* const xsecret = (const __m256i *) secret;
      const __m256i prime32 = _mm256_set1_epi32((int)XXH_PRIME32_1);

      size_t i;
      for (i=0; i < XXH_STRIPE_LEN/sizeof(__m256i); i++) {
          /* xorshift: acc ^ (acc >> 47) */
          __m256i const acc_vec = xacc[i];
          __m256i const shifted = _mm256_srli_epi64 (acc_vec, 47);
          __m256i const data_vec = _mm256_xor_si256 (acc_vec, shifted);
          /* XOR with the secret */
          __m256i const key_vec = _mm256_loadu_si256 (xsecret+i);
          __m256i const data_key = _mm256_xor_si256 (data_vec, key_vec);

          /* 64x32 multiply by prime32 from two 32x32 partial products */
          __m256i const data_key_hi = _mm256_srli_epi64 (data_key, 32);
          __m256i const prod_lo = _mm256_mul_epu32 (data_key, prime32);
          __m256i const prod_hi = _mm256_mul_epu32 (data_key_hi, prime32);
          xacc[i] = _mm256_add_epi64(prod_lo, _mm256_slli_epi64(prod_hi, 32));
      }
    }
}
4742
/*
 * Derives a custom secret from XXH3_kSecret and seed64 (AVX2): adds seed64
 * to even 64-bit lanes and -seed64 to odd lanes, fully unrolled over the
 * six 256-bit chunks of the 192-byte secret.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX2 void XXH3_initCustomSecret_avx2(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 31) == 0);
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE / sizeof(__m256i)) == 6);
    XXH_STATIC_ASSERT(XXH_SEC_ALIGN <= 64);
    (void)(&XXH_writeLE64);  /* suppress unused-function warning */
    XXH_PREFETCH(customSecret);
    {   /* lane pattern: +seed, -seed, +seed, -seed */
        __m256i const seed = _mm256_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64, (xxh_i64)(0U - seed64), (xxh_i64)seed64);

        const __m256i* const src = (const __m256i*) ((const void*) XXH3_kSecret);
        __m256i* dest = ( __m256i*) customSecret;

# if defined(__GNUC__) || defined(__clang__)
        /* NOTE(review): opaque barrier on dest — presumably keeps GCC/Clang
         * from transforming this unrolled copy; confirm before removing. */
        XXH_COMPILER_GUARD(dest);
# endif
        XXH_ASSERT(((size_t)src & 31) == 0);
        XXH_ASSERT(((size_t)dest & 31) == 0);

        /* 6 x 32 bytes = 192-byte secret, fully unrolled */
        dest[0] = _mm256_add_epi64(_mm256_load_si256(src+0), seed);
        dest[1] = _mm256_add_epi64(_mm256_load_si256(src+1), seed);
        dest[2] = _mm256_add_epi64(_mm256_load_si256(src+2), seed);
        dest[3] = _mm256_add_epi64(_mm256_load_si256(src+3), seed);
        dest[4] = _mm256_add_epi64(_mm256_load_si256(src+4), seed);
        dest[5] = _mm256_add_epi64(_mm256_load_si256(src+5), seed);
    }
}
4775
4776 #endif
4777
4778
/* SSE2 path: the 64-byte stripe is processed as four 128-bit registers.
 * Also compiled unconditionally under XXH_X86DISPATCH as the baseline. */
#if (XXH_VECTOR == XXH_SSE2) || defined(XXH_X86DISPATCH)

#ifndef XXH_TARGET_SSE2
# define XXH_TARGET_SSE2
#endif

/*
 * One 64-byte stripe: per 64-bit lane,
 *   acc += swap32halves(data) + (u32)(data^key) * ((data^key) >> 32)
 */
XXH_FORCE_INLINE XXH_TARGET_SSE2 void
XXH3_accumulate_512_sse2( void* XXH_RESTRICT acc,
 const void* XXH_RESTRICT input,
 const void* XXH_RESTRICT secret)
{
    /* SSE2 is just a half-width AVX2, same algorithm per lane. */
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    { __m128i* const xacc = (__m128i *) acc;
      /* input/secret may be unaligned: loaded with loadu below */
      const __m128i* const xinput = (const __m128i *) input;
      const __m128i* const xsecret = (const __m128i *) secret;

      size_t i;
      for (i=0; i < XXH_STRIPE_LEN/sizeof(__m128i); i++) {
          /* data_vec = input quarter, key_vec = secret quarter */
          __m128i const data_vec = _mm_loadu_si128 (xinput+i);
          __m128i const key_vec = _mm_loadu_si128 (xsecret+i);
          /* data_key = data ^ key */
          __m128i const data_key = _mm_xor_si128 (data_vec, key_vec);
          /* high 32-bit halves moved down via shuffle (no 64-bit shift needed) */
          __m128i const data_key_lo = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
          __m128i const product = _mm_mul_epu32 (data_key, data_key_lo);
          /* data with 32-bit halves swapped, added to the accumulator */
          __m128i const data_swap = _mm_shuffle_epi32(data_vec, _MM_SHUFFLE(1,0,3,2));
          __m128i const sum = _mm_add_epi64(xacc[i], data_swap);
          xacc[i] = _mm_add_epi64(product, sum);
    } }
}
/* Instantiate the stripe-loop wrapper: XXH3_accumulate_sse2(). */
XXH_FORCE_INLINE XXH_TARGET_SSE2 XXH3_ACCUMULATE_TEMPLATE(sse2)
4820
/*
 * Accumulator scramble (SSE2): per 64-bit lane,
 *   acc = ((acc ^ (acc >> 47)) ^ secret) * XXH_PRIME32_1
 */
XXH_FORCE_INLINE XXH_TARGET_SSE2 void
XXH3_scrambleAcc_sse2(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    { __m128i* const xacc = (__m128i*) acc;
      /* secret may be unaligned: loaded with loadu below */
      const __m128i* const xsecret = (const __m128i *) secret;
      const __m128i prime32 = _mm_set1_epi32((int)XXH_PRIME32_1);

      size_t i;
      for (i=0; i < XXH_STRIPE_LEN/sizeof(__m128i); i++) {
          /* xorshift: acc ^ (acc >> 47) */
          __m128i const acc_vec = xacc[i];
          __m128i const shifted = _mm_srli_epi64 (acc_vec, 47);
          __m128i const data_vec = _mm_xor_si128 (acc_vec, shifted);
          /* XOR with the secret */
          __m128i const key_vec = _mm_loadu_si128 (xsecret+i);
          __m128i const data_key = _mm_xor_si128 (data_vec, key_vec);

          /* 64x32 multiply by prime32 from two 32x32 partial products */
          __m128i const data_key_hi = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
          __m128i const prod_lo = _mm_mul_epu32 (data_key, prime32);
          __m128i const prod_hi = _mm_mul_epu32 (data_key_hi, prime32);
          xacc[i] = _mm_add_epi64(prod_lo, _mm_slli_epi64(prod_hi, 32));
      }
    }
}
4849
/*
 * Derives a custom secret from XXH3_kSecret and seed64 (SSE2): adds
 * {+seed64, -seed64} to each 128-bit chunk of the default secret.
 */
XXH_FORCE_INLINE XXH_TARGET_SSE2 void XXH3_initCustomSecret_sse2(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);
    (void)(&XXH_writeLE64);  /* suppress unused-function warning */
    { int const nbRounds = XXH_SECRET_DEFAULT_SIZE / sizeof(__m128i);

# if defined(_MSC_VER) && defined(_M_IX86) && _MSC_VER < 1900
      /* Old MSVC x86 lacks _mm_set_epi64x: build the vector via memory. */
      XXH_ALIGN(16) const xxh_i64 seed64x2[2] = { (xxh_i64)seed64, (xxh_i64)(0U - seed64) };
      __m128i const seed = _mm_load_si128((__m128i const*)seed64x2);
# else
      __m128i const seed = _mm_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64);
# endif
      int i;

      const void* const src16 = XXH3_kSecret;
      __m128i* dst16 = (__m128i*) customSecret;
# if defined(__GNUC__) || defined(__clang__)
      /* NOTE(review): opaque barrier on dst16 — presumably keeps GCC/Clang
       * from transforming this loop; confirm before removing. */
      XXH_COMPILER_GUARD(dst16);
# endif
      XXH_ASSERT(((size_t)src16 & 15) == 0);
      XXH_ASSERT(((size_t)dst16 & 15) == 0);

      for (i=0; i < nbRounds; ++i) {
          dst16[i] = _mm_add_epi64(_mm_load_si128((const __m128i *)src16+i), seed);
    } }
}
4882
4883 #endif
4884
#if (XXH_VECTOR == XXH_NEON)

/* Forward declarations: the NEON kernels below fall back to these scalar
 * per-lane rounds for lanes beyond XXH3_NEON_LANES. */
XXH_FORCE_INLINE void
XXH3_scalarRound(void* XXH_RESTRICT acc, void const* XXH_RESTRICT input,
 void const* XXH_RESTRICT secret, size_t lane);

XXH_FORCE_INLINE void
XXH3_scalarScrambleRound(void* XXH_RESTRICT acc,
 void const* XXH_RESTRICT secret, size_t lane);
4895
4896
4897
4898
4899
4900
4901
4902
4903
4904
4905
4906
4907
4908
4909
4910
4911
4912
4913
4914
4915
4916
4917
4918
4919
/*
 * One 64-byte stripe on NEON.
 * Lanes [XXH3_NEON_LANES, XXH_ACC_NB) are handled by XXH3_scalarRound;
 * the NEON lanes are then processed in pairs so vuzpq_u32 can split two
 * data_key vectors into their 32-bit low/high halves in one step, feeding
 * the widening multiply-accumulates.  A final single-vector loop handles
 * an odd remaining pair count.
 */
XXH_FORCE_INLINE void
XXH3_accumulate_512_neon( void* XXH_RESTRICT acc,
 const void* XXH_RESTRICT input,
 const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    XXH_STATIC_ASSERT(XXH3_NEON_LANES > 0 && XXH3_NEON_LANES <= XXH_ACC_NB && XXH3_NEON_LANES % 2 == 0);
    {
        xxh_aliasing_uint64x2_t* const xacc = (xxh_aliasing_uint64x2_t*) acc;
        /* byte pointers: XXH_vld1q_u64 tolerates unaligned addresses */
        uint8_t const* xinput = (const uint8_t *) input;
        uint8_t const* xsecret = (const uint8_t *) secret;

        size_t i;
#ifdef __wasm_simd128__
        /* NOTE(review): opaque barrier on xsecret for wasm SIMD builds —
         * presumably blocks a counterproductive load transformation;
         * confirm against upstream history before removing. */
        XXH_COMPILER_GUARD(xsecret);
#endif
        /* Scalar fallback for lanes not covered by NEON. */
        for (i = XXH3_NEON_LANES; i < XXH_ACC_NB; i++) {
            XXH3_scalarRound(acc, input, secret, i);
        }
        i = 0;
        /* Paired NEON lanes (4 input 64-bit words per iteration). */
        for (; i+1 < XXH3_NEON_LANES / 2; i+=2) {
            /* data_vec = input[i:i+2] (two 16-byte loads) */
            uint64x2_t data_vec_1 = XXH_vld1q_u64(xinput + (i * 16));
            uint64x2_t data_vec_2 = XXH_vld1q_u64(xinput + ((i+1) * 16));
            /* key_vec = secret[i:i+2] */
            uint64x2_t key_vec_1 = XXH_vld1q_u64(xsecret + (i * 16));
            uint64x2_t key_vec_2 = XXH_vld1q_u64(xsecret + ((i+1) * 16));
            /* data_swap = data with its two 64-bit lanes swapped */
            uint64x2_t data_swap_1 = vextq_u64(data_vec_1, data_vec_1, 1);
            uint64x2_t data_swap_2 = vextq_u64(data_vec_2, data_vec_2, 1);
            /* data_key = data ^ key */
            uint64x2_t data_key_1 = veorq_u64(data_vec_1, key_vec_1);
            uint64x2_t data_key_2 = veorq_u64(data_vec_2, key_vec_2);

            /*
             * Unzip both data_key vectors at once:
             *   val[0] = all low 32-bit halves, val[1] = all high halves.
             * This sets up the low/high widening multiplies below.
             */
            uint32x4x2_t unzipped = vuzpq_u32(
                vreinterpretq_u32_u64(data_key_1),
                vreinterpretq_u32_u64(data_key_2)
            );
            /* low halves of all four 64-bit data_key lanes */
            uint32x4_t data_key_lo = unzipped.val[0];
            /* high halves of all four 64-bit data_key lanes */
            uint32x4_t data_key_hi = unzipped.val[1];

            /* sum = data_swap + (u32 lo) * (u32 hi), widened to 64 bits */
            uint64x2_t sum_1 = XXH_vmlal_low_u32(data_swap_1, data_key_lo, data_key_hi);
            uint64x2_t sum_2 = XXH_vmlal_high_u32(data_swap_2, data_key_lo, data_key_hi);

            /* NOTE(review): clang-specific barriers — presumably prevent
             * reassociation that splits the mlal; confirm before removing. */
            XXH_COMPILER_GUARD_CLANG_NEON(sum_1);
            XXH_COMPILER_GUARD_CLANG_NEON(sum_2);

            xacc[i] = vaddq_u64(xacc[i], sum_1);
            xacc[i+1] = vaddq_u64(xacc[i+1], sum_2);
        }
        /* Remaining single vector (when XXH3_NEON_LANES/2 is odd). */
        for (; i < XXH3_NEON_LANES / 2; i++) {
            uint64x2_t data_vec = XXH_vld1q_u64(xinput + (i * 16));
            uint64x2_t key_vec = XXH_vld1q_u64(xsecret + (i * 16));
            /* swap the two 64-bit lanes of data */
            uint64x2_t data_swap = vextq_u64(data_vec, data_vec, 1);
            uint64x2_t data_key = veorq_u64(data_vec, key_vec);

            /* narrow to the low halves / shift down the high halves */
            uint32x2_t data_key_lo = vmovn_u64(data_key);
            uint32x2_t data_key_hi = vshrn_n_u64(data_key, 32);
            /* sum = data_swap + lo * hi (widened) */
            uint64x2_t sum = vmlal_u32(data_swap, data_key_lo, data_key_hi);

            XXH_COMPILER_GUARD_CLANG_NEON(sum);

            xacc[i] = vaddq_u64 (xacc[i], sum);
        }
    }
}
/* Instantiate the stripe-loop wrapper: XXH3_accumulate_neon(). */
XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(neon)
5047
/*!
 * Scrambles the accumulators using NEON (or wasm SIMD128) vectors.
 *
 * Per 64-bit lane: acc = ((acc ^ (acc >> 47)) ^ secret) * XXH_PRIME32_1.
 * NEON has no 64x64-bit multiply, so the multiply by the 32-bit prime is
 * decomposed into 32-bit operations below.
 */
XXH_FORCE_INLINE void
XXH3_scrambleAcc_neon(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 15) == 0);

    { xxh_aliasing_uint64x2_t* xacc = (xxh_aliasing_uint64x2_t*) acc;
    uint8_t const* xsecret = (uint8_t const*) secret;

    size_t i;

#ifndef __wasm_simd128__
    /* { prime, prime } — multiplies the low 32 bits of each lane */
    uint32x2_t const kPrimeLo = vdup_n_u32(XXH_PRIME32_1);
    /* { 0, prime, 0, prime } — prime placed in the high half of each 64-bit lane */
    uint32x4_t const kPrimeHi = vreinterpretq_u32_u64(vdupq_n_u64((xxh_u64)XXH_PRIME32_1 << 32));
#endif

    /* Lanes beyond XXH3_NEON_LANES are handled by the scalar fallback. */
    for (i = XXH3_NEON_LANES; i < XXH_ACC_NB; i++) {
        XXH3_scalarScrambleRound(acc, secret, i);
    }
    for (i=0; i < XXH3_NEON_LANES / 2; i++) {
        /* xorshift: data_vec = acc ^ (acc >> 47) */
        uint64x2_t acc_vec = xacc[i];
        uint64x2_t shifted = vshrq_n_u64(acc_vec, 47);
        uint64x2_t data_vec = veorq_u64(acc_vec, shifted);

        /* data_key = data_vec ^ secret[i] */
        uint64x2_t key_vec = XXH_vld1q_u64(xsecret + (i * 16));
        uint64x2_t data_key = veorq_u64(data_vec, key_vec);

#ifdef __wasm_simd128__
        /* wasm SIMD128 provides a native 64-bit vector multiply. */
        xacc[i] = data_key * XXH_PRIME32_1;
#else
        /* 64x32 multiply decomposed as (hi32(data_key)*prime) << 32
         * plus lo32(data_key)*prime, the latter via a widening mul-add. */
        uint32x4_t prod_hi = vmulq_u32 (vreinterpretq_u32_u64(data_key), kPrimeHi);
        /* extract the low 32 bits of each 64-bit lane */
        uint32x2_t data_key_lo = vmovn_u64(data_key);
        /* xacc[i] = prod_hi + (xxh_u64)lo32(data_key) * XXH_PRIME32_1 */
        xacc[i] = vmlal_u32(vreinterpretq_u64_u32(prod_hi), data_key_lo, kPrimeLo);
#endif
    }
    }
}
5103 #endif
5104
5105 #if (XXH_VECTOR == XXH_VSX)
5106
/*!
 * One 512-bit accumulation step using POWER VSX (or s390x vector) intrinsics.
 * Each 128-bit vector register covers two 64-bit accumulator lanes.
 */
XXH_FORCE_INLINE void
XXH3_accumulate_512_vsx(  void* XXH_RESTRICT acc,
                    const void* XXH_RESTRICT input,
                    const void* XXH_RESTRICT secret)
{
    /* presumed aligned — TODO confirm alignment guarantee at call sites */
    xxh_aliasing_u64x2* const xacc = (xxh_aliasing_u64x2*) acc;
    xxh_u8 const* const xinput = (xxh_u8 const*) input;
    xxh_u8 const* const xsecret = (xxh_u8 const*) secret;
    xxh_u64x2 const v32 = { 32, 32 };
    size_t i;
    for (i = 0; i < XXH_STRIPE_LEN / sizeof(xxh_u64x2); i++) {
        /* data_vec = xinput[i]; */
        xxh_u64x2 const data_vec = XXH_vec_loadu(xinput + 16*i);
        /* key_vec = xsecret[i]; */
        xxh_u64x2 const key_vec = XXH_vec_loadu(xsecret + 16*i);
        xxh_u64x2 const data_key = data_vec ^ key_vec;
        /* shuffled = rotate each 64-bit lane of data_key by 32 (swaps halves) */
        xxh_u32x4 const shuffled = (xxh_u32x4)vec_rl(data_key, v32);
        /* product = lo32(data_key) * hi32(data_key), widened to 64 bits */
        xxh_u64x2 const product = XXH_vec_mulo((xxh_u32x4)data_key, shuffled);
        /* acc_vec = xacc[i] + product */
        xxh_u64x2 acc_vec = xacc[i];
        acc_vec += product;

        /* also add the input with its two 64-bit halves swapped */
#ifdef __s390x__
        acc_vec += vec_permi(data_vec, data_vec, 2);
#else
        acc_vec += vec_xxpermdi(data_vec, data_vec, 2);
#endif
        xacc[i] = acc_vec;
    }
}
5141 XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(vsx)
5142
/*!
 * Scrambles the accumulators using POWER VSX vectors.
 * Per 64-bit lane: acc = ((acc ^ (acc >> 47)) ^ secret) * XXH_PRIME32_1,
 * with the multiply split into even/odd 32-bit partial products.
 */
XXH_FORCE_INLINE void
XXH3_scrambleAcc_vsx(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 15) == 0);

    { xxh_aliasing_u64x2* const xacc = (xxh_aliasing_u64x2*) acc;
    const xxh_u8* const xsecret = (const xxh_u8*) secret;
    /* constants for shift amounts and the 32-bit prime broadcast */
    xxh_u64x2 const v32 = { 32, 32 };
    xxh_u64x2 const v47 = { 47, 47 };
    xxh_u32x4 const prime = { XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1 };
    size_t i;
    for (i = 0; i < XXH_STRIPE_LEN / sizeof(xxh_u64x2); i++) {
        /* xorshift: data_vec = acc ^ (acc >> 47) */
        xxh_u64x2 const acc_vec = xacc[i];
        xxh_u64x2 const data_vec = acc_vec ^ (acc_vec >> v47);

        /* data_key = data_vec ^ secret[i] */
        xxh_u64x2 const key_vec = XXH_vec_loadu(xsecret + 16*i);
        xxh_u64x2 const data_key = data_vec ^ key_vec;

        /* 64x32 multiply: combine even (high-half) and odd (low-half)
         * 32-bit partial products into the full 64-bit result. */
        xxh_u64x2 const prod_even = XXH_vec_mule((xxh_u32x4)data_key, prime);
        /* prod_odd = low 32 bits of each lane * prime */
        xxh_u64x2 const prod_odd = XXH_vec_mulo((xxh_u32x4)data_key, prime);
        xacc[i] = prod_odd + (prod_even << v32);
    } }
}
5172
5173 #endif
5174
5175 #if (XXH_VECTOR == XXH_SVE)
5176
/*!
 * One 512-bit accumulation step using ARM SVE.
 *
 * The 8-lane (64-byte) accumulator is processed in chunks sized to the
 * hardware vector length (svcntd() 64-bit elements per vector).  The
 * per-chunk round itself is performed by the ACCRND macro (defined
 * earlier in this file, outside this view); kSwap/xinput/xsecret are
 * consumed inside that macro.
 */
XXH_FORCE_INLINE void
XXH3_accumulate_512_sve( void* XXH_RESTRICT acc,
                   const void* XXH_RESTRICT input,
                   const void* XXH_RESTRICT secret)
{
    uint64_t *xacc = (uint64_t *)acc;
    const uint64_t *xinput = (const uint64_t *)(const void *)input;
    const uint64_t *xsecret = (const uint64_t *)(const void *)secret;
    /* predicate-free index vector with adjacent lanes swapped: 1,0,3,2,... */
    svuint64_t kSwap = sveor_n_u64_z(svptrue_b64(), svindex_u64(0, 1), 1);
    uint64_t element_count = svcntd();
    if (element_count >= 8) {
        /* vector holds all 8 lanes: single round */
        svbool_t mask = svptrue_pat_b64(SV_VL8);
        svuint64_t vacc = svld1_u64(mask, xacc);
        ACCRND(vacc, 0);
        svst1_u64(mask, xacc, vacc);
    } else if (element_count == 2) {
        /* 128-bit vectors: four rounds of 2 lanes each */
        svbool_t mask = svptrue_pat_b64(SV_VL2);
        svuint64_t acc0 = svld1_u64(mask, xacc + 0);
        svuint64_t acc1 = svld1_u64(mask, xacc + 2);
        svuint64_t acc2 = svld1_u64(mask, xacc + 4);
        svuint64_t acc3 = svld1_u64(mask, xacc + 6);
        ACCRND(acc0, 0);
        ACCRND(acc1, 2);
        ACCRND(acc2, 4);
        ACCRND(acc3, 6);
        svst1_u64(mask, xacc + 0, acc0);
        svst1_u64(mask, xacc + 2, acc1);
        svst1_u64(mask, xacc + 4, acc2);
        svst1_u64(mask, xacc + 6, acc3);
    } else {
        /* 256-bit (or other mid-size) vectors: two rounds of 4 lanes */
        svbool_t mask = svptrue_pat_b64(SV_VL4);
        svuint64_t acc0 = svld1_u64(mask, xacc + 0);
        svuint64_t acc1 = svld1_u64(mask, xacc + 4);
        ACCRND(acc0, 0);
        ACCRND(acc1, 4);
        svst1_u64(mask, xacc + 0, acc0);
        svst1_u64(mask, xacc + 4, acc1);
    }
}
5216
/*!
 * Processes nbStripes consecutive 64-byte stripes with ARM SVE.
 *
 * Same lane-chunking strategy as XXH3_accumulate_512_sve, but keeps the
 * accumulators in registers across the whole stripe loop and prefetches
 * input two stripes ahead.  ACCRND (defined earlier in this file) performs
 * the per-chunk round and reads xinput/xsecret/kSwap.
 */
XXH_FORCE_INLINE void
XXH3_accumulate_sve(xxh_u64* XXH_RESTRICT acc,
               const xxh_u8* XXH_RESTRICT input,
               const xxh_u8* XXH_RESTRICT secret,
               size_t nbStripes)
{
    if (nbStripes != 0) {
        uint64_t *xacc = (uint64_t *)acc;
        const uint64_t *xinput = (const uint64_t *)(const void *)input;
        const uint64_t *xsecret = (const uint64_t *)(const void *)secret;
        /* index vector with adjacent lanes swapped: 1,0,3,2,... */
        svuint64_t kSwap = sveor_n_u64_z(svptrue_b64(), svindex_u64(0, 1), 1);
        uint64_t element_count = svcntd();
        if (element_count >= 8) {
            svbool_t mask = svptrue_pat_b64(SV_VL8);
            svuint64_t vacc = svld1_u64(mask, xacc + 0);
            do {
                /* prefetch next stripes for streaming reads */
                svprfd(mask, xinput + 128, SV_PLDL1STRM);
                ACCRND(vacc, 0);
                xinput += 8;   /* advance one 64-byte stripe */
                xsecret += 1;  /* secret advances 8 bytes per stripe */
                nbStripes--;
            } while (nbStripes != 0);

            svst1_u64(mask, xacc + 0, vacc);
        } else if (element_count == 2) {
            svbool_t mask = svptrue_pat_b64(SV_VL2);
            svuint64_t acc0 = svld1_u64(mask, xacc + 0);
            svuint64_t acc1 = svld1_u64(mask, xacc + 2);
            svuint64_t acc2 = svld1_u64(mask, xacc + 4);
            svuint64_t acc3 = svld1_u64(mask, xacc + 6);
            do {
                svprfd(mask, xinput + 128, SV_PLDL1STRM);
                ACCRND(acc0, 0);
                ACCRND(acc1, 2);
                ACCRND(acc2, 4);
                ACCRND(acc3, 6);
                xinput += 8;
                xsecret += 1;
                nbStripes--;
            } while (nbStripes != 0);

            svst1_u64(mask, xacc + 0, acc0);
            svst1_u64(mask, xacc + 2, acc1);
            svst1_u64(mask, xacc + 4, acc2);
            svst1_u64(mask, xacc + 6, acc3);
        } else {
            svbool_t mask = svptrue_pat_b64(SV_VL4);
            svuint64_t acc0 = svld1_u64(mask, xacc + 0);
            svuint64_t acc1 = svld1_u64(mask, xacc + 4);
            do {
                svprfd(mask, xinput + 128, SV_PLDL1STRM);
                ACCRND(acc0, 0);
                ACCRND(acc1, 4);
                xinput += 8;
                xsecret += 1;
                nbStripes--;
            } while (nbStripes != 0);

            svst1_u64(mask, xacc + 0, acc0);
            svst1_u64(mask, xacc + 4, acc1);
        }
    }
}
5281
5282 #endif
5283
5284
5285
5286 #if defined(__aarch64__) && (defined(__GNUC__) || defined(__clang__))
5287
5288
5289
5290
5291
5292
5293
5294
5295
5296
5297
5298
5299
5300
/*!
 * Computes lo32(lhs) * lo32(rhs) + acc as a single aarch64 `umaddl`
 * instruction.  NOTE(review): presumably hand-written asm because the
 * compiler does not always emit umaddl from the portable form — confirm
 * against upstream rationale before touching.
 * %x = 64-bit register operand, %w = 32-bit register operand.
 */
XXH_FORCE_INLINE xxh_u64
XXH_mult32to64_add64(xxh_u64 lhs, xxh_u64 rhs, xxh_u64 acc)
{
    xxh_u64 ret;

    __asm__("umaddl %x0, %w1, %w2, %x3" : "=r" (ret) : "r" (lhs), "r" (rhs), "r" (acc));
    return ret;
}
5309 #else
/*! Portable fallback: lo32(lhs) * lo32(rhs) + acc. */
XXH_FORCE_INLINE xxh_u64
XXH_mult32to64_add64(xxh_u64 lhs, xxh_u64 rhs, xxh_u64 acc)
{
    return XXH_mult32to64((xxh_u32)lhs, (xxh_u32)rhs) + acc;
}
5315 #endif
5316
5317
5318
5319
5320
5321
5322
5323
5324 XXH_FORCE_INLINE void
5325 XXH3_scalarRound(void* XXH_RESTRICT acc,
5326 void const* XXH_RESTRICT input,
5327 void const* XXH_RESTRICT secret,
5328 size_t lane)
5329 {
5330 xxh_u64* xacc = (xxh_u64*) acc;
5331 xxh_u8 const* xinput = (xxh_u8 const*) input;
5332 xxh_u8 const* xsecret = (xxh_u8 const*) secret;
5333 XXH_ASSERT(lane < XXH_ACC_NB);
5334 XXH_ASSERT(((size_t)acc & (XXH_ACC_ALIGN-1)) == 0);
5335 {
5336 xxh_u64 const data_val = XXH_readLE64(xinput + lane * 8);
5337 xxh_u64 const data_key = data_val ^ XXH_readLE64(xsecret + lane * 8);
5338 xacc[lane ^ 1] += data_val;
5339 xacc[lane] = XXH_mult32to64_add64(data_key , data_key >> 32, xacc[lane]);
5340 }
5341 }
5342
5343
5344
5345
5346
/*!
 * Processes one 512-bit (64-byte) stripe with plain scalar code:
 * one XXH3_scalarRound per 64-bit accumulator lane.
 */
XXH_FORCE_INLINE void
XXH3_accumulate_512_scalar(void* XXH_RESTRICT acc,
                     const void* XXH_RESTRICT input,
                     const void* XXH_RESTRICT secret)
{
    size_t i;
    /* Explicit unroll hint for 32-bit ARM GCC; NOTE(review): presumably a
     * workaround for poor codegen there — confirm before removing. */
#if defined(__GNUC__) && !defined(__clang__) \
  && (defined(__arm__) || defined(__thumb2__)) \
  && defined(__ARM_FEATURE_UNALIGNED) \
  && XXH_SIZE_OPT <= 0
#  pragma GCC unroll 8
#endif
    for (i=0; i < XXH_ACC_NB; i++) {
        XXH3_scalarRound(acc, input, secret, i);
    }
}
5364 XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(scalar)
5365
5366
5367
5368
5369
5370
5371
5372
5373 XXH_FORCE_INLINE void
5374 XXH3_scalarScrambleRound(void* XXH_RESTRICT acc,
5375 void const* XXH_RESTRICT secret,
5376 size_t lane)
5377 {
5378 xxh_u64* const xacc = (xxh_u64*) acc;
5379 const xxh_u8* const xsecret = (const xxh_u8*) secret;
5380 XXH_ASSERT((((size_t)acc) & (XXH_ACC_ALIGN-1)) == 0);
5381 XXH_ASSERT(lane < XXH_ACC_NB);
5382 {
5383 xxh_u64 const key64 = XXH_readLE64(xsecret + lane * 8);
5384 xxh_u64 acc64 = xacc[lane];
5385 acc64 = XXH_xorshift64(acc64, 47);
5386 acc64 ^= key64;
5387 acc64 *= XXH_PRIME32_1;
5388 xacc[lane] = acc64;
5389 }
5390 }
5391
5392
5393
5394
5395
5396 XXH_FORCE_INLINE void
5397 XXH3_scrambleAcc_scalar(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
5398 {
5399 size_t i;
5400 for (i=0; i < XXH_ACC_NB; i++) {
5401 XXH3_scalarScrambleRound(acc, secret, i);
5402 }
5403 }
5404
/*!
 * Derives a custom secret from the default secret and a 64-bit seed.
 * Each 16-byte chunk i becomes:
 *   lo = kSecret[16*i]   + seed
 *   hi = kSecret[16*i+8] - seed
 * written back little-endian into customSecret.
 */
XXH_FORCE_INLINE void
XXH3_initCustomSecret_scalar(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    const xxh_u8* kSecretPtr = XXH3_kSecret;
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);

#if defined(__GNUC__) && defined(__aarch64__)
    /* Opaque barrier on the secret pointer.  NOTE(review): presumably
     * defeats an unfavorable GCC/Clang optimization of this loop on
     * aarch64 — confirm against upstream commit history before removing. */
    XXH_COMPILER_GUARD(kSecretPtr);
#endif
    {   int const nbRounds = XXH_SECRET_DEFAULT_SIZE / 16;
        int i;
        for (i=0; i < nbRounds; i++) {
            /* seed is added to the low half and subtracted from the high
             * half of every 16-byte chunk */
            xxh_u64 lo = XXH_readLE64(kSecretPtr + 16*i)     + seed64;
            xxh_u64 hi = XXH_readLE64(kSecretPtr + 16*i + 8) - seed64;
            XXH_writeLE64((xxh_u8*)customSecret + 16*i,     lo);
            XXH_writeLE64((xxh_u8*)customSecret + 16*i + 8, hi);
    }   }
}
5466
5467
5468 typedef void (*XXH3_f_accumulate)(xxh_u64* XXH_RESTRICT, const xxh_u8* XXH_RESTRICT, const xxh_u8* XXH_RESTRICT, size_t);
5469 typedef void (*XXH3_f_scrambleAcc)(void* XXH_RESTRICT, const void*);
5470 typedef void (*XXH3_f_initCustomSecret)(void* XXH_RESTRICT, xxh_u64);
5471
5472
5473 #if (XXH_VECTOR == XXH_AVX512)
5474
5475 #define XXH3_accumulate_512 XXH3_accumulate_512_avx512
5476 #define XXH3_accumulate XXH3_accumulate_avx512
5477 #define XXH3_scrambleAcc XXH3_scrambleAcc_avx512
5478 #define XXH3_initCustomSecret XXH3_initCustomSecret_avx512
5479
5480 #elif (XXH_VECTOR == XXH_AVX2)
5481
5482 #define XXH3_accumulate_512 XXH3_accumulate_512_avx2
5483 #define XXH3_accumulate XXH3_accumulate_avx2
5484 #define XXH3_scrambleAcc XXH3_scrambleAcc_avx2
5485 #define XXH3_initCustomSecret XXH3_initCustomSecret_avx2
5486
5487 #elif (XXH_VECTOR == XXH_SSE2)
5488
5489 #define XXH3_accumulate_512 XXH3_accumulate_512_sse2
5490 #define XXH3_accumulate XXH3_accumulate_sse2
5491 #define XXH3_scrambleAcc XXH3_scrambleAcc_sse2
5492 #define XXH3_initCustomSecret XXH3_initCustomSecret_sse2
5493
5494 #elif (XXH_VECTOR == XXH_NEON)
5495
5496 #define XXH3_accumulate_512 XXH3_accumulate_512_neon
5497 #define XXH3_accumulate XXH3_accumulate_neon
5498 #define XXH3_scrambleAcc XXH3_scrambleAcc_neon
5499 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5500
5501 #elif (XXH_VECTOR == XXH_VSX)
5502
5503 #define XXH3_accumulate_512 XXH3_accumulate_512_vsx
5504 #define XXH3_accumulate XXH3_accumulate_vsx
5505 #define XXH3_scrambleAcc XXH3_scrambleAcc_vsx
5506 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5507
5508 #elif (XXH_VECTOR == XXH_SVE)
5509 #define XXH3_accumulate_512 XXH3_accumulate_512_sve
5510 #define XXH3_accumulate XXH3_accumulate_sve
5511 #define XXH3_scrambleAcc XXH3_scrambleAcc_scalar
5512 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5513
5514 #else
5515
5516 #define XXH3_accumulate_512 XXH3_accumulate_512_scalar
5517 #define XXH3_accumulate XXH3_accumulate_scalar
5518 #define XXH3_scrambleAcc XXH3_scrambleAcc_scalar
5519 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5520
5521 #endif
5522
5523 #if XXH_SIZE_OPT >= 1
5524 # undef XXH3_initCustomSecret
5525 # define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5526 #endif
5527
/*!
 * Core long-input loop: consumes full blocks (scrambling the accumulators
 * after each), then the remaining partial block, and finally re-accumulates
 * the very last stripe with a shifted secret offset.
 */
XXH_FORCE_INLINE void
XXH3_hashLong_internal_loop(xxh_u64* XXH_RESTRICT acc,
                      const xxh_u8* XXH_RESTRICT input, size_t len,
                      const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                            XXH3_f_accumulate f_acc,
                            XXH3_f_scrambleAcc f_scramble)
{
    /* each stripe consumes 8 bytes of secret; the last stripe's worth is reserved */
    size_t const nbStripesPerBlock = (secretSize - XXH_STRIPE_LEN) / XXH_SECRET_CONSUME_RATE;
    size_t const block_len = XXH_STRIPE_LEN * nbStripesPerBlock;
    /* len-1 so an exact multiple of block_len still leaves a final partial block */
    size_t const nb_blocks = (len - 1) / block_len;

    size_t n;

    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN);

    for (n = 0; n < nb_blocks; n++) {
        f_acc(acc, input + n*block_len, secret, nbStripesPerBlock);
        /* scramble between blocks, keyed by the tail of the secret */
        f_scramble(acc, secret + secretSize - XXH_STRIPE_LEN);
    }

    /* last partial block */
    XXH_ASSERT(len > XXH_STRIPE_LEN);
    {   size_t const nbStripes = ((len - 1) - (block_len * nb_blocks)) / XXH_STRIPE_LEN;
        XXH_ASSERT(nbStripes <= (secretSize / XXH_SECRET_CONSUME_RATE));
        f_acc(acc, input + nb_blocks*block_len, secret, nbStripes);

        /* last stripe: may overlap the previous one; uses a secret window
         * shifted back by XXH_SECRET_LASTACC_START bytes */
        {   const xxh_u8* const p = input + len - XXH_STRIPE_LEN;
#define XXH_SECRET_LASTACC_START 7
            XXH3_accumulate_512(acc, p, secret + secretSize - XXH_STRIPE_LEN - XXH_SECRET_LASTACC_START);
    }   }
}
5560
5561 XXH_FORCE_INLINE xxh_u64
5562 XXH3_mix2Accs(const xxh_u64* XXH_RESTRICT acc, const xxh_u8* XXH_RESTRICT secret)
5563 {
5564 return XXH3_mul128_fold64(
5565 acc[0] ^ XXH_readLE64(secret),
5566 acc[1] ^ XXH_readLE64(secret+8) );
5567 }
5568
/*!
 * Merges the 8 accumulator lanes into a single avalanched 64-bit hash.
 * Lanes are folded pairwise against 16-byte secret windows and summed
 * onto `start`.
 */
static XXH64_hash_t
XXH3_mergeAccs(const xxh_u64* XXH_RESTRICT acc, const xxh_u8* XXH_RESTRICT secret, xxh_u64 start)
{
    xxh_u64 result64 = start;
    size_t i = 0;

    for (i = 0; i < 4; i++) {
        result64 += XXH3_mix2Accs(acc+2*i, secret + 16*i);
#if defined(__clang__)                                \
    && (defined(__arm__) || defined(__thumb__))       \
    && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
    && !defined(XXH_ENABLE_AUTOVECTORIZE)
        /* Opaque barrier: NOTE(review): presumably blocks clang's
         * autovectorization of this short loop on 32-bit ARM NEON —
         * confirm against upstream before removing. */
        XXH_COMPILER_GUARD(result64);
#endif
    }

    return XXH3_avalanche(result64);
}
5595
5596 #define XXH3_INIT_ACC { XXH_PRIME32_3, XXH_PRIME64_1, XXH_PRIME64_2, XXH_PRIME64_3, \
5597 XXH_PRIME64_4, XXH_PRIME32_2, XXH_PRIME64_5, XXH_PRIME32_1 }
5598
/*!
 * Long-input (> XXH3_MIDSIZE_MAX) 64-bit hash: run the accumulate loop,
 * then merge the accumulators, seeding the merge with len * PRIME64_1.
 */
XXH_FORCE_INLINE XXH64_hash_t
XXH3_hashLong_64b_internal(const void* XXH_RESTRICT input, size_t len,
                           const void* XXH_RESTRICT secret, size_t secretSize,
                           XXH3_f_accumulate f_acc,
                           XXH3_f_scrambleAcc f_scramble)
{
    XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;

    XXH3_hashLong_internal_loop(acc, (const xxh_u8*)input, len, (const xxh_u8*)secret, secretSize, f_acc, f_scramble);

    /* converge into final hash */
    XXH_STATIC_ASSERT(sizeof(acc) == 64);
    /* merge uses a secret window starting at offset 11 */
#define XXH_SECRET_MERGEACCS_START 11
    XXH_ASSERT(secretSize >= sizeof(acc) + XXH_SECRET_MERGEACCS_START);
    return XXH3_mergeAccs(acc, (const xxh_u8*)secret + XXH_SECRET_MERGEACCS_START, (xxh_u64)len * XXH_PRIME64_1);
}
5616
5617
5618
5619
5620
5621
5622
5623
/*!
 * Long-input variant with a caller-provided secret; the seed is unused.
 */
XXH3_WITH_SECRET_INLINE XXH64_hash_t
XXH3_hashLong_64b_withSecret(const void* XXH_RESTRICT input, size_t len,
                             XXH64_hash_t seed64, const xxh_u8* XXH_RESTRICT secret, size_t secretLen)
{
    (void)seed64;
    return XXH3_hashLong_64b_internal(input, len, secret, secretLen, XXH3_accumulate, XXH3_scrambleAcc);
}
5631
5632
5633
5634
5635
5636
5637
/*!
 * Long-input variant with the built-in default secret (no seed, no custom
 * secret).  Deliberately not inlined: kept out of line so the short-input
 * fast path stays small.
 */
XXH_NO_INLINE XXH_PUREF XXH64_hash_t
XXH3_hashLong_64b_default(const void* XXH_RESTRICT input, size_t len,
                          XXH64_hash_t seed64, const xxh_u8* XXH_RESTRICT secret, size_t secretLen)
{
    (void)seed64; (void)secret; (void)secretLen;
    return XXH3_hashLong_64b_internal(input, len, XXH3_kSecret, sizeof(XXH3_kSecret), XXH3_accumulate, XXH3_scrambleAcc);
}
5645
5646
5647
5648
5649
5650
5651
5652
5653
5654
5655
5656
/*!
 * Long-input variant with a seed: derives a custom secret from the seed
 * on the stack, except for seed==0 which maps to the default secret
 * (skipped entirely under XXH_SIZE_OPT to save code size).
 */
XXH_FORCE_INLINE XXH64_hash_t
XXH3_hashLong_64b_withSeed_internal(const void* input, size_t len,
                                    XXH64_hash_t seed,
                                    XXH3_f_accumulate f_acc,
                                    XXH3_f_scrambleAcc f_scramble,
                                    XXH3_f_initCustomSecret f_initSec)
{
#if XXH_SIZE_OPT <= 0
    /* seed==0 produces the same secret as the default: use the fast path */
    if (seed == 0)
        return XXH3_hashLong_64b_internal(input, len,
                                          XXH3_kSecret, sizeof(XXH3_kSecret),
                                          f_acc, f_scramble);
#endif
    {   XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
        f_initSec(secret, seed);
        return XXH3_hashLong_64b_internal(input, len, secret, sizeof(secret),
                                          f_acc, f_scramble);
    }
}
5676
5677
5678
5679
/*!
 * Out-of-line seeded long-input entry point; ignores the secret arguments
 * (present only to match the XXH3_hashLong64_f signature).
 */
XXH_NO_INLINE XXH64_hash_t
XXH3_hashLong_64b_withSeed(const void* XXH_RESTRICT input, size_t len,
                           XXH64_hash_t seed, const xxh_u8* XXH_RESTRICT secret, size_t secretLen)
{
    (void)secret; (void)secretLen;
    return XXH3_hashLong_64b_withSeed_internal(input, len, seed,
                XXH3_accumulate, XXH3_scrambleAcc, XXH3_initCustomSecret);
}
5688
5689
5690 typedef XXH64_hash_t (*XXH3_hashLong64_f)(const void* XXH_RESTRICT, size_t,
5691 XXH64_hash_t, const xxh_u8* XXH_RESTRICT, size_t);
5692
5693 XXH_FORCE_INLINE XXH64_hash_t
5694 XXH3_64bits_internal(const void* XXH_RESTRICT input, size_t len,
5695 XXH64_hash_t seed64, const void* XXH_RESTRICT secret, size_t secretLen,
5696 XXH3_hashLong64_f f_hashLong)
5697 {
5698 XXH_ASSERT(secretLen >= XXH3_SECRET_SIZE_MIN);
5699
5700
5701
5702
5703
5704
5705
5706 if (len <= 16)
5707 return XXH3_len_0to16_64b((const xxh_u8*)input, len, (const xxh_u8*)secret, seed64);
5708 if (len <= 128)
5709 return XXH3_len_17to128_64b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
5710 if (len <= XXH3_MIDSIZE_MAX)
5711 return XXH3_len_129to240_64b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
5712 return f_hashLong(input, len, seed64, (const xxh_u8*)secret, secretLen);
5713 }
5714
5715
5716
5717
5718
/*! Public API: unseeded 64-bit hash with the default secret. */
XXH_PUBLIC_API XXH64_hash_t XXH3_64bits(XXH_NOESCAPE const void* input, size_t length)
{
    return XXH3_64bits_internal(input, length, 0, XXH3_kSecret, sizeof(XXH3_kSecret), XXH3_hashLong_64b_default);
}
5723
5724
/*! Public API: 64-bit hash keyed by a caller-provided secret. */
XXH_PUBLIC_API XXH64_hash_t
XXH3_64bits_withSecret(XXH_NOESCAPE const void* input, size_t length, XXH_NOESCAPE const void* secret, size_t secretSize)
{
    return XXH3_64bits_internal(input, length, 0, secret, secretSize, XXH3_hashLong_64b_withSecret);
}
5730
5731
/*! Public API: seeded 64-bit hash (custom secret derived from the seed
 *  for long inputs). */
XXH_PUBLIC_API XXH64_hash_t
XXH3_64bits_withSeed(XXH_NOESCAPE const void* input, size_t length, XXH64_hash_t seed)
{
    return XXH3_64bits_internal(input, length, seed, XXH3_kSecret, sizeof(XXH3_kSecret), XXH3_hashLong_64b_withSeed);
}
5737
/*! Public API: combines a seed and a custom secret.
 *  Short/mid inputs (<= XXH3_MIDSIZE_MAX) use the seed with the *default*
 *  secret (the custom secret is only consumed on the long-input path). */
XXH_PUBLIC_API XXH64_hash_t
XXH3_64bits_withSecretandSeed(XXH_NOESCAPE const void* input, size_t length, XXH_NOESCAPE const void* secret, size_t secretSize, XXH64_hash_t seed)
{
    if (length <= XXH3_MIDSIZE_MAX)
        return XXH3_64bits_internal(input, length, seed, XXH3_kSecret, sizeof(XXH3_kSecret), NULL);
    return XXH3_hashLong_64b_withSecret(input, length, seed, (const xxh_u8*)secret, secretSize);
}
5745
5746
5747
5748 #ifndef XXH_NO_STREAM
5749
5750
5751
5752
5753
5754
5755
5756
5757
5758
5759
5760
5761
5762
5763
5764
5765
5766
5767
5768
5769
5770
5771
5772 static XXH_MALLOCF void* XXH_alignedMalloc(size_t s, size_t align)
5773 {
5774 XXH_ASSERT(align <= 128 && align >= 8);
5775 XXH_ASSERT((align & (align-1)) == 0);
5776 XXH_ASSERT(s != 0 && s < (s + align));
5777 {
5778 xxh_u8* base = (xxh_u8*)XXH_malloc(s + align);
5779 if (base != NULL) {
5780
5781
5782
5783
5784
5785
5786 size_t offset = align - ((size_t)base & (align - 1));
5787
5788 xxh_u8* ptr = base + offset;
5789
5790 XXH_ASSERT((size_t)ptr % align == 0);
5791
5792
5793 ptr[-1] = (xxh_u8)offset;
5794 return ptr;
5795 }
5796 return NULL;
5797 }
5798 }
5799
5800
5801
5802
5803 static void XXH_alignedFree(void* p)
5804 {
5805 if (p != NULL) {
5806 xxh_u8* ptr = (xxh_u8*)p;
5807
5808 xxh_u8 offset = ptr[-1];
5809
5810 xxh_u8* base = ptr - offset;
5811 XXH_free(base);
5812 }
5813 }
5814
5815
5816
5817
5818
5819
5820
/*!
 * Allocates a 64-byte-aligned XXH3 state and initializes it.
 * Returns NULL on allocation failure.
 */
XXH_PUBLIC_API XXH3_state_t* XXH3_createState(void)
{
    XXH3_state_t* const state = (XXH3_state_t*)XXH_alignedMalloc(sizeof(XXH3_state_t), 64);
    if (state==NULL) return NULL;
    XXH3_INITSTATE(state);
    return state;
}
5828
5829
5830
5831
5832
5833
5834
5835
5836
/*! Releases a state created by XXH3_createState (NULL is accepted). */
XXH_PUBLIC_API XXH_errorcode XXH3_freeState(XXH3_state_t* statePtr)
{
    XXH_alignedFree(statePtr);
    return XXH_OK;
}
5842
5843
/*! Copies a complete hashing state (shallow copy; extSecret pointer is shared). */
XXH_PUBLIC_API void
XXH3_copyState(XXH_NOESCAPE XXH3_state_t* dst_state, XXH_NOESCAPE const XXH3_state_t* src_state)
{
    XXH_memcpy(dst_state, src_state, sizeof(*dst_state));
}
5849
/*!
 * Resets a state for a new stream: zeroes only the region between the
 * `bufferedSize` and `nbStripesPerBlock` members (the large input buffer
 * before it and derived fields after it are handled explicitly), reloads
 * the accumulator primes, and records seed/secret parameters.
 */
static void
XXH3_reset_internal(XXH3_state_t* statePtr,
                    XXH64_hash_t seed,
                    const void* secret, size_t secretSize)
{
    size_t const initStart = offsetof(XXH3_state_t, bufferedSize);
    size_t const initLength = offsetof(XXH3_state_t, nbStripesPerBlock) - initStart;
    XXH_ASSERT(offsetof(XXH3_state_t, nbStripesPerBlock) > initStart);
    XXH_ASSERT(statePtr != NULL);
    /* set members from bufferedSize to nbStripesPerBlock (excluded) to 0 */
    memset((char*)statePtr + initStart, 0, initLength);
    /* same constants as XXH3_INIT_ACC */
    statePtr->acc[0] = XXH_PRIME32_3;
    statePtr->acc[1] = XXH_PRIME64_1;
    statePtr->acc[2] = XXH_PRIME64_2;
    statePtr->acc[3] = XXH_PRIME64_3;
    statePtr->acc[4] = XXH_PRIME64_4;
    statePtr->acc[5] = XXH_PRIME32_2;
    statePtr->acc[6] = XXH_PRIME64_5;
    statePtr->acc[7] = XXH_PRIME32_1;
    statePtr->seed = seed;
    statePtr->useSeed = (seed != 0);
    statePtr->extSecret = (const unsigned char*)secret;
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN);
    statePtr->secretLimit = secretSize - XXH_STRIPE_LEN;
    statePtr->nbStripesPerBlock = statePtr->secretLimit / XXH_SECRET_CONSUME_RATE;
}
5876
5877
/*! Public API: reset for an unseeded stream with the default secret. */
XXH_PUBLIC_API XXH_errorcode
XXH3_64bits_reset(XXH_NOESCAPE XXH3_state_t* statePtr)
{
    if (statePtr == NULL) return XXH_ERROR;
    XXH3_reset_internal(statePtr, 0, XXH3_kSecret, XXH_SECRET_DEFAULT_SIZE);
    return XXH_OK;
}
5885
5886
5887 XXH_PUBLIC_API XXH_errorcode
5888 XXH3_64bits_reset_withSecret(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize)
5889 {
5890 if (statePtr == NULL) return XXH_ERROR;
5891 XXH3_reset_internal(statePtr, 0, secret, secretSize);
5892 if (secret == NULL) return XXH_ERROR;
5893 if (secretSize < XXH3_SECRET_SIZE_MIN) return XXH_ERROR;
5894 return XXH_OK;
5895 }
5896
5897
/*!
 * Public API: reset for a seeded stream.  The derived custom secret is
 * only regenerated when the seed changed or an external secret was in
 * use, since XXH3_initCustomSecret is comparatively expensive.
 */
XXH_PUBLIC_API XXH_errorcode
XXH3_64bits_reset_withSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH64_hash_t seed)
{
    if (statePtr == NULL) return XXH_ERROR;
    /* seed 0 is equivalent to the unseeded default */
    if (seed==0) return XXH3_64bits_reset(statePtr);
    if ((seed != statePtr->seed) || (statePtr->extSecret != NULL))
        XXH3_initCustomSecret(statePtr->customSecret, seed);
    /* NULL extSecret => customSecret will be used */
    XXH3_reset_internal(statePtr, seed, NULL, XXH_SECRET_DEFAULT_SIZE);
    return XXH_OK;
}
5908
5909
/*!
 * Public API: reset with both a custom secret and a seed; useSeed is
 * forced on so short inputs are hashed with the seed.
 */
XXH_PUBLIC_API XXH_errorcode
XXH3_64bits_reset_withSecretandSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize, XXH64_hash_t seed64)
{
    if (statePtr == NULL) return XXH_ERROR;
    if (secret == NULL) return XXH_ERROR;
    if (secretSize < XXH3_SECRET_SIZE_MIN) return XXH_ERROR;
    XXH3_reset_internal(statePtr, seed64, secret, secretSize);
    statePtr->useSeed = 1; /* always, even if seed64==0 */
    return XXH_OK;
}
5920
5921
5922
5923
5924
5925
5926
5927
5928
5929
5930
5931
5932
5933
5934
5935
5936
5937
/*!
 * Consumes `nbStripes` stripes, tracking the position within the current
 * secret block via *nbStripesSoFarPtr and scrambling the accumulators at
 * each block boundary.  Returns the advanced input pointer (one past the
 * last consumed stripe).
 */
XXH_FORCE_INLINE const xxh_u8 *
XXH3_consumeStripes(xxh_u64* XXH_RESTRICT acc,
                    size_t* XXH_RESTRICT nbStripesSoFarPtr, size_t nbStripesPerBlock,
                    const xxh_u8* XXH_RESTRICT input, size_t nbStripes,
                    const xxh_u8* XXH_RESTRICT secret, size_t secretLimit,
                    XXH3_f_accumulate f_acc,
                    XXH3_f_scrambleAcc f_scramble)
{
    /* resume mid-block: secret offset follows stripes already consumed */
    const xxh_u8* initialSecret = secret + *nbStripesSoFarPtr * XXH_SECRET_CONSUME_RATE;
    /* Process full blocks */
    if (nbStripes >= (nbStripesPerBlock - *nbStripesSoFarPtr)) {
        /* first iteration only finishes the partially-consumed block */
        size_t nbStripesThisIter = nbStripesPerBlock - *nbStripesSoFarPtr;

        do {
            /* accumulate to the block boundary, then scramble */
            f_acc(acc, input, initialSecret, nbStripesThisIter);
            f_scramble(acc, secret + secretLimit);
            input += nbStripesThisIter * XXH_STRIPE_LEN;
            nbStripes -= nbStripesThisIter;
            /* subsequent blocks are full and start at the secret's beginning */
            nbStripesThisIter = nbStripesPerBlock;
            initialSecret = secret;
        } while (nbStripes >= nbStripesPerBlock);
        *nbStripesSoFarPtr = 0;
    }
    /* Process a partial block */
    if (nbStripes > 0) {
        f_acc(acc, input, initialSecret, nbStripes);
        input += nbStripes * XXH_STRIPE_LEN;
        *nbStripesSoFarPtr += nbStripes;
    }
    /* Return end pointer */
    return input;
}
5973
5974 #ifndef XXH3_STREAM_USE_STACK
5975 # if XXH_SIZE_OPT <= 0 && !defined(__clang__)
5976 # define XXH3_STREAM_USE_STACK 1
5977 # endif
5978 #endif
5979
5980
5981
/*!
 * Streaming update: buffers input until a full internal buffer is
 * available, consumes whole stripes directly from the caller's buffer
 * when possible, and always keeps at least one byte buffered so the
 * digest can reconstruct the final stripe.
 */
XXH_FORCE_INLINE XXH_errorcode
XXH3_update(XXH3_state_t* XXH_RESTRICT const state,
            const xxh_u8* XXH_RESTRICT input, size_t len,
            XXH3_f_accumulate f_acc,
            XXH3_f_scrambleAcc f_scramble)
{
    if (input==NULL) {
        XXH_ASSERT(len == 0);
        return XXH_OK;
    }

    XXH_ASSERT(state != NULL);
    { const xxh_u8* const bEnd = input + len;
    const unsigned char* const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
    /* Work on a stack copy of the accumulators; written back on exit.
     * NOTE(review): presumably helps the compiler keep them in registers
     * — confirm against upstream benchmarks. */
    XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[8];
    XXH_memcpy(acc, state->acc, sizeof(acc));
#else
    xxh_u64* XXH_RESTRICT const acc = state->acc;
#endif
    state->totalLen += len;
    XXH_ASSERT(state->bufferedSize <= XXH3_INTERNALBUFFER_SIZE);

    /* small input: just fill the internal buffer */
    if (len <= XXH3_INTERNALBUFFER_SIZE - state->bufferedSize) {
        XXH_memcpy(state->buffer + state->bufferedSize, input, len);
        state->bufferedSize += (XXH32_hash_t)len;
        return XXH_OK;
    }

    /* total input is now > XXH3_INTERNALBUFFER_SIZE */
#define XXH3_INTERNALBUFFER_STRIPES (XXH3_INTERNALBUFFER_SIZE / XXH_STRIPE_LEN)
    XXH_STATIC_ASSERT(XXH3_INTERNALBUFFER_SIZE % XXH_STRIPE_LEN == 0);

    /* Complete and consume the partially-filled internal buffer first. */
    if (state->bufferedSize) {
        size_t const loadSize = XXH3_INTERNALBUFFER_SIZE - state->bufferedSize;
        XXH_memcpy(state->buffer + state->bufferedSize, input, loadSize);
        input += loadSize;
        XXH3_consumeStripes(acc,
                           &state->nbStripesSoFar, state->nbStripesPerBlock,
                            state->buffer, XXH3_INTERNALBUFFER_STRIPES,
                            secret, state->secretLimit,
                            f_acc, f_scramble);
        state->bufferedSize = 0;
    }
    XXH_ASSERT(input < bEnd);
    if (bEnd - input > XXH3_INTERNALBUFFER_SIZE) {
        /* consume stripes straight from the caller's buffer; the -1 keeps
         * at least one byte (hence the last stripe) for the digest */
        size_t nbStripes = (size_t)(bEnd - 1 - input) / XXH_STRIPE_LEN;
        input = XXH3_consumeStripes(acc,
                                   &state->nbStripesSoFar, state->nbStripesPerBlock,
                                    input, nbStripes,
                                    secret, state->secretLimit,
                                    f_acc, f_scramble);
        /* stash the last consumed stripe for digest's catch-up copy */
        XXH_memcpy(state->buffer + sizeof(state->buffer) - XXH_STRIPE_LEN, input - XXH_STRIPE_LEN, XXH_STRIPE_LEN);
    }

    /* Some remaining input (always) : buffer it */
    XXH_ASSERT(input < bEnd);
    XXH_ASSERT(bEnd - input <= XXH3_INTERNALBUFFER_SIZE);
    XXH_ASSERT(state->bufferedSize == 0);
    XXH_memcpy(state->buffer, input, (size_t)(bEnd-input));
    state->bufferedSize = (XXH32_hash_t)(bEnd-input);
#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
    /* save stack accumulators into state */
    XXH_memcpy(state->acc, acc, sizeof(acc));
#endif
    }

    return XXH_OK;
}
6060
6061
/*! Public API: feed more input into a 64-bit streaming state. */
XXH_PUBLIC_API XXH_errorcode
XXH3_64bits_update(XXH_NOESCAPE XXH3_state_t* state, XXH_NOESCAPE const void* input, size_t len)
{
    return XXH3_update(state, (const xxh_u8*)input, len,
                       XXH3_accumulate, XXH3_scrambleAcc);
}
6068
6069
/*!
 * Finishes a long streamed input into `acc` without mutating the state,
 * so the stream can continue afterwards.  Consumes whole stripes still
 * sitting in the buffer, then accumulates the final (possibly overlapping)
 * stripe — reconstructed from the buffer's tail when fewer than
 * XXH_STRIPE_LEN bytes remain.
 */
XXH_FORCE_INLINE void
XXH3_digest_long (XXH64_hash_t* acc,
                  const XXH3_state_t* state,
                  const unsigned char* secret)
{
    xxh_u8 lastStripe[XXH_STRIPE_LEN];
    const xxh_u8* lastStripePtr;

    /* digest on a local copy: keeps the state reusable for further updates */
    XXH_memcpy(acc, state->acc, sizeof(state->acc));
    if (state->bufferedSize >= XXH_STRIPE_LEN) {
        /* Consume remaining stripes, keeping one full stripe in reserve */
        size_t const nbStripes = (state->bufferedSize - 1) / XXH_STRIPE_LEN;
        size_t nbStripesSoFar = state->nbStripesSoFar;
        XXH3_consumeStripes(acc,
                           &nbStripesSoFar, state->nbStripesPerBlock,
                            state->buffer, nbStripes,
                            secret, state->secretLimit,
                            XXH3_accumulate, XXH3_scrambleAcc);
        lastStripePtr = state->buffer + state->bufferedSize - XXH_STRIPE_LEN;
    } else {
        /* bufferedSize < XXH_STRIPE_LEN: stitch the stripe together from
         * the previously-saved tail of the buffer plus its start */
        size_t const catchupSize = XXH_STRIPE_LEN - state->bufferedSize;
        XXH_ASSERT(state->bufferedSize > 0);  /* there is always some input buffered */
        XXH_memcpy(lastStripe, state->buffer + sizeof(state->buffer) - catchupSize, catchupSize);
        XXH_memcpy(lastStripe + catchupSize, state->buffer, state->bufferedSize);
        lastStripePtr = lastStripe;
    }
    /* Last stripe uses the shifted secret window (see hashLong loop) */
    XXH3_accumulate_512(acc,
                        lastStripePtr,
                        secret + state->secretLimit - XXH_SECRET_LASTACC_START);
}
6106
6107
/*!
 * Public API: produces the 64-bit digest for everything streamed so far.
 * Inputs up to XXH3_MIDSIZE_MAX are still fully buffered, so they are
 * re-hashed through the one-shot path for exact equivalence.
 */
XXH_PUBLIC_API XXH64_hash_t XXH3_64bits_digest (XXH_NOESCAPE const XXH3_state_t* state)
{
    const unsigned char* const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
    if (state->totalLen > XXH3_MIDSIZE_MAX) {
        XXH_ALIGN(XXH_ACC_ALIGN) XXH64_hash_t acc[XXH_ACC_NB];
        XXH3_digest_long(acc, state, secret);
        return XXH3_mergeAccs(acc,
                              secret + XXH_SECRET_MERGEACCS_START,
                              (xxh_u64)state->totalLen * XXH_PRIME64_1);
    }
    /* totalLen <= XXH3_MIDSIZE_MAX: digesting a short input */
    if (state->useSeed)
        return XXH3_64bits_withSeed(state->buffer, (size_t)state->totalLen, state->seed);
    return XXH3_64bits_withSecret(state->buffer, (size_t)(state->totalLen),
                                  secret, state->secretLimit + XXH_STRIPE_LEN);
}
6124 #endif
6125
6126
6127
6128
6129
6130
6131
6132
6133
6134
6135
6136
6137
6138
6139
6140
6141
6142
6143
/*!
 * 128-bit hash for 1-3 byte inputs: packs the bytes and the length into a
 * 32-bit word (and a rotated/byte-swapped twin for the high half), xors
 * each with a seed-adjusted secret bitflip, and avalanches.
 */
XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_1to3_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    /* A doubled version of 1to3_64b with different constants. */
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(1 <= len && len <= 3);
    XXH_ASSERT(secret != NULL);
    /* len = 1: combinedl = { input[0], 0x01, input[0], input[0] }
     * len = 2: combinedl = { input[1], 0x02, input[0], input[1] }
     * len = 3: combinedl = { input[2], 0x03, input[0], input[1] } */
    {   xxh_u8 const c1 = input[0];
        xxh_u8 const c2 = input[len >> 1];
        xxh_u8 const c3 = input[len - 1];
        xxh_u32 const combinedl = ((xxh_u32)c1 <<16) | ((xxh_u32)c2 << 24)
                                | ((xxh_u32)c3 << 0) | ((xxh_u32)len << 8);
        /* high half uses a byte-swapped, rotated variant of the same word */
        xxh_u32 const combinedh = XXH_rotl32(XXH_swap32(combinedl), 13);
        xxh_u64 const bitflipl = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
        xxh_u64 const bitfliph = (XXH_readLE32(secret+8) ^ XXH_readLE32(secret+12)) - seed;
        xxh_u64 const keyed_lo = (xxh_u64)combinedl ^ bitflipl;
        xxh_u64 const keyed_hi = (xxh_u64)combinedh ^ bitfliph;
        XXH128_hash_t h128;
        h128.low64  = XXH64_avalanche(keyed_lo);
        h128.high64 = XXH64_avalanche(keyed_hi);
        return h128;
    }
}
6172
/* 128-bit hash for inputs of 4..8 bytes. */
XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_4to8_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(secret != NULL);
    XXH_ASSERT(4 <= len && len <= 8);
    /* Fold a byte-swapped copy of the low seed half into the high half. */
    seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
    { xxh_u32 const input_lo = XXH_readLE32(input);
        xxh_u32 const input_hi = XXH_readLE32(input + len - 4);  /* overlaps input_lo when len < 8 */
        xxh_u64 const input_64 = input_lo + ((xxh_u64)input_hi << 32);
        xxh_u64 const bitflip = (XXH_readLE64(secret+16) ^ XXH_readLE64(secret+24)) + seed;
        xxh_u64 const keyed = input_64 ^ bitflip;

        /* len (4..8) is folded into the multiplier so distinct lengths diverge */
        XXH128_hash_t m128 = XXH_mult64to128(keyed, XXH_PRIME64_1 + (len << 2));

        /* cross-pollinate the two halves before finalization */
        m128.high64 += (m128.low64 << 1);
        m128.low64 ^= (m128.high64 >> 3);

        m128.low64 = XXH_xorshift64(m128.low64, 35);
        m128.low64 *= PRIME_MX2;
        m128.low64 = XXH_xorshift64(m128.low64, 28);
        m128.high64 = XXH3_avalanche(m128.high64);
        return m128;
    }
}
6199
/* 128-bit hash for inputs of 9..16 bytes. */
XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_9to16_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(secret != NULL);
    XXH_ASSERT(9 <= len && len <= 16);
    { xxh_u64 const bitflipl = (XXH_readLE64(secret+32) ^ XXH_readLE64(secret+40)) - seed;
        xxh_u64 const bitfliph = (XXH_readLE64(secret+48) ^ XXH_readLE64(secret+56)) + seed;
        xxh_u64 const input_lo = XXH_readLE64(input);
        xxh_u64 input_hi = XXH_readLE64(input + len - 8);  /* overlaps input_lo when len < 16 */
        XXH128_hash_t m128 = XXH_mult64to128(input_lo ^ input_hi ^ bitflipl, XXH_PRIME64_1);
        /*
         * Put len in the middle of m128 so the length reaches both halves of
         * the multiply below.
         */
        m128.low64 += (xxh_u64)(len - 1) << 54;
        input_hi ^= bitfliph;
        /*
         * Goal: add the high 32 bits of input_hi, plus the 64-bit product of
         * input_hi's low 32 bits and XXH_PRIME32_2, into m128.high64.
         * The best instruction sequence differs between 32- and 64-bit targets,
         * hence the compile-time-resolvable branch on pointer width.
         */
        if (sizeof(void *) < sizeof(xxh_u64)) {
            /*
             * 32-bit path: keep the explicit mask. It avoids a carry chain
             * between the two 32-bit halves of m128.high64 on such targets.
             */
            m128.high64 += (input_hi & 0xFFFFFFFF00000000ULL) + XXH_mult32to64((xxh_u32)input_hi, XXH_PRIME32_2);
        } else {
            /*
             * 64-bit path: removes the mask via the identity
             *   (input_hi & 0xFFFFFFFF00000000) + lo32(input_hi) * XXH_PRIME32_2
             *     == input_hi + lo32(input_hi) * (XXH_PRIME32_2 - 1)
             * because input_hi == hi_bits + lo32(input_hi).
             */
            m128.high64 += input_hi + XXH_mult32to64((xxh_u32)input_hi, XXH_PRIME32_2 - 1);
        }
        /* m128 ^= bswap(m128 >> 64); — mix the high half back into the low */
        m128.low64 ^= XXH_swap64(m128.high64);

        { /* 128 x 64 multiply: h128 = m128 * XXH_PRIME64_2 (mod 2^128) */
            XXH128_hash_t h128 = XXH_mult64to128(m128.low64, XXH_PRIME64_2);
            h128.high64 += m128.high64 * XXH_PRIME64_2;

            h128.low64 = XXH3_avalanche(h128.low64);
            h128.high64 = XXH3_avalanche(h128.high64);
            return h128;
    }   }
}
6271
6272
6273
6274
6275 XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
6276 XXH3_len_0to16_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
6277 {
6278 XXH_ASSERT(len <= 16);
6279 { if (len > 8) return XXH3_len_9to16_128b(input, len, secret, seed);
6280 if (len >= 4) return XXH3_len_4to8_128b(input, len, secret, seed);
6281 if (len) return XXH3_len_1to3_128b(input, len, secret, seed);
6282 { XXH128_hash_t h128;
6283 xxh_u64 const bitflipl = XXH_readLE64(secret+64) ^ XXH_readLE64(secret+72);
6284 xxh_u64 const bitfliph = XXH_readLE64(secret+80) ^ XXH_readLE64(secret+88);
6285 h128.low64 = XXH64_avalanche(seed ^ bitflipl);
6286 h128.high64 = XXH64_avalanche( seed ^ bitfliph);
6287 return h128;
6288 } }
6289 }
6290
6291
6292
6293
/*
 * Mixes two 16-byte input lanes into a running 128-bit accumulator.
 * Each half absorbs one mixed lane, then is perturbed by the raw bytes of the
 * *other* lane, cross-pollinating the low and high words.
 */
XXH_FORCE_INLINE XXH128_hash_t
XXH128_mix32B(XXH128_hash_t acc, const xxh_u8* input_1, const xxh_u8* input_2,
              const xxh_u8* secret, XXH64_hash_t seed)
{
    acc.low64 += XXH3_mix16B (input_1, secret+0, seed);
    acc.low64 ^= XXH_readLE64(input_2) + XXH_readLE64(input_2 + 8);
    acc.high64 += XXH3_mix16B (input_2, secret+16, seed);
    acc.high64 ^= XXH_readLE64(input_1) + XXH_readLE64(input_1 + 8);
    return acc;
}
6304
6305
/* 128-bit hash for inputs of 17..128 bytes. */
XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_17to128_128b(const xxh_u8* XXH_RESTRICT input, size_t len,
                      const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                      XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(16 < len && len <= 128);

    { XXH128_hash_t acc;
        acc.low64 = len * XXH_PRIME64_1;
        acc.high64 = 0;

#if XXH_SIZE_OPT >= 1
        {
            /* Size-optimized: loop over the symmetric 32-byte pairs. */
            unsigned int i = (unsigned int)(len - 1) / 32;
            do {
                acc = XXH128_mix32B(acc, input+16*i, input+len-16*(i+1), secret+32*i, seed);
            } while (i-- != 0);
        }
#else
        /* Unrolled: mix symmetric 16-byte pairs from both ends of the input,
         * innermost pair first, each pair keyed by a fresh secret slice. */
        if (len > 32) {
            if (len > 64) {
                if (len > 96) {
                    acc = XXH128_mix32B(acc, input+48, input+len-64, secret+96, seed);
                }
                acc = XXH128_mix32B(acc, input+32, input+len-48, secret+64, seed);
            }
            acc = XXH128_mix32B(acc, input+16, input+len-32, secret+32, seed);
        }
        acc = XXH128_mix32B(acc, input, input+len-16, secret, seed);
#endif
        /* Final fold: combine the two accumulators into both output halves. */
        { XXH128_hash_t h128;
            h128.low64 = acc.low64 + acc.high64;
            h128.high64 = (acc.low64 * XXH_PRIME64_1)
                        + (acc.high64 * XXH_PRIME64_4)
                        + ((len - seed) * XXH_PRIME64_2);
            h128.low64 = XXH3_avalanche(h128.low64);
            h128.high64 = (XXH64_hash_t)0 - XXH3_avalanche(h128.high64);
            return h128;
        }
    }
}
6349
/* 128-bit hash for inputs of 129..XXH3_MIDSIZE_MAX bytes. */
XXH_NO_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_129to240_128b(const xxh_u8* XXH_RESTRICT input, size_t len,
                       const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                       XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(128 < len && len <= XXH3_MIDSIZE_MAX);

    { XXH128_hash_t acc;
        unsigned i;
        acc.low64 = len * XXH_PRIME64_1;
        acc.high64 = 0;
        /*
         * First 128 bytes: four 32-byte chunks, each keyed with a fresh
         * 32-byte slice of the secret.
         */
        for (i = 32; i < 160; i += 32) {
            acc = XXH128_mix32B(acc,
                                input + i - 32,
                                input + i - 16,
                                secret + i - 32,
                                seed);
        }
        acc.low64 = XXH3_avalanche(acc.low64);
        acc.high64 = XXH3_avalanche(acc.high64);
        /*
         * Remaining full 32-byte chunks: the secret is reused starting at
         * XXH3_MIDSIZE_STARTOFFSET.
         */
        for (i=160; i <= len; i += 32) {
            acc = XXH128_mix32B(acc,
                                input + i - 32,
                                input + i - 16,
                                secret + XXH3_MIDSIZE_STARTOFFSET + i - 160,
                                seed);
        }
        /* Last 32 bytes: lanes swapped, seed negated, fixed secret offset. */
        acc = XXH128_mix32B(acc,
                            input + len - 16,
                            input + len - 32,
                            secret + XXH3_SECRET_SIZE_MIN - XXH3_MIDSIZE_LASTOFFSET - 16,
                            (XXH64_hash_t)0 - seed);

        /* Final fold, same scheme as the 17..128 path. */
        { XXH128_hash_t h128;
            h128.low64 = acc.low64 + acc.high64;
            h128.high64 = (acc.low64 * XXH_PRIME64_1)
                        + (acc.high64 * XXH_PRIME64_4)
                        + ((len - seed) * XXH_PRIME64_2);
            h128.low64 = XXH3_avalanche(h128.low64);
            h128.high64 = (XXH64_hash_t)0 - XXH3_avalanche(h128.high64);
            return h128;
        }
    }
}
6407
/*
 * Long-input (> XXH3_MIDSIZE_MAX) 128-bit hash core.
 * Runs the shared striped accumulation loop, then folds the 8 accumulators
 * twice — at different secret offsets and with different length multipliers —
 * to obtain two independent 64-bit halves.
 */
XXH_FORCE_INLINE XXH128_hash_t
XXH3_hashLong_128b_internal(const void* XXH_RESTRICT input, size_t len,
                            const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                            XXH3_f_accumulate f_acc,
                            XXH3_f_scrambleAcc f_scramble)
{
    XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;

    XXH3_hashLong_internal_loop(acc, (const xxh_u8*)input, len, secret, secretSize, f_acc, f_scramble);

    /* converge into final hash */
    XXH_STATIC_ASSERT(sizeof(acc) == 64);
    XXH_ASSERT(secretSize >= sizeof(acc) + XXH_SECRET_MERGEACCS_START);
    { XXH128_hash_t h128;
        h128.low64 = XXH3_mergeAccs(acc,
                                    secret + XXH_SECRET_MERGEACCS_START,
                                    (xxh_u64)len * XXH_PRIME64_1);
        /* high half merges against the tail end of the secret */
        h128.high64 = XXH3_mergeAccs(acc,
                                     secret + secretSize
                                            - sizeof(acc) - XXH_SECRET_MERGEACCS_START,
                                     ~((xxh_u64)len * XXH_PRIME64_2));
        return h128;
    }
}
6432
6433
6434
6435
/*
 * Long-input 128-bit hash using the built-in default secret.
 * seed64/secret/secretLen exist only to match the XXH3_hashLong128_f
 * dispatch signature. Deliberately not inlined (XXH_NO_INLINE).
 */
XXH_NO_INLINE XXH_PUREF XXH128_hash_t
XXH3_hashLong_128b_default(const void* XXH_RESTRICT input, size_t len,
                           XXH64_hash_t seed64,
                           const void* XXH_RESTRICT secret, size_t secretLen)
{
    (void)seed64; (void)secret; (void)secretLen;
    return XXH3_hashLong_128b_internal(input, len, XXH3_kSecret, sizeof(XXH3_kSecret),
                                       XXH3_accumulate, XXH3_scrambleAcc);
}
6445
6446
6447
6448
6449
6450
6451
6452
/*
 * Long-input 128-bit hash using a caller-provided secret.
 * The seed argument exists only to satisfy the dispatch signature.
 */
XXH3_WITH_SECRET_INLINE XXH128_hash_t
XXH3_hashLong_128b_withSecret(const void* XXH_RESTRICT input, size_t len,
                              XXH64_hash_t seed64,
                              const void* XXH_RESTRICT secret, size_t secretLen)
{
    (void)seed64;
    return XXH3_hashLong_128b_internal(input, len, (const xxh_u8*)secret, secretLen,
                                       XXH3_accumulate, XXH3_scrambleAcc);
}
6462
6463 XXH_FORCE_INLINE XXH128_hash_t
6464 XXH3_hashLong_128b_withSeed_internal(const void* XXH_RESTRICT input, size_t len,
6465 XXH64_hash_t seed64,
6466 XXH3_f_accumulate f_acc,
6467 XXH3_f_scrambleAcc f_scramble,
6468 XXH3_f_initCustomSecret f_initSec)
6469 {
6470 if (seed64 == 0)
6471 return XXH3_hashLong_128b_internal(input, len,
6472 XXH3_kSecret, sizeof(XXH3_kSecret),
6473 f_acc, f_scramble);
6474 { XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
6475 f_initSec(secret, seed64);
6476 return XXH3_hashLong_128b_internal(input, len, (const xxh_u8*)secret, sizeof(secret),
6477 f_acc, f_scramble);
6478 }
6479 }
6480
6481
6482
6483
/*
 * Seeded long-input entry point for the dispatch table; the custom-secret
 * arguments are unused. Deliberately not inlined (XXH_NO_INLINE).
 */
XXH_NO_INLINE XXH128_hash_t
XXH3_hashLong_128b_withSeed(const void* input, size_t len,
                            XXH64_hash_t seed64, const void* XXH_RESTRICT secret, size_t secretLen)
{
    (void)secret; (void)secretLen;
    return XXH3_hashLong_128b_withSeed_internal(input, len, seed64,
                XXH3_accumulate, XXH3_scrambleAcc, XXH3_initCustomSecret);
}
6492
/* Function pointer type for the long-input (> XXH3_MIDSIZE_MAX) 128-bit strategies above. */
typedef XXH128_hash_t (*XXH3_hashLong128_f)(const void* XXH_RESTRICT, size_t,
                                            XXH64_hash_t, const void* XXH_RESTRICT, size_t);
6495
6496 XXH_FORCE_INLINE XXH128_hash_t
6497 XXH3_128bits_internal(const void* input, size_t len,
6498 XXH64_hash_t seed64, const void* XXH_RESTRICT secret, size_t secretLen,
6499 XXH3_hashLong128_f f_hl128)
6500 {
6501 XXH_ASSERT(secretLen >= XXH3_SECRET_SIZE_MIN);
6502
6503
6504
6505
6506
6507
6508 if (len <= 16)
6509 return XXH3_len_0to16_128b((const xxh_u8*)input, len, (const xxh_u8*)secret, seed64);
6510 if (len <= 128)
6511 return XXH3_len_17to128_128b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
6512 if (len <= XXH3_MIDSIZE_MAX)
6513 return XXH3_len_129to240_128b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
6514 return f_hl128(input, len, seed64, secret, secretLen);
6515 }
6516
6517
6518
6519
6520
/*! Unseeded one-shot 128-bit hash (default secret, seed 0). */
XXH_PUBLIC_API XXH128_hash_t XXH3_128bits(XXH_NOESCAPE const void* input, size_t len)
{
    return XXH3_128bits_internal(input, len, 0,
                                 XXH3_kSecret, sizeof(XXH3_kSecret),
                                 XXH3_hashLong_128b_default);
}
6527
6528
/*! One-shot 128-bit hash keyed with a caller-supplied secret (seed 0). */
XXH_PUBLIC_API XXH128_hash_t
XXH3_128bits_withSecret(XXH_NOESCAPE const void* input, size_t len, XXH_NOESCAPE const void* secret, size_t secretSize)
{
    return XXH3_128bits_internal(input, len, 0,
                                 (const xxh_u8*)secret, secretSize,
                                 XXH3_hashLong_128b_withSecret);
}
6536
6537
/*! One-shot 128-bit hash with a 64-bit seed over the default secret. */
XXH_PUBLIC_API XXH128_hash_t
XXH3_128bits_withSeed(XXH_NOESCAPE const void* input, size_t len, XXH64_hash_t seed)
{
    return XXH3_128bits_internal(input, len, seed,
                                 XXH3_kSecret, sizeof(XXH3_kSecret),
                                 XXH3_hashLong_128b_withSeed);
}
6545
6546
6547 XXH_PUBLIC_API XXH128_hash_t
6548 XXH3_128bits_withSecretandSeed(XXH_NOESCAPE const void* input, size_t len, XXH_NOESCAPE const void* secret, size_t secretSize, XXH64_hash_t seed)
6549 {
6550 if (len <= XXH3_MIDSIZE_MAX)
6551 return XXH3_128bits_internal(input, len, seed, XXH3_kSecret, sizeof(XXH3_kSecret), NULL);
6552 return XXH3_hashLong_128b_withSecret(input, len, seed, secret, secretSize);
6553 }
6554
6555
/*! Convenience alias for the seeded one-shot 128-bit hash. */
XXH_PUBLIC_API XXH128_hash_t
XXH128(XXH_NOESCAPE const void* input, size_t len, XXH64_hash_t seed)
{
    return XXH3_128bits_withSeed(input, len, seed);
}
6561
6562
6563
6564 #ifndef XXH_NO_STREAM
6565
6566
6567
6568
6569
6570
/*! Resets a streaming state for an unseeded 128-bit hash.
 *  The accumulation state is shared with the 64-bit streaming API,
 *  so this simply forwards. */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_reset(XXH_NOESCAPE XXH3_state_t* statePtr)
{
    return XXH3_64bits_reset(statePtr);
}
6576
6577
/*! Resets a streaming state with a caller-provided secret; forwards to the
 *  shared 64-bit implementation. */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_reset_withSecret(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize)
{
    return XXH3_64bits_reset_withSecret(statePtr, secret, secretSize);
}
6583
6584
/*! Resets a streaming state with a seed; forwards to the shared
 *  64-bit implementation. */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_reset_withSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH64_hash_t seed)
{
    return XXH3_64bits_reset_withSeed(statePtr, seed);
}
6590
6591
/*! Resets a streaming state with both a secret and a seed; forwards to the
 *  shared 64-bit implementation. */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_reset_withSecretandSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize, XXH64_hash_t seed)
{
    return XXH3_64bits_reset_withSecretandSeed(statePtr, secret, secretSize, seed);
}
6597
6598
/*! Feeds more input into a 128-bit streaming state.
 *  Accumulation is identical to the 64-bit variant; only the digest differs. */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_update(XXH_NOESCAPE XXH3_state_t* state, XXH_NOESCAPE const void* input, size_t len)
{
    return XXH3_64bits_update(state, input, len);
}
6604
6605
/*! Computes the 128-bit XXH3 hash of all input consumed so far by the streaming state. */
XXH_PUBLIC_API XXH128_hash_t XXH3_128bits_digest (XXH_NOESCAPE const XXH3_state_t* state)
{
    /* Prefer the externally-registered secret when one is present. */
    const unsigned char* const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
    if (state->totalLen > XXH3_MIDSIZE_MAX) {
        /* Long input: finish the striped accumulation, then fold the
         * accumulators twice (different secret offsets) for the two halves. */
        XXH_ALIGN(XXH_ACC_ALIGN) XXH64_hash_t acc[XXH_ACC_NB];
        XXH3_digest_long(acc, state, secret);
        XXH_ASSERT(state->secretLimit + XXH_STRIPE_LEN >= sizeof(acc) + XXH_SECRET_MERGEACCS_START);
        { XXH128_hash_t h128;
            h128.low64 = XXH3_mergeAccs(acc,
                                        secret + XXH_SECRET_MERGEACCS_START,
                                        (xxh_u64)state->totalLen * XXH_PRIME64_1);
            h128.high64 = XXH3_mergeAccs(acc,
                                         secret + state->secretLimit + XXH_STRIPE_LEN
                                                - sizeof(acc) - XXH_SECRET_MERGEACCS_START,
                                         ~((xxh_u64)state->totalLen * XXH_PRIME64_2));
            return h128;
        }
    }
    /* Short input: the whole message is still in state->buffer, so re-hash it
     * with the one-shot variants. A zero seed hashes identically to the
     * unseeded path, so testing the seed value is sufficient here. */
    if (state->seed)
        return XXH3_128bits_withSeed(state->buffer, (size_t)state->totalLen, state->seed);
    return XXH3_128bits_withSecret(state->buffer, (size_t)(state->totalLen),
                                   secret, state->secretLimit + XXH_STRIPE_LEN);
}
6630 #endif
6631
6632
6633 #include <string.h> /* memcmp, memcpy */
6634
6635
6636
6637 XXH_PUBLIC_API int XXH128_isEqual(XXH128_hash_t h1, XXH128_hash_t h2)
6638 {
6639
6640 return !(memcmp(&h1, &h2, sizeof(h1)));
6641 }
6642
6643
6644
6645
6646
6647
6648 XXH_PUBLIC_API int XXH128_cmp(XXH_NOESCAPE const void* h128_1, XXH_NOESCAPE const void* h128_2)
6649 {
6650 XXH128_hash_t const h1 = *(const XXH128_hash_t*)h128_1;
6651 XXH128_hash_t const h2 = *(const XXH128_hash_t*)h128_2;
6652 int const hcmp = (h1.high64 > h2.high64) - (h2.high64 > h1.high64);
6653
6654 if (hcmp) return hcmp;
6655 return (h1.low64 > h2.low64) - (h2.low64 > h1.low64);
6656 }
6657
6658
6659
6660
/*!
 * Serializes a 128-bit hash into canonical form: big-endian, high half first.
 */
XXH_PUBLIC_API void
XXH128_canonicalFromHash(XXH_NOESCAPE XXH128_canonical_t* dst, XXH128_hash_t hash)
{
    XXH_STATIC_ASSERT(sizeof(XXH128_canonical_t) == sizeof(XXH128_hash_t));
    /* On little-endian hosts, byte-swap both halves to emit big-endian bytes. */
    if (XXH_CPU_LITTLE_ENDIAN) {
        hash.high64 = XXH_swap64(hash.high64);
        hash.low64 = XXH_swap64(hash.low64);
    }
    /* high half occupies bytes 0..7, low half bytes 8..15 */
    XXH_memcpy(dst, &hash.high64, sizeof(hash.high64));
    XXH_memcpy((char*)dst + sizeof(hash.high64), &hash.low64, sizeof(hash.low64));
}
6672
6673
/*! Rebuilds a 128-bit hash value from its canonical big-endian representation. */
XXH_PUBLIC_API XXH128_hash_t
XXH128_hashFromCanonical(XXH_NOESCAPE const XXH128_canonical_t* src)
{
    XXH128_hash_t h;
    h.high64 = XXH_readBE64(src);             /* bytes 0..7  */
    h.low64 = XXH_readBE64(src->digest + 8);  /* bytes 8..15 */
    return h;
}
6682
6683
6684
6685
6686
6687
6688
/* Minimum of two values. Arguments may be evaluated twice: avoid side effects. */
#define XXH_MIN(x, y) (((x) > (y)) ? (y) : (x))
6690
6691 XXH_FORCE_INLINE void XXH3_combine16(void* dst, XXH128_hash_t h128)
6692 {
6693 XXH_writeLE64( dst, XXH_readLE64(dst) ^ h128.low64 );
6694 XXH_writeLE64( (char*)dst+8, XXH_readLE64((char*)dst+8) ^ h128.high64 );
6695 }
6696
6697
/*!
 * Derives a @p secretSize byte secret from arbitrary seed material.
 * The seed bytes are first tiled across the buffer, then every 16-byte
 * segment is scrambled with an XXH128-based keystream.
 * @return XXH_OK on success, XXH_ERROR on invalid arguments
 *         (unless XXH_DEBUGLEVEL >= 1, where bad arguments assert instead).
 */
XXH_PUBLIC_API XXH_errorcode
XXH3_generateSecret(XXH_NOESCAPE void* secretBuffer, size_t secretSize, XXH_NOESCAPE const void* customSeed, size_t customSeedSize)
{
#if (XXH_DEBUGLEVEL >= 1)
    XXH_ASSERT(secretBuffer != NULL);
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN);
#else
    /* release mode: reject bad parameters instead of asserting */
    if (secretBuffer == NULL) return XXH_ERROR;
    if (secretSize < XXH3_SECRET_SIZE_MIN) return XXH_ERROR;
#endif

    /* No seed material provided: fall back to the built-in default secret. */
    if (customSeedSize == 0) {
        customSeed = XXH3_kSecret;
        customSeedSize = XXH_SECRET_DEFAULT_SIZE;
    }
#if (XXH_DEBUGLEVEL >= 1)
    XXH_ASSERT(customSeed != NULL);
#else
    if (customSeed == NULL) return XXH_ERROR;
#endif

    /* Fill secretBuffer by repeating customSeed as many times as needed. */
    { size_t pos = 0;
        while (pos < secretSize) {
            size_t const toCopy = XXH_MIN((secretSize - pos), customSeedSize);
            memcpy((char*)secretBuffer + pos, customSeed, toCopy);
            pos += toCopy;
    }   }

    /* Scramble each full 16-byte segment with a per-segment XXH128 value. */
    { size_t const nbSeg16 = secretSize / 16;
        size_t n;
        XXH128_canonical_t scrambler;
        XXH128_canonicalFromHash(&scrambler, XXH128(customSeed, customSeedSize, 0));
        for (n=0; n<nbSeg16; n++) {
            XXH128_hash_t const h128 = XXH128(&scrambler, sizeof(scrambler), n);
            XXH3_combine16((char*)secretBuffer + n*16, h128);
        }
        /* Also scramble the last 16 bytes, covering any secretSize % 16 tail. */
        XXH3_combine16((char*)secretBuffer + secretSize - 16, XXH128_hashFromCanonical(&scrambler));
    }
    return XXH_OK;
}
6741
6742
/*!
 * Fills @p secretBuffer with XXH_SECRET_DEFAULT_SIZE bytes derived from @p seed,
 * using the same derivation (XXH3_initCustomSecret) as the seeded hash variants.
 */
XXH_PUBLIC_API void
XXH3_generateSecret_fromSeed(XXH_NOESCAPE void* secretBuffer, XXH64_hash_t seed)
{
    /* Build the secret in an aligned scratch buffer, then copy it out, so
     * secretBuffer itself needs no particular alignment. */
    XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
    XXH3_initCustomSecret(secret, seed);
    XXH_ASSERT(secretBuffer != NULL);
    memcpy(secretBuffer, secret, XXH_SECRET_DEFAULT_SIZE);
}
6751
6752
6753
6754
6755 #if XXH_VECTOR == XXH_AVX2 \
6756 && defined(__GNUC__) && !defined(__clang__) \
6757 && defined(__OPTIMIZE__) && XXH_SIZE_OPT <= 0
6758 # pragma GCC pop_options
6759 #endif
6760
6761 #endif
6762
6763 #endif
6764
6765
6766
6767
6768 #endif
6769
6770
6771 #if defined (__cplusplus)
6772 }
6773 #endif