/* File indexing completed on 2025-09-18 09:40:33 */
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
0044
0045
0046
0047
0048
0049
0050
0051
0052
0053
0054
0055
0056
0057
0058
0059
0060
0061
0062
0063
0064
0065
0066
0067
0068
0069
0070
0071
0072
0073
0074
0075
0076
0077
0078
0079
0080
0081
0082
0083
0084
0085
0086
0087
0088
0089
0090
0091
0092
0093
0094
0095
0096
0097
0098
0099
0100
0101
0102
0103
0104
0105
0106
0107
0108
0109
0110
0111
0112
0113
0114
0115
0116
0117
0118
0119
0120
0121
0122
0123
0124
0125
0126
0127
0128
0129
0130
0131
0132
0133
0134
0135
0136
0137
0138
0139
0140
0141
0142
0143
0144
0145
0146
0147
0148
0149
0150
0151
0152
0153
0154
0155
0156
0157
0158
0159
0160
0161
0162
0163
0164
0165
0166
0167
0168
0169
0170
0171
0172
0173
0174
0175
0176
0177
0178
0179
0180
0181
0182
0183
0184
0185
0186
0187
0188
0189
0190
0191
0192
0193
0194
0195
0196
0197
0198
0199
0200
0201
0202
0203
0204
0205
0206
0207
0208
0209
0210
0211
0212
0213
0214
0215
0216
0217
0218
0219
0220
0221
0222
0223
0224
0225
0226
0227
0228
0229
0230
0231
0232
0233
0234
0235
0236
0237
0238
0239
0240
0241
0242
0243
0244 #if defined (__cplusplus)
0245 extern "C" {
0246 #endif
0247
0248
0249
0250
0251
0252
0253
0254
0255
0256 #ifdef XXH_DOXYGEN
0257
0258
0259
0260
0261
0262
0263
0264
0265
0266
0267
0268 # define XXH_STATIC_LINKING_ONLY
0269
0270
0271
0272
0273
0274
0275
0276
0277
0278
0279
0280
0281 # define XXH_IMPLEMENTATION
0282
0283
0284
0285
0286
0287
0288
0289
0290
0291
0292
0293
0294
0295
0296
0297
0298
0299
0300
0301
0302 # define XXH_INLINE_ALL
0303 # undef XXH_INLINE_ALL
0304
0305
0306
0307 # define XXH_PRIVATE_API
0308 # undef XXH_PRIVATE_API
0309
0310
0311
0312
0313
0314
0315
0316
0317
0318
0319
0320
0321
0322 # define XXH_NAMESPACE
0323 # undef XXH_NAMESPACE
0324 #endif
0325
0326 #if (defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)) \
0327 && !defined(XXH_INLINE_ALL_31684351384)
0328
0329 # define XXH_INLINE_ALL_31684351384
0330
0331 # undef XXH_STATIC_LINKING_ONLY
0332 # define XXH_STATIC_LINKING_ONLY
0333
0334 # undef XXH_PUBLIC_API
0335 # if defined(__GNUC__)
0336 # define XXH_PUBLIC_API static __inline __attribute__((__unused__))
0337 # elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) )
0338 # define XXH_PUBLIC_API static inline
0339 # elif defined(_MSC_VER)
0340 # define XXH_PUBLIC_API static __inline
0341 # else
0342
0343 # define XXH_PUBLIC_API static
0344 # endif
0345
0346
0347
0348
0349
0350
0351
0352
0353
0354
0355
0356
0357
0358
0359 # undef XXH_versionNumber
0360
0361 # undef XXH32
0362 # undef XXH32_createState
0363 # undef XXH32_freeState
0364 # undef XXH32_reset
0365 # undef XXH32_update
0366 # undef XXH32_digest
0367 # undef XXH32_copyState
0368 # undef XXH32_canonicalFromHash
0369 # undef XXH32_hashFromCanonical
0370
0371 # undef XXH64
0372 # undef XXH64_createState
0373 # undef XXH64_freeState
0374 # undef XXH64_reset
0375 # undef XXH64_update
0376 # undef XXH64_digest
0377 # undef XXH64_copyState
0378 # undef XXH64_canonicalFromHash
0379 # undef XXH64_hashFromCanonical
0380
0381 # undef XXH3_64bits
0382 # undef XXH3_64bits_withSecret
0383 # undef XXH3_64bits_withSeed
0384 # undef XXH3_64bits_withSecretandSeed
0385 # undef XXH3_createState
0386 # undef XXH3_freeState
0387 # undef XXH3_copyState
0388 # undef XXH3_64bits_reset
0389 # undef XXH3_64bits_reset_withSeed
0390 # undef XXH3_64bits_reset_withSecret
0391 # undef XXH3_64bits_update
0392 # undef XXH3_64bits_digest
0393 # undef XXH3_generateSecret
0394
0395 # undef XXH128
0396 # undef XXH3_128bits
0397 # undef XXH3_128bits_withSeed
0398 # undef XXH3_128bits_withSecret
0399 # undef XXH3_128bits_reset
0400 # undef XXH3_128bits_reset_withSeed
0401 # undef XXH3_128bits_reset_withSecret
0402 # undef XXH3_128bits_reset_withSecretandSeed
0403 # undef XXH3_128bits_update
0404 # undef XXH3_128bits_digest
0405 # undef XXH128_isEqual
0406 # undef XXH128_cmp
0407 # undef XXH128_canonicalFromHash
0408 # undef XXH128_hashFromCanonical
0409
0410 # undef XXH_NAMESPACE
0411
0412
0413 # define XXH_NAMESPACE XXH_INLINE_
0414
0415
0416
0417
0418
0419
0420
0421 # define XXH_IPREF(Id) XXH_NAMESPACE ## Id
0422 # define XXH_OK XXH_IPREF(XXH_OK)
0423 # define XXH_ERROR XXH_IPREF(XXH_ERROR)
0424 # define XXH_errorcode XXH_IPREF(XXH_errorcode)
0425 # define XXH32_canonical_t XXH_IPREF(XXH32_canonical_t)
0426 # define XXH64_canonical_t XXH_IPREF(XXH64_canonical_t)
0427 # define XXH128_canonical_t XXH_IPREF(XXH128_canonical_t)
0428 # define XXH32_state_s XXH_IPREF(XXH32_state_s)
0429 # define XXH32_state_t XXH_IPREF(XXH32_state_t)
0430 # define XXH64_state_s XXH_IPREF(XXH64_state_s)
0431 # define XXH64_state_t XXH_IPREF(XXH64_state_t)
0432 # define XXH3_state_s XXH_IPREF(XXH3_state_s)
0433 # define XXH3_state_t XXH_IPREF(XXH3_state_t)
0434 # define XXH128_hash_t XXH_IPREF(XXH128_hash_t)
0435
0436 # undef XXHASH_H_5627135585666179
0437 # undef XXHASH_H_STATIC_13879238742
0438 #endif
0439
0440
0441
0442
0443 #ifndef XXHASH_H_5627135585666179
0444 #define XXHASH_H_5627135585666179 1
0445
0446
0447 #if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API)
0448 # if defined(_WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT))
0449 # ifdef XXH_EXPORT
0450 # define XXH_PUBLIC_API __declspec(dllexport)
0451 # elif XXH_IMPORT
0452 # define XXH_PUBLIC_API __declspec(dllimport)
0453 # endif
0454 # else
0455 # define XXH_PUBLIC_API
0456 # endif
0457 #endif
0458
0459 #ifdef XXH_NAMESPACE
0460 # define XXH_CAT(A,B) A##B
0461 # define XXH_NAME2(A,B) XXH_CAT(A,B)
0462 # define XXH_versionNumber XXH_NAME2(XXH_NAMESPACE, XXH_versionNumber)
0463
0464 # define XXH32 XXH_NAME2(XXH_NAMESPACE, XXH32)
0465 # define XXH32_createState XXH_NAME2(XXH_NAMESPACE, XXH32_createState)
0466 # define XXH32_freeState XXH_NAME2(XXH_NAMESPACE, XXH32_freeState)
0467 # define XXH32_reset XXH_NAME2(XXH_NAMESPACE, XXH32_reset)
0468 # define XXH32_update XXH_NAME2(XXH_NAMESPACE, XXH32_update)
0469 # define XXH32_digest XXH_NAME2(XXH_NAMESPACE, XXH32_digest)
0470 # define XXH32_copyState XXH_NAME2(XXH_NAMESPACE, XXH32_copyState)
0471 # define XXH32_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH32_canonicalFromHash)
0472 # define XXH32_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH32_hashFromCanonical)
0473
0474 # define XXH64 XXH_NAME2(XXH_NAMESPACE, XXH64)
0475 # define XXH64_createState XXH_NAME2(XXH_NAMESPACE, XXH64_createState)
0476 # define XXH64_freeState XXH_NAME2(XXH_NAMESPACE, XXH64_freeState)
0477 # define XXH64_reset XXH_NAME2(XXH_NAMESPACE, XXH64_reset)
0478 # define XXH64_update XXH_NAME2(XXH_NAMESPACE, XXH64_update)
0479 # define XXH64_digest XXH_NAME2(XXH_NAMESPACE, XXH64_digest)
0480 # define XXH64_copyState XXH_NAME2(XXH_NAMESPACE, XXH64_copyState)
0481 # define XXH64_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH64_canonicalFromHash)
0482 # define XXH64_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH64_hashFromCanonical)
0483
0484 # define XXH3_64bits XXH_NAME2(XXH_NAMESPACE, XXH3_64bits)
0485 # define XXH3_64bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecret)
0486 # define XXH3_64bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSeed)
0487 # define XXH3_64bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecretandSeed)
0488 # define XXH3_createState XXH_NAME2(XXH_NAMESPACE, XXH3_createState)
0489 # define XXH3_freeState XXH_NAME2(XXH_NAMESPACE, XXH3_freeState)
0490 # define XXH3_copyState XXH_NAME2(XXH_NAMESPACE, XXH3_copyState)
0491 # define XXH3_64bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset)
0492 # define XXH3_64bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSeed)
0493 # define XXH3_64bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecret)
0494 # define XXH3_64bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecretandSeed)
0495 # define XXH3_64bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_update)
0496 # define XXH3_64bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_digest)
0497 # define XXH3_generateSecret XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret)
0498 # define XXH3_generateSecret_fromSeed XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret_fromSeed)
0499
0500 # define XXH128 XXH_NAME2(XXH_NAMESPACE, XXH128)
0501 # define XXH3_128bits XXH_NAME2(XXH_NAMESPACE, XXH3_128bits)
0502 # define XXH3_128bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSeed)
0503 # define XXH3_128bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecret)
0504 # define XXH3_128bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecretandSeed)
0505 # define XXH3_128bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset)
0506 # define XXH3_128bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSeed)
0507 # define XXH3_128bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecret)
0508 # define XXH3_128bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecretandSeed)
0509 # define XXH3_128bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_update)
0510 # define XXH3_128bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_digest)
0511 # define XXH128_isEqual XXH_NAME2(XXH_NAMESPACE, XXH128_isEqual)
0512 # define XXH128_cmp XXH_NAME2(XXH_NAMESPACE, XXH128_cmp)
0513 # define XXH128_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH128_canonicalFromHash)
0514 # define XXH128_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH128_hashFromCanonical)
0515 #endif
0516
0517
0518
0519
0520
0521
0522
0523 #if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API)
0524 # if defined(_WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT))
0525 # ifdef XXH_EXPORT
0526 # define XXH_PUBLIC_API __declspec(dllexport)
0527 # elif XXH_IMPORT
0528 # define XXH_PUBLIC_API __declspec(dllimport)
0529 # endif
0530 # else
0531 # define XXH_PUBLIC_API
0532 # endif
0533 #endif
0534
0535 #if defined (__GNUC__)
0536 # define XXH_CONSTF __attribute__((__const__))
0537 # define XXH_PUREF __attribute__((__pure__))
0538 # define XXH_MALLOCF __attribute__((__malloc__))
0539 #else
0540 # define XXH_CONSTF
0541 # define XXH_PUREF
0542 # define XXH_MALLOCF
0543 #endif
0544
0545
0546
0547
0548 #define XXH_VERSION_MAJOR 0
0549 #define XXH_VERSION_MINOR 8
0550 #define XXH_VERSION_RELEASE 3
0551
0552 #define XXH_VERSION_NUMBER (XXH_VERSION_MAJOR *100*100 + XXH_VERSION_MINOR *100 + XXH_VERSION_RELEASE)
0553
0554
0555
0556
0557
0558
0559
0560
0561
0562 XXH_PUBLIC_API XXH_CONSTF unsigned XXH_versionNumber (void);
0563
0564
0565
0566
0567
0568 #include <stddef.h> /* size_t */
0569
0570
0571
/*!
 * @brief Exit code for the streaming API.
 */
0572 typedef enum {
0573     XXH_OK = 0, /*!< OK */
0574     XXH_ERROR   /*!< Error */
0575 } XXH_errorcode;
0576
0577
0578
0579
0580
0581 #if defined(XXH_DOXYGEN)
0582
0583
0584
0585
0586
0587 typedef uint32_t XXH32_hash_t;
0588
0589 #elif !defined (__VMS) \
0590 && (defined (__cplusplus) \
0591 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
0592 # ifdef _AIX
0593 # include <inttypes.h>
0594 # else
0595 # include <stdint.h>
0596 # endif
0597 typedef uint32_t XXH32_hash_t;
0598
0599 #else
0600 # include <limits.h>
0601 # if UINT_MAX == 0xFFFFFFFFUL
0602 typedef unsigned int XXH32_hash_t;
0603 # elif ULONG_MAX == 0xFFFFFFFFUL
0604 typedef unsigned long XXH32_hash_t;
0605 # else
0606 # error "unsupported platform: need a 32-bit type"
0607 # endif
0608 #endif
0609
0610
0611
0612
0613
0614
0615
0616
0617
0618
0619
0620
0621
0622
0623
0624
0625
0626
0627
0628
0629
0630
0631
0632
0633
0634
0635
0636
0637
0638
0639
0640
0641
0642
0643 XXH_PUBLIC_API XXH_PUREF XXH32_hash_t XXH32 (const void* input, size_t length, XXH32_hash_t seed);
0644
0645 #ifndef XXH_NO_STREAM
0646
0647
0648
0649
0650
0651
0652
0653 typedef struct XXH32_state_s XXH32_state_t;
0654
0655
0656
0657
0658
0659
0660
0661
0662
0663
0664
0665 XXH_PUBLIC_API XXH_MALLOCF XXH32_state_t* XXH32_createState(void);
0666
0667
0668
0669
0670
0671
0672
0673
0674
0675
0676
0677
0678 XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t* statePtr);
0679
0680
0681
0682
0683
0684
0685
0686
0687 XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t* dst_state, const XXH32_state_t* src_state);
0688
0689
0690
0691
0692
0693
0694
0695
0696
0697
0698
0699
0700
0701
0702
0703
0704
0705 XXH_PUBLIC_API XXH_errorcode XXH32_reset (XXH32_state_t* statePtr, XXH32_hash_t seed);
0706
0707
0708
0709
0710
0711
0712
0713
0714
0715
0716
0717
0718
0719
0720
0721
0722
0723
0724
0725
0726
0727
0728 XXH_PUBLIC_API XXH_errorcode XXH32_update (XXH32_state_t* statePtr, const void* input, size_t length);
0729
0730
0731
0732
0733
0734
0735
0736
0737
0738
0739
0740
0741
0742
0743
0744
0745
0746 XXH_PUBLIC_API XXH_PUREF XXH32_hash_t XXH32_digest (const XXH32_state_t* statePtr);
0747 #endif
0748
0749
0750
0751
0752
0753
/*!
 * @brief Canonical (big endian) representation of @ref XXH32_hash_t.
 * Produced by XXH32_canonicalFromHash(), consumed by XXH32_hashFromCanonical().
 */
0754 typedef struct {
0755     unsigned char digest[4]; /*!< Hash bytes, big endian */
0756 } XXH32_canonical_t;
0757
0758
0759
0760
0761
0762
0763
0764
0765
0766
0767
0768
0769 XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t* dst, XXH32_hash_t hash);
0770
0771
0772
0773
0774
0775
0776
0777
0778
0779
0780
0781
0782
0783 XXH_PUBLIC_API XXH_PUREF XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t* src);
0784
0785
0786
0787 #ifdef __has_attribute
0788 # define XXH_HAS_ATTRIBUTE(x) __has_attribute(x)
0789 #else
0790 # define XXH_HAS_ATTRIBUTE(x) 0
0791 #endif
0792
0793
0794
0795
0796
0797
0798
0799
0800 #define XXH_C23_VN 201711L
0801
0802
0803
0804
0805 #if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= XXH_C23_VN) && defined(__has_c_attribute)
0806 # define XXH_HAS_C_ATTRIBUTE(x) __has_c_attribute(x)
0807 #else
0808 # define XXH_HAS_C_ATTRIBUTE(x) 0
0809 #endif
0810
0811
0812
0813 #if defined(__cplusplus) && defined(__has_cpp_attribute)
0814 # define XXH_HAS_CPP_ATTRIBUTE(x) __has_cpp_attribute(x)
0815 #else
0816 # define XXH_HAS_CPP_ATTRIBUTE(x) 0
0817 #endif
0818
0819
0820
0821
0822
0823
0824
0825
0826
0827 #if XXH_HAS_C_ATTRIBUTE(fallthrough) || XXH_HAS_CPP_ATTRIBUTE(fallthrough)
0828 # define XXH_FALLTHROUGH [[fallthrough]]
0829 #elif XXH_HAS_ATTRIBUTE(__fallthrough__)
0830 # define XXH_FALLTHROUGH __attribute__ ((__fallthrough__))
0831 #else
0832 # define XXH_FALLTHROUGH
0833 #endif
0834
0835
0836
0837
0838
0839
0840
0841
0842 #if XXH_HAS_ATTRIBUTE(noescape)
0843 # define XXH_NOESCAPE __attribute__((__noescape__))
0844 #else
0845 # define XXH_NOESCAPE
0846 #endif
0847
0848
0849
0850
0851
0852
0853
0854
0855
0856 #ifndef XXH_NO_LONG_LONG
0857
0858
0859
0860 #if defined(XXH_DOXYGEN)
0861
0862
0863
0864
0865
0866 typedef uint64_t XXH64_hash_t;
0867 #elif !defined (__VMS) \
0868 && (defined (__cplusplus) \
0869 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
0870 # ifdef _AIX
0871 # include <inttypes.h>
0872 # else
0873 # include <stdint.h>
0874 # endif
0875 typedef uint64_t XXH64_hash_t;
0876 #else
0877 # include <limits.h>
0878 # if defined(__LP64__) && ULONG_MAX == 0xFFFFFFFFFFFFFFFFULL
0879
0880 typedef unsigned long XXH64_hash_t;
0881 # else
0882
0883 typedef unsigned long long XXH64_hash_t;
0884 # endif
0885 #endif
0886
0887
0888
0889
0890
0891
0892
0893
0894
0895
0896
0897
0898
0899
0900
0901
0902
0903
0904
0905
0906
0907
0908
0909
0910
0911
0912
0913
0914
0915
0916
0917 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH64(XXH_NOESCAPE const void* input, size_t length, XXH64_hash_t seed);
0918
0919
0920 #ifndef XXH_NO_STREAM
0921
0922
0923
0924
0925
0926
0927 typedef struct XXH64_state_s XXH64_state_t;
0928
0929
0930
0931
0932
0933
0934
0935
0936
0937
0938
0939 XXH_PUBLIC_API XXH_MALLOCF XXH64_state_t* XXH64_createState(void);
0940
0941
0942
0943
0944
0945
0946
0947
0948
0949
0950
0951
0952 XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t* statePtr);
0953
0954
0955
0956
0957
0958
0959
0960
0961
0962 XXH_PUBLIC_API void XXH64_copyState(XXH_NOESCAPE XXH64_state_t* dst_state, const XXH64_state_t* src_state);
0963
0964
0965
0966
0967
0968
0969
0970
0971
0972
0973
0974
0975
0976
0977
0978
0979
0980 XXH_PUBLIC_API XXH_errorcode XXH64_reset (XXH_NOESCAPE XXH64_state_t* statePtr, XXH64_hash_t seed);
0981
0982
0983
0984
0985
0986
0987
0988
0989
0990
0991
0992
0993
0994
0995
0996
0997
0998
0999
1000
1001
1002
1003 XXH_PUBLIC_API XXH_errorcode XXH64_update (XXH_NOESCAPE XXH64_state_t* statePtr, XXH_NOESCAPE const void* input, size_t length);
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH64_digest (XXH_NOESCAPE const XXH64_state_t* statePtr);
1022 #endif
1023
1024
1025
1026
1027
/*!
 * @brief Canonical (big endian) representation of @ref XXH64_hash_t.
 * Produced by XXH64_canonicalFromHash(), consumed by XXH64_hashFromCanonical().
 */
1028 typedef struct { unsigned char digest[sizeof(XXH64_hash_t)]; } XXH64_canonical_t;
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041 XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH_NOESCAPE XXH64_canonical_t* dst, XXH64_hash_t hash);
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH64_hashFromCanonical(XXH_NOESCAPE const XXH64_canonical_t* src);
1056
1057 #ifndef XXH_NO_XXH3
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120 # define XXH_SCALAR 0
1121 # define XXH_SSE2 1
1122 # define XXH_AVX2 2
1123 # define XXH_AVX512 3
1124 # define XXH_NEON 4
1125 # define XXH_VSX 5
1126 # define XXH_SVE 6
1127 # define XXH_LSX 7
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH3_64bits(XXH_NOESCAPE const void* input, size_t length);
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH3_64bits_withSeed(XXH_NOESCAPE const void* input, size_t length, XXH64_hash_t seed);
1183
1184
1185
1186
1187
1188
1189
1190
1191 #define XXH3_SECRET_SIZE_MIN 136
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH3_64bits_withSecret(XXH_NOESCAPE const void* data, size_t len, XXH_NOESCAPE const void* secret, size_t secretSize);
1226
1227
1228
1229 #ifndef XXH_NO_STREAM
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243 typedef struct XXH3_state_s XXH3_state_t;
1244 XXH_PUBLIC_API XXH_MALLOCF XXH3_state_t* XXH3_createState(void);
1245 XXH_PUBLIC_API XXH_errorcode XXH3_freeState(XXH3_state_t* statePtr);
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255 XXH_PUBLIC_API void XXH3_copyState(XXH_NOESCAPE XXH3_state_t* dst_state, XXH_NOESCAPE const XXH3_state_t* src_state);
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset(XXH_NOESCAPE XXH3_state_t* statePtr);
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset_withSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH64_hash_t seed);
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset_withSecret(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize);
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347 XXH_PUBLIC_API XXH_errorcode XXH3_64bits_update (XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* input, size_t length);
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t XXH3_64bits_digest (XXH_NOESCAPE const XXH3_state_t* statePtr);
1366 #endif
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
/*!
 * @brief The return value of the 128-bit variant of XXH3.
 *
 * Stored in two 64-bit halves so it works on platforms without a native
 * 128-bit integer type.
 */
1382 typedef struct {
1383     XXH64_hash_t low64;  /*!< Lower 64 bits of the 128-bit hash value  */
1384     XXH64_hash_t high64; /*!< Upper 64 bits of the 128-bit hash value */
1385 } XXH128_hash_t;
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH3_128bits(XXH_NOESCAPE const void* data, size_t len);
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH3_128bits_withSeed(XXH_NOESCAPE const void* data, size_t len, XXH64_hash_t seed);
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH3_128bits_withSecret(XXH_NOESCAPE const void* data, size_t len, XXH_NOESCAPE const void* secret, size_t secretSize);
1454
1455
1456 #ifndef XXH_NO_STREAM
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset(XXH_NOESCAPE XXH3_state_t* statePtr);
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset_withSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH64_hash_t seed);
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset_withSecret(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize);
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554 XXH_PUBLIC_API XXH_errorcode XXH3_128bits_update (XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* input, size_t length);
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH3_128bits_digest (XXH_NOESCAPE const XXH3_state_t* statePtr);
1572 #endif
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587 XXH_PUBLIC_API XXH_PUREF int XXH128_isEqual(XXH128_hash_t h1, XXH128_hash_t h2);
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601 XXH_PUBLIC_API XXH_PUREF int XXH128_cmp(XXH_NOESCAPE const void* h128_1, XXH_NOESCAPE const void* h128_2);
1602
1603
1604
/*!
 * @brief Canonical (big endian) representation of @ref XXH128_hash_t.
 * Produced by XXH128_canonicalFromHash(), consumed by XXH128_hashFromCanonical().
 */
1605 typedef struct { unsigned char digest[sizeof(XXH128_hash_t)]; } XXH128_canonical_t;
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618 XXH_PUBLIC_API void XXH128_canonicalFromHash(XXH_NOESCAPE XXH128_canonical_t* dst, XXH128_hash_t hash);
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH128_hashFromCanonical(XXH_NOESCAPE const XXH128_canonical_t* src);
1632
1633
1634 #endif
1635 #endif
1636
1637
1638
1639
1640 #endif
1641
1642
1643
1644 #if defined(XXH_STATIC_LINKING_ONLY) && !defined(XXHASH_H_STATIC_13879238742)
1645 #define XXHASH_H_STATIC_13879238742
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
1664
1665
1666
1667
1668
1669
1670
1671
/*!
 * @internal
 * @brief Internal state for the XXH32 streaming API.
 *
 * Exposed here (under XXH_STATIC_LINKING_ONLY) only so callers can allocate
 * it statically; fields should be treated as opaque — use the XXH32_*()
 * functions to manipulate the state.
 */
1672 struct XXH32_state_s {
1673    XXH32_hash_t total_len_32; /*!< Total length hashed, modulo 2^32 */
1674    XXH32_hash_t large_len;    /*!< Nonzero once input >= 16 bytes has been seen */
1675    XXH32_hash_t acc[4];       /*!< Accumulator lanes */
1676    unsigned char buffer[16];  /*!< Buffered input, for a partial 16-byte stripe */
1677    XXH32_hash_t bufferedSize; /*!< Number of valid bytes in @ref buffer */
1678    XXH32_hash_t reserved;     /*!< Reserved field — do not read or write */
1679 };
1680
1681
1682 #ifndef XXH_NO_LONG_LONG
1683
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
/*!
 * @internal
 * @brief Internal state for the XXH64 streaming API.
 *
 * Exposed here (under XXH_STATIC_LINKING_ONLY) only so callers can allocate
 * it statically; fields should be treated as opaque — use the XXH64_*()
 * functions to manipulate the state.
 */
1696 struct XXH64_state_s {
1697    XXH64_hash_t total_len;    /*!< Total number of bytes hashed so far */
1698    XXH64_hash_t acc[4];       /*!< Accumulator lanes */
1699    unsigned char buffer[32];  /*!< Buffered input, for a partial 32-byte stripe */
1700    XXH32_hash_t bufferedSize; /*!< Number of valid bytes in @ref buffer */
1701    XXH32_hash_t reserved32;   /*!< Reserved field — do not read or write */
1702    XXH64_hash_t reserved64;   /*!< Reserved field — do not read or write */
1703 };
1704
1705 #ifndef XXH_NO_XXH3
1706
1707 #if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
1708 # define XXH_ALIGN(n) _Alignas(n)
1709 #elif defined(__cplusplus) && (__cplusplus >= 201103L)
1710
1711 # define XXH_ALIGN(n) alignas(n)
1712 #elif defined(__GNUC__)
1713 # define XXH_ALIGN(n) __attribute__ ((aligned(n)))
1714 #elif defined(_MSC_VER)
1715 # define XXH_ALIGN(n) __declspec(align(n))
1716 #else
1717 # define XXH_ALIGN(n)
1718 #endif
1719
1720
1721 #if !(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) \
1722 && ! (defined(__cplusplus) && (__cplusplus >= 201103L)) \
1723 && defined(__GNUC__)
1724 # define XXH_ALIGN_MEMBER(align, type) type XXH_ALIGN(align)
1725 #else
1726 # define XXH_ALIGN_MEMBER(align, type) XXH_ALIGN(align) type
1727 #endif
1728
1729
1730
1731
1732
1733
1734
1735
1736 #define XXH3_INTERNALBUFFER_SIZE 256
1737
1738
1739
1740
1741
1742
1743
1744
1745
1746 #define XXH3_SECRET_DEFAULT_SIZE 192
1747
1748
1749
1750
1751
1752
1753
1754
1755
1756
1757
1758
1759
1760
1761
1762
1763
1764
1765
1766
1767
1768
1769
/*!
 * @internal
 * @brief Internal state for the XXH3 streaming API (64- and 128-bit variants).
 *
 * Exposed here (under XXH_STATIC_LINKING_ONLY) only so callers can allocate
 * it statically. Hot members are 64-byte aligned (see XXH_ALIGN_MEMBER) —
 * presumably to match cache-line / SIMD-load requirements. Treat all fields
 * as opaque; use XXH3_INITSTATE() or XXH3_createState() to initialize.
 */
1770 struct XXH3_state_s {
1771    XXH_ALIGN_MEMBER(64, XXH64_hash_t acc[8]);
       /*!< The 8 accumulator lanes */
1772
1773    XXH_ALIGN_MEMBER(64, unsigned char customSecret[XXH3_SECRET_DEFAULT_SIZE]);
       /*!< Internally generated secret, used when hashing with a seed */
1774
1775    XXH_ALIGN_MEMBER(64, unsigned char buffer[XXH3_INTERNALBUFFER_SIZE]);
       /*!< Internal buffer for partial input */
1776
1777    XXH32_hash_t bufferedSize;
       /*!< Number of valid bytes in @ref buffer */
1778
1779    XXH32_hash_t useSeed;
       /*!< Nonzero when the state was reset with a seed — TODO confirm exact semantics */
1780
1781    size_t nbStripesSoFar;
       /*!< Number of stripes processed in the current block */
1782
1783    XXH64_hash_t totalLen;
       /*!< Total number of bytes hashed so far */
1784
1785    size_t nbStripesPerBlock;
       /*!< Number of stripes per block, derived from the secret length */
1786
1787    size_t secretLimit;
       /*!< Boundary offset within the secret — do not modify */
1788
1789    XXH64_hash_t seed;
       /*!< Seed supplied at reset time; 0 when unseeded (see XXH3_INITSTATE) */
1790
1791    XXH64_hash_t reserved64;
       /*!< Reserved field — do not read or write */
1792
1793    const unsigned char* extSecret;
       /*!< Reference to an external secret supplied by the caller;
        *   NULL when the internal @ref customSecret is in use
        *   (see XXH3_INITSTATE). */
1794
1795
1796
1797 };
1798
1799 #undef XXH_ALIGN_MEMBER
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
/*!
 * @brief Initializes a stack-allocated XXH3_state_t.
 *
 * A statically/stack-allocated state contains garbage; this macro puts the
 * two members that must be sane before the first reset (@c seed and
 * @c extSecret) into a known-good configuration: no seed, no external secret.
 * Multi-statement body is wrapped in do/while(0) so the macro behaves like a
 * single statement. The argument is evaluated exactly once (captured in
 * @c tmp_xxh3_state_ptr).
 */
1812 #define XXH3_INITSTATE(XXH3_state_ptr)                       \
1813     do {                                                     \
1814         XXH3_state_t* tmp_xxh3_state_ptr = (XXH3_state_ptr); \
1815         tmp_xxh3_state_ptr->seed = 0;                        \
1816         tmp_xxh3_state_ptr->extSecret = NULL;                \
1817     } while(0)
1818
1819
1820
1821
1822
1823
1824
1825
1826
1827
1828
1829
1830
1831
1832
1833
1834
1835
1836 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t XXH128(XXH_NOESCAPE const void* data, size_t len, XXH64_hash_t seed);
1837
1838
1839
1840
1841
1842
1843
1844
1845
1846
1847
1848
1849
1850
1851
1852
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862
1863
1864
1865
1866
1867
1868
1869
1870
1871
1872
1873
1874
1875
1876
1877
1878
1879
1880
1881
1882
1883
1884
1885
1886
1887
1888
1889
1890
1891
1892
1893
1894
1895
1896
1897
1898
1899
1900 XXH_PUBLIC_API XXH_errorcode XXH3_generateSecret(XXH_NOESCAPE void* secretBuffer, size_t secretSize, XXH_NOESCAPE const void* customSeed, size_t customSeedSize);
1901
1902
1903
1904
1905
1906
1907
1908
1909
1910
1911
1912
1913
1914
1915
1916
1917
1918
1919
1920
1921
1922
1923
1924
1925
1926
1927
1928
1929
1930
1931
1932
1933
1934
1935
1936
1937
1938
1939
1940 XXH_PUBLIC_API void XXH3_generateSecret_fromSeed(XXH_NOESCAPE void* secretBuffer, XXH64_hash_t seed);
1941
1942
1943
1944
1945 #define XXH3_MIDSIZE_MAX 240
1946
1947
1948
1949
1950
1951
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981 XXH_PUBLIC_API XXH_PUREF XXH64_hash_t
1982 XXH3_64bits_withSecretandSeed(XXH_NOESCAPE const void* data, size_t len,
1983 XXH_NOESCAPE const void* secret, size_t secretSize,
1984 XXH64_hash_t seed);
1985
1986
1987
1988
1989
1990
1991
1992
1993
1994
1995
1996
1997
1998
1999
2000 XXH_PUBLIC_API XXH_PUREF XXH128_hash_t
2001 XXH3_128bits_withSecretandSeed(XXH_NOESCAPE const void* input, size_t length,
2002 XXH_NOESCAPE const void* secret, size_t secretSize,
2003 XXH64_hash_t seed64);
2004
2005 #ifndef XXH_NO_STREAM
2006
2007
2008
2009
2010
2011
2012
2013
2014
2015
2016
2017
2018
2019 XXH_PUBLIC_API XXH_errorcode
2020 XXH3_64bits_reset_withSecretandSeed(XXH_NOESCAPE XXH3_state_t* statePtr,
2021 XXH_NOESCAPE const void* secret, size_t secretSize,
2022 XXH64_hash_t seed64);
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
2037
2038
2039
2040
2041
2042
2043
2044
2045 XXH_PUBLIC_API XXH_errorcode
2046 XXH3_128bits_reset_withSecretandSeed(XXH_NOESCAPE XXH3_state_t* statePtr,
2047 XXH_NOESCAPE const void* secret, size_t secretSize,
2048 XXH64_hash_t seed64);
2049
2050 #endif
2051
2052 #endif
2053 #endif
2054 #if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)
2055 # define XXH_IMPLEMENTATION
2056 #endif
2057
2058 #endif
2059
2060
2061
2062
2063
2064
2065
2066
2067
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
2082
2083
2084
2085
2086
2087
2088 #if ( defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API) \
2089 || defined(XXH_IMPLEMENTATION) ) && !defined(XXH_IMPLEM_13a8737387)
2090 # define XXH_IMPLEM_13a8737387
2091
2092
2093
2094
2095
2096
2097
2098
2099
2100
2101
2102 #ifdef XXH_DOXYGEN
2103
2104
2105
2106
2107
2108 # define XXH_NO_LONG_LONG
2109 # undef XXH_NO_LONG_LONG
2110
2111
2112
2113
2114
2115
2116
2117
2118
2119
2120
2121
2122
2123
2124
2125
2126
2127
2128
2129
2130
2131
2132
2133
2134
2135
2136
2137
2138
2139
2140
2141
2142
2143
2144
2145
2146
2147
2148
2149
2150
2151
2152
2153
2154
2155
2156
2157
2158
2159
2160 # define XXH_FORCE_MEMORY_ACCESS 0
2161
2162
2163
2164
2165
2166
2167
2168
2169
2170
2171
2172
2173
2174
2175
2176
2177
2178
2179
2180
2181
2182
2183
2184
2185
2186
2187
2188 # define XXH_SIZE_OPT 0
2189
2190
2191
2192
2193
2194
2195
2196
2197
2198
2199
2200
2201
2202
2203
2204
2205
2206
2207
2208
2209
2210
2211
2212
2213
2214
2215
2216
2217
2218 # define XXH_FORCE_ALIGN_CHECK 0
2219
2220
2221
2222
2223
2224
2225
2226
2227
2228
2229
2230
2231
2232
2233
2234
2235
2236
2237
2238
2239
2240 # define XXH_NO_INLINE_HINTS 0
2241
2242
2243
2244
2245
2246
2247
2248
2249
2250
2251
2252
2253
2254
2255
2256
2257 # define XXH3_INLINE_SECRET 0
2258
2259
2260
2261
2262
2263
2264
2265
2266
2267
2268
2269 # define XXH32_ENDJMP 0
2270
2271
2272
2273
2274
2275
2276
2277
2278 # define XXH_OLD_NAMES
2279 # undef XXH_OLD_NAMES
2280
2281
2282
2283
2284
2285
2286
2287
2288
2289 # define XXH_NO_STREAM
2290 # undef XXH_NO_STREAM
2291 #endif
2292
2293
2294
2295
2296 #ifndef XXH_FORCE_MEMORY_ACCESS
2297
2298
2299
2300 # if defined(__GNUC__) && !(defined(__ARM_ARCH) && __ARM_ARCH < 7 && defined(__ARM_FEATURE_UNALIGNED))
2301 # define XXH_FORCE_MEMORY_ACCESS 1
2302 # endif
2303 #endif
2304
2305 #ifndef XXH_SIZE_OPT
2306
2307 # if (defined(__GNUC__) || defined(__clang__)) && defined(__OPTIMIZE_SIZE__)
2308 # define XXH_SIZE_OPT 1
2309 # else
2310 # define XXH_SIZE_OPT 0
2311 # endif
2312 #endif
2313
2314 #ifndef XXH_FORCE_ALIGN_CHECK
2315
2316 # if XXH_SIZE_OPT >= 1 || \
2317 defined(__i386) || defined(__x86_64__) || defined(__aarch64__) || defined(__ARM_FEATURE_UNALIGNED) \
2318 || defined(_M_IX86) || defined(_M_X64) || defined(_M_ARM64) || defined(_M_ARM)
2319 # define XXH_FORCE_ALIGN_CHECK 0
2320 # else
2321 # define XXH_FORCE_ALIGN_CHECK 1
2322 # endif
2323 #endif
2324
2325 #ifndef XXH_NO_INLINE_HINTS
2326 # if XXH_SIZE_OPT >= 1 || defined(__NO_INLINE__)
2327 # define XXH_NO_INLINE_HINTS 1
2328 # else
2329 # define XXH_NO_INLINE_HINTS 0
2330 # endif
2331 #endif
2332
2333 #ifndef XXH3_INLINE_SECRET
2334 # if (defined(__GNUC__) && !defined(__clang__) && __GNUC__ >= 12) \
2335 || !defined(XXH_INLINE_ALL)
2336 # define XXH3_INLINE_SECRET 0
2337 # else
2338 # define XXH3_INLINE_SECRET 1
2339 # endif
2340 #endif
2341
2342 #ifndef XXH32_ENDJMP
2343
2344 # define XXH32_ENDJMP 0
2345 #endif
2346
2347
2348
2349
2350
2351
2352
2353
2354
2355
2356 #if defined(XXH_NO_STREAM)
2357
2358 #elif defined(XXH_NO_STDLIB)
2359
2360
2361
2362
2363
2364
2365
2366
2367
2368
/*
 * XXH_NO_STDLIB build: no <stdlib.h> available, so allocation always fails.
 * XXH_malloc() returning NULL makes every *_createState() call fail cleanly,
 * and XXH_free() is a matching no-op.
 */
2369 static XXH_CONSTF void* XXH_malloc(size_t s) { (void)s; return NULL; }
2370 static void XXH_free(void* p) { (void)p; }
2371
2372 #else
2373
2374
2375
2376
2377
2378 #include <stdlib.h>
2379
2380
2381
2382
2383
/*!
 * @internal
 * @brief Modify this function to use a different routine than malloc().
 */
2384 static XXH_MALLOCF void* XXH_malloc(size_t s) { return malloc(s); }
2385
2386
2387
2388
2389
/*!
 * @internal
 * @brief Modify this function to use a different routine than free().
 */
2390 static void XXH_free(void* p) { free(p); }
2391
2392 #endif
2393
2394 #include <string.h>
2395
2396
2397
2398
2399
/*!
 * @internal
 * @brief Thin wrapper around memcpy().
 *
 * Keeps a single named call site so all of xxHash's memory copies can be
 * redirected in one place. Returns @p dest, exactly like memcpy().
 */
static void* XXH_memcpy(void* dest, const void* src, size_t size)
{
    memcpy(dest, src, size);
    return dest;
}
2404
2405 #include <limits.h> /* ULLONG_MAX */
2406
2407
2408
2409
2410
2411 #ifdef _MSC_VER
2412 # pragma warning(disable : 4127)
2413 #endif
2414
2415 #if XXH_NO_INLINE_HINTS
2416 # if defined(__GNUC__) || defined(__clang__)
2417 # define XXH_FORCE_INLINE static __attribute__((__unused__))
2418 # else
2419 # define XXH_FORCE_INLINE static
2420 # endif
2421 # define XXH_NO_INLINE static
2422
2423 #elif defined(__GNUC__) || defined(__clang__)
2424 # define XXH_FORCE_INLINE static __inline__ __attribute__((__always_inline__, __unused__))
2425 # define XXH_NO_INLINE static __attribute__((__noinline__))
2426 #elif defined(_MSC_VER)
2427 # define XXH_FORCE_INLINE static __forceinline
2428 # define XXH_NO_INLINE static __declspec(noinline)
2429 #elif defined (__cplusplus) \
2430 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L))
2431 # define XXH_FORCE_INLINE static inline
2432 # define XXH_NO_INLINE static
2433 #else
2434 # define XXH_FORCE_INLINE static
2435 # define XXH_NO_INLINE static
2436 #endif
2437
2438 #if defined(XXH_INLINE_ALL)
2439 # define XXH_STATIC XXH_FORCE_INLINE
2440 #else
2441 # define XXH_STATIC static
2442 #endif
2443
2444 #if XXH3_INLINE_SECRET
2445 # define XXH3_WITH_SECRET_INLINE XXH_FORCE_INLINE
2446 #else
2447 # define XXH3_WITH_SECRET_INLINE XXH_NO_INLINE
2448 #endif
2449
2450 #if ((defined(sun) || defined(__sun)) && __cplusplus)
2451 # define XXH_RESTRICT
2452 #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
2453 # define XXH_RESTRICT restrict
2454 #elif (defined (__GNUC__) && ((__GNUC__ > 3) || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))) \
2455 || (defined (__clang__)) \
2456 || (defined (_MSC_VER) && (_MSC_VER >= 1400)) \
2457 || (defined (__INTEL_COMPILER) && (__INTEL_COMPILER >= 1300))
2458
2459
2460
2461
2462 # define XXH_RESTRICT __restrict
2463 #else
2464 # define XXH_RESTRICT
2465 #endif
2466
2467
2468
2469
2470
2471
2472
2473
2474
2475
2476
2477
2478 #ifndef XXH_DEBUGLEVEL
2479 # ifdef DEBUGLEVEL
2480 # define XXH_DEBUGLEVEL DEBUGLEVEL
2481 # else
2482 # define XXH_DEBUGLEVEL 0
2483 # endif
2484 #endif
2485
2486 #if (XXH_DEBUGLEVEL>=1)
2487 # include <assert.h> /* note: can still be disabled with NDEBUG */
2488 # define XXH_ASSERT(c) assert(c)
2489 #else
2490 # if defined(__INTEL_COMPILER)
2491 # define XXH_ASSERT(c) XXH_ASSUME((unsigned char) (c))
2492 # else
2493 # define XXH_ASSERT(c) XXH_ASSUME(c)
2494 # endif
2495 #endif
2496
2497
2498 #ifndef XXH_STATIC_ASSERT
2499 # if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
2500 # define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { _Static_assert((c),m); } while(0)
2501 # elif defined(__cplusplus) && (__cplusplus >= 201103L)
2502 # define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { static_assert((c),m); } while(0)
2503 # else
2504 # define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { struct xxh_sa { char x[(c) ? 1 : -1]; }; } while(0)
2505 # endif
2506 # define XXH_STATIC_ASSERT(c) XXH_STATIC_ASSERT_WITH_MESSAGE((c),#c)
2507 #endif
2508
2509
2510
2511
2512
2513
2514
2515
2516
2517
2518
2519
2520
2521
2522
2523
2524
2525 #if defined(__GNUC__) || defined(__clang__)
2526 # define XXH_COMPILER_GUARD(var) __asm__("" : "+r" (var))
2527 #else
2528 # define XXH_COMPILER_GUARD(var) ((void)0)
2529 #endif
2530
2531
2532
2533 #if defined(__clang__) && defined(__ARM_ARCH) && !defined(__wasm__)
2534 # define XXH_COMPILER_GUARD_CLANG_NEON(var) __asm__("" : "+w" (var))
2535 #else
2536 # define XXH_COMPILER_GUARD_CLANG_NEON(var) ((void)0)
2537 #endif
2538
2539
2540
2541
2542 #if !defined (__VMS) \
2543 && (defined (__cplusplus) \
2544 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
2545 # ifdef _AIX
2546 # include <inttypes.h>
2547 # else
2548 # include <stdint.h>
2549 # endif
2550 typedef uint8_t xxh_u8;
2551 #else
2552 typedef unsigned char xxh_u8;
2553 #endif
2554 typedef XXH32_hash_t xxh_u32;
2555
2556 #ifdef XXH_OLD_NAMES
2557 # warning "XXH_OLD_NAMES is planned to be removed starting v0.9. If the program depends on it, consider moving away from it by employing newer type names directly"
2558 # define BYTE xxh_u8
2559 # define U8 xxh_u8
2560 # define U32 xxh_u32
2561 #endif
2562
2563
2564
2565
2566
2567
2568
2569
2570
2571
2572
2573
2574
2575
2576
2577
2578
2579
2580
2581
2582
2583
2584
2585
2586
2587
2588
2589
2590
2591
2592
2593
2594
2595
2596
2597
2598
2599
2600
2601
2602
2603
2604
2605
2606
2607
2608
2609
2610
2611
2612
2613
2614
2615 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2616
2617
2618
2619
2620 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
2621
2622
2623
2624
2625
/* XXH_FORCE_MEMORY_ACCESS==2: direct cast read. Fastest form, but technically
 * violates strict aliasing / alignment rules; only safe where the target and
 * compiler tolerate it (user opted in explicitly). */
static xxh_u32 XXH_read32(const void* memPtr) { return *(const xxh_u32*) memPtr; }
2627
2628 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
2629
2630
2631
2632
2633
2634
2635
2636
2637 #ifdef XXH_OLD_NAMES
2638 typedef union { xxh_u32 u32; } __attribute__((__packed__)) unalign;
2639 #endif
/* XXH_FORCE_MEMORY_ACCESS==1: unaligned read via an aligned(1)-qualified type. */
static xxh_u32 XXH_read32(const void* ptr)
{
    /* aligned(1) tells GCC/Clang the pointee may be misaligned, so the
     * compiler emits a correct (possibly multi-instruction) unaligned load. */
    typedef __attribute__((__aligned__(1))) xxh_u32 xxh_unalign32;
    return *((const xxh_unalign32*)ptr);
}
2645
2646 #else
2647
2648
2649
2650
2651
2652 static xxh_u32 XXH_read32(const void* memPtr)
2653 {
2654 xxh_u32 val;
2655 XXH_memcpy(&val, memPtr, sizeof(val));
2656 return val;
2657 }
2658
2659 #endif
2660
2661
2662
2663
2664
2665
2666
2667
2668
2669
2670
2671
2672
2673
2674
2675
2676
2677
2678
2679
2680 #ifndef XXH_CPU_LITTLE_ENDIAN
2681
2682
2683
2684
2685 # if defined(_WIN32) \
2686 || defined(__LITTLE_ENDIAN__) \
2687 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
2688 # define XXH_CPU_LITTLE_ENDIAN 1
2689 # elif defined(__BIG_ENDIAN__) \
2690 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
2691 # define XXH_CPU_LITTLE_ENDIAN 0
2692 # else
2693
2694
2695
2696
2697
2698
2699 static int XXH_isLittleEndian(void)
2700 {
2701
2702
2703
2704
2705 const union { xxh_u32 u; xxh_u8 c[4]; } one = { 1 };
2706 return one.c[0];
2707 }
2708 # define XXH_CPU_LITTLE_ENDIAN XXH_isLittleEndian()
2709 # endif
2710 #endif
2711
2712
2713
2714
2715
2716
2717
2718 #define XXH_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
2719
2720 #ifdef __has_builtin
2721 # define XXH_HAS_BUILTIN(x) __has_builtin(x)
2722 #else
2723 # define XXH_HAS_BUILTIN(x) 0
2724 #endif
2725
2726
2727
2728
2729
2730
2731
2732
2733
2734
2735
2736
2737
2738
2739
2740
2741
2742
2743
2744
2745
2746
2747
2748
2749
2750
2751
2752
2753
2754
2755 #if XXH_HAS_BUILTIN(__builtin_unreachable)
2756 # define XXH_UNREACHABLE() __builtin_unreachable()
2757
2758 #elif defined(_MSC_VER)
2759 # define XXH_UNREACHABLE() __assume(0)
2760
2761 #else
2762 # define XXH_UNREACHABLE()
2763 #endif
2764
2765 #if XXH_HAS_BUILTIN(__builtin_assume)
2766 # define XXH_ASSUME(c) __builtin_assume(c)
2767 #else
2768 # define XXH_ASSUME(c) if (!(c)) { XXH_UNREACHABLE(); }
2769 #endif
2770
2771
2772
2773
2774
2775
2776
2777
2778
2779
2780
2781
2782
2783
2784 #if !defined(NO_CLANG_BUILTIN) && XXH_HAS_BUILTIN(__builtin_rotateleft32) \
2785 && XXH_HAS_BUILTIN(__builtin_rotateleft64)
2786 # define XXH_rotl32 __builtin_rotateleft32
2787 # define XXH_rotl64 __builtin_rotateleft64
2788 #elif XXH_HAS_BUILTIN(__builtin_stdc_rotate_left)
2789 # define XXH_rotl32 __builtin_stdc_rotate_left
2790 # define XXH_rotl64 __builtin_stdc_rotate_left
2791
2792 #elif defined(_MSC_VER)
2793 # define XXH_rotl32(x,r) _rotl(x,r)
2794 # define XXH_rotl64(x,r) _rotl64(x,r)
2795 #else
2796 # define XXH_rotl32(x,r) (((x) << (r)) | ((x) >> (32 - (r))))
2797 # define XXH_rotl64(x,r) (((x) << (r)) | ((x) >> (64 - (r))))
2798 #endif
2799
2800
2801
2802
2803
2804
2805
2806
2807
2808 #if defined(_MSC_VER)
2809 # define XXH_swap32 _byteswap_ulong
2810 #elif XXH_GCC_VERSION >= 403
2811 # define XXH_swap32 __builtin_bswap32
2812 #else
2813 static xxh_u32 XXH_swap32 (xxh_u32 x)
2814 {
2815 return ((x << 24) & 0xff000000 ) |
2816 ((x << 8) & 0x00ff0000 ) |
2817 ((x >> 8) & 0x0000ff00 ) |
2818 ((x >> 24) & 0x000000ff );
2819 }
2820 #endif
2821
2822
2823
2824
2825
2826
2827
2828
2829
2830
/* Alignment contract passed to the XXH_read*_align() helpers: callers assert
 * whether the pointer is suitably aligned for a direct native-width access. */
typedef enum {
    XXH_aligned,   /* pointer is aligned for the access width */
    XXH_unaligned  /* no alignment guarantee: use the unaligned read path */
} XXH_alignment;
2835
2836
2837
2838
2839
2840
2841 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2842
2843 XXH_FORCE_INLINE xxh_u32 XXH_readLE32(const void* memPtr)
2844 {
2845 const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
2846 return bytePtr[0]
2847 | ((xxh_u32)bytePtr[1] << 8)
2848 | ((xxh_u32)bytePtr[2] << 16)
2849 | ((xxh_u32)bytePtr[3] << 24);
2850 }
2851
2852 XXH_FORCE_INLINE xxh_u32 XXH_readBE32(const void* memPtr)
2853 {
2854 const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
2855 return bytePtr[3]
2856 | ((xxh_u32)bytePtr[2] << 8)
2857 | ((xxh_u32)bytePtr[1] << 16)
2858 | ((xxh_u32)bytePtr[0] << 24);
2859 }
2860
2861 #else
2862 XXH_FORCE_INLINE xxh_u32 XXH_readLE32(const void* ptr)
2863 {
2864 return XXH_CPU_LITTLE_ENDIAN ? XXH_read32(ptr) : XXH_swap32(XXH_read32(ptr));
2865 }
2866
2867 static xxh_u32 XXH_readBE32(const void* ptr)
2868 {
2869 return XXH_CPU_LITTLE_ENDIAN ? XXH_swap32(XXH_read32(ptr)) : XXH_read32(ptr);
2870 }
2871 #endif
2872
2873 XXH_FORCE_INLINE xxh_u32
2874 XXH_readLE32_align(const void* ptr, XXH_alignment align)
2875 {
2876 if (align==XXH_unaligned) {
2877 return XXH_readLE32(ptr);
2878 } else {
2879 return XXH_CPU_LITTLE_ENDIAN ? *(const xxh_u32*)ptr : XXH_swap32(*(const xxh_u32*)ptr);
2880 }
2881 }
2882
2883
2884
2885
2886
2887
/*! Returns the library version the code was compiled with (XXH_VERSION_NUMBER). */
XXH_PUBLIC_API unsigned XXH_versionNumber (void) { return XXH_VERSION_NUMBER; }
2889
2890
2891
2892
2893
2894
2895
2896
2897
2898
2899
2900
2901
2902
2903 #define XXH_PRIME32_1 0x9E3779B1U
2904 #define XXH_PRIME32_2 0x85EBCA77U
2905 #define XXH_PRIME32_3 0xC2B2AE3DU
2906 #define XXH_PRIME32_4 0x27D4EB2FU
2907 #define XXH_PRIME32_5 0x165667B1U
2908
2909 #ifdef XXH_OLD_NAMES
2910 # define PRIME32_1 XXH_PRIME32_1
2911 # define PRIME32_2 XXH_PRIME32_2
2912 # define PRIME32_3 XXH_PRIME32_3
2913 # define PRIME32_4 XXH_PRIME32_4
2914 # define PRIME32_5 XXH_PRIME32_5
2915 #endif
2916
2917
2918
2919
2920
2921
2922
2923
2924
2925
2926
2927
/*!
 * Core XXH32 mixing step: folds one 32-bit lane of input into an accumulator.
 * acc = rotl32(acc + input * PRIME32_2, 13) * PRIME32_1
 */
static xxh_u32 XXH32_round(xxh_u32 acc, xxh_u32 input)
{
    acc += input * XXH_PRIME32_2;
    acc = XXH_rotl32(acc, 13);
    acc *= XXH_PRIME32_1;
#if (defined(__SSE4_1__) || defined(__aarch64__) || defined(__wasm_simd128__)) && !defined(XXH_ENABLE_AUTOVECTORIZE)
    /*
     * NOTE(review): empty asm barrier ("+r" constraint) on the accumulator.
     * Presumably this stops the compiler from auto-vectorizing the scalar
     * round on targets where that codegen is slower -- confirm against
     * upstream xxHash commentary before removing.
     */
    XXH_COMPILER_GUARD(acc);
#endif
    return acc;
}
2974
2975
2976
2977
2978
2979
2980
2981
2982
2983
2984
2985 static xxh_u32 XXH32_avalanche(xxh_u32 hash)
2986 {
2987 hash ^= hash >> 15;
2988 hash *= XXH_PRIME32_2;
2989 hash ^= hash >> 13;
2990 hash *= XXH_PRIME32_3;
2991 hash ^= hash >> 16;
2992 return hash;
2993 }
2994
2995 #define XXH_get32bits(p) XXH_readLE32_align(p, align)
2996
2997
2998
2999
3000
3001 XXH_FORCE_INLINE void
3002 XXH32_initAccs(xxh_u32 *acc, xxh_u32 seed)
3003 {
3004 XXH_ASSERT(acc != NULL);
3005 acc[0] = seed + XXH_PRIME32_1 + XXH_PRIME32_2;
3006 acc[1] = seed + XXH_PRIME32_2;
3007 acc[2] = seed + 0;
3008 acc[3] = seed - XXH_PRIME32_1;
3009 }
3010
3011
3012
3013
3014
3015
3016
3017 XXH_FORCE_INLINE const xxh_u8 *
3018 XXH32_consumeLong(
3019 xxh_u32 *XXH_RESTRICT acc,
3020 xxh_u8 const *XXH_RESTRICT input,
3021 size_t len,
3022 XXH_alignment align
3023 )
3024 {
3025 const xxh_u8* const bEnd = input + len;
3026 const xxh_u8* const limit = bEnd - 15;
3027 XXH_ASSERT(acc != NULL);
3028 XXH_ASSERT(input != NULL);
3029 XXH_ASSERT(len >= 16);
3030 do {
3031 acc[0] = XXH32_round(acc[0], XXH_get32bits(input)); input += 4;
3032 acc[1] = XXH32_round(acc[1], XXH_get32bits(input)); input += 4;
3033 acc[2] = XXH32_round(acc[2], XXH_get32bits(input)); input += 4;
3034 acc[3] = XXH32_round(acc[3], XXH_get32bits(input)); input += 4;
3035 } while (input < limit);
3036
3037 return input;
3038 }
3039
3040
3041
3042
3043
3044 XXH_FORCE_INLINE XXH_PUREF xxh_u32
3045 XXH32_mergeAccs(const xxh_u32 *acc)
3046 {
3047 XXH_ASSERT(acc != NULL);
3048 return XXH_rotl32(acc[0], 1) + XXH_rotl32(acc[1], 7)
3049 + XXH_rotl32(acc[2], 12) + XXH_rotl32(acc[3], 18);
3050 }
3051
3052
3053
3054
3055
3056
3057
3058
3059
3060
3061
3062
3063
3064
3065
3066
/*!
 * Processes the final 0-15 bytes of input and applies the avalanche.
 * @param hash  Accumulated hash so far (lanes merged, length added).
 * @param ptr   Pointer to the remaining bytes; may be NULL only if len == 0.
 * @param len   Remaining length; only the low 4 bits are used (len & 15).
 * @param align Alignment contract for XXH_get32bits reads.
 * @return The finalized, avalanched 32-bit hash.
 */
static XXH_PUREF xxh_u32
XXH32_finalize(xxh_u32 hash, const xxh_u8* ptr, size_t len, XXH_alignment align)
{
/* Mix one tail byte into the hash. */
#define XXH_PROCESS1 do {                             \
    hash += (*ptr++) * XXH_PRIME32_5;                 \
    hash = XXH_rotl32(hash, 11) * XXH_PRIME32_1;      \
} while (0)

/* Mix one tail 4-byte word into the hash. */
#define XXH_PROCESS4 do {                             \
    hash += XXH_get32bits(ptr) * XXH_PRIME32_3;       \
    ptr += 4;                                         \
    hash = XXH_rotl32(hash, 17) * XXH_PRIME32_4;      \
} while (0)

    if (ptr==NULL) XXH_ASSERT(len == 0);

    /* Default path: compact rolled loops (XXH32_ENDJMP == 0). */
    if (!XXH32_ENDJMP) {
        len &= 15;
        while (len >= 4) {
            XXH_PROCESS4;
            len -= 4;
        }
        while (len > 0) {
            XXH_PROCESS1;
            --len;
        }
        return XXH32_avalanche(hash);
    } else {
        /* Alternative path: fully-unrolled jump table on the tail length.
         * Each case chain performs exactly (len/4) PROCESS4 + (len%4) PROCESS1. */
        switch(len&15) {
           case 12:      XXH_PROCESS4;
                         XXH_FALLTHROUGH;
           case 8:       XXH_PROCESS4;
                         XXH_FALLTHROUGH;
           case 4:       XXH_PROCESS4;
                         return XXH32_avalanche(hash);

           case 13:      XXH_PROCESS4;
                         XXH_FALLTHROUGH;
           case 9:       XXH_PROCESS4;
                         XXH_FALLTHROUGH;
           case 5:       XXH_PROCESS4;
                         XXH_PROCESS1;
                         return XXH32_avalanche(hash);

           case 14:      XXH_PROCESS4;
                         XXH_FALLTHROUGH;
           case 10:      XXH_PROCESS4;
                         XXH_FALLTHROUGH;
           case 6:       XXH_PROCESS4;
                         XXH_PROCESS1;
                         XXH_PROCESS1;
                         return XXH32_avalanche(hash);

           case 15:      XXH_PROCESS4;
                         XXH_FALLTHROUGH;
           case 11:      XXH_PROCESS4;
                         XXH_FALLTHROUGH;
           case 7:       XXH_PROCESS4;
                         XXH_FALLTHROUGH;
           case 3:       XXH_PROCESS1;
                         XXH_FALLTHROUGH;
           case 2:       XXH_PROCESS1;
                         XXH_FALLTHROUGH;
           case 1:       XXH_PROCESS1;
                         XXH_FALLTHROUGH;
           case 0:       return XXH32_avalanche(hash);
        }
        XXH_ASSERT(0);
        return hash;   /* unreachable: every case above returns */
    }
}
3139
3140 #ifdef XXH_OLD_NAMES
3141 # define PROCESS1 XXH_PROCESS1
3142 # define PROCESS4 XXH_PROCESS4
3143 #else
3144 # undef XXH_PROCESS1
3145 # undef XXH_PROCESS4
3146 #endif
3147
3148
3149
3150
3151
3152
3153
3154
3155
3156 XXH_FORCE_INLINE XXH_PUREF xxh_u32
3157 XXH32_endian_align(const xxh_u8* input, size_t len, xxh_u32 seed, XXH_alignment align)
3158 {
3159 xxh_u32 h32;
3160
3161 if (input==NULL) XXH_ASSERT(len == 0);
3162
3163 if (len>=16) {
3164 xxh_u32 acc[4];
3165 XXH32_initAccs(acc, seed);
3166
3167 input = XXH32_consumeLong(acc, input, len, align);
3168
3169 h32 = XXH32_mergeAccs(acc);
3170 } else {
3171 h32 = seed + XXH_PRIME32_5;
3172 }
3173
3174 h32 += (xxh_u32)len;
3175
3176 return XXH32_finalize(h32, input, len&15, align);
3177 }
3178
3179
/*! One-shot XXH32 of `len` bytes at `input`, with `seed`. */
XXH_PUBLIC_API XXH32_hash_t XXH32 (const void* input, size_t len, XXH32_hash_t seed)
{
#if !defined(XXH_NO_STREAM) && XXH_SIZE_OPT >= 2
    /* Size-optimized build: reuse the streaming code path instead of
     * instantiating a second (inlined) hash implementation. */
    XXH32_state_t state;
    XXH32_reset(&state, seed);
    XXH32_update(&state, (const xxh_u8*)input, len);
    return XXH32_digest(&state);
#else
    /* On targets without fast unaligned access, take the aligned fast path
     * when the pointer happens to be 4-byte aligned. */
    if (XXH_FORCE_ALIGN_CHECK) {
        if ((((size_t)input) & 3) == 0) {
            return XXH32_endian_align((const xxh_u8*)input, len, seed, XXH_aligned);
    }   }

    return XXH32_endian_align((const xxh_u8*)input, len, seed, XXH_unaligned);
#endif
}
3197
3198
3199
3200
3201 #ifndef XXH_NO_STREAM
3202
3203 XXH_PUBLIC_API XXH32_state_t* XXH32_createState(void)
3204 {
3205 return (XXH32_state_t*)XXH_malloc(sizeof(XXH32_state_t));
3206 }
3207
3208 XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t* statePtr)
3209 {
3210 XXH_free(statePtr);
3211 return XXH_OK;
3212 }
3213
3214
3215 XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t* dstState, const XXH32_state_t* srcState)
3216 {
3217 XXH_memcpy(dstState, srcState, sizeof(*dstState));
3218 }
3219
3220
3221 XXH_PUBLIC_API XXH_errorcode XXH32_reset(XXH32_state_t* statePtr, XXH32_hash_t seed)
3222 {
3223 XXH_ASSERT(statePtr != NULL);
3224 memset(statePtr, 0, sizeof(*statePtr));
3225 XXH32_initAccs(statePtr->acc, seed);
3226 return XXH_OK;
3227 }
3228
3229
3230
/*!
 * Streams `len` bytes into the hash state.
 * Buffers partial 16-byte stripes in state->buffer; full stripes are fed to
 * XXH32_consumeLong(). input==NULL is accepted only with len==0.
 */
XXH_PUBLIC_API XXH_errorcode
XXH32_update(XXH32_state_t* state, const void* input, size_t len)
{
    if (input==NULL) {
        XXH_ASSERT(len == 0);
        return XXH_OK;
    }

    /* total_len_32 wraps modulo 2^32 by design; large_len records whether
     * 16+ bytes were ever seen, which selects the digest path. */
    state->total_len_32 += (XXH32_hash_t)len;
    state->large_len |= (XXH32_hash_t)((len>=16) | (state->total_len_32>=16));

    XXH_ASSERT(state->bufferedSize < sizeof(state->buffer));
    /* Fast path: input fits in the internal buffer without filling it. */
    if (len < sizeof(state->buffer) - state->bufferedSize) {
        XXH_memcpy(state->buffer + state->bufferedSize, input, len);
        state->bufferedSize += (XXH32_hash_t)len;
        return XXH_OK;
    }

    { const xxh_u8* xinput = (const xxh_u8*)input;
        const xxh_u8* const bEnd = xinput + len;

        /* Top up and flush any previously buffered bytes first. */
        if (state->bufferedSize) {
            XXH_memcpy(state->buffer + state->bufferedSize, xinput, sizeof(state->buffer) - state->bufferedSize);
            xinput += sizeof(state->buffer) - state->bufferedSize;
            /* Buffer is exactly one stripe; consume it in place (aligned). */
            (void)XXH32_consumeLong(state->acc, state->buffer, sizeof(state->buffer), XXH_aligned);
            state->bufferedSize = 0;
        }

        XXH_ASSERT(xinput <= bEnd);
        /* Consume remaining full stripes directly from the caller's memory. */
        if ((size_t)(bEnd - xinput) >= sizeof(state->buffer)) {
            xinput = XXH32_consumeLong(state->acc, xinput, (size_t)(bEnd - xinput), XXH_unaligned);
        }

        /* Stash the leftover tail (< 16 bytes) for the next update/digest. */
        if (xinput < bEnd) {
            XXH_memcpy(state->buffer, xinput, (size_t)(bEnd-xinput));
            state->bufferedSize = (unsigned)(bEnd-xinput);
        }
    }

    return XXH_OK;
}
3275
3276
3277
3278 XXH_PUBLIC_API XXH32_hash_t XXH32_digest(const XXH32_state_t* state)
3279 {
3280 xxh_u32 h32;
3281
3282 if (state->large_len) {
3283 h32 = XXH32_mergeAccs(state->acc);
3284 } else {
3285 h32 = state->acc[2] + XXH_PRIME32_5;
3286 }
3287
3288 h32 += state->total_len_32;
3289
3290 return XXH32_finalize(h32, state->buffer, state->bufferedSize, XXH_aligned);
3291 }
3292 #endif
3293
3294
3295
3296
3297 XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t* dst, XXH32_hash_t hash)
3298 {
3299 XXH_STATIC_ASSERT(sizeof(XXH32_canonical_t) == sizeof(XXH32_hash_t));
3300 if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap32(hash);
3301 XXH_memcpy(dst, &hash, sizeof(*dst));
3302 }
3303
/*! Decodes a canonical (big-endian) representation back into a native hash value. */
XXH_PUBLIC_API XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t* src)
{
    return XXH_readBE32(src);
}
3308
3309
3310 #ifndef XXH_NO_LONG_LONG
3311
3312
3313
3314
3315
3316
3317
3318
3319
3320
3321
3322 typedef XXH64_hash_t xxh_u64;
3323
3324 #ifdef XXH_OLD_NAMES
3325 # define U64 xxh_u64
3326 #endif
3327
3328 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
3329
3330
3331
3332
3333 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
3334
3335
/* XXH_FORCE_MEMORY_ACCESS==2: direct cast read; fast but relies on the target
 * tolerating unaligned/aliasing accesses (explicit user opt-in). */
static xxh_u64 XXH_read64(const void* memPtr)
{
    return *(const xxh_u64*) memPtr;
}
3340
3341 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
3342
3343
3344
3345
3346
3347
3348
3349
3350 #ifdef XXH_OLD_NAMES
3351 typedef union { xxh_u32 u32; xxh_u64 u64; } __attribute__((__packed__)) unalign64;
3352 #endif
/* XXH_FORCE_MEMORY_ACCESS==1: unaligned 64-bit read via aligned(1)-qualified type. */
static xxh_u64 XXH_read64(const void* ptr)
{
    /* aligned(1) lets GCC/Clang emit a correct unaligned load. */
    typedef __attribute__((__aligned__(1))) xxh_u64 xxh_unalign64;
    return *((const xxh_unalign64*)ptr);
}
3358
3359 #else
3360
3361
3362
3363
3364
3365 static xxh_u64 XXH_read64(const void* memPtr)
3366 {
3367 xxh_u64 val;
3368 XXH_memcpy(&val, memPtr, sizeof(val));
3369 return val;
3370 }
3371
3372 #endif
3373
3374 #if defined(_MSC_VER)
3375 # define XXH_swap64 _byteswap_uint64
3376 #elif XXH_GCC_VERSION >= 403
3377 # define XXH_swap64 __builtin_bswap64
3378 #else
3379 static xxh_u64 XXH_swap64(xxh_u64 x)
3380 {
3381 return ((x << 56) & 0xff00000000000000ULL) |
3382 ((x << 40) & 0x00ff000000000000ULL) |
3383 ((x << 24) & 0x0000ff0000000000ULL) |
3384 ((x << 8) & 0x000000ff00000000ULL) |
3385 ((x >> 8) & 0x00000000ff000000ULL) |
3386 ((x >> 24) & 0x0000000000ff0000ULL) |
3387 ((x >> 40) & 0x000000000000ff00ULL) |
3388 ((x >> 56) & 0x00000000000000ffULL);
3389 }
3390 #endif
3391
3392
3393
3394 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
3395
3396 XXH_FORCE_INLINE xxh_u64 XXH_readLE64(const void* memPtr)
3397 {
3398 const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
3399 return bytePtr[0]
3400 | ((xxh_u64)bytePtr[1] << 8)
3401 | ((xxh_u64)bytePtr[2] << 16)
3402 | ((xxh_u64)bytePtr[3] << 24)
3403 | ((xxh_u64)bytePtr[4] << 32)
3404 | ((xxh_u64)bytePtr[5] << 40)
3405 | ((xxh_u64)bytePtr[6] << 48)
3406 | ((xxh_u64)bytePtr[7] << 56);
3407 }
3408
3409 XXH_FORCE_INLINE xxh_u64 XXH_readBE64(const void* memPtr)
3410 {
3411 const xxh_u8* bytePtr = (const xxh_u8 *)memPtr;
3412 return bytePtr[7]
3413 | ((xxh_u64)bytePtr[6] << 8)
3414 | ((xxh_u64)bytePtr[5] << 16)
3415 | ((xxh_u64)bytePtr[4] << 24)
3416 | ((xxh_u64)bytePtr[3] << 32)
3417 | ((xxh_u64)bytePtr[2] << 40)
3418 | ((xxh_u64)bytePtr[1] << 48)
3419 | ((xxh_u64)bytePtr[0] << 56);
3420 }
3421
3422 #else
3423 XXH_FORCE_INLINE xxh_u64 XXH_readLE64(const void* ptr)
3424 {
3425 return XXH_CPU_LITTLE_ENDIAN ? XXH_read64(ptr) : XXH_swap64(XXH_read64(ptr));
3426 }
3427
3428 static xxh_u64 XXH_readBE64(const void* ptr)
3429 {
3430 return XXH_CPU_LITTLE_ENDIAN ? XXH_swap64(XXH_read64(ptr)) : XXH_read64(ptr);
3431 }
3432 #endif
3433
3434 XXH_FORCE_INLINE xxh_u64
3435 XXH_readLE64_align(const void* ptr, XXH_alignment align)
3436 {
3437 if (align==XXH_unaligned)
3438 return XXH_readLE64(ptr);
3439 else
3440 return XXH_CPU_LITTLE_ENDIAN ? *(const xxh_u64*)ptr : XXH_swap64(*(const xxh_u64*)ptr);
3441 }
3442
3443
3444
3445
3446
3447
3448
3449
3450
3451
3452
3453
3454 #define XXH_PRIME64_1 0x9E3779B185EBCA87ULL
3455 #define XXH_PRIME64_2 0xC2B2AE3D27D4EB4FULL
3456 #define XXH_PRIME64_3 0x165667B19E3779F9ULL
3457 #define XXH_PRIME64_4 0x85EBCA77C2B2AE63ULL
3458 #define XXH_PRIME64_5 0x27D4EB2F165667C5ULL
3459
3460 #ifdef XXH_OLD_NAMES
3461 # define PRIME64_1 XXH_PRIME64_1
3462 # define PRIME64_2 XXH_PRIME64_2
3463 # define PRIME64_3 XXH_PRIME64_3
3464 # define PRIME64_4 XXH_PRIME64_4
3465 # define PRIME64_5 XXH_PRIME64_5
3466 #endif
3467
3468
/*!
 * Core XXH64 mixing step: folds one 64-bit lane of input into an accumulator.
 * acc = rotl64(acc + input * PRIME64_2, 31) * PRIME64_1
 */
static xxh_u64 XXH64_round(xxh_u64 acc, xxh_u64 input)
{
    acc += input * XXH_PRIME64_2;
    acc = XXH_rotl64(acc, 31);
    acc *= XXH_PRIME64_1;
#if (defined(__AVX512F__)) && !defined(XXH_ENABLE_AUTOVECTORIZE)
    /*
     * NOTE(review): empty asm barrier on the accumulator, presumably to
     * prevent unwanted AVX-512 auto-vectorization of this scalar round --
     * confirm against upstream xxHash commentary before removing.
     */
    XXH_COMPILER_GUARD(acc);
#endif
    return acc;
}
3493
3494 static xxh_u64 XXH64_mergeRound(xxh_u64 acc, xxh_u64 val)
3495 {
3496 val = XXH64_round(0, val);
3497 acc ^= val;
3498 acc = acc * XXH_PRIME64_1 + XXH_PRIME64_4;
3499 return acc;
3500 }
3501
3502
3503 static xxh_u64 XXH64_avalanche(xxh_u64 hash)
3504 {
3505 hash ^= hash >> 33;
3506 hash *= XXH_PRIME64_2;
3507 hash ^= hash >> 29;
3508 hash *= XXH_PRIME64_3;
3509 hash ^= hash >> 32;
3510 return hash;
3511 }
3512
3513
3514 #define XXH_get64bits(p) XXH_readLE64_align(p, align)
3515
3516
3517
3518
3519
3520 XXH_FORCE_INLINE void
3521 XXH64_initAccs(xxh_u64 *acc, xxh_u64 seed)
3522 {
3523 XXH_ASSERT(acc != NULL);
3524 acc[0] = seed + XXH_PRIME64_1 + XXH_PRIME64_2;
3525 acc[1] = seed + XXH_PRIME64_2;
3526 acc[2] = seed + 0;
3527 acc[3] = seed - XXH_PRIME64_1;
3528 }
3529
3530
3531
3532
3533
3534
3535
/*!
 * Bulk loop: consumes every complete 32-byte stripe of input (8 bytes per
 * lane, 4 lanes). Requires len >= 32; returns the first unconsumed byte.
 */
XXH_FORCE_INLINE const xxh_u8 *
XXH64_consumeLong(
    xxh_u64 *XXH_RESTRICT acc,
    xxh_u8 const *XXH_RESTRICT input,
    size_t len,
    XXH_alignment align
)
{
    const xxh_u8* const bEnd = input + len;
    const xxh_u8* const limit = bEnd - 31;
    XXH_ASSERT(acc != NULL);
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(len >= 32);
    do {
        /* On platforms with pointers narrower than 64 bits, use a rolled
         * loop -- presumably to limit register pressure / code size on
         * 32-bit targets; confirm against upstream rationale. */
        if (sizeof(void *) < sizeof(xxh_u64)) {
            size_t i;
            for (i = 0; i < 4; i++) {
                acc[i] = XXH64_round(acc[i], XXH_get64bits(input));
                input += 8;
            }
        } else {
            /* 64-bit targets: manually unrolled for the common case. */
            acc[0] = XXH64_round(acc[0], XXH_get64bits(input)); input += 8;
            acc[1] = XXH64_round(acc[1], XXH_get64bits(input)); input += 8;
            acc[2] = XXH64_round(acc[2], XXH_get64bits(input)); input += 8;
            acc[3] = XXH64_round(acc[3], XXH_get64bits(input)); input += 8;
        }
    } while (input < limit);

    return input;
}
3567
3568
3569
3570
3571
/*!
 * Combines the four lane accumulators into one 64-bit value:
 * rotated sum, then one merge round per lane.
 */
XXH_FORCE_INLINE XXH_PUREF xxh_u64
XXH64_mergeAccs(const xxh_u64 *acc)
{
    XXH_ASSERT(acc != NULL);
    {
        xxh_u64 h64 = XXH_rotl64(acc[0], 1) + XXH_rotl64(acc[1], 7)
                    + XXH_rotl64(acc[2], 12) + XXH_rotl64(acc[3], 18);

        /* Rolled loop on narrow-pointer targets, mirroring XXH64_consumeLong
         * (presumably a code-size/register-pressure tradeoff -- confirm). */
        if (sizeof(void *) < sizeof(xxh_u64)) {
            size_t i;
            for (i = 0; i < 4; i++) {
                h64 = XXH64_mergeRound(h64, acc[i]);
            }
        } else {
            h64 = XXH64_mergeRound(h64, acc[0]);
            h64 = XXH64_mergeRound(h64, acc[1]);
            h64 = XXH64_mergeRound(h64, acc[2]);
            h64 = XXH64_mergeRound(h64, acc[3]);
        }
        return h64;
    }
}
3594
3595
3596
3597
3598
3599
3600
3601
3602
3603
3604
3605
3606
3607
3608
3609
3610 XXH_STATIC XXH_PUREF xxh_u64
3611 XXH64_finalize(xxh_u64 hash, const xxh_u8* ptr, size_t len, XXH_alignment align)
3612 {
3613 if (ptr==NULL) XXH_ASSERT(len == 0);
3614 len &= 31;
3615 while (len >= 8) {
3616 xxh_u64 const k1 = XXH64_round(0, XXH_get64bits(ptr));
3617 ptr += 8;
3618 hash ^= k1;
3619 hash = XXH_rotl64(hash,27) * XXH_PRIME64_1 + XXH_PRIME64_4;
3620 len -= 8;
3621 }
3622 if (len >= 4) {
3623 hash ^= (xxh_u64)(XXH_get32bits(ptr)) * XXH_PRIME64_1;
3624 ptr += 4;
3625 hash = XXH_rotl64(hash, 23) * XXH_PRIME64_2 + XXH_PRIME64_3;
3626 len -= 4;
3627 }
3628 while (len > 0) {
3629 hash ^= (*ptr++) * XXH_PRIME64_5;
3630 hash = XXH_rotl64(hash, 11) * XXH_PRIME64_1;
3631 --len;
3632 }
3633 return XXH64_avalanche(hash);
3634 }
3635
3636 #ifdef XXH_OLD_NAMES
3637 # define PROCESS1_64 XXH_PROCESS1_64
3638 # define PROCESS4_64 XXH_PROCESS4_64
3639 # define PROCESS8_64 XXH_PROCESS8_64
3640 #else
3641 # undef XXH_PROCESS1_64
3642 # undef XXH_PROCESS4_64
3643 # undef XXH_PROCESS8_64
3644 #endif
3645
3646
3647
3648
3649
3650
3651
3652
3653
3654 XXH_FORCE_INLINE XXH_PUREF xxh_u64
3655 XXH64_endian_align(const xxh_u8* input, size_t len, xxh_u64 seed, XXH_alignment align)
3656 {
3657 xxh_u64 h64;
3658 if (input==NULL) XXH_ASSERT(len == 0);
3659
3660 if (len>=32) {
3661 xxh_u64 acc[4];
3662 XXH64_initAccs(acc, seed);
3663
3664 input = XXH64_consumeLong(acc, input, len, align);
3665
3666 h64 = XXH64_mergeAccs(acc);
3667 } else {
3668 h64 = seed + XXH_PRIME64_5;
3669 }
3670
3671 h64 += (xxh_u64) len;
3672
3673 return XXH64_finalize(h64, input, len, align);
3674 }
3675
3676
3677
/*! One-shot XXH64 of `len` bytes at `input`, with `seed`. */
XXH_PUBLIC_API XXH64_hash_t XXH64 (XXH_NOESCAPE const void* input, size_t len, XXH64_hash_t seed)
{
#if !defined(XXH_NO_STREAM) && XXH_SIZE_OPT >= 2
    /* Size-optimized build: reuse the streaming code path instead of
     * instantiating a second (inlined) hash implementation. */
    XXH64_state_t state;
    XXH64_reset(&state, seed);
    XXH64_update(&state, (const xxh_u8*)input, len);
    return XXH64_digest(&state);
#else
    /* On targets without fast unaligned access, take the aligned fast path
     * when the pointer happens to be 8-byte aligned. */
    if (XXH_FORCE_ALIGN_CHECK) {
        if ((((size_t)input) & 7)==0) {
            return XXH64_endian_align((const xxh_u8*)input, len, seed, XXH_aligned);
    }   }

    return XXH64_endian_align((const xxh_u8*)input, len, seed, XXH_unaligned);

#endif
}
3696
3697
3698 #ifndef XXH_NO_STREAM
3699
3700 XXH_PUBLIC_API XXH64_state_t* XXH64_createState(void)
3701 {
3702 return (XXH64_state_t*)XXH_malloc(sizeof(XXH64_state_t));
3703 }
3704
3705 XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t* statePtr)
3706 {
3707 XXH_free(statePtr);
3708 return XXH_OK;
3709 }
3710
3711
3712 XXH_PUBLIC_API void XXH64_copyState(XXH_NOESCAPE XXH64_state_t* dstState, const XXH64_state_t* srcState)
3713 {
3714 XXH_memcpy(dstState, srcState, sizeof(*dstState));
3715 }
3716
3717
3718 XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH_NOESCAPE XXH64_state_t* statePtr, XXH64_hash_t seed)
3719 {
3720 XXH_ASSERT(statePtr != NULL);
3721 memset(statePtr, 0, sizeof(*statePtr));
3722 XXH64_initAccs(statePtr->acc, seed);
3723 return XXH_OK;
3724 }
3725
3726
/*!
 * Streams `len` bytes into the hash state.
 * Buffers partial 32-byte stripes in state->buffer; full stripes are fed to
 * XXH64_consumeLong(). input==NULL is accepted only with len==0.
 */
XXH_PUBLIC_API XXH_errorcode
XXH64_update (XXH_NOESCAPE XXH64_state_t* state, XXH_NOESCAPE const void* input, size_t len)
{
    if (input==NULL) {
        XXH_ASSERT(len == 0);
        return XXH_OK;
    }

    state->total_len += len;

    XXH_ASSERT(state->bufferedSize <= sizeof(state->buffer));
    /* Fast path: input fits in the internal buffer without filling it. */
    if (len < sizeof(state->buffer) - state->bufferedSize) {
        XXH_memcpy(state->buffer + state->bufferedSize, input, len);
        state->bufferedSize += (XXH32_hash_t)len;
        return XXH_OK;
    }

    { const xxh_u8* xinput = (const xxh_u8*)input;
        const xxh_u8* const bEnd = xinput + len;

        /* Top up and flush any previously buffered bytes first. */
        if (state->bufferedSize) {
            XXH_memcpy(state->buffer + state->bufferedSize, xinput, sizeof(state->buffer) - state->bufferedSize);
            xinput += sizeof(state->buffer) - state->bufferedSize;
            /* Buffer is exactly one stripe; consume it in place (aligned). */
            (void)XXH64_consumeLong(state->acc, state->buffer, sizeof(state->buffer), XXH_aligned);
            state->bufferedSize = 0;
        }

        XXH_ASSERT(xinput <= bEnd);
        /* Consume remaining full stripes directly from the caller's memory. */
        if ((size_t)(bEnd - xinput) >= sizeof(state->buffer)) {
            xinput = XXH64_consumeLong(state->acc, xinput, (size_t)(bEnd - xinput), XXH_unaligned);
        }

        /* Stash the leftover tail (< 32 bytes) for the next update/digest. */
        if (xinput < bEnd) {
            XXH_memcpy(state->buffer, xinput, (size_t)(bEnd-xinput));
            state->bufferedSize = (unsigned)(bEnd-xinput);
        }
    }

    return XXH_OK;
}
3770
3771
3772
3773 XXH_PUBLIC_API XXH64_hash_t XXH64_digest(XXH_NOESCAPE const XXH64_state_t* state)
3774 {
3775 xxh_u64 h64;
3776
3777 if (state->total_len >= 32) {
3778 h64 = XXH64_mergeAccs(state->acc);
3779 } else {
3780 h64 = state->acc[2] + XXH_PRIME64_5;
3781 }
3782
3783 h64 += (xxh_u64) state->total_len;
3784
3785 return XXH64_finalize(h64, state->buffer, (size_t)state->total_len, XXH_aligned);
3786 }
3787 #endif
3788
3789
3790
3791
3792 XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH_NOESCAPE XXH64_canonical_t* dst, XXH64_hash_t hash)
3793 {
3794 XXH_STATIC_ASSERT(sizeof(XXH64_canonical_t) == sizeof(XXH64_hash_t));
3795 if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap64(hash);
3796 XXH_memcpy(dst, &hash, sizeof(*dst));
3797 }
3798
3799
/*! Decodes a canonical (big-endian) representation back into a native hash value. */
XXH_PUBLIC_API XXH64_hash_t XXH64_hashFromCanonical(XXH_NOESCAPE const XXH64_canonical_t* src)
{
    return XXH_readBE64(src);
}
3804
3805 #ifndef XXH_NO_XXH3
3806
3807
3808
3809
3810
3811
3812
3813
3814
3815
3816
3817
3818
3819
3820
3821 #if (defined(__GNUC__) && (__GNUC__ >= 3)) \
3822 || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) \
3823 || defined(__clang__)
3824 # define XXH_likely(x) __builtin_expect(x, 1)
3825 # define XXH_unlikely(x) __builtin_expect(x, 0)
3826 #else
3827 # define XXH_likely(x) (x)
3828 # define XXH_unlikely(x) (x)
3829 #endif
3830
3831 #ifndef XXH_HAS_INCLUDE
3832 # ifdef __has_include
3833
3834
3835
3836
3837 # define XXH_HAS_INCLUDE __has_include
3838 # else
3839 # define XXH_HAS_INCLUDE(x) 0
3840 # endif
3841 #endif
3842
3843 #if defined(__GNUC__) || defined(__clang__)
3844 # if defined(__ARM_FEATURE_SVE)
3845 # include <arm_sve.h>
3846 # endif
3847 # if defined(__ARM_NEON__) || defined(__ARM_NEON) \
3848 || (defined(_M_ARM) && _M_ARM >= 7) \
3849 || defined(_M_ARM64) || defined(_M_ARM64EC) \
3850 || (defined(__wasm_simd128__) && XXH_HAS_INCLUDE(<arm_neon.h>))
3851 # define inline __inline__
3852 # include <arm_neon.h>
3853 # undef inline
3854 # elif defined(__AVX2__)
3855 # include <immintrin.h>
3856 # elif defined(__SSE2__)
3857 # include <emmintrin.h>
3858 # elif defined(__loongarch_sx)
3859 # include <lsxintrin.h>
3860 # endif
3861 #endif
3862
3863 #if defined(_MSC_VER)
3864 # include <intrin.h>
3865 #endif
3866
3867
3868
3869
3870
3871
3872
3873
3874
3875
3876
3877
3878
3879
3880
3881
3882
3883
3884
3885
3886
3887
3888
3889
3890
3891
3892
3893
3894
3895
3896
3897
3898
3899
3900
3901
3902
3903
3904
3905
3906
3907
3908
3909
3910
3911
3912
3913
3914
3915
3916
3917
3918
3919
3920
3921
3922
3923
3924
3925
3926
3927
3928
3929
3930
3931
3932
3933
3934
3935
3936 #if defined(__thumb__) && !defined(__thumb2__) && defined(__ARM_ARCH_ISA_ARM)
3937 # warning "XXH3 is highly inefficient without ARM or Thumb-2."
3938 #endif
3939
3940
3941
3942
3943
3944 #ifdef XXH_DOXYGEN
3945
3946
3947
3948
3949
3950
3951
3952
3953
3954
3955 # define XXH_VECTOR XXH_SCALAR
3956
3957
3958
3959
3960
3961
3962
3963
3964
3965 # define XXH_ACC_ALIGN 8
3966 #endif
3967
3968
3969 #ifndef XXH_DOXYGEN
3970 #endif
3971
3972 #ifndef XXH_VECTOR
3973 # if defined(__ARM_FEATURE_SVE)
3974 # define XXH_VECTOR XXH_SVE
3975 # elif ( \
3976 defined(__ARM_NEON__) || defined(__ARM_NEON) \
3977 || defined(_M_ARM) || defined(_M_ARM64) || defined(_M_ARM64EC) \
3978 || (defined(__wasm_simd128__) && XXH_HAS_INCLUDE(<arm_neon.h>)) \
3979 ) && ( \
3980 defined(_WIN32) || defined(__LITTLE_ENDIAN__) \
3981 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) \
3982 )
3983 # define XXH_VECTOR XXH_NEON
3984 # elif defined(__AVX512F__)
3985 # define XXH_VECTOR XXH_AVX512
3986 # elif defined(__AVX2__)
3987 # define XXH_VECTOR XXH_AVX2
3988 # elif defined(__SSE2__) || defined(_M_AMD64) || defined(_M_X64) || (defined(_M_IX86_FP) && (_M_IX86_FP == 2))
3989 # define XXH_VECTOR XXH_SSE2
3990 # elif (defined(__PPC64__) && defined(__POWER8_VECTOR__)) \
3991 || (defined(__s390x__) && defined(__VEC__)) \
3992 && defined(__GNUC__)
3993 # define XXH_VECTOR XXH_VSX
3994 # elif defined(__loongarch_sx)
3995 # define XXH_VECTOR XXH_LSX
3996 # else
3997 # define XXH_VECTOR XXH_SCALAR
3998 # endif
3999 #endif
4000
4001
4002 #if (XXH_VECTOR == XXH_SVE) && !defined(__ARM_FEATURE_SVE)
4003 # ifdef _MSC_VER
4004 # pragma warning(once : 4606)
4005 # else
4006 # warning "__ARM_FEATURE_SVE isn't supported. Use SCALAR instead."
4007 # endif
4008 # undef XXH_VECTOR
4009 # define XXH_VECTOR XXH_SCALAR
4010 #endif
4011
4012
4013
4014
4015
4016 #ifndef XXH_ACC_ALIGN
4017 # if defined(XXH_X86DISPATCH)
4018 # define XXH_ACC_ALIGN 64
4019 # elif XXH_VECTOR == XXH_SCALAR
4020 # define XXH_ACC_ALIGN 8
4021 # elif XXH_VECTOR == XXH_SSE2
4022 # define XXH_ACC_ALIGN 16
4023 # elif XXH_VECTOR == XXH_AVX2
4024 # define XXH_ACC_ALIGN 32
4025 # elif XXH_VECTOR == XXH_NEON
4026 # define XXH_ACC_ALIGN 16
4027 # elif XXH_VECTOR == XXH_VSX
4028 # define XXH_ACC_ALIGN 16
4029 # elif XXH_VECTOR == XXH_AVX512
4030 # define XXH_ACC_ALIGN 64
4031 # elif XXH_VECTOR == XXH_SVE
4032 # define XXH_ACC_ALIGN 64
4033 # elif XXH_VECTOR == XXH_LSX
4034 # define XXH_ACC_ALIGN 64
4035 # endif
4036 #endif
4037
4038 #if defined(XXH_X86DISPATCH) || XXH_VECTOR == XXH_SSE2 \
4039 || XXH_VECTOR == XXH_AVX2 || XXH_VECTOR == XXH_AVX512
4040 # define XXH_SEC_ALIGN XXH_ACC_ALIGN
4041 #elif XXH_VECTOR == XXH_SVE
4042 # define XXH_SEC_ALIGN XXH_ACC_ALIGN
4043 #else
4044 # define XXH_SEC_ALIGN 8
4045 #endif
4046
4047 #if defined(__GNUC__) || defined(__clang__)
4048 # define XXH_ALIASING __attribute__((__may_alias__))
4049 #else
4050 # define XXH_ALIASING
4051 #endif
4052
4053
4054
4055
4056
4057
4058
4059
4060
4061
4062
4063
4064
4065
4066
4067
4068
4069
4070
4071
4072
4073
4074 #if XXH_VECTOR == XXH_AVX2 \
4075 && defined(__GNUC__) && !defined(__clang__) \
4076 && defined(__OPTIMIZE__) && XXH_SIZE_OPT <= 0
4077 # pragma GCC push_options
4078 # pragma GCC optimize("-O2")
4079 #endif
4080
4081 #if XXH_VECTOR == XXH_NEON
4082
4083
4084
4085
4086
4087
4088
4089
4090 typedef uint64x2_t xxh_aliasing_uint64x2_t XXH_ALIASING;
4091
4092
4093
4094
4095
4096
4097
4098
4099
4100
4101
4102
4103
4104
/*
 * XXH_vld1q_u64: loads 16 bytes from (possibly unaligned) memory into a
 * uint64x2_t NEON register.
 */
#if defined(__aarch64__) && defined(__GNUC__) && !defined(__clang__)
/* GCC aarch64: dereference through a may_alias-qualified type; this keeps
 * the load visible to GCC's aliasing analysis and generates better code
 * than the intrinsic on this compiler. */
XXH_FORCE_INLINE uint64x2_t XXH_vld1q_u64(void const* ptr)
{
    return *(xxh_aliasing_uint64x2_t const *)ptr;
}
#else
/* Generic path: byte-wise load + reinterpret makes no alignment assumption. */
XXH_FORCE_INLINE uint64x2_t XXH_vld1q_u64(void const* ptr)
{
    return vreinterpretq_u64_u8(vld1q_u8((uint8_t const*)ptr));
}
#endif
4116
4117
4118
4119
4120
4121
4122
4123
4124
/*
 * XXH_vmlal_{low,high}_u32: widening 32x32->64 multiply-accumulate of the
 * low (resp. high) 32-bit halves of each lane: acc + (u64)lhs_part * rhs_part.
 */
#if defined(__aarch64__) && defined(__GNUC__) && !defined(__clang__) && __GNUC__ < 11
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_low_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    /* Inline assembly workaround: GCC < 11 generates an extraneous
     * instruction for vmlal_u32(vget_low_u32(...)). */
    __asm__("umlal %0.2d, %1.2s, %2.2s" : "+w" (acc) : "w" (lhs), "w" (rhs));
    return acc;
}
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_high_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    /* vmlal_high_u32 itself is handled correctly even by affected GCC. */
    return vmlal_high_u32(acc, lhs, rhs);
}
#else
/* Portable intrinsic versions for clang, MSVC, and GCC >= 11. */
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_low_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    return vmlal_u32(acc, vget_low_u32(lhs), vget_low_u32(rhs));
}

/* Same as above but operating on the upper two 32-bit lanes. */
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_high_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    return vmlal_u32(acc, vget_high_u32(lhs), vget_high_u32(rhs));
}
#endif
4154
4155
4156
4157
4158
4159
4160
4161
4162
4163
4164
4165
4166
4167
4168
4169
4170
4171
4172
4173
4174
4175
4176
4177
4178
4179
4180
4181
4182
4183
4184
4185
4186
4187
4188
4189
4190
4191
4192
4193 # ifndef XXH3_NEON_LANES
4194 # if (defined(__aarch64__) || defined(__arm64__) || defined(_M_ARM64) || defined(_M_ARM64EC)) \
4195 && !defined(__APPLE__) && XXH_SIZE_OPT <= 0
4196 # define XXH3_NEON_LANES 6
4197 # else
4198 # define XXH3_NEON_LANES XXH_ACC_NB
4199 # endif
4200 # endif
4201 #endif
4202
4203
4204
4205
4206
4207
4208
4209
4210
4211 #if XXH_VECTOR == XXH_VSX
4212
4213
4214
4215
4216
4217
4218
4219
4220 # pragma push_macro("bool")
4221 # pragma push_macro("vector")
4222 # pragma push_macro("pixel")
4223
4224 # undef bool
4225 # undef vector
4226 # undef pixel
4227
4228 # if defined(__s390x__)
4229 # include <s390intrin.h>
4230 # else
4231 # include <altivec.h>
4232 # endif
4233
4234
4235 # pragma pop_macro("pixel")
4236 # pragma pop_macro("vector")
4237 # pragma pop_macro("bool")
4238
4239 typedef __vector unsigned long long xxh_u64x2;
4240 typedef __vector unsigned char xxh_u8x16;
4241 typedef __vector unsigned xxh_u32x4;
4242
4243
4244
4245
4246 typedef xxh_u64x2 xxh_aliasing_u64x2 XXH_ALIASING;
4247
4248 # ifndef XXH_VSX_BE
4249 # if defined(__BIG_ENDIAN__) \
4250 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
4251 # define XXH_VSX_BE 1
4252 # elif defined(__VEC_ELEMENT_REG_ORDER__) && __VEC_ELEMENT_REG_ORDER__ == __ORDER_BIG_ENDIAN__
4253 # warning "-maltivec=be is not recommended. Please use native endianness."
4254 # define XXH_VSX_BE 1
4255 # else
4256 # define XXH_VSX_BE 0
4257 # endif
4258 # endif
4259
4260 # if XXH_VSX_BE
4261 # if defined(__POWER9_VECTOR__) || (defined(__clang__) && defined(__s390x__))
4262 # define XXH_vec_revb vec_revb
4263 # else
4264
4265
4266
/* Byte-swaps each 64-bit lane via a permute; fallback for targets without
 * a native vec_revb (pre-POWER9 big-endian VSX). */
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_revb(xxh_u64x2 val)
{
    /* Permute pattern reversing bytes 0-7 and 8-15 independently. */
    xxh_u8x16 const vByteSwap = { 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01, 0x00,
                                  0x0F, 0x0E, 0x0D, 0x0C, 0x0B, 0x0A, 0x09, 0x08 };
    return vec_perm(val, val, vByteSwap);
}
4273 # endif
4274 # endif
4275
4276
4277
4278
/* Performs an unaligned vector load, byte-swapped to little-endian lane
 * order on big-endian targets so subsequent math matches the LE layout. */
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_loadu(const void *ptr)
{
    xxh_u64x2 ret;
    /* memcpy expresses an unaligned load without UB. */
    XXH_memcpy(&ret, ptr, sizeof(xxh_u64x2));
# if XXH_VSX_BE
    ret = XXH_vec_revb(ret);
# endif
    return ret;
}
4288
4289
4290
4291
4292
4293
4294
4295 # if defined(__s390x__)
4296
4297 # define XXH_vec_mulo vec_mulo
4298 # define XXH_vec_mule vec_mule
4299 # elif defined(__clang__) && XXH_HAS_BUILTIN(__builtin_altivec_vmuleuw) && !defined(__ibmxl__)
4300
4301
4302 # define XXH_vec_mulo __builtin_altivec_vmulouw
4303 # define XXH_vec_mule __builtin_altivec_vmuleuw
4304 # else
4305
4306
/* Raw inline-assembly fallback for the odd/even 32x32->64 widening
 * multiplies (vmulouw / vmuleuw) when no intrinsic is available. */
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mulo(xxh_u32x4 a, xxh_u32x4 b)
{
    xxh_u64x2 result;
    __asm__("vmulouw %0, %1, %2" : "=v" (result) : "v" (a), "v" (b));
    return result;
}
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mule(xxh_u32x4 a, xxh_u32x4 b)
{
    xxh_u64x2 result;
    __asm__("vmuleuw %0, %1, %2" : "=v" (result) : "v" (a), "v" (b));
    return result;
}
4319 # endif
4320 #endif
4321
#if XXH_VECTOR == XXH_SVE
/*
 * ACCRND: one SVE accumulation round. Expects `mask` (predicate), `xinput`,
 * `xsecret` and `kSwap` in the caller's scope. XORs input with secret,
 * multiplies the 32-bit halves of that mix, and adds in the lane-swapped
 * input plus the running accumulator.
 */
#define ACCRND(acc, offset) \
do { \
    svuint64_t input_vec = svld1_u64(mask, xinput + offset); \
    svuint64_t secret_vec = svld1_u64(mask, xsecret + offset); \
    svuint64_t mixed = sveor_u64_x(mask, secret_vec, input_vec); \
    svuint64_t swapped = svtbl_u64(input_vec, kSwap); \
    svuint64_t mixed_lo = svextw_u64_x(mask, mixed); \
    svuint64_t mixed_hi = svlsr_n_u64_x(mask, mixed, 32); \
    svuint64_t mul = svmad_u64_x(mask, mixed_lo, mixed_hi, swapped); \
    acc = svadd_u64_x(mask, acc, mul); \
} while (0)
#endif
4335
4336
4337
4338 #if defined(XXH_NO_PREFETCH)
4339 # define XXH_PREFETCH(ptr) (void)(ptr)
4340 #else
4341 # if XXH_SIZE_OPT >= 1
4342 # define XXH_PREFETCH(ptr) (void)(ptr)
4343 # elif defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))
4344 # include <mmintrin.h> /* https://msdn.microsoft.com/fr-fr/library/84szxsww(v=vs.90).aspx */
4345 # define XXH_PREFETCH(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T0)
4346 # elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) )
4347 # define XXH_PREFETCH(ptr) __builtin_prefetch((ptr), 0 , 3 )
4348 # else
4349 # define XXH_PREFETCH(ptr) (void)(ptr)
4350 # endif
4351 #endif
4352
4353
4354
4355
4356
4357
4358 #define XXH_SECRET_DEFAULT_SIZE 192
4359
4360 #if (XXH_SECRET_DEFAULT_SIZE < XXH3_SECRET_SIZE_MIN)
4361 # error "default keyset is not large enough"
4362 #endif
4363
4364
/* The default XXH3 "secret": 192 pseudorandom bytes used to key the mixers.
 * 64-byte aligned so vectorized kernels can use aligned loads. */
XXH_ALIGN(64) static const xxh_u8 XXH3_kSecret[XXH_SECRET_DEFAULT_SIZE] = {
    0xb8, 0xfe, 0x6c, 0x39, 0x23, 0xa4, 0x4b, 0xbe, 0x7c, 0x01, 0x81, 0x2c, 0xf7, 0x21, 0xad, 0x1c,
    0xde, 0xd4, 0x6d, 0xe9, 0x83, 0x90, 0x97, 0xdb, 0x72, 0x40, 0xa4, 0xa4, 0xb7, 0xb3, 0x67, 0x1f,
    0xcb, 0x79, 0xe6, 0x4e, 0xcc, 0xc0, 0xe5, 0x78, 0x82, 0x5a, 0xd0, 0x7d, 0xcc, 0xff, 0x72, 0x21,
    0xb8, 0x08, 0x46, 0x74, 0xf7, 0x43, 0x24, 0x8e, 0xe0, 0x35, 0x90, 0xe6, 0x81, 0x3a, 0x26, 0x4c,
    0x3c, 0x28, 0x52, 0xbb, 0x91, 0xc3, 0x00, 0xcb, 0x88, 0xd0, 0x65, 0x8b, 0x1b, 0x53, 0x2e, 0xa3,
    0x71, 0x64, 0x48, 0x97, 0xa2, 0x0d, 0xf9, 0x4e, 0x38, 0x19, 0xef, 0x46, 0xa9, 0xde, 0xac, 0xd8,
    0xa8, 0xfa, 0x76, 0x3f, 0xe3, 0x9c, 0x34, 0x3f, 0xf9, 0xdc, 0xbb, 0xc7, 0xc7, 0x0b, 0x4f, 0x1d,
    0x8a, 0x51, 0xe0, 0x4b, 0xcd, 0xb4, 0x59, 0x31, 0xc8, 0x9f, 0x7e, 0xc9, 0xd9, 0x78, 0x73, 0x64,
    0xea, 0xc5, 0xac, 0x83, 0x34, 0xd3, 0xeb, 0xc3, 0xc5, 0x81, 0xa0, 0xff, 0xfa, 0x13, 0x63, 0xeb,
    0x17, 0x0d, 0xdd, 0x51, 0xb7, 0xf0, 0xda, 0x49, 0xd3, 0x16, 0x55, 0x26, 0x29, 0xd4, 0x68, 0x9e,
    0x2b, 0x16, 0xbe, 0x58, 0x7d, 0x47, 0xa1, 0xfc, 0x8f, 0xf8, 0xb8, 0xd1, 0x7a, 0xd0, 0x31, 0xce,
    0x45, 0xcb, 0x3a, 0x8f, 0x95, 0x16, 0x04, 0x28, 0xaf, 0xd7, 0xfb, 0xca, 0xbb, 0x4b, 0x40, 0x7e,
};
4379
static const xxh_u64 PRIME_MX1 = 0x165667919E3779F9ULL;  /* mixing constant used by XXH3_avalanche */
static const xxh_u64 PRIME_MX2 = 0x9FB21C651E98DF25ULL;  /* mixing constant used by XXH3_rrmxmx */
4382
4383 #ifdef XXH_OLD_NAMES
4384 # define kSecret XXH3_kSecret
4385 #endif
4386
4387 #ifdef XXH_DOXYGEN
4388
4389
4390
4391
4392
4393
4394
4395
4396
4397
4398
4399
4400
4401
4402
4403
/*!
 * @brief 32x32 -> 64 unsigned multiply (documentation/reference version).
 * Only the low 32 bits of each operand participate in the product.
 */
XXH_FORCE_INLINE xxh_u64
XXH_mult32to64(xxh_u64 x, xxh_u64 y)
{
    return (x & 0xFFFFFFFF) * (y & 0xFFFFFFFF);
}
4409 #elif defined(_MSC_VER) && defined(_M_IX86)
4410 # define XXH_mult32to64(x, y) __emulu((unsigned)(x), (unsigned)(y))
4411 #else
4412
4413
4414
4415
4416
4417
4418
4419 # define XXH_mult32to64(x, y) ((xxh_u64)(xxh_u32)(x) * (xxh_u64)(xxh_u32)(y))
4420 #endif
4421
4422
4423
4424
4425
4426
4427
4428
4429
4430
/*!
 * @brief Full 64x64 -> 128-bit unsigned multiply.
 *
 * Uses a native 128-bit type or compiler intrinsic where available,
 * otherwise falls back to portable long multiplication.
 *
 * @param lhs, rhs The 64-bit factors.
 * @return The 128-bit product as {low64, high64}.
 */
static XXH128_hash_t
XXH_mult64to128(xxh_u64 lhs, xxh_u64 rhs)
{
    /* GCC/Clang with __int128 support (excluding wasm), or an MSVC-style
     * compiler advertising >= 128-bit integral support. */
#if (defined(__GNUC__) || defined(__clang__)) && !defined(__wasm__) \
    && defined(__SIZEOF_INT128__) \
    || (defined(_INTEGRAL_MAX_BITS) && _INTEGRAL_MAX_BITS >= 128)
    /* Let the compiler lower the 128-bit multiply optimally. */
    __uint128_t const product = (__uint128_t)lhs * (__uint128_t)rhs;
    XXH128_hash_t r128;
    r128.low64  = (xxh_u64)(product);
    r128.high64 = (xxh_u64)(product >> 64);
    return r128;

    /* x64/IA64 MSVC-style: _umul128 compiles to a single MUL. */
#elif (defined(_M_X64) || defined(_M_IA64)) && !defined(_M_ARM64EC)

    /* NOTE(review): pragma guarded by #ifndef _MSC_VER looks inverted
     * (pragma intrinsic is an MSVC pragma) -- confirm against upstream. */
#ifndef _MSC_VER
#   pragma intrinsic(_umul128)
#endif
    xxh_u64 product_high;
    xxh_u64 const product_low = _umul128(lhs, rhs, &product_high);
    XXH128_hash_t r128;
    r128.low64  = product_low;
    r128.high64 = product_high;
    return r128;

    /* ARM64 MSVC: low half is a plain multiply, high half via __umulh. */
#elif defined(_M_ARM64) || defined(_M_ARM64EC)

#ifndef _MSC_VER
#   pragma intrinsic(__umulh)
#endif
    XXH128_hash_t r128;
    r128.low64  = lhs * rhs;
    r128.high64 = __umulh(lhs, rhs);
    return r128;

#else
    /*
     * Portable schoolbook long multiplication: split each operand into
     * 32-bit halves, form the four partial products, then combine with
     * carry propagation through `cross`.
     */
    xxh_u64 const lo_lo = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs & 0xFFFFFFFF);
    xxh_u64 const hi_lo = XXH_mult32to64(lhs >> 32, rhs & 0xFFFFFFFF);
    xxh_u64 const lo_hi = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs >> 32);
    xxh_u64 const hi_hi = XXH_mult32to64(lhs >> 32, rhs >> 32);

    /* `cross` gathers the middle partial products plus the low carry. */
    xxh_u64 const cross = (lo_lo >> 32) + (hi_lo & 0xFFFFFFFF) + lo_hi;
    xxh_u64 const upper = (hi_lo >> 32) + (cross >> 32)        + hi_hi;
    xxh_u64 const lower = (cross << 32) | (lo_lo & 0xFFFFFFFF);

    XXH128_hash_t r128;
    r128.low64  = lower;
    r128.high64 = upper;
    return r128;
#endif
}
4554
4555
4556
4557
4558
4559
4560
4561
4562
4563
4564
4565 static xxh_u64
4566 XXH3_mul128_fold64(xxh_u64 lhs, xxh_u64 rhs)
4567 {
4568 XXH128_hash_t product = XXH_mult64to128(lhs, rhs);
4569 return product.low64 ^ product.high64;
4570 }
4571
4572
4573 XXH_FORCE_INLINE XXH_CONSTF xxh_u64 XXH_xorshift64(xxh_u64 v64, int shift)
4574 {
4575 XXH_ASSERT(0 <= shift && shift < 64);
4576 return v64 ^ (v64 >> shift);
4577 }
4578
4579
4580
4581
4582
4583 static XXH64_hash_t XXH3_avalanche(xxh_u64 h64)
4584 {
4585 h64 = XXH_xorshift64(h64, 37);
4586 h64 *= PRIME_MX1;
4587 h64 = XXH_xorshift64(h64, 32);
4588 return h64;
4589 }
4590
4591
4592
4593
4594
4595
4596 static XXH64_hash_t XXH3_rrmxmx(xxh_u64 h64, xxh_u64 len)
4597 {
4598
4599 h64 ^= XXH_rotl64(h64, 49) ^ XXH_rotl64(h64, 24);
4600 h64 *= PRIME_MX2;
4601 h64 ^= (h64 >> 35) + len ;
4602 h64 *= PRIME_MX2;
4603 return XXH_xorshift64(h64, 28);
4604 }
4605
4606
4607
4608
4609
4610
4611
4612
4613
4614
4615
4616
4617
4618
4619
4620
4621
4622
4623
4624
4625
4626
4627
4628
4629
4630
4631
4632
4633
4634
4635
4636
4637
4638
4639
4640 XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
4641 XXH3_len_1to3_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
4642 {
4643 XXH_ASSERT(input != NULL);
4644 XXH_ASSERT(1 <= len && len <= 3);
4645 XXH_ASSERT(secret != NULL);
4646
4647
4648
4649
4650
4651 { xxh_u8 const c1 = input[0];
4652 xxh_u8 const c2 = input[len >> 1];
4653 xxh_u8 const c3 = input[len - 1];
4654 xxh_u32 const combined = ((xxh_u32)c1 << 16) | ((xxh_u32)c2 << 24)
4655 | ((xxh_u32)c3 << 0) | ((xxh_u32)len << 8);
4656 xxh_u64 const bitflip = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
4657 xxh_u64 const keyed = (xxh_u64)combined ^ bitflip;
4658 return XXH64_avalanche(keyed);
4659 }
4660 }
4661
4662 XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
4663 XXH3_len_4to8_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
4664 {
4665 XXH_ASSERT(input != NULL);
4666 XXH_ASSERT(secret != NULL);
4667 XXH_ASSERT(4 <= len && len <= 8);
4668 seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
4669 { xxh_u32 const input1 = XXH_readLE32(input);
4670 xxh_u32 const input2 = XXH_readLE32(input + len - 4);
4671 xxh_u64 const bitflip = (XXH_readLE64(secret+8) ^ XXH_readLE64(secret+16)) - seed;
4672 xxh_u64 const input64 = input2 + (((xxh_u64)input1) << 32);
4673 xxh_u64 const keyed = input64 ^ bitflip;
4674 return XXH3_rrmxmx(keyed, len);
4675 }
4676 }
4677
4678 XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
4679 XXH3_len_9to16_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
4680 {
4681 XXH_ASSERT(input != NULL);
4682 XXH_ASSERT(secret != NULL);
4683 XXH_ASSERT(9 <= len && len <= 16);
4684 { xxh_u64 const bitflip1 = (XXH_readLE64(secret+24) ^ XXH_readLE64(secret+32)) + seed;
4685 xxh_u64 const bitflip2 = (XXH_readLE64(secret+40) ^ XXH_readLE64(secret+48)) - seed;
4686 xxh_u64 const input_lo = XXH_readLE64(input) ^ bitflip1;
4687 xxh_u64 const input_hi = XXH_readLE64(input + len - 8) ^ bitflip2;
4688 xxh_u64 const acc = len
4689 + XXH_swap64(input_lo) + input_hi
4690 + XXH3_mul128_fold64(input_lo, input_hi);
4691 return XXH3_avalanche(acc);
4692 }
4693 }
4694
4695 XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
4696 XXH3_len_0to16_64b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
4697 {
4698 XXH_ASSERT(len <= 16);
4699 { if (XXH_likely(len > 8)) return XXH3_len_9to16_64b(input, len, secret, seed);
4700 if (XXH_likely(len >= 4)) return XXH3_len_4to8_64b(input, len, secret, seed);
4701 if (len) return XXH3_len_1to3_64b(input, len, secret, seed);
4702 return XXH64_avalanche(seed ^ (XXH_readLE64(secret+56) ^ XXH_readLE64(secret+64)));
4703 }
4704 }
4705
4706
4707
4708
4709
4710
4711
4712
4713
4714
4715
4716
4717
4718
4719
4720
4721
4722
4723
4724
4725
4726
4727
4728
4729
4730
4731
/*!
 * @brief Mixes 16 input bytes with 16 secret bytes and the seed into a
 * single 64-bit value via a folded 128-bit multiply. Core step of the
 * mid-size (17-240 byte) paths.
 */
XXH_FORCE_INLINE xxh_u64 XXH3_mix16B(const xxh_u8* XXH_RESTRICT input,
                                     const xxh_u8* XXH_RESTRICT secret, xxh_u64 seed64)
{
#if defined(__GNUC__) && !defined(__clang__) \
  && defined(__i386__) && defined(__SSE2__)  \
  && !defined(XXH_ENABLE_AUTOVECTORIZE)
    /* GCC i386/SSE2: hide seed64 from the optimizer to prevent a harmful
     * autovectorization of the surrounding loops. */
    XXH_COMPILER_GUARD(seed64);
#endif
    {   xxh_u64 const input_lo = XXH_readLE64(input);
        xxh_u64 const input_hi = XXH_readLE64(input+8);
        return XXH3_mul128_fold64(
            input_lo ^ (XXH_readLE64(secret)   + seed64),
            input_hi ^ (XXH_readLE64(secret+8) - seed64)
        );
    }
}
4763
4764
/*!
 * @brief XXH3-64 kernel for inputs of 17 to 128 bytes.
 *
 * Mixes pairs of 16-byte chunks from both ends of the input, working
 * inward, each pair keyed with a fresh 32-byte slice of the secret.
 */
XXH_FORCE_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_17to128_64b(const xxh_u8* XXH_RESTRICT input, size_t len,
                     const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                     XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(16 < len && len <= 128);

    {   xxh_u64 acc = len * XXH_PRIME64_1;
#if XXH_SIZE_OPT >= 1
        /* Size-optimized build: rolled loop over the chunk pairs. */
        unsigned int i = (unsigned int)(len - 1) / 32;
        do {
            acc += XXH3_mix16B(input+16 * i, secret+32*i, seed);
            acc += XXH3_mix16B(input+len-16*(i+1), secret+32*i+16, seed);
        } while (i-- != 0);
#else
        /* Unrolled: outermost pair is mixed last (reverse of loop above,
         * but addition is commutative so the result is identical). */
        if (len > 32) {
            if (len > 64) {
                if (len > 96) {
                    acc += XXH3_mix16B(input+48, secret+96, seed);
                    acc += XXH3_mix16B(input+len-64, secret+112, seed);
                }
                acc += XXH3_mix16B(input+32, secret+64, seed);
                acc += XXH3_mix16B(input+len-48, secret+80, seed);
            }
            acc += XXH3_mix16B(input+16, secret+32, seed);
            acc += XXH3_mix16B(input+len-32, secret+48, seed);
        }
        acc += XXH3_mix16B(input+0, secret+0, seed);
        acc += XXH3_mix16B(input+len-16, secret+16, seed);
#endif
        return XXH3_avalanche(acc);
    }
}
4800
/*!
 * @brief XXH3-64 kernel for inputs of 129 to XXH3_MIDSIZE_MAX (240) bytes.
 *
 * First 128 bytes are mixed with the first 128 secret bytes; remaining
 * full 16-byte rounds reuse the secret at a small offset; the final 16
 * bytes are always mixed with a fixed tail slice of the secret.
 */
XXH_NO_INLINE XXH_PUREF XXH64_hash_t
XXH3_len_129to240_64b(const xxh_u8* XXH_RESTRICT input, size_t len,
                      const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                      XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(128 < len && len <= XXH3_MIDSIZE_MAX);

    #define XXH3_MIDSIZE_STARTOFFSET 3
    #define XXH3_MIDSIZE_LASTOFFSET 17

    {   xxh_u64 acc = len * XXH_PRIME64_1;
        xxh_u64 acc_end;
        unsigned int const nbRounds = (unsigned int)len / 16;
        unsigned int i;
        XXH_ASSERT(128 < len && len <= XXH3_MIDSIZE_MAX);
        /* First 8 rounds cover bytes 0..127. */
        for (i=0; i<8; i++) {
            acc += XXH3_mix16B(input+(16*i), secret+(16*i), seed);
        }
        /* Last 16 bytes always keyed by a fixed secret offset. */
        acc_end = XXH3_mix16B(input + len - 16, secret + XXH3_SECRET_SIZE_MIN - XXH3_MIDSIZE_LASTOFFSET, seed);
        XXH_ASSERT(nbRounds >= 8);
        acc = XXH3_avalanche(acc);
#if defined(__clang__) \
    && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
    && !defined(XXH_ENABLE_AUTOVECTORIZE)
        /* clang/NEON: autovectorizing this loop degrades performance. */
        #pragma clang loop vectorize(disable)
#endif
        for (i=8 ; i < nbRounds; i++) {
            /* Guard keeps the compiler from reassociating across rounds. */
            XXH_COMPILER_GUARD(acc);
            acc_end += XXH3_mix16B(input+(16*i), secret+(16*(i-8)) + XXH3_MIDSIZE_STARTOFFSET, seed);
        }
        return XXH3_avalanche(acc + acc_end);
    }
}
4859
4860
4861
4862
4863 #define XXH_STRIPE_LEN 64
4864 #define XXH_SECRET_CONSUME_RATE 8
4865 #define XXH_ACC_NB (XXH_STRIPE_LEN / sizeof(xxh_u64))
4866
4867 #ifdef XXH_OLD_NAMES
4868 # define STRIPE_LEN XXH_STRIPE_LEN
4869 # define ACC_NB XXH_ACC_NB
4870 #endif
4871
4872 #ifndef XXH_PREFETCH_DIST
4873 # ifdef __clang__
4874 # define XXH_PREFETCH_DIST 320
4875 # else
4876 # if (XXH_VECTOR == XXH_AVX512)
4877 # define XXH_PREFETCH_DIST 512
4878 # else
4879 # define XXH_PREFETCH_DIST 384
4880 # endif
4881 # endif
4882 #endif
4883
4884
4885
4886
4887
4888
4889
4890
4891
4892
4893
/*
 * XXH3_ACCUMULATE_TEMPLATE(name): instantiates XXH3_accumulate_<name>,
 * a loop that feeds nbStripes 64-byte stripes to the matching
 * XXH3_accumulate_512_<name> kernel, prefetching ahead and advancing
 * the secret by XXH_SECRET_CONSUME_RATE bytes per stripe.
 */
#define XXH3_ACCUMULATE_TEMPLATE(name)                      \
void                                                        \
XXH3_accumulate_##name(xxh_u64* XXH_RESTRICT acc,           \
                       const xxh_u8* XXH_RESTRICT input,    \
                       const xxh_u8* XXH_RESTRICT secret,   \
                       size_t nbStripes)                    \
{                                                           \
    size_t n;                                               \
    for (n = 0; n < nbStripes; n++ ) {                      \
        const xxh_u8* const in = input + n*XXH_STRIPE_LEN;  \
        XXH_PREFETCH(in + XXH_PREFETCH_DIST);               \
        XXH3_accumulate_512_##name(                         \
                 acc,                                       \
                 in,                                        \
                 secret + n*XXH_SECRET_CONSUME_RATE);       \
    }                                                       \
}
4911
4912
4913 XXH_FORCE_INLINE void XXH_writeLE64(void* dst, xxh_u64 v64)
4914 {
4915 if (!XXH_CPU_LITTLE_ENDIAN) v64 = XXH_swap64(v64);
4916 XXH_memcpy(dst, &v64, sizeof(v64));
4917 }
4918
4919
4920
4921
4922
4923
4924 #if !defined (__VMS) \
4925 && (defined (__cplusplus) \
4926 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
4927 typedef int64_t xxh_i64;
4928 #else
4929
4930 typedef long long xxh_i64;
4931 #endif
4932
4933
4934
4935
4936
4937
4938
4939
4940
4941
4942
4943
4944
4945
4946
4947
4948
4949
4950
4951
4952
4953
4954
4955
4956
4957 #if (XXH_VECTOR == XXH_AVX512) \
4958 || (defined(XXH_DISPATCH_AVX512) && XXH_DISPATCH_AVX512 != 0)
4959
4960 #ifndef XXH_TARGET_AVX512
4961 # define XXH_TARGET_AVX512
4962 #endif
4963
/*!
 * @brief Processes one 64-byte stripe with a single AVX-512 register:
 * acc += swap64x2(data) + lo32(data^key) * hi32(data^key).
 * Requires 64-byte aligned @p acc.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX512 void
XXH3_accumulate_512_avx512(void* XXH_RESTRICT acc,
                     const void* XXH_RESTRICT input,
                     const void* XXH_RESTRICT secret)
{
    __m512i* const xacc = (__m512i *) acc;
    XXH_ASSERT((((size_t)acc) & 63) == 0);
    XXH_STATIC_ASSERT(XXH_STRIPE_LEN == sizeof(__m512i));

    {
        /* data_vec = input[0:64], key_vec = secret[0:64] (unaligned loads). */
        __m512i const data_vec = _mm512_loadu_si512 (input);
        __m512i const key_vec = _mm512_loadu_si512 (secret);
        /* data_key = data ^ key. */
        __m512i const data_key = _mm512_xor_si512 (data_vec, key_vec);
        /* 32x32->64 multiply of low and high halves of each 64-bit lane. */
        __m512i const data_key_lo = _mm512_srli_epi64 (data_key, 32);
        __m512i const product = _mm512_mul_epu32 (data_key, data_key_lo);
        /* data_swap swaps the two 32-bit halves of each 64-bit lane. */
        __m512i const data_swap = _mm512_shuffle_epi32(data_vec, (_MM_PERM_ENUM)_MM_SHUFFLE(1, 0, 3, 2));
        __m512i const sum = _mm512_add_epi64(*xacc, data_swap);
        /* acc += product + swapped data. */
        *xacc = _mm512_add_epi64(product, sum);
    }
}
XXH_FORCE_INLINE XXH_TARGET_AVX512 XXH3_ACCUMULATE_TEMPLATE(avx512)
4992
4993
4994
4995
4996
4997
4998
4999
5000
5001
5002
5003
5004
5005
5006
5007
5008
5009
5010
5011
5012
5013
/*!
 * @brief AVX-512 accumulator scramble: acc = (acc ^ (acc>>47) ^ key) * PRIME32_1,
 * computed per 64-bit lane. Requires 64-byte aligned @p acc.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX512 void
XXH3_scrambleAcc_avx512(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 63) == 0);
    XXH_STATIC_ASSERT(XXH_STRIPE_LEN == sizeof(__m512i));
    {   __m512i* const xacc = (__m512i*) acc;
        const __m512i prime32 = _mm512_set1_epi32((int)XXH_PRIME32_1);

        /* xorshift then xor with secret, fused by ternarylogic (0x96 = A^B^C). */
        __m512i const acc_vec = *xacc;
        __m512i const shifted = _mm512_srli_epi64 (acc_vec, 47);
        __m512i const key_vec = _mm512_loadu_si512 (secret);
        __m512i const data_key = _mm512_ternarylogic_epi32(key_vec, acc_vec, shifted, 0x96 );

        /* Emulated full 64x32 multiply by PRIME32_1 via two 32x32 products. */
        __m512i const data_key_hi = _mm512_srli_epi64 (data_key, 32);
        __m512i const prod_lo = _mm512_mul_epu32 (data_key, prime32);
        __m512i const prod_hi = _mm512_mul_epu32 (data_key_hi, prime32);
        *xacc = _mm512_add_epi64(prod_lo, _mm512_slli_epi64(prod_hi, 32));
    }
}
5036
/*!
 * @brief Derives a custom secret from XXH3_kSecret using AVX-512:
 * adds +seed64 to even 64-bit lanes and -seed64 to odd lanes.
 * @p customSecret must be 64-byte aligned.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX512 void
XXH3_initCustomSecret_avx512(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 63) == 0);
    XXH_STATIC_ASSERT(XXH_SEC_ALIGN == 64);
    XXH_ASSERT(((size_t)customSecret & 63) == 0);
    (void)(&XXH_writeLE64);   /* suppress unused-function warning */
    {   int const nbRounds = XXH_SECRET_DEFAULT_SIZE / sizeof(__m512i);
        __m512i const seed_pos = _mm512_set1_epi64((xxh_i64)seed64);
        /* mask 0xAA negates the odd lanes: {+s, -s, +s, -s, ...}. */
        __m512i const seed     = _mm512_mask_sub_epi64(seed_pos, 0xAA, _mm512_set1_epi8(0), seed_pos);

        const __m512i* const src  = (const __m512i*) ((const void*) XXH3_kSecret);
              __m512i* const dest = (      __m512i*) customSecret;
        int i;
        XXH_ASSERT(((size_t)src & 63) == 0);
        XXH_ASSERT(((size_t)dest & 63) == 0);
        for (i=0; i < nbRounds; ++i) {
            dest[i] = _mm512_add_epi64(_mm512_load_si512(src + i), seed);
    }   }
}
5057
5058 #endif
5059
5060 #if (XXH_VECTOR == XXH_AVX2) \
5061 || (defined(XXH_DISPATCH_AVX2) && XXH_DISPATCH_AVX2 != 0)
5062
5063 #ifndef XXH_TARGET_AVX2
5064 # define XXH_TARGET_AVX2
5065 #endif
5066
/*!
 * @brief Processes one 64-byte stripe with two AVX2 registers:
 * per lane, acc += swap64(data) + lo32(data^key) * hi32(data^key).
 * Requires 32-byte aligned @p acc.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX2 void
XXH3_accumulate_512_avx2( void* XXH_RESTRICT acc,
                    const void* XXH_RESTRICT input,
                    const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 31) == 0);
    {   __m256i* const xacc    =       (__m256i *) acc;
        /* input/secret may be unaligned: loaded with loadu below. */
        const         __m256i* const xinput  = (const __m256i *) input;
        const         __m256i* const xsecret = (const __m256i *) secret;

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN/sizeof(__m256i); i++) {
            /* data_vec = input chunk; key_vec = secret chunk. */
            __m256i const data_vec    = _mm256_loadu_si256    (xinput+i);
            __m256i const key_vec     = _mm256_loadu_si256   (xsecret+i);
            /* data_key = data ^ key. */
            __m256i const data_key    = _mm256_xor_si256     (data_vec, key_vec);
            /* 32x32->64 product of the two halves of each 64-bit lane. */
            __m256i const data_key_lo = _mm256_srli_epi64 (data_key, 32);
            __m256i const product     = _mm256_mul_epu32     (data_key, data_key_lo);
            /* Swap 32-bit halves of each 64-bit lane and accumulate. */
            __m256i const data_swap = _mm256_shuffle_epi32(data_vec, _MM_SHUFFLE(1, 0, 3, 2));
            __m256i const sum       = _mm256_add_epi64(xacc[i], data_swap);
            xacc[i] = _mm256_add_epi64(product, sum);
    }   }
}
XXH_FORCE_INLINE XXH_TARGET_AVX2 XXH3_ACCUMULATE_TEMPLATE(avx2)
5101
/*!
 * @brief AVX2 accumulator scramble: acc = (acc ^ (acc>>47) ^ key) * PRIME32_1
 * per 64-bit lane. Requires 32-byte aligned @p acc.
 */
XXH_FORCE_INLINE XXH_TARGET_AVX2 void
XXH3_scrambleAcc_avx2(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 31) == 0);
    {   __m256i* const xacc = (__m256i*) acc;
        /* secret may be unaligned. */
        const         __m256i* const xsecret = (const __m256i *) secret;
        const __m256i prime32 = _mm256_set1_epi32((int)XXH_PRIME32_1);

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN/sizeof(__m256i); i++) {
            /* xorshift: acc ^ (acc >> 47). */
            __m256i const acc_vec     = xacc[i];
            __m256i const shifted     = _mm256_srli_epi64    (acc_vec, 47);
            __m256i const data_vec    = _mm256_xor_si256     (acc_vec, shifted);
            /* xor with secret. */
            __m256i const key_vec     = _mm256_loadu_si256   (xsecret+i);
            __m256i const data_key    = _mm256_xor_si256     (data_vec, key_vec);
            /* Emulated 64x32 multiply by PRIME32_1 via two 32x32 products. */
            __m256i const data_key_hi = _mm256_srli_epi64 (data_key, 32);
            __m256i const prod_lo     = _mm256_mul_epu32     (data_key, prime32);
            __m256i const prod_hi     = _mm256_mul_epu32     (data_key_hi, prime32);
            xacc[i] = _mm256_add_epi64(prod_lo, _mm256_slli_epi64(prod_hi, 32));
        }
    }
}
5130
/*!
 * @brief Derives a custom secret from XXH3_kSecret using AVX2:
 * adds +seed64/-seed64 alternately per 64-bit lane. Loop fully unrolled
 * into 6 statements (192 bytes / 32 bytes per register).
 */
XXH_FORCE_INLINE XXH_TARGET_AVX2 void XXH3_initCustomSecret_avx2(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 31) == 0);
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE / sizeof(__m256i)) == 6);
    XXH_STATIC_ASSERT(XXH_SEC_ALIGN <= 64);
    (void)(&XXH_writeLE64);   /* suppress unused-function warning */
    XXH_PREFETCH(customSecret);
    {   /* Lane pattern {+s, -s, +s, -s}. */
        __m256i const seed = _mm256_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64, (xxh_i64)(0U - seed64), (xxh_i64)seed64);

        const __m256i* const src  = (const __m256i*) ((const void*) XXH3_kSecret);
              __m256i*       dest = (      __m256i*) customSecret;

#       if defined(__GNUC__) || defined(__clang__)
        /* Opaque barrier: keeps GCC/clang from pessimizing the stores. */
        XXH_COMPILER_GUARD(dest);
#       endif
        XXH_ASSERT(((size_t)src & 31) == 0);
        XXH_ASSERT(((size_t)dest & 31) == 0);

        /* dest[i] = src[i] + {+seed, -seed} pattern. */
        dest[0] = _mm256_add_epi64(_mm256_load_si256(src+0), seed);
        dest[1] = _mm256_add_epi64(_mm256_load_si256(src+1), seed);
        dest[2] = _mm256_add_epi64(_mm256_load_si256(src+2), seed);
        dest[3] = _mm256_add_epi64(_mm256_load_si256(src+3), seed);
        dest[4] = _mm256_add_epi64(_mm256_load_si256(src+4), seed);
        dest[5] = _mm256_add_epi64(_mm256_load_si256(src+5), seed);
    }
}
5163
5164 #endif
5165
5166
5167 #if (XXH_VECTOR == XXH_SSE2) || defined(XXH_X86DISPATCH)
5168
5169 #ifndef XXH_TARGET_SSE2
5170 # define XXH_TARGET_SSE2
5171 #endif
5172
/*!
 * @brief Processes one 64-byte stripe with four SSE2 registers:
 * per lane, acc += swap64(data) + lo32(data^key) * hi32(data^key).
 * Requires 16-byte aligned @p acc.
 */
XXH_FORCE_INLINE XXH_TARGET_SSE2 void
XXH3_accumulate_512_sse2( void* XXH_RESTRICT acc,
                    const void* XXH_RESTRICT input,
                    const void* XXH_RESTRICT secret)
{
    /* SSE2 is just a half-scale AVX2, so the description above carries over. */
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    {   __m128i* const xacc    =       (__m128i *) acc;
        /* input/secret may be unaligned: loaded with loadu below. */
        const         __m128i* const xinput  = (const __m128i *) input;
        const         __m128i* const xsecret = (const __m128i *) secret;

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN/sizeof(__m128i); i++) {
            /* data_vec = input chunk; key_vec = secret chunk. */
            __m128i const data_vec    = _mm_loadu_si128   (xinput+i);
            __m128i const key_vec     = _mm_loadu_si128   (xsecret+i);
            /* data_key = data ^ key. */
            __m128i const data_key    = _mm_xor_si128     (data_vec, key_vec);
            /* No 64-bit shift in SSE2: shuffle brings high halves down. */
            __m128i const data_key_lo = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
            __m128i const product     = _mm_mul_epu32     (data_key, data_key_lo);
            /* Swap 32-bit halves of each 64-bit lane and accumulate. */
            __m128i const data_swap = _mm_shuffle_epi32(data_vec, _MM_SHUFFLE(1,0,3,2));
            __m128i const sum       = _mm_add_epi64(xacc[i], data_swap);
            xacc[i] = _mm_add_epi64(product, sum);
    }   }
}
XXH_FORCE_INLINE XXH_TARGET_SSE2 XXH3_ACCUMULATE_TEMPLATE(sse2)
5208
/*!
 * @brief SSE2 accumulator scramble: acc = (acc ^ (acc>>47) ^ key) * PRIME32_1
 * per 64-bit lane. Requires 16-byte aligned @p acc.
 */
XXH_FORCE_INLINE XXH_TARGET_SSE2 void
XXH3_scrambleAcc_sse2(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    {   __m128i* const xacc = (__m128i*) acc;
        /* secret may be unaligned. */
        const         __m128i* const xsecret = (const __m128i *) secret;
        const __m128i prime32 = _mm_set1_epi32((int)XXH_PRIME32_1);

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN/sizeof(__m128i); i++) {
            /* xorshift: acc ^ (acc >> 47). */
            __m128i const acc_vec     = xacc[i];
            __m128i const shifted     = _mm_srli_epi64    (acc_vec, 47);
            __m128i const data_vec    = _mm_xor_si128     (acc_vec, shifted);
            /* xor with secret. */
            __m128i const key_vec     = _mm_loadu_si128   (xsecret+i);
            __m128i const data_key    = _mm_xor_si128     (data_vec, key_vec);
            /* Emulated 64x32 multiply by PRIME32_1 via two 32x32 products. */
            __m128i const data_key_hi = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
            __m128i const prod_lo     = _mm_mul_epu32     (data_key, prime32);
            __m128i const prod_hi     = _mm_mul_epu32     (data_key_hi, prime32);
            xacc[i] = _mm_add_epi64(prod_lo, _mm_slli_epi64(prod_hi, 32));
        }
    }
}
5237
/*!
 * @brief Derives a custom secret from XXH3_kSecret using SSE2:
 * adds +seed64/-seed64 alternately per 64-bit lane.
 */
XXH_FORCE_INLINE XXH_TARGET_SSE2 void XXH3_initCustomSecret_sse2(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);
    (void)(&XXH_writeLE64);   /* suppress unused-function warning */
    {   int const nbRounds = XXH_SECRET_DEFAULT_SIZE / sizeof(__m128i);

#       if defined(_MSC_VER) && defined(_M_IX86) && _MSC_VER < 1900
        /* Old MSVC x86 lacks _mm_set_epi64x: build the vector via memory. */
        XXH_ALIGN(16) const xxh_i64 seed64x2[2] = { (xxh_i64)seed64, (xxh_i64)(0U - seed64) };
        __m128i const seed = _mm_load_si128((__m128i const*)seed64x2);
#       else
        __m128i const seed = _mm_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64);
#       endif
        int i;

        const void* const src16 = XXH3_kSecret;
        __m128i* dst16 = (__m128i*) customSecret;
#       if defined(__GNUC__) || defined(__clang__)
        /* Opaque barrier: keeps GCC/clang from pessimizing the stores. */
        XXH_COMPILER_GUARD(dst16);
#       endif
        XXH_ASSERT(((size_t)src16 & 15) == 0);
        XXH_ASSERT(((size_t)dst16 & 15) == 0);

        for (i=0; i < nbRounds; ++i) {
            dst16[i] = _mm_add_epi64(_mm_load_si128((const __m128i *)src16+i), seed);
    }   }
}
5270
5271 #endif
5272
5273 #if (XXH_VECTOR == XXH_NEON)
5274
5275
5276 XXH_FORCE_INLINE void
5277 XXH3_scalarRound(void* XXH_RESTRICT acc, void const* XXH_RESTRICT input,
5278 void const* XXH_RESTRICT secret, size_t lane);
5279
5280 XXH_FORCE_INLINE void
5281 XXH3_scalarScrambleRound(void* XXH_RESTRICT acc,
5282 void const* XXH_RESTRICT secret, size_t lane);
5283
5284
5285
5286
5287
5288
5289
5290
5291
5292
5293
5294
5295
5296
5297
5298
5299
5300
5301
5302
5303
5304
5305
5306
5307
XXH_FORCE_INLINE void
XXH3_accumulate_512_neon( void* XXH_RESTRICT acc,
                    const void* XXH_RESTRICT input,
                    const void* XXH_RESTRICT secret)
{
    /* NEON accumulator step for one 64-byte stripe. The first XXH3_NEON_LANES
     * lanes are processed with NEON; any remaining lanes fall back to the
     * scalar round. */
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    XXH_STATIC_ASSERT(XXH3_NEON_LANES > 0 && XXH3_NEON_LANES <= XXH_ACC_NB && XXH3_NEON_LANES % 2 == 0);
    {   /* aliasing-safe vector type: acc is also accessed as xxh_u64[] elsewhere */
        xxh_aliasing_uint64x2_t* const xacc = (xxh_aliasing_uint64x2_t*) acc;
        /* byte pointers keep the offset arithmetic below simple */
        uint8_t const* xinput = (const uint8_t *) input;
        uint8_t const* xsecret  = (const uint8_t *) secret;

        size_t i;
#ifdef __wasm_simd128__
        /* NOTE(review): guard presumably keeps the wasm SIMD backend from
         * rematerializing/merging the secret loads — confirm against upstream
         * commentary before relying on this. */
        XXH_COMPILER_GUARD(xsecret);
#endif
        /* Lanes beyond NEON coverage: scalar rounds. */
        for (i = XXH3_NEON_LANES; i < XXH_ACC_NB; i++) {
            XXH3_scalarRound(acc, input, secret, i);
        }
        i = 0;
        /* Main pass: two uint64x2_t vectors (4 accumulator lanes) per iteration. */
        for (; i+1 < XXH3_NEON_LANES / 2; i+=2) {
            /* data_vec = xinput[i .. i+1] */
            uint64x2_t data_vec_1 = XXH_vld1q_u64(xinput  + (i * 16));
            uint64x2_t data_vec_2 = XXH_vld1q_u64(xinput  + ((i+1) * 16));
            /* key_vec = xsecret[i .. i+1] */
            uint64x2_t key_vec_1  = XXH_vld1q_u64(xsecret + (i * 16));
            uint64x2_t key_vec_2  = XXH_vld1q_u64(xsecret + ((i+1) * 16));
            /* data_swap = data_vec with its two 64-bit halves exchanged */
            uint64x2_t data_swap_1 = vextq_u64(data_vec_1, data_vec_1, 1);
            uint64x2_t data_swap_2 = vextq_u64(data_vec_2, data_vec_2, 1);
            /* data_key = data_vec ^ key_vec */
            uint64x2_t data_key_1 = veorq_u64(data_vec_1, key_vec_1);
            uint64x2_t data_key_2 = veorq_u64(data_vec_2, key_vec_2);

            /* Deinterleave the 32-bit words of both data_key vectors:
             * val[0] = even-indexed words (low half of each 64-bit lane),
             * val[1] = odd-indexed words  (high half of each 64-bit lane). */
            uint32x4x2_t unzipped = vuzpq_u32(
                vreinterpretq_u32_u64(data_key_1),
                vreinterpretq_u32_u64(data_key_2)
            );
            /* low 32 bits of each data_key lane */
            uint32x4_t data_key_lo = unzipped.val[0];
            /* high 32 bits of each data_key lane */
            uint32x4_t data_key_hi = unzipped.val[1];

            /* sum = data_swap + (u64)lo32 * (u64)hi32  (widening multiply-add) */
            uint64x2_t sum_1 = XXH_vmlal_low_u32(data_swap_1, data_key_lo, data_key_hi);
            uint64x2_t sum_2 = XXH_vmlal_high_u32(data_swap_2, data_key_lo, data_key_hi);

            /* Opaque guards keep Clang from re-associating the multiply-add
             * chains (see the macro's definition for rationale). */
            XXH_COMPILER_GUARD_CLANG_NEON(sum_1);
            XXH_COMPILER_GUARD_CLANG_NEON(sum_2);

            /* xacc[i] += sum */
            xacc[i]   = vaddq_u64(xacc[i], sum_1);
            xacc[i+1] = vaddq_u64(xacc[i+1], sum_2);
        }
        /* Remainder: one uint64x2_t (2 lanes) at a time. */
        for (; i < XXH3_NEON_LANES / 2; i++) {
            /* data_vec = xinput[i] */
            uint64x2_t data_vec = XXH_vld1q_u64(xinput  + (i * 16));
            /* key_vec = xsecret[i] */
            uint64x2_t key_vec  = XXH_vld1q_u64(xsecret + (i * 16));
            /* swap the 64-bit halves */
            uint64x2_t data_swap = vextq_u64(data_vec, data_vec, 1);
            /* data_key = data_vec ^ key_vec */
            uint64x2_t data_key = veorq_u64(data_vec, key_vec);

            /* narrow: low 32 bits of each lane */
            uint32x2_t data_key_lo = vmovn_u64(data_key);
            /* shift+narrow: high 32 bits of each lane */
            uint32x2_t data_key_hi = vshrn_n_u64(data_key, 32);
            /* sum = data_swap + (u64)lo32 * (u64)hi32 */
            uint64x2_t sum = vmlal_u32(data_swap, data_key_lo, data_key_hi);
            /* same Clang re-association guard as above */
            XXH_COMPILER_GUARD_CLANG_NEON(sum);
            /* xacc[i] += sum */
            xacc[i] = vaddq_u64 (xacc[i], sum);
        }
    }
}
5434 XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(neon)
5435
XXH_FORCE_INLINE void
XXH3_scrambleAcc_neon(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    /* NEON scramble step: per 64-bit lane,
     * acc = ((acc ^ (acc >> 47)) ^ secret) * XXH_PRIME32_1.
     * Lanes beyond XXH3_NEON_LANES use the scalar scramble. */
    XXH_ASSERT((((size_t)acc) & 15) == 0);

    {   xxh_aliasing_uint64x2_t* xacc       = (xxh_aliasing_uint64x2_t*) acc;
        uint8_t const* xsecret = (uint8_t const*) secret;

        size_t i;
#ifndef __wasm_simd128__
        /* Constants for the 64x32 multiply decomposition (not needed on wasm,
         * which multiplies 64-bit lanes directly below). */
        /* { prime, prime } — multiplies the low 32 bits of each lane */
        uint32x2_t const kPrimeLo = vdup_n_u32(XXH_PRIME32_1);
        /* { 0, prime, 0, prime } — prime pre-shifted into the high words */
        uint32x4_t const kPrimeHi = vreinterpretq_u32_u64(vdupq_n_u64((xxh_u64)XXH_PRIME32_1 << 32));
#endif

        /* Lanes beyond NEON coverage: scalar scramble. */
        for (i = XXH3_NEON_LANES; i < XXH_ACC_NB; i++) {
            XXH3_scalarScrambleRound(acc, secret, i);
        }
        for (i=0; i < XXH3_NEON_LANES / 2; i++) {
            /* data_vec = xacc[i] ^ (xacc[i] >> 47) */
            uint64x2_t acc_vec  = xacc[i];
            uint64x2_t shifted  = vshrq_n_u64(acc_vec, 47);
            uint64x2_t data_vec = veorq_u64(acc_vec, shifted);

            /* data_key = data_vec ^ xsecret[i] */
            uint64x2_t key_vec  = XXH_vld1q_u64(xsecret + (i * 16));
            uint64x2_t data_key = veorq_u64(data_vec, key_vec);

#ifdef __wasm_simd128__
            /* wasm SIMD has a native 64-bit lane multiply; GCC vector
             * extensions let us write it as a plain operator. */
            xacc[i] = data_key * XXH_PRIME32_1;
#else
            /* 64x32 multiply by the prime, decomposed into 32-bit products:
             * prod_hi = (hi32*prime + lo32*prime_hi_garbage) in the high words;
             * only the low 32 bits of each 32-bit product survive where needed,
             * so the result equals (data_key * prime) mod 2^64. */
            uint32x4_t prod_hi = vmulq_u32 (vreinterpretq_u32_u64(data_key), kPrimeHi);
            /* low 32 bits of each lane */
            uint32x2_t data_key_lo = vmovn_u64(data_key);
            /* xacc[i] = prod_hi + (u64)lo32 * (u64)prime */
            xacc[i] = vmlal_u32(vreinterpretq_u64_u32(prod_hi), data_key_lo, kPrimeLo);
#endif
        }
    }
}
5491 #endif
5492
5493 #if (XXH_VECTOR == XXH_VSX)
5494
XXH_FORCE_INLINE void
XXH3_accumulate_512_vsx(  void* XXH_RESTRICT acc,
                    const void* XXH_RESTRICT input,
                    const void* XXH_RESTRICT secret)
{
    /* POWER VSX / s390x zVector accumulator step for one 64-byte stripe. */
    /* presumed aligned */
    xxh_aliasing_u64x2* const xacc = (xxh_aliasing_u64x2*) acc;
    xxh_u8 const* const xinput   = (xxh_u8 const*) input;
    xxh_u8 const* const xsecret  = (xxh_u8 const*) secret;
    xxh_u64x2 const v32 = { 32, 32 };
    size_t i;
    for (i = 0; i < XXH_STRIPE_LEN / sizeof(xxh_u64x2); i++) {
        /* data_vec = xinput[i] (unaligned load) */
        xxh_u64x2 const data_vec = XXH_vec_loadu(xinput + 16*i);
        /* key_vec = xsecret[i] */
        xxh_u64x2 const key_vec  = XXH_vec_loadu(xsecret + 16*i);
        xxh_u64x2 const data_key = data_vec ^ key_vec;
        /* rotate each 64-bit lane by 32: swaps its 32-bit halves */
        xxh_u32x4 const shuffled = (xxh_u32x4)vec_rl(data_key, v32);
        /* product = (u64)lo32(data_key) * (u64)hi32(data_key) via odd-multiply */
        xxh_u64x2 const product  = XXH_vec_mulo((xxh_u32x4)data_key, shuffled);
        /* acc_vec = xacc[i] + product */
        xxh_u64x2 acc_vec        = xacc[i];
        acc_vec += product;

        /* also add data_vec with its 64-bit halves swapped; the two
         * platforms spell the same permute differently */
#ifdef __s390x__
        acc_vec += vec_permi(data_vec, data_vec, 2);
#else
        acc_vec += vec_xxpermdi(data_vec, data_vec, 2);
#endif
        xacc[i] = acc_vec;
    }
}
5529 XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(vsx)
5530
XXH_FORCE_INLINE void
XXH3_scrambleAcc_vsx(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    /* VSX scramble step: per 64-bit lane,
     * acc = ((acc ^ (acc >> 47)) ^ secret) * XXH_PRIME32_1. */
    XXH_ASSERT((((size_t)acc) & 15) == 0);

    {   xxh_aliasing_u64x2* const xacc = (xxh_aliasing_u64x2*) acc;
        const xxh_u8* const xsecret = (const xxh_u8*) secret;
        /* constants */
        xxh_u64x2 const v32  = { 32, 32 };
        xxh_u64x2 const v47 = { 47, 47 };
        xxh_u32x4 const prime = { XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1 };
        size_t i;
        for (i = 0; i < XXH_STRIPE_LEN / sizeof(xxh_u64x2); i++) {
            /* xorshift: acc ^= acc >> 47 */
            xxh_u64x2 const acc_vec  = xacc[i];
            xxh_u64x2 const data_vec = acc_vec ^ (acc_vec >> v47);

            /* data_key = data_vec ^ xsecret[i] */
            xxh_u64x2 const key_vec  = XXH_vec_loadu(xsecret + 16*i);
            xxh_u64x2 const data_key = data_vec ^ key_vec;

            /* 64x32 multiply by the prime from even/odd 32-bit products:
             * prod_even = hi32 * prime, prod_odd = lo32 * prime */
            xxh_u64x2 const prod_even  = XXH_vec_mule((xxh_u32x4)data_key, prime);
            /* xacc[i] = prod_odd + (prod_even << 32) */
            xxh_u64x2 const prod_odd  = XXH_vec_mulo((xxh_u32x4)data_key, prime);
            xacc[i] = prod_odd + (prod_even << v32);
    }   }
}
5560
5561 #endif
5562
5563 #if (XXH_VECTOR == XXH_SVE)
5564
XXH_FORCE_INLINE void
XXH3_accumulate_512_sve( void* XXH_RESTRICT acc,
                   const void* XXH_RESTRICT input,
                   const void* XXH_RESTRICT secret)
{
    /* SVE accumulator step for one 64-byte stripe, sized to the runtime
     * vector length. xinput, xsecret, and kSwap are consumed inside the
     * ACCRND() macro (defined earlier in this file), not in this body. */
    uint64_t *xacc = (uint64_t *)acc;
    const uint64_t *xinput = (const uint64_t *)(const void *)input;
    const uint64_t *xsecret = (const uint64_t *)(const void *)secret;
    /* kSwap = {1,0,3,2,...}: gather indices that swap adjacent 64-bit lanes */
    svuint64_t kSwap = sveor_n_u64_z(svptrue_b64(), svindex_u64(0, 1), 1);
    uint64_t element_count = svcntd();   /* 64-bit lanes per SVE register */
    if (element_count >= 8) {
        /* >= 512-bit vectors: all 8 accumulators fit in one register */
        svbool_t mask = svptrue_pat_b64(SV_VL8);
        svuint64_t vacc = svld1_u64(mask, xacc);
        ACCRND(vacc, 0);
        svst1_u64(mask, xacc, vacc);
    } else if (element_count == 2) {
        /* 128-bit vectors: four registers of 2 lanes each */
        svbool_t mask = svptrue_pat_b64(SV_VL2);
        svuint64_t acc0 = svld1_u64(mask, xacc + 0);
        svuint64_t acc1 = svld1_u64(mask, xacc + 2);
        svuint64_t acc2 = svld1_u64(mask, xacc + 4);
        svuint64_t acc3 = svld1_u64(mask, xacc + 6);
        ACCRND(acc0, 0);
        ACCRND(acc1, 2);
        ACCRND(acc2, 4);
        ACCRND(acc3, 6);
        svst1_u64(mask, xacc + 0, acc0);
        svst1_u64(mask, xacc + 2, acc1);
        svst1_u64(mask, xacc + 4, acc2);
        svst1_u64(mask, xacc + 6, acc3);
    } else {
        /* 256-bit vectors: two registers of 4 lanes each */
        svbool_t mask = svptrue_pat_b64(SV_VL4);
        svuint64_t acc0 = svld1_u64(mask, xacc + 0);
        svuint64_t acc1 = svld1_u64(mask, xacc + 4);
        ACCRND(acc0, 0);
        ACCRND(acc1, 4);
        svst1_u64(mask, xacc + 0, acc0);
        svst1_u64(mask, xacc + 4, acc1);
    }
}
5604
XXH_FORCE_INLINE void
XXH3_accumulate_sve(xxh_u64* XXH_RESTRICT acc,
                    const xxh_u8* XXH_RESTRICT input,
                    const xxh_u8* XXH_RESTRICT secret,
                    size_t nbStripes)
{
    /* Multi-stripe SVE accumulate loop. Unlike the single-stripe routine,
     * the accumulators stay in registers across all nbStripes iterations
     * and are stored back only once. xinput/xsecret/kSwap are consumed by
     * the ACCRND() macro; the pointer bumps (+8 u64 input, +1 u64 secret
     * per stripe) implement XXH_SECRET_CONSUME_RATE. */
    if (nbStripes != 0) {
        uint64_t *xacc = (uint64_t *)acc;
        const uint64_t *xinput = (const uint64_t *)(const void *)input;
        const uint64_t *xsecret = (const uint64_t *)(const void *)secret;
        /* kSwap = {1,0,3,2,...}: swaps adjacent 64-bit lanes inside ACCRND */
        svuint64_t kSwap = sveor_n_u64_z(svptrue_b64(), svindex_u64(0, 1), 1);
        uint64_t element_count = svcntd();
        if (element_count >= 8) {
            /* >= 512-bit vectors: whole accumulator in one register */
            svbool_t mask = svptrue_pat_b64(SV_VL8);
            svuint64_t vacc = svld1_u64(mask, xacc + 0);
            do {
                /* prefetch 1 KB ahead of the input stream */
                svprfd(mask, xinput + 128, SV_PLDL1STRM);
                ACCRND(vacc, 0);
                xinput += 8;
                xsecret += 1;
                nbStripes--;
            } while (nbStripes != 0);

            svst1_u64(mask, xacc + 0, vacc);
        } else if (element_count == 2) {
            /* 128-bit vectors: four registers of 2 lanes each */
            svbool_t mask = svptrue_pat_b64(SV_VL2);
            svuint64_t acc0 = svld1_u64(mask, xacc + 0);
            svuint64_t acc1 = svld1_u64(mask, xacc + 2);
            svuint64_t acc2 = svld1_u64(mask, xacc + 4);
            svuint64_t acc3 = svld1_u64(mask, xacc + 6);
            do {
                svprfd(mask, xinput + 128, SV_PLDL1STRM);
                ACCRND(acc0, 0);
                ACCRND(acc1, 2);
                ACCRND(acc2, 4);
                ACCRND(acc3, 6);
                xinput += 8;
                xsecret += 1;
                nbStripes--;
            } while (nbStripes != 0);

            svst1_u64(mask, xacc + 0, acc0);
            svst1_u64(mask, xacc + 2, acc1);
            svst1_u64(mask, xacc + 4, acc2);
            svst1_u64(mask, xacc + 6, acc3);
        } else {
            /* 256-bit vectors: two registers of 4 lanes each */
            svbool_t mask = svptrue_pat_b64(SV_VL4);
            svuint64_t acc0 = svld1_u64(mask, xacc + 0);
            svuint64_t acc1 = svld1_u64(mask, xacc + 4);
            do {
                svprfd(mask, xinput + 128, SV_PLDL1STRM);
                ACCRND(acc0, 0);
                ACCRND(acc1, 4);
                xinput += 8;
                xsecret += 1;
                nbStripes--;
            } while (nbStripes != 0);

            svst1_u64(mask, xacc + 0, acc0);
            svst1_u64(mask, xacc + 4, acc1);
        }
    }
}
5669
5670 #endif
5671
5672 #if (XXH_VECTOR == XXH_LSX)
5673 #define _LSX_SHUFFLE(z, y, x, w) (((z) << 6) | ((y) << 4) | ((x) << 2) | (w))
5674
XXH_FORCE_INLINE void
XXH3_accumulate_512_lsx( void* XXH_RESTRICT acc,
                   const void* XXH_RESTRICT input,
                   const void* XXH_RESTRICT secret)
{
    /* LoongArch LSX accumulator step for one 64-byte stripe; mirrors the
     * SSE2 implementation instruction-for-instruction. */
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    {
        __m128i* const xacc    =       (__m128i *) acc;
        const __m128i* const xinput  = (const __m128i *) input;
        const __m128i* const xsecret = (const __m128i *) secret;

        for (size_t i = 0; i < XXH_STRIPE_LEN / sizeof(__m128i); i++) {
            /* data_vec = xinput[i] */
            __m128i const data_vec = __lsx_vld(xinput + i, 0);
            /* key_vec = xsecret[i] */
            __m128i const key_vec = __lsx_vld(xsecret + i, 0);
            /* data_key = data_vec ^ key_vec */
            __m128i const data_key = __lsx_vxor_v(data_vec, key_vec);
            /* data_key_lo = high 32 bits shifted down */
            __m128i const data_key_lo = __lsx_vsrli_d(data_key, 32);
            /* product = (u64)lo32(data_key) * (u64)hi32(data_key)
             * (even-position widening multiply) */
            __m128i const product = __lsx_vmulwev_d_wu(data_key, data_key_lo);
            /* sum = xacc[i] + swap32-pairs(data_vec) (swaps 64-bit halves) */
            __m128i const data_swap = __lsx_vshuf4i_w(data_vec, _LSX_SHUFFLE(1, 0, 3, 2));
            __m128i const sum = __lsx_vadd_d(xacc[i], data_swap);
            /* xacc[i] = product + sum */
            xacc[i] = __lsx_vadd_d(product, sum);
        }
    }
}
5706 XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(lsx)
5707
XXH_FORCE_INLINE void
XXH3_scrambleAcc_lsx(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    /* LSX scramble step: per 64-bit lane,
     * acc = ((acc ^ (acc >> 47)) ^ secret) * XXH_PRIME32_1. */
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    {
        __m128i* const xacc = (__m128i*) acc;
        const __m128i* const xsecret = (const __m128i *) secret;
        const __m128i prime32 = __lsx_vreplgr2vr_w((int)XXH_PRIME32_1);

        for (size_t i = 0; i < XXH_STRIPE_LEN / sizeof(__m128i); i++) {
            /* xorshift: data_vec = xacc[i] ^ (xacc[i] >> 47) */
            __m128i const acc_vec = xacc[i];
            __m128i const shifted = __lsx_vsrli_d(acc_vec, 47);
            __m128i const data_vec = __lsx_vxor_v(acc_vec, shifted);
            /* data_key = data_vec ^ xsecret[i] */
            __m128i const key_vec = __lsx_vld(xsecret + i, 0);
            __m128i const data_key = __lsx_vxor_v(data_vec, key_vec);

            /* 64x32 multiply by the prime from two 32x32->64 products:
             * xacc[i] = lo32*prime + ((hi32*prime) << 32) */
            __m128i const data_key_hi = __lsx_vsrli_d(data_key, 32);
            __m128i const prod_lo = __lsx_vmulwev_d_wu(data_key, prime32);
            __m128i const prod_hi = __lsx_vmulwev_d_wu(data_key_hi, prime32);
            xacc[i] = __lsx_vadd_d(prod_lo, __lsx_vslli_d(prod_hi, 32));
        }
    }
}
5734
5735 #endif
5736
5737
5738
5739 #if defined(__aarch64__) && (defined(__GNUC__) || defined(__clang__))
5740
5741
5742
5743
5744
5745
5746
5747
5748
5749
5750
5751
5752
5753
XXH_FORCE_INLINE xxh_u64
XXH_mult32to64_add64(xxh_u64 lhs, xxh_u64 rhs, xxh_u64 acc)
{
    /* Computes (u64)(u32)lhs * (u64)(u32)rhs + acc as a single aarch64
     * UMADDL instruction; inline asm keeps the compiler from splitting
     * the multiply-add apart. */
    xxh_u64 ret;
    /* note: %x = 64-bit register, %w = 32-bit register */
    __asm__("umaddl %x0, %w1, %w2, %x3" : "=r" (ret) : "r" (lhs), "r" (rhs), "r" (acc));
    return ret;
}
5762 #else
5763 XXH_FORCE_INLINE xxh_u64
5764 XXH_mult32to64_add64(xxh_u64 lhs, xxh_u64 rhs, xxh_u64 acc)
5765 {
5766 return XXH_mult32to64((xxh_u32)lhs, (xxh_u32)rhs) + acc;
5767 }
5768 #endif
5769
5770
5771
5772
5773
5774
5775
5776
XXH_FORCE_INLINE void
XXH3_scalarRound(void* XXH_RESTRICT acc,
                 void const* XXH_RESTRICT input,
                 void const* XXH_RESTRICT secret,
                 size_t lane)
{
    /* Scalar accumulator round for a single lane of one stripe. */
    xxh_u64* xacc = (xxh_u64*) acc;
    xxh_u8 const* xinput  = (xxh_u8 const*) input;
    xxh_u8 const* xsecret = (xxh_u8 const*) secret;
    XXH_ASSERT(lane < XXH_ACC_NB);
    XXH_ASSERT(((size_t)acc & (XXH_ACC_ALIGN-1)) == 0);
    {
        xxh_u64 const data_val = XXH_readLE64(xinput + lane * 8);
        xxh_u64 const data_key = data_val ^ XXH_readLE64(xsecret + lane * 8);
        /* cross-lane mix: the sibling lane (lane^1) absorbs the raw input word */
        xacc[lane ^ 1] += data_val;
        /* this lane absorbs lo32(data_key) * hi32(data_key) */
        xacc[lane] = XXH_mult32to64_add64(data_key /* & 0xFFFFFFFF */, data_key >> 32, xacc[lane]);
    }
}
5795
5796
5797
5798
5799
5800 XXH_FORCE_INLINE void
5801 XXH3_accumulate_512_scalar(void* XXH_RESTRICT acc,
5802 const void* XXH_RESTRICT input,
5803 const void* XXH_RESTRICT secret)
5804 {
5805 size_t i;
5806
5807 #if defined(__GNUC__) && !defined(__clang__) \
5808 && (defined(__arm__) || defined(__thumb2__)) \
5809 && defined(__ARM_FEATURE_UNALIGNED) \
5810 && XXH_SIZE_OPT <= 0
5811 # pragma GCC unroll 8
5812 #endif
5813 for (i=0; i < XXH_ACC_NB; i++) {
5814 XXH3_scalarRound(acc, input, secret, i);
5815 }
5816 }
5817 XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(scalar)
5818
5819
5820
5821
5822
5823
5824
5825
5826 XXH_FORCE_INLINE void
5827 XXH3_scalarScrambleRound(void* XXH_RESTRICT acc,
5828 void const* XXH_RESTRICT secret,
5829 size_t lane)
5830 {
5831 xxh_u64* const xacc = (xxh_u64*) acc;
5832 const xxh_u8* const xsecret = (const xxh_u8*) secret;
5833 XXH_ASSERT((((size_t)acc) & (XXH_ACC_ALIGN-1)) == 0);
5834 XXH_ASSERT(lane < XXH_ACC_NB);
5835 {
5836 xxh_u64 const key64 = XXH_readLE64(xsecret + lane * 8);
5837 xxh_u64 acc64 = xacc[lane];
5838 acc64 = XXH_xorshift64(acc64, 47);
5839 acc64 ^= key64;
5840 acc64 *= XXH_PRIME32_1;
5841 xacc[lane] = acc64;
5842 }
5843 }
5844
5845
5846
5847
5848
5849 XXH_FORCE_INLINE void
5850 XXH3_scrambleAcc_scalar(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
5851 {
5852 size_t i;
5853 for (i=0; i < XXH_ACC_NB; i++) {
5854 XXH3_scalarScrambleRound(acc, secret, i);
5855 }
5856 }
5857
XXH_FORCE_INLINE void
XXH3_initCustomSecret_scalar(void* XXH_RESTRICT customSecret, xxh_u64 seed64)
{
    /* Derives a seed-specific secret from the default one:
     * each 16-byte chunk gets (+seed64) on its low 8 bytes and (-seed64)
     * on its high 8 bytes, read/written little-endian. */
    const xxh_u8* kSecretPtr = XXH3_kSecret;
    XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);

#if defined(__GNUC__) && defined(__aarch64__)
    /* Opaque guard hides the constant address of kSecret from the aarch64
     * optimizer; see the macro's definition for the codegen rationale. */
    XXH_COMPILER_GUARD(kSecretPtr);
#endif
    {   int const nbRounds = XXH_SECRET_DEFAULT_SIZE / 16;
        int i;
        for (i=0; i < nbRounds; i++) {
            /* lo = kSecret[16i .. +8] + seed ; hi = kSecret[16i+8 .. +8] - seed */
            xxh_u64 lo = XXH_readLE64(kSecretPtr + 16*i)     + seed64;
            xxh_u64 hi = XXH_readLE64(kSecretPtr + 16*i + 8) - seed64;
            XXH_writeLE64((xxh_u8*)customSecret + 16*i,     lo);
            XXH_writeLE64((xxh_u8*)customSecret + 16*i + 8, hi);
    }   }
}
5919
5920
5921 typedef void (*XXH3_f_accumulate)(xxh_u64* XXH_RESTRICT, const xxh_u8* XXH_RESTRICT, const xxh_u8* XXH_RESTRICT, size_t);
5922 typedef void (*XXH3_f_scrambleAcc)(void* XXH_RESTRICT, const void*);
5923 typedef void (*XXH3_f_initCustomSecret)(void* XXH_RESTRICT, xxh_u64);
5924
5925
5926 #if (XXH_VECTOR == XXH_AVX512)
5927
5928 #define XXH3_accumulate_512 XXH3_accumulate_512_avx512
5929 #define XXH3_accumulate XXH3_accumulate_avx512
5930 #define XXH3_scrambleAcc XXH3_scrambleAcc_avx512
5931 #define XXH3_initCustomSecret XXH3_initCustomSecret_avx512
5932
5933 #elif (XXH_VECTOR == XXH_AVX2)
5934
5935 #define XXH3_accumulate_512 XXH3_accumulate_512_avx2
5936 #define XXH3_accumulate XXH3_accumulate_avx2
5937 #define XXH3_scrambleAcc XXH3_scrambleAcc_avx2
5938 #define XXH3_initCustomSecret XXH3_initCustomSecret_avx2
5939
5940 #elif (XXH_VECTOR == XXH_SSE2)
5941
5942 #define XXH3_accumulate_512 XXH3_accumulate_512_sse2
5943 #define XXH3_accumulate XXH3_accumulate_sse2
5944 #define XXH3_scrambleAcc XXH3_scrambleAcc_sse2
5945 #define XXH3_initCustomSecret XXH3_initCustomSecret_sse2
5946
5947 #elif (XXH_VECTOR == XXH_NEON)
5948
5949 #define XXH3_accumulate_512 XXH3_accumulate_512_neon
5950 #define XXH3_accumulate XXH3_accumulate_neon
5951 #define XXH3_scrambleAcc XXH3_scrambleAcc_neon
5952 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5953
5954 #elif (XXH_VECTOR == XXH_VSX)
5955
5956 #define XXH3_accumulate_512 XXH3_accumulate_512_vsx
5957 #define XXH3_accumulate XXH3_accumulate_vsx
5958 #define XXH3_scrambleAcc XXH3_scrambleAcc_vsx
5959 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5960
5961 #elif (XXH_VECTOR == XXH_SVE)
5962 #define XXH3_accumulate_512 XXH3_accumulate_512_sve
5963 #define XXH3_accumulate XXH3_accumulate_sve
5964 #define XXH3_scrambleAcc XXH3_scrambleAcc_scalar
5965 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5966
5967 #elif (XXH_VECTOR == XXH_LSX)
5968 #define XXH3_accumulate_512 XXH3_accumulate_512_lsx
5969 #define XXH3_accumulate XXH3_accumulate_lsx
5970 #define XXH3_scrambleAcc XXH3_scrambleAcc_lsx
5971 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5972
5973 #else
5974
5975 #define XXH3_accumulate_512 XXH3_accumulate_512_scalar
5976 #define XXH3_accumulate XXH3_accumulate_scalar
5977 #define XXH3_scrambleAcc XXH3_scrambleAcc_scalar
5978 #define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5979
5980 #endif
5981
5982 #if XXH_SIZE_OPT >= 1
5983 # undef XXH3_initCustomSecret
5984 # define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5985 #endif
5986
XXH_FORCE_INLINE void
XXH3_hashLong_internal_loop(xxh_u64* XXH_RESTRICT acc,
                      const xxh_u8* XXH_RESTRICT input, size_t len,
                      const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                            XXH3_f_accumulate f_acc,
                            XXH3_f_scrambleAcc f_scramble)
{
    /* Core long-input loop: accumulate full blocks (scrambling after each),
     * then the trailing partial block, then one final overlapping stripe. */
    size_t const nbStripesPerBlock = (secretSize - XXH_STRIPE_LEN) / XXH_SECRET_CONSUME_RATE;
    size_t const block_len = XXH_STRIPE_LEN * nbStripesPerBlock;
    /* (len - 1): an input that is an exact multiple of block_len must not
     * count its last block here — it is handled as the partial block below */
    size_t const nb_blocks = (len - 1) / block_len;

    size_t n;

    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN);

    for (n = 0; n < nb_blocks; n++) {
        f_acc(acc, input + n*block_len, secret, nbStripesPerBlock);
        f_scramble(acc, secret + secretSize - XXH_STRIPE_LEN);
    }

    /* last partial block */
    XXH_ASSERT(len > XXH_STRIPE_LEN);
    {   size_t const nbStripes = ((len - 1) - (block_len * nb_blocks)) / XXH_STRIPE_LEN;
        XXH_ASSERT(nbStripes <= (secretSize / XXH_SECRET_CONSUME_RATE));
        f_acc(acc, input + nb_blocks*block_len, secret, nbStripes);

        /* last stripe: taken from the very end of input, so it may overlap
         * already-processed bytes; a shifted secret keeps it distinct */
        {   const xxh_u8* const p = input + len - XXH_STRIPE_LEN;
#define XXH_SECRET_LASTACC_START 7  /* not aligned on 8, last secret is different from acc & scrambler */
            XXH3_accumulate_512(acc, p, secret + secretSize - XXH_STRIPE_LEN - XXH_SECRET_LASTACC_START);
    }   }
}
6019
6020 XXH_FORCE_INLINE xxh_u64
6021 XXH3_mix2Accs(const xxh_u64* XXH_RESTRICT acc, const xxh_u8* XXH_RESTRICT secret)
6022 {
6023 return XXH3_mul128_fold64(
6024 acc[0] ^ XXH_readLE64(secret),
6025 acc[1] ^ XXH_readLE64(secret+8) );
6026 }
6027
static XXH_PUREF XXH64_hash_t
XXH3_mergeAccs(const xxh_u64* XXH_RESTRICT acc, const xxh_u8* XXH_RESTRICT secret, xxh_u64 start)
{
    /* Folds the 8 accumulators pairwise into 4 mixes summed onto `start`,
     * then avalanches the total into the final 64-bit hash. */
    xxh_u64 result64 = start;
    size_t i = 0;

    for (i = 0; i < 4; i++) {
        result64 += XXH3_mix2Accs(acc+2*i, secret + 16*i);
#if defined(__clang__)                                \
    && (defined(__arm__) || defined(__thumb__))       \
    && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
    && !defined(XXH_ENABLE_AUTOVECTORIZE)
        /* Opaque guard stops Clang/ARM-NEON from autovectorizing this small
         * reduction (NOTE(review): presumably a codegen regression upstream
         * worked around — confirm against upstream commentary). */
        XXH_COMPILER_GUARD(result64);
#endif
    }

    return XXH3_avalanche(result64);
}
6054
6055
6056 #define XXH_SECRET_MERGEACCS_START 11
6057
6058 static XXH_PUREF XXH64_hash_t
6059 XXH3_finalizeLong_64b(const xxh_u64* XXH_RESTRICT acc, const xxh_u8* XXH_RESTRICT secret, xxh_u64 len)
6060 {
6061 return XXH3_mergeAccs(acc, secret + XXH_SECRET_MERGEACCS_START, len * XXH_PRIME64_1);
6062 }
6063
6064 #define XXH3_INIT_ACC { XXH_PRIME32_3, XXH_PRIME64_1, XXH_PRIME64_2, XXH_PRIME64_3, \
6065 XXH_PRIME64_4, XXH_PRIME32_2, XXH_PRIME64_5, XXH_PRIME32_1 }
6066
XXH_FORCE_INLINE XXH64_hash_t
XXH3_hashLong_64b_internal(const void* XXH_RESTRICT input, size_t len,
                           const void* XXH_RESTRICT secret, size_t secretSize,
                           XXH3_f_accumulate f_acc,
                           XXH3_f_scrambleAcc f_scramble)
{
    /* Long-input (> XXH3_MIDSIZE_MAX) 64-bit hash: run the accumulate/
     * scramble loop over the whole input, then merge into the final hash. */
    XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;

    XXH3_hashLong_internal_loop(acc, (const xxh_u8*)input, len, (const xxh_u8*)secret, secretSize, f_acc, f_scramble);

    /* converge accumulators into the final hash */
    XXH_STATIC_ASSERT(sizeof(acc) == 64);
    /* the merge reads sizeof(acc) bytes of secret starting at MERGEACCS_START */
    XXH_ASSERT(secretSize >= sizeof(acc) + XXH_SECRET_MERGEACCS_START);
    return XXH3_finalizeLong_64b(acc, (const xxh_u8*)secret, (xxh_u64)len);
}
6082
6083
6084
6085
6086
6087
6088
6089
6090 XXH3_WITH_SECRET_INLINE XXH64_hash_t
6091 XXH3_hashLong_64b_withSecret(const void* XXH_RESTRICT input, size_t len,
6092 XXH64_hash_t seed64, const xxh_u8* XXH_RESTRICT secret, size_t secretLen)
6093 {
6094 (void)seed64;
6095 return XXH3_hashLong_64b_internal(input, len, secret, secretLen, XXH3_accumulate, XXH3_scrambleAcc);
6096 }
6097
6098
6099
6100
6101
6102
6103
6104 XXH_NO_INLINE XXH_PUREF XXH64_hash_t
6105 XXH3_hashLong_64b_default(const void* XXH_RESTRICT input, size_t len,
6106 XXH64_hash_t seed64, const xxh_u8* XXH_RESTRICT secret, size_t secretLen)
6107 {
6108 (void)seed64; (void)secret; (void)secretLen;
6109 return XXH3_hashLong_64b_internal(input, len, XXH3_kSecret, sizeof(XXH3_kSecret), XXH3_accumulate, XXH3_scrambleAcc);
6110 }
6111
6112
6113
6114
6115
6116
6117
6118
6119
6120
6121
6122
XXH_FORCE_INLINE XXH64_hash_t
XXH3_hashLong_64b_withSeed_internal(const void* input, size_t len,
                                    XXH64_hash_t seed,
                                    XXH3_f_accumulate f_acc,
                                    XXH3_f_scrambleAcc f_scramble,
                                    XXH3_f_initCustomSecret f_initSec)
{
    /* Seeded long hash: generates a per-seed secret on the stack, except
     * for seed==0 which reuses the default secret directly. */
#if XXH_SIZE_OPT <= 0
    /* fast path: seed 0 needs no custom secret */
    if (seed == 0)
        return XXH3_hashLong_64b_internal(input, len,
                                          XXH3_kSecret, sizeof(XXH3_kSecret),
                                          f_acc, f_scramble);
#endif
    {   /* derive the custom secret, then hash with it */
        XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
        f_initSec(secret, seed);
        return XXH3_hashLong_64b_internal(input, len, secret, sizeof(secret),
                                          f_acc, f_scramble);
    }
}
6142
6143
6144
6145
6146 XXH_NO_INLINE XXH64_hash_t
6147 XXH3_hashLong_64b_withSeed(const void* XXH_RESTRICT input, size_t len,
6148 XXH64_hash_t seed, const xxh_u8* XXH_RESTRICT secret, size_t secretLen)
6149 {
6150 (void)secret; (void)secretLen;
6151 return XXH3_hashLong_64b_withSeed_internal(input, len, seed,
6152 XXH3_accumulate, XXH3_scrambleAcc, XXH3_initCustomSecret);
6153 }
6154
6155
6156 typedef XXH64_hash_t (*XXH3_hashLong64_f)(const void* XXH_RESTRICT, size_t,
6157 XXH64_hash_t, const xxh_u8* XXH_RESTRICT, size_t);
6158
XXH_FORCE_INLINE XXH64_hash_t
XXH3_64bits_internal(const void* XXH_RESTRICT input, size_t len,
                     XXH64_hash_t seed64, const void* XXH_RESTRICT secret, size_t secretLen,
                     XXH3_hashLong64_f f_hashLong)
{
    /* Central length dispatcher for all XXH3 64-bit variants: each input
     * size class has a dedicated routine, and long inputs delegate to the
     * caller-selected f_hashLong. */
    XXH_ASSERT(secretLen >= XXH3_SECRET_SIZE_MIN);
    /* NOTE(review): when len > XXH3_MIDSIZE_MAX, f_hashLong may ignore
     * seed64 or secret depending on the variant selected by the caller. */
    if (len <= 16)
        return XXH3_len_0to16_64b((const xxh_u8*)input, len, (const xxh_u8*)secret, seed64);
    if (len <= 128)
        return XXH3_len_17to128_64b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
    if (len <= XXH3_MIDSIZE_MAX)
        return XXH3_len_129to240_64b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
    return f_hashLong(input, len, seed64, (const xxh_u8*)secret, secretLen);
}
6180
6181
6182
6183
6184
6185 XXH_PUBLIC_API XXH64_hash_t XXH3_64bits(XXH_NOESCAPE const void* input, size_t length)
6186 {
6187 return XXH3_64bits_internal(input, length, 0, XXH3_kSecret, sizeof(XXH3_kSecret), XXH3_hashLong_64b_default);
6188 }
6189
6190
6191 XXH_PUBLIC_API XXH64_hash_t
6192 XXH3_64bits_withSecret(XXH_NOESCAPE const void* input, size_t length, XXH_NOESCAPE const void* secret, size_t secretSize)
6193 {
6194 return XXH3_64bits_internal(input, length, 0, secret, secretSize, XXH3_hashLong_64b_withSecret);
6195 }
6196
6197
6198 XXH_PUBLIC_API XXH64_hash_t
6199 XXH3_64bits_withSeed(XXH_NOESCAPE const void* input, size_t length, XXH64_hash_t seed)
6200 {
6201 return XXH3_64bits_internal(input, length, seed, XXH3_kSecret, sizeof(XXH3_kSecret), XXH3_hashLong_64b_withSeed);
6202 }
6203
6204 XXH_PUBLIC_API XXH64_hash_t
6205 XXH3_64bits_withSecretandSeed(XXH_NOESCAPE const void* input, size_t length, XXH_NOESCAPE const void* secret, size_t secretSize, XXH64_hash_t seed)
6206 {
6207 if (length <= XXH3_MIDSIZE_MAX)
6208 return XXH3_64bits_internal(input, length, seed, XXH3_kSecret, sizeof(XXH3_kSecret), NULL);
6209 return XXH3_hashLong_64b_withSecret(input, length, seed, (const xxh_u8*)secret, secretSize);
6210 }
6211
6212
6213
6214 #ifndef XXH_NO_STREAM
6215
6216
6217
6218
6219
6220
6221
6222
6223
6224
6225
6226
6227
6228
6229
6230
6231
6232
6233
6234
6235
6236
6237
static XXH_MALLOCF void* XXH_alignedMalloc(size_t s, size_t align)
{
    /* Portable aligned allocator: over-allocates by `align` bytes, rounds
     * the base pointer up to the next aligned address, and stores the
     * rounding offset in the byte just before the returned pointer so
     * XXH_alignedFree() can recover the original allocation. */
    XXH_ASSERT(align <= 128 && align >= 8);   /* supported alignment range */
    XXH_ASSERT((align & (align-1)) == 0);     /* power of 2 */
    XXH_ASSERT(s != 0 && s < (s + align));    /* non-empty, no overflow */
    {
        xxh_u8* base = (xxh_u8*)XXH_malloc(s + align);
        if (base != NULL) {
            /* offset is in [1, align]: even an already-aligned base moves
             * forward by a full `align`, guaranteeing at least one byte
             * before ptr to store the offset in */
            size_t offset = align - ((size_t)base & (align - 1));
            /* aligned pointer handed to the caller */
            xxh_u8* ptr = base + offset;

            XXH_ASSERT((size_t)ptr % align == 0);

            /* stash the offset so the free routine can find `base` */
            ptr[-1] = (xxh_u8)offset;
            return ptr;
        }
        return NULL;
    }
}
6265
6266
6267
6268
6269 static void XXH_alignedFree(void* p)
6270 {
6271 if (p != NULL) {
6272 xxh_u8* ptr = (xxh_u8*)p;
6273
6274 xxh_u8 offset = ptr[-1];
6275
6276 xxh_u8* base = ptr - offset;
6277 XXH_free(base);
6278 }
6279 }
6280
6281
6282
6283
6284
6285
6286
6287
6288
6289
6290
6291 XXH_PUBLIC_API XXH3_state_t* XXH3_createState(void)
6292 {
6293 XXH3_state_t* const state = (XXH3_state_t*)XXH_alignedMalloc(sizeof(XXH3_state_t), 64);
6294 if (state==NULL) return NULL;
6295 XXH3_INITSTATE(state);
6296 return state;
6297 }
6298
6299
6300
6301
6302
6303
6304
6305
6306
6307
6308
6309
6310
6311 XXH_PUBLIC_API XXH_errorcode XXH3_freeState(XXH3_state_t* statePtr)
6312 {
6313 XXH_alignedFree(statePtr);
6314 return XXH_OK;
6315 }
6316
6317
6318 XXH_PUBLIC_API void
6319 XXH3_copyState(XXH_NOESCAPE XXH3_state_t* dst_state, XXH_NOESCAPE const XXH3_state_t* src_state)
6320 {
6321 XXH_memcpy(dst_state, src_state, sizeof(*dst_state));
6322 }
6323
static void
XXH3_reset_internal(XXH3_state_t* statePtr,
                    XXH64_hash_t seed,
                    const void* secret, size_t secretSize)
{
    /* (Re)initializes a streaming state for a new hash: zeroes the
     * bookkeeping fields between bufferedSize and nbStripesPerBlock
     * (exclusive), reinstalls the initial accumulator constants, and
     * records the seed/secret configuration. Fields at and after
     * nbStripesPerBlock are set explicitly below. */
    size_t const initStart = offsetof(XXH3_state_t, bufferedSize);
    size_t const initLength = offsetof(XXH3_state_t, nbStripesPerBlock) - initStart;
    XXH_ASSERT(offsetof(XXH3_state_t, nbStripesPerBlock) > initStart);
    XXH_ASSERT(statePtr != NULL);
    /* set members from bufferedSize to nbStripesPerBlock (excluded) to 0 */
    memset((char*)statePtr + initStart, 0, initLength);
    statePtr->acc[0] = XXH_PRIME32_3;
    statePtr->acc[1] = XXH_PRIME64_1;
    statePtr->acc[2] = XXH_PRIME64_2;
    statePtr->acc[3] = XXH_PRIME64_3;
    statePtr->acc[4] = XXH_PRIME64_4;
    statePtr->acc[5] = XXH_PRIME32_2;
    statePtr->acc[6] = XXH_PRIME64_5;
    statePtr->acc[7] = XXH_PRIME32_1;
    statePtr->seed = seed;
    statePtr->useSeed = (seed != 0);
    /* NULL means "use the internal customSecret buffer" */
    statePtr->extSecret = (const unsigned char*)secret;
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN);
    statePtr->secretLimit = secretSize - XXH_STRIPE_LEN;
    statePtr->nbStripesPerBlock = statePtr->secretLimit / XXH_SECRET_CONSUME_RATE;
}
6350
6351
6352 XXH_PUBLIC_API XXH_errorcode
6353 XXH3_64bits_reset(XXH_NOESCAPE XXH3_state_t* statePtr)
6354 {
6355 if (statePtr == NULL) return XXH_ERROR;
6356 XXH3_reset_internal(statePtr, 0, XXH3_kSecret, XXH_SECRET_DEFAULT_SIZE);
6357 return XXH_OK;
6358 }
6359
6360
6361 XXH_PUBLIC_API XXH_errorcode
6362 XXH3_64bits_reset_withSecret(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize)
6363 {
6364 if (statePtr == NULL) return XXH_ERROR;
6365 XXH3_reset_internal(statePtr, 0, secret, secretSize);
6366 if (secret == NULL) return XXH_ERROR;
6367 if (secretSize < XXH3_SECRET_SIZE_MIN) return XXH_ERROR;
6368 return XXH_OK;
6369 }
6370
6371
XXH_PUBLIC_API XXH_errorcode
XXH3_64bits_reset_withSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH64_hash_t seed)
{
    /* Resets the state for seeded hashing. seed==0 is equivalent to the
     * unseeded reset. */
    if (statePtr == NULL) return XXH_ERROR;
    if (seed==0) return XXH3_64bits_reset(statePtr);
    /* Regenerating the custom secret is expensive: skip it when the seed
     * is unchanged and the previous reset already used the custom secret
     * (extSecret == NULL). */
    if ((seed != statePtr->seed) || (statePtr->extSecret != NULL))
        XXH3_initCustomSecret(statePtr->customSecret, seed);
    /* NULL secret selects the internal customSecret buffer */
    XXH3_reset_internal(statePtr, seed, NULL, XXH_SECRET_DEFAULT_SIZE);
    return XXH_OK;
}
6382
6383
6384 XXH_PUBLIC_API XXH_errorcode
6385 XXH3_64bits_reset_withSecretandSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize, XXH64_hash_t seed64)
6386 {
6387 if (statePtr == NULL) return XXH_ERROR;
6388 if (secret == NULL) return XXH_ERROR;
6389 if (secretSize < XXH3_SECRET_SIZE_MIN) return XXH_ERROR;
6390 XXH3_reset_internal(statePtr, seed64, secret, secretSize);
6391 statePtr->useSeed = 1;
6392 return XXH_OK;
6393 }
6394
6395
6396
6397
6398
6399
6400
6401
6402
6403
6404
6405
6406
6407
6408
6409
6410
6411
XXH_FORCE_INLINE const xxh_u8 *
XXH3_consumeStripes(xxh_u64* XXH_RESTRICT acc,
                    size_t* XXH_RESTRICT nbStripesSoFarPtr, size_t nbStripesPerBlock,
                    const xxh_u8* XXH_RESTRICT input, size_t nbStripes,
                    const xxh_u8* XXH_RESTRICT secret, size_t secretLimit,
                    XXH3_f_accumulate f_acc,
                    XXH3_f_scrambleAcc f_scramble)
{
    /* Streaming workhorse: consumes nbStripes stripes from input, resuming
     * mid-block at *nbStripesSoFarPtr, scrambling at each block boundary.
     * Updates *nbStripesSoFarPtr and returns the advanced input pointer. */
    const xxh_u8* initialSecret = secret + *nbStripesSoFarPtr * XXH_SECRET_CONSUME_RATE;
    /* Process full blocks */
    if (nbStripes >= (nbStripesPerBlock - *nbStripesSoFarPtr)) {
        /* first iteration finishes the current (possibly partial) block */
        size_t nbStripesThisIter = nbStripesPerBlock - *nbStripesSoFarPtr;

        do {
            /* accumulate then scramble at the block boundary */
            f_acc(acc, input, initialSecret, nbStripesThisIter);
            f_scramble(acc, secret + secretLimit);
            input += nbStripesThisIter * XXH_STRIPE_LEN;
            nbStripes -= nbStripesThisIter;
            /* subsequent iterations process whole blocks from the secret start */
            nbStripesThisIter = nbStripesPerBlock;
            initialSecret = secret;
        } while (nbStripes >= nbStripesPerBlock);
        *nbStripesSoFarPtr = 0;
    }
    /* Process the remaining partial block */
    if (nbStripes > 0) {
        f_acc(acc, input, initialSecret, nbStripes);
        input += nbStripes * XXH_STRIPE_LEN;
        *nbStripesSoFarPtr += nbStripes;
    }
    /* pointer just past the consumed input */
    return input;
}
6447
6448 #ifndef XXH3_STREAM_USE_STACK
6449 # if XXH_SIZE_OPT <= 0 && !defined(__clang__)
6450 # define XXH3_STREAM_USE_STACK 1
6451 # endif
6452 #endif
6453
6454
6455
/*!
 * @internal
 * @brief Core streaming update routine.
 *
 * Appends small inputs to the internal buffer; larger inputs are consumed
 * stripe by stripe via XXH3_consumeStripes(). At least one byte of input is
 * always kept buffered so that the digest step has a "last stripe" to read.
 * Dispatches through @p f_acc / @p f_scramble so each SIMD variant can
 * instantiate its own specialized copy.
 */
XXH_FORCE_INLINE XXH_errorcode
XXH3_update(XXH3_state_t* XXH_RESTRICT const state,
            const xxh_u8* XXH_RESTRICT input, size_t len,
            XXH3_f_accumulate f_acc,
            XXH3_f_scrambleAcc f_scramble)
{
    if (input==NULL) {
        XXH_ASSERT(len == 0);
        return XXH_OK;
    }

    XXH_ASSERT(state != NULL);
    {   const xxh_u8* const bEnd = input + len;
        const unsigned char* const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
        /* Work on a stack copy of the accumulators; copied back before
         * returning. This helps compilers keep them in registers. */
        XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[8];
        XXH_memcpy(acc, state->acc, sizeof(acc));
#else
        xxh_u64* XXH_RESTRICT const acc = state->acc;
#endif
        state->totalLen += len;
        XXH_ASSERT(state->bufferedSize <= XXH3_INTERNALBUFFER_SIZE);

        /* Small input: fits entirely in the internal buffer. */
        if (len <= XXH3_INTERNALBUFFER_SIZE - state->bufferedSize) {
            XXH_memcpy(state->buffer + state->bufferedSize, input, len);
            state->bufferedSize += (XXH32_hash_t)len;
            return XXH_OK;
        }

        /* total input is now > XXH3_INTERNALBUFFER_SIZE */
#define XXH3_INTERNALBUFFER_STRIPES (XXH3_INTERNALBUFFER_SIZE / XXH_STRIPE_LEN)
        XXH_STATIC_ASSERT(XXH3_INTERNALBUFFER_SIZE % XXH_STRIPE_LEN == 0);

        /* If the internal buffer is partially filled, complete it with input
         * and consume it first. */
        if (state->bufferedSize) {
            size_t const loadSize = XXH3_INTERNALBUFFER_SIZE - state->bufferedSize;
            XXH_memcpy(state->buffer + state->bufferedSize, input, loadSize);
            input += loadSize;
            XXH3_consumeStripes(acc,
                               &state->nbStripesSoFar, state->nbStripesPerBlock,
                                state->buffer, XXH3_INTERNALBUFFER_STRIPES,
                                secret, state->secretLimit,
                                f_acc, f_scramble);
            state->bufferedSize = 0;
        }
        XXH_ASSERT(input < bEnd);
        /* Consume remaining input directly (no buffering), but keep at
         * least one byte so the digest has a final stripe to work with. */
        if (bEnd - input > XXH3_INTERNALBUFFER_SIZE) {
            size_t nbStripes = (size_t)(bEnd - 1 - input) / XXH_STRIPE_LEN;
            input = XXH3_consumeStripes(acc,
                                       &state->nbStripesSoFar, state->nbStripesPerBlock,
                                        input, nbStripes,
                                        secret, state->secretLimit,
                                        f_acc, f_scramble);
            /* Stash the last consumed stripe at the end of the buffer;
             * the digest path may need it as "previous stripe" material. */
            XXH_memcpy(state->buffer + sizeof(state->buffer) - XXH_STRIPE_LEN, input - XXH_STRIPE_LEN, XXH_STRIPE_LEN);

        }

        /* Buffer the remaining input (guaranteed to fit, and non-empty). */
        XXH_ASSERT(input < bEnd);
        XXH_ASSERT(bEnd - input <= XXH3_INTERNALBUFFER_SIZE);
        XXH_ASSERT(state->bufferedSize == 0);
        XXH_memcpy(state->buffer, input, (size_t)(bEnd-input));
        state->bufferedSize = (XXH32_hash_t)(bEnd-input);
#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
        /* Write the stack accumulators back into the state. */
        XXH_memcpy(state->acc, acc, sizeof(acc));
#endif
    }

    return XXH_OK;
}
6534
6535
6536 XXH_PUBLIC_API XXH_errorcode
6537 XXH3_64bits_update(XXH_NOESCAPE XXH3_state_t* state, XXH_NOESCAPE const void* input, size_t len)
6538 {
6539 return XXH3_update(state, (const xxh_u8*)input, len,
6540 XXH3_accumulate, XXH3_scrambleAcc);
6541 }
6542
6543
/*!
 * @internal
 * @brief Final accumulation step for long (streamed) inputs: consumes any
 * whole stripes still sitting in the buffer, then accumulates the very last
 * stripe with the dedicated last-stripe secret offset.
 *
 * Works on a local copy of the state (acc, nbStripesSoFar) so the state
 * itself is not modified and digest can be called repeatedly.
 */
XXH_FORCE_INLINE void
XXH3_digest_long (XXH64_hash_t* acc,
                  const XXH3_state_t* state,
                  const unsigned char* secret)
{
    xxh_u8 lastStripe[XXH_STRIPE_LEN];
    const xxh_u8* lastStripePtr;

    /* Copy accumulators out of the state: digest must be non-destructive. */
    XXH_memcpy(acc, state->acc, sizeof(state->acc));
    if (state->bufferedSize >= XXH_STRIPE_LEN) {
        /* Consume remaining full stripes, keeping the last stripe for the
         * dedicated last-accumulation below (hence the "- 1"). */
        size_t const nbStripes = (state->bufferedSize - 1) / XXH_STRIPE_LEN;
        size_t nbStripesSoFar = state->nbStripesSoFar;
        XXH3_consumeStripes(acc,
                           &nbStripesSoFar, state->nbStripesPerBlock,
                            state->buffer, nbStripes,
                            secret, state->secretLimit,
                            XXH3_accumulate, XXH3_scrambleAcc);
        lastStripePtr = state->buffer + state->bufferedSize - XXH_STRIPE_LEN;
    } else {
        /* Buffer holds less than one stripe: rebuild the last stripe from
         * the tail of the previous buffer contents + current buffer. */
        size_t const catchupSize = XXH_STRIPE_LEN - state->bufferedSize;
        XXH_ASSERT(state->bufferedSize > 0);  /* there is always some input buffered */
        XXH_memcpy(lastStripe, state->buffer + sizeof(state->buffer) - catchupSize, catchupSize);
        XXH_memcpy(lastStripe + catchupSize, state->buffer, state->bufferedSize);
        lastStripePtr = lastStripe;
    }
    /* Last stripe uses a dedicated secret offset. */
    XXH3_accumulate_512(acc,
                        lastStripePtr,
                        secret + state->secretLimit - XXH_SECRET_LASTACC_START);
}
6580
6581
/*! Produces the current 64-bit digest of a streaming state, without
 *  modifying the state. Long inputs go through the accumulator pipeline;
 *  inputs <= XXH3_MIDSIZE_MAX are entirely buffered and re-hashed with the
 *  one-shot functions for bit-identical results. */
XXH_PUBLIC_API XXH64_hash_t XXH3_64bits_digest (XXH_NOESCAPE const XXH3_state_t* state)
{
    const unsigned char* const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
    if (state->totalLen > XXH3_MIDSIZE_MAX) {
        XXH_ALIGN(XXH_ACC_ALIGN) XXH64_hash_t acc[XXH_ACC_NB];
        XXH3_digest_long(acc, state, secret);
        return XXH3_finalizeLong_64b(acc, secret, (xxh_u64)state->totalLen);
    }
    /* Short input: the whole message is still in the buffer. */
    if (state->useSeed)
        return XXH3_64bits_withSeed(state->buffer, (size_t)state->totalLen, state->seed);
    return XXH3_64bits_withSecret(state->buffer, (size_t)(state->totalLen),
                                  secret, state->secretLimit + XXH_STRIPE_LEN);
}
6596 #endif
6597
6598
6599
6600
6601
6602
6603
6604
6605
6606
6607
6608
6609
6610
6611
6612
6613
6614
6615
/*!
 * @internal
 * @brief 128-bit hash for inputs of 1..3 bytes.
 *
 * Packs the available bytes plus the length into a 32-bit word (so every
 * (length, content) pair yields a distinct word), derives a rotated/swapped
 * second word, xors each with a seed-adjusted secret bitflip, and avalanches.
 */
XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_1to3_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(1 <= len && len <= 3);
    XXH_ASSERT(secret != NULL);
    /* c1/c2/c3 cover first, middle and last byte; for len==1 they alias. */
    {   xxh_u8 const c1 = input[0];
        xxh_u8 const c2 = input[len >> 1];
        xxh_u8 const c3 = input[len - 1];
        xxh_u32 const combinedl = ((xxh_u32)c1 <<16) | ((xxh_u32)c2 << 24)
                                | ((xxh_u32)c3 << 0) | ((xxh_u32)len << 8);
        xxh_u32 const combinedh = XXH_rotl32(XXH_swap32(combinedl), 13);
        xxh_u64 const bitflipl = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
        xxh_u64 const bitfliph = (XXH_readLE32(secret+8) ^ XXH_readLE32(secret+12)) - seed;
        xxh_u64 const keyed_lo = (xxh_u64)combinedl ^ bitflipl;
        xxh_u64 const keyed_hi = (xxh_u64)combinedh ^ bitfliph;
        XXH128_hash_t h128;
        h128.low64  = XXH64_avalanche(keyed_lo);
        h128.high64 = XXH64_avalanche(keyed_hi);
        return h128;
    }
}
6644
/*!
 * @internal
 * @brief 128-bit hash for inputs of 4..8 bytes.
 *
 * Reads two (possibly overlapping) 32-bit lanes, combines them into a
 * 64-bit word, applies a seeded secret bitflip, then a 64x64->128 multiply
 * followed by cross-mixing and avalanche of both halves.
 */
XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_4to8_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(secret != NULL);
    XXH_ASSERT(4 <= len && len <= 8);
    /* Fold a byte-swapped copy of the low seed half into the high half. */
    seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
    {   xxh_u32 const input_lo = XXH_readLE32(input);
        xxh_u32 const input_hi = XXH_readLE32(input + len - 4);
        xxh_u64 const input_64 = input_lo + ((xxh_u64)input_hi << 32);
        xxh_u64 const bitflip = (XXH_readLE64(secret+16) ^ XXH_readLE64(secret+24)) + seed;
        xxh_u64 const keyed = input_64 ^ bitflip;

        /* Shift len so it feeds higher bits of the multiplier. */
        XXH128_hash_t m128 = XXH_mult64to128(keyed, XXH_PRIME64_1 + (len << 2));

        m128.high64 += (m128.low64 << 1);
        m128.low64  ^= (m128.high64 >> 3);

        m128.low64   = XXH_xorshift64(m128.low64, 35);
        m128.low64  *= PRIME_MX2;
        m128.low64   = XXH_xorshift64(m128.low64, 28);
        m128.high64  = XXH3_avalanche(m128.high64);
        return m128;
    }
}
6671
/*!
 * @internal
 * @brief 128-bit hash for inputs of 9..16 bytes.
 *
 * Reads two (overlapping) 64-bit lanes, mixes them through a 128-bit
 * multiply, folds the high lane back in, then finishes with a second
 * multiply and avalanche of both halves.
 */
XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_9to16_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
{
    XXH_ASSERT(input != NULL);
    XXH_ASSERT(secret != NULL);
    XXH_ASSERT(9 <= len && len <= 16);
    {   xxh_u64 const bitflipl = (XXH_readLE64(secret+32) ^ XXH_readLE64(secret+40)) - seed;
        xxh_u64 const bitfliph = (XXH_readLE64(secret+48) ^ XXH_readLE64(secret+56)) + seed;
        xxh_u64 const input_lo = XXH_readLE64(input);
        xxh_u64       input_hi = XXH_readLE64(input + len - 8);
        XXH128_hash_t m128 = XXH_mult64to128(input_lo ^ input_hi ^ bitflipl, XXH_PRIME64_1);
        /* Inject len into high bits of low64 (len-1 fits in 4 bits here). */
        m128.low64 += (xxh_u64)(len - 1) << 54;
        input_hi   ^= bitfliph;
        /* Add input_hi * (XXH_PRIME32_2 + 1) to m128.high64.
         * The two branches below compute the same value; the split exists
         * because 32-bit targets prefer 32x32->64 multiplies. */
        if (sizeof(void *) < sizeof(xxh_u64)) { /* 32-bit target */
            /* Decompose: hi_bits(input_hi) + lo_bits(input_hi) * PRIME32_2
             * (two 32-bit-friendly operations). */
            m128.high64 += (input_hi & 0xFFFFFFFF00000000ULL) + XXH_mult32to64((xxh_u32)input_hi, XXH_PRIME32_2);
        } else {
            /* 64-bit target: input_hi + lo_bits(input_hi) * (PRIME32_2 - 1),
             * algebraically identical to the branch above. */
            m128.high64 += input_hi + XXH_mult32to64((xxh_u32)input_hi, XXH_PRIME32_2 - 1);
        }
        /* Cross-pollinate the halves before the final multiply. */
        m128.low64  ^= XXH_swap64(m128.high64);

        {   /* 128x64 multiply: h128 = m128 * XXH_PRIME64_2 (mod 2^128) */
            XXH128_hash_t h128 = XXH_mult64to128(m128.low64, XXH_PRIME64_2);
            h128.high64 += m128.high64 * XXH_PRIME64_2;

            h128.low64   = XXH3_avalanche(h128.low64);
            h128.high64  = XXH3_avalanche(h128.high64);
            return h128;
    }   }
}
6743
6744
6745
6746
6747 XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
6748 XXH3_len_0to16_128b(const xxh_u8* input, size_t len, const xxh_u8* secret, XXH64_hash_t seed)
6749 {
6750 XXH_ASSERT(len <= 16);
6751 { if (len > 8) return XXH3_len_9to16_128b(input, len, secret, seed);
6752 if (len >= 4) return XXH3_len_4to8_128b(input, len, secret, seed);
6753 if (len) return XXH3_len_1to3_128b(input, len, secret, seed);
6754 { XXH128_hash_t h128;
6755 xxh_u64 const bitflipl = XXH_readLE64(secret+64) ^ XXH_readLE64(secret+72);
6756 xxh_u64 const bitfliph = XXH_readLE64(secret+80) ^ XXH_readLE64(secret+88);
6757 h128.low64 = XXH64_avalanche(seed ^ bitflipl);
6758 h128.high64 = XXH64_avalanche( seed ^ bitfliph);
6759 return h128;
6760 } }
6761 }
6762
6763
6764
6765
6766 XXH_FORCE_INLINE XXH128_hash_t
6767 XXH128_mix32B(XXH128_hash_t acc, const xxh_u8* input_1, const xxh_u8* input_2,
6768 const xxh_u8* secret, XXH64_hash_t seed)
6769 {
6770 acc.low64 += XXH3_mix16B (input_1, secret+0, seed);
6771 acc.low64 ^= XXH_readLE64(input_2) + XXH_readLE64(input_2 + 8);
6772 acc.high64 += XXH3_mix16B (input_2, secret+16, seed);
6773 acc.high64 ^= XXH_readLE64(input_1) + XXH_readLE64(input_1 + 8);
6774 return acc;
6775 }
6776
6777
/*!
 * @internal
 * @brief 128-bit hash for inputs of 17..128 bytes.
 *
 * Mixes up to four pairs of 16-byte lanes taken symmetrically from the
 * start and end of the input, then combines and avalanches both halves.
 */
XXH_FORCE_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_17to128_128b(const xxh_u8* XXH_RESTRICT input, size_t len,
                      const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                      XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(16 < len && len <= 128);

    {   XXH128_hash_t acc;
        acc.low64 = len * XXH_PRIME64_1;
        acc.high64 = 0;

#if XXH_SIZE_OPT >= 1
        {
            /* Smaller, looped version of the unrolled chain below. */
            unsigned int i = (unsigned int)(len - 1) / 32;
            do {
                acc = XXH128_mix32B(acc, input+16*i, input+len-16*(i+1), secret+32*i, seed);
            } while (i-- != 0);
        }
#else
        /* Unrolled: outermost lane pairs first, innermost pair last. */
        if (len > 32) {
            if (len > 64) {
                if (len > 96) {
                    acc = XXH128_mix32B(acc, input+48, input+len-64, secret+96, seed);
                }
                acc = XXH128_mix32B(acc, input+32, input+len-48, secret+64, seed);
            }
            acc = XXH128_mix32B(acc, input+16, input+len-32, secret+32, seed);
        }
        acc = XXH128_mix32B(acc, input, input+len-16, secret, seed);
#endif
        {   XXH128_hash_t h128;
            h128.low64  = acc.low64 + acc.high64;
            h128.high64 = (acc.low64    * XXH_PRIME64_1)
                        + (acc.high64   * XXH_PRIME64_4)
                        + ((len - seed) * XXH_PRIME64_2);
            h128.low64  = XXH3_avalanche(h128.low64);
            h128.high64 = (XXH64_hash_t)0 - XXH3_avalanche(h128.high64);
            return h128;
        }
    }
}
6821
/*!
 * @internal
 * @brief 128-bit hash for inputs of 129..XXH3_MIDSIZE_MAX (240) bytes.
 *
 * Processes the first 128 bytes in 32-byte chunks, avalanches, then the
 * remaining chunks with a shifted secret offset, and finally the last
 * 32 bytes with a negated seed before combining both halves.
 */
XXH_NO_INLINE XXH_PUREF XXH128_hash_t
XXH3_len_129to240_128b(const xxh_u8* XXH_RESTRICT input, size_t len,
                       const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                       XXH64_hash_t seed)
{
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN); (void)secretSize;
    XXH_ASSERT(128 < len && len <= XXH3_MIDSIZE_MAX);

    {   XXH128_hash_t acc;
        unsigned i;
        acc.low64 = len * XXH_PRIME64_1;
        acc.high64 = 0;

        /* First 128 bytes: four 32-byte mixes, secret advancing in lockstep. */
        for (i = 32; i < 160; i += 32) {
            acc = XXH128_mix32B(acc,
                                input  + i - 32,
                                input  + i - 16,
                                secret + i - 32,
                                seed);
        }
        acc.low64 = XXH3_avalanche(acc.low64);
        acc.high64 = XXH3_avalanche(acc.high64);

        /* Remaining full 32-byte chunks use a re-based secret offset. */
        for (i=160; i <= len; i += 32) {
            acc = XXH128_mix32B(acc,
                                input + i - 32,
                                input + i - 16,
                                secret + XXH3_MIDSIZE_STARTOFFSET + i - 160,
                                seed);
        }
        /* Last 32 bytes (may overlap previous chunks): note the swapped
         * lane order and the negated seed. */
        acc = XXH128_mix32B(acc,
                            input + len - 16,
                            input + len - 32,
                            secret + XXH3_SECRET_SIZE_MIN - XXH3_MIDSIZE_LASTOFFSET - 16,
                            (XXH64_hash_t)0 - seed);

        {   XXH128_hash_t h128;
            h128.low64  = acc.low64 + acc.high64;
            h128.high64 = (acc.low64    * XXH_PRIME64_1)
                        + (acc.high64   * XXH_PRIME64_4)
                        + ((len - seed) * XXH_PRIME64_2);
            h128.low64  = XXH3_avalanche(h128.low64);
            h128.high64 = (XXH64_hash_t)0 - XXH3_avalanche(h128.high64);
            return h128;
        }
    }
}
6879
/*!
 * @internal
 * @brief Merges the 64-byte accumulator array into a final 128-bit hash.
 *
 * low64 reuses the 64-bit finalization; high64 merges the same accumulators
 * against the tail of the secret with a different starting value.
 */
static XXH_PUREF XXH128_hash_t
XXH3_finalizeLong_128b(const xxh_u64* XXH_RESTRICT acc, const xxh_u8* XXH_RESTRICT secret, size_t secretSize, xxh_u64 len)
{
    XXH128_hash_t h128;
    h128.low64  = XXH3_finalizeLong_64b(acc, secret, len);
    h128.high64 = XXH3_mergeAccs(acc, secret + secretSize
                                      - XXH_STRIPE_LEN - XXH_SECRET_MERGEACCS_START,
                                 ~(len * XXH_PRIME64_2));
    return h128;
}
6890
/*!
 * @internal
 * @brief Long-input (> XXH3_MIDSIZE_MAX) 128-bit hash: runs the main
 * accumulator loop, then merges the accumulators into the final hash.
 */
XXH_FORCE_INLINE XXH128_hash_t
XXH3_hashLong_128b_internal(const void* XXH_RESTRICT input, size_t len,
                            const xxh_u8* XXH_RESTRICT secret, size_t secretSize,
                            XXH3_f_accumulate f_acc,
                            XXH3_f_scrambleAcc f_scramble)
{
    XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;

    XXH3_hashLong_internal_loop(acc, (const xxh_u8*)input, len, secret, secretSize, f_acc, f_scramble);

    /* converge into final hash */
    XXH_STATIC_ASSERT(sizeof(acc) == 64);
    XXH_ASSERT(secretSize >= sizeof(acc) + XXH_SECRET_MERGEACCS_START);
    return XXH3_finalizeLong_128b(acc, secret, secretSize, (xxh_u64)len);
}
6906
6907
6908
6909
/*!
 * @internal
 * @brief Long-input 128-bit hash, default secret, no seed.
 * Kept XXH_NO_INLINE: it's rarely hot and inlining bloats callers.
 * The unused parameters keep the XXH3_hashLong128_f signature.
 */
XXH_NO_INLINE XXH_PUREF XXH128_hash_t
XXH3_hashLong_128b_default(const void* XXH_RESTRICT input, size_t len,
                           XXH64_hash_t seed64,
                           const void* XXH_RESTRICT secret, size_t secretLen)
{
    (void)seed64; (void)secret; (void)secretLen;
    return XXH3_hashLong_128b_internal(input, len, XXH3_kSecret, sizeof(XXH3_kSecret),
                                       XXH3_accumulate, XXH3_scrambleAcc);
}
6919
6920
6921
6922
6923
6924
6925
6926
/*!
 * @internal
 * @brief Long-input 128-bit hash with a caller-supplied secret.
 * seed64 is ignored: secret-based hashing does not mix a seed.
 */
XXH3_WITH_SECRET_INLINE XXH128_hash_t
XXH3_hashLong_128b_withSecret(const void* XXH_RESTRICT input, size_t len,
                              XXH64_hash_t seed64,
                              const void* XXH_RESTRICT secret, size_t secretLen)
{
    (void)seed64;
    return XXH3_hashLong_128b_internal(input, len, (const xxh_u8*)secret, secretLen,
                                       XXH3_accumulate, XXH3_scrambleAcc);
}
6936
/*!
 * @internal
 * @brief Long-input 128-bit hash with a seed: generates a seeded custom
 * secret on the stack, unless seed==0 (then the default secret is used,
 * matching the unseeded variant bit-for-bit).
 */
XXH_FORCE_INLINE XXH128_hash_t
XXH3_hashLong_128b_withSeed_internal(const void* XXH_RESTRICT input, size_t len,
                                     XXH64_hash_t seed64,
                                     XXH3_f_accumulate f_acc,
                                     XXH3_f_scrambleAcc f_scramble,
                                     XXH3_f_initCustomSecret f_initSec)
{
    if (seed64 == 0)
        return XXH3_hashLong_128b_internal(input, len,
                                           XXH3_kSecret, sizeof(XXH3_kSecret),
                                           f_acc, f_scramble);
    {   XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
        f_initSec(secret, seed64);
        return XXH3_hashLong_128b_internal(input, len, (const xxh_u8*)secret, sizeof(secret),
                                           f_acc, f_scramble);
    }
}
6954
6955
6956
6957
/*!
 * @internal
 * @brief Long-input 128-bit hash with seed; the secret parameters only
 * exist to satisfy the XXH3_hashLong128_f signature and are ignored.
 */
XXH_NO_INLINE XXH128_hash_t
XXH3_hashLong_128b_withSeed(const void* input, size_t len,
                            XXH64_hash_t seed64, const void* XXH_RESTRICT secret, size_t secretLen)
{
    (void)secret; (void)secretLen;
    return XXH3_hashLong_128b_withSeed_internal(input, len, seed64,
                XXH3_accumulate, XXH3_scrambleAcc, XXH3_initCustomSecret);
}
6966
6967 typedef XXH128_hash_t (*XXH3_hashLong128_f)(const void* XXH_RESTRICT, size_t,
6968 XXH64_hash_t, const void* XXH_RESTRICT, size_t);
6969
/*!
 * @internal
 * @brief Central 128-bit dispatcher: picks the size-specialized kernel,
 * delegating inputs > XXH3_MIDSIZE_MAX to @p f_hl128.
 *
 * Passing the long-hash path as a function pointer lets the compiler drop
 * unused variants when this is force-inlined into the public entry points.
 */
XXH_FORCE_INLINE XXH128_hash_t
XXH3_128bits_internal(const void* input, size_t len,
                      XXH64_hash_t seed64, const void* XXH_RESTRICT secret, size_t secretLen,
                      XXH3_hashLong128_f f_hl128)
{
    XXH_ASSERT(secretLen >= XXH3_SECRET_SIZE_MIN);
    /* NOTE(review): short-input kernels read the secret at fixed offsets
     * only, which is why secretLen is not forwarded to them. */
    if (len <= 16)
        return XXH3_len_0to16_128b((const xxh_u8*)input, len, (const xxh_u8*)secret, seed64);
    if (len <= 128)
        return XXH3_len_17to128_128b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
    if (len <= XXH3_MIDSIZE_MAX)
        return XXH3_len_129to240_128b((const xxh_u8*)input, len, (const xxh_u8*)secret, secretLen, seed64);
    return f_hl128(input, len, seed64, secret, secretLen);
}
6990
6991
6992
6993
6994
/*! One-shot XXH3-128 with default secret and no seed. */
XXH_PUBLIC_API XXH128_hash_t XXH3_128bits(XXH_NOESCAPE const void* input, size_t len)
{
    return XXH3_128bits_internal(input, len, 0,
                                 XXH3_kSecret, sizeof(XXH3_kSecret),
                                 XXH3_hashLong_128b_default);
}
7001
7002
/*! One-shot XXH3-128 with a caller-supplied secret
 *  (secretSize must be >= XXH3_SECRET_SIZE_MIN). */
XXH_PUBLIC_API XXH128_hash_t
XXH3_128bits_withSecret(XXH_NOESCAPE const void* input, size_t len, XXH_NOESCAPE const void* secret, size_t secretSize)
{
    return XXH3_128bits_internal(input, len, 0,
                                 (const xxh_u8*)secret, secretSize,
                                 XXH3_hashLong_128b_withSecret);
}
7010
7011
/*! One-shot XXH3-128 with a 64-bit seed (seed 0 == unseeded variant). */
XXH_PUBLIC_API XXH128_hash_t
XXH3_128bits_withSeed(XXH_NOESCAPE const void* input, size_t len, XXH64_hash_t seed)
{
    return XXH3_128bits_internal(input, len, seed,
                                 XXH3_kSecret, sizeof(XXH3_kSecret),
                                 XXH3_hashLong_128b_withSeed);
}
7019
7020
/*! One-shot XXH3-128 combining secret and seed: short/mid inputs use the
 *  seed with the default secret; long inputs use the custom secret
 *  (where the seed is ignored by design). */
XXH_PUBLIC_API XXH128_hash_t
XXH3_128bits_withSecretandSeed(XXH_NOESCAPE const void* input, size_t len, XXH_NOESCAPE const void* secret, size_t secretSize, XXH64_hash_t seed)
{
    if (len <= XXH3_MIDSIZE_MAX)
        return XXH3_128bits_internal(input, len, seed, XXH3_kSecret, sizeof(XXH3_kSecret), NULL);
    return XXH3_hashLong_128b_withSecret(input, len, seed, secret, secretSize);
}
7028
7029
7030 XXH_PUBLIC_API XXH128_hash_t
7031 XXH128(XXH_NOESCAPE const void* input, size_t len, XXH64_hash_t seed)
7032 {
7033 return XXH3_128bits_withSeed(input, len, seed);
7034 }
7035
7036
7037
7038 #ifndef XXH_NO_STREAM
7039
7040
7041
7042
7043
7044
/*! 128-bit streaming reset; identical state layout to the 64-bit variant,
 *  so it simply delegates. */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_reset(XXH_NOESCAPE XXH3_state_t* statePtr)
{
    return XXH3_64bits_reset(statePtr);
}
7050
7051
/*! 128-bit streaming reset with external secret; delegates to the 64-bit
 *  variant (shared state). */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_reset_withSecret(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize)
{
    return XXH3_64bits_reset_withSecret(statePtr, secret, secretSize);
}
7057
7058
/*! 128-bit streaming reset with seed; delegates to the 64-bit variant
 *  (shared state). */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_reset_withSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH64_hash_t seed)
{
    return XXH3_64bits_reset_withSeed(statePtr, seed);
}
7064
7065
/*! 128-bit streaming reset with secret and seed; delegates to the 64-bit
 *  variant (shared state). */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_reset_withSecretandSeed(XXH_NOESCAPE XXH3_state_t* statePtr, XXH_NOESCAPE const void* secret, size_t secretSize, XXH64_hash_t seed)
{
    return XXH3_64bits_reset_withSecretandSeed(statePtr, secret, secretSize, seed);
}
7071
7072
/*! 128-bit streaming update; the accumulation pipeline is shared with the
 *  64-bit variant, only the digest step differs. */
XXH_PUBLIC_API XXH_errorcode
XXH3_128bits_update(XXH_NOESCAPE XXH3_state_t* state, XXH_NOESCAPE const void* input, size_t len)
{
    return XXH3_64bits_update(state, input, len);
}
7078
7079
/*! Produces the current 128-bit digest of a streaming state, without
 *  modifying the state. Mirrors XXH3_64bits_digest(): long inputs merge the
 *  accumulators, shorter (fully-buffered) inputs re-run the one-shot path. */
XXH_PUBLIC_API XXH128_hash_t XXH3_128bits_digest (XXH_NOESCAPE const XXH3_state_t* state)
{
    const unsigned char* const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
    if (state->totalLen > XXH3_MIDSIZE_MAX) {
        XXH_ALIGN(XXH_ACC_ALIGN) XXH64_hash_t acc[XXH_ACC_NB];
        XXH3_digest_long(acc, state, secret);
        XXH_ASSERT(state->secretLimit + XXH_STRIPE_LEN >= sizeof(acc) + XXH_SECRET_MERGEACCS_START);
        return XXH3_finalizeLong_128b(acc, secret, state->secretLimit + XXH_STRIPE_LEN, (xxh_u64)state->totalLen);
    }
    /* Short input: the whole message is still in the buffer. */
    if (state->useSeed)
        return XXH3_128bits_withSeed(state->buffer, (size_t)state->totalLen, state->seed);
    return XXH3_128bits_withSecret(state->buffer, (size_t)(state->totalLen),
                                   secret, state->secretLimit + XXH_STRIPE_LEN);
}
7095 #endif
7096
7097
7098 #include <string.h> /* memcmp, memcpy */
7099
7100
7101
7102 XXH_PUBLIC_API int XXH128_isEqual(XXH128_hash_t h1, XXH128_hash_t h2)
7103 {
7104
7105 return !(memcmp(&h1, &h2, sizeof(h1)));
7106 }
7107
7108
7109
7110
7111
7112
7113 XXH_PUBLIC_API int XXH128_cmp(XXH_NOESCAPE const void* h128_1, XXH_NOESCAPE const void* h128_2)
7114 {
7115 XXH128_hash_t const h1 = *(const XXH128_hash_t*)h128_1;
7116 XXH128_hash_t const h2 = *(const XXH128_hash_t*)h128_2;
7117 int const hcmp = (h1.high64 > h2.high64) - (h2.high64 > h1.high64);
7118
7119 if (hcmp) return hcmp;
7120 return (h1.low64 > h2.low64) - (h2.low64 > h1.low64);
7121 }
7122
7123
7124
7125
/*! Serializes a 128-bit hash into canonical (big-endian) form:
 *  high64 first, then low64, each stored big-endian. */
XXH_PUBLIC_API void
XXH128_canonicalFromHash(XXH_NOESCAPE XXH128_canonical_t* dst, XXH128_hash_t hash)
{
    XXH_STATIC_ASSERT(sizeof(XXH128_canonical_t) == sizeof(XXH128_hash_t));
    /* On little-endian hosts, byte-swap so the memcpy emits big-endian. */
    if (XXH_CPU_LITTLE_ENDIAN) {
        hash.high64 = XXH_swap64(hash.high64);
        hash.low64  = XXH_swap64(hash.low64);
    }
    XXH_memcpy(dst, &hash.high64, sizeof(hash.high64));
    XXH_memcpy((char*)dst + sizeof(hash.high64), &hash.low64, sizeof(hash.low64));
}
7137
7138
/*! Deserializes a canonical (big-endian) 128-bit hash back into native
 *  XXH128_hash_t form. Inverse of XXH128_canonicalFromHash(). */
XXH_PUBLIC_API XXH128_hash_t
XXH128_hashFromCanonical(XXH_NOESCAPE const XXH128_canonical_t* src)
{
    XXH128_hash_t h;
    h.high64 = XXH_readBE64(src);
    h.low64  = XXH_readBE64(src->digest + 8);
    return h;
}
7147
7148
7149
7150
7151
7152
7153
/* Minimum of two values. NOTE: evaluates each argument twice — do not pass
 * expressions with side effects. */
#define XXH_MIN(x, y) (((x) > (y)) ? (y) : (x))
7155
/*! XORs a 128-bit hash into 16 bytes at @p dst, in little-endian lanes.
 *  Used by XXH3_generateSecret() to scramble the seed-filled buffer. */
XXH_FORCE_INLINE void XXH3_combine16(void* dst, XXH128_hash_t h128)
{
    XXH_writeLE64( dst, XXH_readLE64(dst) ^ h128.low64 );
    XXH_writeLE64( (char*)dst+8, XXH_readLE64((char*)dst+8) ^ h128.high64 );
}
7161
7162
/*!
 * Derives a high-entropy secret of @p secretSize bytes from arbitrary
 * user content (@p customSeed, @p customSeedSize).
 *
 * Procedure: tile the seed material across the buffer, then scramble every
 * 16-byte segment with XXH128 keyed by the segment index, and finally the
 * last 16 bytes (which may overlap the last segment) once more.
 *
 * @return XXH_OK, or XXH_ERROR on invalid arguments
 *         (checked via assertions instead when XXH_DEBUGLEVEL >= 1).
 */
XXH_PUBLIC_API XXH_errorcode
XXH3_generateSecret(XXH_NOESCAPE void* secretBuffer, size_t secretSize, XXH_NOESCAPE const void* customSeed, size_t customSeedSize)
{
#if (XXH_DEBUGLEVEL >= 1)
    XXH_ASSERT(secretBuffer != NULL);
    XXH_ASSERT(secretSize >= XXH3_SECRET_SIZE_MIN);
#else
    /* production mode: invalid arguments produce an error, not a crash */
    if (secretBuffer == NULL) return XXH_ERROR;
    if (secretSize < XXH3_SECRET_SIZE_MIN) return XXH_ERROR;
#endif

    /* No seed material provided: fall back to the built-in default secret. */
    if (customSeedSize == 0) {
        customSeed = XXH3_kSecret;
        customSeedSize = XXH_SECRET_DEFAULT_SIZE;
    }
#if (XXH_DEBUGLEVEL >= 1)
    XXH_ASSERT(customSeed != NULL);
#else
    if (customSeed == NULL) return XXH_ERROR;
#endif

    /* Fill secretBuffer by repeating the seed material. */
    {   size_t pos = 0;
        while (pos < secretSize) {
            size_t const toCopy = XXH_MIN((secretSize - pos), customSeedSize);
            memcpy((char*)secretBuffer + pos, customSeed, toCopy);
            pos += toCopy;
    }   }

    /* Scramble each 16-byte segment with an index-keyed XXH128. */
    {   size_t const nbSeg16 = secretSize / 16;
        size_t n;
        XXH128_canonical_t scrambler;
        XXH128_canonicalFromHash(&scrambler, XXH128(customSeed, customSeedSize, 0));
        for (n=0; n<nbSeg16; n++) {
            XXH128_hash_t const h128 = XXH128(&scrambler, sizeof(scrambler), n);
            XXH3_combine16((char*)secretBuffer + n*16, h128);
        }
        /* Scramble the final 16 bytes (covers a non-multiple-of-16 tail). */
        XXH3_combine16((char*)secretBuffer + secretSize - 16, XXH128_hashFromCanonical(&scrambler));
    }
    return XXH_OK;
}
7206
7207
/*! Writes the XXH_SECRET_DEFAULT_SIZE-byte secret derived from @p seed into
 *  @p secretBuffer — the same secret the seeded hash variants use
 *  internally. The buffer must hold at least XXH_SECRET_DEFAULT_SIZE bytes. */
XXH_PUBLIC_API void
XXH3_generateSecret_fromSeed(XXH_NOESCAPE void* secretBuffer, XXH64_hash_t seed)
{
    /* Generate into an aligned local first; caller's buffer may be unaligned. */
    XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
    XXH3_initCustomSecret(secret, seed);
    XXH_ASSERT(secretBuffer != NULL);
    memcpy(secretBuffer, secret, XXH_SECRET_DEFAULT_SIZE);
}
7216
7217
7218
7219
7220 #if XXH_VECTOR == XXH_AVX2 \
7221 && defined(__GNUC__) && !defined(__clang__) \
7222 && defined(__OPTIMIZE__) && XXH_SIZE_OPT <= 0
7223 # pragma GCC pop_options
7224 #endif
7225
7226 #endif
7227
7228 #endif
7229
7230
7231
7232
7233 #endif
7234
7235
7236 #if defined (__cplusplus)
7237 }
7238 #endif