#ifndef XSIMD_BATCH_HPP
#define XSIMD_BATCH_HPP

#include <cassert>
#include <complex>

#include "../config/xsimd_arch.hpp"
#include "../memory/xsimd_alignment.hpp"
#include "./xsimd_utils.hpp"

namespace xsimd
{
    template <class T, class A = default_arch>
    class batch;

    namespace types
    {
        template <class T, class A>
        struct integral_only_operators
        {
            XSIMD_INLINE batch<T, A>& operator%=(batch<T, A> const& other) noexcept;
            XSIMD_INLINE batch<T, A>& operator>>=(int32_t other) noexcept;
            XSIMD_INLINE batch<T, A>& operator>>=(batch<T, A> const& other) noexcept;
            XSIMD_INLINE batch<T, A>& operator<<=(int32_t other) noexcept;
            XSIMD_INLINE batch<T, A>& operator<<=(batch<T, A> const& other) noexcept;

            friend XSIMD_INLINE batch<T, A> operator%(batch<T, A> const& self, batch<T, A> const& other) noexcept
            {
                return batch<T, A>(self) %= other;
            }

            friend XSIMD_INLINE batch<T, A> operator>>(batch<T, A> const& self, batch<T, A> const& other) noexcept
            {
                return batch<T, A>(self) >>= other;
            }

            friend XSIMD_INLINE batch<T, A> operator<<(batch<T, A> const& self, batch<T, A> const& other) noexcept
            {
                return batch<T, A>(self) <<= other;
            }

            friend XSIMD_INLINE batch<T, A> operator>>(batch<T, A> const& self, int32_t other) noexcept
            {
                return batch<T, A>(self) >>= other;
            }

            friend XSIMD_INLINE batch<T, A> operator<<(batch<T, A> const& self, int32_t other) noexcept
            {
                return batch<T, A>(self) <<= other;
            }
        };

        template <class A>
        struct integral_only_operators<float, A>
        {
        };

        template <class A>
        struct integral_only_operators<double, A>
        {
        };
    }

    namespace details
    {
        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> eq(batch<T, A> const& self, batch<T, A> const& other) noexcept;

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> neq(batch<T, A> const& self, batch<T, A> const& other) noexcept;

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> ge(batch<T, A> const& self, batch<T, A> const& other) noexcept;

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> le(batch<T, A> const& self, batch<T, A> const& other) noexcept;

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> gt(batch<T, A> const& self, batch<T, A> const& other) noexcept;

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> lt(batch<T, A> const& self, batch<T, A> const& other) noexcept;
    }

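    /**
     * @brief Batch of integer or floating point values.
     *
     * Wraps one SIMD register of the target architecture and exposes it with
     * value semantics and the usual arithmetic, bitwise and comparison operators.
     *
     * @tparam T the scalar type of the elements.
     * @tparam A the architecture this batch is tied to.
     *
     * Informal usage sketch (not part of the interface; assumes a supported
     * SIMD architecture is selected as xsimd::default_arch):
     *
     *     float in[xsimd::batch<float>::size] = {};
     *     auto v = xsimd::batch<float>::load_unaligned(in); // read one register worth of data
     *     v += xsimd::batch<float>(1.0f);                   // element-wise add of a broadcast constant
     *     v.store_unaligned(in);                            // write the result back
     */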
    template <class T, class A>
    class batch : public types::simd_register<T, A>, public types::integral_only_operators<T, A>
    {
        static_assert(!std::is_same<T, bool>::value, "use xsimd::batch_bool<T, A> instead of xsimd::batch<bool, A>");

    public:
        static constexpr std::size_t size = sizeof(types::simd_register<T, A>) / sizeof(T);

        using value_type = T;
        using arch_type = A;
        using register_type = typename types::simd_register<T, A>::register_type;
        using batch_bool_type = batch_bool<T, A>;

        XSIMD_INLINE batch() = default;
        XSIMD_INLINE batch(T val) noexcept;
        template <class... Ts>
        XSIMD_INLINE batch(T val0, T val1, Ts... vals) noexcept;
        XSIMD_INLINE explicit batch(batch_bool_type const& b) noexcept;
        XSIMD_INLINE batch(register_type reg) noexcept;

        template <class U>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch broadcast(U val) noexcept;

        template <class U>
        XSIMD_INLINE void store_aligned(U* mem) const noexcept;
        template <class U>
        XSIMD_INLINE void store_unaligned(U* mem) const noexcept;
        template <class U>
        XSIMD_INLINE void store(U* mem, aligned_mode) const noexcept;
        template <class U>
        XSIMD_INLINE void store(U* mem, unaligned_mode) const noexcept;

        template <class U>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load_aligned(U const* mem) noexcept;
        template <class U>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load_unaligned(U const* mem) noexcept;
        template <class U>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load(U const* mem, aligned_mode) noexcept;
        template <class U>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load(U const* mem, unaligned_mode) noexcept;

        template <class U, class V>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch gather(U const* src, batch<V, arch_type> const& index) noexcept;
        template <class U, class V>
        XSIMD_INLINE void scatter(U* dst, batch<V, arch_type> const& index) const noexcept;

        XSIMD_INLINE T get(std::size_t i) const noexcept;

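        // Comparison operators, defined as friends so that scalar operands can be
        // implicitly converted to batch; the actual work is forwarded to details::.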
        friend XSIMD_INLINE batch_bool<T, A> operator==(batch const& self, batch const& other) noexcept
        {
            return details::eq<T, A>(self, other);
        }
        friend XSIMD_INLINE batch_bool<T, A> operator!=(batch const& self, batch const& other) noexcept
        {
            return details::neq<T, A>(self, other);
        }
        friend XSIMD_INLINE batch_bool<T, A> operator>=(batch const& self, batch const& other) noexcept
        {
            return details::ge<T, A>(self, other);
        }
        friend XSIMD_INLINE batch_bool<T, A> operator<=(batch const& self, batch const& other) noexcept
        {
            return details::le<T, A>(self, other);
        }
        friend XSIMD_INLINE batch_bool<T, A> operator>(batch const& self, batch const& other) noexcept
        {
            return details::gt<T, A>(self, other);
        }
        friend XSIMD_INLINE batch_bool<T, A> operator<(batch const& self, batch const& other) noexcept
        {
            return details::lt<T, A>(self, other);
        }

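        // In-place update operators.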
        XSIMD_INLINE batch& operator+=(batch const& other) noexcept;
        XSIMD_INLINE batch& operator-=(batch const& other) noexcept;
        XSIMD_INLINE batch& operator*=(batch const& other) noexcept;
        XSIMD_INLINE batch& operator/=(batch const& other) noexcept;
        XSIMD_INLINE batch& operator&=(batch const& other) noexcept;
        XSIMD_INLINE batch& operator|=(batch const& other) noexcept;
        XSIMD_INLINE batch& operator^=(batch const& other) noexcept;

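        // Increment and decrement operators.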
        XSIMD_INLINE batch& operator++() noexcept;
        XSIMD_INLINE batch& operator--() noexcept;
        XSIMD_INLINE batch operator++(int) noexcept;
        XSIMD_INLINE batch operator--(int) noexcept;

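        // Unary operators.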
        XSIMD_INLINE batch_bool_type operator!() const noexcept;
        XSIMD_INLINE batch operator~() const noexcept;
        XSIMD_INLINE batch operator-() const noexcept;
        XSIMD_INLINE batch operator+() const noexcept;

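        // Arithmetic operators, defined as friends so that scalar operands on either
        // side are converted to batch; they forward to the in-place operators above.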
        friend XSIMD_INLINE batch operator+(batch const& self, batch const& other) noexcept
        {
            return batch(self) += other;
        }

        friend XSIMD_INLINE batch operator-(batch const& self, batch const& other) noexcept
        {
            return batch(self) -= other;
        }

        friend XSIMD_INLINE batch operator*(batch const& self, batch const& other) noexcept
        {
            return batch(self) *= other;
        }

        friend XSIMD_INLINE batch operator/(batch const& self, batch const& other) noexcept
        {
            return batch(self) /= other;
        }

        friend XSIMD_INLINE batch operator&(batch const& self, batch const& other) noexcept
        {
            return batch(self) &= other;
        }

        friend XSIMD_INLINE batch operator|(batch const& self, batch const& other) noexcept
        {
            return batch(self) |= other;
        }

        friend XSIMD_INLINE batch operator^(batch const& self, batch const& other) noexcept
        {
            return batch(self) ^= other;
        }

        friend XSIMD_INLINE batch operator&&(batch const& self, batch const& other) noexcept
        {
            return batch(self).logical_and(other);
        }

        friend XSIMD_INLINE batch operator||(batch const& self, batch const& other) noexcept
        {
            return batch(self).logical_or(other);
        }

    private:
        XSIMD_INLINE batch logical_and(batch const& other) const noexcept;
        XSIMD_INLINE batch logical_or(batch const& other) const noexcept;
    };

    template <class T, class A>
    constexpr std::size_t batch<T, A>::size;

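    /**
     * @brief Batch of boolean values.
     *
     * Holds the result of element-wise comparisons on a batch<T, A>: one boolean
     * flag per element, stored in the architecture's bool/mask register type.
     *
     * @tparam T the scalar type of the batch this mask applies to.
     * @tparam A the architecture this batch is tied to.
     */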
    template <class T, class A = default_arch>
    class batch_bool : public types::get_bool_simd_register_t<T, A>
    {
        using base_type = types::get_bool_simd_register_t<T, A>;

    public:
        static constexpr std::size_t size = sizeof(types::simd_register<T, A>) / sizeof(T);

        using value_type = bool;
        using arch_type = A;
        using register_type = typename base_type::register_type;
        using batch_type = batch<T, A>;

        XSIMD_INLINE batch_bool() = default;
        XSIMD_INLINE batch_bool(bool val) noexcept;
        XSIMD_INLINE batch_bool(register_type reg) noexcept;
        template <class... Ts>
        XSIMD_INLINE batch_bool(bool val0, bool val1, Ts... vals) noexcept;

        template <class Tp>
        XSIMD_INLINE batch_bool(Tp const*) = delete;

        XSIMD_INLINE void store_aligned(bool* mem) const noexcept;
        XSIMD_INLINE void store_unaligned(bool* mem) const noexcept;
        XSIMD_NO_DISCARD static XSIMD_INLINE batch_bool load_aligned(bool const* mem) noexcept;
        XSIMD_NO_DISCARD static XSIMD_INLINE batch_bool load_unaligned(bool const* mem) noexcept;

        XSIMD_INLINE bool get(std::size_t i) const noexcept;

        XSIMD_INLINE uint64_t mask() const noexcept;
        XSIMD_INLINE static batch_bool from_mask(uint64_t mask) noexcept;

        XSIMD_INLINE batch_bool operator==(batch_bool const& other) const noexcept;
        XSIMD_INLINE batch_bool operator!=(batch_bool const& other) const noexcept;

        XSIMD_INLINE batch_bool operator~() const noexcept;
        XSIMD_INLINE batch_bool operator!() const noexcept;
        XSIMD_INLINE batch_bool operator&(batch_bool const& other) const noexcept;
        XSIMD_INLINE batch_bool operator|(batch_bool const& other) const noexcept;
        XSIMD_INLINE batch_bool operator^(batch_bool const& other) const noexcept;
        XSIMD_INLINE batch_bool operator&&(batch_bool const& other) const noexcept;
        XSIMD_INLINE batch_bool operator||(batch_bool const& other) const noexcept;

        XSIMD_INLINE batch_bool& operator&=(batch_bool const& other) noexcept { return (*this) = (*this) & other; }
        XSIMD_INLINE batch_bool& operator|=(batch_bool const& other) noexcept { return (*this) = (*this) | other; }
        XSIMD_INLINE batch_bool& operator^=(batch_bool const& other) noexcept { return (*this) = (*this) ^ other; }

    private:
        template <class U, class... V, size_t I, size_t... Is>
        static XSIMD_INLINE register_type make_register(detail::index_sequence<I, Is...>, U u, V... v) noexcept;

        template <class... V>
        static XSIMD_INLINE register_type make_register(detail::index_sequence<>, V... v) noexcept;
    };

    template <class T, class A>
    constexpr std::size_t batch_bool<T, A>::size;

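    /**
     * @brief Batch of complex values.
     *
     * Specialization of batch for std::complex<T>, stored as two real batches,
     * one for the real parts and one for the imaginary parts.
     *
     * @tparam T the scalar type underlying the complex values.
     * @tparam A the architecture this batch is tied to.
     */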
    template <class T, class A>
    class batch<std::complex<T>, A>
    {
    public:
        using value_type = std::complex<T>;
        using real_batch = batch<T, A>;
        using arch_type = A;
        using batch_bool_type = batch_bool<T, A>;

        static constexpr std::size_t size = real_batch::size;

        XSIMD_INLINE batch() = default;
        XSIMD_INLINE batch(value_type const& val) noexcept;
        XSIMD_INLINE batch(real_batch const& real, real_batch const& imag) noexcept;

        XSIMD_INLINE batch(real_batch const& real) noexcept;
        XSIMD_INLINE batch(T val) noexcept;
        template <class... Ts>
        XSIMD_INLINE batch(value_type val0, value_type val1, Ts... vals) noexcept;
        XSIMD_INLINE explicit batch(batch_bool_type const& b) noexcept;

        template <class U>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch broadcast(U val) noexcept;

        XSIMD_NO_DISCARD static XSIMD_INLINE batch load_aligned(const T* real_src, const T* imag_src = nullptr) noexcept;
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load_unaligned(const T* real_src, const T* imag_src = nullptr) noexcept;
        XSIMD_INLINE void store_aligned(T* real_dst, T* imag_dst) const noexcept;
        XSIMD_INLINE void store_unaligned(T* real_dst, T* imag_dst) const noexcept;

        XSIMD_NO_DISCARD static XSIMD_INLINE batch load_aligned(const value_type* src) noexcept;
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load_unaligned(const value_type* src) noexcept;
        XSIMD_INLINE void store_aligned(value_type* dst) const noexcept;
        XSIMD_INLINE void store_unaligned(value_type* dst) const noexcept;

        template <class U>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load(U const* mem, aligned_mode) noexcept;
        template <class U>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load(U const* mem, unaligned_mode) noexcept;
        template <class U>
        XSIMD_INLINE void store(U* mem, aligned_mode) const noexcept;
        template <class U>
        XSIMD_INLINE void store(U* mem, unaligned_mode) const noexcept;

        XSIMD_INLINE real_batch real() const noexcept;
        XSIMD_INLINE real_batch imag() const noexcept;

        XSIMD_INLINE value_type get(std::size_t i) const noexcept;

#ifdef XSIMD_ENABLE_XTL_COMPLEX
        template <bool i3ec>
        XSIMD_INLINE batch(xtl::xcomplex<T, T, i3ec> const& val) noexcept;
        template <bool i3ec, class... Ts>
        XSIMD_INLINE batch(xtl::xcomplex<T, T, i3ec> val0, xtl::xcomplex<T, T, i3ec> val1, Ts... vals) noexcept;

        template <bool i3ec>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load_aligned(const xtl::xcomplex<T, T, i3ec>* src) noexcept;
        template <bool i3ec>
        XSIMD_NO_DISCARD static XSIMD_INLINE batch load_unaligned(const xtl::xcomplex<T, T, i3ec>* src) noexcept;
        template <bool i3ec>
        XSIMD_INLINE void store_aligned(xtl::xcomplex<T, T, i3ec>* dst) const noexcept;
        template <bool i3ec>
        XSIMD_INLINE void store_unaligned(xtl::xcomplex<T, T, i3ec>* dst) const noexcept;
#endif

        XSIMD_INLINE batch_bool<T, A> operator==(batch const& other) const noexcept;
        XSIMD_INLINE batch_bool<T, A> operator!=(batch const& other) const noexcept;

        XSIMD_INLINE batch& operator+=(batch const& other) noexcept;
        XSIMD_INLINE batch& operator-=(batch const& other) noexcept;
        XSIMD_INLINE batch& operator*=(batch const& other) noexcept;
        XSIMD_INLINE batch& operator/=(batch const& other) noexcept;

        XSIMD_INLINE batch& operator++() noexcept;
        XSIMD_INLINE batch& operator--() noexcept;
        XSIMD_INLINE batch operator++(int) noexcept;
        XSIMD_INLINE batch operator--(int) noexcept;

        XSIMD_INLINE batch_bool_type operator!() const noexcept;
        XSIMD_INLINE batch operator~() const noexcept;
        XSIMD_INLINE batch operator-() const noexcept;
        XSIMD_INLINE batch operator+() const noexcept;

        friend XSIMD_INLINE batch operator+(batch const& self, batch const& other) noexcept
        {
            return batch(self) += other;
        }

        friend XSIMD_INLINE batch operator-(batch const& self, batch const& other) noexcept
        {
            return batch(self) -= other;
        }

        friend XSIMD_INLINE batch operator*(batch const& self, batch const& other) noexcept
        {
            return batch(self) *= other;
        }

        friend XSIMD_INLINE batch operator/(batch const& self, batch const& other) noexcept
        {
            return batch(self) /= other;
        }

    private:
        real_batch m_real;
        real_batch m_imag;
    };

    template <class T, class A>
    constexpr std::size_t batch<std::complex<T>, A>::size;

#ifdef XSIMD_ENABLE_XTL_COMPLEX
    template <typename T, bool i3ec, typename A>
    struct batch<xtl::xcomplex<T, T, i3ec>, A>
    {
        static_assert(std::is_same<T, void>::value,
                      "Please use batch<std::complex<T>, A> initialized from xtl::xcomplex instead");
    };
#endif
}

#include "../arch/xsimd_isa.hpp"
#include "./xsimd_batch_constant.hpp"
#include "./xsimd_traits.hpp"

namespace xsimd
{

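    // batch<T, A> implementation: constructors.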
    template <class T, class A>
    XSIMD_INLINE batch<T, A>::batch(T val) noexcept
        : types::simd_register<T, A>(kernel::broadcast<A>(val, A {}))
    {
        detail::static_check_supported_config<T, A>();
    }

    template <class T, class A>
    template <class... Ts>
    XSIMD_INLINE batch<T, A>::batch(T val0, T val1, Ts... vals) noexcept
        : batch(kernel::set<A>(batch {}, A {}, val0, val1, static_cast<T>(vals)...))
    {
        detail::static_check_supported_config<T, A>();
        static_assert(sizeof...(Ts) + 2 == size, "The constructor requires as many arguments as batch elements.");
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>::batch(batch_bool<T, A> const& b) noexcept
        : batch(kernel::from_bool(b, A {}))
    {
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>::batch(register_type reg) noexcept
        : types::simd_register<T, A>({ reg })
    {
        detail::static_check_supported_config<T, A>();
    }

    template <class T, class A>
    template <class U>
    XSIMD_NO_DISCARD XSIMD_INLINE batch<T, A> batch<T, A>::broadcast(U val) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return batch(static_cast<T>(val));
    }

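    // batch<T, A> memory operators: loads, stores, gather and scatter.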
    template <class T, class A>
    template <class U>
    XSIMD_INLINE void batch<T, A>::store_aligned(U* mem) const noexcept
    {
        detail::static_check_supported_config<T, A>();
        assert(((reinterpret_cast<uintptr_t>(mem) % A::alignment()) == 0)
               && "store location is not properly aligned");
        kernel::store_aligned<A>(mem, *this, A {});
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE void batch<T, A>::store_unaligned(U* mem) const noexcept
    {
        detail::static_check_supported_config<T, A>();
        kernel::store_unaligned<A>(mem, *this, A {});
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE void batch<T, A>::store(U* mem, aligned_mode) const noexcept
    {
        detail::static_check_supported_config<T, A>();
        return store_aligned(mem);
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE void batch<T, A>::store(U* mem, unaligned_mode) const noexcept
    {
        detail::static_check_supported_config<T, A>();
        return store_unaligned(mem);
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE batch<T, A> batch<T, A>::load_aligned(U const* mem) noexcept
    {
        assert(((reinterpret_cast<uintptr_t>(mem) % A::alignment()) == 0)
               && "loaded pointer is not properly aligned");
        detail::static_check_supported_config<T, A>();
        return kernel::load_aligned<A>(mem, kernel::convert<T> {}, A {});
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE batch<T, A> batch<T, A>::load_unaligned(U const* mem) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return kernel::load_unaligned<A>(mem, kernel::convert<T> {}, A {});
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE batch<T, A> batch<T, A>::load(U const* mem, aligned_mode) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return load_aligned(mem);
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE batch<T, A> batch<T, A>::load(U const* mem, unaligned_mode) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return load_unaligned(mem);
    }

    template <class T, class A>
    template <typename U, typename V>
    XSIMD_INLINE batch<T, A> batch<T, A>::gather(U const* src, batch<V, A> const& index) noexcept
    {
        detail::static_check_supported_config<T, A>();
        static_assert(std::is_convertible<T, U>::value, "Can't convert from src to this batch's type!");
        return kernel::gather(batch {}, src, index, A {});
    }

    template <class T, class A>
    template <class U, class V>
    XSIMD_INLINE void batch<T, A>::scatter(U* dst, batch<V, A> const& index) const noexcept
    {
        detail::static_check_supported_config<T, A>();
        static_assert(std::is_convertible<T, U>::value, "Can't convert from this batch's type to dst!");
        kernel::scatter<A>(*this, dst, index, A {});
    }

    template <class T, class A>
    XSIMD_INLINE T batch<T, A>::get(std::size_t i) const noexcept
    {
        return kernel::get(*this, i, A {});
    }

    namespace details
    {
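        // Implementations of the comparison helpers declared above; each one
        // dispatches to the corresponding architecture-specific kernel.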
        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> eq(batch<T, A> const& self, batch<T, A> const& other) noexcept
        {
            detail::static_check_supported_config<T, A>();
            return kernel::eq<A>(self, other, A {});
        }

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> neq(batch<T, A> const& self, batch<T, A> const& other) noexcept
        {
            detail::static_check_supported_config<T, A>();
            return kernel::neq<A>(self, other, A {});
        }

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> ge(batch<T, A> const& self, batch<T, A> const& other) noexcept
        {
            detail::static_check_supported_config<T, A>();
            return kernel::ge<A>(self, other, A {});
        }

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> le(batch<T, A> const& self, batch<T, A> const& other) noexcept
        {
            detail::static_check_supported_config<T, A>();
            return kernel::le<A>(self, other, A {});
        }

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> gt(batch<T, A> const& self, batch<T, A> const& other) noexcept
        {
            detail::static_check_supported_config<T, A>();
            return kernel::gt<A>(self, other, A {});
        }

        template <class T, class A>
        XSIMD_INLINE batch_bool<T, A> lt(batch<T, A> const& self, batch<T, A> const& other) noexcept
        {
            detail::static_check_supported_config<T, A>();
            return kernel::lt<A>(self, other, A {});
        }
    }

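    // batch<T, A> in-place update operators.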
    template <class T, class A>
    XSIMD_INLINE batch<T, A>& batch<T, A>::operator+=(batch<T, A> const& other) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return *this = kernel::add<A>(*this, other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& batch<T, A>::operator-=(batch<T, A> const& other) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return *this = kernel::sub<A>(*this, other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& batch<T, A>::operator*=(batch<T, A> const& other) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return *this = kernel::mul<A>(*this, other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& batch<T, A>::operator/=(batch<T, A> const& other) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return *this = kernel::div<A>(*this, other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& types::integral_only_operators<T, A>::operator%=(batch<T, A> const& other) noexcept
    {
        ::xsimd::detail::static_check_supported_config<T, A>();
        return *static_cast<batch<T, A>*>(this) = kernel::mod<A>(*static_cast<batch<T, A>*>(this), other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& batch<T, A>::operator&=(batch<T, A> const& other) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return *this = kernel::bitwise_and<A>(*this, other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& batch<T, A>::operator|=(batch<T, A> const& other) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return *this = kernel::bitwise_or<A>(*this, other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& batch<T, A>::operator^=(batch<T, A> const& other) noexcept
    {
        detail::static_check_supported_config<T, A>();
        return *this = kernel::bitwise_xor<A>(*this, other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& types::integral_only_operators<T, A>::operator>>=(batch<T, A> const& other) noexcept
    {
        ::xsimd::detail::static_check_supported_config<T, A>();
        return *static_cast<batch<T, A>*>(this) = kernel::bitwise_rshift<A>(*static_cast<batch<T, A>*>(this), other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& types::integral_only_operators<T, A>::operator<<=(batch<T, A> const& other) noexcept
    {
        ::xsimd::detail::static_check_supported_config<T, A>();
        return *static_cast<batch<T, A>*>(this) = kernel::bitwise_lshift<A>(*static_cast<batch<T, A>*>(this), other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& types::integral_only_operators<T, A>::operator>>=(int32_t other) noexcept
    {
        ::xsimd::detail::static_check_supported_config<T, A>();
        return *static_cast<batch<T, A>*>(this) = kernel::bitwise_rshift<A>(*static_cast<batch<T, A>*>(this), other, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& types::integral_only_operators<T, A>::operator<<=(int32_t other) noexcept
    {
        ::xsimd::detail::static_check_supported_config<T, A>();
        return *static_cast<batch<T, A>*>(this) = kernel::bitwise_lshift<A>(*static_cast<batch<T, A>*>(this), other, A {});
    }

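    // batch<T, A> increment and decrement operators.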
    template <class T, class A>
    XSIMD_INLINE batch<T, A>& batch<T, A>::operator++() noexcept
    {
        detail::static_check_supported_config<T, A>();
        return operator+=(1);
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A>& batch<T, A>::operator--() noexcept
    {
        detail::static_check_supported_config<T, A>();
        return operator-=(1);
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A> batch<T, A>::operator++(int) noexcept
    {
        detail::static_check_supported_config<T, A>();
        batch<T, A> copy(*this);
        operator+=(1);
        return copy;
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A> batch<T, A>::operator--(int) noexcept
    {
        detail::static_check_supported_config<T, A>();
        batch copy(*this);
        operator-=(1);
        return copy;
    }

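    // batch<T, A> unary operators.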
    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch<T, A>::operator!() const noexcept
    {
        detail::static_check_supported_config<T, A>();
        return kernel::eq<A>(*this, batch(0), A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A> batch<T, A>::operator~() const noexcept
    {
        detail::static_check_supported_config<T, A>();
        return kernel::bitwise_not<A>(*this, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A> batch<T, A>::operator-() const noexcept
    {
        detail::static_check_supported_config<T, A>();
        return kernel::neg<A>(*this, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A> batch<T, A>::operator+() const noexcept
    {
        detail::static_check_supported_config<T, A>();
        return *this;
    }

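    // Private helpers backing operator&& and operator||; both forward to the
    // element-wise logical kernels.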
    template <class T, class A>
    XSIMD_INLINE batch<T, A> batch<T, A>::logical_and(batch<T, A> const& other) const noexcept
    {
        return kernel::logical_and<A>(*this, other, A());
    }

    template <class T, class A>
    XSIMD_INLINE batch<T, A> batch<T, A>::logical_or(batch<T, A> const& other) const noexcept
    {
        return kernel::logical_or<A>(*this, other, A());
    }

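    // batch_bool<T, A> implementation.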
    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A>::batch_bool(register_type reg) noexcept
        : types::get_bool_simd_register_t<T, A>({ reg })
    {
    }

    template <class T, class A>
    template <class... Ts>
    XSIMD_INLINE batch_bool<T, A>::batch_bool(bool val0, bool val1, Ts... vals) noexcept
        : batch_bool(kernel::set<A>(batch_bool {}, A {}, val0, val1, static_cast<bool>(vals)...))
    {
        static_assert(sizeof...(Ts) + 2 == size, "The constructor requires as many arguments as batch elements.");
    }

    template <class T, class A>
    XSIMD_INLINE void batch_bool<T, A>::store_aligned(bool* mem) const noexcept
    {
        kernel::store(*this, mem, A {});
    }

    template <class T, class A>
    XSIMD_INLINE void batch_bool<T, A>::store_unaligned(bool* mem) const noexcept
    {
        store_aligned(mem);
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::load_aligned(bool const* mem) noexcept
    {
        batch_type ref(0);
        alignas(A::alignment()) T buffer[size];
        for (std::size_t i = 0; i < size; ++i)
            buffer[i] = mem[i] ? 1 : 0;
        return ref != batch_type::load_aligned(&buffer[0]);
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::load_unaligned(bool const* mem) noexcept
    {
        return load_aligned(mem);
    }

    template <class T, class A>
    XSIMD_INLINE uint64_t batch_bool<T, A>::mask() const noexcept
    {
        return kernel::mask(*this, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::from_mask(uint64_t mask) noexcept
    {
        return kernel::from_mask(batch_bool<T, A>(), mask, A {});
    }

    template <class T, class A>
    XSIMD_INLINE bool batch_bool<T, A>::get(std::size_t i) const noexcept
    {
        return kernel::get(*this, i, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::operator==(batch_bool<T, A> const& other) const noexcept
    {
        return kernel::eq<A>(*this, other, A {}).data;
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::operator!=(batch_bool<T, A> const& other) const noexcept
    {
        return kernel::neq<A>(*this, other, A {}).data;
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::operator~() const noexcept
    {
        return kernel::bitwise_not<A>(*this, A {}).data;
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::operator!() const noexcept
    {
        return operator==(batch_bool(false));
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::operator&(batch_bool<T, A> const& other) const noexcept
    {
        return kernel::bitwise_and<A>(*this, other, A {}).data;
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::operator|(batch_bool<T, A> const& other) const noexcept
    {
        return kernel::bitwise_or<A>(*this, other, A {}).data;
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::operator^(batch_bool<T, A> const& other) const noexcept
    {
        return kernel::bitwise_xor<A>(*this, other, A {}).data;
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::operator&&(batch_bool const& other) const noexcept
    {
        return operator&(other);
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch_bool<T, A>::operator||(batch_bool const& other) const noexcept
    {
        return operator|(other);
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A>::batch_bool(bool val) noexcept
        : base_type { make_register(detail::make_index_sequence<size - 1>(), val) }
    {
    }

    template <class T, class A>
    template <class U, class... V, size_t I, size_t... Is>
    XSIMD_INLINE auto batch_bool<T, A>::make_register(detail::index_sequence<I, Is...>, U u, V... v) noexcept -> register_type
    {
        return make_register(detail::index_sequence<Is...>(), u, u, v...);
    }

    template <class T, class A>
    template <class... V>
    XSIMD_INLINE auto batch_bool<T, A>::make_register(detail::index_sequence<>, V... v) noexcept -> register_type
    {
        return kernel::set<A>(batch_bool<T, A>(), A {}, v...).data;
    }

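    // batch<std::complex<T>, A> implementation.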
    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>::batch(value_type const& val) noexcept
        : m_real(val.real())
        , m_imag(val.imag())
    {
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>::batch(real_batch const& real, real_batch const& imag) noexcept
        : m_real(real)
        , m_imag(imag)
    {
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>::batch(real_batch const& real) noexcept
        : m_real(real)
        , m_imag(0)
    {
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>::batch(T val) noexcept
        : m_real(val)
        , m_imag(0)
    {
    }

    template <class T, class A>
    template <class... Ts>
    XSIMD_INLINE batch<std::complex<T>, A>::batch(value_type val0, value_type val1, Ts... vals) noexcept
        : batch(kernel::set<A>(batch {}, A {}, val0, val1, static_cast<value_type>(vals)...))
    {
        static_assert(sizeof...(Ts) + 2 == size, "as many arguments as batch elements");
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>::batch(batch_bool_type const& b) noexcept
        : m_real(b)
        , m_imag(0)
    {
    }

    template <class T, class A>
    template <class U>
    XSIMD_NO_DISCARD XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::broadcast(U val) noexcept
    {
        return batch(static_cast<std::complex<T>>(val));
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::load_aligned(const T* real_src, const T* imag_src) noexcept
    {
        return { batch<T, A>::load_aligned(real_src), imag_src ? batch<T, A>::load_aligned(imag_src) : batch<T, A>(0) };
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::load_unaligned(const T* real_src, const T* imag_src) noexcept
    {
        return { batch<T, A>::load_unaligned(real_src), imag_src ? batch<T, A>::load_unaligned(imag_src) : batch<T, A>(0) };
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::load_aligned(const value_type* src) noexcept
    {
        assert(((reinterpret_cast<uintptr_t>(src) % A::alignment()) == 0)
               && "loaded pointer is not properly aligned");
        return kernel::load_complex_aligned<A>(src, kernel::convert<value_type> {}, A {});
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::load_unaligned(const value_type* src) noexcept
    {
        return kernel::load_complex_unaligned<A>(src, kernel::convert<value_type> {}, A {});
    }

    template <class T, class A>
    XSIMD_INLINE void batch<std::complex<T>, A>::store_aligned(value_type* dst) const noexcept
    {
        assert(((reinterpret_cast<uintptr_t>(dst) % A::alignment()) == 0)
               && "store location is not properly aligned");
        return kernel::store_complex_aligned(dst, *this, A {});
    }

    template <class T, class A>
    XSIMD_INLINE void batch<std::complex<T>, A>::store_unaligned(value_type* dst) const noexcept
    {
        return kernel::store_complex_unaligned(dst, *this, A {});
    }

    template <class T, class A>
    XSIMD_INLINE void batch<std::complex<T>, A>::store_aligned(T* real_dst, T* imag_dst) const noexcept
    {
        m_real.store_aligned(real_dst);
        m_imag.store_aligned(imag_dst);
    }

    template <class T, class A>
    XSIMD_INLINE void batch<std::complex<T>, A>::store_unaligned(T* real_dst, T* imag_dst) const noexcept
    {
        m_real.store_unaligned(real_dst);
        m_imag.store_unaligned(imag_dst);
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::load(U const* mem, aligned_mode) noexcept
    {
        return load_aligned(mem);
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::load(U const* mem, unaligned_mode) noexcept
    {
        return load_unaligned(mem);
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE void batch<std::complex<T>, A>::store(U* mem, aligned_mode) const noexcept
    {
        return store_aligned(mem);
    }

    template <class T, class A>
    template <class U>
    XSIMD_INLINE void batch<std::complex<T>, A>::store(U* mem, unaligned_mode) const noexcept
    {
        return store_unaligned(mem);
    }

    template <class T, class A>
    XSIMD_INLINE auto batch<std::complex<T>, A>::real() const noexcept -> real_batch
    {
        return m_real;
    }

    template <class T, class A>
    XSIMD_INLINE auto batch<std::complex<T>, A>::imag() const noexcept -> real_batch
    {
        return m_imag;
    }

    template <class T, class A>
    XSIMD_INLINE auto batch<std::complex<T>, A>::get(std::size_t i) const noexcept -> value_type
    {
        return kernel::get(*this, i, A {});
    }

#ifdef XSIMD_ENABLE_XTL_COMPLEX

    template <class T, class A>
    template <bool i3ec>
    XSIMD_INLINE batch<std::complex<T>, A>::batch(xtl::xcomplex<T, T, i3ec> const& val) noexcept
        : m_real(val.real())
        , m_imag(val.imag())
    {
    }

    template <class T, class A>
    template <bool i3ec, class... Ts>
    XSIMD_INLINE batch<std::complex<T>, A>::batch(xtl::xcomplex<T, T, i3ec> val0, xtl::xcomplex<T, T, i3ec> val1, Ts... vals) noexcept
        : batch(kernel::set<A>(batch {}, A {}, val0, val1, static_cast<xtl::xcomplex<T, T, i3ec>>(vals)...))
    {
        static_assert(sizeof...(Ts) + 2 == size, "as many arguments as batch elements");
    }

    template <class T, class A>
    template <bool i3ec>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::load_aligned(const xtl::xcomplex<T, T, i3ec>* src) noexcept
    {
        return load_aligned(reinterpret_cast<std::complex<T> const*>(src));
    }

    template <class T, class A>
    template <bool i3ec>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::load_unaligned(const xtl::xcomplex<T, T, i3ec>* src) noexcept
    {
        return load_unaligned(reinterpret_cast<std::complex<T> const*>(src));
    }

    template <class T, class A>
    template <bool i3ec>
    XSIMD_INLINE void batch<std::complex<T>, A>::store_aligned(xtl::xcomplex<T, T, i3ec>* dst) const noexcept
    {
        store_aligned(reinterpret_cast<std::complex<T>*>(dst));
    }

    template <class T, class A>
    template <bool i3ec>
    XSIMD_INLINE void batch<std::complex<T>, A>::store_unaligned(xtl::xcomplex<T, T, i3ec>* dst) const noexcept
    {
        store_unaligned(reinterpret_cast<std::complex<T>*>(dst));
    }

#endif

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch<std::complex<T>, A>::operator==(batch const& other) const noexcept
    {
        return m_real == other.m_real && m_imag == other.m_imag;
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch<std::complex<T>, A>::operator!=(batch const& other) const noexcept
    {
        return m_real != other.m_real || m_imag != other.m_imag;
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>& batch<std::complex<T>, A>::operator+=(batch const& other) noexcept
    {
        m_real += other.m_real;
        m_imag += other.m_imag;
        return *this;
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>& batch<std::complex<T>, A>::operator-=(batch const& other) noexcept
    {
        m_real -= other.m_real;
        m_imag -= other.m_imag;
        return *this;
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>& batch<std::complex<T>, A>::operator*=(batch const& other) noexcept
    {
        real_batch new_real = fms(real(), other.real(), imag() * other.imag());
        real_batch new_imag = fma(real(), other.imag(), imag() * other.real());
        m_real = new_real;
        m_imag = new_imag;
        return *this;
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>& batch<std::complex<T>, A>::operator/=(batch const& other) noexcept
    {
        real_batch a = real();
        real_batch b = imag();
        real_batch c = other.real();
        real_batch d = other.imag();
        real_batch e = c * c + d * d;
        m_real = (c * a + d * b) / e;
        m_imag = (c * b - d * a) / e;
        return *this;
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>& batch<std::complex<T>, A>::operator++() noexcept
    {
        return operator+=(1);
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A>& batch<std::complex<T>, A>::operator--() noexcept
    {
        return operator-=(1);
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::operator++(int) noexcept
    {
        batch copy(*this);
        operator+=(1);
        return copy;
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::operator--(int) noexcept
    {
        batch copy(*this);
        operator-=(1);
        return copy;
    }

    template <class T, class A>
    XSIMD_INLINE batch_bool<T, A> batch<std::complex<T>, A>::operator!() const noexcept
    {
        return operator==(batch(0));
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::operator~() const noexcept
    {
        return { ~m_real, ~m_imag };
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::operator-() const noexcept
    {
        return { -m_real, -m_imag };
    }

    template <class T, class A>
    XSIMD_INLINE batch<std::complex<T>, A> batch<std::complex<T>, A>::operator+() const noexcept
    {
        return { +m_real, +m_imag };
    }

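    // Helpers that look up, across a list of architectures, a batch of T with
    // exactly N elements; sized_batch<T, N, archs>::type is void when no
    // architecture in the list provides such a batch.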
    namespace details
    {
        template <typename T, std::size_t N, class ArchList>
        struct sized_batch;

        template <typename T, std::size_t N>
        struct sized_batch<T, N, xsimd::arch_list<>>
        {
            using type = void;
        };

        template <typename T, class Arch, bool BatchExists = xsimd::has_simd_register<T, Arch>::value>
        struct batch_trait;

        template <typename T, class Arch>
        struct batch_trait<T, Arch, true>
        {
            using type = xsimd::batch<T, Arch>;
            static constexpr std::size_t size = xsimd::batch<T, Arch>::size;
        };

        template <typename T, class Arch>
        struct batch_trait<T, Arch, false>
        {
            using type = void;
            static constexpr std::size_t size = 0;
        };

        template <typename T, std::size_t N, class Arch, class... Archs>
        struct sized_batch<T, N, xsimd::arch_list<Arch, Archs...>>
        {
            using type = typename std::conditional<
                batch_trait<T, Arch>::size == N,
                typename batch_trait<T, Arch>::type,
                typename sized_batch<T, N, xsimd::arch_list<Archs...>>::type>::type;
        };
    }

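    /**
     * @brief Type utility mapping a scalar type T and an element count N to a batch type.
     *
     * Resolves to the batch<T, Arch> whose element count is exactly N for the
     * first supported architecture that provides one, or to void if none does.
     *
     * Informal usage sketch (which architecture is chosen, if any, depends on
     * the build flags):
     *
     *     using batch4f = xsimd::make_sized_batch_t<float, 4>; // e.g. a 4-float SSE or NEON batch
     *     // batch4f is void when no enabled architecture offers a 4-element float batch.
     */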
    template <typename T, std::size_t N>
    struct make_sized_batch
    {
        using type = typename details::sized_batch<T, N, supported_architectures>::type;
    };

    template <typename T, std::size_t N>
    using make_sized_batch_t = typename make_sized_batch<T, N>::type;
}

#endif