Introduce [us]128 (when available). Unlike [us]64, ensure they are
always naturally aligned. This also enables 128-bit wide atomics
(which require natural alignment), such as cmpxchg128().

Signed-off-by: Peter Zijlstra (Intel) <peterz@xxxxxxxxxxxxx>
---
 include/linux/types.h          | 5 +++++
 include/uapi/linux/types.h     | 4 ++++
 lib/crypto/curve25519-hacl64.c | 2 --
 lib/crypto/poly1305-donna64.c  | 2 --
 4 files changed, 9 insertions(+), 4 deletions(-)

--- a/include/linux/types.h
+++ b/include/linux/types.h
@@ -10,6 +10,11 @@
 #define DECLARE_BITMAP(name,bits) \
 	unsigned long name[BITS_TO_LONGS(bits)]
 
+#ifdef __SIZEOF_INT128__
+typedef __s128 s128;
+typedef __u128 u128;
+#endif
+
 typedef u32 __kernel_dev_t;
 
 typedef __kernel_fd_set		fd_set;
--- a/include/uapi/linux/types.h
+++ b/include/uapi/linux/types.h
@@ -13,6 +13,10 @@
 
 #include <linux/posix_types.h>
 
+#ifdef __SIZEOF_INT128__
+typedef __signed__ __int128 __s128 __attribute__((aligned(16)));
+typedef unsigned __int128 __u128 __attribute__((aligned(16)));
+#endif
 
 /*
  * Below are truly Linux-specific types that should never collide with
--- a/lib/crypto/curve25519-hacl64.c
+++ b/lib/crypto/curve25519-hacl64.c
@@ -14,8 +14,6 @@
 #include <crypto/curve25519.h>
 #include <linux/string.h>
 
-typedef __uint128_t u128;
-
 static __always_inline u64 u64_eq_mask(u64 a, u64 b)
 {
 	u64 x = a ^ b;
--- a/lib/crypto/poly1305-donna64.c
+++ b/lib/crypto/poly1305-donna64.c
@@ -10,8 +10,6 @@
 #include <asm/unaligned.h>
 #include <crypto/internal/poly1305.h>
 
-typedef __uint128_t u128;
-
 void poly1305_core_setkey(struct poly1305_core_key *key,
 			  const u8 raw_key[POLY1305_BLOCK_SIZE])
 {
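
Illustrative note, not part of the patch: the userspace sketch below
mirrors the uapi typedefs above and shows why the explicit aligned(16)
matters. A 16-byte compare-and-exchange (which is what cmpxchg128()
boils down to, e.g. cmpxchg16b on x86-64) requires a naturally aligned
operand. The sketch assumes GCC/Clang on a 64-bit target that defines
__SIZEOF_INT128__; on x86-64, build with -mcx16, otherwise the builtin
may fall back to libatomic.

#include <stdio.h>

#ifdef __SIZEOF_INT128__
/* Same shape as the uapi typedefs added by the patch. */
typedef unsigned __int128 __u128 __attribute__((aligned(16)));
typedef __u128 u128;

int main(void)
{
	u128 v   = ((u128)1 << 64) | 2;	/* high=1, low=2 */
	u128 old = v;
	u128 new = ((u128)3 << 64) | 4;	/* high=3, low=4 */

	/* Prints 16: the attribute guarantees natural alignment. */
	printf("alignof(u128) = %zu\n", _Alignof(u128));

	/* 16-byte CAS; safe because &v is guaranteed 16-byte aligned. */
	if (__atomic_compare_exchange_n(&v, &old, new, 0,
					__ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
		printf("CAS ok: high=%llu low=%llu\n",
		       (unsigned long long)(v >> 64),
		       (unsigned long long)v);
	return 0;
}
#else
int main(void) { puts("no __int128 on this target"); return 0; }
#endif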