#if defined(ASCON_MASKED_X4_BACKEND_C64) && ASCON_MASKED_MAX_SHARES >= 4
/* Computes x ^= (~y & z) with a 4-share masked representation.  Each share
 * of x collects the partial products against the matching share of z, with
 * the shares of y rotated or unrotated into the required alignment first. */
#define and_not_xor(x, y, z) \
    do { \
        x##_a ^= (~(y##_a) & z##_a); \
        x##_a ^= (ascon_mask64_unrotate_share1_0(y##_b) & z##_a); \
        x##_a ^= (ascon_mask64_unrotate_share2_0(y##_c) & z##_a); \
        x##_a ^= (ascon_mask64_unrotate_share3_0(y##_d) & z##_a); \
        \
        x##_b ^= (ascon_mask64_rotate_share1_0(~(y##_a)) & z##_b); \
        x##_b ^= (y##_b & z##_b); \
        x##_b ^= (ascon_mask64_unrotate_share2_1(y##_c) & z##_b); \
        x##_b ^= (ascon_mask64_unrotate_share3_1(y##_d) & z##_b); \
        \
        x##_c ^= (ascon_mask64_rotate_share2_0(~(y##_a)) & z##_c); \
        x##_c ^= (ascon_mask64_rotate_share2_1(y##_b) & z##_c); \
        x##_c ^= (y##_c & z##_c); \
        x##_c ^= (ascon_mask64_unrotate_share3_2(y##_d) & z##_c); \
        \
        x##_d ^= (ascon_mask64_rotate_share3_0(~(y##_a)) & z##_d); \
        x##_d ^= (ascon_mask64_rotate_share3_1(y##_b) & z##_d); \
        x##_d ^= (ascon_mask64_rotate_share3_2(y##_c) & z##_d); \
        x##_d ^= (y##_d & z##_d); \
    } while (0)
/* Round constants for the permutation, in inverted (complemented) form */
#define ROUND_CONSTANT(round) \
    (~(uint64_t)(((0x0F - (round)) << 4) | (round)))

static const uint64_t RC[12] = {
    ROUND_CONSTANT(0),  ROUND_CONSTANT(1),  ROUND_CONSTANT(2),
    ROUND_CONSTANT(3),  ROUND_CONSTANT(4),  ROUND_CONSTANT(5),
    ROUND_CONSTANT(6),  ROUND_CONSTANT(7),  ROUND_CONSTANT(8),
    ROUND_CONSTANT(9),  ROUND_CONSTANT(10), ROUND_CONSTANT(11)
};
void ascon_x4_permute
    (ascon_masked_state_t *state, uint8_t first_round, uint64_t preserve[3])
{
    /* Four shares (_a, _b, _c, _d) of the five state words, plus temporaries */
    uint64_t x0_a, x1_a, x2_a, x3_a, x4_a;
    uint64_t x0_b, x1_b, x2_b, x3_b, x4_b;
    uint64_t x0_c, x1_c, x2_c, x3_c, x4_c;
    uint64_t x0_d, x1_d, x2_d, x3_d, x4_d;
    uint64_t t0_a, t0_b, t0_c, t0_d;
    uint64_t t1_a, t1_b, t1_c, t1_d;
#if defined(ASCON_MASKED_WORD_BACKEND_DIRECT_XOR)
    /* ... load the shares of x0..x4 directly from the masked state words ... */
#endif
    /* Perform rounds first_round through 11 */
    while (first_round < 12) {
        x2_a ^= RC[first_round++];  /* add the inverted round constant to x2's first share */
        /* ... substitution and linear diffusion layers on all four shares ... */
    }
#if defined(ASCON_MASKED_WORD_BACKEND_DIRECT_XOR)
    /* ... store the shares back to the masked state words ... */
#endif
}
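The first_round argument selects how much of the permutation is applied: the round loop above absorbs RC[first_round] through RC[11], so an 8-round permutation corresponds to first_round = 4 and a 6-round permutation to first_round = 6. The following standalone sketch (not part of the library; it simply reuses the ROUND_CONSTANT formula shown above) prints the constants that a run starting at a given round would consume:

#include <stdio.h>
#include <stdint.h>

/* Same formula as the library's ROUND_CONSTANT, reproduced so the sketch
 * is self-contained. */
#define ROUND_CONSTANT(round) \
    (~(uint64_t)(((0x0F - (round)) << 4) | (round)))

int main(void)
{
    unsigned first_round = 4;   /* 8-round variant of the permutation */
    while (first_round < 12) {
        printf("round %2u: constant 0x%016llx\n",
               first_round,
               (unsigned long long)ROUND_CONSTANT(first_round));
        ++first_round;
    }
    return 0;
}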
The implementation builds on utility functions for operating on masked ASCON states with between 2 and 4 shares; the macros, functions, and data it uses are listed below.
#define ascon_mask64_rotate_share3_0(x)
Rotates 64-bit masked share 3 with respect to share 0.
#define ascon_mask64_rotate_share3_1(x)
Rotates 64-bit masked share 3 with respect to share 1.
#define ascon_mask64_rotate_share3_2(x)
Rotates 64-bit masked share 3 with respect to share 2.
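These rotate/unrotate helpers exist because each share of a masked 64-bit word is stored at a different bit rotation, so one share must be rotated into another share's alignment before the two can be combined. The exact rotation distances are backend-specific; the sketch below uses arbitrary placeholder distances (11, 22 and 33 bits) purely to illustrate how masking, realignment and unmasking fit together, and is not the library's actual masking code:

#include <assert.h>
#include <stdint.h>

/* Generic rotations; the distances are placeholders, not the library's values. */
#define ROTR64(x, n) (((x) >> (n)) | ((x) << (64 - (n))))
#define ROTL64(x, n) (((x) << (n)) | ((x) >> (64 - (n))))
#define SHARE1_ROT 11   /* placeholder */
#define SHARE2_ROT 22   /* placeholder */
#define SHARE3_ROT 33   /* placeholder */

int main(void)
{
    uint64_t value = 0x0123456789ABCDEFULL;

    /* Split into four shares: three mask words (stand-ins for fresh
     * randomness) plus a share that makes the XOR of all four equal the
     * value; shares 1..3 are stored rotated. */
    uint64_t r1 = 0x0F1E2D3C4B5A6978ULL;
    uint64_t r2 = 0x1122334455667788ULL;
    uint64_t r3 = 0x99AABBCCDDEEFF00ULL;
    uint64_t share0 = value ^ r1 ^ r2 ^ r3;
    uint64_t share1 = ROTR64(r1, SHARE1_ROT);
    uint64_t share2 = ROTR64(r2, SHARE2_ROT);
    uint64_t share3 = ROTR64(r3, SHARE3_ROT);

    /* Unmask: rotate shares 1..3 back into share 0's alignment and XOR. */
    uint64_t recovered = share0
                       ^ ROTL64(share1, SHARE1_ROT)
                       ^ ROTL64(share2, SHARE2_ROT)
                       ^ ROTL64(share3, SHARE3_ROT);
    assert(recovered == value);
    return 0;
}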
#define rightRotate1_64(a)
#define rightRotate6_64(a)
#define rightRotate7_64(a)
#define rightRotate10_64(a)
#define rightRotate13_64(a)
#define rightRotate17_64(a)
#define rightRotate19_64(a)
#define rightRotate28_64(a)
#define rightRotate29_64(a)
#define rightRotate39_64(a)
#define rightRotate41_64(a)
#define rightRotate59_64(a)
#define rightRotate61_64(a)
#define be_load_word64(ptr)
Loads a 64-bit word from a byte buffer in big-endian order.
#define be_store_word64(ptr, x)
Stores a 64-bit word to a byte buffer in big-endian order.
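The rotation and byte-order macros above come from the library's utility header; rightRotateN_64 rotates a 64-bit value right by N bits. A portable way to express the same operations is sketched below (shown as a generic macro and helper functions for clarity; the library's own definitions are per-amount macros and may use compiler intrinsics instead):

#include <stdint.h>

/* Rotate a 64-bit value right by a fixed number of bits (0 < bits < 64). */
#define rightRotate_64(a, bits) \
    (((uint64_t)(a) >> (bits)) | ((uint64_t)(a) << (64 - (bits))))
#define rightRotate13_64(a) rightRotate_64((a), 13)

/* Load a 64-bit word from a byte buffer in big-endian order. */
static inline uint64_t be_load_word64(const uint8_t *ptr)
{
    uint64_t x = 0;
    for (int i = 0; i < 8; ++i)
        x = (x << 8) | ptr[i];
    return x;
}

/* Store a 64-bit word to a byte buffer in big-endian order. */
static inline void be_store_word64(uint8_t *ptr, uint64_t x)
{
    for (int i = 0; i < 8; ++i)
        ptr[i] = (uint8_t)(x >> (8 * (7 - i)));
}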
void ascon_x4_permute(ascon_masked_state_t *state, uint8_t first_round, uint64_t preserve[3])
Permutes the 4-share masked ASCON state in place, executing rounds first_round through 11; preserve supplies three random 64-bit words that help preserve the randomness of the masking during the permutation.
#define and_not_xor(x, y, z)
Computes x ^= (~y & z) with a 4-share masked representation.
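Functionally, the macro relies on the shares of each operand XOR-ing back to the unmasked value once the per-share rotations are undone. The sketch below checks that property for a simplified two-share version of the same accumulation pattern; the share rotations are omitted, so it illustrates only the share arithmetic, not the library's actual masking scheme:

#include <assert.h>
#include <stdint.h>

int main(void)
{
    /* Unmasked operands and the expected result of x ^= (~y & z). */
    uint64_t x = 0xDEADBEEFCAFEF00DULL;
    uint64_t y = 0x0123456789ABCDEFULL;
    uint64_t z = 0xFEDCBA9876543210ULL;
    uint64_t expected = x ^ (~y & z);

    /* Split each operand into two shares; the mask values stand in for
     * fresh randomness. */
    uint64_t x_b = 0x1111111111111111ULL, x_a = x ^ x_b;
    uint64_t y_b = 0x2222222222222222ULL, y_a = y ^ y_b;
    uint64_t z_b = 0x3333333333333333ULL, z_a = z ^ z_b;

    /* Two-share analogue of and_not_xor(): each share of x collects the
     * partial products against the matching share of z, with ~ applied to
     * y's first share only. */
    x_a ^= (~y_a & z_a);
    x_a ^= (y_b & z_a);
    x_b ^= (~y_a & z_b);
    x_b ^= (y_b & z_b);

    /* Recombining the shares of x yields the unmasked result. */
    assert((x_a ^ x_b) == expected);
    return 0;
}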
#define ROUND_CONSTANT(round)
Computes the 64-bit constant for the given round in inverted (bitwise-complemented) form.
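The values it produces are the bitwise complements of ASCON's published round constants (0xf0, 0xe1, ..., 0x4b), presumably to match the inverted representation used by this backend. A quick self-contained check, reusing the same formula:

#include <assert.h>
#include <stdint.h>

#define ROUND_CONSTANT(round) \
    (~(uint64_t)(((0x0F - (round)) << 4) | (round)))

int main(void)
{
    /* ASCON's standard round constants for the 12-round permutation. */
    static const uint8_t std_rc[12] = {
        0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5,
        0x96, 0x87, 0x78, 0x69, 0x5a, 0x4b
    };
    for (int round = 0; round < 12; ++round)
        assert(ROUND_CONSTANT(round) == ~(uint64_t)std_rc[round]);
    return 0;
}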
ascon_state_t state
State of the ASCON permutation which has been masked with up to 4 shares.