#if defined(ASCON_MASKED_X3_BACKEND_C64) && ASCON_MASKED_MAX_SHARES >= 3
/* Computes x ^= (~y & z) with a 3-share masked representation */
#define and_not_xor(x, y, z) \
    do { \
        /* Share a of the result */ \
        x##_a ^= ((~(y##_a)) & z##_a); \
        x##_a ^= ((y##_a) & ascon_mask64_unrotate_share1_0(z##_b)); \
        x##_a ^= ((y##_a) & ascon_mask64_unrotate_share2_0(z##_c)); \
        /* Share b of the result */ \
        x##_b ^= ((y##_b) & ascon_mask64_rotate_share1_0(z##_a)); \
        x##_b ^= ((~(y##_b)) & z##_b); \
        x##_b ^= ((y##_b) & ascon_mask64_unrotate_share2_1(z##_c)); \
        /* Share c of the result */ \
        x##_c ^= ((y##_c) & ascon_mask64_rotate_share2_0(~(z##_a))); \
        x##_c ^= ((y##_c) & ascon_mask64_rotate_share2_1(z##_b)); \
        x##_c ^= ((y##_c) | z##_c); \
    } while (0)
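/*
 * Standalone sanity check for the share algebra above; a sketch, not part
 * of the library.  Each share i of x absorbs the delta z_i ^ (y_i & z), so
 * the three deltas XOR to z ^ (y & z) == (~y) & z.  The real backend keeps
 * shares b and c bit-rotated relative to share a; rotation only permutes
 * bit positions, so for checking the Boolean identity every rotate/unrotate
 * helper may be stubbed as the identity map.  All names below other than
 * and_not_xor are local to this sketch.
 */
#include <assert.h>
#include <stdint.h>

#define ascon_mask64_rotate_share1_0(x)   (x) /* stub: identity */
#define ascon_mask64_rotate_share2_0(x)   (x) /* stub: identity */
#define ascon_mask64_rotate_share2_1(x)   (x) /* stub: identity */
#define ascon_mask64_unrotate_share1_0(x) (x) /* stub: identity */
#define ascon_mask64_unrotate_share2_0(x) (x) /* stub: identity */
#define ascon_mask64_unrotate_share2_1(x) (x) /* stub: identity */

/* ... compile together with the and_not_xor definition above ... */

int main(void)
{
    /* Fixed test shares; a real masked implementation draws these from a RNG. */
    uint64_t x_a = 0x0123456789abcdefULL, x_b = 0x0f1e2d3c4b5a6978ULL,
             x_c = 0x1111111111111111ULL;
    uint64_t y_a = 0xdeadbeefcafef00dULL, y_b = 0x0f0f0f0f0f0f0f0fULL,
             y_c = 0x2222222222222222ULL;
    uint64_t z_a = 0xa5a5a5a5a5a5a5a5ULL, z_b = 0x5a5a5a5a5a5a5a5aULL,
             z_c = 0x3333333333333333ULL;

    /* The unmasked value of each word is the XOR of its three shares. */
    uint64_t x = x_a ^ x_b ^ x_c;
    uint64_t y = y_a ^ y_b ^ y_c;
    uint64_t z = z_a ^ z_b ^ z_c;

    and_not_xor(x, y, z);

    /* Recombining the updated shares must match x ^= (~y & z). */
    assert((x_a ^ x_b ^ x_c) == (x ^ (~y & z)));
    return 0;
}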
#define ROUND_CONSTANT(round) \
    (~(uint64_t)(((0x0F - (round)) << 4) | (round)))
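/*
 * These are the standard ASCON round constants with all 64 bits
 * complemented, e.g.:
 *
 *   ROUND_CONSTANT(0)  == ~(uint64_t)0xF0 == 0xFFFFFFFFFFFFFF0FULL
 *   ROUND_CONSTANT(4)  == ~(uint64_t)0xB4 == 0xFFFFFFFFFFFFFF4BULL
 *   ROUND_CONSTANT(11) == ~(uint64_t)0x4B == 0xFFFFFFFFFFFFFFB4ULL
 *
 * The complement is presumably absorbed by keeping the x2 word in inverted
 * form, which also removes the final NOT from the S-box; that reading is an
 * inference from this file, not a documented fact.
 */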
static const uint64_t RC[12] = {
    ROUND_CONSTANT(0),  ROUND_CONSTANT(1),  ROUND_CONSTANT(2),
    ROUND_CONSTANT(3),  ROUND_CONSTANT(4),  ROUND_CONSTANT(5),
    ROUND_CONSTANT(6),  ROUND_CONSTANT(7),  ROUND_CONSTANT(8),
    ROUND_CONSTANT(9),  ROUND_CONSTANT(10), ROUND_CONSTANT(11)
};
void ascon_x3_permute(ascon_masked_state_t *state, uint8_t first_round, uint64_t preserve[2])
{
    /* One local copy per share (a, b, c) of the five 64-bit state words */
    uint64_t x0_a, x1_a, x2_a, x3_a, x4_a;
    uint64_t x0_b, x1_b, x2_b, x3_b, x4_b;
    uint64_t x0_c, x1_c, x2_c, x3_c, x4_c;
    /* Temporary masked words for the substitution layer */
    uint64_t t0_a, t0_b, t0_c, t1_a, t1_b, t1_c;
#if defined(ASCON_MASKED_WORD_BACKEND_DIRECT_XOR)
    /* ... load the three shares of each state word into the locals ... */
#endif
    while (first_round < 12) {
        /* Add the inverted round constant to the first share of x2 */
        x2_a ^= RC[first_round++];
        /* ... substitution and linear layers elided; see the sketch below ... */
    }
#if defined(ASCON_MASKED_WORD_BACKEND_DIRECT_XOR)
    /* ... store the three shares of each word back to the state ... */
#endif
}

#endif /* ASCON_MASKED_X3_BACKEND_C64 && ASCON_MASKED_MAX_SHARES >= 3 */
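The round body elided above is, structurally, the standard ASCON round: the constant addition shown, the bitsliced 5-bit S-box, and the linear diffusion layer. The following sketch shows the unmasked round for orientation only; it is not this file's code. In the masked body, each x ^= (~y & z) step becomes one and_not_xor() call on share triples, each rotation is applied to every share, and the final x2 = ~x2 of the textbook S-box is presumably absorbed by the inverted storage of x2 noted earlier.

    /* Substitution layer (bitsliced ASCON S-box) */
    x0 ^= x4;  x4 ^= x3;  x2 ^= x1;
    t0 = x0;   t1 = x1;            /* keep pre-chi copies of x0 and x1 */
    x0 ^= (~x1) & x2;              /* one and_not_xor() gadget per line */
    x1 ^= (~x2) & x3;
    x2 ^= (~x3) & x4;
    x3 ^= (~x4) & t0;
    x4 ^= (~t0) & t1;
    x1 ^= x0;  x0 ^= x4;  x3 ^= x2;
    /* x2 = ~x2 omitted: x2 is assumed to be stored inverted */

    /* Linear diffusion layer, rotation amounts per the ASCON specification */
    x0 ^= rightRotate19_64(x0) ^ rightRotate28_64(x0);
    x1 ^= rightRotate61_64(x1) ^ rightRotate39_64(x1);
    x2 ^= rightRotate1_64(x2)  ^ rightRotate6_64(x2);
    x3 ^= rightRotate10_64(x3) ^ rightRotate17_64(x3);
    x4 ^= rightRotate7_64(x4)  ^ rightRotate41_64(x4);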
The following utility macros, used by this backend, operate on masked ASCON states with between 2 and 4 shares.
#define ascon_mask64_rotate_share1_0(x)
Rotates 64-bit masked share 1 with respect to share 0.
#define ascon_mask64_rotate_share2_0(x)
Rotates 64-bit masked share 2 with respect to share 0.
#define ascon_mask64_rotate_share2_1(x)
Rotates 64-bit masked share 2 with respect to share 1.
#define ascon_mask64_unrotate_share1_0(x)
Inverse of ascon_mask64_rotate_share1_0().
#define ascon_mask64_unrotate_share2_0(x)
Inverse of ascon_mask64_rotate_share2_0().
#define ascon_mask64_unrotate_share2_1(x)
Inverse of ascon_mask64_rotate_share2_1().
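These helpers exist because each share is stored at a distinct bit rotation, so a glitch that momentarily combines two raw shares does not line up corresponding bits of the secret. Whenever the code must combine two shares logically, as in and_not_xor() above, it first rotates one share into the other's frame. A minimal sketch of the idea, assuming a 13-bit offset between shares 0 and 1; the library's actual offsets are defined in the masked-word backend and are not shown in this file:

/* Hypothetical definitions for illustration only. */
#define ascon_mask64_rotate_share1_0(x)   (((x) >> 13) | ((x) << 51)) /* into share 1's frame */
#define ascon_mask64_unrotate_share1_0(x) (((x) << 13) | ((x) >> 51)) /* back to share 0's frame */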
#define rightRotate1_64(a)
#define rightRotate6_64(a)
#define rightRotate7_64(a)
#define rightRotate10_64(a)
#define rightRotate13_64(a)
#define rightRotate17_64(a)
#define rightRotate19_64(a)
#define rightRotate28_64(a)
#define rightRotate29_64(a)
#define rightRotate39_64(a)
#define rightRotate41_64(a)
#define rightRotate61_64(a)
Rotate a 64-bit word right by the indicated number of bits.
#define be_load_word64(ptr)
Loads a 64-bit word from ptr in big-endian byte order.
#define be_store_word64(ptr, x)
Stores the 64-bit word x to ptr in big-endian byte order.
#define and_not_xor(x, y, z)
Computes x ^= (~y & z) with a 3-share masked representation. Each share i of x absorbs the delta z_i ^ (y_i & z), so the XOR of the three updated shares equals the unmasked result, while every intermediate term combines at most one share of y with one share of z.
#define ROUND_CONSTANT(round)
Round constant for the given ASCON round, with all 64 bits inverted.
void ascon_x3_permute(ascon_masked_state_t *state, uint8_t first_round, uint64_t preserve[2])
Permutes the masked ASCON state with a 3-share circuit, running rounds first_round through 11.
ascon_state_t state
State of the ASCON permutation which has been masked with up to 4 shares.
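A hedged usage sketch for ascon_x3_permute(), assuming the caller has already loaded and masked the input into the state. The header name below and the reading of preserve as two words of fresh randomness are assumptions based on the declarations above, not confirmed by this file. Because the round loop runs while first_round < 12, passing 0 performs all 12 rounds and passing 4 performs the final 8.

#include <stdint.h>
#include "ascon-masked-state.h" /* header name assumed */

void permute_example(ascon_masked_state_t *st, uint64_t fresh[2])
{
    /* Full 12-round permutation: rounds 0 through 11. */
    ascon_x3_permute(st, 0, fresh);

    /* Reduced 8-round permutation: start at round 4 (rounds 4..11). */
    ascon_x3_permute(st, 4, fresh);
}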