#include "pch.h"
#include "config.h"

#include "lsh.h"
#include "misc.h"

#if defined(CRYPTOPP_SSSE3_AVAILABLE) && defined(CRYPTOPP_ENABLE_64BIT_SSE)

#if defined(CRYPTOPP_SSSE3_AVAILABLE)
# include <emmintrin.h>
# include <tmmintrin.h>
#endif

#if defined(CRYPTOPP_XOP_AVAILABLE)
# include <ammintrin.h>
#endif

#if (CRYPTOPP_GCC_VERSION >= 40500)
# include <x86intrin.h>
#endif

// Clang workaround for __m128i pointer casts. Guarded in case lsh.h
// already provides these helpers.
#if !defined(M128_CAST)
# define M128_CAST(x) ((__m128i *)(void *)(x))
#endif
#if !defined(CONST_M128_CAST)
# define CONST_M128_CAST(x) ((const __m128i *)(const void *)(x))
#endif
ANONYMOUS_NAMESPACE_BEGIN
const unsigned int LSH256_MSG_BLK_BYTE_LEN = 128;
const unsigned int LSH256_HASH_VAL_MAX_BYTE_LEN = 32;

const unsigned int CV_WORD_LEN = 16;
const unsigned int CONST_WORD_LEN = 8;
const unsigned int NUM_STEPS = 26;

const unsigned int ROT_EVEN_ALPHA = 29;
const unsigned int ROT_EVEN_BETA = 1;
const unsigned int ROT_ODD_ALPHA = 5;
const unsigned int ROT_ODD_BETA = 17;

const unsigned int LSH_TYPE_256_256 = 0x0000020;
const unsigned int LSH_TYPE_256_224 = 0x000001C;

const unsigned int LSH_SUCCESS = 0x0;
const unsigned int LSH_ERR_INVALID_DATABITLEN = 0x2403;
const unsigned int LSH_ERR_INVALID_STATE = 0x2404;

const unsigned int AlgorithmType = 80;
const unsigned int RemainingBits = 81;
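
// The SSSE3 code treats the caller's state as one flat word32 array. The
// layout, inferred from the context constructors below: words 0-7 hold cv_l,
// words 8-15 cv_r, words 16-47 the four 8-word sub-messages, words 48-79 the
// 128-byte partial-block buffer, word 80 the algorithm type, and word 81 the
// remaining bit count.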
ANONYMOUS_NAMESPACE_END

NAMESPACE_BEGIN(CryptoPP)
NAMESPACE_BEGIN(LSH)

// IVs and step constants are defined in lsh256.cpp.
extern const word32 LSH256_IV224[CV_WORD_LEN];
extern const word32 LSH256_IV256[CV_WORD_LEN];
extern const word32 LSH256_StepConstants[CONST_WORD_LEN * NUM_STEPS];

NAMESPACE_END  // LSH
NAMESPACE_END  // CryptoPP
ANONYMOUS_NAMESPACE_BEGIN
using CryptoPP::byte;
using CryptoPP::word32;
using CryptoPP::rotlFixed;
using CryptoPP::rotlConstant;

using CryptoPP::GetBlock;
using CryptoPP::LittleEndian;
using CryptoPP::ConditionalByteReverse;
using CryptoPP::LITTLE_ENDIAN_ORDER;

typedef byte lsh_u8;
typedef word32 lsh_u32;
typedef word32 lsh_uint;
typedef word32 lsh_err;
typedef word32 lsh_type;

using CryptoPP::LSH::LSH256_IV224;
using CryptoPP::LSH::LSH256_IV256;
using CryptoPP::LSH::LSH256_StepConstants;
struct LSH256_SSSE3_Context
{
    LSH256_SSSE3_Context(word32* state, word32 algType, word32& remainingBitLength) :
        cv_l(state+0), cv_r(state+8), sub_msgs(state+16),
        last_block(reinterpret_cast<byte*>(state+48)),
        remain_databitlen(remainingBitLength),
        alg_type(static_cast<lsh_type>(algType)) {}

    lsh_u32* cv_l;
    lsh_u32* cv_r;
    lsh_u32* sub_msgs;
    lsh_u8*  last_block;
    lsh_u32& remain_databitlen;
    lsh_type alg_type;
};
struct LSH256_SSSE3_Internal
{
    LSH256_SSSE3_Internal(word32* state) :
        submsg_e_l(state+16), submsg_e_r(state+24),
        submsg_o_l(state+32), submsg_o_r(state+40) { }

    lsh_u32* submsg_e_l;  // even left sub-message
    lsh_u32* submsg_e_r;  // even right sub-message
    lsh_u32* submsg_o_l;  // odd left sub-message
    lsh_u32* submsg_o_r;  // odd right sub-message
};
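
// Note that LSH256_SSSE3_Internal is a second view into the same state array:
// its sub-message pointers alias LSH256_SSSE3_Context::sub_msgs (state+16
// onward), so the two structs share storage rather than copying it.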
inline bool LSH_IS_LSH512(lsh_uint val) {
    return (val & 0xf0000) == 0;
}

inline lsh_uint LSH_GET_SMALL_HASHBIT(lsh_uint val) {
    return val >> 24;
}

inline lsh_uint LSH_GET_HASHBYTE(lsh_uint val) {
    return val & 0xffff;
}

inline lsh_uint LSH_GET_HASHBIT(lsh_uint val) {
    return (LSH_GET_HASHBYTE(val) << 3) - LSH_GET_SMALL_HASHBIT(val);
}

inline lsh_u32 loadLE32(lsh_u32 v) {
    return ConditionalByteReverse(LITTLE_ENDIAN_ORDER, v);
}

inline lsh_u32 ROTL(lsh_u32 x, lsh_u32 r) {
    return rotlFixed(x, r);
}
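
// Worked example of the alg_type encoding: the low 16 bits give the digest
// size in bytes and the top 8 bits any trailing bit count. LSH_TYPE_256_256
// (0x20) yields LSH_GET_HASHBYTE() = 32 and LSH_GET_HASHBIT() = 256;
// LSH_TYPE_256_224 (0x1C) yields 28 and 224.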
inline void load_msg_blk(LSH256_SSSE3_Internal* i_state, const lsh_u8 msgblk[LSH256_MSG_BLK_BYTE_LEN])
{
    CRYPTOPP_ASSERT(i_state != NULLPTR);

    lsh_u32* submsg_e_l = i_state->submsg_e_l;
    lsh_u32* submsg_e_r = i_state->submsg_e_r;
    lsh_u32* submsg_o_l = i_state->submsg_o_l;
    lsh_u32* submsg_o_r = i_state->submsg_o_r;

    _mm_storeu_si128(M128_CAST(submsg_e_l+0),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+0)));
    _mm_storeu_si128(M128_CAST(submsg_e_l+4),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+16)));
    _mm_storeu_si128(M128_CAST(submsg_e_r+0),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+32)));
    _mm_storeu_si128(M128_CAST(submsg_e_r+4),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+48)));
    _mm_storeu_si128(M128_CAST(submsg_o_l+0),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+64)));
    _mm_storeu_si128(M128_CAST(submsg_o_l+4),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+80)));
    _mm_storeu_si128(M128_CAST(submsg_o_r+0),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+96)));
    _mm_storeu_si128(M128_CAST(submsg_o_r+4),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+112)));
}
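
// The 128-byte block is split into four 8-word sub-messages in the order
// even-left, even-right, odd-left, odd-right. Unaligned loads are used
// because the caller's data pointer carries no alignment guarantee.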
inline void msg_exp_even(LSH256_SSSE3_Internal* i_state)
{
    CRYPTOPP_ASSERT(i_state != NULLPTR);

    lsh_u32* submsg_e_l = i_state->submsg_e_l;
    lsh_u32* submsg_e_r = i_state->submsg_e_r;
    lsh_u32* submsg_o_l = i_state->submsg_o_l;
    lsh_u32* submsg_o_r = i_state->submsg_o_r;

    _mm_storeu_si128(M128_CAST(submsg_e_l+0), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+0)),
        _mm_shuffle_epi32(
            _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+0)), _MM_SHUFFLE(1,0,2,3))));
    _mm_storeu_si128(M128_CAST(submsg_e_l+4), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+4)),
        _mm_shuffle_epi32(
            _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+4)), _MM_SHUFFLE(2,1,0,3))));
    _mm_storeu_si128(M128_CAST(submsg_e_r+0), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+0)),
        _mm_shuffle_epi32(
            _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+0)), _MM_SHUFFLE(1,0,2,3))));
    _mm_storeu_si128(M128_CAST(submsg_e_r+4), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+4)),
        _mm_shuffle_epi32(
            _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+4)), _MM_SHUFFLE(2,1,0,3))));
}
inline void msg_exp_odd(LSH256_SSSE3_Internal* i_state)
{
    CRYPTOPP_ASSERT(i_state != NULLPTR);

    lsh_u32* submsg_e_l = i_state->submsg_e_l;
    lsh_u32* submsg_e_r = i_state->submsg_e_r;
    lsh_u32* submsg_o_l = i_state->submsg_o_l;
    lsh_u32* submsg_o_r = i_state->submsg_o_r;

    _mm_storeu_si128(M128_CAST(submsg_o_l+0), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+0)),
        _mm_shuffle_epi32(
            _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+0)), _MM_SHUFFLE(1,0,2,3))));
    _mm_storeu_si128(M128_CAST(submsg_o_l+4), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+4)),
        _mm_shuffle_epi32(
            _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+4)), _MM_SHUFFLE(2,1,0,3))));
    _mm_storeu_si128(M128_CAST(submsg_o_r+0), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+0)),
        _mm_shuffle_epi32(
            _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+0)), _MM_SHUFFLE(1,0,2,3))));
    _mm_storeu_si128(M128_CAST(submsg_o_r+4), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+4)),
        _mm_shuffle_epi32(
            _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+4)), _MM_SHUFFLE(2,1,0,3))));
}
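
// Expansion note: each call computes the next sub-message of one parity as
// the word-wise sum of the opposite parity's sub-message and a tau-permuted
// copy of itself. The two _MM_SHUFFLE masks realize the 8-word permutation
// tau as independent 4-lane shuffles of the low and high registers.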
inline void load_sc(const lsh_u32** p_const_v, size_t i)
{
    CRYPTOPP_ASSERT(p_const_v != NULLPTR);

    *p_const_v = &LSH256_StepConstants[i];
}
inline void msg_add_even(lsh_u32 cv_l[8], lsh_u32 cv_r[8], LSH256_SSSE3_Internal* i_state)
{
    CRYPTOPP_ASSERT(i_state != NULLPTR);

    lsh_u32* submsg_e_l = i_state->submsg_e_l;
    lsh_u32* submsg_e_r = i_state->submsg_e_r;

    _mm_storeu_si128(M128_CAST(cv_l+0), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+0)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+0))));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+4))));
    _mm_storeu_si128(M128_CAST(cv_r+0), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+0)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+0))));
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+4))));
}
inline void msg_add_odd(lsh_u32 cv_l[8], lsh_u32 cv_r[8], LSH256_SSSE3_Internal* i_state)
{
    CRYPTOPP_ASSERT(i_state != NULLPTR);

    lsh_u32* submsg_o_l = i_state->submsg_o_l;
    lsh_u32* submsg_o_r = i_state->submsg_o_r;

    _mm_storeu_si128(M128_CAST(cv_l), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l))));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+4))));
    _mm_storeu_si128(M128_CAST(cv_r), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r))));
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+4))));
}
inline void add_blk(lsh_u32 cv_l[8], const lsh_u32 cv_r[8])
{
    _mm_storeu_si128(M128_CAST(cv_l), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(cv_l)),
        _mm_loadu_si128(CONST_M128_CAST(cv_r))));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_add_epi32(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(cv_r+4))));
}
template <unsigned int R>
inline void rotate_blk(lsh_u32 cv[8])
{
#if defined(CRYPTOPP_XOP_AVAILABLE)
    _mm_storeu_si128(M128_CAST(cv),
        _mm_roti_epi32(_mm_loadu_si128(CONST_M128_CAST(cv)), R));
    _mm_storeu_si128(M128_CAST(cv+4),
        _mm_roti_epi32(_mm_loadu_si128(CONST_M128_CAST(cv+4)), R));
#else
    _mm_storeu_si128(M128_CAST(cv), _mm_or_si128(
        _mm_slli_epi32(_mm_loadu_si128(CONST_M128_CAST(cv)), R),
        _mm_srli_epi32(_mm_loadu_si128(CONST_M128_CAST(cv)), 32-R)));
    _mm_storeu_si128(M128_CAST(cv+4), _mm_or_si128(
        _mm_slli_epi32(_mm_loadu_si128(CONST_M128_CAST(cv+4)), R),
        _mm_srli_epi32(_mm_loadu_si128(CONST_M128_CAST(cv+4)), 32-R)));
#endif
}
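
// Neither SSE2 nor SSSE3 has a 32-bit rotate, so the generic path emulates
// ROTL with two shifts and an OR. XOP-capable AMD processors provide
// _mm_roti_epi32, which rotates in a single instruction.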
inline void xor_with_const(lsh_u32* cv_l, const lsh_u32* const_v)
{
    _mm_storeu_si128(M128_CAST(cv_l), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l)),
        _mm_loadu_si128(CONST_M128_CAST(const_v))));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(const_v+4))));
}
inline void rotate_msg_gamma(lsh_u32 cv_r[8])
{
    // gamma_i = { 0, 8, 16, 24, 24, 16, 8, 0 }
    _mm_storeu_si128(M128_CAST(cv_r+0),
        _mm_shuffle_epi8(_mm_loadu_si128(CONST_M128_CAST(cv_r+0)),
            _mm_set_epi8(12,15,14,13, 9,8,11,10, 6,5,4,7, 3,2,1,0)));
    _mm_storeu_si128(M128_CAST(cv_r+4),
        _mm_shuffle_epi8(_mm_loadu_si128(CONST_M128_CAST(cv_r+4)),
            _mm_set_epi8(15,14,13,12, 10,9,8,11, 5,4,7,6, 0,3,2,1)));
}
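
// Because every gamma rotation count is a multiple of 8 bits, each 32-bit
// word can be rotated with a byte shuffle (_mm_shuffle_epi8) instead of
// shifts. For example, the mask lane (6,5,4,7) maps word 1 to ROTL(w, 8).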
inline void word_perm(lsh_u32 cv_l[8], lsh_u32 cv_r[8])
{
    _mm_storeu_si128(M128_CAST(cv_l+0), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+0)), _MM_SHUFFLE(3,1,0,2)));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)), _MM_SHUFFLE(3,1,0,2)));
    _mm_storeu_si128(M128_CAST(cv_r+0), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+0)), _MM_SHUFFLE(1,2,3,0)));
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+4)), _MM_SHUFFLE(1,2,3,0)));

    __m128i temp = _mm_loadu_si128(CONST_M128_CAST(cv_l+0));
    _mm_storeu_si128(M128_CAST(cv_l+0),
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)));
    _mm_storeu_si128(M128_CAST(cv_l+4),
        _mm_loadu_si128(CONST_M128_CAST(cv_r+4)));
    _mm_storeu_si128(M128_CAST(cv_r+4),
        _mm_loadu_si128(CONST_M128_CAST(cv_r+0)));
    _mm_storeu_si128(M128_CAST(cv_r+0), temp);
}
template <unsigned int Alpha, unsigned int Beta>
inline void mix(lsh_u32 cv_l[8], lsh_u32 cv_r[8], const lsh_u32 const_v[8])
{
    add_blk(cv_l, cv_r);
    rotate_blk<Alpha>(cv_l);
    xor_with_const(cv_l, const_v);
    add_blk(cv_r, cv_l);
    rotate_blk<Beta>(cv_r);
    add_blk(cv_l, cv_r);
    rotate_msg_gamma(cv_r);
}
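
// One LSH step is mix() followed by word_perm(). In scalar terms, with the
// message already XORed into the chaining variable:
//   cv_l += cv_r;  cv_l = ROTL(cv_l, Alpha);  cv_l ^= SC_j;
//   cv_r += cv_l;  cv_r = ROTL(cv_r, Beta);   cv_l += cv_r;
//   cv_r = gamma-rotate(cv_r);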
inline void compress(LSH256_SSSE3_Context* ctx, const lsh_u8 pdMsgBlk[LSH256_MSG_BLK_BYTE_LEN])
{
    LSH256_SSSE3_Internal  s_state(ctx->cv_l);
    LSH256_SSSE3_Internal* i_state = &s_state;

    const lsh_u32* const_v = NULL;
    lsh_u32* cv_l = ctx->cv_l;
    lsh_u32* cv_r = ctx->cv_r;

    load_msg_blk(i_state, pdMsgBlk);

    msg_add_even(cv_l, cv_r, i_state);
    load_sc(&const_v, 0);
    mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
    word_perm(cv_l, cv_r);

    msg_add_odd(cv_l, cv_r, i_state);
    load_sc(&const_v, 8);
    mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
    word_perm(cv_l, cv_r);

    for (size_t i = 1; i < NUM_STEPS / 2; i++)
    {
        msg_exp_even(i_state);
        msg_add_even(cv_l, cv_r, i_state);
        load_sc(&const_v, 16 * i);
        mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);

        msg_exp_odd(i_state);
        msg_add_odd(cv_l, cv_r, i_state);
        load_sc(&const_v, 16 * i + 8);
        mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);
    }

    msg_exp_even(i_state);
    msg_add_even(cv_l, cv_r, i_state);
}
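
// compress() runs all NUM_STEPS = 26 steps over one 128-byte block: steps 0
// and 1 consume the raw even/odd sub-messages, the loop performs the
// remaining 24 steps with message expansion in between, and the trailing
// msg_exp_even/msg_add_even injects the last expanded message into the
// chaining variable.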
inline void load_iv(lsh_u32 cv_l[8], lsh_u32 cv_r[8], const lsh_u32 iv[16])
{
    _mm_storeu_si128(M128_CAST(cv_l+0), _mm_loadu_si128(CONST_M128_CAST(iv+0)));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_loadu_si128(CONST_M128_CAST(iv+4)));
    _mm_storeu_si128(M128_CAST(cv_r+0), _mm_loadu_si128(CONST_M128_CAST(iv+8)));
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_loadu_si128(CONST_M128_CAST(iv+12)));
}
inline void zero_iv(lsh_u32 cv_l[8], lsh_u32 cv_r[8])
{
    _mm_storeu_si128(M128_CAST(cv_l+0), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_r+0), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_setzero_si128());
}
inline void zero_submsgs(LSH256_SSSE3_Context* ctx)
{
    lsh_u32* sub_msgs = ctx->sub_msgs;

    _mm_storeu_si128(M128_CAST(sub_msgs+ 0), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+ 4), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+ 8), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+12), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+16), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+20), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+24), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+28), _mm_setzero_si128());
}
inline void init224(LSH256_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);

    zero_submsgs(ctx);
    load_iv(ctx->cv_l, ctx->cv_r, LSH256_IV224);
}

inline void init256(LSH256_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);

    zero_submsgs(ctx);
    load_iv(ctx->cv_l, ctx->cv_r, LSH256_IV256);
}
inline void fin(LSH256_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);

    _mm_storeu_si128(M128_CAST(ctx->cv_l+0), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_l+0)),
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_r+0))));
    _mm_storeu_si128(M128_CAST(ctx->cv_l+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_r+4))));
}
inline void get_hash(LSH256_SSSE3_Context* ctx, lsh_u8* pbHashVal)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);
    CRYPTOPP_ASSERT(ctx->alg_type != 0);

    lsh_uint alg_type = ctx->alg_type;
    lsh_uint hash_val_byte_len = LSH_GET_HASHBYTE(alg_type);
    lsh_uint hash_val_bit_len = LSH_GET_SMALL_HASHBIT(alg_type);

    memcpy(pbHashVal, ctx->cv_l, hash_val_byte_len);
    if (hash_val_bit_len){
        pbHashVal[hash_val_byte_len-1] &= (((lsh_u8)0xff) << hash_val_bit_len);
    }
}
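
// Truncation example: for LSH_TYPE_256_224 (0x1C) the byte length is 28 and
// the small-hashbit count is 0, so exactly 28 bytes are copied and the
// masking branch is skipped. The mask only matters for digest lengths that
// are not a whole number of bytes.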
lsh_err lsh256_ssse3_init(LSH256_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);
    CRYPTOPP_ASSERT(ctx->alg_type != 0);

    lsh_u32 alg_type = ctx->alg_type;
    const lsh_u32* const_v = NULL;
    ctx->remain_databitlen = 0;

    switch (alg_type)
    {
    case LSH_TYPE_256_256:
        init256(ctx);
        return LSH_SUCCESS;
    case LSH_TYPE_256_224:
        init224(ctx);
        return LSH_SUCCESS;
    default:
        break;
    }

    lsh_u32* cv_l = ctx->cv_l;
    lsh_u32* cv_r = ctx->cv_r;

    zero_iv(cv_l, cv_r);
    cv_l[0] = LSH256_HASH_VAL_MAX_BYTE_LEN;
    cv_l[1] = LSH_GET_HASHBIT(alg_type);

    for (size_t i = 0; i < NUM_STEPS / 2; i++)
    {
        load_sc(&const_v, i * 16);
        mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);

        load_sc(&const_v, i * 16 + 8);
        mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);
    }

    return LSH_SUCCESS;
}
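
// For digest sizes other than 224 and 256 bits there is no precomputed IV:
// the chaining variable is zeroed, seeded with the maximum digest byte
// length and the requested bit length, and then run through NUM_STEPS/2
// double-steps of the step function to derive an IV on the fly.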
lsh_err lsh256_ssse3_update(LSH256_SSSE3_Context* ctx, const lsh_u8* data, size_t databitlen)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);
    CRYPTOPP_ASSERT(data != NULLPTR);
    CRYPTOPP_ASSERT(databitlen % 8 == 0);

    if (databitlen == 0){
        return LSH_SUCCESS;
    }

    // This implementation is byte-oriented; tail bits are not processed.
    size_t databytelen = databitlen >> 3;
    const size_t pos2 = 0;

    size_t remain_msg_byte = ctx->remain_databitlen >> 3;
    const size_t remain_msg_bit = 0;

    if (remain_msg_byte >= LSH256_MSG_BLK_BYTE_LEN){
        return LSH_ERR_INVALID_STATE;
    }
    if (remain_msg_bit > 0){
        return LSH_ERR_INVALID_DATABITLEN;
    }

    if (databytelen + remain_msg_byte < LSH256_MSG_BLK_BYTE_LEN)
    {
        memcpy(ctx->last_block + remain_msg_byte, data, databytelen);
        ctx->remain_databitlen += (lsh_uint)databitlen;
        remain_msg_byte += (lsh_uint)databytelen;

        if (pos2){
            ctx->last_block[remain_msg_byte] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
        }
        return LSH_SUCCESS;
    }

    if (remain_msg_byte > 0){
        size_t more_byte = LSH256_MSG_BLK_BYTE_LEN - remain_msg_byte;
        memcpy(ctx->last_block + remain_msg_byte, data, more_byte);
        compress(ctx, ctx->last_block);
        data += more_byte;
        databytelen -= more_byte;
        remain_msg_byte = 0;
        ctx->remain_databitlen = 0;
    }

    while (databytelen >= LSH256_MSG_BLK_BYTE_LEN)
    {
        compress(ctx, data);
        data += LSH256_MSG_BLK_BYTE_LEN;
        databytelen -= LSH256_MSG_BLK_BYTE_LEN;
    }

    if (databytelen > 0){
        memcpy(ctx->last_block, data, databytelen);
        ctx->remain_databitlen = (lsh_uint)(databytelen << 3);
    }

    if (pos2){
        ctx->last_block[databytelen] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
        ctx->remain_databitlen += pos2;
    }

    return LSH_SUCCESS;
}
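
// Update buffers a partial block in ctx->last_block and only compresses once
// a full 128-byte block is available. pos2 and remain_msg_bit are pinned to
// zero because this implementation is byte-oriented; the bit-level branches
// are retained from the reference code but are never taken.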
lsh_err lsh256_ssse3_final(LSH256_SSSE3_Context* ctx, lsh_u8* hashval)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);
    CRYPTOPP_ASSERT(hashval != NULLPTR);

    size_t remain_msg_byte = ctx->remain_databitlen >> 3;
    const size_t remain_msg_bit = 0;

    if (remain_msg_byte >= LSH256_MSG_BLK_BYTE_LEN){
        return LSH_ERR_INVALID_STATE;
    }

    if (remain_msg_bit){
        ctx->last_block[remain_msg_byte] |= (0x1 << (7 - remain_msg_bit));
    }
    else{
        ctx->last_block[remain_msg_byte] = 0x80;
    }
    memset(ctx->last_block + remain_msg_byte + 1, 0, LSH256_MSG_BLK_BYTE_LEN - remain_msg_byte - 1);

    compress(ctx, ctx->last_block);

    fin(ctx);
    get_hash(ctx, hashval);

    return LSH_SUCCESS;
}
ANONYMOUS_NAMESPACE_END

NAMESPACE_BEGIN(CryptoPP)
void LSH256_Base_Restart_SSSE3(word32* state)
{
    state[RemainingBits] = 0;
    LSH256_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
    lsh_err err = lsh256_ssse3_init(&ctx);

    if (err != LSH_SUCCESS)
        throw Exception(Exception::OTHER_ERROR, "LSH256_Base: lsh256_ssse3_init failed");
}
void LSH256_Base_Update_SSSE3(word32* state, const byte *input, size_t size)
{
    LSH256_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
    lsh_err err = lsh256_ssse3_update(&ctx, input, 8*size);

    if (err != LSH_SUCCESS)
        throw Exception(Exception::OTHER_ERROR, "LSH256_Base: lsh256_ssse3_update failed");
}
void LSH256_Base_TruncatedFinal_SSSE3(word32* state, byte *hash, size_t)
{
    LSH256_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
    lsh_err err = lsh256_ssse3_final(&ctx, hash);

    if (err != LSH_SUCCESS)
        throw Exception(Exception::OTHER_ERROR, "LSH256_Base: lsh256_ssse3_final failed");
}

NAMESPACE_END

#endif  // CRYPTOPP_SSSE3_AVAILABLE && CRYPTOPP_ENABLE_64BIT_SSE
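
// A minimal usage sketch of the public interface that ultimately reaches
// these wrappers (assumes the LSH256 class declared in lsh.h; illustrative
// only, not part of this translation unit):
//
//   CryptoPP::LSH256 hash;   // the SSSE3 path is selected at runtime
//   const CryptoPP::byte msg[] = {'a', 'b', 'c'};
//   CryptoPP::byte digest[CryptoPP::LSH256::DIGESTSIZE];
//   hash.Update(msg, sizeof(msg));
//   hash.Final(digest);      // 32-byte LSH-256-256 digest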