/**
 * Author......: Jens Steube
 * License.....: MIT
 */

#include <stdint.h>
#include <immintrin.h>

/* Support definitions assumed for a standalone build: in hashcat these
 * normally come from a shared header. BOX indexes a lookup table, ROTL32
 * rotates a 32-bit word, ROTL32_SSE does the same on all four lanes. */

typedef unsigned int uint;

#define BOX(i,n,S) (S)[(n)][(i)]

#define ROTL32(x,n)     (((x) << (n)) | ((x) >> (32 - (n))))
#define ROTL32_SSE(x,n) _mm_or_si128 (_mm_slli_epi32 ((x), (n)), _mm_srli_epi32 ((x), (32 - (n))))

/* Scalar counterparts of the SSE2 permutation macros below, used by the
 * reference implementations near the end of the file (assumed from the
 * matching SSE2 definitions). */

#define PERM_OP(a,b,tt,n,m)       \
{                                 \
  tt = ((a) >> (n)) ^ (b);        \
  tt = tt & (m);                  \
  b  = (b) ^ tt;                  \
  tt = tt << (n);                 \
  a  = (a) ^ tt;                  \
}

#define HPERM_OP(a,tt,n,m)            \
{                                     \
  tt = ((a) << (16 + (n))) ^ (a);     \
  tt = tt & (m);                      \
  a  = (a) ^ tt;                      \
  tt = tt >> (16 + (n));              \
  a  = (a) ^ tt;                      \
}

/* Swap the bits of a and b selected by mask m, with the bits of a taken n
 * positions higher: the classic building block for bit permutations. */

#define PERM_OP_SSE2(a,b,tt,n,m)   \
{                                  \
  tt = _mm_srli_epi32 (a, n);      \
  tt = _mm_xor_si128  (tt, b);     \
  tt = _mm_and_si128  (tt, m);     \
  b  = _mm_xor_si128  (b, tt);     \
  tt = _mm_slli_epi32 (tt, n);     \
  a  = _mm_xor_si128  (a, tt);     \
}

#define HPERM_OP_SSE2(a,tt,n,m)       \
{                                     \
  tt = _mm_slli_epi32 (a, (16 + n));  \
  tt = _mm_xor_si128  (tt, a);        \
  tt = _mm_and_si128  (tt, m);        \
  a  = _mm_xor_si128  (a, tt);        \
  tt = _mm_srli_epi32 (tt, (16 + n)); \
  a  = _mm_xor_si128  (a, tt);        \
}

/* Initial and final permutation as permutation-swap cascades; not referenced
 * below. These operate on __m128i, so they use the SSE2 permute macros with
 * broadcast masks. */

#define IP_SSE2(l,r)                                        \
{                                                           \
  __m128i tt;                                               \
  PERM_OP_SSE2 (r, l, tt,  4, _mm_set1_epi32 (0x0f0f0f0f)); \
  PERM_OP_SSE2 (l, r, tt, 16, _mm_set1_epi32 (0x0000ffff)); \
  PERM_OP_SSE2 (r, l, tt,  2, _mm_set1_epi32 (0x33333333)); \
  PERM_OP_SSE2 (l, r, tt,  8, _mm_set1_epi32 (0x00ff00ff)); \
  PERM_OP_SSE2 (r, l, tt,  1, _mm_set1_epi32 (0x55555555)); \
}

#define FP_SSE2(l,r)                                        \
{                                                           \
  __m128i tt;                                               \
  PERM_OP_SSE2 (l, r, tt,  1, _mm_set1_epi32 (0x55555555)); \
  PERM_OP_SSE2 (r, l, tt,  8, _mm_set1_epi32 (0x00ff00ff)); \
  PERM_OP_SSE2 (l, r, tt,  2, _mm_set1_epi32 (0x33333333)); \
  PERM_OP_SSE2 (r, l, tt, 16, _mm_set1_epi32 (0x0000ffff)); \
  PERM_OP_SSE2 (l, r, tt,  4, _mm_set1_epi32 (0x0f0f0f0f)); \
}
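/* Combined S-box/P-permutation tables: each 64-entry table maps a 6-bit
 * S-box input directly to its 32-bit contribution after the P permutation,
 * so one DES round costs eight table lookups XORed together. */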
const int c_SPtrans[8][64] __attribute__ ((aligned (16))) =
{
  { /* nibble 0 */
    0x00820200, 0x00020000, 0x80800000, 0x80820200, 0x00800000, 0x80020200, 0x80020000, 0x80800000,
    0x80020200, 0x00820200, 0x00820000, 0x80000200, 0x80800200, 0x00800000, 0x00000000, 0x80020000,
    0x00020000, 0x80000000, 0x00800200, 0x00020200, 0x80820200, 0x00820000, 0x80000200, 0x00800200,
    0x80000000, 0x00000200, 0x00020200, 0x80820000, 0x00000200, 0x80800200, 0x80820000, 0x00000000,
    0x00000000, 0x80820200, 0x00800200, 0x80020000, 0x00820200, 0x00020000, 0x80000200, 0x00800200,
    0x80820000, 0x00000200, 0x00020200, 0x80800000, 0x80020200, 0x80000000, 0x80800000, 0x00820000,
    0x80820200, 0x00020200, 0x00820000, 0x80800200, 0x00800000, 0x80000200, 0x80020000, 0x00000000,
    0x00020000, 0x00800000, 0x80800200, 0x00820200, 0x80000000, 0x80820000, 0x00000200, 0x80020200
  },
  { /* nibble 1 */
    0x10042004, 0x00000000, 0x00042000, 0x10040000, 0x10000004, 0x00002004, 0x10002000, 0x00042000,
    0x00002000, 0x10040004, 0x00000004, 0x10002000, 0x00040004, 0x10042000, 0x10040000, 0x00000004,
    0x00040000, 0x10002004, 0x10040004, 0x00002000, 0x00042004, 0x10000000, 0x00000000, 0x00040004,
    0x10002004, 0x00042004, 0x10042000, 0x10000004, 0x10000000, 0x00040000, 0x00002004, 0x10042004,
    0x00040004, 0x10042000, 0x10002000, 0x00042004, 0x10042004, 0x00040004, 0x10000004, 0x00000000,
    0x10000000, 0x00002004, 0x00040000, 0x10040004, 0x00002000, 0x10000000, 0x00042004, 0x10002004,
    0x10042000, 0x00002000, 0x00000000, 0x10000004, 0x00000004, 0x10042004, 0x00042000, 0x10040000,
    0x10040004, 0x00040000, 0x00002004, 0x10002000, 0x10002004, 0x00000004, 0x10040000, 0x00042000
  },
  { /* nibble 2 */
    0x41000000, 0x01010040, 0x00000040, 0x41000040, 0x40010000, 0x01000000, 0x41000040, 0x00010040,
    0x01000040, 0x00010000, 0x01010000, 0x40000000, 0x41010040, 0x40000040, 0x40000000, 0x41010000,
    0x00000000, 0x40010000, 0x01010040, 0x00000040, 0x40000040, 0x41010040, 0x00010000, 0x41000000,
    0x41010000, 0x01000040, 0x40010040, 0x01010000, 0x00010040, 0x00000000, 0x01000000, 0x40010040,
    0x01010040, 0x00000040, 0x40000000, 0x00010000, 0x40000040, 0x40010000, 0x01010000, 0x41000040,
    0x00000000, 0x01010040, 0x00010040, 0x41010000, 0x40010000, 0x01000000, 0x41010040, 0x40000000,
    0x40010040, 0x41000000, 0x01000000, 0x41010040, 0x00010000, 0x01000040, 0x41000040, 0x00010040,
    0x01000040, 0x00000000, 0x41010000, 0x40000040, 0x41000000, 0x40010040, 0x00000040, 0x01010000
  },
  { /* nibble 3 */
    0x00100402, 0x04000400, 0x00000002, 0x04100402, 0x00000000, 0x04100000, 0x04000402, 0x00100002,
    0x04100400, 0x04000002, 0x04000000, 0x00000402, 0x04000002, 0x00100402, 0x00100000, 0x04000000,
    0x04100002, 0x00100400, 0x00000400, 0x00000002, 0x00100400, 0x04000402, 0x04100000, 0x00000400,
    0x00000402, 0x00000000, 0x00100002, 0x04100400, 0x04000400, 0x04100002, 0x04100402, 0x00100000,
    0x04100002, 0x00000402, 0x00100000, 0x04000002, 0x00100400, 0x04000400, 0x00000002, 0x04100000,
    0x04000402, 0x00000000, 0x00000400, 0x00100002, 0x00000000, 0x04100002, 0x04100400, 0x00000400,
    0x04000000, 0x04100402, 0x00100402, 0x00100000, 0x04100402, 0x00000002, 0x04000400, 0x00100402,
    0x00100002, 0x00100400, 0x04100000, 0x04000402, 0x00000402, 0x04000000, 0x04000002, 0x04100400
  },
  { /* nibble 4 */
    0x02000000, 0x00004000, 0x00000100, 0x02004108, 0x02004008, 0x02000100, 0x00004108, 0x02004000,
    0x00004000, 0x00000008, 0x02000008, 0x00004100, 0x02000108, 0x02004008, 0x02004100, 0x00000000,
    0x00004100, 0x02000000, 0x00004008, 0x00000108, 0x02000100, 0x00004108, 0x00000000, 0x02000008,
    0x00000008, 0x02000108, 0x02004108, 0x00004008, 0x02004000, 0x00000100, 0x00000108, 0x02004100,
    0x02004100, 0x02000108, 0x00004008, 0x02004000, 0x00004000, 0x00000008, 0x02000008, 0x02000100,
    0x02000000, 0x00004100, 0x02004108, 0x00000000, 0x00004108, 0x02000000, 0x00000100, 0x00004008,
    0x02000108, 0x00000100, 0x00000000, 0x02004108, 0x02004008, 0x02004100, 0x00000108, 0x00004000,
    0x00004100, 0x02004008, 0x02000100, 0x00000108, 0x00000008, 0x00004108, 0x02004000, 0x02000008
  },
  { /* nibble 5 */
    0x20000010, 0x00080010, 0x00000000, 0x20080800, 0x00080010, 0x00000800, 0x20000810, 0x00080000,
    0x00000810, 0x20080810, 0x00080800, 0x20000000, 0x20000800, 0x20000010, 0x20080000, 0x00080810,
    0x00080000, 0x20000810, 0x20080010, 0x00000000, 0x00000800, 0x00000010, 0x20080800, 0x20080010,
    0x20080810, 0x20080000, 0x20000000, 0x00000810, 0x00000010, 0x00080800, 0x00080810, 0x20000800,
    0x00000810, 0x20000000, 0x20000800, 0x00080810, 0x20080800, 0x00080010, 0x00000000, 0x20000800,
    0x20000000, 0x00000800, 0x20080010, 0x00080000, 0x00080010, 0x20080810, 0x00080800, 0x00000010,
    0x20080810, 0x00080800, 0x00080000, 0x20000810, 0x20000010, 0x20080000, 0x00080810, 0x00000000,
    0x00000800, 0x20000010, 0x20000810, 0x20080800, 0x20080000, 0x00000810, 0x00000010, 0x20080010
  },
  { /* nibble 6 */
    0x00001000, 0x00000080, 0x00400080, 0x00400001, 0x00401081, 0x00001001, 0x00001080, 0x00000000,
    0x00400000, 0x00400081, 0x00000081, 0x00401000, 0x00000001, 0x00401080, 0x00401000, 0x00000081,
    0x00400081, 0x00001000, 0x00001001, 0x00401081, 0x00000000, 0x00400080, 0x00400001, 0x00001080,
    0x00401001, 0x00001081, 0x00401080, 0x00000001, 0x00001081, 0x00401001, 0x00000080, 0x00400000,
    0x00001081, 0x00401000, 0x00401001, 0x00000081, 0x00001000, 0x00000080, 0x00400000, 0x00401001,
    0x00400081, 0x00001081, 0x00001080, 0x00000000, 0x00000080, 0x00400001, 0x00000001, 0x00400080,
    0x00000000, 0x00400081, 0x00400080, 0x00001080, 0x00000081, 0x00001000, 0x00401081, 0x00400000,
    0x00401080, 0x00000001, 0x00001001, 0x00401081, 0x00400001, 0x00401080, 0x00401000, 0x00001001
  },
  { /* nibble 7 */
    0x08200020, 0x08208000, 0x00008020, 0x00000000, 0x08008000, 0x00200020, 0x08200000, 0x08208020,
    0x00000020, 0x08000000, 0x00208000, 0x00008020, 0x00208020, 0x08008020, 0x08000020, 0x08200000,
    0x00008000, 0x00208020, 0x00200020, 0x08008000, 0x08208020, 0x08000020, 0x00000000, 0x00208000,
    0x08000000, 0x00200000, 0x08008020, 0x08200020, 0x00200000, 0x00008000, 0x08208000, 0x00000020,
    0x00200000, 0x00008000, 0x08000020, 0x08208020, 0x00008020, 0x08000000, 0x00000000, 0x00208000,
    0x08200020, 0x08008020, 0x08008000, 0x00200020, 0x08208000, 0x00000020, 0x00200020, 0x08008000,
    0x08208020, 0x00200000, 0x08200000, 0x08000020, 0x00208000, 0x00008020, 0x08008020, 0x08200000,
    0x00000020, 0x08208000, 0x00208020, 0x00000000, 0x08000000, 0x08200020, 0x00008000, 0x00208020
  },
};
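/* Key-schedule lookup tables: each table picks six key bits (per the FIPS 46
 * numbering noted on each table) out of the rotated C/D halves and places
 * them at their PC-2 positions, so each round key is built from eight
 * lookups instead of bit-by-bit permutation. */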
const int c_skb[8][64] __attribute__ ((aligned (16))) =
{
  { /* for C bits (numbered as per FIPS 46) 1 2 3 4 5 6 */
    0x00000000, 0x00000010, 0x20000000, 0x20000010, 0x00010000, 0x00010010, 0x20010000, 0x20010010,
    0x00000800, 0x00000810, 0x20000800, 0x20000810, 0x00010800, 0x00010810, 0x20010800, 0x20010810,
    0x00000020, 0x00000030, 0x20000020, 0x20000030, 0x00010020, 0x00010030, 0x20010020, 0x20010030,
    0x00000820, 0x00000830, 0x20000820, 0x20000830, 0x00010820, 0x00010830, 0x20010820, 0x20010830,
    0x00080000, 0x00080010, 0x20080000, 0x20080010, 0x00090000, 0x00090010, 0x20090000, 0x20090010,
    0x00080800, 0x00080810, 0x20080800, 0x20080810, 0x00090800, 0x00090810, 0x20090800, 0x20090810,
    0x00080020, 0x00080030, 0x20080020, 0x20080030, 0x00090020, 0x00090030, 0x20090020, 0x20090030,
    0x00080820, 0x00080830, 0x20080820, 0x20080830, 0x00090820, 0x00090830, 0x20090820, 0x20090830
  },
  { /* for C bits (numbered as per FIPS 46) 7 8 10 11 12 13 */
    0x00000000, 0x02000000, 0x00002000, 0x02002000, 0x00200000, 0x02200000, 0x00202000, 0x02202000,
    0x00000004, 0x02000004, 0x00002004, 0x02002004, 0x00200004, 0x02200004, 0x00202004, 0x02202004,
    0x00000400, 0x02000400, 0x00002400, 0x02002400, 0x00200400, 0x02200400, 0x00202400, 0x02202400,
    0x00000404, 0x02000404, 0x00002404, 0x02002404, 0x00200404, 0x02200404, 0x00202404, 0x02202404,
    0x10000000, 0x12000000, 0x10002000, 0x12002000, 0x10200000, 0x12200000, 0x10202000, 0x12202000,
    0x10000004, 0x12000004, 0x10002004, 0x12002004, 0x10200004, 0x12200004, 0x10202004, 0x12202004,
    0x10000400, 0x12000400, 0x10002400, 0x12002400, 0x10200400, 0x12200400, 0x10202400, 0x12202400,
    0x10000404, 0x12000404, 0x10002404, 0x12002404, 0x10200404, 0x12200404, 0x10202404, 0x12202404
  },
  { /* for C bits (numbered as per FIPS 46) 14 15 16 17 19 20 */
    0x00000000, 0x00000001, 0x00040000, 0x00040001, 0x01000000, 0x01000001, 0x01040000, 0x01040001,
    0x00000002, 0x00000003, 0x00040002, 0x00040003, 0x01000002, 0x01000003, 0x01040002, 0x01040003,
    0x00000200, 0x00000201, 0x00040200, 0x00040201, 0x01000200, 0x01000201, 0x01040200, 0x01040201,
    0x00000202, 0x00000203, 0x00040202, 0x00040203, 0x01000202, 0x01000203, 0x01040202, 0x01040203,
    0x08000000, 0x08000001, 0x08040000, 0x08040001, 0x09000000, 0x09000001, 0x09040000, 0x09040001,
    0x08000002, 0x08000003, 0x08040002, 0x08040003, 0x09000002, 0x09000003, 0x09040002, 0x09040003,
    0x08000200, 0x08000201, 0x08040200, 0x08040201, 0x09000200, 0x09000201, 0x09040200, 0x09040201,
    0x08000202, 0x08000203, 0x08040202, 0x08040203, 0x09000202, 0x09000203, 0x09040202, 0x09040203
  },
  { /* for C bits (numbered as per FIPS 46) 21 23 24 26 27 28 */
    0x00000000, 0x00100000, 0x00000100, 0x00100100, 0x00000008, 0x00100008, 0x00000108, 0x00100108,
    0x00001000, 0x00101000, 0x00001100, 0x00101100, 0x00001008, 0x00101008, 0x00001108, 0x00101108,
    0x04000000, 0x04100000, 0x04000100, 0x04100100, 0x04000008, 0x04100008, 0x04000108, 0x04100108,
    0x04001000, 0x04101000, 0x04001100, 0x04101100, 0x04001008, 0x04101008, 0x04001108, 0x04101108,
    0x00020000, 0x00120000, 0x00020100, 0x00120100, 0x00020008, 0x00120008, 0x00020108, 0x00120108,
    0x00021000, 0x00121000, 0x00021100, 0x00121100, 0x00021008, 0x00121008, 0x00021108, 0x00121108,
    0x04020000, 0x04120000, 0x04020100, 0x04120100, 0x04020008, 0x04120008, 0x04020108, 0x04120108,
    0x04021000, 0x04121000, 0x04021100, 0x04121100, 0x04021008, 0x04121008, 0x04021108, 0x04121108
  },
  { /* for D bits (numbered as per FIPS 46) 1 2 3 4 5 6 */
    0x00000000, 0x10000000, 0x00010000, 0x10010000, 0x00000004, 0x10000004, 0x00010004, 0x10010004,
    0x20000000, 0x30000000, 0x20010000, 0x30010000, 0x20000004, 0x30000004, 0x20010004, 0x30010004,
    0x00100000, 0x10100000, 0x00110000, 0x10110000, 0x00100004, 0x10100004, 0x00110004, 0x10110004,
    0x20100000, 0x30100000, 0x20110000, 0x30110000, 0x20100004, 0x30100004, 0x20110004, 0x30110004,
    0x00001000, 0x10001000, 0x00011000, 0x10011000, 0x00001004, 0x10001004, 0x00011004, 0x10011004,
    0x20001000, 0x30001000, 0x20011000, 0x30011000, 0x20001004, 0x30001004, 0x20011004, 0x30011004,
    0x00101000, 0x10101000, 0x00111000, 0x10111000, 0x00101004, 0x10101004, 0x00111004, 0x10111004,
    0x20101000, 0x30101000, 0x20111000, 0x30111000, 0x20101004, 0x30101004, 0x20111004, 0x30111004
  },
  { /* for D bits (numbered as per FIPS 46) 8 9 11 12 13 14 */
    0x00000000, 0x08000000, 0x00000008, 0x08000008, 0x00000400, 0x08000400, 0x00000408, 0x08000408,
    0x00020000, 0x08020000, 0x00020008, 0x08020008, 0x00020400, 0x08020400, 0x00020408, 0x08020408,
    0x00000001, 0x08000001, 0x00000009, 0x08000009, 0x00000401, 0x08000401, 0x00000409, 0x08000409,
    0x00020001, 0x08020001, 0x00020009, 0x08020009, 0x00020401, 0x08020401, 0x00020409, 0x08020409,
    0x02000000, 0x0A000000, 0x02000008, 0x0A000008, 0x02000400, 0x0A000400, 0x02000408, 0x0A000408,
    0x02020000, 0x0A020000, 0x02020008, 0x0A020008, 0x02020400, 0x0A020400, 0x02020408, 0x0A020408,
    0x02000001, 0x0A000001, 0x02000009, 0x0A000009, 0x02000401, 0x0A000401, 0x02000409, 0x0A000409,
    0x02020001, 0x0A020001, 0x02020009, 0x0A020009, 0x02020401, 0x0A020401, 0x02020409, 0x0A020409
  },
  { /* for D bits (numbered as per FIPS 46) 16 17 18 19 20 21 */
    0x00000000, 0x00000100, 0x00080000, 0x00080100, 0x01000000, 0x01000100, 0x01080000, 0x01080100,
    0x00000010, 0x00000110, 0x00080010, 0x00080110, 0x01000010, 0x01000110, 0x01080010, 0x01080110,
    0x00200000, 0x00200100, 0x00280000, 0x00280100, 0x01200000, 0x01200100, 0x01280000, 0x01280100,
    0x00200010, 0x00200110, 0x00280010, 0x00280110, 0x01200010, 0x01200110, 0x01280010, 0x01280110,
    0x00000200, 0x00000300, 0x00080200, 0x00080300, 0x01000200, 0x01000300, 0x01080200, 0x01080300,
    0x00000210, 0x00000310, 0x00080210, 0x00080310, 0x01000210, 0x01000310, 0x01080210, 0x01080310,
    0x00200200, 0x00200300, 0x00280200, 0x00280300, 0x01200200, 0x01200300, 0x01280200, 0x01280300,
    0x00200210, 0x00200310, 0x00280210, 0x00280310, 0x01200210, 0x01200310, 0x01280210, 0x01280310
  },
  { /* for D bits (numbered as per FIPS 46) 22 23 24 25 27 28 */
    0x00000000, 0x04000000, 0x00040000, 0x04040000, 0x00000002, 0x04000002, 0x00040002, 0x04040002,
    0x00002000, 0x04002000, 0x00042000, 0x04042000, 0x00002002, 0x04002002, 0x00042002, 0x04042002,
    0x00000020, 0x04000020, 0x00040020, 0x04040020, 0x00000022, 0x04000022, 0x00040022, 0x04040022,
    0x00002020, 0x04002020, 0x00042020, 0x04042020, 0x00002022, 0x04002022, 0x00042022, 0x04042022,
    0x00000800, 0x04000800, 0x00040800, 0x04040800, 0x00000802, 0x04000802, 0x00040802, 0x04040802,
    0x00002800, 0x04002800, 0x00042800, 0x04042800, 0x00002802, 0x04002802, 0x00042802, 0x04042802,
    0x00000820, 0x04000820, 0x00040820, 0x04040820, 0x00000822, 0x04000822, 0x00040822, 0x04040822,
    0x00002820, 0x04002820, 0x00042820, 0x04042820, 0x00002822, 0x04002822, 0x00042822, 0x04042822
  },
};
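/* Key schedule for four candidates at once, one per 32-bit lane. c and d
 * arrive as the raw key words; the permutation macros below implement PC-1,
 * then each of the 16 iterations rotates the 28-bit halves left by 1 or 2
 * bits and extracts the round key via the c_skb tables (PC-2). */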
void _des_crypt_keysetup_sse2 (__m128i c, __m128i d, __m128i Kc[16], __m128i Kd[16])
{
  __m128i tt;

  PERM_OP_SSE2  (d, c, tt, 4, _mm_set1_epi32 (0x0f0f0f0f));
  HPERM_OP_SSE2 (c, tt, 2, _mm_set1_epi32 (0xcccc0000));
  HPERM_OP_SSE2 (d, tt, 2, _mm_set1_epi32 (0xcccc0000));
  PERM_OP_SSE2  (d, c, tt, 1, _mm_set1_epi32 (0x55555555));
  PERM_OP_SSE2  (c, d, tt, 8, _mm_set1_epi32 (0x00ff00ff));
  PERM_OP_SSE2  (d, c, tt, 1, _mm_set1_epi32 (0x55555555));

  d = _mm_slli_epi32 (_mm_and_si128 (d, _mm_set1_epi32 (0x000000ff)), 16)
    | _mm_slli_epi32 (_mm_and_si128 (d, _mm_set1_epi32 (0x0000ff00)),  0)
    | _mm_srli_epi32 (_mm_and_si128 (d, _mm_set1_epi32 (0x00ff0000)), 16)
    | _mm_srli_epi32 (_mm_and_si128 (c, _mm_set1_epi32 (0xf0000000)),  4);

  c = _mm_and_si128 (c, _mm_set1_epi32 (0x0fffffff));

  uint i;

  for (i = 0; i < 16; i++)
  {
    /* rotate both 28-bit halves left by 1 or 2 bits depending on the round */

    const uint shifts3s0[16] = {  1,  1,  2,  2,  2,  2,  2,  2,  1,  2,  2,  2,  2,  2,  2,  1 };
    const uint shifts3s1[16] = { 27, 27, 26, 26, 26, 26, 26, 26, 27, 26, 26, 26, 26, 26, 26, 27 };

    c = _mm_srli_epi32 (c, shifts3s0[i]) | _mm_slli_epi32 (c, shifts3s1[i]);
    d = _mm_srli_epi32 (d, shifts3s0[i]) | _mm_slli_epi32 (d, shifts3s1[i]);

    c = _mm_and_si128 (c, _mm_set1_epi32 (0x0fffffff));
    d = _mm_and_si128 (d, _mm_set1_epi32 (0x0fffffff));

    /* PC-2 via the c_skb tables. The AVX2 gather variant sits behind a macro
     * that is never defined; the name suggests gathers benchmarked slower
     * than the store-plus-scalar-lookup path below. */

    #ifdef __AVX2__IS_SLOWER

    __m128i s = _mm_setzero_si128 ();
    __m128i t = _mm_setzero_si128 ();

    __m128i vindex[4];

    vindex[0] = (_mm_srli_epi32 (c,  0) & _mm_set1_epi32 (0x3f));
    vindex[1] = (_mm_srli_epi32 (c,  6) & _mm_set1_epi32 (0x03))
              | (_mm_srli_epi32 (c,  7) & _mm_set1_epi32 (0x3c));
    vindex[2] = (_mm_srli_epi32 (c, 13) & _mm_set1_epi32 (0x0f))
              | (_mm_srli_epi32 (c, 14) & _mm_set1_epi32 (0x30));
    vindex[3] = (_mm_srli_epi32 (c, 20) & _mm_set1_epi32 (0x01))
              | (_mm_srli_epi32 (c, 21) & _mm_set1_epi32 (0x06))
              | (_mm_srli_epi32 (c, 22) & _mm_set1_epi32 (0x38));

    uint k;

    for (k = 0; k < 4; k++)
    {
      s ^= _mm_i32gather_epi32 (c_skb[0 + k], vindex[k], sizeof (uint));
    }

    vindex[0] = (_mm_srli_epi32 (d,  0) & _mm_set1_epi32 (0x3f));
    vindex[1] = (_mm_srli_epi32 (d,  7) & _mm_set1_epi32 (0x03))
              | (_mm_srli_epi32 (d,  8) & _mm_set1_epi32 (0x3c));
    vindex[2] = (_mm_srli_epi32 (d, 15) & _mm_set1_epi32 (0x3f));
    vindex[3] = (_mm_srli_epi32 (d, 21) & _mm_set1_epi32 (0x0f))
              | (_mm_srli_epi32 (d, 22) & _mm_set1_epi32 (0x30));

    for (k = 0; k < 4; k++)
    {
      t ^= _mm_i32gather_epi32 (c_skb[4 + k], vindex[k], sizeof (uint));
    }

    #else

    uint32_t cb[4] __attribute__ ((aligned (16)));
    uint32_t db[4] __attribute__ ((aligned (16)));

    _mm_store_si128 ((__m128i *) cb, c);
    _mm_store_si128 ((__m128i *) db, d);

    uint32_t sb[4];
    uint32_t tb[4];

    uint k;

    for (k = 0; k < 4; k++)
    {
      const uint cbv = cb[k];
      const uint dbv = db[k];

      uint s = BOX ((( cbv >>  0) & 0x3f), 0, c_skb)
             | BOX ((((cbv >>  6) & 0x03)
                   | ((cbv >>  7) & 0x3c)), 1, c_skb)
             | BOX ((((cbv >> 13) & 0x0f)
                   | ((cbv >> 14) & 0x30)), 2, c_skb)
             | BOX ((((cbv >> 20) & 0x01)
                   | ((cbv >> 21) & 0x06)
                   | ((cbv >> 22) & 0x38)), 3, c_skb);

      uint t = BOX ((( dbv >>  0) & 0x3f), 4, c_skb)
             | BOX ((((dbv >>  7) & 0x03)
                   | ((dbv >>  8) & 0x3c)), 5, c_skb)
             | BOX ((((dbv >> 15) & 0x3f)), 6, c_skb)
             | BOX ((((dbv >> 21) & 0x0f)
                   | ((dbv >> 22) & 0x30)), 7, c_skb);

      sb[k] = s;
      tb[k] = t;
    }

    __m128i s = _mm_set_epi32 (sb[3], sb[2], sb[1], sb[0]);
    __m128i t = _mm_set_epi32 (tb[3], tb[2], tb[1], tb[0]);

    #endif

    /* interleave s and t into the two round-key words */

    Kc[i] = _mm_slli_epi32 (t, 16) | _mm_and_si128 (s, _mm_set1_epi32 (0x0000ffff));
    Kd[i] = _mm_srli_epi32 (s, 16) | _mm_and_si128 (t, _mm_set1_epi32 (0xffff0000));
  }
}
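/* descrypt encryption: 25 iterations of the 16-round DES loop over an
 * all-zero block. mask is derived from the salt; E0 and E1 select which bits
 * of r and r >> 16 get swapped before the S-box lookups, which is how
 * crypt(3) folds the salt into the E expansion. */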
void _des_crypt_encrypt_sse2 (__m128i iv[2], __m128i mask, __m128i Kc[16], __m128i Kd[16])
{
  __m128i E0 = mask;
  __m128i E1 = _mm_srli_epi32 (mask, 2);

  E0 = _mm_and_si128 (E0, _mm_set1_epi32 (0x3f));
  E1 = _mm_and_si128 (E1, _mm_set1_epi32 (0x3f0));

  __m128i r = _mm_setzero_si128 ();
  __m128i l = _mm_setzero_si128 ();

  uint i;

  for (i = 0; i < 25; i++)
  {
    __m128i tt;

    uint j;

    for (j = 0; j < 16; j++)
    {
      /* sbox */

      __m128i t = _mm_xor_si128 (r, _mm_srli_epi32 (r, 16));
      __m128i u = t;

      /* u */

      u  = _mm_and_si128 (u, E0);
      tt = _mm_slli_epi32 (u, 16);
      u  = _mm_xor_si128 (u, r);
      u  = _mm_xor_si128 (u, tt);
      u  = _mm_xor_si128 (u, Kc[j]);

      /* t */

      t  = _mm_and_si128 (t, E1);
      tt = _mm_slli_epi32 (t, 16);
      t  = _mm_xor_si128 (t, r);
      t  = _mm_xor_si128 (t, tt);
      t  = ROTL32_SSE (t, 28);
      t  = _mm_xor_si128 (t, Kd[j]);

      #ifdef __AVX2__IS_SLOWER

      uint k;

      for (k = 0; k < 4; k++)
      {
        __m128i vindex = _mm_and_si128 (_mm_srli_epi32 (u, k * 8), _mm_set1_epi32 (0x3f));

        l ^= _mm_i32gather_epi32 (c_SPtrans[(k * 2) + 0], vindex, sizeof (uint));
      }

      for (k = 0; k < 4; k++)
      {
        __m128i vindex = _mm_and_si128 (_mm_srli_epi32 (t, k * 8), _mm_set1_epi32 (0x3f));

        l ^= _mm_i32gather_epi32 (c_SPtrans[(k * 2) + 1], vindex, sizeof (uint));
      }

      #else

      uint32_t ub[4] __attribute__ ((aligned (16)));
      uint32_t tb[4] __attribute__ ((aligned (16)));

      _mm_store_si128 ((__m128i *) ub, u);
      _mm_store_si128 ((__m128i *) tb, t);

      uint32_t lb[4];

      uint k;

      for (k = 0; k < 4; k++)
      {
        const uint ubv = ub[k];
        const uint tbv = tb[k];

        lb[k] = BOX (((ubv >>  0) & 0x3f), 0, c_SPtrans)
              | BOX (((ubv >>  8) & 0x3f), 2, c_SPtrans)
              | BOX (((ubv >> 16) & 0x3f), 4, c_SPtrans)
              | BOX (((ubv >> 24) & 0x3f), 6, c_SPtrans)
              | BOX (((tbv >>  0) & 0x3f), 1, c_SPtrans)
              | BOX (((tbv >>  8) & 0x3f), 3, c_SPtrans)
              | BOX (((tbv >> 16) & 0x3f), 5, c_SPtrans)
              | BOX (((tbv >> 24) & 0x3f), 7, c_SPtrans);
      }

      l ^= _mm_set_epi32 (lb[3], lb[2], lb[1], lb[0]);

      #endif

      /* swap l and r for the next round */

      tt = l;
      l  = r;
      r  = tt;
    }

    /* undo the final swap before the next DES iteration */

    tt = l;
    l  = r;
    r  = tt;
  }

  iv[0] = r;
  iv[1] = l;
}
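/* Scalar reference implementations of the same key setup and 25-round
 * encryption, processing one 32-bit lane at a time; used by the non-SSE2
 * wrapper hashcat_descrypt_64 () below. */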
void _des_crypt_keysetup (uint c, uint d, uint Kc[16], uint Kd[16])
{
  uint tt;

  PERM_OP  (d, c, tt, 4, 0x0f0f0f0f);
  HPERM_OP (c, tt, 2, 0xcccc0000);
  HPERM_OP (d, tt, 2, 0xcccc0000);
  PERM_OP  (d, c, tt, 1, 0x55555555);
  PERM_OP  (c, d, tt, 8, 0x00ff00ff);
  PERM_OP  (d, c, tt, 1, 0x55555555);

  d = ((d & 0x000000ff) << 16)
    | ((d & 0x0000ff00) <<  0)
    | ((d & 0x00ff0000) >> 16)
    | ((c & 0xf0000000) >>  4);

  c = c & 0x0fffffff;

  uint i;

  for (i = 0; i < 16; i++)
  {
    const uint shifts3s0[16] = {  1,  1,  2,  2,  2,  2,  2,  2,  1,  2,  2,  2,  2,  2,  2,  1 };
    const uint shifts3s1[16] = { 27, 27, 26, 26, 26, 26, 26, 26, 27, 26, 26, 26, 26, 26, 26, 27 };

    c = c >> shifts3s0[i] | c << shifts3s1[i];
    d = d >> shifts3s0[i] | d << shifts3s1[i];

    c = c & 0x0fffffff;
    d = d & 0x0fffffff;

    uint s = BOX ((( c >>  0) & 0x3f), 0, c_skb)
           | BOX ((((c >>  6) & 0x03)
                 | ((c >>  7) & 0x3c)), 1, c_skb)
           | BOX ((((c >> 13) & 0x0f)
                 | ((c >> 14) & 0x30)), 2, c_skb)
           | BOX ((((c >> 20) & 0x01)
                 | ((c >> 21) & 0x06)
                 | ((c >> 22) & 0x38)), 3, c_skb);

    uint t = BOX ((( d >>  0) & 0x3f), 4, c_skb)
           | BOX ((((d >>  7) & 0x03)
                 | ((d >>  8) & 0x3c)), 5, c_skb)
           | BOX ((((d >> 15) & 0x3f)), 6, c_skb)
           | BOX ((((d >> 21) & 0x0f)
                 | ((d >> 22) & 0x30)), 7, c_skb);

    Kc[i] = ((t << 16) | (s & 0x0000ffff));
    Kd[i] = ((s >> 16) | (t & 0xffff0000));
  }
}

void _des_crypt_encrypt (uint iv[2], uint mask, uint Kc[16], uint Kd[16])
{
  uint tt;

  const uint E1 = (mask >> 2) & 0x3f0;
  const uint E0 =  mask       & 0x3f;

  uint r = 0;
  uint l = 0;

  uint i, j;

  for (i = 0; i < 25; i++)
  {
    for (j = 0; j < 16; j++)
    {
      /* sbox */

      uint t = r ^ (r >> 16);
      uint u = t;

      /* u */

      u  = u & E0;
      tt = (u << 16);
      u  = u ^ r;
      u  = u ^ tt;
      u  = u ^ Kc[j];

      /* t */

      t  = t & E1;
      tt = (t << 16);
      t  = t ^ r;
      t  = t ^ tt;
      t  = ROTL32 (t, 28);
      t  = t ^ Kd[j];

      l ^= BOX (((u >>  0) & 0x3f), 0, c_SPtrans)
         | BOX (((u >>  8) & 0x3f), 2, c_SPtrans)
         | BOX (((u >> 16) & 0x3f), 4, c_SPtrans)
         | BOX (((u >> 24) & 0x3f), 6, c_SPtrans)
         | BOX (((t >>  0) & 0x3f), 1, c_SPtrans)
         | BOX (((t >>  8) & 0x3f), 3, c_SPtrans)
         | BOX (((t >> 16) & 0x3f), 5, c_SPtrans)
         | BOX (((t >> 24) & 0x3f), 7, c_SPtrans);

      tt = l;
      l  = r;
      r  = tt;
    }

    tt = l;
    l  = r;
    r  = tt;
  }

  iv[0] = r;
  iv[1] = l;
}
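/* Top-level descrypt kernels for four candidates. blocks[0]/blocks[1] carry
 * the key halves; shifting each byte left by one and masking with 0xfefefefe
 * places the 7 significant password bits where DES expects them and clears
 * the parity-bit slot. blocks[2] is the salt-derived E mask. */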
void hashcat_descrypt_64_sse2 (__m128i digests[2], __m128i blocks[3])
{
  blocks[0] = _mm_and_si128 (_mm_slli_epi32 (blocks[0], 1), _mm_set1_epi32 (0xfefefefe));
  blocks[1] = _mm_and_si128 (_mm_slli_epi32 (blocks[1], 1), _mm_set1_epi32 (0xfefefefe));

  __m128i Kc[16];
  __m128i Kd[16];

  _des_crypt_keysetup_sse2 (blocks[0], blocks[1], Kc, Kd);

  _des_crypt_encrypt_sse2 (digests, blocks[2], Kc, Kd);
}

void hashcat_descrypt_64 (uint32_t digests[2][4], uint32_t blocks[3][4])
{
  /**
   * base
   */

  uint i;

  for (i = 0; i < 4; i++)
  {
    uint data[2];

    data[0] = (blocks[0][i] << 1) & 0xfefefefe;
    data[1] = (blocks[1][i] << 1) & 0xfefefefe;

    uint Kc[16];
    uint Kd[16];

    _des_crypt_keysetup (data[0], data[1], Kc, Kd);

    uint iv[2];

    _des_crypt_encrypt (iv, blocks[2][i], Kc, Kd);

    digests[0][i] = iv[0];
    digests[1][i] = iv[1];
  }
}
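/* Minimal usage sketch (hypothetical, not part of hashcat): drive the scalar
 * wrapper with four placeholder candidates. In hashcat the key halves and
 * the salt mask are packed by the calling code; the inputs below are
 * arbitrary placeholders, not test vectors, so only the call shape is shown.
 * Build with e.g. -DDESCRYPT_EXAMPLE to enable. */

#ifdef DESCRYPT_EXAMPLE
#include <stdio.h>

int main (void)
{
  uint32_t blocks[3][4];
  uint32_t digests[2][4];

  uint i;

  for (i = 0; i < 4; i++)
  {
    blocks[0][i] = 0x01234567 + i; // low  key half (placeholder)
    blocks[1][i] = 0x89abcdef + i; // high key half (placeholder)
    blocks[2][i] = 0;              // salt-derived E mask (placeholder: zero salt)
  }

  hashcat_descrypt_64 (digests, blocks);

  for (i = 0; i < 4; i++)
  {
    printf ("lane %u: %08x %08x\n", i, (unsigned) digests[0][i], (unsigned) digests[1][i]);
  }

  return 0;
}
#endif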