/linux-4.19.296/fs/ext4/

hash.c
    46  #define K2 013240474631UL                        (macro)
    67  ROUND(G, a, b, c, d, in[1] + K2, 3);             in half_md4_transform()
    68  ROUND(G, d, a, b, c, in[3] + K2, 5);             in half_md4_transform()
    69  ROUND(G, c, d, a, b, in[5] + K2, 9);             in half_md4_transform()
    70  ROUND(G, b, c, d, a, in[7] + K2, 13);            in half_md4_transform()
    71  ROUND(G, a, b, c, d, in[0] + K2, 3);             in half_md4_transform()
    72  ROUND(G, d, a, b, c, in[2] + K2, 5);             in half_md4_transform()
    73  ROUND(G, c, d, a, b, in[4] + K2, 9);             in half_md4_transform()
    74  ROUND(G, b, c, d, a, in[6] + K2, 13);            in half_md4_transform()
    95  #undef K2
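Note on the hash.c hits: K2 there is the classic MD4 round-2 constant (013240474631 octal equals 0x5a827999), and each listed line is one round-2 step of the half-MD4 mix used for directory-entry hashing. A minimal sketch of the shape those ROUND()/G() macros usually take, reconstructed from the usual half-MD4 layout rather than quoted from hash.c:

#include <stdint.h>

/* Sketch only: assumed macro shapes, not a verbatim copy of hash.c. */
#define K2 0x5a827999U	/* 013240474631 octal: MD4 round-2 constant */

static inline uint32_t rol32(uint32_t w, unsigned int s)
{
	return (w << s) | (w >> (32 - s));
}

/* MD4 round-2 mixing function (a majority-style combiner). */
#define G(x, y, z) (((x) & (y)) + (((x) ^ (y)) & (z)))

/* One step: mix three words, add the (constant-offset) message word, rotate. */
#define ROUND(f, a, b, c, d, x, s) \
	((a) += f((b), (c), (d)) + (x), (a) = rol32((a), (s)))

Each hash.c line above is then one such step, with the message word in[i] already offset by K2 at the call site.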
/linux-4.19.296/crypto/

anubis.c
   493  u32 K0, K1, K2, K3;                              in anubis_setkey() (local)
   499  K2 = T4[(kappa[N - 1] >> 8) & 0xff];             in anubis_setkey()
   512  K2 = T4[(kappa[i] >> 8) & 0xff] ^                in anubis_setkey()
   513       (T5[(K2 >> 24)        ] & 0xff000000U) ^    in anubis_setkey()
   514       (T5[(K2 >> 16) & 0xff] & 0x00ff0000U) ^     in anubis_setkey()
   515       (T5[(K2 >>  8) & 0xff] & 0x0000ff00U) ^     in anubis_setkey()
   516       (T5[(K2      ) & 0xff] & 0x000000ffU);      in anubis_setkey()
   526  ctx->E[r][2] = K2;                               in anubis_setkey()
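The anubis_setkey() lines show a byte-lane pattern: each T5 lookup is masked so only one byte of its result survives, and the four masked lookups XOR together into one full 32-bit word. A hedged stand-alone illustration of that masking (demo_tab is a placeholder, not the kernel's T4/T5 tables):

#include <stdint.h>

/* Placeholder table; the real anubis.c uses its precomputed T4/T5 tables. */
static uint32_t demo_tab[256];

/*
 * Keep exactly one byte lane from each table lookup, so the four XORed
 * terms rebuild a 32-bit word byte by byte -- the same masking seen at
 * anubis.c lines 513-516 above.
 */
static uint32_t mask_lanes(uint32_t k)
{
	return (demo_tab[(k >> 24)       ] & 0xff000000U) ^
	       (demo_tab[(k >> 16) & 0xff] & 0x00ff0000U) ^
	       (demo_tab[(k >>  8) & 0xff] & 0x0000ff00U) ^
	       (demo_tab[(k      ) & 0xff] & 0x000000ffU);
}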
rmd128.c
    32  #define K2 RMD_K2                                (macro)
    85  ROUND(aa, bb, cc, dd, F2, K2, in[7], 7);         in rmd128_transform()
    86  ROUND(dd, aa, bb, cc, F2, K2, in[4], 6);         in rmd128_transform()
    87  ROUND(cc, dd, aa, bb, F2, K2, in[13], 8);        in rmd128_transform()
    88  ROUND(bb, cc, dd, aa, F2, K2, in[1], 13);        in rmd128_transform()
    89  ROUND(aa, bb, cc, dd, F2, K2, in[10], 11);       in rmd128_transform()
    90  ROUND(dd, aa, bb, cc, F2, K2, in[6], 9);         in rmd128_transform()
    91  ROUND(cc, dd, aa, bb, F2, K2, in[15], 7);        in rmd128_transform()
    92  ROUND(bb, cc, dd, aa, F2, K2, in[3], 15);        in rmd128_transform()
    93  ROUND(aa, bb, cc, dd, F2, K2, in[12], 7);        in rmd128_transform()
    [all …]
rmd256.c
    32  #define K2 RMD_K2                                (macro)
   106  ROUND(aa, bb, cc, dd, F2, K2, in[7], 7);         in rmd256_transform()
   107  ROUND(dd, aa, bb, cc, F2, K2, in[4], 6);         in rmd256_transform()
   108  ROUND(cc, dd, aa, bb, F2, K2, in[13], 8);        in rmd256_transform()
   109  ROUND(bb, cc, dd, aa, F2, K2, in[1], 13);        in rmd256_transform()
   110  ROUND(aa, bb, cc, dd, F2, K2, in[10], 11);       in rmd256_transform()
   111  ROUND(dd, aa, bb, cc, F2, K2, in[6], 9);         in rmd256_transform()
   112  ROUND(cc, dd, aa, bb, F2, K2, in[15], 7);        in rmd256_transform()
   113  ROUND(bb, cc, dd, aa, F2, K2, in[3], 15);        in rmd256_transform()
   114  ROUND(aa, bb, cc, dd, F2, K2, in[12], 7);        in rmd256_transform()
    [all …]
rmd320.c
    32  #define K2 RMD_K2                                (macro)
   112  ROUND(ee, aa, bb, cc, dd, F2, K2, in[7], 7);     in rmd320_transform()
   113  ROUND(dd, ee, aa, bb, cc, F2, K2, in[4], 6);     in rmd320_transform()
   114  ROUND(cc, dd, ee, aa, bb, F2, K2, in[13], 8);    in rmd320_transform()
   115  ROUND(bb, cc, dd, ee, aa, F2, K2, in[1], 13);    in rmd320_transform()
   116  ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11);   in rmd320_transform()
   117  ROUND(ee, aa, bb, cc, dd, F2, K2, in[6], 9);     in rmd320_transform()
   118  ROUND(dd, ee, aa, bb, cc, F2, K2, in[15], 7);    in rmd320_transform()
   119  ROUND(cc, dd, ee, aa, bb, F2, K2, in[3], 15);    in rmd320_transform()
   120  ROUND(bb, cc, dd, ee, aa, F2, K2, in[12], 7);    in rmd320_transform()
    [all …]
rmd160.c
    32  #define K2 RMD_K2                                (macro)
    91  ROUND(ee, aa, bb, cc, dd, F2, K2, in[7], 7);     in rmd160_transform()
    92  ROUND(dd, ee, aa, bb, cc, F2, K2, in[4], 6);     in rmd160_transform()
    93  ROUND(cc, dd, ee, aa, bb, F2, K2, in[13], 8);    in rmd160_transform()
    94  ROUND(bb, cc, dd, ee, aa, F2, K2, in[1], 13);    in rmd160_transform()
    95  ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11);   in rmd160_transform()
    96  ROUND(ee, aa, bb, cc, dd, F2, K2, in[6], 9);     in rmd160_transform()
    97  ROUND(dd, ee, aa, bb, cc, F2, K2, in[15], 7);    in rmd160_transform()
    98  ROUND(cc, dd, ee, aa, bb, F2, K2, in[3], 15);    in rmd160_transform()
    99  ROUND(bb, cc, dd, ee, aa, F2, K2, in[12], 7);    in rmd160_transform()
    [all …]
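All four rmd*.c hits share the same pattern: K2 is an alias for RMD_K2 (presumably the usual 0x5a827999 RIPEMD round-2 constant) and each ROUND() line is one compression step. A hedged sketch of the 4-word step used by rmd128/rmd256; the 5-word variants in rmd160/rmd320 additionally add e after the rotate and rotate c left by 10:

#include <stdint.h>

/* Assumed value of RMD_K2 (crypto/ripemd.h); treat it as an assumption. */
#define RMD_K2 0x5a827999U
#define K2 RMD_K2

static inline uint32_t rol32(uint32_t w, unsigned int s)
{
	return (w << s) | (w >> (32 - s));
}

/* RIPEMD round-2 selection function: bitwise "x ? y : z". */
#define F2(x, y, z) ((z) ^ ((x) & ((y) ^ (z))))

/* One 4-word step: mix, add message word and round constant, rotate. */
#define ROUND(a, b, c, d, f, k, x, s) do {       \
	(a) += f((b), (c), (d)) + (x) + (k);     \
	(a) = rol32((a), (s));                   \
} while (0)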
khazad.c
   763  u64 K2, K1;                                      in khazad_setkey() (local)
   766  K2 = ((u64)be32_to_cpu(key[0]) << 32) | be32_to_cpu(key[1]);   in khazad_setkey()
   779  c[r] ^ K2;                                       in khazad_setkey()
   780  K2 = K1;                                         in khazad_setkey()
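khazad.c line 766 packs the two big-endian 32-bit words at the start of the user key into the 64-bit working word K2 (K1 presumably holds the other half, and the pair shifts along each round via K2 = K1 at line 780). A small userspace sketch of that packing, with ntohl() standing in for the kernel's be32_to_cpu():

#include <stdint.h>
#include <arpa/inet.h>	/* ntohl(): userspace stand-in for be32_to_cpu() */

/* Glue two big-endian 32-bit key words into one 64-bit key half. */
static uint64_t pack_key_half(const uint32_t key[2])
{
	return ((uint64_t)ntohl(key[0]) << 32) | (uint64_t)ntohl(key[1]);
}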