/**
 * The Whirlpool hashing function.
 *
 * <P>
 * <b>References</b>
 *
 * <P>
 * The Whirlpool algorithm was developed by
 * <a href="mailto:pbarreto@scopus.com.br">Paulo S. L. M. Barreto</a> and
 * <a href="mailto:vincent.rijmen@cryptomathic.com">Vincent Rijmen</a>.
 *
 * See
 *      P.S.L.M. Barreto, V. Rijmen,
 *      ``The Whirlpool hashing function,''
 *      NESSIE submission, 2000 (tweaked version, 2001),
 *      <https://www.cosic.esat.kuleuven.ac.be/nessie/workshop/submissions/whirlpool.zip>
 *
 * Based on "@version 3.0 (2003.03.12)" by Paulo S.L.M. Barreto and
 * Vincent Rijmen. Lookup "reference implementations" on
 * <http://planeta.terra.com.br/informatica/paulobarreto/>
 *
 * =============================================================================
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ''AS IS'' AND ANY EXPRESS
 * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
 * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */
#include "wp_locl.h"
#include <string.h>

typedef unsigned char u8;
#if (defined(_WIN32) || defined(_WIN64)) && !defined(__MINGW32__)
typedef unsigned __int64 u64;
#elif defined(__arch64__)
typedef unsigned long u64;
#else
typedef unsigned long long u64;
#endif

#define ROUNDS  10

#define STRICT_ALIGNMENT
#if defined(__i386) || defined(__i386__) || \
    defined(__x86_64) || defined(__x86_64__) || \
    defined(_M_IX86) || defined(_M_AMD64) || defined(_M_X64)
/*
 * Well, formally there are a couple of other architectures that permit
 * unaligned loads, specifically those not crossing cache lines: IA-64
 * and PowerPC...
 */
# undef STRICT_ALIGNMENT
#endif

#undef SMALL_REGISTER_BANK
#if defined(__i386) || defined(__i386__) || defined(_M_IX86)
# define SMALL_REGISTER_BANK
# if defined(WHIRLPOOL_ASM)
#  ifndef OPENSSL_SMALL_FOOTPRINT
#   define OPENSSL_SMALL_FOOTPRINT     /* it appears that for older
                                         * non-MMX CPUs this is actually
                                         * faster! */
#  endif
#  define GO_FOR_MMX(ctx,inp,num)      do {                             \
        extern unsigned long OPENSSL_ia32cap_P[];                       \
        void whirlpool_block_mmx(void *,const void *,size_t);           \
        if (!(OPENSSL_ia32cap_P[0] & (1<<23)))  break;                  \
        whirlpool_block_mmx(ctx->H.c,inp,num);  return;                 \
        } while (0)
# endif
#endif
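
/*
 * Added note (not in the original source): OPENSSL_ia32cap_P[0] caches the
 * EDX word of CPUID leaf 1, and bit 23 there is the MMX feature flag, so
 * GO_FOR_MMX dispatches to whirlpool_block_mmx() only on MMX-capable CPUs
 * and otherwise falls through to the portable C code below.
 */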

#undef ROTATE
#if defined(_MSC_VER)
# if defined(_WIN64)            /* applies to both IA-64 and AMD64 */
#  pragma intrinsic(_rotl64)
#  define ROTATE(a,n)   _rotl64((a),n)
# endif
#elif defined(__GNUC__) && __GNUC__>=2
# if defined(__x86_64) || defined(__x86_64__)
#  if defined(L_ENDIAN)
#   define ROTATE(a,n)  ({ u64 ret; asm ("rolq %1,%0"  \
                           : "=r"(ret) : "J"(n),"0"(a) : "cc"); ret; })
#  elif defined(B_ENDIAN)
       /*
        * Most will argue that x86_64 is always little-endian. Well, yes,
        * but then we have stratus.com who has modified gcc to "emulate"
        * big-endian on x86. Is there evidence that they [or somebody else]
        * won't do the same for x86_64? Naturally no. And this line is
        * waiting ready for that brave soul:-)
        */
#   define ROTATE(a,n)  ({ u64 ret; asm ("rorq %1,%0"  \
                           : "=r"(ret) : "J"(n),"0"(a) : "cc"); ret; })
#  endif
# elif defined(__ia64) || defined(__ia64__)
#  if defined(L_ENDIAN)
#   define ROTATE(a,n)  ({ u64 ret; asm ("shrp %0=%1,%1,%2"    \
                           : "=r"(ret) : "r"(a),"M"(64-(n))); ret; })
#  elif defined(B_ENDIAN)
#   define ROTATE(a,n)  ({ u64 ret; asm ("shrp %0=%1,%1,%2"    \
                           : "=r"(ret) : "r"(a),"M"(n)); ret; })
#  endif
# endif
#endif
#if defined(OPENSSL_SMALL_FOOTPRINT)
# if !defined(ROTATE)
#  if defined(L_ENDIAN)         /* little-endians have to rotate left */
#   define ROTATE(i,n)  ((i)<<(n) ^ (i)>>(64-n))
#  elif defined(B_ENDIAN)       /* big-endians have to rotate right */
#   define ROTATE(i,n)  ((i)>>(n) ^ (i)<<(64-n))
#  endif
# endif
# if defined(ROTATE) && !defined(STRICT_ALIGNMENT)
#  define STRICT_ALIGNMENT      /* ensure smallest table size */
# endif
#endif

/*
 * Table size depends on STRICT_ALIGNMENT and whether or not an endian-
 * specific ROTATE macro is defined. If STRICT_ALIGNMENT is not defined,
 * which is normally the case on x86[_64] CPUs, the table is 4KB large
 * unconditionally. Otherwise, if ROTATE is defined, the table is 2KB
 * large, and otherwise it is 16KB. The 2KB table requires a whole bunch
 * of additional rotations, but I'm willing to "trade," because the 16KB
 * table certainly trashes the L1 cache. I wish all CPUs could handle
 * unaligned loads, as the 4KB table neither trashes the cache nor
 * requires additional rotations.
 */
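
/*
 * Worked sizes (added note, not in the original source): the shared lookup
 * table Cx below is declared as 256*N + ROUNDS quadwords, so its circulant
 * part occupies 256*N*sizeof(u64) bytes:
 *
 *      N = 1  ->  256 * 1 * 8 =  2048 bytes (2KB,  needs ROTATE)
 *      N = 2  ->  256 * 2 * 8 =  4096 bytes (4KB,  needs unaligned loads)
 *      N = 8  ->  256 * 8 * 8 = 16384 bytes (16KB, worst case)
 *
 * plus ROUNDS (10) extra quadwords holding the round constants rc[].
 */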

/*
 * Note that every Cn macro expands as two loads: one byte load and
 * one quadword load. One can argue that that many single-byte loads
 * is excessive, as one could load a quadword and "milk" it for eight
 * 8-bit values instead. Well, yes, but in order to do so *and* avoid
 * excessive loads you have to accommodate a handful of 64-bit values
 * in the register bank and issue a bunch of shifts and masks. It's a
 * tradeoff: loads vs. shifts and masks in a big register bank[!]. On
 * most CPUs eight single-byte loads are faster, and I let the others
 * depend on a smart compiler to fold byte loads if beneficial.
 * Hand-coded assembler would be another alternative:-)
 */
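
/*
 * Illustrative sketch (added, not part of the original file): with the 2KB
 * table (N == 1) each Cn(K,i) below is one byte load selecting the table
 * row plus one quadword load rotated into column position, e.g.
 *
 *      C3(K,i)  ==  ROTATE(Cx.q[K.c[(i)*8 + 3]], 24)
 *
 * With N == 8 the rotation is instead baked into eight pre-rotated copies
 * of each entry (see the LL() expansion), and with N == 2 the same effect
 * comes from an unaligned quadword load at a byte offset into a doubled
 * entry.
 */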

#ifdef STRICT_ALIGNMENT
# if defined(ROTATE)
#  define N     1
#  define LL(c0,c1,c2,c3,c4,c5,c6,c7)   c0,c1,c2,c3,c4,c5,c6,c7
#  define C0(K,i)       (Cx.q[K.c[(i)*8+0]])
#  define C1(K,i)       ROTATE(Cx.q[K.c[(i)*8+1]],8)
#  define C2(K,i)       ROTATE(Cx.q[K.c[(i)*8+2]],16)
#  define C3(K,i)       ROTATE(Cx.q[K.c[(i)*8+3]],24)
#  define C4(K,i)       ROTATE(Cx.q[K.c[(i)*8+4]],32)
#  define C5(K,i)       ROTATE(Cx.q[K.c[(i)*8+5]],40)
#  define C6(K,i)       ROTATE(Cx.q[K.c[(i)*8+6]],48)
#  define C7(K,i)       ROTATE(Cx.q[K.c[(i)*8+7]],56)
# else
#  define N     8
#  define LL(c0,c1,c2,c3,c4,c5,c6,c7)   c0,c1,c2,c3,c4,c5,c6,c7, \
                                        c7,c0,c1,c2,c3,c4,c5,c6, \
                                        c6,c7,c0,c1,c2,c3,c4,c5, \
                                        c5,c6,c7,c0,c1,c2,c3,c4, \
                                        c4,c5,c6,c7,c0,c1,c2,c3, \
                                        c3,c4,c5,c6,c7,c0,c1,c2, \
                                        c2,c3,c4,c5,c6,c7,c0,c1, \
                                        c1,c2,c3,c4,c5,c6,c7,c0
#  define C0(K,i)       (Cx.q[0+8*K.c[(i)*8+0]])
#  define C1(K,i)       (Cx.q[1+8*K.c[(i)*8+1]])
#  define C2(K,i)       (Cx.q[2+8*K.c[(i)*8+2]])
#  define C3(K,i)       (Cx.q[3+8*K.c[(i)*8+3]])
#  define C4(K,i)       (Cx.q[4+8*K.c[(i)*8+4]])
#  define C5(K,i)       (Cx.q[5+8*K.c[(i)*8+5]])
#  define C6(K,i)       (Cx.q[6+8*K.c[(i)*8+6]])
#  define C7(K,i)       (Cx.q[7+8*K.c[(i)*8+7]])
# endif
#else
# define N      2
# define LL(c0,c1,c2,c3,c4,c5,c6,c7)    c0,c1,c2,c3,c4,c5,c6,c7, \
                                        c0,c1,c2,c3,c4,c5,c6,c7
# define C0(K,i)        (((u64*)(Cx.c+0))[2*K.c[(i)*8+0]])
# define C1(K,i)        (((u64*)(Cx.c+7))[2*K.c[(i)*8+1]])
# define C2(K,i)        (((u64*)(Cx.c+6))[2*K.c[(i)*8+2]])
# define C3(K,i)        (((u64*)(Cx.c+5))[2*K.c[(i)*8+3]])
# define C4(K,i)        (((u64*)(Cx.c+4))[2*K.c[(i)*8+4]])
# define C5(K,i)        (((u64*)(Cx.c+3))[2*K.c[(i)*8+5]])
# define C6(K,i)        (((u64*)(Cx.c+2))[2*K.c[(i)*8+6]])
# define C7(K,i)        (((u64*)(Cx.c+1))[2*K.c[(i)*8+7]])
#endif

static const union {
    u8 c[(256*N+ROUNDS)*sizeof(u64)];
    u64 q[(256*N+ROUNDS)];
} Cx = { {
        /* Note endian-neutral representation:-) */
        LL(0x18,0x18,0x60,0x18,0xc0,0x78,0x30,0xd8),
        LL(0x23,0x23,0x8c,0x23,0x05,0xaf,0x46,0x26),
        LL(0xc6,0xc6,0x3f,0xc6,0x7e,0xf9,0x91,0xb8),
        LL(0xe8,0xe8,0x87,0xe8,0x13,0x6f,0xcd,0xfb),
        LL(0x87,0x87,0x26,0x87,0x4c,0xa1,0x13,0xcb),
        LL(0xb8,0xb8,0xda,0xb8,0xa9,0x62,0x6d,0x11),
        LL(0x01,0x01,0x04,0x01,0x08,0x05,0x02,0x09),
        LL(0x4f,0x4f,0x21,0x4f,0x42,0x6e,0x9e,0x0d),
        LL(0x36,0x36,0xd8,0x36,0xad,0xee,0x6c,0x9b),
        LL(0xa6,0xa6,0xa2,0xa6,0x59,0x04,0x51,0xff),
        LL(0xd2,0xd2,0x6f,0xd2,0xde,0xbd,0xb9,0x0c),
        LL(0xf5,0xf5,0xf3,0xf5,0xfb,0x06,0xf7,0x0e),
        LL(0x79,0x79,0xf9,0x79,0xef,0x80,0xf2,0x96),
        LL(0x6f,0x6f,0xa1,0x6f,0x5f,0xce,0xde,0x30),
        LL(0x91,0x91,0x7e,0x91,0xfc,0xef,0x3f,0x6d),
        LL(0x52,0x52,0x55,0x52,0xaa,0x07,0xa4,0xf8),
        LL(0x60,0x60,0x9d,0x60,0x27,0xfd,0xc0,0x47),
        LL(0xbc,0xbc,0xca,0xbc,0x89,0x76,0x65,0x35),
        LL(0x9b,0x9b,0x56,0x9b,0xac,0xcd,0x2b,0x37),
        LL(0x8e,0x8e,0x02,0x8e,0x04,0x8c,0x01,0x8a),
        LL(0xa3,0xa3,0xb6,0xa3,0x71,0x15,0x5b,0xd2),
        LL(0x0c,0x0c,0x30,0x0c,0x60,0x3c,0x18,0x6c),
        LL(0x7b,0x7b,0xf1,0x7b,0xff,0x8a,0xf6,0x84),
        LL(0x35,0x35,0xd4,0x35,0xb5,0xe1,0x6a,0x80),
        LL(0x1d,0x1d,0x74,0x1d,0xe8,0x69,0x3a,0xf5),
        LL(0xe0,0xe0,0xa7,0xe0,0x53,0x47,0xdd,0xb3),
        LL(0xd7,0xd7,0x7b,0xd7,0xf6,0xac,0xb3,0x21),
        LL(0xc2,0xc2,0x2f,0xc2,0x5e,0xed,0x99,0x9c),
        LL(0x2e,0x2e,0xb8,0x2e,0x6d,0x96,0x5c,0x43),
        LL(0x4b,0x4b,0x31,0x4b,0x62,0x7a,0x96,0x29),
        LL(0xfe,0xfe,0xdf,0xfe,0xa3,0x21,0xe1,0x5d),
        LL(0x57,0x57,0x41,0x57,0x82,0x16,0xae,0xd5),
        LL(0x15,0x15,0x54,0x15,0xa8,0x41,0x2a,0xbd),
        LL(0x77,0x77,0xc1,0x77,0x9f,0xb6,0xee,0xe8),
        LL(0x37,0x37,0xdc,0x37,0xa5,0xeb,0x6e,0x92),
        LL(0xe5,0xe5,0xb3,0xe5,0x7b,0x56,0xd7,0x9e),
        LL(0x9f,0x9f,0x46,0x9f,0x8c,0xd9,0x23,0x13),
        LL(0xf0,0xf0,0xe7,0xf0,0xd3,0x17,0xfd,0x23),
        LL(0x4a,0x4a,0x35,0x4a,0x6a,0x7f,0x94,0x20),
        LL(0xda,0xda,0x4f,0xda,0x9e,0x95,0xa9,0x44),
        LL(0x58,0x58,0x7d,0x58,0xfa,0x25,0xb0,0xa2),
        LL(0xc9,0xc9,0x03,0xc9,0x06,0xca,0x8f,0xcf),
        LL(0x29,0x29,0xa4,0x29,0x55,0x8d,0x52,0x7c),
        LL(0x0a,0x0a,0x28,0x0a,0x50,0x22,0x14,0x5a),
        LL(0xb1,0xb1,0xfe,0xb1,0xe1,0x4f,0x7f,0x50),
        LL(0xa0,0xa0,0xba,0xa0,0x69,0x1a,0x5d,0xc9),
        LL(0x6b,0x6b,0xb1,0x6b,0x7f,0xda,0xd6,0x14),
        LL(0x85,0x85,0x2e,0x85,0x5c,0xab,0x17,0xd9),
        LL(0xbd,0xbd,0xce,0xbd,0x81,0x73,0x67,0x3c),
        LL(0x5d,0x5d,0x69,0x5d,0xd2,0x34,0xba,0x8f),
        LL(0x10,0x10,0x40,0x10,0x80,0x50,0x20,0x90),
        LL(0xf4,0xf4,0xf7,0xf4,0xf3,0x03,0xf5,0x07),
        LL(0xcb,0xcb,0x0b,0xcb,0x16,0xc0,0x8b,0xdd),
        LL(0x3e,0x3e,0xf8,0x3e,0xed,0xc6,0x7c,0xd3),
        LL(0x05,0x05,0x14,0x05,0x28,0x11,0x0a,0x2d),
        LL(0x67,0x67,0x81,0x67,0x1f,0xe6,0xce,0x78),
        LL(0xe4,0xe4,0xb7,0xe4,0x73,0x53,0xd5,0x97),
        LL(0x27,0x27,0x9c,0x27,0x25,0xbb,0x4e,0x02),
        LL(0x41,0x41,0x19,0x41,0x32,0x58,0x82,0x73),
        LL(0x8b,0x8b,0x16,0x8b,0x2c,0x9d,0x0b,0xa7),
        LL(0xa7,0xa7,0xa6,0xa7,0x51,0x01,0x53,0xf6),
        LL(0x7d,0x7d,0xe9,0x7d,0xcf,0x94,0xfa,0xb2),
        LL(0x95,0x95,0x6e,0x95,0xdc,0xfb,0x37,0x49),
        LL(0xd8,0xd8,0x47,0xd8,0x8e,0x9f,0xad,0x56),
        LL(0xfb,0xfb,0xcb,0xfb,0x8b,0x30,0xeb,0x70),
        LL(0xee,0xee,0x9f,0xee,0x23,0x71,0xc1,0xcd),
        LL(0x7c,0x7c,0xed,0x7c,0xc7,0x91,0xf8,0xbb),
        LL(0x66,0x66,0x85,0x66,0x17,0xe3,0xcc,0x71),
        LL(0xdd,0xdd,0x53,0xdd,0xa6,0x8e,0xa7,0x7b),
        LL(0x17,0x17,0x5c,0x17,0xb8,0x4b,0x2e,0xaf),
        LL(0x47,0x47,0x01,0x47,0x02,0x46,0x8e,0x45),
        LL(0x9e,0x9e,0x42,0x9e,0x84,0xdc,0x21,0x1a),
        LL(0xca,0xca,0x0f,0xca,0x1e,0xc5,0x89,0xd4),
        LL(0x2d,0x2d,0xb4,0x2d,0x75,0x99,0x5a,0x58),
        LL(0xbf,0xbf,0xc6,0xbf,0x91,0x79,0x63,0x2e),
        LL(0x07,0x07,0x1c,0x07,0x38,0x1b,0x0e,0x3f),
        LL(0xad,0xad,0x8e,0xad,0x01,0x23,0x47,0xac),
        LL(0x5a,0x5a,0x75,0x5a,0xea,0x2f,0xb4,0xb0),
        LL(0x83,0x83,0x36,0x83,0x6c,0xb5,0x1b,0xef),
        LL(0x33,0x33,0xcc,0x33,0x85,0xff,0x66,0xb6),
        LL(0x63,0x63,0x91,0x63,0x3f,0xf2,0xc6,0x5c),
        LL(0x02,0x02,0x08,0x02,0x10,0x0a,0x04,0x12),
        LL(0xaa,0xaa,0x92,0xaa,0x39,0x38,0x49,0x93),
        LL(0x71,0x71,0xd9,0x71,0xaf,0xa8,0xe2,0xde),
        LL(0xc8,0xc8,0x07,0xc8,0x0e,0xcf,0x8d,0xc6),
        LL(0x19,0x19,0x64,0x19,0xc8,0x7d,0x32,0xd1),
        LL(0x49,0x49,0x39,0x49,0x72,0x70,0x92,0x3b),
        LL(0xd9,0xd9,0x43,0xd9,0x86,0x9a,0xaf,0x5f),
        LL(0xf2,0xf2,0xef,0xf2,0xc3,0x1d,0xf9,0x31),
        LL(0xe3,0xe3,0xab,0xe3,0x4b,0x48,0xdb,0xa8),
        LL(0x5b,0x5b,0x71,0x5b,0xe2,0x2a,0xb6,0xb9),
        LL(0x88,0x88,0x1a,0x88,0x34,0x92,0x0d,0xbc),
        LL(0x9a,0x9a,0x52,0x9a,0xa4,0xc8,0x29,0x3e),
        LL(0x26,0x26,0x98,0x26,0x2d,0xbe,0x4c,0x0b),
        LL(0x32,0x32,0xc8,0x32,0x8d,0xfa,0x64,0xbf),
        LL(0xb0,0xb0,0xfa,0xb0,0xe9,0x4a,0x7d,0x59),
        LL(0xe9,0xe9,0x83,0xe9,0x1b,0x6a,0xcf,0xf2),
        LL(0x0f,0x0f,0x3c,0x0f,0x78,0x33,0x1e,0x77),
        LL(0xd5,0xd5,0x73,0xd5,0xe6,0xa6,0xb7,0x33),
        LL(0x80,0x80,0x3a,0x80,0x74,0xba,0x1d,0xf4),
        LL(0xbe,0xbe,0xc2,0xbe,0x99,0x7c,0x61,0x27),
        LL(0xcd,0xcd,0x13,0xcd,0x26,0xde,0x87,0xeb),
        LL(0x34,0x34,0xd0,0x34,0xbd,0xe4,0x68,0x89),
        LL(0x48,0x48,0x3d,0x48,0x7a,0x75,0x90,0x32),
        LL(0xff,0xff,0xdb,0xff,0xab,0x24,0xe3,0x54),
        LL(0x7a,0x7a,0xf5,0x7a,0xf7,0x8f,0xf4,0x8d),
        LL(0x90,0x90,0x7a,0x90,0xf4,0xea,0x3d,0x64),
        LL(0x5f,0x5f,0x61,0x5f,0xc2,0x3e,0xbe,0x9d),
        LL(0x20,0x20,0x80,0x20,0x1d,0xa0,0x40,0x3d),
        LL(0x68,0x68,0xbd,0x68,0x67,0xd5,0xd0,0x0f),
        LL(0x1a,0x1a,0x68,0x1a,0xd0,0x72,0x34,0xca),
        LL(0xae,0xae,0x82,0xae,0x19,0x2c,0x41,0xb7),
        LL(0xb4,0xb4,0xea,0xb4,0xc9,0x5e,0x75,0x7d),
        LL(0x54,0x54,0x4d,0x54,0x9a,0x19,0xa8,0xce),
        LL(0x93,0x93,0x76,0x93,0xec,0xe5,0x3b,0x7f),
        LL(0x22,0x22,0x88,0x22,0x0d,0xaa,0x44,0x2f),
        LL(0x64,0x64,0x8d,0x64,0x07,0xe9,0xc8,0x63),
        LL(0xf1,0xf1,0xe3,0xf1,0xdb,0x12,0xff,0x2a),
        LL(0x73,0x73,0xd1,0x73,0xbf,0xa2,0xe6,0xcc),
        LL(0x12,0x12,0x48,0x12,0x90,0x5a,0x24,0x82),
        LL(0x40,0x40,0x1d,0x40,0x3a,0x5d,0x80,0x7a),
        LL(0x08,0x08,0x20,0x08,0x40,0x28,0x10,0x48),
        LL(0xc3,0xc3,0x2b,0xc3,0x56,0xe8,0x9b,0x95),
        LL(0xec,0xec,0x97,0xec,0x33,0x7b,0xc5,0xdf),
        LL(0xdb,0xdb,0x4b,0xdb,0x96,0x90,0xab,0x4d),
        LL(0xa1,0xa1,0xbe,0xa1,0x61,0x1f,0x5f,0xc0),
        LL(0x8d,0x8d,0x0e,0x8d,0x1c,0x83,0x07,0x91),
        LL(0x3d,0x3d,0xf4,0x3d,0xf5,0xc9,0x7a,0xc8),
        LL(0x97,0x97,0x66,0x97,0xcc,0xf1,0x33,0x5b),
        LL(0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00),
        LL(0xcf,0xcf,0x1b,0xcf,0x36,0xd4,0x83,0xf9),
        LL(0x2b,0x2b,0xac,0x2b,0x45,0x87,0x56,0x6e),
        LL(0x76,0x76,0xc5,0x76,0x97,0xb3,0xec,0xe1),
        LL(0x82,0x82,0x32,0x82,0x64,0xb0,0x19,0xe6),
        LL(0xd6,0xd6,0x7f,0xd6,0xfe,0xa9,0xb1,0x28),
        LL(0x1b,0x1b,0x6c,0x1b,0xd8,0x77,0x36,0xc3),
        LL(0xb5,0xb5,0xee,0xb5,0xc1,0x5b,0x77,0x74),
        LL(0xaf,0xaf,0x86,0xaf,0x11,0x29,0x43,0xbe),
        LL(0x6a,0x6a,0xb5,0x6a,0x77,0xdf,0xd4,0x1d),
        LL(0x50,0x50,0x5d,0x50,0xba,0x0d,0xa0,0xea),
        LL(0x45,0x45,0x09,0x45,0x12,0x4c,0x8a,0x57),
        LL(0xf3,0xf3,0xeb,0xf3,0xcb,0x18,0xfb,0x38),
        LL(0x30,0x30,0xc0,0x30,0x9d,0xf0,0x60,0xad),
        LL(0xef,0xef,0x9b,0xef,0x2b,0x74,0xc3,0xc4),
        LL(0x3f,0x3f,0xfc,0x3f,0xe5,0xc3,0x7e,0xda),
        LL(0x55,0x55,0x49,0x55,0x92,0x1c,0xaa,0xc7),
        LL(0xa2,0xa2,0xb2,0xa2,0x79,0x10,0x59,0xdb),
        LL(0xea,0xea,0x8f,0xea,0x03,0x65,0xc9,0xe9),
        LL(0x65,0x65,0x89,0x65,0x0f,0xec,0xca,0x6a),
        LL(0xba,0xba,0xd2,0xba,0xb9,0x68,0x69,0x03),
        LL(0x2f,0x2f,0xbc,0x2f,0x65,0x93,0x5e,0x4a),
        LL(0xc0,0xc0,0x27,0xc0,0x4e,0xe7,0x9d,0x8e),
        LL(0xde,0xde,0x5f,0xde,0xbe,0x81,0xa1,0x60),
        LL(0x1c,0x1c,0x70,0x1c,0xe0,0x6c,0x38,0xfc),
        LL(0xfd,0xfd,0xd3,0xfd,0xbb,0x2e,0xe7,0x46),
        LL(0x4d,0x4d,0x29,0x4d,0x52,0x64,0x9a,0x1f),
        LL(0x92,0x92,0x72,0x92,0xe4,0xe0,0x39,0x76),
        LL(0x75,0x75,0xc9,0x75,0x8f,0xbc,0xea,0xfa),
        LL(0x06,0x06,0x18,0x06,0x30,0x1e,0x0c,0x36),
        LL(0x8a,0x8a,0x12,0x8a,0x24,0x98,0x09,0xae),
        LL(0xb2,0xb2,0xf2,0xb2,0xf9,0x40,0x79,0x4b),
        LL(0xe6,0xe6,0xbf,0xe6,0x63,0x59,0xd1,0x85),
        LL(0x0e,0x0e,0x38,0x0e,0x70,0x36,0x1c,0x7e),
        LL(0x1f,0x1f,0x7c,0x1f,0xf8,0x63,0x3e,0xe7),
        LL(0x62,0x62,0x95,0x62,0x37,0xf7,0xc4,0x55),
        LL(0xd4,0xd4,0x77,0xd4,0xee,0xa3,0xb5,0x3a),
        LL(0xa8,0xa8,0x9a,0xa8,0x29,0x32,0x4d,0x81),
        LL(0x96,0x96,0x62,0x96,0xc4,0xf4,0x31,0x52),
        LL(0xf9,0xf9,0xc3,0xf9,0x9b,0x3a,0xef,0x62),
        LL(0xc5,0xc5,0x33,0xc5,0x66,0xf6,0x97,0xa3),
        LL(0x25,0x25,0x94,0x25,0x35,0xb1,0x4a,0x10),
        LL(0x59,0x59,0x79,0x59,0xf2,0x20,0xb2,0xab),
        LL(0x84,0x84,0x2a,0x84,0x54,0xae,0x15,0xd0),
        LL(0x72,0x72,0xd5,0x72,0xb7,0xa7,0xe4,0xc5),
        LL(0x39,0x39,0xe4,0x39,0xd5,0xdd,0x72,0xec),
        LL(0x4c,0x4c,0x2d,0x4c,0x5a,0x61,0x98,0x16),
        LL(0x5e,0x5e,0x65,0x5e,0xca,0x3b,0xbc,0x94),
        LL(0x78,0x78,0xfd,0x78,0xe7,0x85,0xf0,0x9f),
        LL(0x38,0x38,0xe0,0x38,0xdd,0xd8,0x70,0xe5),
        LL(0x8c,0x8c,0x0a,0x8c,0x14,0x86,0x05,0x98),
        LL(0xd1,0xd1,0x63,0xd1,0xc6,0xb2,0xbf,0x17),
        LL(0xa5,0xa5,0xae,0xa5,0x41,0x0b,0x57,0xe4),
        LL(0xe2,0xe2,0xaf,0xe2,0x43,0x4d,0xd9,0xa1),
        LL(0x61,0x61,0x99,0x61,0x2f,0xf8,0xc2,0x4e),
        LL(0xb3,0xb3,0xf6,0xb3,0xf1,0x45,0x7b,0x42),
        LL(0x21,0x21,0x84,0x21,0x15,0xa5,0x42,0x34),
        LL(0x9c,0x9c,0x4a,0x9c,0x94,0xd6,0x25,0x08),
        LL(0x1e,0x1e,0x78,0x1e,0xf0,0x66,0x3c,0xee),
        LL(0x43,0x43,0x11,0x43,0x22,0x52,0x86,0x61),
        LL(0xc7,0xc7,0x3b,0xc7,0x76,0xfc,0x93,0xb1),
        LL(0xfc,0xfc,0xd7,0xfc,0xb3,0x2b,0xe5,0x4f),
        LL(0x04,0x04,0x10,0x04,0x20,0x14,0x08,0x24),
        LL(0x51,0x51,0x59,0x51,0xb2,0x08,0xa2,0xe3),
        LL(0x99,0x99,0x5e,0x99,0xbc,0xc7,0x2f,0x25),
        LL(0x6d,0x6d,0xa9,0x6d,0x4f,0xc4,0xda,0x22),
        LL(0x0d,0x0d,0x34,0x0d,0x68,0x39,0x1a,0x65),
        LL(0xfa,0xfa,0xcf,0xfa,0x83,0x35,0xe9,0x79),
        LL(0xdf,0xdf,0x5b,0xdf,0xb6,0x84,0xa3,0x69),
        LL(0x7e,0x7e,0xe5,0x7e,0xd7,0x9b,0xfc,0xa9),
        LL(0x24,0x24,0x90,0x24,0x3d,0xb4,0x48,0x19),
        LL(0x3b,0x3b,0xec,0x3b,0xc5,0xd7,0x76,0xfe),
        LL(0xab,0xab,0x96,0xab,0x31,0x3d,0x4b,0x9a),
        LL(0xce,0xce,0x1f,0xce,0x3e,0xd1,0x81,0xf0),
        LL(0x11,0x11,0x44,0x11,0x88,0x55,0x22,0x99),
        LL(0x8f,0x8f,0x06,0x8f,0x0c,0x89,0x03,0x83),
        LL(0x4e,0x4e,0x25,0x4e,0x4a,0x6b,0x9c,0x04),
        LL(0xb7,0xb7,0xe6,0xb7,0xd1,0x51,0x73,0x66),
        LL(0xeb,0xeb,0x8b,0xeb,0x0b,0x60,0xcb,0xe0),
        LL(0x3c,0x3c,0xf0,0x3c,0xfd,0xcc,0x78,0xc1),
        LL(0x81,0x81,0x3e,0x81,0x7c,0xbf,0x1f,0xfd),
        LL(0x94,0x94,0x6a,0x94,0xd4,0xfe,0x35,0x40),
        LL(0xf7,0xf7,0xfb,0xf7,0xeb,0x0c,0xf3,0x1c),
        LL(0xb9,0xb9,0xde,0xb9,0xa1,0x67,0x6f,0x18),
        LL(0x13,0x13,0x4c,0x13,0x98,0x5f,0x26,0x8b),
        LL(0x2c,0x2c,0xb0,0x2c,0x7d,0x9c,0x58,0x51),
        LL(0xd3,0xd3,0x6b,0xd3,0xd6,0xb8,0xbb,0x05),
        LL(0xe7,0xe7,0xbb,0xe7,0x6b,0x5c,0xd3,0x8c),
        LL(0x6e,0x6e,0xa5,0x6e,0x57,0xcb,0xdc,0x39),
        LL(0xc4,0xc4,0x37,0xc4,0x6e,0xf3,0x95,0xaa),
        LL(0x03,0x03,0x0c,0x03,0x18,0x0f,0x06,0x1b),
        LL(0x56,0x56,0x45,0x56,0x8a,0x13,0xac,0xdc),
        LL(0x44,0x44,0x0d,0x44,0x1a,0x49,0x88,0x5e),
        LL(0x7f,0x7f,0xe1,0x7f,0xdf,0x9e,0xfe,0xa0),
        LL(0xa9,0xa9,0x9e,0xa9,0x21,0x37,0x4f,0x88),
        LL(0x2a,0x2a,0xa8,0x2a,0x4d,0x82,0x54,0x67),
        LL(0xbb,0xbb,0xd6,0xbb,0xb1,0x6d,0x6b,0x0a),
        LL(0xc1,0xc1,0x23,0xc1,0x46,0xe2,0x9f,0x87),
        LL(0x53,0x53,0x51,0x53,0xa2,0x02,0xa6,0xf1),
        LL(0xdc,0xdc,0x57,0xdc,0xae,0x8b,0xa5,0x72),
        LL(0x0b,0x0b,0x2c,0x0b,0x58,0x27,0x16,0x53),
        LL(0x9d,0x9d,0x4e,0x9d,0x9c,0xd3,0x27,0x01),
        LL(0x6c,0x6c,0xad,0x6c,0x47,0xc1,0xd8,0x2b),
        LL(0x31,0x31,0xc4,0x31,0x95,0xf5,0x62,0xa4),
        LL(0x74,0x74,0xcd,0x74,0x87,0xb9,0xe8,0xf3),
        LL(0xf6,0xf6,0xff,0xf6,0xe3,0x09,0xf1,0x15),
        LL(0x46,0x46,0x05,0x46,0x0a,0x43,0x8c,0x4c),
        LL(0xac,0xac,0x8a,0xac,0x09,0x26,0x45,0xa5),
        LL(0x89,0x89,0x1e,0x89,0x3c,0x97,0x0f,0xb5),
        LL(0x14,0x14,0x50,0x14,0xa0,0x44,0x28,0xb4),
        LL(0xe1,0xe1,0xa3,0xe1,0x5b,0x42,0xdf,0xba),
        LL(0x16,0x16,0x58,0x16,0xb0,0x4e,0x2c,0xa6),
        LL(0x3a,0x3a,0xe8,0x3a,0xcd,0xd2,0x74,0xf7),
        LL(0x69,0x69,0xb9,0x69,0x6f,0xd0,0xd2,0x06),
        LL(0x09,0x09,0x24,0x09,0x48,0x2d,0x12,0x41),
        LL(0x70,0x70,0xdd,0x70,0xa7,0xad,0xe0,0xd7),
        LL(0xb6,0xb6,0xe2,0xb6,0xd9,0x54,0x71,0x6f),
        LL(0xd0,0xd0,0x67,0xd0,0xce,0xb7,0xbd,0x1e),
        LL(0xed,0xed,0x93,0xed,0x3b,0x7e,0xc7,0xd6),
        LL(0xcc,0xcc,0x17,0xcc,0x2e,0xdb,0x85,0xe2),
        LL(0x42,0x42,0x15,0x42,0x2a,0x57,0x84,0x68),
        LL(0x98,0x98,0x5a,0x98,0xb4,0xc2,0x2d,0x2c),
        LL(0xa4,0xa4,0xaa,0xa4,0x49,0x0e,0x55,0xed),
        LL(0x28,0x28,0xa0,0x28,0x5d,0x88,0x50,0x75),
        LL(0x5c,0x5c,0x6d,0x5c,0xda,0x31,0xb8,0x86),
        LL(0xf8,0xf8,0xc7,0xf8,0x93,0x3f,0xed,0x6b),
        LL(0x86,0x86,0x22,0x86,0x44,0xa4,0x11,0xc2),
#define RC      (&(Cx.q[256*N]))
        0x18,0x23,0xc6,0xe8,0x87,0xb8,0x01,0x4f,        /* rc[ROUNDS] */
        0x36,0xa6,0xd2,0xf5,0x79,0x6f,0x91,0x52,
        0x60,0xbc,0x9b,0x8e,0xa3,0x0c,0x7b,0x35,
        0x1d,0xe0,0xd7,0xc2,0x2e,0x4b,0xfe,0x57,
        0x15,0x77,0x37,0xe5,0x9f,0xf0,0x4a,0xda,
        0x58,0xc9,0x29,0x0a,0xb1,0xa0,0x6b,0x85,
        0xbd,0x5d,0x10,0xf4,0xcb,0x3e,0x05,0x67,
        0xe4,0x27,0x41,0x8b,0xa7,0x7d,0x95,0xd8,
        0xfb,0xee,0x7c,0x66,0xdd,0x17,0x47,0x9e,
        0xca,0x2d,0xbf,0x07,0xad,0x5a,0x83,0x33
    } };
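
/*
 * Added commentary (not in the original source): whirlpool_block() consumes
 * n consecutive 64-byte message blocks. For each block m it runs the
 * 10-round W permutation twice in lockstep -- K carries the key schedule
 * seeded with the chaining value H, S carries the data state seeded with
 * H ^ m -- and then folds the result back in Miyaguchi-Preneel fashion:
 *
 *      H = W[H](m) ^ H ^ m
 *
 * which is exactly the final "H->q[i] ^= S.q[i] ^ pa[i]" step below.
 */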

void whirlpool_block(WHIRLPOOL_CTX *ctx, const void *inp, size_t n)
{
    int r;
    const u8 *p = inp;
    union {
        u64 q[8];
        u8 c[64];
    } S, K, *H = (void *)ctx->H.q;

#ifdef GO_FOR_MMX
    GO_FOR_MMX(ctx, inp, n);
#endif
    do {
#ifdef OPENSSL_SMALL_FOOTPRINT
        u64 L[8];
        int i;

        for (i = 0; i < 64; i++)
            S.c[i] = (K.c[i] = H->c[i]) ^ p[i];
        for (r = 0; r < ROUNDS; r++) {
            for (i = 0; i < 8; i++) {
                L[i] = i ? 0 : RC[r];
                L[i] ^= C0(K,i) ^ C1(K,(i-1)&7) ^
                        C2(K,(i-2)&7) ^ C3(K,(i-3)&7) ^
                        C4(K,(i-4)&7) ^ C5(K,(i-5)&7) ^
                        C6(K,(i-6)&7) ^ C7(K,(i-7)&7);
            }
            memcpy(K.q, L, 64);
            for (i = 0; i < 8; i++) {
                L[i] ^= C0(S,i) ^ C1(S,(i-1)&7) ^
                        C2(S,(i-2)&7) ^ C3(S,(i-3)&7) ^
                        C4(S,(i-4)&7) ^ C5(S,(i-5)&7) ^
                        C6(S,(i-6)&7) ^ C7(S,(i-7)&7);
            }
            memcpy(S.q, L, 64);
        }
        for (i = 0; i < 64; i++)
            H->c[i] ^= S.c[i] ^ p[i];
#else
        u64 L0, L1, L2, L3, L4, L5, L6, L7;

# ifdef STRICT_ALIGNMENT
        if ((size_t)p & 7) {
            memcpy(S.c, p, 64);
            S.q[0] ^= (K.q[0] = H->q[0]);
            S.q[1] ^= (K.q[1] = H->q[1]);
            S.q[2] ^= (K.q[2] = H->q[2]);
            S.q[3] ^= (K.q[3] = H->q[3]);
            S.q[4] ^= (K.q[4] = H->q[4]);
            S.q[5] ^= (K.q[5] = H->q[5]);
            S.q[6] ^= (K.q[6] = H->q[6]);
            S.q[7] ^= (K.q[7] = H->q[7]);
        } else
# endif
        {
            const u64 *pa = (const u64 *)p;
            S.q[0] = (K.q[0] = H->q[0]) ^ pa[0];
            S.q[1] = (K.q[1] = H->q[1]) ^ pa[1];
            S.q[2] = (K.q[2] = H->q[2]) ^ pa[2];
            S.q[3] = (K.q[3] = H->q[3]) ^ pa[3];
            S.q[4] = (K.q[4] = H->q[4]) ^ pa[4];
            S.q[5] = (K.q[5] = H->q[5]) ^ pa[5];
            S.q[6] = (K.q[6] = H->q[6]) ^ pa[6];
            S.q[7] = (K.q[7] = H->q[7]) ^ pa[7];
        }

        for (r = 0; r < ROUNDS; r++) {
# ifdef SMALL_REGISTER_BANK
            L0 = C0(K,0) ^ C1(K,7) ^ C2(K,6) ^ C3(K,5) ^
                 C4(K,4) ^ C5(K,3) ^ C6(K,2) ^ C7(K,1) ^ RC[r];
            L1 = C0(K,1) ^ C1(K,0) ^ C2(K,7) ^ C3(K,6) ^
                 C4(K,5) ^ C5(K,4) ^ C6(K,3) ^ C7(K,2);
            L2 = C0(K,2) ^ C1(K,1) ^ C2(K,0) ^ C3(K,7) ^
                 C4(K,6) ^ C5(K,5) ^ C6(K,4) ^ C7(K,3);
            L3 = C0(K,3) ^ C1(K,2) ^ C2(K,1) ^ C3(K,0) ^
                 C4(K,7) ^ C5(K,6) ^ C6(K,5) ^ C7(K,4);
            L4 = C0(K,4) ^ C1(K,3) ^ C2(K,2) ^ C3(K,1) ^
                 C4(K,0) ^ C5(K,7) ^ C6(K,6) ^ C7(K,5);
            L5 = C0(K,5) ^ C1(K,4) ^ C2(K,3) ^ C3(K,2) ^
                 C4(K,1) ^ C5(K,0) ^ C6(K,7) ^ C7(K,6);
            L6 = C0(K,6) ^ C1(K,5) ^ C2(K,4) ^ C3(K,3) ^
                 C4(K,2) ^ C5(K,1) ^ C6(K,0) ^ C7(K,7);
            L7 = C0(K,7) ^ C1(K,6) ^ C2(K,5) ^ C3(K,4) ^
                 C4(K,3) ^ C5(K,2) ^ C6(K,1) ^ C7(K,0);

            K.q[0] = L0; K.q[1] = L1; K.q[2] = L2; K.q[3] = L3;
            K.q[4] = L4; K.q[5] = L5; K.q[6] = L6; K.q[7] = L7;

            L0 ^= C0(S,0) ^ C1(S,7) ^ C2(S,6) ^ C3(S,5) ^
                  C4(S,4) ^ C5(S,3) ^ C6(S,2) ^ C7(S,1);
            L1 ^= C0(S,1) ^ C1(S,0) ^ C2(S,7) ^ C3(S,6) ^
                  C4(S,5) ^ C5(S,4) ^ C6(S,3) ^ C7(S,2);
            L2 ^= C0(S,2) ^ C1(S,1) ^ C2(S,0) ^ C3(S,7) ^
                  C4(S,6) ^ C5(S,5) ^ C6(S,4) ^ C7(S,3);
            L3 ^= C0(S,3) ^ C1(S,2) ^ C2(S,1) ^ C3(S,0) ^
                  C4(S,7) ^ C5(S,6) ^ C6(S,5) ^ C7(S,4);
            L4 ^= C0(S,4) ^ C1(S,3) ^ C2(S,2) ^ C3(S,1) ^
                  C4(S,0) ^ C5(S,7) ^ C6(S,6) ^ C7(S,5);
            L5 ^= C0(S,5) ^ C1(S,4) ^ C2(S,3) ^ C3(S,2) ^
                  C4(S,1) ^ C5(S,0) ^ C6(S,7) ^ C7(S,6);
            L6 ^= C0(S,6) ^ C1(S,5) ^ C2(S,4) ^ C3(S,3) ^
                  C4(S,2) ^ C5(S,1) ^ C6(S,0) ^ C7(S,7);
            L7 ^= C0(S,7) ^ C1(S,6) ^ C2(S,5) ^ C3(S,4) ^
                  C4(S,3) ^ C5(S,2) ^ C6(S,1) ^ C7(S,0);

            S.q[0] = L0; S.q[1] = L1; S.q[2] = L2; S.q[3] = L3;
            S.q[4] = L4; S.q[5] = L5; S.q[6] = L6; S.q[7] = L7;
# else
            L0 = C0(K,0); L1 = C1(K,0); L2 = C2(K,0); L3 = C3(K,0);
            L4 = C4(K,0); L5 = C5(K,0); L6 = C6(K,0); L7 = C7(K,0);

            L0 ^= RC[r];

            L1 ^= C0(K,1); L2 ^= C1(K,1); L3 ^= C2(K,1); L4 ^= C3(K,1);
            L5 ^= C4(K,1); L6 ^= C5(K,1); L7 ^= C6(K,1); L0 ^= C7(K,1);

            L2 ^= C0(K,2); L3 ^= C1(K,2); L4 ^= C2(K,2); L5 ^= C3(K,2);
            L6 ^= C4(K,2); L7 ^= C5(K,2); L0 ^= C6(K,2); L1 ^= C7(K,2);

            L3 ^= C0(K,3); L4 ^= C1(K,3); L5 ^= C2(K,3); L6 ^= C3(K,3);
            L7 ^= C4(K,3); L0 ^= C5(K,3); L1 ^= C6(K,3); L2 ^= C7(K,3);

            L4 ^= C0(K,4); L5 ^= C1(K,4); L6 ^= C2(K,4); L7 ^= C3(K,4);
            L0 ^= C4(K,4); L1 ^= C5(K,4); L2 ^= C6(K,4); L3 ^= C7(K,4);

            L5 ^= C0(K,5); L6 ^= C1(K,5); L7 ^= C2(K,5); L0 ^= C3(K,5);
            L1 ^= C4(K,5); L2 ^= C5(K,5); L3 ^= C6(K,5); L4 ^= C7(K,5);

            L6 ^= C0(K,6); L7 ^= C1(K,6); L0 ^= C2(K,6); L1 ^= C3(K,6);
            L2 ^= C4(K,6); L3 ^= C5(K,6); L4 ^= C6(K,6); L5 ^= C7(K,6);

            L7 ^= C0(K,7); L0 ^= C1(K,7); L1 ^= C2(K,7); L2 ^= C3(K,7);
            L3 ^= C4(K,7); L4 ^= C5(K,7); L5 ^= C6(K,7); L6 ^= C7(K,7);

            K.q[0] = L0; K.q[1] = L1; K.q[2] = L2; K.q[3] = L3;
            K.q[4] = L4; K.q[5] = L5; K.q[6] = L6; K.q[7] = L7;

            L0 ^= C0(S,0); L1 ^= C1(S,0); L2 ^= C2(S,0); L3 ^= C3(S,0);
            L4 ^= C4(S,0); L5 ^= C5(S,0); L6 ^= C6(S,0); L7 ^= C7(S,0);

            L1 ^= C0(S,1); L2 ^= C1(S,1); L3 ^= C2(S,1); L4 ^= C3(S,1);
            L5 ^= C4(S,1); L6 ^= C5(S,1); L7 ^= C6(S,1); L0 ^= C7(S,1);

            L2 ^= C0(S,2); L3 ^= C1(S,2); L4 ^= C2(S,2); L5 ^= C3(S,2);
            L6 ^= C4(S,2); L7 ^= C5(S,2); L0 ^= C6(S,2); L1 ^= C7(S,2);

            L3 ^= C0(S,3); L4 ^= C1(S,3); L5 ^= C2(S,3); L6 ^= C3(S,3);
            L7 ^= C4(S,3); L0 ^= C5(S,3); L1 ^= C6(S,3); L2 ^= C7(S,3);

            L4 ^= C0(S,4); L5 ^= C1(S,4); L6 ^= C2(S,4); L7 ^= C3(S,4);
            L0 ^= C4(S,4); L1 ^= C5(S,4); L2 ^= C6(S,4); L3 ^= C7(S,4);

            L5 ^= C0(S,5); L6 ^= C1(S,5); L7 ^= C2(S,5); L0 ^= C3(S,5);
            L1 ^= C4(S,5); L2 ^= C5(S,5); L3 ^= C6(S,5); L4 ^= C7(S,5);

            L6 ^= C0(S,6); L7 ^= C1(S,6); L0 ^= C2(S,6); L1 ^= C3(S,6);
            L2 ^= C4(S,6); L3 ^= C5(S,6); L4 ^= C6(S,6); L5 ^= C7(S,6);

            L7 ^= C0(S,7); L0 ^= C1(S,7); L1 ^= C2(S,7); L2 ^= C3(S,7);
            L3 ^= C4(S,7); L4 ^= C5(S,7); L5 ^= C6(S,7); L6 ^= C7(S,7);

            S.q[0] = L0; S.q[1] = L1; S.q[2] = L2; S.q[3] = L3;
            S.q[4] = L4; S.q[5] = L5; S.q[6] = L6; S.q[7] = L7;
# endif
        }

# ifdef STRICT_ALIGNMENT
        if ((size_t)p & 7) {
            int i;
            for (i = 0; i < 64; i++)
                H->c[i] ^= S.c[i] ^ p[i];
        } else
# endif
        {
            const u64 *pa = (const u64 *)p;
            H->q[0] ^= S.q[0] ^ pa[0];
            H->q[1] ^= S.q[1] ^ pa[1];
            H->q[2] ^= S.q[2] ^ pa[2];
            H->q[3] ^= S.q[3] ^ pa[3];
            H->q[4] ^= S.q[4] ^ pa[4];
            H->q[5] ^= S.q[5] ^ pa[5];
            H->q[6] ^= S.q[6] ^ pa[6];
            H->q[7] ^= S.q[7] ^ pa[7];
        }
#endif
        p += 64;
    } while (--n);
}
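
/*
 * Minimal usage sketch (added, not part of the original file; assumes the
 * public WHIRLPOOL_CTX and WHIRLPOOL_Init() declarations from OpenSSL's
 * whrlpool.h). whirlpool_block() consumes whole 64-byte blocks only, with
 * n counting blocks rather than bytes; buffering of partial blocks and
 * Whirlpool's final padding are the caller's responsibility:
 *
 *      WHIRLPOOL_CTX ctx;
 *      unsigned char blocks[2 * 64];           two full 64-byte blocks
 *      WHIRLPOOL_Init(&ctx);
 *      whirlpool_block(&ctx, blocks, 2);       hash both blocks into ctx.H
 *      ...pad the trailing data and length, then finalize as usual...
 */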