/* aes_core.c */
  1. /*
  2. * Copyright 2002-2022 The OpenSSL Project Authors. All Rights Reserved.
  3. *
  4. * Licensed under the Apache License 2.0 (the "License"). You may not use
  5. * this file except in compliance with the License. You can obtain a copy
  6. * in the file LICENSE in the source distribution or at
  7. * https://www.openssl.org/source/license.html
  8. */
  9. /**
  10. * rijndael-alg-fst.c
  11. *
  12. * @version 3.0 (December 2000)
  13. *
  14. * Optimised ANSI C code for the Rijndael cipher (now AES)
  15. *
  16. * @author Vincent Rijmen
  17. * @author Antoon Bosselaers
  18. * @author Paulo Barreto
  19. *
  20. * This code is hereby placed in the public domain.
  21. *
  22. * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ''AS IS'' AND ANY EXPRESS
  23. * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  24. * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
  25. * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE
  26. * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  27. * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  28. * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
  29. * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  30. * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
  31. * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
  32. * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  33. */
  34. /* Note: rewritten a little bit to provide error control and an OpenSSL-
  35. compatible API */
  36. /*
  37. * AES low level APIs are deprecated for public use, but still ok for internal
  38. * use where we're using them to implement the higher level EVP interface, as is
  39. * the case here.
  40. */
  41. #include "internal/deprecated.h"
  42. #include <assert.h>
  43. #include <stdlib.h>
  44. #include <openssl/crypto.h>
  45. #include <openssl/aes.h>
  46. #include "aes_local.h"
  47. #if defined(OPENSSL_AES_CONST_TIME) && !defined(AES_ASM)
  48. # if (defined(_WIN32) || defined(_WIN64)) && !defined(__MINGW32__)
  49. # define U64(C) C##UI64
  50. # elif defined(__arch64__)
  51. # define U64(C) C##UL
  52. # else
  53. # define U64(C) C##ULL
  54. # endif
/*
 * Overlay of one 64-bit lane viewed as bytes, two 32-bit words, or a
 * single 64-bit word; lets the state/key material be addressed at
 * whichever granularity a step needs.
 */
typedef union {
    unsigned char b[8];
    u32 w[2];
    u64 d;
} uni;
  60. /*
  61. * Compute w := (w * x) mod (x^8 + x^4 + x^3 + x^1 + 1)
  62. * Therefore the name "xtime".
  63. */
  64. static void XtimeWord(u32 *w)
  65. {
  66. u32 a, b;
  67. a = *w;
  68. b = a & 0x80808080u;
  69. a ^= b;
  70. b -= b >> 7;
  71. b &= 0x1B1B1B1Bu;
  72. b ^= a << 1;
  73. *w = b;
  74. }
  75. static void XtimeLong(u64 *w)
  76. {
  77. u64 a, b;
  78. a = *w;
  79. b = a & U64(0x8080808080808080);
  80. a ^= b;
  81. b -= b >> 7;
  82. b &= U64(0x1B1B1B1B1B1B1B1B);
  83. b ^= a << 1;
  84. *w = b;
  85. }
  86. /*
  87. * This computes w := S * w ^ -1 + c, where c = {01100011}.
  88. * Instead of using GF(2^8) mod (x^8+x^4+x^3+x+1} we do the inversion
  89. * in GF(GF(GF(2^2)^2)^2) mod (X^2+X+8)
  90. * and GF(GF(2^2)^2) mod (X^2+X+2)
  91. * and GF(2^2) mod (X^2+X+1)
  92. * The first part of the algorithm below transfers the coordinates
  93. * {0x01,0x02,0x04,0x08,0x10,0x20,0x40,0x80} =>
  94. * {1,Y,Y^2,Y^3,Y^4,Y^5,Y^6,Y^7} with Y=0x41:
  95. * {0x01,0x41,0x66,0x6c,0x56,0x9a,0x58,0xc4}
  96. * The last part undoes the coordinate transfer and the final affine
  97. * transformation S:
  98. * b[i] = b[i] + b[(i+4)%8] + b[(i+5)%8] + b[(i+6)%8] + b[(i+7)%8] + c[i]
  99. * in one step.
  100. * The multiplication in GF(2^2^2^2) is done in ordinary coords:
  101. * A = (a0*1 + a1*x^4)
  102. * B = (b0*1 + b1*x^4)
  103. * AB = ((a0*b0 + 8*a1*b1)*1 + (a1*b0 + (a0+a1)*b1)*x^4)
  104. * When A = (a0,a1) is given we want to solve AB = 1:
  105. * (a) 1 = a0*b0 + 8*a1*b1
  106. * (b) 0 = a1*b0 + (a0+a1)*b1
  107. * => multiply (a) by a1 and (b) by a0
  108. * (c) a1 = a1*a0*b0 + (8*a1*a1)*b1
  109. * (d) 0 = a1*a0*b0 + (a0*a0+a1*a0)*b1
  110. * => add (c) + (d)
  111. * (e) a1 = (a0*a0 + a1*a0 + 8*a1*a1)*b1
  112. * => therefore
  113. * b1 = (a0*a0 + a1*a0 + 8*a1*a1)^-1 * a1
  114. * => and adding (a1*b0) to (b) we get
  115. * (f) a1*b0 = (a0+a1)*b1
  116. * => therefore
  117. * b0 = (a0*a0 + a1*a0 + 8*a1*a1)^-1 * (a0+a1)
  118. * Note this formula also works for the case
  119. * (a0+a1)*a0 + 8*a1*a1 = 0
  120. * if the inverse element for 0^-1 is mapped to 0.
  121. * Repeat the same for GF(2^2^2) and GF(2^2).
  122. * We get the following algorithm:
  123. * inv8(a0,a1):
  124. * x0 = a0^a1
  125. * [y0,y1] = mul4([x0,a1],[a0,a1]); (*)
  126. * y1 = mul4(8,y1);
  127. * t = inv4(y0^y1);
  128. * [b0,b1] = mul4([x0,a1],[t,t]); (*)
  129. * return [b0,b1];
  130. * The non-linear multiplies (*) can be done in parallel at no extra cost.
  131. */
  132. static void SubWord(u32 *w)
  133. {
  134. u32 x, y, a1, a2, a3, a4, a5, a6;
  135. x = *w;
  136. y = ((x & 0xFEFEFEFEu) >> 1) | ((x & 0x01010101u) << 7);
  137. x &= 0xDDDDDDDDu;
  138. x ^= y & 0x57575757u;
  139. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  140. x ^= y & 0x1C1C1C1Cu;
  141. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  142. x ^= y & 0x4A4A4A4Au;
  143. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  144. x ^= y & 0x42424242u;
  145. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  146. x ^= y & 0x64646464u;
  147. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  148. x ^= y & 0xE0E0E0E0u;
  149. a1 = x;
  150. a1 ^= (x & 0xF0F0F0F0u) >> 4;
  151. a2 = ((x & 0xCCCCCCCCu) >> 2) | ((x & 0x33333333u) << 2);
  152. a3 = x & a1;
  153. a3 ^= (a3 & 0xAAAAAAAAu) >> 1;
  154. a3 ^= (((x << 1) & a1) ^ ((a1 << 1) & x)) & 0xAAAAAAAAu;
  155. a4 = a2 & a1;
  156. a4 ^= (a4 & 0xAAAAAAAAu) >> 1;
  157. a4 ^= (((a2 << 1) & a1) ^ ((a1 << 1) & a2)) & 0xAAAAAAAAu;
  158. a5 = (a3 & 0xCCCCCCCCu) >> 2;
  159. a3 ^= ((a4 << 2) ^ a4) & 0xCCCCCCCCu;
  160. a4 = a5 & 0x22222222u;
  161. a4 |= a4 >> 1;
  162. a4 ^= (a5 << 1) & 0x22222222u;
  163. a3 ^= a4;
  164. a5 = a3 & 0xA0A0A0A0u;
  165. a5 |= a5 >> 1;
  166. a5 ^= (a3 << 1) & 0xA0A0A0A0u;
  167. a4 = a5 & 0xC0C0C0C0u;
  168. a6 = a4 >> 2;
  169. a4 ^= (a5 << 2) & 0xC0C0C0C0u;
  170. a5 = a6 & 0x20202020u;
  171. a5 |= a5 >> 1;
  172. a5 ^= (a6 << 1) & 0x20202020u;
  173. a4 |= a5;
  174. a3 ^= a4 >> 4;
  175. a3 &= 0x0F0F0F0Fu;
  176. a2 = a3;
  177. a2 ^= (a3 & 0x0C0C0C0Cu) >> 2;
  178. a4 = a3 & a2;
  179. a4 ^= (a4 & 0x0A0A0A0A0Au) >> 1;
  180. a4 ^= (((a3 << 1) & a2) ^ ((a2 << 1) & a3)) & 0x0A0A0A0Au;
  181. a5 = a4 & 0x08080808u;
  182. a5 |= a5 >> 1;
  183. a5 ^= (a4 << 1) & 0x08080808u;
  184. a4 ^= a5 >> 2;
  185. a4 &= 0x03030303u;
  186. a4 ^= (a4 & 0x02020202u) >> 1;
  187. a4 |= a4 << 2;
  188. a3 = a2 & a4;
  189. a3 ^= (a3 & 0x0A0A0A0Au) >> 1;
  190. a3 ^= (((a2 << 1) & a4) ^ ((a4 << 1) & a2)) & 0x0A0A0A0Au;
  191. a3 |= a3 << 4;
  192. a2 = ((a1 & 0xCCCCCCCCu) >> 2) | ((a1 & 0x33333333u) << 2);
  193. x = a1 & a3;
  194. x ^= (x & 0xAAAAAAAAu) >> 1;
  195. x ^= (((a1 << 1) & a3) ^ ((a3 << 1) & a1)) & 0xAAAAAAAAu;
  196. a4 = a2 & a3;
  197. a4 ^= (a4 & 0xAAAAAAAAu) >> 1;
  198. a4 ^= (((a2 << 1) & a3) ^ ((a3 << 1) & a2)) & 0xAAAAAAAAu;
  199. a5 = (x & 0xCCCCCCCCu) >> 2;
  200. x ^= ((a4 << 2) ^ a4) & 0xCCCCCCCCu;
  201. a4 = a5 & 0x22222222u;
  202. a4 |= a4 >> 1;
  203. a4 ^= (a5 << 1) & 0x22222222u;
  204. x ^= a4;
  205. y = ((x & 0xFEFEFEFEu) >> 1) | ((x & 0x01010101u) << 7);
  206. x &= 0x39393939u;
  207. x ^= y & 0x3F3F3F3Fu;
  208. y = ((y & 0xFCFCFCFCu) >> 2) | ((y & 0x03030303u) << 6);
  209. x ^= y & 0x97979797u;
  210. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  211. x ^= y & 0x9B9B9B9Bu;
  212. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  213. x ^= y & 0x3C3C3C3Cu;
  214. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  215. x ^= y & 0xDDDDDDDDu;
  216. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  217. x ^= y & 0x72727272u;
  218. x ^= 0x63636363u;
  219. *w = x;
  220. }
/*
 * Constant-time AES S-box applied to all eight bytes of *w at once;
 * 64-bit twin of SubWord().  See the inv8() derivation in the comment
 * block above for the algorithm.
 */
static void SubLong(u64 *w)
{
    u64 x, y, a1, a2, a3, a4, a5, a6;

    x = *w;
    /* Linear basis change into the GF(((2^2)^2)^2) representation. */
    y = ((x & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((x & U64(0x0101010101010101)) << 7);
    x &= U64(0xDDDDDDDDDDDDDDDD);
    x ^= y & U64(0x5757575757575757);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x1C1C1C1C1C1C1C1C);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x4A4A4A4A4A4A4A4A);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x4242424242424242);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x6464646464646464);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0xE0E0E0E0E0E0E0E0);
    /* Tower-field inversion, nibble at a time. */
    a1 = x;
    a1 ^= (x & U64(0xF0F0F0F0F0F0F0F0)) >> 4;
    a2 = ((x & U64(0xCCCCCCCCCCCCCCCC)) >> 2) | ((x & U64(0x3333333333333333)) << 2);
    a3 = x & a1;
    a3 ^= (a3 & U64(0xAAAAAAAAAAAAAAAA)) >> 1;
    a3 ^= (((x << 1) & a1) ^ ((a1 << 1) & x)) & U64(0xAAAAAAAAAAAAAAAA);
    a4 = a2 & a1;
    a4 ^= (a4 & U64(0xAAAAAAAAAAAAAAAA)) >> 1;
    a4 ^= (((a2 << 1) & a1) ^ ((a1 << 1) & a2)) & U64(0xAAAAAAAAAAAAAAAA);
    a5 = (a3 & U64(0xCCCCCCCCCCCCCCCC)) >> 2;
    a3 ^= ((a4 << 2) ^ a4) & U64(0xCCCCCCCCCCCCCCCC);
    a4 = a5 & U64(0x2222222222222222);
    a4 |= a4 >> 1;
    a4 ^= (a5 << 1) & U64(0x2222222222222222);
    a3 ^= a4;
    a5 = a3 & U64(0xA0A0A0A0A0A0A0A0);
    a5 |= a5 >> 1;
    a5 ^= (a3 << 1) & U64(0xA0A0A0A0A0A0A0A0);
    a4 = a5 & U64(0xC0C0C0C0C0C0C0C0);
    a6 = a4 >> 2;
    a4 ^= (a5 << 2) & U64(0xC0C0C0C0C0C0C0C0);
    a5 = a6 & U64(0x2020202020202020);
    a5 |= a5 >> 1;
    a5 ^= (a6 << 1) & U64(0x2020202020202020);
    a4 |= a5;
    a3 ^= a4 >> 4;
    a3 &= U64(0x0F0F0F0F0F0F0F0F);
    /* Low-nibble inversion in GF((2^2)^2). */
    a2 = a3;
    a2 ^= (a3 & U64(0x0C0C0C0C0C0C0C0C)) >> 2;
    a4 = a3 & a2;
    a4 ^= (a4 & U64(0x0A0A0A0A0A0A0A0A)) >> 1;
    a4 ^= (((a3 << 1) & a2) ^ ((a2 << 1) & a3)) & U64(0x0A0A0A0A0A0A0A0A);
    a5 = a4 & U64(0x0808080808080808);
    a5 |= a5 >> 1;
    a5 ^= (a4 << 1) & U64(0x0808080808080808);
    a4 ^= a5 >> 2;
    a4 &= U64(0x0303030303030303);
    a4 ^= (a4 & U64(0x0202020202020202)) >> 1;
    a4 |= a4 << 2;
    a3 = a2 & a4;
    a3 ^= (a3 & U64(0x0A0A0A0A0A0A0A0A)) >> 1;
    a3 ^= (((a2 << 1) & a4) ^ ((a4 << 1) & a2)) & U64(0x0A0A0A0A0A0A0A0A);
    a3 |= a3 << 4;
    /* Multiply back to combine the inverted halves. */
    a2 = ((a1 & U64(0xCCCCCCCCCCCCCCCC)) >> 2) | ((a1 & U64(0x3333333333333333)) << 2);
    x = a1 & a3;
    x ^= (x & U64(0xAAAAAAAAAAAAAAAA)) >> 1;
    x ^= (((a1 << 1) & a3) ^ ((a3 << 1) & a1)) & U64(0xAAAAAAAAAAAAAAAA);
    a4 = a2 & a3;
    a4 ^= (a4 & U64(0xAAAAAAAAAAAAAAAA)) >> 1;
    a4 ^= (((a2 << 1) & a3) ^ ((a3 << 1) & a2)) & U64(0xAAAAAAAAAAAAAAAA);
    a5 = (x & U64(0xCCCCCCCCCCCCCCCC)) >> 2;
    x ^= ((a4 << 2) ^ a4) & U64(0xCCCCCCCCCCCCCCCC);
    a4 = a5 & U64(0x2222222222222222);
    a4 |= a4 >> 1;
    a4 ^= (a5 << 1) & U64(0x2222222222222222);
    x ^= a4;
    /* Undo the basis change and apply the affine map (constant 0x63). */
    y = ((x & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((x & U64(0x0101010101010101)) << 7);
    x &= U64(0x3939393939393939);
    x ^= y & U64(0x3F3F3F3F3F3F3F3F);
    y = ((y & U64(0xFCFCFCFCFCFCFCFC)) >> 2) | ((y & U64(0x0303030303030303)) << 6);
    x ^= y & U64(0x9797979797979797);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x9B9B9B9B9B9B9B9B);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x3C3C3C3C3C3C3C3C);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0xDDDDDDDDDDDDDDDD);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x7272727272727272);
    x ^= U64(0x6363636363636363);
    *w = x;
}
  310. /*
  311. * This computes w := (S^-1 * (w + c))^-1
  312. */
/*
 * Constant-time inverse AES S-box on all eight bytes of *w at once:
 * strips the affine constant, inverts the affine map, then reuses the
 * same tower-field inversion core as SubLong().
 */
static void InvSubLong(u64 *w)
{
    u64 x, y, a1, a2, a3, a4, a5, a6;

    x = *w;
    x ^= U64(0x6363636363636363);   /* remove the S-box affine constant */
    /* Inverse affine map combined with the basis change. */
    y = ((x & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((x & U64(0x0101010101010101)) << 7);
    x &= U64(0xFDFDFDFDFDFDFDFD);
    x ^= y & U64(0x5E5E5E5E5E5E5E5E);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0xF3F3F3F3F3F3F3F3);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0xF5F5F5F5F5F5F5F5);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x7878787878787878);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x7777777777777777);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x1515151515151515);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0xA5A5A5A5A5A5A5A5);
    /* Tower-field inversion core, identical to SubLong(). */
    a1 = x;
    a1 ^= (x & U64(0xF0F0F0F0F0F0F0F0)) >> 4;
    a2 = ((x & U64(0xCCCCCCCCCCCCCCCC)) >> 2) | ((x & U64(0x3333333333333333)) << 2);
    a3 = x & a1;
    a3 ^= (a3 & U64(0xAAAAAAAAAAAAAAAA)) >> 1;
    a3 ^= (((x << 1) & a1) ^ ((a1 << 1) & x)) & U64(0xAAAAAAAAAAAAAAAA);
    a4 = a2 & a1;
    a4 ^= (a4 & U64(0xAAAAAAAAAAAAAAAA)) >> 1;
    a4 ^= (((a2 << 1) & a1) ^ ((a1 << 1) & a2)) & U64(0xAAAAAAAAAAAAAAAA);
    a5 = (a3 & U64(0xCCCCCCCCCCCCCCCC)) >> 2;
    a3 ^= ((a4 << 2) ^ a4) & U64(0xCCCCCCCCCCCCCCCC);
    a4 = a5 & U64(0x2222222222222222);
    a4 |= a4 >> 1;
    a4 ^= (a5 << 1) & U64(0x2222222222222222);
    a3 ^= a4;
    a5 = a3 & U64(0xA0A0A0A0A0A0A0A0);
    a5 |= a5 >> 1;
    a5 ^= (a3 << 1) & U64(0xA0A0A0A0A0A0A0A0);
    a4 = a5 & U64(0xC0C0C0C0C0C0C0C0);
    a6 = a4 >> 2;
    a4 ^= (a5 << 2) & U64(0xC0C0C0C0C0C0C0C0);
    a5 = a6 & U64(0x2020202020202020);
    a5 |= a5 >> 1;
    a5 ^= (a6 << 1) & U64(0x2020202020202020);
    a4 |= a5;
    a3 ^= a4 >> 4;
    a3 &= U64(0x0F0F0F0F0F0F0F0F);
    /* Low-nibble inversion in GF((2^2)^2). */
    a2 = a3;
    a2 ^= (a3 & U64(0x0C0C0C0C0C0C0C0C)) >> 2;
    a4 = a3 & a2;
    a4 ^= (a4 & U64(0x0A0A0A0A0A0A0A0A)) >> 1;
    a4 ^= (((a3 << 1) & a2) ^ ((a2 << 1) & a3)) & U64(0x0A0A0A0A0A0A0A0A);
    a5 = a4 & U64(0x0808080808080808);
    a5 |= a5 >> 1;
    a5 ^= (a4 << 1) & U64(0x0808080808080808);
    a4 ^= a5 >> 2;
    a4 &= U64(0x0303030303030303);
    a4 ^= (a4 & U64(0x0202020202020202)) >> 1;
    a4 |= a4 << 2;
    a3 = a2 & a4;
    a3 ^= (a3 & U64(0x0A0A0A0A0A0A0A0A)) >> 1;
    a3 ^= (((a2 << 1) & a4) ^ ((a4 << 1) & a2)) & U64(0x0A0A0A0A0A0A0A0A);
    a3 |= a3 << 4;
    /* Multiply back to combine the inverted halves. */
    a2 = ((a1 & U64(0xCCCCCCCCCCCCCCCC)) >> 2) | ((a1 & U64(0x3333333333333333)) << 2);
    x = a1 & a3;
    x ^= (x & U64(0xAAAAAAAAAAAAAAAA)) >> 1;
    x ^= (((a1 << 1) & a3) ^ ((a3 << 1) & a1)) & U64(0xAAAAAAAAAAAAAAAA);
    a4 = a2 & a3;
    a4 ^= (a4 & U64(0xAAAAAAAAAAAAAAAA)) >> 1;
    a4 ^= (((a2 << 1) & a3) ^ ((a3 << 1) & a2)) & U64(0xAAAAAAAAAAAAAAAA);
    a5 = (x & U64(0xCCCCCCCCCCCCCCCC)) >> 2;
    x ^= ((a4 << 2) ^ a4) & U64(0xCCCCCCCCCCCCCCCC);
    a4 = a5 & U64(0x2222222222222222);
    a4 |= a4 >> 1;
    a4 ^= (a5 << 1) & U64(0x2222222222222222);
    x ^= a4;
    /* Undo the basis change back to the polynomial representation. */
    y = ((x & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((x & U64(0x0101010101010101)) << 7);
    x &= U64(0xB5B5B5B5B5B5B5B5);
    x ^= y & U64(0x4040404040404040);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x8080808080808080);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x1616161616161616);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0xEBEBEBEBEBEBEBEB);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x9797979797979797);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0xFBFBFBFBFBFBFBFB);
    y = ((y & U64(0xFEFEFEFEFEFEFEFE)) >> 1) | ((y & U64(0x0101010101010101)) << 7);
    x ^= y & U64(0x7D7D7D7D7D7D7D7D);
    *w = x;
}
  406. static void ShiftRows(u64 *state)
  407. {
  408. unsigned char s[4];
  409. unsigned char *s0;
  410. int r;
  411. s0 = (unsigned char *)state;
  412. for (r = 0; r < 4; r++) {
  413. s[0] = s0[0*4 + r];
  414. s[1] = s0[1*4 + r];
  415. s[2] = s0[2*4 + r];
  416. s[3] = s0[3*4 + r];
  417. s0[0*4 + r] = s[(r+0) % 4];
  418. s0[1*4 + r] = s[(r+1) % 4];
  419. s0[2*4 + r] = s[(r+2) % 4];
  420. s0[3*4 + r] = s[(r+3) % 4];
  421. }
  422. }
  423. static void InvShiftRows(u64 *state)
  424. {
  425. unsigned char s[4];
  426. unsigned char *s0;
  427. int r;
  428. s0 = (unsigned char *)state;
  429. for (r = 0; r < 4; r++) {
  430. s[0] = s0[0*4 + r];
  431. s[1] = s0[1*4 + r];
  432. s[2] = s0[2*4 + r];
  433. s[3] = s0[3*4 + r];
  434. s0[0*4 + r] = s[(4-r) % 4];
  435. s0[1*4 + r] = s[(5-r) % 4];
  436. s0[2*4 + r] = s[(6-r) % 4];
  437. s0[3*4 + r] = s[(7-r) % 4];
  438. }
  439. }
/*
 * AES MixColumns over the whole state: each u64 packs two 4-byte
 * columns, so two iterations cover all four columns.  Per column the
 * net effect on byte 0 is 2*a0 ^ 3*a1 ^ a2 ^ a3 (rotated for the
 * other bytes), computed branch-free via XtimeLong().
 */
static void MixColumns(u64 *state)
{
    uni s1;
    uni s;
    int c;

    for (c = 0; c < 2; c++) {
        s1.d = state[c];
        s.d = s1.d;
        /* The two swap-XOR steps plus the s1 XOR below leave each byte
         * of s holding the XOR of the other three bytes of its column. */
        s.d ^= ((s.d & U64(0xFFFF0000FFFF0000)) >> 16)
            | ((s.d & U64(0x0000FFFF0000FFFF)) << 16);
        s.d ^= ((s.d & U64(0xFF00FF00FF00FF00)) >> 8)
            | ((s.d & U64(0x00FF00FF00FF00FF)) << 8);
        s.d ^= s1.d;
        /* Add 2*(own byte)... */
        XtimeLong(&s1.d);
        s.d ^= s1.d;
        /* ...and 2*(next byte in the column), giving the 3* term. */
        s.b[0] ^= s1.b[1];
        s.b[1] ^= s1.b[2];
        s.b[2] ^= s1.b[3];
        s.b[3] ^= s1.b[0];
        s.b[4] ^= s1.b[5];
        s.b[5] ^= s1.b[6];
        s.b[6] ^= s1.b[7];
        s.b[7] ^= s1.b[4];
        state[c] = s.d;
    }
}
/*
 * Inverse MixColumns (coefficients {0e,0b,0d,09}): computed as the
 * forward MixColumns combination plus 4x and 8x correction terms,
 * each produced with additional XtimeLong() doublings.
 */
static void InvMixColumns(u64 *state)
{
    uni s1;
    uni s;
    int c;

    for (c = 0; c < 2; c++) {
        s1.d = state[c];
        s.d = s1.d;
        /* Forward MixColumns part, identical to MixColumns() above. */
        s.d ^= ((s.d & U64(0xFFFF0000FFFF0000)) >> 16)
            | ((s.d & U64(0x0000FFFF0000FFFF)) << 16);
        s.d ^= ((s.d & U64(0xFF00FF00FF00FF00)) >> 8)
            | ((s.d & U64(0x00FF00FF00FF00FF)) << 8);
        s.d ^= s1.d;
        XtimeLong(&s1.d);
        s.d ^= s1.d;
        s.b[0] ^= s1.b[1];
        s.b[1] ^= s1.b[2];
        s.b[2] ^= s1.b[3];
        s.b[3] ^= s1.b[0];
        s.b[4] ^= s1.b[5];
        s.b[5] ^= s1.b[6];
        s.b[6] ^= s1.b[7];
        s.b[7] ^= s1.b[4];
        /* 4x term, spread over byte pairs via the 16-bit swap. */
        XtimeLong(&s1.d);
        s1.d ^= ((s1.d & U64(0xFFFF0000FFFF0000)) >> 16)
            | ((s1.d & U64(0x0000FFFF0000FFFF)) << 16);
        s.d ^= s1.d;
        /* 8x term, spread over adjacent bytes via the 8-bit swap. */
        XtimeLong(&s1.d);
        s1.d ^= ((s1.d & U64(0xFF00FF00FF00FF00)) >> 8)
            | ((s1.d & U64(0x00FF00FF00FF00FF)) << 8);
        s.d ^= s1.d;
        state[c] = s.d;
    }
}
  500. static void AddRoundKey(u64 *state, const u64 *w)
  501. {
  502. state[0] ^= w[0];
  503. state[1] ^= w[1];
  504. }
  505. static void Cipher(const unsigned char *in, unsigned char *out,
  506. const u64 *w, int nr)
  507. {
  508. u64 state[2];
  509. int i;
  510. memcpy(state, in, 16);
  511. AddRoundKey(state, w);
  512. for (i = 1; i < nr; i++) {
  513. SubLong(&state[0]);
  514. SubLong(&state[1]);
  515. ShiftRows(state);
  516. MixColumns(state);
  517. AddRoundKey(state, w + i*2);
  518. }
  519. SubLong(&state[0]);
  520. SubLong(&state[1]);
  521. ShiftRows(state);
  522. AddRoundKey(state, w + nr*2);
  523. memcpy(out, state, 16);
  524. }
  525. static void InvCipher(const unsigned char *in, unsigned char *out,
  526. const u64 *w, int nr)
  527. {
  528. u64 state[2];
  529. int i;
  530. memcpy(state, in, 16);
  531. AddRoundKey(state, w + nr*2);
  532. for (i = nr - 1; i > 0; i--) {
  533. InvShiftRows(state);
  534. InvSubLong(&state[0]);
  535. InvSubLong(&state[1]);
  536. AddRoundKey(state, w + i*2);
  537. InvMixColumns(state);
  538. }
  539. InvShiftRows(state);
  540. InvSubLong(&state[0]);
  541. InvSubLong(&state[1]);
  542. AddRoundKey(state, w);
  543. memcpy(out, state, 16);
  544. }
  545. static void RotWord(u32 *x)
  546. {
  547. unsigned char *w0;
  548. unsigned char tmp;
  549. w0 = (unsigned char *)x;
  550. tmp = w0[0];
  551. w0[0] = w0[1];
  552. w0[1] = w0[2];
  553. w0[2] = w0[3];
  554. w0[3] = tmp;
  555. }
/*
 * FIPS-197 key expansion operating on u64 pairs of round-key words.
 * key: nk*4 bytes of cipher key; w: receives (nr+1)*2 u64 words;
 * nr: round count; nk: key length in 32-bit words (4, 6 or 8).
 */
static void KeyExpansion(const unsigned char *key, u64 *w,
                         int nr, int nk)
{
    u32 rcon;
    uni prev;
    u32 temp;
    int i, n;

    memcpy(w, key, nk*4);
    /* Round constant starts as {0x01,0,0,0} in memory order
     * (endian-neutral via memcpy); advanced with XtimeWord below. */
    memcpy(&rcon, "\1\0\0\0", 4);
    n = nk/2;   /* key length measured in u64 units */
    prev.d = w[n-1];
    for (i = n; i < (nr+1)*2; i++) {
        temp = prev.w[1];   /* most recent 32-bit word */
        if (i % n == 0) {
            /* Start of a key-length stride: rotate, substitute, rcon. */
            RotWord(&temp);
            SubWord(&temp);
            temp ^= rcon;
            XtimeWord(&rcon);
        } else if (nk > 6 && i % n == 2) {
            /* 256-bit keys only: extra SubWord mid-stride. */
            SubWord(&temp);
        }
        prev.d = w[i-n];
        prev.w[0] ^= temp;
        prev.w[1] ^= prev.w[0];   /* chain the second word off the first */
        w[i] = prev.d;
    }
}
  583. /**
  584. * Expand the cipher key into the encryption key schedule.
  585. */
  586. int AES_set_encrypt_key(const unsigned char *userKey, const int bits,
  587. AES_KEY *key)
  588. {
  589. u64 *rk;
  590. if (!userKey || !key)
  591. return -1;
  592. if (bits != 128 && bits != 192 && bits != 256)
  593. return -2;
  594. rk = (u64*)key->rd_key;
  595. if (bits == 128)
  596. key->rounds = 10;
  597. else if (bits == 192)
  598. key->rounds = 12;
  599. else
  600. key->rounds = 14;
  601. KeyExpansion(userKey, rk, key->rounds, bits/32);
  602. return 0;
  603. }
  604. /**
  605. * Expand the cipher key into the decryption key schedule.
  606. */
int AES_set_decrypt_key(const unsigned char *userKey, const int bits,
                        AES_KEY *key)
{
    /* The constant-time implementation uses one key schedule for both
     * directions (InvCipher walks it from the end backwards), so
     * decryption setup is identical to encryption setup. */
    return AES_set_encrypt_key(userKey, bits, key);
}
  612. /*
  613. * Encrypt a single block
  614. * in and out can overlap
  615. */
  616. void AES_encrypt(const unsigned char *in, unsigned char *out,
  617. const AES_KEY *key)
  618. {
  619. const u64 *rk;
  620. assert(in && out && key);
  621. rk = (u64*)key->rd_key;
  622. Cipher(in, out, rk, key->rounds);
  623. }
  624. /*
  625. * Decrypt a single block
  626. * in and out can overlap
  627. */
  628. void AES_decrypt(const unsigned char *in, unsigned char *out,
  629. const AES_KEY *key)
  630. {
  631. const u64 *rk;
  632. assert(in && out && key);
  633. rk = (u64*)key->rd_key;
  634. InvCipher(in, out, rk, key->rounds);
  635. }
  636. #elif !defined(AES_ASM)
  637. /*-
  638. Te0[x] = S [x].[02, 01, 01, 03];
  639. Te1[x] = S [x].[03, 02, 01, 01];
  640. Te2[x] = S [x].[01, 03, 02, 01];
  641. Te3[x] = S [x].[01, 01, 03, 02];
  642. Td0[x] = Si[x].[0e, 09, 0d, 0b];
  643. Td1[x] = Si[x].[0b, 0e, 09, 0d];
  644. Td2[x] = Si[x].[0d, 0b, 0e, 09];
  645. Td3[x] = Si[x].[09, 0d, 0b, 0e];
  646. Td4[x] = Si[x].[01];
  647. */
/*
 * Encryption T-table 0: Te0[x] = S[x].[02, 01, 01, 03], i.e. the AES S-box
 * output of x multiplied by the MixColumns column (02,01,01,03) in GF(2^8),
 * packed as one big-endian 32-bit word (e.g. S[0]=0x63 -> 0xc66363a5).
 * Combines SubBytes + MixColumns for one state byte per lookup.
 */
static const u32 Te0[256] = {
0xc66363a5U, 0xf87c7c84U, 0xee777799U, 0xf67b7b8dU,
0xfff2f20dU, 0xd66b6bbdU, 0xde6f6fb1U, 0x91c5c554U,
0x60303050U, 0x02010103U, 0xce6767a9U, 0x562b2b7dU,
0xe7fefe19U, 0xb5d7d762U, 0x4dababe6U, 0xec76769aU,
0x8fcaca45U, 0x1f82829dU, 0x89c9c940U, 0xfa7d7d87U,
0xeffafa15U, 0xb25959ebU, 0x8e4747c9U, 0xfbf0f00bU,
0x41adadecU, 0xb3d4d467U, 0x5fa2a2fdU, 0x45afafeaU,
0x239c9cbfU, 0x53a4a4f7U, 0xe4727296U, 0x9bc0c05bU,
0x75b7b7c2U, 0xe1fdfd1cU, 0x3d9393aeU, 0x4c26266aU,
0x6c36365aU, 0x7e3f3f41U, 0xf5f7f702U, 0x83cccc4fU,
0x6834345cU, 0x51a5a5f4U, 0xd1e5e534U, 0xf9f1f108U,
0xe2717193U, 0xabd8d873U, 0x62313153U, 0x2a15153fU,
0x0804040cU, 0x95c7c752U, 0x46232365U, 0x9dc3c35eU,
0x30181828U, 0x379696a1U, 0x0a05050fU, 0x2f9a9ab5U,
0x0e070709U, 0x24121236U, 0x1b80809bU, 0xdfe2e23dU,
0xcdebeb26U, 0x4e272769U, 0x7fb2b2cdU, 0xea75759fU,
0x1209091bU, 0x1d83839eU, 0x582c2c74U, 0x341a1a2eU,
0x361b1b2dU, 0xdc6e6eb2U, 0xb45a5aeeU, 0x5ba0a0fbU,
0xa45252f6U, 0x763b3b4dU, 0xb7d6d661U, 0x7db3b3ceU,
0x5229297bU, 0xdde3e33eU, 0x5e2f2f71U, 0x13848497U,
0xa65353f5U, 0xb9d1d168U, 0x00000000U, 0xc1eded2cU,
0x40202060U, 0xe3fcfc1fU, 0x79b1b1c8U, 0xb65b5bedU,
0xd46a6abeU, 0x8dcbcb46U, 0x67bebed9U, 0x7239394bU,
0x944a4adeU, 0x984c4cd4U, 0xb05858e8U, 0x85cfcf4aU,
0xbbd0d06bU, 0xc5efef2aU, 0x4faaaae5U, 0xedfbfb16U,
0x864343c5U, 0x9a4d4dd7U, 0x66333355U, 0x11858594U,
0x8a4545cfU, 0xe9f9f910U, 0x04020206U, 0xfe7f7f81U,
0xa05050f0U, 0x783c3c44U, 0x259f9fbaU, 0x4ba8a8e3U,
0xa25151f3U, 0x5da3a3feU, 0x804040c0U, 0x058f8f8aU,
0x3f9292adU, 0x219d9dbcU, 0x70383848U, 0xf1f5f504U,
0x63bcbcdfU, 0x77b6b6c1U, 0xafdada75U, 0x42212163U,
0x20101030U, 0xe5ffff1aU, 0xfdf3f30eU, 0xbfd2d26dU,
0x81cdcd4cU, 0x180c0c14U, 0x26131335U, 0xc3ecec2fU,
0xbe5f5fe1U, 0x359797a2U, 0x884444ccU, 0x2e171739U,
0x93c4c457U, 0x55a7a7f2U, 0xfc7e7e82U, 0x7a3d3d47U,
0xc86464acU, 0xba5d5de7U, 0x3219192bU, 0xe6737395U,
0xc06060a0U, 0x19818198U, 0x9e4f4fd1U, 0xa3dcdc7fU,
0x44222266U, 0x542a2a7eU, 0x3b9090abU, 0x0b888883U,
0x8c4646caU, 0xc7eeee29U, 0x6bb8b8d3U, 0x2814143cU,
0xa7dede79U, 0xbc5e5ee2U, 0x160b0b1dU, 0xaddbdb76U,
0xdbe0e03bU, 0x64323256U, 0x743a3a4eU, 0x140a0a1eU,
0x924949dbU, 0x0c06060aU, 0x4824246cU, 0xb85c5ce4U,
0x9fc2c25dU, 0xbdd3d36eU, 0x43acacefU, 0xc46262a6U,
0x399191a8U, 0x319595a4U, 0xd3e4e437U, 0xf279798bU,
0xd5e7e732U, 0x8bc8c843U, 0x6e373759U, 0xda6d6db7U,
0x018d8d8cU, 0xb1d5d564U, 0x9c4e4ed2U, 0x49a9a9e0U,
0xd86c6cb4U, 0xac5656faU, 0xf3f4f407U, 0xcfeaea25U,
0xca6565afU, 0xf47a7a8eU, 0x47aeaee9U, 0x10080818U,
0x6fbabad5U, 0xf0787888U, 0x4a25256fU, 0x5c2e2e72U,
0x381c1c24U, 0x57a6a6f1U, 0x73b4b4c7U, 0x97c6c651U,
0xcbe8e823U, 0xa1dddd7cU, 0xe874749cU, 0x3e1f1f21U,
0x964b4bddU, 0x61bdbddcU, 0x0d8b8b86U, 0x0f8a8a85U,
0xe0707090U, 0x7c3e3e42U, 0x71b5b5c4U, 0xcc6666aaU,
0x904848d8U, 0x06030305U, 0xf7f6f601U, 0x1c0e0e12U,
0xc26161a3U, 0x6a35355fU, 0xae5757f9U, 0x69b9b9d0U,
0x17868691U, 0x99c1c158U, 0x3a1d1d27U, 0x279e9eb9U,
0xd9e1e138U, 0xebf8f813U, 0x2b9898b3U, 0x22111133U,
0xd26969bbU, 0xa9d9d970U, 0x078e8e89U, 0x339494a7U,
0x2d9b9bb6U, 0x3c1e1e22U, 0x15878792U, 0xc9e9e920U,
0x87cece49U, 0xaa5555ffU, 0x50282878U, 0xa5dfdf7aU,
0x038c8c8fU, 0x59a1a1f8U, 0x09898980U, 0x1a0d0d17U,
0x65bfbfdaU, 0xd7e6e631U, 0x844242c6U, 0xd06868b8U,
0x824141c3U, 0x299999b0U, 0x5a2d2d77U, 0x1e0f0f11U,
0x7bb0b0cbU, 0xa85454fcU, 0x6dbbbbd6U, 0x2c16163aU,
};
/*
 * Encryption T-table 1: Te1[x] = S[x].[03, 02, 01, 01] (see comment above) —
 * Te0 rotated right by one byte, used for the second byte position of each
 * output column so the four tables cover all byte rotations.
 */
static const u32 Te1[256] = {
0xa5c66363U, 0x84f87c7cU, 0x99ee7777U, 0x8df67b7bU,
0x0dfff2f2U, 0xbdd66b6bU, 0xb1de6f6fU, 0x5491c5c5U,
0x50603030U, 0x03020101U, 0xa9ce6767U, 0x7d562b2bU,
0x19e7fefeU, 0x62b5d7d7U, 0xe64dababU, 0x9aec7676U,
0x458fcacaU, 0x9d1f8282U, 0x4089c9c9U, 0x87fa7d7dU,
0x15effafaU, 0xebb25959U, 0xc98e4747U, 0x0bfbf0f0U,
0xec41adadU, 0x67b3d4d4U, 0xfd5fa2a2U, 0xea45afafU,
0xbf239c9cU, 0xf753a4a4U, 0x96e47272U, 0x5b9bc0c0U,
0xc275b7b7U, 0x1ce1fdfdU, 0xae3d9393U, 0x6a4c2626U,
0x5a6c3636U, 0x417e3f3fU, 0x02f5f7f7U, 0x4f83ccccU,
0x5c683434U, 0xf451a5a5U, 0x34d1e5e5U, 0x08f9f1f1U,
0x93e27171U, 0x73abd8d8U, 0x53623131U, 0x3f2a1515U,
0x0c080404U, 0x5295c7c7U, 0x65462323U, 0x5e9dc3c3U,
0x28301818U, 0xa1379696U, 0x0f0a0505U, 0xb52f9a9aU,
0x090e0707U, 0x36241212U, 0x9b1b8080U, 0x3ddfe2e2U,
0x26cdebebU, 0x694e2727U, 0xcd7fb2b2U, 0x9fea7575U,
0x1b120909U, 0x9e1d8383U, 0x74582c2cU, 0x2e341a1aU,
0x2d361b1bU, 0xb2dc6e6eU, 0xeeb45a5aU, 0xfb5ba0a0U,
0xf6a45252U, 0x4d763b3bU, 0x61b7d6d6U, 0xce7db3b3U,
0x7b522929U, 0x3edde3e3U, 0x715e2f2fU, 0x97138484U,
0xf5a65353U, 0x68b9d1d1U, 0x00000000U, 0x2cc1ededU,
0x60402020U, 0x1fe3fcfcU, 0xc879b1b1U, 0xedb65b5bU,
0xbed46a6aU, 0x468dcbcbU, 0xd967bebeU, 0x4b723939U,
0xde944a4aU, 0xd4984c4cU, 0xe8b05858U, 0x4a85cfcfU,
0x6bbbd0d0U, 0x2ac5efefU, 0xe54faaaaU, 0x16edfbfbU,
0xc5864343U, 0xd79a4d4dU, 0x55663333U, 0x94118585U,
0xcf8a4545U, 0x10e9f9f9U, 0x06040202U, 0x81fe7f7fU,
0xf0a05050U, 0x44783c3cU, 0xba259f9fU, 0xe34ba8a8U,
0xf3a25151U, 0xfe5da3a3U, 0xc0804040U, 0x8a058f8fU,
0xad3f9292U, 0xbc219d9dU, 0x48703838U, 0x04f1f5f5U,
0xdf63bcbcU, 0xc177b6b6U, 0x75afdadaU, 0x63422121U,
0x30201010U, 0x1ae5ffffU, 0x0efdf3f3U, 0x6dbfd2d2U,
0x4c81cdcdU, 0x14180c0cU, 0x35261313U, 0x2fc3ececU,
0xe1be5f5fU, 0xa2359797U, 0xcc884444U, 0x392e1717U,
0x5793c4c4U, 0xf255a7a7U, 0x82fc7e7eU, 0x477a3d3dU,
0xacc86464U, 0xe7ba5d5dU, 0x2b321919U, 0x95e67373U,
0xa0c06060U, 0x98198181U, 0xd19e4f4fU, 0x7fa3dcdcU,
0x66442222U, 0x7e542a2aU, 0xab3b9090U, 0x830b8888U,
0xca8c4646U, 0x29c7eeeeU, 0xd36bb8b8U, 0x3c281414U,
0x79a7dedeU, 0xe2bc5e5eU, 0x1d160b0bU, 0x76addbdbU,
0x3bdbe0e0U, 0x56643232U, 0x4e743a3aU, 0x1e140a0aU,
0xdb924949U, 0x0a0c0606U, 0x6c482424U, 0xe4b85c5cU,
0x5d9fc2c2U, 0x6ebdd3d3U, 0xef43acacU, 0xa6c46262U,
0xa8399191U, 0xa4319595U, 0x37d3e4e4U, 0x8bf27979U,
0x32d5e7e7U, 0x438bc8c8U, 0x596e3737U, 0xb7da6d6dU,
0x8c018d8dU, 0x64b1d5d5U, 0xd29c4e4eU, 0xe049a9a9U,
0xb4d86c6cU, 0xfaac5656U, 0x07f3f4f4U, 0x25cfeaeaU,
0xafca6565U, 0x8ef47a7aU, 0xe947aeaeU, 0x18100808U,
0xd56fbabaU, 0x88f07878U, 0x6f4a2525U, 0x725c2e2eU,
0x24381c1cU, 0xf157a6a6U, 0xc773b4b4U, 0x5197c6c6U,
0x23cbe8e8U, 0x7ca1ddddU, 0x9ce87474U, 0x213e1f1fU,
0xdd964b4bU, 0xdc61bdbdU, 0x860d8b8bU, 0x850f8a8aU,
0x90e07070U, 0x427c3e3eU, 0xc471b5b5U, 0xaacc6666U,
0xd8904848U, 0x05060303U, 0x01f7f6f6U, 0x121c0e0eU,
0xa3c26161U, 0x5f6a3535U, 0xf9ae5757U, 0xd069b9b9U,
0x91178686U, 0x5899c1c1U, 0x273a1d1dU, 0xb9279e9eU,
0x38d9e1e1U, 0x13ebf8f8U, 0xb32b9898U, 0x33221111U,
0xbbd26969U, 0x70a9d9d9U, 0x89078e8eU, 0xa7339494U,
0xb62d9b9bU, 0x223c1e1eU, 0x92158787U, 0x20c9e9e9U,
0x4987ceceU, 0xffaa5555U, 0x78502828U, 0x7aa5dfdfU,
0x8f038c8cU, 0xf859a1a1U, 0x80098989U, 0x171a0d0dU,
0xda65bfbfU, 0x31d7e6e6U, 0xc6844242U, 0xb8d06868U,
0xc3824141U, 0xb0299999U, 0x775a2d2dU, 0x111e0f0fU,
0xcb7bb0b0U, 0xfca85454U, 0xd66dbbbbU, 0x3a2c1616U,
};
/*
 * Encryption T-table 2: Te2[x] = S[x].[01, 03, 02, 01] (see comment above) —
 * Te0 rotated right by two bytes, used for the third byte position of each
 * output column.
 */
static const u32 Te2[256] = {
0x63a5c663U, 0x7c84f87cU, 0x7799ee77U, 0x7b8df67bU,
0xf20dfff2U, 0x6bbdd66bU, 0x6fb1de6fU, 0xc55491c5U,
0x30506030U, 0x01030201U, 0x67a9ce67U, 0x2b7d562bU,
0xfe19e7feU, 0xd762b5d7U, 0xabe64dabU, 0x769aec76U,
0xca458fcaU, 0x829d1f82U, 0xc94089c9U, 0x7d87fa7dU,
0xfa15effaU, 0x59ebb259U, 0x47c98e47U, 0xf00bfbf0U,
0xadec41adU, 0xd467b3d4U, 0xa2fd5fa2U, 0xafea45afU,
0x9cbf239cU, 0xa4f753a4U, 0x7296e472U, 0xc05b9bc0U,
0xb7c275b7U, 0xfd1ce1fdU, 0x93ae3d93U, 0x266a4c26U,
0x365a6c36U, 0x3f417e3fU, 0xf702f5f7U, 0xcc4f83ccU,
0x345c6834U, 0xa5f451a5U, 0xe534d1e5U, 0xf108f9f1U,
0x7193e271U, 0xd873abd8U, 0x31536231U, 0x153f2a15U,
0x040c0804U, 0xc75295c7U, 0x23654623U, 0xc35e9dc3U,
0x18283018U, 0x96a13796U, 0x050f0a05U, 0x9ab52f9aU,
0x07090e07U, 0x12362412U, 0x809b1b80U, 0xe23ddfe2U,
0xeb26cdebU, 0x27694e27U, 0xb2cd7fb2U, 0x759fea75U,
0x091b1209U, 0x839e1d83U, 0x2c74582cU, 0x1a2e341aU,
0x1b2d361bU, 0x6eb2dc6eU, 0x5aeeb45aU, 0xa0fb5ba0U,
0x52f6a452U, 0x3b4d763bU, 0xd661b7d6U, 0xb3ce7db3U,
0x297b5229U, 0xe33edde3U, 0x2f715e2fU, 0x84971384U,
0x53f5a653U, 0xd168b9d1U, 0x00000000U, 0xed2cc1edU,
0x20604020U, 0xfc1fe3fcU, 0xb1c879b1U, 0x5bedb65bU,
0x6abed46aU, 0xcb468dcbU, 0xbed967beU, 0x394b7239U,
0x4ade944aU, 0x4cd4984cU, 0x58e8b058U, 0xcf4a85cfU,
0xd06bbbd0U, 0xef2ac5efU, 0xaae54faaU, 0xfb16edfbU,
0x43c58643U, 0x4dd79a4dU, 0x33556633U, 0x85941185U,
0x45cf8a45U, 0xf910e9f9U, 0x02060402U, 0x7f81fe7fU,
0x50f0a050U, 0x3c44783cU, 0x9fba259fU, 0xa8e34ba8U,
0x51f3a251U, 0xa3fe5da3U, 0x40c08040U, 0x8f8a058fU,
0x92ad3f92U, 0x9dbc219dU, 0x38487038U, 0xf504f1f5U,
0xbcdf63bcU, 0xb6c177b6U, 0xda75afdaU, 0x21634221U,
0x10302010U, 0xff1ae5ffU, 0xf30efdf3U, 0xd26dbfd2U,
0xcd4c81cdU, 0x0c14180cU, 0x13352613U, 0xec2fc3ecU,
0x5fe1be5fU, 0x97a23597U, 0x44cc8844U, 0x17392e17U,
0xc45793c4U, 0xa7f255a7U, 0x7e82fc7eU, 0x3d477a3dU,
0x64acc864U, 0x5de7ba5dU, 0x192b3219U, 0x7395e673U,
0x60a0c060U, 0x81981981U, 0x4fd19e4fU, 0xdc7fa3dcU,
0x22664422U, 0x2a7e542aU, 0x90ab3b90U, 0x88830b88U,
0x46ca8c46U, 0xee29c7eeU, 0xb8d36bb8U, 0x143c2814U,
0xde79a7deU, 0x5ee2bc5eU, 0x0b1d160bU, 0xdb76addbU,
0xe03bdbe0U, 0x32566432U, 0x3a4e743aU, 0x0a1e140aU,
0x49db9249U, 0x060a0c06U, 0x246c4824U, 0x5ce4b85cU,
0xc25d9fc2U, 0xd36ebdd3U, 0xacef43acU, 0x62a6c462U,
0x91a83991U, 0x95a43195U, 0xe437d3e4U, 0x798bf279U,
0xe732d5e7U, 0xc8438bc8U, 0x37596e37U, 0x6db7da6dU,
0x8d8c018dU, 0xd564b1d5U, 0x4ed29c4eU, 0xa9e049a9U,
0x6cb4d86cU, 0x56faac56U, 0xf407f3f4U, 0xea25cfeaU,
0x65afca65U, 0x7a8ef47aU, 0xaee947aeU, 0x08181008U,
0xbad56fbaU, 0x7888f078U, 0x256f4a25U, 0x2e725c2eU,
0x1c24381cU, 0xa6f157a6U, 0xb4c773b4U, 0xc65197c6U,
0xe823cbe8U, 0xdd7ca1ddU, 0x749ce874U, 0x1f213e1fU,
0x4bdd964bU, 0xbddc61bdU, 0x8b860d8bU, 0x8a850f8aU,
0x7090e070U, 0x3e427c3eU, 0xb5c471b5U, 0x66aacc66U,
0x48d89048U, 0x03050603U, 0xf601f7f6U, 0x0e121c0eU,
0x61a3c261U, 0x355f6a35U, 0x57f9ae57U, 0xb9d069b9U,
0x86911786U, 0xc15899c1U, 0x1d273a1dU, 0x9eb9279eU,
0xe138d9e1U, 0xf813ebf8U, 0x98b32b98U, 0x11332211U,
0x69bbd269U, 0xd970a9d9U, 0x8e89078eU, 0x94a73394U,
0x9bb62d9bU, 0x1e223c1eU, 0x87921587U, 0xe920c9e9U,
0xce4987ceU, 0x55ffaa55U, 0x28785028U, 0xdf7aa5dfU,
0x8c8f038cU, 0xa1f859a1U, 0x89800989U, 0x0d171a0dU,
0xbfda65bfU, 0xe631d7e6U, 0x42c68442U, 0x68b8d068U,
0x41c38241U, 0x99b02999U, 0x2d775a2dU, 0x0f111e0fU,
0xb0cb7bb0U, 0x54fca854U, 0xbbd66dbbU, 0x163a2c16U,
};
/*
 * Encryption T-table 3: Te3[x] = S[x].[01, 01, 03, 02] (see comment above) —
 * Te0 rotated right by three bytes, used for the fourth byte position of each
 * output column.
 */
static const u32 Te3[256] = {
0x6363a5c6U, 0x7c7c84f8U, 0x777799eeU, 0x7b7b8df6U,
0xf2f20dffU, 0x6b6bbdd6U, 0x6f6fb1deU, 0xc5c55491U,
0x30305060U, 0x01010302U, 0x6767a9ceU, 0x2b2b7d56U,
0xfefe19e7U, 0xd7d762b5U, 0xababe64dU, 0x76769aecU,
0xcaca458fU, 0x82829d1fU, 0xc9c94089U, 0x7d7d87faU,
0xfafa15efU, 0x5959ebb2U, 0x4747c98eU, 0xf0f00bfbU,
0xadadec41U, 0xd4d467b3U, 0xa2a2fd5fU, 0xafafea45U,
0x9c9cbf23U, 0xa4a4f753U, 0x727296e4U, 0xc0c05b9bU,
0xb7b7c275U, 0xfdfd1ce1U, 0x9393ae3dU, 0x26266a4cU,
0x36365a6cU, 0x3f3f417eU, 0xf7f702f5U, 0xcccc4f83U,
0x34345c68U, 0xa5a5f451U, 0xe5e534d1U, 0xf1f108f9U,
0x717193e2U, 0xd8d873abU, 0x31315362U, 0x15153f2aU,
0x04040c08U, 0xc7c75295U, 0x23236546U, 0xc3c35e9dU,
0x18182830U, 0x9696a137U, 0x05050f0aU, 0x9a9ab52fU,
0x0707090eU, 0x12123624U, 0x80809b1bU, 0xe2e23ddfU,
0xebeb26cdU, 0x2727694eU, 0xb2b2cd7fU, 0x75759feaU,
0x09091b12U, 0x83839e1dU, 0x2c2c7458U, 0x1a1a2e34U,
0x1b1b2d36U, 0x6e6eb2dcU, 0x5a5aeeb4U, 0xa0a0fb5bU,
0x5252f6a4U, 0x3b3b4d76U, 0xd6d661b7U, 0xb3b3ce7dU,
0x29297b52U, 0xe3e33eddU, 0x2f2f715eU, 0x84849713U,
0x5353f5a6U, 0xd1d168b9U, 0x00000000U, 0xeded2cc1U,
0x20206040U, 0xfcfc1fe3U, 0xb1b1c879U, 0x5b5bedb6U,
0x6a6abed4U, 0xcbcb468dU, 0xbebed967U, 0x39394b72U,
0x4a4ade94U, 0x4c4cd498U, 0x5858e8b0U, 0xcfcf4a85U,
0xd0d06bbbU, 0xefef2ac5U, 0xaaaae54fU, 0xfbfb16edU,
0x4343c586U, 0x4d4dd79aU, 0x33335566U, 0x85859411U,
0x4545cf8aU, 0xf9f910e9U, 0x02020604U, 0x7f7f81feU,
0x5050f0a0U, 0x3c3c4478U, 0x9f9fba25U, 0xa8a8e34bU,
0x5151f3a2U, 0xa3a3fe5dU, 0x4040c080U, 0x8f8f8a05U,
0x9292ad3fU, 0x9d9dbc21U, 0x38384870U, 0xf5f504f1U,
0xbcbcdf63U, 0xb6b6c177U, 0xdada75afU, 0x21216342U,
0x10103020U, 0xffff1ae5U, 0xf3f30efdU, 0xd2d26dbfU,
0xcdcd4c81U, 0x0c0c1418U, 0x13133526U, 0xecec2fc3U,
0x5f5fe1beU, 0x9797a235U, 0x4444cc88U, 0x1717392eU,
0xc4c45793U, 0xa7a7f255U, 0x7e7e82fcU, 0x3d3d477aU,
0x6464acc8U, 0x5d5de7baU, 0x19192b32U, 0x737395e6U,
0x6060a0c0U, 0x81819819U, 0x4f4fd19eU, 0xdcdc7fa3U,
0x22226644U, 0x2a2a7e54U, 0x9090ab3bU, 0x8888830bU,
0x4646ca8cU, 0xeeee29c7U, 0xb8b8d36bU, 0x14143c28U,
0xdede79a7U, 0x5e5ee2bcU, 0x0b0b1d16U, 0xdbdb76adU,
0xe0e03bdbU, 0x32325664U, 0x3a3a4e74U, 0x0a0a1e14U,
0x4949db92U, 0x06060a0cU, 0x24246c48U, 0x5c5ce4b8U,
0xc2c25d9fU, 0xd3d36ebdU, 0xacacef43U, 0x6262a6c4U,
0x9191a839U, 0x9595a431U, 0xe4e437d3U, 0x79798bf2U,
0xe7e732d5U, 0xc8c8438bU, 0x3737596eU, 0x6d6db7daU,
0x8d8d8c01U, 0xd5d564b1U, 0x4e4ed29cU, 0xa9a9e049U,
0x6c6cb4d8U, 0x5656faacU, 0xf4f407f3U, 0xeaea25cfU,
0x6565afcaU, 0x7a7a8ef4U, 0xaeaee947U, 0x08081810U,
0xbabad56fU, 0x787888f0U, 0x25256f4aU, 0x2e2e725cU,
0x1c1c2438U, 0xa6a6f157U, 0xb4b4c773U, 0xc6c65197U,
0xe8e823cbU, 0xdddd7ca1U, 0x74749ce8U, 0x1f1f213eU,
0x4b4bdd96U, 0xbdbddc61U, 0x8b8b860dU, 0x8a8a850fU,
0x707090e0U, 0x3e3e427cU, 0xb5b5c471U, 0x6666aaccU,
0x4848d890U, 0x03030506U, 0xf6f601f7U, 0x0e0e121cU,
0x6161a3c2U, 0x35355f6aU, 0x5757f9aeU, 0xb9b9d069U,
0x86869117U, 0xc1c15899U, 0x1d1d273aU, 0x9e9eb927U,
0xe1e138d9U, 0xf8f813ebU, 0x9898b32bU, 0x11113322U,
0x6969bbd2U, 0xd9d970a9U, 0x8e8e8907U, 0x9494a733U,
0x9b9bb62dU, 0x1e1e223cU, 0x87879215U, 0xe9e920c9U,
0xcece4987U, 0x5555ffaaU, 0x28287850U, 0xdfdf7aa5U,
0x8c8c8f03U, 0xa1a1f859U, 0x89898009U, 0x0d0d171aU,
0xbfbfda65U, 0xe6e631d7U, 0x4242c684U, 0x6868b8d0U,
0x4141c382U, 0x9999b029U, 0x2d2d775aU, 0x0f0f111eU,
0xb0b0cb7bU, 0x5454fca8U, 0xbbbbd66dU, 0x16163a2cU,
};
/*
 * Decryption T-table 0: Td0[x] = Si[x].[0e, 09, 0d, 0b] (see comment above) —
 * the inverse S-box output of x multiplied by the InvMixColumns column
 * (0e,09,0d,0b) in GF(2^8), packed as one big-endian 32-bit word.
 */
static const u32 Td0[256] = {
0x51f4a750U, 0x7e416553U, 0x1a17a4c3U, 0x3a275e96U,
0x3bab6bcbU, 0x1f9d45f1U, 0xacfa58abU, 0x4be30393U,
0x2030fa55U, 0xad766df6U, 0x88cc7691U, 0xf5024c25U,
0x4fe5d7fcU, 0xc52acbd7U, 0x26354480U, 0xb562a38fU,
0xdeb15a49U, 0x25ba1b67U, 0x45ea0e98U, 0x5dfec0e1U,
0xc32f7502U, 0x814cf012U, 0x8d4697a3U, 0x6bd3f9c6U,
0x038f5fe7U, 0x15929c95U, 0xbf6d7aebU, 0x955259daU,
0xd4be832dU, 0x587421d3U, 0x49e06929U, 0x8ec9c844U,
0x75c2896aU, 0xf48e7978U, 0x99583e6bU, 0x27b971ddU,
0xbee14fb6U, 0xf088ad17U, 0xc920ac66U, 0x7dce3ab4U,
0x63df4a18U, 0xe51a3182U, 0x97513360U, 0x62537f45U,
0xb16477e0U, 0xbb6bae84U, 0xfe81a01cU, 0xf9082b94U,
0x70486858U, 0x8f45fd19U, 0x94de6c87U, 0x527bf8b7U,
0xab73d323U, 0x724b02e2U, 0xe31f8f57U, 0x6655ab2aU,
0xb2eb2807U, 0x2fb5c203U, 0x86c57b9aU, 0xd33708a5U,
0x302887f2U, 0x23bfa5b2U, 0x02036abaU, 0xed16825cU,
0x8acf1c2bU, 0xa779b492U, 0xf307f2f0U, 0x4e69e2a1U,
0x65daf4cdU, 0x0605bed5U, 0xd134621fU, 0xc4a6fe8aU,
0x342e539dU, 0xa2f355a0U, 0x058ae132U, 0xa4f6eb75U,
0x0b83ec39U, 0x4060efaaU, 0x5e719f06U, 0xbd6e1051U,
0x3e218af9U, 0x96dd063dU, 0xdd3e05aeU, 0x4de6bd46U,
0x91548db5U, 0x71c45d05U, 0x0406d46fU, 0x605015ffU,
0x1998fb24U, 0xd6bde997U, 0x894043ccU, 0x67d99e77U,
0xb0e842bdU, 0x07898b88U, 0xe7195b38U, 0x79c8eedbU,
0xa17c0a47U, 0x7c420fe9U, 0xf8841ec9U, 0x00000000U,
0x09808683U, 0x322bed48U, 0x1e1170acU, 0x6c5a724eU,
0xfd0efffbU, 0x0f853856U, 0x3daed51eU, 0x362d3927U,
0x0a0fd964U, 0x685ca621U, 0x9b5b54d1U, 0x24362e3aU,
0x0c0a67b1U, 0x9357e70fU, 0xb4ee96d2U, 0x1b9b919eU,
0x80c0c54fU, 0x61dc20a2U, 0x5a774b69U, 0x1c121a16U,
0xe293ba0aU, 0xc0a02ae5U, 0x3c22e043U, 0x121b171dU,
0x0e090d0bU, 0xf28bc7adU, 0x2db6a8b9U, 0x141ea9c8U,
0x57f11985U, 0xaf75074cU, 0xee99ddbbU, 0xa37f60fdU,
0xf701269fU, 0x5c72f5bcU, 0x44663bc5U, 0x5bfb7e34U,
0x8b432976U, 0xcb23c6dcU, 0xb6edfc68U, 0xb8e4f163U,
0xd731dccaU, 0x42638510U, 0x13972240U, 0x84c61120U,
0x854a247dU, 0xd2bb3df8U, 0xaef93211U, 0xc729a16dU,
0x1d9e2f4bU, 0xdcb230f3U, 0x0d8652ecU, 0x77c1e3d0U,
0x2bb3166cU, 0xa970b999U, 0x119448faU, 0x47e96422U,
0xa8fc8cc4U, 0xa0f03f1aU, 0x567d2cd8U, 0x223390efU,
0x87494ec7U, 0xd938d1c1U, 0x8ccaa2feU, 0x98d40b36U,
0xa6f581cfU, 0xa57ade28U, 0xdab78e26U, 0x3fadbfa4U,
0x2c3a9de4U, 0x5078920dU, 0x6a5fcc9bU, 0x547e4662U,
0xf68d13c2U, 0x90d8b8e8U, 0x2e39f75eU, 0x82c3aff5U,
0x9f5d80beU, 0x69d0937cU, 0x6fd52da9U, 0xcf2512b3U,
0xc8ac993bU, 0x10187da7U, 0xe89c636eU, 0xdb3bbb7bU,
0xcd267809U, 0x6e5918f4U, 0xec9ab701U, 0x834f9aa8U,
0xe6956e65U, 0xaaffe67eU, 0x21bccf08U, 0xef15e8e6U,
0xbae79bd9U, 0x4a6f36ceU, 0xea9f09d4U, 0x29b07cd6U,
0x31a4b2afU, 0x2a3f2331U, 0xc6a59430U, 0x35a266c0U,
0x744ebc37U, 0xfc82caa6U, 0xe090d0b0U, 0x33a7d815U,
0xf104984aU, 0x41ecdaf7U, 0x7fcd500eU, 0x1791f62fU,
0x764dd68dU, 0x43efb04dU, 0xccaa4d54U, 0xe49604dfU,
0x9ed1b5e3U, 0x4c6a881bU, 0xc12c1fb8U, 0x4665517fU,
0x9d5eea04U, 0x018c355dU, 0xfa877473U, 0xfb0b412eU,
0xb3671d5aU, 0x92dbd252U, 0xe9105633U, 0x6dd64713U,
0x9ad7618cU, 0x37a10c7aU, 0x59f8148eU, 0xeb133c89U,
0xcea927eeU, 0xb761c935U, 0xe11ce5edU, 0x7a47b13cU,
0x9cd2df59U, 0x55f2733fU, 0x1814ce79U, 0x73c737bfU,
0x53f7cdeaU, 0x5ffdaa5bU, 0xdf3d6f14U, 0x7844db86U,
0xcaaff381U, 0xb968c43eU, 0x3824342cU, 0xc2a3405fU,
0x161dc372U, 0xbce2250cU, 0x283c498bU, 0xff0d9541U,
0x39a80171U, 0x080cb3deU, 0xd8b4e49cU, 0x6456c190U,
0x7bcb8461U, 0xd532b670U, 0x486c5c74U, 0xd0b85742U,
};
/*
 * Decryption T-table 1: Td1[x] = Si[x].[0b, 0e, 09, 0d] (see comment above) —
 * Td0 rotated right by one byte.
 */
static const u32 Td1[256] = {
0x5051f4a7U, 0x537e4165U, 0xc31a17a4U, 0x963a275eU,
0xcb3bab6bU, 0xf11f9d45U, 0xabacfa58U, 0x934be303U,
0x552030faU, 0xf6ad766dU, 0x9188cc76U, 0x25f5024cU,
0xfc4fe5d7U, 0xd7c52acbU, 0x80263544U, 0x8fb562a3U,
0x49deb15aU, 0x6725ba1bU, 0x9845ea0eU, 0xe15dfec0U,
0x02c32f75U, 0x12814cf0U, 0xa38d4697U, 0xc66bd3f9U,
0xe7038f5fU, 0x9515929cU, 0xebbf6d7aU, 0xda955259U,
0x2dd4be83U, 0xd3587421U, 0x2949e069U, 0x448ec9c8U,
0x6a75c289U, 0x78f48e79U, 0x6b99583eU, 0xdd27b971U,
0xb6bee14fU, 0x17f088adU, 0x66c920acU, 0xb47dce3aU,
0x1863df4aU, 0x82e51a31U, 0x60975133U, 0x4562537fU,
0xe0b16477U, 0x84bb6baeU, 0x1cfe81a0U, 0x94f9082bU,
0x58704868U, 0x198f45fdU, 0x8794de6cU, 0xb7527bf8U,
0x23ab73d3U, 0xe2724b02U, 0x57e31f8fU, 0x2a6655abU,
0x07b2eb28U, 0x032fb5c2U, 0x9a86c57bU, 0xa5d33708U,
0xf2302887U, 0xb223bfa5U, 0xba02036aU, 0x5ced1682U,
0x2b8acf1cU, 0x92a779b4U, 0xf0f307f2U, 0xa14e69e2U,
0xcd65daf4U, 0xd50605beU, 0x1fd13462U, 0x8ac4a6feU,
0x9d342e53U, 0xa0a2f355U, 0x32058ae1U, 0x75a4f6ebU,
0x390b83ecU, 0xaa4060efU, 0x065e719fU, 0x51bd6e10U,
0xf93e218aU, 0x3d96dd06U, 0xaedd3e05U, 0x464de6bdU,
0xb591548dU, 0x0571c45dU, 0x6f0406d4U, 0xff605015U,
0x241998fbU, 0x97d6bde9U, 0xcc894043U, 0x7767d99eU,
0xbdb0e842U, 0x8807898bU, 0x38e7195bU, 0xdb79c8eeU,
0x47a17c0aU, 0xe97c420fU, 0xc9f8841eU, 0x00000000U,
0x83098086U, 0x48322bedU, 0xac1e1170U, 0x4e6c5a72U,
0xfbfd0effU, 0x560f8538U, 0x1e3daed5U, 0x27362d39U,
0x640a0fd9U, 0x21685ca6U, 0xd19b5b54U, 0x3a24362eU,
0xb10c0a67U, 0x0f9357e7U, 0xd2b4ee96U, 0x9e1b9b91U,
0x4f80c0c5U, 0xa261dc20U, 0x695a774bU, 0x161c121aU,
0x0ae293baU, 0xe5c0a02aU, 0x433c22e0U, 0x1d121b17U,
0x0b0e090dU, 0xadf28bc7U, 0xb92db6a8U, 0xc8141ea9U,
0x8557f119U, 0x4caf7507U, 0xbbee99ddU, 0xfda37f60U,
0x9ff70126U, 0xbc5c72f5U, 0xc544663bU, 0x345bfb7eU,
0x768b4329U, 0xdccb23c6U, 0x68b6edfcU, 0x63b8e4f1U,
0xcad731dcU, 0x10426385U, 0x40139722U, 0x2084c611U,
0x7d854a24U, 0xf8d2bb3dU, 0x11aef932U, 0x6dc729a1U,
0x4b1d9e2fU, 0xf3dcb230U, 0xec0d8652U, 0xd077c1e3U,
0x6c2bb316U, 0x99a970b9U, 0xfa119448U, 0x2247e964U,
0xc4a8fc8cU, 0x1aa0f03fU, 0xd8567d2cU, 0xef223390U,
0xc787494eU, 0xc1d938d1U, 0xfe8ccaa2U, 0x3698d40bU,
0xcfa6f581U, 0x28a57adeU, 0x26dab78eU, 0xa43fadbfU,
0xe42c3a9dU, 0x0d507892U, 0x9b6a5fccU, 0x62547e46U,
0xc2f68d13U, 0xe890d8b8U, 0x5e2e39f7U, 0xf582c3afU,
0xbe9f5d80U, 0x7c69d093U, 0xa96fd52dU, 0xb3cf2512U,
0x3bc8ac99U, 0xa710187dU, 0x6ee89c63U, 0x7bdb3bbbU,
0x09cd2678U, 0xf46e5918U, 0x01ec9ab7U, 0xa8834f9aU,
0x65e6956eU, 0x7eaaffe6U, 0x0821bccfU, 0xe6ef15e8U,
0xd9bae79bU, 0xce4a6f36U, 0xd4ea9f09U, 0xd629b07cU,
0xaf31a4b2U, 0x312a3f23U, 0x30c6a594U, 0xc035a266U,
0x37744ebcU, 0xa6fc82caU, 0xb0e090d0U, 0x1533a7d8U,
0x4af10498U, 0xf741ecdaU, 0x0e7fcd50U, 0x2f1791f6U,
0x8d764dd6U, 0x4d43efb0U, 0x54ccaa4dU, 0xdfe49604U,
0xe39ed1b5U, 0x1b4c6a88U, 0xb8c12c1fU, 0x7f466551U,
0x049d5eeaU, 0x5d018c35U, 0x73fa8774U, 0x2efb0b41U,
0x5ab3671dU, 0x5292dbd2U, 0x33e91056U, 0x136dd647U,
0x8c9ad761U, 0x7a37a10cU, 0x8e59f814U, 0x89eb133cU,
0xeecea927U, 0x35b761c9U, 0xede11ce5U, 0x3c7a47b1U,
0x599cd2dfU, 0x3f55f273U, 0x791814ceU, 0xbf73c737U,
0xea53f7cdU, 0x5b5ffdaaU, 0x14df3d6fU, 0x867844dbU,
0x81caaff3U, 0x3eb968c4U, 0x2c382434U, 0x5fc2a340U,
0x72161dc3U, 0x0cbce225U, 0x8b283c49U, 0x41ff0d95U,
0x7139a801U, 0xde080cb3U, 0x9cd8b4e4U, 0x906456c1U,
0x617bcb84U, 0x70d532b6U, 0x74486c5cU, 0x42d0b857U,
};
/*
 * Decryption T-table 2: Td2[x] = Si[x].[0d, 0b, 0e, 09] (see comment above) —
 * Td0 rotated right by two bytes.
 */
static const u32 Td2[256] = {
0xa75051f4U, 0x65537e41U, 0xa4c31a17U, 0x5e963a27U,
0x6bcb3babU, 0x45f11f9dU, 0x58abacfaU, 0x03934be3U,
0xfa552030U, 0x6df6ad76U, 0x769188ccU, 0x4c25f502U,
0xd7fc4fe5U, 0xcbd7c52aU, 0x44802635U, 0xa38fb562U,
0x5a49deb1U, 0x1b6725baU, 0x0e9845eaU, 0xc0e15dfeU,
0x7502c32fU, 0xf012814cU, 0x97a38d46U, 0xf9c66bd3U,
0x5fe7038fU, 0x9c951592U, 0x7aebbf6dU, 0x59da9552U,
0x832dd4beU, 0x21d35874U, 0x692949e0U, 0xc8448ec9U,
0x896a75c2U, 0x7978f48eU, 0x3e6b9958U, 0x71dd27b9U,
0x4fb6bee1U, 0xad17f088U, 0xac66c920U, 0x3ab47dceU,
0x4a1863dfU, 0x3182e51aU, 0x33609751U, 0x7f456253U,
0x77e0b164U, 0xae84bb6bU, 0xa01cfe81U, 0x2b94f908U,
0x68587048U, 0xfd198f45U, 0x6c8794deU, 0xf8b7527bU,
0xd323ab73U, 0x02e2724bU, 0x8f57e31fU, 0xab2a6655U,
0x2807b2ebU, 0xc2032fb5U, 0x7b9a86c5U, 0x08a5d337U,
0x87f23028U, 0xa5b223bfU, 0x6aba0203U, 0x825ced16U,
0x1c2b8acfU, 0xb492a779U, 0xf2f0f307U, 0xe2a14e69U,
0xf4cd65daU, 0xbed50605U, 0x621fd134U, 0xfe8ac4a6U,
0x539d342eU, 0x55a0a2f3U, 0xe132058aU, 0xeb75a4f6U,
0xec390b83U, 0xefaa4060U, 0x9f065e71U, 0x1051bd6eU,
0x8af93e21U, 0x063d96ddU, 0x05aedd3eU, 0xbd464de6U,
0x8db59154U, 0x5d0571c4U, 0xd46f0406U, 0x15ff6050U,
0xfb241998U, 0xe997d6bdU, 0x43cc8940U, 0x9e7767d9U,
0x42bdb0e8U, 0x8b880789U, 0x5b38e719U, 0xeedb79c8U,
0x0a47a17cU, 0x0fe97c42U, 0x1ec9f884U, 0x00000000U,
0x86830980U, 0xed48322bU, 0x70ac1e11U, 0x724e6c5aU,
0xfffbfd0eU, 0x38560f85U, 0xd51e3daeU, 0x3927362dU,
0xd9640a0fU, 0xa621685cU, 0x54d19b5bU, 0x2e3a2436U,
0x67b10c0aU, 0xe70f9357U, 0x96d2b4eeU, 0x919e1b9bU,
0xc54f80c0U, 0x20a261dcU, 0x4b695a77U, 0x1a161c12U,
0xba0ae293U, 0x2ae5c0a0U, 0xe0433c22U, 0x171d121bU,
0x0d0b0e09U, 0xc7adf28bU, 0xa8b92db6U, 0xa9c8141eU,
0x198557f1U, 0x074caf75U, 0xddbbee99U, 0x60fda37fU,
0x269ff701U, 0xf5bc5c72U, 0x3bc54466U, 0x7e345bfbU,
0x29768b43U, 0xc6dccb23U, 0xfc68b6edU, 0xf163b8e4U,
0xdccad731U, 0x85104263U, 0x22401397U, 0x112084c6U,
0x247d854aU, 0x3df8d2bbU, 0x3211aef9U, 0xa16dc729U,
0x2f4b1d9eU, 0x30f3dcb2U, 0x52ec0d86U, 0xe3d077c1U,
0x166c2bb3U, 0xb999a970U, 0x48fa1194U, 0x642247e9U,
0x8cc4a8fcU, 0x3f1aa0f0U, 0x2cd8567dU, 0x90ef2233U,
0x4ec78749U, 0xd1c1d938U, 0xa2fe8ccaU, 0x0b3698d4U,
0x81cfa6f5U, 0xde28a57aU, 0x8e26dab7U, 0xbfa43fadU,
0x9de42c3aU, 0x920d5078U, 0xcc9b6a5fU, 0x4662547eU,
0x13c2f68dU, 0xb8e890d8U, 0xf75e2e39U, 0xaff582c3U,
0x80be9f5dU, 0x937c69d0U, 0x2da96fd5U, 0x12b3cf25U,
0x993bc8acU, 0x7da71018U, 0x636ee89cU, 0xbb7bdb3bU,
0x7809cd26U, 0x18f46e59U, 0xb701ec9aU, 0x9aa8834fU,
0x6e65e695U, 0xe67eaaffU, 0xcf0821bcU, 0xe8e6ef15U,
0x9bd9bae7U, 0x36ce4a6fU, 0x09d4ea9fU, 0x7cd629b0U,
0xb2af31a4U, 0x23312a3fU, 0x9430c6a5U, 0x66c035a2U,
0xbc37744eU, 0xcaa6fc82U, 0xd0b0e090U, 0xd81533a7U,
0x984af104U, 0xdaf741ecU, 0x500e7fcdU, 0xf62f1791U,
0xd68d764dU, 0xb04d43efU, 0x4d54ccaaU, 0x04dfe496U,
0xb5e39ed1U, 0x881b4c6aU, 0x1fb8c12cU, 0x517f4665U,
0xea049d5eU, 0x355d018cU, 0x7473fa87U, 0x412efb0bU,
0x1d5ab367U, 0xd25292dbU, 0x5633e910U, 0x47136dd6U,
0x618c9ad7U, 0x0c7a37a1U, 0x148e59f8U, 0x3c89eb13U,
0x27eecea9U, 0xc935b761U, 0xe5ede11cU, 0xb13c7a47U,
0xdf599cd2U, 0x733f55f2U, 0xce791814U, 0x37bf73c7U,
0xcdea53f7U, 0xaa5b5ffdU, 0x6f14df3dU, 0xdb867844U,
0xf381caafU, 0xc43eb968U, 0x342c3824U, 0x405fc2a3U,
0xc372161dU, 0x250cbce2U, 0x498b283cU, 0x9541ff0dU,
0x017139a8U, 0xb3de080cU, 0xe49cd8b4U, 0xc1906456U,
0x84617bcbU, 0xb670d532U, 0x5c74486cU, 0x5742d0b8U,
};
/*
 * Decryption T-table 3: Td3[x] = Si[x].[09, 0d, 0b, 0e] (see comment above) —
 * Td0 rotated right by three bytes.
 */
static const u32 Td3[256] = {
0xf4a75051U, 0x4165537eU, 0x17a4c31aU, 0x275e963aU,
0xab6bcb3bU, 0x9d45f11fU, 0xfa58abacU, 0xe303934bU,
0x30fa5520U, 0x766df6adU, 0xcc769188U, 0x024c25f5U,
0xe5d7fc4fU, 0x2acbd7c5U, 0x35448026U, 0x62a38fb5U,
0xb15a49deU, 0xba1b6725U, 0xea0e9845U, 0xfec0e15dU,
0x2f7502c3U, 0x4cf01281U, 0x4697a38dU, 0xd3f9c66bU,
0x8f5fe703U, 0x929c9515U, 0x6d7aebbfU, 0x5259da95U,
0xbe832dd4U, 0x7421d358U, 0xe0692949U, 0xc9c8448eU,
0xc2896a75U, 0x8e7978f4U, 0x583e6b99U, 0xb971dd27U,
0xe14fb6beU, 0x88ad17f0U, 0x20ac66c9U, 0xce3ab47dU,
0xdf4a1863U, 0x1a3182e5U, 0x51336097U, 0x537f4562U,
0x6477e0b1U, 0x6bae84bbU, 0x81a01cfeU, 0x082b94f9U,
0x48685870U, 0x45fd198fU, 0xde6c8794U, 0x7bf8b752U,
0x73d323abU, 0x4b02e272U, 0x1f8f57e3U, 0x55ab2a66U,
0xeb2807b2U, 0xb5c2032fU, 0xc57b9a86U, 0x3708a5d3U,
0x2887f230U, 0xbfa5b223U, 0x036aba02U, 0x16825cedU,
0xcf1c2b8aU, 0x79b492a7U, 0x07f2f0f3U, 0x69e2a14eU,
0xdaf4cd65U, 0x05bed506U, 0x34621fd1U, 0xa6fe8ac4U,
0x2e539d34U, 0xf355a0a2U, 0x8ae13205U, 0xf6eb75a4U,
0x83ec390bU, 0x60efaa40U, 0x719f065eU, 0x6e1051bdU,
0x218af93eU, 0xdd063d96U, 0x3e05aeddU, 0xe6bd464dU,
0x548db591U, 0xc45d0571U, 0x06d46f04U, 0x5015ff60U,
0x98fb2419U, 0xbde997d6U, 0x4043cc89U, 0xd99e7767U,
0xe842bdb0U, 0x898b8807U, 0x195b38e7U, 0xc8eedb79U,
0x7c0a47a1U, 0x420fe97cU, 0x841ec9f8U, 0x00000000U,
0x80868309U, 0x2bed4832U, 0x1170ac1eU, 0x5a724e6cU,
0x0efffbfdU, 0x8538560fU, 0xaed51e3dU, 0x2d392736U,
0x0fd9640aU, 0x5ca62168U, 0x5b54d19bU, 0x362e3a24U,
0x0a67b10cU, 0x57e70f93U, 0xee96d2b4U, 0x9b919e1bU,
0xc0c54f80U, 0xdc20a261U, 0x774b695aU, 0x121a161cU,
0x93ba0ae2U, 0xa02ae5c0U, 0x22e0433cU, 0x1b171d12U,
0x090d0b0eU, 0x8bc7adf2U, 0xb6a8b92dU, 0x1ea9c814U,
0xf1198557U, 0x75074cafU, 0x99ddbbeeU, 0x7f60fda3U,
0x01269ff7U, 0x72f5bc5cU, 0x663bc544U, 0xfb7e345bU,
0x4329768bU, 0x23c6dccbU, 0xedfc68b6U, 0xe4f163b8U,
0x31dccad7U, 0x63851042U, 0x97224013U, 0xc6112084U,
0x4a247d85U, 0xbb3df8d2U, 0xf93211aeU, 0x29a16dc7U,
0x9e2f4b1dU, 0xb230f3dcU, 0x8652ec0dU, 0xc1e3d077U,
0xb3166c2bU, 0x70b999a9U, 0x9448fa11U, 0xe9642247U,
0xfc8cc4a8U, 0xf03f1aa0U, 0x7d2cd856U, 0x3390ef22U,
0x494ec787U, 0x38d1c1d9U, 0xcaa2fe8cU, 0xd40b3698U,
0xf581cfa6U, 0x7ade28a5U, 0xb78e26daU, 0xadbfa43fU,
0x3a9de42cU, 0x78920d50U, 0x5fcc9b6aU, 0x7e466254U,
0x8d13c2f6U, 0xd8b8e890U, 0x39f75e2eU, 0xc3aff582U,
0x5d80be9fU, 0xd0937c69U, 0xd52da96fU, 0x2512b3cfU,
0xac993bc8U, 0x187da710U, 0x9c636ee8U, 0x3bbb7bdbU,
0x267809cdU, 0x5918f46eU, 0x9ab701ecU, 0x4f9aa883U,
0x956e65e6U, 0xffe67eaaU, 0xbccf0821U, 0x15e8e6efU,
0xe79bd9baU, 0x6f36ce4aU, 0x9f09d4eaU, 0xb07cd629U,
0xa4b2af31U, 0x3f23312aU, 0xa59430c6U, 0xa266c035U,
0x4ebc3774U, 0x82caa6fcU, 0x90d0b0e0U, 0xa7d81533U,
0x04984af1U, 0xecdaf741U, 0xcd500e7fU, 0x91f62f17U,
0x4dd68d76U, 0xefb04d43U, 0xaa4d54ccU, 0x9604dfe4U,
0xd1b5e39eU, 0x6a881b4cU, 0x2c1fb8c1U, 0x65517f46U,
0x5eea049dU, 0x8c355d01U, 0x877473faU, 0x0b412efbU,
0x671d5ab3U, 0xdbd25292U, 0x105633e9U, 0xd647136dU,
0xd7618c9aU, 0xa10c7a37U, 0xf8148e59U, 0x133c89ebU,
0xa927eeceU, 0x61c935b7U, 0x1ce5ede1U, 0x47b13c7aU,
0xd2df599cU, 0xf2733f55U, 0x14ce7918U, 0xc737bf73U,
0xf7cdea53U, 0xfdaa5b5fU, 0x3d6f14dfU, 0x44db8678U,
0xaff381caU, 0x68c43eb9U, 0x24342c38U, 0xa3405fc2U,
0x1dc37216U, 0xe2250cbcU, 0x3c498b28U, 0x0d9541ffU,
0xa8017139U, 0x0cb3de08U, 0xb4e49cd8U, 0x56c19064U,
0xcb84617bU, 0x32b670d5U, 0x6c5c7448U, 0xb85742d0U,
};
/*
 * Td4[x] = Si[x].[01] (see comment above): the plain inverse S-box, one byte
 * per entry, used for the final decryption round where InvMixColumns is not
 * applied.
 */
static const u8 Td4[256] = {
0x52U, 0x09U, 0x6aU, 0xd5U, 0x30U, 0x36U, 0xa5U, 0x38U,
0xbfU, 0x40U, 0xa3U, 0x9eU, 0x81U, 0xf3U, 0xd7U, 0xfbU,
0x7cU, 0xe3U, 0x39U, 0x82U, 0x9bU, 0x2fU, 0xffU, 0x87U,
0x34U, 0x8eU, 0x43U, 0x44U, 0xc4U, 0xdeU, 0xe9U, 0xcbU,
0x54U, 0x7bU, 0x94U, 0x32U, 0xa6U, 0xc2U, 0x23U, 0x3dU,
0xeeU, 0x4cU, 0x95U, 0x0bU, 0x42U, 0xfaU, 0xc3U, 0x4eU,
0x08U, 0x2eU, 0xa1U, 0x66U, 0x28U, 0xd9U, 0x24U, 0xb2U,
0x76U, 0x5bU, 0xa2U, 0x49U, 0x6dU, 0x8bU, 0xd1U, 0x25U,
0x72U, 0xf8U, 0xf6U, 0x64U, 0x86U, 0x68U, 0x98U, 0x16U,
0xd4U, 0xa4U, 0x5cU, 0xccU, 0x5dU, 0x65U, 0xb6U, 0x92U,
0x6cU, 0x70U, 0x48U, 0x50U, 0xfdU, 0xedU, 0xb9U, 0xdaU,
0x5eU, 0x15U, 0x46U, 0x57U, 0xa7U, 0x8dU, 0x9dU, 0x84U,
0x90U, 0xd8U, 0xabU, 0x00U, 0x8cU, 0xbcU, 0xd3U, 0x0aU,
0xf7U, 0xe4U, 0x58U, 0x05U, 0xb8U, 0xb3U, 0x45U, 0x06U,
0xd0U, 0x2cU, 0x1eU, 0x8fU, 0xcaU, 0x3fU, 0x0fU, 0x02U,
0xc1U, 0xafU, 0xbdU, 0x03U, 0x01U, 0x13U, 0x8aU, 0x6bU,
0x3aU, 0x91U, 0x11U, 0x41U, 0x4fU, 0x67U, 0xdcU, 0xeaU,
0x97U, 0xf2U, 0xcfU, 0xceU, 0xf0U, 0xb4U, 0xe6U, 0x73U,
0x96U, 0xacU, 0x74U, 0x22U, 0xe7U, 0xadU, 0x35U, 0x85U,
0xe2U, 0xf9U, 0x37U, 0xe8U, 0x1cU, 0x75U, 0xdfU, 0x6eU,
0x47U, 0xf1U, 0x1aU, 0x71U, 0x1dU, 0x29U, 0xc5U, 0x89U,
0x6fU, 0xb7U, 0x62U, 0x0eU, 0xaaU, 0x18U, 0xbeU, 0x1bU,
0xfcU, 0x56U, 0x3eU, 0x4bU, 0xc6U, 0xd2U, 0x79U, 0x20U,
0x9aU, 0xdbU, 0xc0U, 0xfeU, 0x78U, 0xcdU, 0x5aU, 0xf4U,
0x1fU, 0xddU, 0xa8U, 0x33U, 0x88U, 0x07U, 0xc7U, 0x31U,
0xb1U, 0x12U, 0x10U, 0x59U, 0x27U, 0x80U, 0xecU, 0x5fU,
0x60U, 0x51U, 0x7fU, 0xa9U, 0x19U, 0xb5U, 0x4aU, 0x0dU,
0x2dU, 0xe5U, 0x7aU, 0x9fU, 0x93U, 0xc9U, 0x9cU, 0xefU,
0xa0U, 0xe0U, 0x3bU, 0x4dU, 0xaeU, 0x2aU, 0xf5U, 0xb0U,
0xc8U, 0xebU, 0xbbU, 0x3cU, 0x83U, 0x53U, 0x99U, 0x61U,
0x17U, 0x2bU, 0x04U, 0x7eU, 0xbaU, 0x77U, 0xd6U, 0x26U,
0xe1U, 0x69U, 0x14U, 0x63U, 0x55U, 0x21U, 0x0cU, 0x7dU,
};
/*
 * Key-schedule round constants: rcon[i] = x^i in GF(2^8) placed in the top
 * byte of the word; indexed by the round counter in AES_set_encrypt_key.
 */
static const u32 rcon[] = {
0x01000000, 0x02000000, 0x04000000, 0x08000000,
0x10000000, 0x20000000, 0x40000000, 0x80000000,
0x1B000000, 0x36000000, /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
  1215. /**
  1216. * Expand the cipher key into the encryption key schedule.
  1217. */
  1218. int AES_set_encrypt_key(const unsigned char *userKey, const int bits,
  1219. AES_KEY *key)
  1220. {
  1221. u32 *rk;
  1222. int i = 0;
  1223. u32 temp;
  1224. if (!userKey || !key)
  1225. return -1;
  1226. if (bits != 128 && bits != 192 && bits != 256)
  1227. return -2;
  1228. rk = key->rd_key;
  1229. if (bits == 128)
  1230. key->rounds = 10;
  1231. else if (bits == 192)
  1232. key->rounds = 12;
  1233. else
  1234. key->rounds = 14;
  1235. rk[0] = GETU32(userKey );
  1236. rk[1] = GETU32(userKey + 4);
  1237. rk[2] = GETU32(userKey + 8);
  1238. rk[3] = GETU32(userKey + 12);
  1239. if (bits == 128) {
  1240. while (1) {
  1241. temp = rk[3];
  1242. rk[4] = rk[0] ^
  1243. (Te2[(temp >> 16) & 0xff] & 0xff000000) ^
  1244. (Te3[(temp >> 8) & 0xff] & 0x00ff0000) ^
  1245. (Te0[(temp ) & 0xff] & 0x0000ff00) ^
  1246. (Te1[(temp >> 24) ] & 0x000000ff) ^
  1247. rcon[i];
  1248. rk[5] = rk[1] ^ rk[4];
  1249. rk[6] = rk[2] ^ rk[5];
  1250. rk[7] = rk[3] ^ rk[6];
  1251. if (++i == 10) {
  1252. return 0;
  1253. }
  1254. rk += 4;
  1255. }
  1256. }
  1257. rk[4] = GETU32(userKey + 16);
  1258. rk[5] = GETU32(userKey + 20);
  1259. if (bits == 192) {
  1260. while (1) {
  1261. temp = rk[ 5];
  1262. rk[ 6] = rk[ 0] ^
  1263. (Te2[(temp >> 16) & 0xff] & 0xff000000) ^
  1264. (Te3[(temp >> 8) & 0xff] & 0x00ff0000) ^
  1265. (Te0[(temp ) & 0xff] & 0x0000ff00) ^
  1266. (Te1[(temp >> 24) ] & 0x000000ff) ^
  1267. rcon[i];
  1268. rk[ 7] = rk[ 1] ^ rk[ 6];
  1269. rk[ 8] = rk[ 2] ^ rk[ 7];
  1270. rk[ 9] = rk[ 3] ^ rk[ 8];
  1271. if (++i == 8) {
  1272. return 0;
  1273. }
  1274. rk[10] = rk[ 4] ^ rk[ 9];
  1275. rk[11] = rk[ 5] ^ rk[10];
  1276. rk += 6;
  1277. }
  1278. }
  1279. rk[6] = GETU32(userKey + 24);
  1280. rk[7] = GETU32(userKey + 28);
  1281. if (bits == 256) {
  1282. while (1) {
  1283. temp = rk[ 7];
  1284. rk[ 8] = rk[ 0] ^
  1285. (Te2[(temp >> 16) & 0xff] & 0xff000000) ^
  1286. (Te3[(temp >> 8) & 0xff] & 0x00ff0000) ^
  1287. (Te0[(temp ) & 0xff] & 0x0000ff00) ^
  1288. (Te1[(temp >> 24) ] & 0x000000ff) ^
  1289. rcon[i];
  1290. rk[ 9] = rk[ 1] ^ rk[ 8];
  1291. rk[10] = rk[ 2] ^ rk[ 9];
  1292. rk[11] = rk[ 3] ^ rk[10];
  1293. if (++i == 7) {
  1294. return 0;
  1295. }
  1296. temp = rk[11];
  1297. rk[12] = rk[ 4] ^
  1298. (Te2[(temp >> 24) ] & 0xff000000) ^
  1299. (Te3[(temp >> 16) & 0xff] & 0x00ff0000) ^
  1300. (Te0[(temp >> 8) & 0xff] & 0x0000ff00) ^
  1301. (Te1[(temp ) & 0xff] & 0x000000ff);
  1302. rk[13] = rk[ 5] ^ rk[12];
  1303. rk[14] = rk[ 6] ^ rk[13];
  1304. rk[15] = rk[ 7] ^ rk[14];
  1305. rk += 8;
  1306. }
  1307. }
  1308. return 0;
  1309. }
  1310. /**
  1311. * Expand the cipher key into the decryption key schedule.
  1312. */
  1313. int AES_set_decrypt_key(const unsigned char *userKey, const int bits,
  1314. AES_KEY *key)
  1315. {
  1316. u32 *rk;
  1317. int i, j, status;
  1318. u32 temp;
  1319. /* first, start with an encryption schedule */
  1320. status = AES_set_encrypt_key(userKey, bits, key);
  1321. if (status < 0)
  1322. return status;
  1323. rk = key->rd_key;
  1324. /* invert the order of the round keys: */
  1325. for (i = 0, j = 4*(key->rounds); i < j; i += 4, j -= 4) {
  1326. temp = rk[i ]; rk[i ] = rk[j ]; rk[j ] = temp;
  1327. temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp;
  1328. temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp;
  1329. temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp;
  1330. }
  1331. /* apply the inverse MixColumn transform to all round keys but the first and the last: */
  1332. for (i = 1; i < (key->rounds); i++) {
  1333. rk += 4;
  1334. rk[0] =
  1335. Td0[Te1[(rk[0] >> 24) ] & 0xff] ^
  1336. Td1[Te1[(rk[0] >> 16) & 0xff] & 0xff] ^
  1337. Td2[Te1[(rk[0] >> 8) & 0xff] & 0xff] ^
  1338. Td3[Te1[(rk[0] ) & 0xff] & 0xff];
  1339. rk[1] =
  1340. Td0[Te1[(rk[1] >> 24) ] & 0xff] ^
  1341. Td1[Te1[(rk[1] >> 16) & 0xff] & 0xff] ^
  1342. Td2[Te1[(rk[1] >> 8) & 0xff] & 0xff] ^
  1343. Td3[Te1[(rk[1] ) & 0xff] & 0xff];
  1344. rk[2] =
  1345. Td0[Te1[(rk[2] >> 24) ] & 0xff] ^
  1346. Td1[Te1[(rk[2] >> 16) & 0xff] & 0xff] ^
  1347. Td2[Te1[(rk[2] >> 8) & 0xff] & 0xff] ^
  1348. Td3[Te1[(rk[2] ) & 0xff] & 0xff];
  1349. rk[3] =
  1350. Td0[Te1[(rk[3] >> 24) ] & 0xff] ^
  1351. Td1[Te1[(rk[3] >> 16) & 0xff] & 0xff] ^
  1352. Td2[Te1[(rk[3] >> 8) & 0xff] & 0xff] ^
  1353. Td3[Te1[(rk[3] ) & 0xff] & 0xff];
  1354. }
  1355. return 0;
  1356. }
  1357. /*
  1358. * Encrypt a single block
  1359. * in and out can overlap
  1360. */
  1361. void AES_encrypt(const unsigned char *in, unsigned char *out,
  1362. const AES_KEY *key) {
  1363. const u32 *rk;
  1364. u32 s0, s1, s2, s3, t0, t1, t2, t3;
  1365. #ifndef FULL_UNROLL
  1366. int r;
  1367. #endif /* ?FULL_UNROLL */
  1368. assert(in && out && key);
  1369. rk = key->rd_key;
  1370. /*
  1371. * map byte array block to cipher state
  1372. * and add initial round key:
  1373. */
  1374. s0 = GETU32(in ) ^ rk[0];
  1375. s1 = GETU32(in + 4) ^ rk[1];
  1376. s2 = GETU32(in + 8) ^ rk[2];
  1377. s3 = GETU32(in + 12) ^ rk[3];
  1378. #ifdef FULL_UNROLL
  1379. /* round 1: */
  1380. t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[ 4];
  1381. t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[ 5];
  1382. t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[ 6];
  1383. t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[ 7];
  1384. /* round 2: */
  1385. s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[ 8];
  1386. s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[ 9];
  1387. s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[10];
  1388. s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[11];
  1389. /* round 3: */
  1390. t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[12];
  1391. t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[13];
  1392. t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[14];
  1393. t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[15];
  1394. /* round 4: */
  1395. s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[16];
  1396. s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[17];
  1397. s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[18];
  1398. s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[19];
  1399. /* round 5: */
  1400. t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[20];
  1401. t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[21];
  1402. t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[22];
  1403. t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[23];
  1404. /* round 6: */
  1405. s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[24];
  1406. s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[25];
  1407. s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[26];
  1408. s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[27];
  1409. /* round 7: */
  1410. t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[28];
  1411. t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[29];
  1412. t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[30];
  1413. t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[31];
  1414. /* round 8: */
  1415. s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[32];
  1416. s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[33];
  1417. s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[34];
  1418. s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[35];
  1419. /* round 9: */
  1420. t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[36];
  1421. t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[37];
  1422. t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[38];
  1423. t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[39];
  1424. if (key->rounds > 10) {
  1425. /* round 10: */
  1426. s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[40];
  1427. s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[41];
  1428. s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[42];
  1429. s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[43];
  1430. /* round 11: */
  1431. t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[44];
  1432. t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[45];
  1433. t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[46];
  1434. t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[47];
  1435. if (key->rounds > 12) {
  1436. /* round 12: */
  1437. s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[48];
  1438. s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[49];
  1439. s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[50];
  1440. s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[51];
  1441. /* round 13: */
  1442. t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[52];
  1443. t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[53];
  1444. t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[54];
  1445. t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[55];
  1446. }
  1447. }
  1448. rk += key->rounds << 2;
  1449. #else /* !FULL_UNROLL */
  1450. /*
  1451. * Nr - 1 full rounds:
  1452. */
  1453. r = key->rounds >> 1;
  1454. for (;;) {
  1455. t0 =
  1456. Te0[(s0 >> 24) ] ^
  1457. Te1[(s1 >> 16) & 0xff] ^
  1458. Te2[(s2 >> 8) & 0xff] ^
  1459. Te3[(s3 ) & 0xff] ^
  1460. rk[4];
  1461. t1 =
  1462. Te0[(s1 >> 24) ] ^
  1463. Te1[(s2 >> 16) & 0xff] ^
  1464. Te2[(s3 >> 8) & 0xff] ^
  1465. Te3[(s0 ) & 0xff] ^
  1466. rk[5];
  1467. t2 =
  1468. Te0[(s2 >> 24) ] ^
  1469. Te1[(s3 >> 16) & 0xff] ^
  1470. Te2[(s0 >> 8) & 0xff] ^
  1471. Te3[(s1 ) & 0xff] ^
  1472. rk[6];
  1473. t3 =
  1474. Te0[(s3 >> 24) ] ^
  1475. Te1[(s0 >> 16) & 0xff] ^
  1476. Te2[(s1 >> 8) & 0xff] ^
  1477. Te3[(s2 ) & 0xff] ^
  1478. rk[7];
  1479. rk += 8;
  1480. if (--r == 0) {
  1481. break;
  1482. }
  1483. s0 =
  1484. Te0[(t0 >> 24) ] ^
  1485. Te1[(t1 >> 16) & 0xff] ^
  1486. Te2[(t2 >> 8) & 0xff] ^
  1487. Te3[(t3 ) & 0xff] ^
  1488. rk[0];
  1489. s1 =
  1490. Te0[(t1 >> 24) ] ^
  1491. Te1[(t2 >> 16) & 0xff] ^
  1492. Te2[(t3 >> 8) & 0xff] ^
  1493. Te3[(t0 ) & 0xff] ^
  1494. rk[1];
  1495. s2 =
  1496. Te0[(t2 >> 24) ] ^
  1497. Te1[(t3 >> 16) & 0xff] ^
  1498. Te2[(t0 >> 8) & 0xff] ^
  1499. Te3[(t1 ) & 0xff] ^
  1500. rk[2];
  1501. s3 =
  1502. Te0[(t3 >> 24) ] ^
  1503. Te1[(t0 >> 16) & 0xff] ^
  1504. Te2[(t1 >> 8) & 0xff] ^
  1505. Te3[(t2 ) & 0xff] ^
  1506. rk[3];
  1507. }
  1508. #endif /* ?FULL_UNROLL */
  1509. /*
  1510. * apply last round and
  1511. * map cipher state to byte array block:
  1512. */
  1513. s0 =
  1514. (Te2[(t0 >> 24) ] & 0xff000000) ^
  1515. (Te3[(t1 >> 16) & 0xff] & 0x00ff0000) ^
  1516. (Te0[(t2 >> 8) & 0xff] & 0x0000ff00) ^
  1517. (Te1[(t3 ) & 0xff] & 0x000000ff) ^
  1518. rk[0];
  1519. PUTU32(out , s0);
  1520. s1 =
  1521. (Te2[(t1 >> 24) ] & 0xff000000) ^
  1522. (Te3[(t2 >> 16) & 0xff] & 0x00ff0000) ^
  1523. (Te0[(t3 >> 8) & 0xff] & 0x0000ff00) ^
  1524. (Te1[(t0 ) & 0xff] & 0x000000ff) ^
  1525. rk[1];
  1526. PUTU32(out + 4, s1);
  1527. s2 =
  1528. (Te2[(t2 >> 24) ] & 0xff000000) ^
  1529. (Te3[(t3 >> 16) & 0xff] & 0x00ff0000) ^
  1530. (Te0[(t0 >> 8) & 0xff] & 0x0000ff00) ^
  1531. (Te1[(t1 ) & 0xff] & 0x000000ff) ^
  1532. rk[2];
  1533. PUTU32(out + 8, s2);
  1534. s3 =
  1535. (Te2[(t3 >> 24) ] & 0xff000000) ^
  1536. (Te3[(t0 >> 16) & 0xff] & 0x00ff0000) ^
  1537. (Te0[(t1 >> 8) & 0xff] & 0x0000ff00) ^
  1538. (Te1[(t2 ) & 0xff] & 0x000000ff) ^
  1539. rk[3];
  1540. PUTU32(out + 12, s3);
  1541. }
  1542. /*
  1543. * Decrypt a single block
  1544. * in and out can overlap
  1545. */
  1546. void AES_decrypt(const unsigned char *in, unsigned char *out,
  1547. const AES_KEY *key)
  1548. {
  1549. const u32 *rk;
  1550. u32 s0, s1, s2, s3, t0, t1, t2, t3;
  1551. #ifndef FULL_UNROLL
  1552. int r;
  1553. #endif /* ?FULL_UNROLL */
  1554. assert(in && out && key);
  1555. rk = key->rd_key;
  1556. /*
  1557. * map byte array block to cipher state
  1558. * and add initial round key:
  1559. */
  1560. s0 = GETU32(in ) ^ rk[0];
  1561. s1 = GETU32(in + 4) ^ rk[1];
  1562. s2 = GETU32(in + 8) ^ rk[2];
  1563. s3 = GETU32(in + 12) ^ rk[3];
  1564. #ifdef FULL_UNROLL
  1565. /* round 1: */
  1566. t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[ 4];
  1567. t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[ 5];
  1568. t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[ 6];
  1569. t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[ 7];
  1570. /* round 2: */
  1571. s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[ 8];
  1572. s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[ 9];
  1573. s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[10];
  1574. s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[11];
  1575. /* round 3: */
  1576. t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[12];
  1577. t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[13];
  1578. t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[14];
  1579. t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[15];
  1580. /* round 4: */
  1581. s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[16];
  1582. s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[17];
  1583. s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[18];
  1584. s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[19];
  1585. /* round 5: */
  1586. t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[20];
  1587. t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[21];
  1588. t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[22];
  1589. t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[23];
  1590. /* round 6: */
  1591. s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[24];
  1592. s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[25];
  1593. s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[26];
  1594. s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[27];
  1595. /* round 7: */
  1596. t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[28];
  1597. t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[29];
  1598. t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[30];
  1599. t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[31];
  1600. /* round 8: */
  1601. s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[32];
  1602. s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[33];
  1603. s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[34];
  1604. s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[35];
  1605. /* round 9: */
  1606. t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[36];
  1607. t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[37];
  1608. t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[38];
  1609. t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[39];
  1610. if (key->rounds > 10) {
  1611. /* round 10: */
  1612. s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[40];
  1613. s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[41];
  1614. s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[42];
  1615. s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[43];
  1616. /* round 11: */
  1617. t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[44];
  1618. t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[45];
  1619. t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[46];
  1620. t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[47];
  1621. if (key->rounds > 12) {
  1622. /* round 12: */
  1623. s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[48];
  1624. s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[49];
  1625. s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[50];
  1626. s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[51];
  1627. /* round 13: */
  1628. t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[52];
  1629. t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[53];
  1630. t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[54];
  1631. t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[55];
  1632. }
  1633. }
  1634. rk += key->rounds << 2;
  1635. #else /* !FULL_UNROLL */
  1636. /*
  1637. * Nr - 1 full rounds:
  1638. */
  1639. r = key->rounds >> 1;
  1640. for (;;) {
  1641. t0 =
  1642. Td0[(s0 >> 24) ] ^
  1643. Td1[(s3 >> 16) & 0xff] ^
  1644. Td2[(s2 >> 8) & 0xff] ^
  1645. Td3[(s1 ) & 0xff] ^
  1646. rk[4];
  1647. t1 =
  1648. Td0[(s1 >> 24) ] ^
  1649. Td1[(s0 >> 16) & 0xff] ^
  1650. Td2[(s3 >> 8) & 0xff] ^
  1651. Td3[(s2 ) & 0xff] ^
  1652. rk[5];
  1653. t2 =
  1654. Td0[(s2 >> 24) ] ^
  1655. Td1[(s1 >> 16) & 0xff] ^
  1656. Td2[(s0 >> 8) & 0xff] ^
  1657. Td3[(s3 ) & 0xff] ^
  1658. rk[6];
  1659. t3 =
  1660. Td0[(s3 >> 24) ] ^
  1661. Td1[(s2 >> 16) & 0xff] ^
  1662. Td2[(s1 >> 8) & 0xff] ^
  1663. Td3[(s0 ) & 0xff] ^
  1664. rk[7];
  1665. rk += 8;
  1666. if (--r == 0) {
  1667. break;
  1668. }
  1669. s0 =
  1670. Td0[(t0 >> 24) ] ^
  1671. Td1[(t3 >> 16) & 0xff] ^
  1672. Td2[(t2 >> 8) & 0xff] ^
  1673. Td3[(t1 ) & 0xff] ^
  1674. rk[0];
  1675. s1 =
  1676. Td0[(t1 >> 24) ] ^
  1677. Td1[(t0 >> 16) & 0xff] ^
  1678. Td2[(t3 >> 8) & 0xff] ^
  1679. Td3[(t2 ) & 0xff] ^
  1680. rk[1];
  1681. s2 =
  1682. Td0[(t2 >> 24) ] ^
  1683. Td1[(t1 >> 16) & 0xff] ^
  1684. Td2[(t0 >> 8) & 0xff] ^
  1685. Td3[(t3 ) & 0xff] ^
  1686. rk[2];
  1687. s3 =
  1688. Td0[(t3 >> 24) ] ^
  1689. Td1[(t2 >> 16) & 0xff] ^
  1690. Td2[(t1 >> 8) & 0xff] ^
  1691. Td3[(t0 ) & 0xff] ^
  1692. rk[3];
  1693. }
  1694. #endif /* ?FULL_UNROLL */
  1695. /*
  1696. * apply last round and
  1697. * map cipher state to byte array block:
  1698. */
  1699. s0 =
  1700. ((u32)Td4[(t0 >> 24) ] << 24) ^
  1701. ((u32)Td4[(t3 >> 16) & 0xff] << 16) ^
  1702. ((u32)Td4[(t2 >> 8) & 0xff] << 8) ^
  1703. ((u32)Td4[(t1 ) & 0xff]) ^
  1704. rk[0];
  1705. PUTU32(out , s0);
  1706. s1 =
  1707. ((u32)Td4[(t1 >> 24) ] << 24) ^
  1708. ((u32)Td4[(t0 >> 16) & 0xff] << 16) ^
  1709. ((u32)Td4[(t3 >> 8) & 0xff] << 8) ^
  1710. ((u32)Td4[(t2 ) & 0xff]) ^
  1711. rk[1];
  1712. PUTU32(out + 4, s1);
  1713. s2 =
  1714. ((u32)Td4[(t2 >> 24) ] << 24) ^
  1715. ((u32)Td4[(t1 >> 16) & 0xff] << 16) ^
  1716. ((u32)Td4[(t0 >> 8) & 0xff] << 8) ^
  1717. ((u32)Td4[(t3 ) & 0xff]) ^
  1718. rk[2];
  1719. PUTU32(out + 8, s2);
  1720. s3 =
  1721. ((u32)Td4[(t3 >> 24) ] << 24) ^
  1722. ((u32)Td4[(t2 >> 16) & 0xff] << 16) ^
  1723. ((u32)Td4[(t1 >> 8) & 0xff] << 8) ^
  1724. ((u32)Td4[(t0 ) & 0xff]) ^
  1725. rk[3];
  1726. PUTU32(out + 12, s3);
  1727. }
  1728. #else /* AES_ASM */
/*
 * Te4[x] = forward S-box (SubBytes) value of x, one byte per entry.
 * Used by the key schedule below, which needs plain byte substitution
 * rather than merged round tables.
 */
static const u8 Te4[256] = {
    0x63U, 0x7cU, 0x77U, 0x7bU, 0xf2U, 0x6bU, 0x6fU, 0xc5U,
    0x30U, 0x01U, 0x67U, 0x2bU, 0xfeU, 0xd7U, 0xabU, 0x76U,
    0xcaU, 0x82U, 0xc9U, 0x7dU, 0xfaU, 0x59U, 0x47U, 0xf0U,
    0xadU, 0xd4U, 0xa2U, 0xafU, 0x9cU, 0xa4U, 0x72U, 0xc0U,
    0xb7U, 0xfdU, 0x93U, 0x26U, 0x36U, 0x3fU, 0xf7U, 0xccU,
    0x34U, 0xa5U, 0xe5U, 0xf1U, 0x71U, 0xd8U, 0x31U, 0x15U,
    0x04U, 0xc7U, 0x23U, 0xc3U, 0x18U, 0x96U, 0x05U, 0x9aU,
    0x07U, 0x12U, 0x80U, 0xe2U, 0xebU, 0x27U, 0xb2U, 0x75U,
    0x09U, 0x83U, 0x2cU, 0x1aU, 0x1bU, 0x6eU, 0x5aU, 0xa0U,
    0x52U, 0x3bU, 0xd6U, 0xb3U, 0x29U, 0xe3U, 0x2fU, 0x84U,
    0x53U, 0xd1U, 0x00U, 0xedU, 0x20U, 0xfcU, 0xb1U, 0x5bU,
    0x6aU, 0xcbU, 0xbeU, 0x39U, 0x4aU, 0x4cU, 0x58U, 0xcfU,
    0xd0U, 0xefU, 0xaaU, 0xfbU, 0x43U, 0x4dU, 0x33U, 0x85U,
    0x45U, 0xf9U, 0x02U, 0x7fU, 0x50U, 0x3cU, 0x9fU, 0xa8U,
    0x51U, 0xa3U, 0x40U, 0x8fU, 0x92U, 0x9dU, 0x38U, 0xf5U,
    0xbcU, 0xb6U, 0xdaU, 0x21U, 0x10U, 0xffU, 0xf3U, 0xd2U,
    0xcdU, 0x0cU, 0x13U, 0xecU, 0x5fU, 0x97U, 0x44U, 0x17U,
    0xc4U, 0xa7U, 0x7eU, 0x3dU, 0x64U, 0x5dU, 0x19U, 0x73U,
    0x60U, 0x81U, 0x4fU, 0xdcU, 0x22U, 0x2aU, 0x90U, 0x88U,
    0x46U, 0xeeU, 0xb8U, 0x14U, 0xdeU, 0x5eU, 0x0bU, 0xdbU,
    0xe0U, 0x32U, 0x3aU, 0x0aU, 0x49U, 0x06U, 0x24U, 0x5cU,
    0xc2U, 0xd3U, 0xacU, 0x62U, 0x91U, 0x95U, 0xe4U, 0x79U,
    0xe7U, 0xc8U, 0x37U, 0x6dU, 0x8dU, 0xd5U, 0x4eU, 0xa9U,
    0x6cU, 0x56U, 0xf4U, 0xeaU, 0x65U, 0x7aU, 0xaeU, 0x08U,
    0xbaU, 0x78U, 0x25U, 0x2eU, 0x1cU, 0xa6U, 0xb4U, 0xc6U,
    0xe8U, 0xddU, 0x74U, 0x1fU, 0x4bU, 0xbdU, 0x8bU, 0x8aU,
    0x70U, 0x3eU, 0xb5U, 0x66U, 0x48U, 0x03U, 0xf6U, 0x0eU,
    0x61U, 0x35U, 0x57U, 0xb9U, 0x86U, 0xc1U, 0x1dU, 0x9eU,
    0xe1U, 0xf8U, 0x98U, 0x11U, 0x69U, 0xd9U, 0x8eU, 0x94U,
    0x9bU, 0x1eU, 0x87U, 0xe9U, 0xceU, 0x55U, 0x28U, 0xdfU,
    0x8cU, 0xa1U, 0x89U, 0x0dU, 0xbfU, 0xe6U, 0x42U, 0x68U,
    0x41U, 0x99U, 0x2dU, 0x0fU, 0xb0U, 0x54U, 0xbbU, 0x16U
};
/*
 * Round-constant words: rcon[i] holds {02}^i in GF(2^8) placed in the
 * most significant byte (the low three bytes are always zero).
 */
static const u32 rcon[] = {
    0x01000000, 0x02000000, 0x04000000, 0x08000000,
    0x10000000, 0x20000000, 0x40000000, 0x80000000,
    0x1B000000, 0x36000000, /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
  1768. /**
  1769. * Expand the cipher key into the encryption key schedule.
  1770. */
  1771. int AES_set_encrypt_key(const unsigned char *userKey, const int bits,
  1772. AES_KEY *key)
  1773. {
  1774. u32 *rk;
  1775. int i = 0;
  1776. u32 temp;
  1777. if (!userKey || !key)
  1778. return -1;
  1779. if (bits != 128 && bits != 192 && bits != 256)
  1780. return -2;
  1781. rk = key->rd_key;
  1782. if (bits == 128)
  1783. key->rounds = 10;
  1784. else if (bits == 192)
  1785. key->rounds = 12;
  1786. else
  1787. key->rounds = 14;
  1788. rk[0] = GETU32(userKey );
  1789. rk[1] = GETU32(userKey + 4);
  1790. rk[2] = GETU32(userKey + 8);
  1791. rk[3] = GETU32(userKey + 12);
  1792. if (bits == 128) {
  1793. while (1) {
  1794. temp = rk[3];
  1795. rk[4] = rk[0] ^
  1796. ((u32)Te4[(temp >> 16) & 0xff] << 24) ^
  1797. ((u32)Te4[(temp >> 8) & 0xff] << 16) ^
  1798. ((u32)Te4[(temp ) & 0xff] << 8) ^
  1799. ((u32)Te4[(temp >> 24) ]) ^
  1800. rcon[i];
  1801. rk[5] = rk[1] ^ rk[4];
  1802. rk[6] = rk[2] ^ rk[5];
  1803. rk[7] = rk[3] ^ rk[6];
  1804. if (++i == 10) {
  1805. return 0;
  1806. }
  1807. rk += 4;
  1808. }
  1809. }
  1810. rk[4] = GETU32(userKey + 16);
  1811. rk[5] = GETU32(userKey + 20);
  1812. if (bits == 192) {
  1813. while (1) {
  1814. temp = rk[ 5];
  1815. rk[ 6] = rk[ 0] ^
  1816. ((u32)Te4[(temp >> 16) & 0xff] << 24) ^
  1817. ((u32)Te4[(temp >> 8) & 0xff] << 16) ^
  1818. ((u32)Te4[(temp ) & 0xff] << 8) ^
  1819. ((u32)Te4[(temp >> 24) ]) ^
  1820. rcon[i];
  1821. rk[ 7] = rk[ 1] ^ rk[ 6];
  1822. rk[ 8] = rk[ 2] ^ rk[ 7];
  1823. rk[ 9] = rk[ 3] ^ rk[ 8];
  1824. if (++i == 8) {
  1825. return 0;
  1826. }
  1827. rk[10] = rk[ 4] ^ rk[ 9];
  1828. rk[11] = rk[ 5] ^ rk[10];
  1829. rk += 6;
  1830. }
  1831. }
  1832. rk[6] = GETU32(userKey + 24);
  1833. rk[7] = GETU32(userKey + 28);
  1834. if (bits == 256) {
  1835. while (1) {
  1836. temp = rk[ 7];
  1837. rk[ 8] = rk[ 0] ^
  1838. ((u32)Te4[(temp >> 16) & 0xff] << 24) ^
  1839. ((u32)Te4[(temp >> 8) & 0xff] << 16) ^
  1840. ((u32)Te4[(temp ) & 0xff] << 8) ^
  1841. ((u32)Te4[(temp >> 24) ]) ^
  1842. rcon[i];
  1843. rk[ 9] = rk[ 1] ^ rk[ 8];
  1844. rk[10] = rk[ 2] ^ rk[ 9];
  1845. rk[11] = rk[ 3] ^ rk[10];
  1846. if (++i == 7) {
  1847. return 0;
  1848. }
  1849. temp = rk[11];
  1850. rk[12] = rk[ 4] ^
  1851. ((u32)Te4[(temp >> 24) ] << 24) ^
  1852. ((u32)Te4[(temp >> 16) & 0xff] << 16) ^
  1853. ((u32)Te4[(temp >> 8) & 0xff] << 8) ^
  1854. ((u32)Te4[(temp ) & 0xff]);
  1855. rk[13] = rk[ 5] ^ rk[12];
  1856. rk[14] = rk[ 6] ^ rk[13];
  1857. rk[15] = rk[ 7] ^ rk[14];
  1858. rk += 8;
  1859. }
  1860. }
  1861. return 0;
  1862. }
  1863. /**
  1864. * Expand the cipher key into the decryption key schedule.
  1865. */
  1866. int AES_set_decrypt_key(const unsigned char *userKey, const int bits,
  1867. AES_KEY *key)
  1868. {
  1869. u32 *rk;
  1870. int i, j, status;
  1871. u32 temp;
  1872. /* first, start with an encryption schedule */
  1873. status = AES_set_encrypt_key(userKey, bits, key);
  1874. if (status < 0)
  1875. return status;
  1876. rk = key->rd_key;
  1877. /* invert the order of the round keys: */
  1878. for (i = 0, j = 4*(key->rounds); i < j; i += 4, j -= 4) {
  1879. temp = rk[i ]; rk[i ] = rk[j ]; rk[j ] = temp;
  1880. temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp;
  1881. temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp;
  1882. temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp;
  1883. }
  1884. /* apply the inverse MixColumn transform to all round keys but the first and the last: */
  1885. for (i = 1; i < (key->rounds); i++) {
  1886. rk += 4;
  1887. for (j = 0; j < 4; j++) {
  1888. u32 tp1, tp2, tp4, tp8, tp9, tpb, tpd, tpe, m;
  1889. tp1 = rk[j];
  1890. m = tp1 & 0x80808080;
  1891. tp2 = ((tp1 & 0x7f7f7f7f) << 1) ^
  1892. ((m - (m >> 7)) & 0x1b1b1b1b);
  1893. m = tp2 & 0x80808080;
  1894. tp4 = ((tp2 & 0x7f7f7f7f) << 1) ^
  1895. ((m - (m >> 7)) & 0x1b1b1b1b);
  1896. m = tp4 & 0x80808080;
  1897. tp8 = ((tp4 & 0x7f7f7f7f) << 1) ^
  1898. ((m - (m >> 7)) & 0x1b1b1b1b);
  1899. tp9 = tp8 ^ tp1;
  1900. tpb = tp9 ^ tp2;
  1901. tpd = tp9 ^ tp4;
  1902. tpe = tp8 ^ tp4 ^ tp2;
  1903. #if defined(ROTATE)
  1904. rk[j] = tpe ^ ROTATE(tpd,16) ^
  1905. ROTATE(tp9,24) ^ ROTATE(tpb,8);
  1906. #else
  1907. rk[j] = tpe ^ (tpd >> 16) ^ (tpd << 16) ^
  1908. (tp9 >> 8) ^ (tp9 << 24) ^
  1909. (tpb >> 24) ^ (tpb << 8);
  1910. #endif
  1911. }
  1912. }
  1913. return 0;
  1914. }
  1915. #endif /* AES_ASM */