aes-x86_64.pl 75 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902
  1. #! /usr/bin/env perl
  2. # Copyright 2005-2016 The OpenSSL Project Authors. All Rights Reserved.
  3. #
  4. # Licensed under the OpenSSL license (the "License"). You may not use
  5. # this file except in compliance with the License. You can obtain a copy
  6. # in the file LICENSE in the source distribution or at
  7. # https://www.openssl.org/source/license.html
  8. #
  9. # ====================================================================
  10. # Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
  11. # project. The module is, however, dual licensed under OpenSSL and
  12. # CRYPTOGAMS licenses depending on where you obtain it. For further
  13. # details see http://www.openssl.org/~appro/cryptogams/.
  14. # ====================================================================
  15. #
  16. # Version 2.1.
  17. #
  18. # aes-*-cbc benchmarks are improved by >70% [compared to gcc 3.3.2 on
  19. # Opteron 240 CPU] plus all the bells-n-whistles from 32-bit version
  20. # [you'll notice a lot of resemblance], such as compressed S-boxes
  21. # in little-endian byte order, prefetch of these tables in CBC mode,
  22. # as well as avoiding L1 cache aliasing between stack frame and key
  23. # schedule and already mentioned tables, compressed Td4...
  24. #
  25. # Performance in number of cycles per processed byte for 128-bit key:
  26. #
  27. # ECB encrypt ECB decrypt CBC large chunk
  28. # AMD64 33 43 13.0
  29. # EM64T 38 56 18.6(*)
  30. # Core 2 30 42 14.5(*)
  31. # Atom 65 86 32.1(*)
  32. #
  33. # (*) with hyper-threading off
  34. $flavour = shift;
  35. $output = shift;
  36. if ($flavour =~ /\./) { $output = $flavour; undef $flavour; }
  37. $win64=0; $win64=1 if ($flavour =~ /[nm]asm|mingw64/ || $output =~ /\.asm$/);
  38. $0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
  39. ( $xlate="${dir}x86_64-xlate.pl" and -f $xlate ) or
  40. ( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or
  41. die "can't locate x86_64-xlate.pl";
  42. open OUT,"| \"$^X\" \"$xlate\" $flavour \"$output\"";
  43. *STDOUT=*OUT;
  44. $verticalspin=1; # unlike 32-bit version $verticalspin performs
  45. # ~15% better on both AMD and Intel cores
  46. $speed_limit=512; # see aes-586.pl for details
  47. $code=".text\n";
  48. $s0="%eax";
  49. $s1="%ebx";
  50. $s2="%ecx";
  51. $s3="%edx";
  52. $acc0="%esi"; $mask80="%rsi";
  53. $acc1="%edi"; $maskfe="%rdi";
  54. $acc2="%ebp"; $mask1b="%rbp";
  55. $inp="%r8";
  56. $out="%r9";
  57. $t0="%r10d";
  58. $t1="%r11d";
  59. $t2="%r12d";
  60. $rnds="%r13d";
  61. $sbox="%r14";
  62. $key="%r15";
  63. sub hi() { my $r=shift; $r =~ s/%[er]([a-d])x/%\1h/; $r; }
  64. sub lo() { my $r=shift; $r =~ s/%[er]([a-d])x/%\1l/;
  65. $r =~ s/%[er]([sd]i)/%\1l/;
  66. $r =~ s/%(r[0-9]+)[d]?/%\1b/; $r; }
  67. sub LO() { my $r=shift; $r =~ s/%r([a-z]+)/%e\1/;
  68. $r =~ s/%r([0-9]+)/%r\1d/; $r; }
  69. sub _data_word()
  70. { my $i;
  71. while(defined($i=shift)) { $code.=sprintf".long\t0x%08x,0x%08x\n",$i,$i; }
  72. }
  73. sub data_word()
  74. { my $i;
  75. my $last=pop(@_);
  76. $code.=".long\t";
  77. while(defined($i=shift)) { $code.=sprintf"0x%08x,",$i; }
  78. $code.=sprintf"0x%08x\n",$last;
  79. }
  80. sub data_byte()
  81. { my $i;
  82. my $last=pop(@_);
  83. $code.=".byte\t";
  84. while(defined($i=shift)) { $code.=sprintf"0x%02x,",$i&0xff; }
  85. $code.=sprintf"0x%02x\n",$last&0xff;
  86. }
# encvert() emits one inner AES encryption round in "vertically spun"
# order: the four Te-table lookups feeding each output word are
# interleaved across all four words, which issues better on 3-way
# superscalar cores (see file header).
# In:  $s0..$s3 = state, $sbox -> Te table (entries 8 bytes apart,
#      byte offsets 0..3 selecting rotated variants), $key -> round keys.
# Out: $s0..$s3 = next state; $key advanced by 16 and the new round key
#      already folded in.
# Uses %r8d (low half of $inp) as the fourth accumulator — $inp is dead here.
sub encvert()
{ my $t3="%r8d"; # zaps $inp!
$code.=<<___;
# favor 3-way issue Opteron pipeline...
movzb `&lo("$s0")`,$acc0
movzb `&lo("$s1")`,$acc1
movzb `&lo("$s2")`,$acc2
mov 0($sbox,$acc0,8),$t0
mov 0($sbox,$acc1,8),$t1
mov 0($sbox,$acc2,8),$t2
movzb `&hi("$s1")`,$acc0
movzb `&hi("$s2")`,$acc1
movzb `&lo("$s3")`,$acc2
xor 3($sbox,$acc0,8),$t0
xor 3($sbox,$acc1,8),$t1
mov 0($sbox,$acc2,8),$t3
movzb `&hi("$s3")`,$acc0
shr \$16,$s2
movzb `&hi("$s0")`,$acc2
xor 3($sbox,$acc0,8),$t2
shr \$16,$s3
xor 3($sbox,$acc2,8),$t3
shr \$16,$s1
lea 16($key),$key
shr \$16,$s0
movzb `&lo("$s2")`,$acc0
movzb `&lo("$s3")`,$acc1
movzb `&lo("$s0")`,$acc2
xor 2($sbox,$acc0,8),$t0
xor 2($sbox,$acc1,8),$t1
xor 2($sbox,$acc2,8),$t2
movzb `&hi("$s3")`,$acc0
movzb `&hi("$s0")`,$acc1
movzb `&lo("$s1")`,$acc2
xor 1($sbox,$acc0,8),$t0
xor 1($sbox,$acc1,8),$t1
xor 2($sbox,$acc2,8),$t3
mov 12($key),$s3
movzb `&hi("$s1")`,$acc1
movzb `&hi("$s2")`,$acc2
mov 0($key),$s0
xor 1($sbox,$acc1,8),$t2
xor 1($sbox,$acc2,8),$t3
mov 4($key),$s1
mov 8($key),$s2
xor $t0,$s0
xor $t1,$s1
xor $t2,$s2
xor $t3,$s3
___
}
# enclastvert() emits the final encryption round (vertical-spin flavour):
# byte-wise S-box lookups assembled with per-byte AND masks instead of the
# rotated table trick, followed by the last round-key addition.
# In:  $s0..$s3 = state, $sbox -> Te table, $key -> penultimate round key
#      (the last round key is read at 16+0..16+12($key); $key is NOT
#      advanced here).
# Out: $s0..$s3 = ciphertext block.
# Uses %r8d (low half of $inp) as the fourth accumulator — $inp is dead here.
sub enclastvert()
{ my $t3="%r8d"; # zaps $inp!
$code.=<<___;
movzb `&lo("$s0")`,$acc0
movzb `&lo("$s1")`,$acc1
movzb `&lo("$s2")`,$acc2
movzb 2($sbox,$acc0,8),$t0
movzb 2($sbox,$acc1,8),$t1
movzb 2($sbox,$acc2,8),$t2
movzb `&lo("$s3")`,$acc0
movzb `&hi("$s1")`,$acc1
movzb `&hi("$s2")`,$acc2
movzb 2($sbox,$acc0,8),$t3
mov 0($sbox,$acc1,8),$acc1 #$t0
mov 0($sbox,$acc2,8),$acc2 #$t1
and \$0x0000ff00,$acc1
and \$0x0000ff00,$acc2
xor $acc1,$t0
xor $acc2,$t1
shr \$16,$s2
movzb `&hi("$s3")`,$acc0
movzb `&hi("$s0")`,$acc1
shr \$16,$s3
mov 0($sbox,$acc0,8),$acc0 #$t2
mov 0($sbox,$acc1,8),$acc1 #$t3
and \$0x0000ff00,$acc0
and \$0x0000ff00,$acc1
shr \$16,$s1
xor $acc0,$t2
xor $acc1,$t3
shr \$16,$s0
movzb `&lo("$s2")`,$acc0
movzb `&lo("$s3")`,$acc1
movzb `&lo("$s0")`,$acc2
mov 0($sbox,$acc0,8),$acc0 #$t0
mov 0($sbox,$acc1,8),$acc1 #$t1
mov 0($sbox,$acc2,8),$acc2 #$t2
and \$0x00ff0000,$acc0
and \$0x00ff0000,$acc1
and \$0x00ff0000,$acc2
xor $acc0,$t0
xor $acc1,$t1
xor $acc2,$t2
movzb `&lo("$s1")`,$acc0
movzb `&hi("$s3")`,$acc1
movzb `&hi("$s0")`,$acc2
mov 0($sbox,$acc0,8),$acc0 #$t3
mov 2($sbox,$acc1,8),$acc1 #$t0
mov 2($sbox,$acc2,8),$acc2 #$t1
and \$0x00ff0000,$acc0
and \$0xff000000,$acc1
and \$0xff000000,$acc2
xor $acc0,$t3
xor $acc1,$t0
xor $acc2,$t1
movzb `&hi("$s1")`,$acc0
movzb `&hi("$s2")`,$acc1
mov 16+12($key),$s3
mov 2($sbox,$acc0,8),$acc0 #$t2
mov 2($sbox,$acc1,8),$acc1 #$t3
mov 16+0($key),$s0
and \$0xff000000,$acc0
and \$0xff000000,$acc1
xor $acc0,$t2
xor $acc1,$t3
mov 16+4($key),$s1
mov 16+8($key),$s2
xor $t0,$s0
xor $t1,$s1
xor $t2,$s2
xor $t3,$s3
___
}
# encstep() emits one quarter of a regular (non-vertical) encryption
# round: it computes output word $i of the next state from input words
# @s via four Te-table lookups and folds in round-key word 4*$i.
# $i==0 additionally advances $key by 16; $i==3 moves the finished words
# $t0..$t2 back into the state registers.
sub encstep()
{ my ($i,@s) = @_;
my $tmp0=$acc0;
my $tmp1=$acc1;
my $tmp2=$acc2;
# Word $i lands in $t0..$t2 for i<3, directly in $s[0] for i==3.
my $out=($t0,$t1,$t2,$s[0])[$i];
if ($i==3) {
# Last word of the round: the remaining input registers are free
# to serve as scratch instead of $acc0..$acc2.
$tmp0=$s[1];
$tmp1=$s[2];
$tmp2=$s[3];
}
$code.=" movzb ".&lo($s[0]).",$out\n";
$code.=" mov $s[2],$tmp1\n" if ($i!=3);
$code.=" lea 16($key),$key\n" if ($i==0);
$code.=" movzb ".&hi($s[1]).",$tmp0\n";
$code.=" mov 0($sbox,$out,8),$out\n";
$code.=" shr \$16,$tmp1\n";
$code.=" mov $s[3],$tmp2\n" if ($i!=3);
$code.=" xor 3($sbox,$tmp0,8),$out\n";
$code.=" movzb ".&lo($tmp1).",$tmp1\n";
$code.=" shr \$24,$tmp2\n";
$code.=" xor 4*$i($key),$out\n";
$code.=" xor 2($sbox,$tmp1,8),$out\n";
$code.=" xor 1($sbox,$tmp2,8),$out\n";
# Commit the words computed in earlier steps back to the state.
$code.=" mov $t0,$s[1]\n" if ($i==3);
$code.=" mov $t1,$s[2]\n" if ($i==3);
$code.=" mov $t2,$s[3]\n" if ($i==3);
$code.="\n";
}
# enclast() emits one quarter of the final (non-vertical) encryption
# round: byte-wise S-box lookups with explicit per-byte masks, no
# MixColumns.  The last round-key addition is emitted by the caller.
# On $i==3 the finished words $t0..$t2 are moved back into the state.
sub enclast()
{ my ($i,@s)=@_;
my $tmp0=$acc0;
my $tmp1=$acc1;
my $tmp2=$acc2;
# Word $i lands in $t0..$t2 for i<3, directly in $s[0] for i==3.
my $out=($t0,$t1,$t2,$s[0])[$i];
if ($i==3) {
# Last word: remaining input registers double as scratch.
$tmp0=$s[1];
$tmp1=$s[2];
$tmp2=$s[3];
}
$code.=" movzb ".&lo($s[0]).",$out\n";
$code.=" mov $s[2],$tmp1\n" if ($i!=3);
$code.=" mov 2($sbox,$out,8),$out\n";
$code.=" shr \$16,$tmp1\n";
$code.=" mov $s[3],$tmp2\n" if ($i!=3);
$code.=" and \$0x000000ff,$out\n";
$code.=" movzb ".&hi($s[1]).",$tmp0\n";
$code.=" movzb ".&lo($tmp1).",$tmp1\n";
$code.=" shr \$24,$tmp2\n";
$code.=" mov 0($sbox,$tmp0,8),$tmp0\n";
$code.=" mov 0($sbox,$tmp1,8),$tmp1\n";
$code.=" mov 2($sbox,$tmp2,8),$tmp2\n";
# Keep one byte per lookup and merge the four bytes into $out.
$code.=" and \$0x0000ff00,$tmp0\n";
$code.=" and \$0x00ff0000,$tmp1\n";
$code.=" and \$0xff000000,$tmp2\n";
$code.=" xor $tmp0,$out\n";
$code.=" mov $t0,$s[1]\n" if ($i==3);
$code.=" xor $tmp1,$out\n";
$code.=" mov $t1,$s[2]\n" if ($i==3);
$code.=" xor $tmp2,$out\n";
$code.=" mov $t2,$s[3]\n" if ($i==3);
$code.="\n";
}
# Emit _x86_64_AES_encrypt: the table-driven encryption core.
# In:  $s0..$s3 = plaintext words, $key -> expanded key schedule,
#      $sbox -> Te table.  Round count is read from offset 240 of the
#      key schedule (AES_KEY->rounds).  Out: $s0..$s3 = ciphertext.
$code.=<<___;
.type _x86_64_AES_encrypt,\@abi-omnipotent
.align 16
_x86_64_AES_encrypt:
xor 0($key),$s0 # xor with key
xor 4($key),$s1
xor 8($key),$s2
xor 12($key),$s3
mov 240($key),$rnds # load key->rounds
sub \$1,$rnds
jmp .Lenc_loop
.align 16
.Lenc_loop:
___
# Inner rounds: either one interleaved "vertical spin" round or four
# per-word steps, depending on the $verticalspin tuning switch.
if ($verticalspin) { &encvert(); }
else { &encstep(0,$s0,$s1,$s2,$s3);
&encstep(1,$s1,$s2,$s3,$s0);
&encstep(2,$s2,$s3,$s0,$s1);
&encstep(3,$s3,$s0,$s1,$s2);
}
$code.=<<___;
sub \$1,$rnds
jnz .Lenc_loop
___
# Final round.  enclastvert() folds the last round key in itself; the
# enclast() path must add it explicitly afterwards.
if ($verticalspin) { &enclastvert(); }
else { &enclast(0,$s0,$s1,$s2,$s3);
&enclast(1,$s1,$s2,$s3,$s0);
&enclast(2,$s2,$s3,$s0,$s1);
&enclast(3,$s3,$s0,$s1,$s2);
$code.=<<___;
xor 16+0($key),$s0 # xor with key
xor 16+4($key),$s1
xor 16+8($key),$s2
xor 16+12($key),$s3
___
}
$code.=<<___;
.byte 0xf3,0xc3 # rep ret
.size _x86_64_AES_encrypt,.-_x86_64_AES_encrypt
___
# it's possible to implement this by shifting tN by 8, filling least
# significant byte with byte load and finally bswap-ing at the end,
# but such partial register load kills Core 2...

# enccompactvert() emits one encryption round for the "compact"
# (cache-timing-hardened, 256-byte table) code path: sixteen single-byte
# S-box lookups, each byte shifted into position and merged.
# In:  $s0..$s3 = state, $sbox -> 256-byte Te4 table.
# Out: $s0..$s3 = SubBytes+ShiftRows of the state (MixColumns is done
#      separately by enctransform()).
# Clobbers %r8d, %r9d and %r13d (= $inp/$out/$rnds, all dead here).
sub enccompactvert()
{ my ($t3,$t4,$t5)=("%r8d","%r9d","%r13d");
$code.=<<___;
movzb `&lo("$s0")`,$t0
movzb `&lo("$s1")`,$t1
movzb `&lo("$s2")`,$t2
movzb `&lo("$s3")`,$t3
movzb `&hi("$s1")`,$acc0
movzb `&hi("$s2")`,$acc1
shr \$16,$s2
movzb `&hi("$s3")`,$acc2
movzb ($sbox,$t0,1),$t0
movzb ($sbox,$t1,1),$t1
movzb ($sbox,$t2,1),$t2
movzb ($sbox,$t3,1),$t3
movzb ($sbox,$acc0,1),$t4 #$t0
movzb `&hi("$s0")`,$acc0
movzb ($sbox,$acc1,1),$t5 #$t1
movzb `&lo("$s2")`,$acc1
movzb ($sbox,$acc2,1),$acc2 #$t2
movzb ($sbox,$acc0,1),$acc0 #$t3
shl \$8,$t4
shr \$16,$s3
shl \$8,$t5
xor $t4,$t0
shr \$16,$s0
movzb `&lo("$s3")`,$t4
shr \$16,$s1
xor $t5,$t1
shl \$8,$acc2
movzb `&lo("$s0")`,$t5
movzb ($sbox,$acc1,1),$acc1 #$t0
xor $acc2,$t2
shl \$8,$acc0
movzb `&lo("$s1")`,$acc2
shl \$16,$acc1
xor $acc0,$t3
movzb ($sbox,$t4,1),$t4 #$t1
movzb `&hi("$s3")`,$acc0
movzb ($sbox,$t5,1),$t5 #$t2
xor $acc1,$t0
shr \$8,$s2
movzb `&hi("$s0")`,$acc1
shl \$16,$t4
shr \$8,$s1
shl \$16,$t5
xor $t4,$t1
movzb ($sbox,$acc2,1),$acc2 #$t3
movzb ($sbox,$acc0,1),$acc0 #$t0
movzb ($sbox,$acc1,1),$acc1 #$t1
movzb ($sbox,$s2,1),$s3 #$t3
movzb ($sbox,$s1,1),$s2 #$t2
shl \$16,$acc2
xor $t5,$t2
shl \$24,$acc0
xor $acc2,$t3
shl \$24,$acc1
xor $acc0,$t0
shl \$24,$s3
xor $acc1,$t1
shl \$24,$s2
mov $t0,$s0
mov $t1,$s1
xor $t2,$s2
xor $t3,$s3
___
}
# enctransform_ref() — reference (single-word) version of the MixColumns
# transform used by the compact path: xtime (GF(2^8) doubling with the
# 0x1b reduction constant) plus rotations of $sn.
# NOTE(review): enctransform() below is the interleaved four-word variant
# actually wired into the compact loop; this one appears to be kept as
# the readable reference — confirm it has no other callers.
sub enctransform_ref()
{ my $sn = shift;
my ($acc,$r2,$tmp)=("%r8d","%r9d","%r13d");
$code.=<<___;
mov $sn,$acc
and \$0x80808080,$acc
mov $acc,$tmp
shr \$7,$tmp
lea ($sn,$sn),$r2
sub $tmp,$acc
and \$0xfefefefe,$r2
and \$0x1b1b1b1b,$acc
mov $sn,$tmp
xor $acc,$r2
xor $r2,$sn
rol \$24,$sn
xor $r2,$sn
ror \$16,$tmp
xor $tmp,$sn
ror \$8,$tmp
xor $tmp,$sn
___
}
# unlike decrypt case it does not pay off to parallelize enctransform

# enctransform() emits the MixColumns step of the compact encryption
# path for all four state words, two words interleaved at a time.
# Per word: xtime (shift-left with 0xfefefefe mask, 0x1b1b1b1b reduction
# selected via the 0x80808080 high-bit mask) combined with rol/ror
# rotations — same math as enctransform_ref(), scheduled for throughput.
# Also prefetches four cache lines of Te4 via $sbox near the end.
# Clobbers %r8d/%r9d and $acc2 as extra scratch.
sub enctransform()
{ my ($t3,$r20,$r21)=($acc2,"%r8d","%r9d");
$code.=<<___;
mov \$0x80808080,$t0
mov \$0x80808080,$t1
and $s0,$t0
and $s1,$t1
mov $t0,$acc0
mov $t1,$acc1
shr \$7,$t0
lea ($s0,$s0),$r20
shr \$7,$t1
lea ($s1,$s1),$r21
sub $t0,$acc0
sub $t1,$acc1
and \$0xfefefefe,$r20
and \$0xfefefefe,$r21
and \$0x1b1b1b1b,$acc0
and \$0x1b1b1b1b,$acc1
mov $s0,$t0
mov $s1,$t1
xor $acc0,$r20
xor $acc1,$r21
xor $r20,$s0
xor $r21,$s1
mov \$0x80808080,$t2
rol \$24,$s0
mov \$0x80808080,$t3
rol \$24,$s1
and $s2,$t2
and $s3,$t3
xor $r20,$s0
xor $r21,$s1
mov $t2,$acc0
ror \$16,$t0
mov $t3,$acc1
ror \$16,$t1
lea ($s2,$s2),$r20
shr \$7,$t2
xor $t0,$s0
shr \$7,$t3
xor $t1,$s1
ror \$8,$t0
lea ($s3,$s3),$r21
ror \$8,$t1
sub $t2,$acc0
sub $t3,$acc1
xor $t0,$s0
xor $t1,$s1
and \$0xfefefefe,$r20
and \$0xfefefefe,$r21
and \$0x1b1b1b1b,$acc0
and \$0x1b1b1b1b,$acc1
mov $s2,$t2
mov $s3,$t3
xor $acc0,$r20
xor $acc1,$r21
ror \$16,$t2
xor $r20,$s2
ror \$16,$t3
xor $r21,$s3
rol \$24,$s2
mov 0($sbox),$acc0 # prefetch Te4
rol \$24,$s3
xor $r20,$s2
mov 64($sbox),$acc1
xor $r21,$s3
mov 128($sbox),$r20
xor $t2,$s2
ror \$8,$t2
xor $t3,$s3
ror \$8,$t3
xor $t2,$s2
mov 192($sbox),$r21
xor $t3,$s3
___
}
# Emit _x86_64_AES_encrypt_compact: encryption core using the small
# 256-byte Te4 table (constant-ish memory footprint vs. the big Te).
# In:  $s0..$s3 = plaintext words, $key -> key schedule, $sbox -> Te4.
# The loop terminates by comparing $key against the end-of-schedule
# pointer the caller stored at its 8(%rsp) — seen as 16(%rsp) here,
# past the return address pushed by call.
$code.=<<___;
.type _x86_64_AES_encrypt_compact,\@abi-omnipotent
.align 16
_x86_64_AES_encrypt_compact:
lea 128($sbox),$inp # size optimization
mov 0-128($inp),$acc1 # prefetch Te4
mov 32-128($inp),$acc2
mov 64-128($inp),$t0
mov 96-128($inp),$t1
mov 128-128($inp),$acc1
mov 160-128($inp),$acc2
mov 192-128($inp),$t0
mov 224-128($inp),$t1
jmp .Lenc_loop_compact
.align 16
.Lenc_loop_compact:
xor 0($key),$s0 # xor with key
xor 4($key),$s1
xor 8($key),$s2
xor 12($key),$s3
lea 16($key),$key
___
# One SubBytes+ShiftRows round ...
&enccompactvert();
$code.=<<___;
cmp 16(%rsp),$key
je .Lenc_compact_done
___
# ... plus MixColumns for every round except the last.
&enctransform();
$code.=<<___;
jmp .Lenc_loop_compact
.align 16
.Lenc_compact_done:
xor 0($key),$s0
xor 4($key),$s1
xor 8($key),$s2
xor 12($key),$s3
.byte 0xf3,0xc3 # rep ret
.size _x86_64_AES_encrypt_compact,.-_x86_64_AES_encrypt_compact
___
# void AES_encrypt (const void *inp,void *out,const AES_KEY *key);
#
# Emit the public AES_encrypt entry point (SysV: rdi=inp, rsi=out,
# rdx=key).  It saves all callee-saved GPRs, builds a 64-byte-aligned
# stack frame positioned so it cannot alias the key schedule or the
# chosen Te4 copy in L1 (the -63/and/neg arithmetic below), stashes
# out at 16(%rsp) and the original %rsp at 24(%rsp), then dispatches
# to the compact core and writes the result.
$code.=<<___;
.globl AES_encrypt
.type AES_encrypt,\@function,3
.align 16
.globl asm_AES_encrypt
.hidden asm_AES_encrypt
asm_AES_encrypt:
AES_encrypt:
.cfi_startproc
mov %rsp,%rax
.cfi_def_cfa_register %rax
push %rbx
.cfi_push %rbx
push %rbp
.cfi_push %rbp
push %r12
.cfi_push %r12
push %r13
.cfi_push %r13
push %r14
.cfi_push %r14
push %r15
.cfi_push %r15
# allocate frame "above" key schedule
lea -63(%rdx),%rcx # %rdx is key argument
and \$-64,%rsp
sub %rsp,%rcx
neg %rcx
and \$0x3c0,%rcx
sub %rcx,%rsp
sub \$32,%rsp
mov %rsi,16(%rsp) # save out
mov %rax,24(%rsp) # save original stack pointer
.cfi_cfa_expression %rsp+24,deref,+8
.Lenc_prologue:
mov %rdx,$key
mov 240($key),$rnds # load rounds
mov 0(%rdi),$s0 # load input vector
mov 4(%rdi),$s1
mov 8(%rdi),$s2
mov 12(%rdi),$s3
shl \$4,$rnds
lea ($key,$rnds),%rbp
mov $key,(%rsp) # key schedule
mov %rbp,8(%rsp) # end of key schedule
# pick Te4 copy which can't "overlap" with stack frame or key schedule
lea .LAES_Te+2048(%rip),$sbox
lea 768(%rsp),%rbp
sub $sbox,%rbp
and \$0x300,%rbp
lea ($sbox,%rbp),$sbox
call _x86_64_AES_encrypt_compact
mov 16(%rsp),$out # restore out
mov 24(%rsp),%rsi # restore saved stack pointer
.cfi_def_cfa %rsi,8
mov $s0,0($out) # write output vector
mov $s1,4($out)
mov $s2,8($out)
mov $s3,12($out)
mov -48(%rsi),%r15
.cfi_restore %r15
mov -40(%rsi),%r14
.cfi_restore %r14
mov -32(%rsi),%r13
.cfi_restore %r13
mov -24(%rsi),%r12
.cfi_restore %r12
mov -16(%rsi),%rbp
.cfi_restore %rbp
mov -8(%rsi),%rbx
.cfi_restore %rbx
lea (%rsi),%rsp
.cfi_def_cfa_register %rsp
.Lenc_epilogue:
ret
.cfi_endproc
.size AES_encrypt,.-AES_encrypt
___
  603. #------------------------------------------------------------------#
# decvert() emits one inner AES decryption round, vertically spun —
# mirror image of encvert() but with the Td-table byte pattern (note the
# hi-byte sources come from $s3/$s0/$s1/$s2, the inverse ShiftRows order).
# In:  $s0..$s3 = state, $sbox -> Td table, $key -> round keys.
# Out: $s0..$s3 = next state; $key advanced by 16 with round key folded in.
# Uses %r8d (low half of $inp) as the fourth accumulator — $inp is dead here.
sub decvert()
{ my $t3="%r8d"; # zaps $inp!
$code.=<<___;
# favor 3-way issue Opteron pipeline...
movzb `&lo("$s0")`,$acc0
movzb `&lo("$s1")`,$acc1
movzb `&lo("$s2")`,$acc2
mov 0($sbox,$acc0,8),$t0
mov 0($sbox,$acc1,8),$t1
mov 0($sbox,$acc2,8),$t2
movzb `&hi("$s3")`,$acc0
movzb `&hi("$s0")`,$acc1
movzb `&lo("$s3")`,$acc2
xor 3($sbox,$acc0,8),$t0
xor 3($sbox,$acc1,8),$t1
mov 0($sbox,$acc2,8),$t3
movzb `&hi("$s1")`,$acc0
shr \$16,$s0
movzb `&hi("$s2")`,$acc2
xor 3($sbox,$acc0,8),$t2
shr \$16,$s3
xor 3($sbox,$acc2,8),$t3
shr \$16,$s1
lea 16($key),$key
shr \$16,$s2
movzb `&lo("$s2")`,$acc0
movzb `&lo("$s3")`,$acc1
movzb `&lo("$s0")`,$acc2
xor 2($sbox,$acc0,8),$t0
xor 2($sbox,$acc1,8),$t1
xor 2($sbox,$acc2,8),$t2
movzb `&hi("$s1")`,$acc0
movzb `&hi("$s2")`,$acc1
movzb `&lo("$s1")`,$acc2
xor 1($sbox,$acc0,8),$t0
xor 1($sbox,$acc1,8),$t1
xor 2($sbox,$acc2,8),$t3
movzb `&hi("$s3")`,$acc0
mov 12($key),$s3
movzb `&hi("$s0")`,$acc2
xor 1($sbox,$acc0,8),$t2
mov 0($key),$s0
xor 1($sbox,$acc2,8),$t3
xor $t0,$s0
mov 4($key),$s1
mov 8($key),$s2
xor $t2,$s2
xor $t1,$s1
xor $t3,$s3
___
}
# declastvert() emits the final decryption round (vertical-spin flavour):
# single-byte lookups from the compressed Td4 located 2048 bytes past
# $sbox (temporarily rebased with lea and restored at the end), assembled
# with shifts, then the last round-key addition from 16+0..16+12($key).
# Out: $s0..$s3 = plaintext block.
# Uses %r8d (low half of $inp) as the fourth accumulator — $inp is dead here.
sub declastvert()
{ my $t3="%r8d"; # zaps $inp!
$code.=<<___;
lea 2048($sbox),$sbox # size optimization
movzb `&lo("$s0")`,$acc0
movzb `&lo("$s1")`,$acc1
movzb `&lo("$s2")`,$acc2
movzb ($sbox,$acc0,1),$t0
movzb ($sbox,$acc1,1),$t1
movzb ($sbox,$acc2,1),$t2
movzb `&lo("$s3")`,$acc0
movzb `&hi("$s3")`,$acc1
movzb `&hi("$s0")`,$acc2
movzb ($sbox,$acc0,1),$t3
movzb ($sbox,$acc1,1),$acc1 #$t0
movzb ($sbox,$acc2,1),$acc2 #$t1
shl \$8,$acc1
shl \$8,$acc2
xor $acc1,$t0
xor $acc2,$t1
shr \$16,$s3
movzb `&hi("$s1")`,$acc0
movzb `&hi("$s2")`,$acc1
shr \$16,$s0
movzb ($sbox,$acc0,1),$acc0 #$t2
movzb ($sbox,$acc1,1),$acc1 #$t3
shl \$8,$acc0
shl \$8,$acc1
shr \$16,$s1
xor $acc0,$t2
xor $acc1,$t3
shr \$16,$s2
movzb `&lo("$s2")`,$acc0
movzb `&lo("$s3")`,$acc1
movzb `&lo("$s0")`,$acc2
movzb ($sbox,$acc0,1),$acc0 #$t0
movzb ($sbox,$acc1,1),$acc1 #$t1
movzb ($sbox,$acc2,1),$acc2 #$t2
shl \$16,$acc0
shl \$16,$acc1
shl \$16,$acc2
xor $acc0,$t0
xor $acc1,$t1
xor $acc2,$t2
movzb `&lo("$s1")`,$acc0
movzb `&hi("$s1")`,$acc1
movzb `&hi("$s2")`,$acc2
movzb ($sbox,$acc0,1),$acc0 #$t3
movzb ($sbox,$acc1,1),$acc1 #$t0
movzb ($sbox,$acc2,1),$acc2 #$t1
shl \$16,$acc0
shl \$24,$acc1
shl \$24,$acc2
xor $acc0,$t3
xor $acc1,$t0
xor $acc2,$t1
movzb `&hi("$s3")`,$acc0
movzb `&hi("$s0")`,$acc1
mov 16+12($key),$s3
movzb ($sbox,$acc0,1),$acc0 #$t2
movzb ($sbox,$acc1,1),$acc1 #$t3
mov 16+0($key),$s0
shl \$24,$acc0
shl \$24,$acc1
xor $acc0,$t2
xor $acc1,$t3
mov 16+4($key),$s1
mov 16+8($key),$s2
lea -2048($sbox),$sbox
xor $t0,$s0
xor $t1,$s1
xor $t2,$s2
xor $t3,$s3
___
}
# decstep() emits one quarter of a regular (non-vertical) decryption
# round: output word $i of the next state from input words @s via four
# Td-table lookups.  Unlike encstep(), the round key is not added here,
# and on $i==3 the write-back order is reversed ($t2->s[1] .. $t0->s[3]),
# matching the inverse ShiftRows byte pattern.
sub decstep()
{ my ($i,@s) = @_;
my $tmp0=$acc0;
my $tmp1=$acc1;
my $tmp2=$acc2;
# Word $i lands in $t0..$t2 for i<3, directly in $s[0] for i==3.
my $out=($t0,$t1,$t2,$s[0])[$i];
$code.=" mov $s[0],$out\n" if ($i!=3);
# For i==3 the inputs themselves serve as scratch registers.
$tmp1=$s[2] if ($i==3);
$code.=" mov $s[2],$tmp1\n" if ($i!=3);
$code.=" and \$0xFF,$out\n";
$code.=" mov 0($sbox,$out,8),$out\n";
$code.=" shr \$16,$tmp1\n";
$tmp2=$s[3] if ($i==3);
$code.=" mov $s[3],$tmp2\n" if ($i!=3);
$tmp0=$s[1] if ($i==3);
$code.=" movzb ".&hi($s[1]).",$tmp0\n";
$code.=" and \$0xFF,$tmp1\n";
$code.=" shr \$24,$tmp2\n";
$code.=" xor 3($sbox,$tmp0,8),$out\n";
$code.=" xor 2($sbox,$tmp1,8),$out\n";
$code.=" xor 1($sbox,$tmp2,8),$out\n";
# Commit earlier words back to the state — reversed order vs. encstep().
$code.=" mov $t2,$s[1]\n" if ($i==3);
$code.=" mov $t1,$s[2]\n" if ($i==3);
$code.=" mov $t0,$s[3]\n" if ($i==3);
$code.="\n";
}
# declast(): emit assembly for ONE column of the LAST decryption
# round.  Same byte selection as decstep(), but each byte goes through
# the 256-byte inverse S-box copy at 2048($sbox) (no InvMixColumns in
# the final round), and the substituted bytes are shifted back into
# position explicitly (shl 8/16/24) before being xor-ed together.
#   $i, @s - as in decstep(); column 3 again aliases temporaries onto
#            dead state registers and writes the results back.
756. sub declast()
757. { my ($i,@s)=@_;
758. my $tmp0=$acc0;
759. my $tmp1=$acc1;
760. my $tmp2=$acc2;
761. my $out=($t0,$t1,$t2,$s[0])[$i];
762. $code.=" mov $s[0],$out\n" if ($i!=3);
763. $tmp1=$s[2] if ($i==3);
764. $code.=" mov $s[2],$tmp1\n" if ($i!=3);
765. $code.=" and \$0xFF,$out\n";
# out = Td4[ s[0] & 0xff ]  (byte table at $sbox+2048)
766. $code.=" movzb 2048($sbox,$out,1),$out\n";
767. $code.=" shr \$16,$tmp1\n";
768. $tmp2=$s[3] if ($i==3);
769. $code.=" mov $s[3],$tmp2\n" if ($i!=3);
770. $tmp0=$s[1] if ($i==3);
771. $code.=" movzb ".&hi($s[1]).",$tmp0\n";
772. $code.=" and \$0xFF,$tmp1\n";
773. $code.=" shr \$24,$tmp2\n";
774. $code.=" movzb 2048($sbox,$tmp0,1),$tmp0\n";
775. $code.=" movzb 2048($sbox,$tmp1,1),$tmp1\n";
776. $code.=" movzb 2048($sbox,$tmp2,1),$tmp2\n";
# reposition substituted bytes into lanes 1..3
777. $code.=" shl \$8,$tmp0\n";
778. $code.=" shl \$16,$tmp1\n";
779. $code.=" shl \$24,$tmp2\n";
# the state write-backs for column 3 are interleaved with the xors
# to fill pipeline slots
780. $code.=" xor $tmp0,$out\n";
781. $code.=" mov $t2,$s[1]\n" if ($i==3);
782. $code.=" xor $tmp1,$out\n";
783. $code.=" mov $t1,$s[2]\n" if ($i==3);
784. $code.=" xor $tmp2,$out\n";
785. $code.=" mov $t0,$s[3]\n" if ($i==3);
786. $code.="\n";
787. }
# Emit _x86_64_AES_decrypt: the regular (full Td table) decryption
# inner routine.  On entry: state in $s0..$s3, key schedule in $key,
# tables at $sbox.  Structure: initial whitening xor, (rounds-1) full
# rounds through .Ldec_loop, then a final Td4-only round plus the
# last round-key xor.  Clobbers $rnds and the $t/$acc temporaries.
788. $code.=<<___;
789. .type _x86_64_AES_decrypt,\@abi-omnipotent
790. .align 16
791. _x86_64_AES_decrypt:
792. xor 0($key),$s0 # xor with key
793. xor 4($key),$s1
794. xor 8($key),$s2
795. xor 12($key),$s3
796. mov 240($key),$rnds # load key->rounds
797. sub \$1,$rnds
798. jmp .Ldec_loop
799. .align 16
800. .Ldec_loop:
801. ___
# round body: either the interleaved "vertical" variant or four
# per-column decstep() expansions followed by the round-key xor
802. if ($verticalspin) { &decvert(); }
803. else { &decstep(0,$s0,$s3,$s2,$s1);
804. &decstep(1,$s1,$s0,$s3,$s2);
805. &decstep(2,$s2,$s1,$s0,$s3);
806. &decstep(3,$s3,$s2,$s1,$s0);
807. $code.=<<___;
808. lea 16($key),$key
809. xor 0($key),$s0 # xor with key
810. xor 4($key),$s1
811. xor 8($key),$s2
812. xor 12($key),$s3
813. ___
814. }
815. $code.=<<___;
816. sub \$1,$rnds
817. jnz .Ldec_loop
818. ___
# final round: inverse S-box only (no column mixing), then the last
# round key at $key+16
819. if ($verticalspin) { &declastvert(); }
820. else { &declast(0,$s0,$s3,$s2,$s1);
821. &declast(1,$s1,$s0,$s3,$s2);
822. &declast(2,$s2,$s1,$s0,$s3);
823. &declast(3,$s3,$s2,$s1,$s0);
824. $code.=<<___;
825. xor 16+0($key),$s0 # xor with key
826. xor 16+4($key),$s1
827. xor 16+8($key),$s2
828. xor 16+12($key),$s3
829. ___
830. }
831. $code.=<<___;
832. .byte 0xf3,0xc3 # rep ret
833. .size _x86_64_AES_decrypt,.-_x86_64_AES_decrypt
834. ___
# deccompactvert(): emit one "compact" decryption round covering all
# four columns at once.  Every state byte is pushed through the
# 256-byte inverse S-box at ($sbox) and reassembled into $s0..$s3 in
# the inverse-ShiftRows byte positions.  Column mixing is NOT done
# here - the caller follows with dectransform() - and the round-key
# xor is also the caller's job (see .Ldec_loop_compact).  The heavy
# interleaving of lookups, shifts and xors is deliberate scheduling.
# Clobbers $t0..$t2, %r8d/%r9d/%r13d ($t3..$t5) and $acc0..$acc2.
835. sub deccompactvert()
836. { my ($t3,$t4,$t5)=("%r8d","%r9d","%r13d");
837. $code.=<<___;
838. movzb `&lo("$s0")`,$t0
839. movzb `&lo("$s1")`,$t1
840. movzb `&lo("$s2")`,$t2
841. movzb `&lo("$s3")`,$t3
842. movzb `&hi("$s3")`,$acc0
843. movzb `&hi("$s0")`,$acc1
844. shr \$16,$s3
845. movzb `&hi("$s1")`,$acc2
846. movzb ($sbox,$t0,1),$t0
847. movzb ($sbox,$t1,1),$t1
848. movzb ($sbox,$t2,1),$t2
849. movzb ($sbox,$t3,1),$t3
850. movzb ($sbox,$acc0,1),$t4 #$t0
851. movzb `&hi("$s2")`,$acc0
852. movzb ($sbox,$acc1,1),$t5 #$t1
853. movzb ($sbox,$acc2,1),$acc2 #$t2
854. movzb ($sbox,$acc0,1),$acc0 #$t3
855. shr \$16,$s2
856. shl \$8,$t5
857. shl \$8,$t4
858. movzb `&lo("$s2")`,$acc1
859. shr \$16,$s0
860. xor $t4,$t0
861. shr \$16,$s1
862. movzb `&lo("$s3")`,$t4
863. shl \$8,$acc2
864. xor $t5,$t1
865. shl \$8,$acc0
866. movzb `&lo("$s0")`,$t5
867. movzb ($sbox,$acc1,1),$acc1 #$t0
868. xor $acc2,$t2
869. movzb `&lo("$s1")`,$acc2
870. shl \$16,$acc1
871. xor $acc0,$t3
872. movzb ($sbox,$t4,1),$t4 #$t1
873. movzb `&hi("$s1")`,$acc0
874. movzb ($sbox,$acc2,1),$acc2 #$t3
875. xor $acc1,$t0
876. movzb ($sbox,$t5,1),$t5 #$t2
877. movzb `&hi("$s2")`,$acc1
878. shl \$16,$acc2
879. shl \$16,$t4
880. shl \$16,$t5
881. xor $acc2,$t3
882. movzb `&hi("$s3")`,$acc2
883. xor $t4,$t1
884. shr \$8,$s0
885. xor $t5,$t2
886. movzb ($sbox,$acc0,1),$acc0 #$t0
887. movzb ($sbox,$acc1,1),$s1 #$t1
888. movzb ($sbox,$acc2,1),$s2 #$t2
889. movzb ($sbox,$s0,1),$s3 #$t3
890. mov $t0,$s0
891. shl \$24,$acc0
892. shl \$24,$s1
893. shl \$24,$s2
894. xor $acc0,$s0
895. shl \$24,$s3
896. xor $t1,$s1
897. xor $t2,$s2
898. xor $t3,$s3
899. ___
900. }
901. # parallelized version! input is pair of 64-bit values: %rax=s1.s0
902. # and %rcx=s3.s2, output is four 32-bit values in %eax=s0, %ebx=s1,
903. # %ecx=s2 and %edx=s3.
# dectransform(): column-mixing step of the compact decrypt (the same
# math as the scalar reference in deckey_ref() below, run on two
# 64-bit lanes in parallel).  tp2/tp4/tp8 are successive GF(2^8)
# doublings of tp1: double with lea (x+x), isolate the carried-out
# high bits with $mask80, clear cross-byte spill with $maskfe, and
# fold the reduction polynomial back in with $mask1b (0x1b pattern).
# The doubled terms are then combined via the byte-rotations noted in
# the inline ROTATE() comments.
# If $prefetch is true, the mask registers are additionally clobbered
# with loads from 0/64/128/192/256($sbox) - pure cache-warming; the
# caller reloads the real mask constants every iteration.
904. sub dectransform()
905. { my ($tp10,$tp20,$tp40,$tp80,$acc0)=("%rax","%r8", "%r9", "%r10","%rbx");
906. my ($tp18,$tp28,$tp48,$tp88,$acc8)=("%rcx","%r11","%r12","%r13","%rdx");
907. my $prefetch = shift;
908. $code.=<<___;
909. mov $mask80,$tp40
910. mov $mask80,$tp48
911. and $tp10,$tp40
912. and $tp18,$tp48
913. mov $tp40,$acc0
914. mov $tp48,$acc8
915. shr \$7,$tp40
916. lea ($tp10,$tp10),$tp20
917. shr \$7,$tp48
918. lea ($tp18,$tp18),$tp28
919. sub $tp40,$acc0
920. sub $tp48,$acc8
921. and $maskfe,$tp20
922. and $maskfe,$tp28
923. and $mask1b,$acc0
924. and $mask1b,$acc8
925. xor $acc0,$tp20
926. xor $acc8,$tp28
927. mov $mask80,$tp80
928. mov $mask80,$tp88
929. and $tp20,$tp80
930. and $tp28,$tp88
931. mov $tp80,$acc0
932. mov $tp88,$acc8
933. shr \$7,$tp80
934. lea ($tp20,$tp20),$tp40
935. shr \$7,$tp88
936. lea ($tp28,$tp28),$tp48
937. sub $tp80,$acc0
938. sub $tp88,$acc8
939. and $maskfe,$tp40
940. and $maskfe,$tp48
941. and $mask1b,$acc0
942. and $mask1b,$acc8
943. xor $acc0,$tp40
944. xor $acc8,$tp48
945. mov $mask80,$tp80
946. mov $mask80,$tp88
947. and $tp40,$tp80
948. and $tp48,$tp88
949. mov $tp80,$acc0
950. mov $tp88,$acc8
951. shr \$7,$tp80
952. xor $tp10,$tp20 # tp2^=tp1
953. shr \$7,$tp88
954. xor $tp18,$tp28 # tp2^=tp1
955. sub $tp80,$acc0
956. sub $tp88,$acc8
957. lea ($tp40,$tp40),$tp80
958. lea ($tp48,$tp48),$tp88
959. xor $tp10,$tp40 # tp4^=tp1
960. xor $tp18,$tp48 # tp4^=tp1
961. and $maskfe,$tp80
962. and $maskfe,$tp88
963. and $mask1b,$acc0
964. and $mask1b,$acc8
965. xor $acc0,$tp80
966. xor $acc8,$tp88
967. xor $tp80,$tp10 # tp1^=tp8
968. xor $tp88,$tp18 # tp1^=tp8
969. xor $tp80,$tp20 # tp2^tp1^=tp8
970. xor $tp88,$tp28 # tp2^tp1^=tp8
971. mov $tp10,$acc0
972. mov $tp18,$acc8
973. xor $tp80,$tp40 # tp4^tp1^=tp8
974. shr \$32,$acc0
975. xor $tp88,$tp48 # tp4^tp1^=tp8
976. shr \$32,$acc8
977. xor $tp20,$tp80 # tp8^=tp8^tp2^tp1=tp2^tp1
978. rol \$8,`&LO("$tp10")` # ROTATE(tp1^tp8,8)
979. xor $tp28,$tp88 # tp8^=tp8^tp2^tp1=tp2^tp1
980. rol \$8,`&LO("$tp18")` # ROTATE(tp1^tp8,8)
981. xor $tp40,$tp80 # tp2^tp1^=tp8^tp4^tp1=tp8^tp4^tp2
982. rol \$8,`&LO("$acc0")` # ROTATE(tp1^tp8,8)
983. xor $tp48,$tp88 # tp2^tp1^=tp8^tp4^tp1=tp8^tp4^tp2
984. rol \$8,`&LO("$acc8")` # ROTATE(tp1^tp8,8)
985. xor `&LO("$tp80")`,`&LO("$tp10")`
986. shr \$32,$tp80
987. xor `&LO("$tp88")`,`&LO("$tp18")`
988. shr \$32,$tp88
989. xor `&LO("$tp80")`,`&LO("$acc0")`
990. xor `&LO("$tp88")`,`&LO("$acc8")`
991. mov $tp20,$tp80
992. rol \$24,`&LO("$tp20")` # ROTATE(tp2^tp1^tp8,24)
993. mov $tp28,$tp88
994. rol \$24,`&LO("$tp28")` # ROTATE(tp2^tp1^tp8,24)
995. shr \$32,$tp80
996. xor `&LO("$tp20")`,`&LO("$tp10")`
997. shr \$32,$tp88
998. xor `&LO("$tp28")`,`&LO("$tp18")`
999. rol \$24,`&LO("$tp80")` # ROTATE(tp2^tp1^tp8,24)
1000. mov $tp40,$tp20
1001. rol \$24,`&LO("$tp88")` # ROTATE(tp2^tp1^tp8,24)
1002. mov $tp48,$tp28
1003. shr \$32,$tp20
1004. xor `&LO("$tp80")`,`&LO("$acc0")`
1005. shr \$32,$tp28
1006. xor `&LO("$tp88")`,`&LO("$acc8")`
1007. `"mov 0($sbox),$mask80" if ($prefetch)`
1008. rol \$16,`&LO("$tp40")` # ROTATE(tp4^tp1^tp8,16)
1009. `"mov 64($sbox),$maskfe" if ($prefetch)`
1010. rol \$16,`&LO("$tp48")` # ROTATE(tp4^tp1^tp8,16)
1011. `"mov 128($sbox),$mask1b" if ($prefetch)`
1012. rol \$16,`&LO("$tp20")` # ROTATE(tp4^tp1^tp8,16)
1013. `"mov 192($sbox),$tp80" if ($prefetch)`
1014. xor `&LO("$tp40")`,`&LO("$tp10")`
1015. rol \$16,`&LO("$tp28")` # ROTATE(tp4^tp1^tp8,16)
1016. xor `&LO("$tp48")`,`&LO("$tp18")`
1017. `"mov 256($sbox),$tp88" if ($prefetch)`
1018. xor `&LO("$tp20")`,`&LO("$acc0")`
1019. xor `&LO("$tp28")`,`&LO("$acc8")`
1020. ___
1021. }
# Emit _x86_64_AES_decrypt_compact: decryption inner routine built on
# the 256-byte inverse S-box ("compact" tables) instead of the 2KB Td
# table, trading speed for a smaller cache footprint.  The loop runs
# until $key reaches the schedule-end pointer saved at 16(%rsp) by
# the caller.  The initial movs below load and discard table words
# purely to warm the cache (see "prefetch Td4").
1022. $code.=<<___;
1023. .type _x86_64_AES_decrypt_compact,\@abi-omnipotent
1024. .align 16
1025. _x86_64_AES_decrypt_compact:
1026. lea 128($sbox),$inp # size optimization
1027. mov 0-128($inp),$acc1 # prefetch Td4
1028. mov 32-128($inp),$acc2
1029. mov 64-128($inp),$t0
1030. mov 96-128($inp),$t1
1031. mov 128-128($inp),$acc1
1032. mov 160-128($inp),$acc2
1033. mov 192-128($inp),$t0
1034. mov 224-128($inp),$t1
1035. jmp .Ldec_loop_compact
1036. .align 16
1037. .Ldec_loop_compact:
1038. xor 0($key),$s0 # xor with key
1039. xor 4($key),$s1
1040. xor 8($key),$s2
1041. xor 12($key),$s3
1042. lea 16($key),$key
1043. ___
# inverse SubBytes + ShiftRows for all four columns
1044. &deccompactvert();
1045. $code.=<<___;
1046. cmp 16(%rsp),$key
1047. je .Ldec_compact_done
1048. mov 256+0($sbox),$mask80
1049. shl \$32,%rbx
1050. shl \$32,%rdx
1051. mov 256+8($sbox),$maskfe
1052. or %rbx,%rax
1053. or %rdx,%rcx
1054. mov 256+16($sbox),$mask1b
1055. ___
# column mixing on the repacked 64-bit pairs (%rax=s1.s0, %rcx=s3.s2),
# with table prefetch enabled for the next iteration
1056. &dectransform(1);
1057. $code.=<<___;
1058. jmp .Ldec_loop_compact
1059. .align 16
1060. .Ldec_compact_done:
1061. xor 0($key),$s0
1062. xor 4($key),$s1
1063. xor 8($key),$s2
1064. xor 12($key),$s3
1065. .byte 0xf3,0xc3 # rep ret
1066. .size _x86_64_AES_decrypt_compact,.-_x86_64_AES_decrypt_compact
1067. ___
1068. # void AES_decrypt (const void *inp,void *out,const AES_KEY *key);
# Public single-block entry point.  Saves all callee-saved GPRs
# relative to the original %rsp (kept in %rax, spilled to 24(%rsp)),
# then builds a 64-byte-aligned frame whose distance from the key
# schedule is adjusted by the 0x3c0 mask (the in-file "magic"
# constants) so frame, key schedule and the selected Td4 copy don't
# collide in the cache; finally runs the compact decrypt and stores
# the state.  Frame slots: 0(%rsp)=key, 8=schedule end, 16=out,
# 24=saved %rsp.
1069. $code.=<<___;
1070. .globl AES_decrypt
1071. .type AES_decrypt,\@function,3
1072. .align 16
1073. .globl asm_AES_decrypt
1074. .hidden asm_AES_decrypt
1075. asm_AES_decrypt:
1076. AES_decrypt:
1077. .cfi_startproc
1078. mov %rsp,%rax
1079. .cfi_def_cfa_register %rax
1080. push %rbx
1081. .cfi_push %rbx
1082. push %rbp
1083. .cfi_push %rbp
1084. push %r12
1085. .cfi_push %r12
1086. push %r13
1087. .cfi_push %r13
1088. push %r14
1089. .cfi_push %r14
1090. push %r15
1091. .cfi_push %r15
1092. # allocate frame "above" key schedule
1093. lea -63(%rdx),%rcx # %rdx is key argument
1094. and \$-64,%rsp
1095. sub %rsp,%rcx
1096. neg %rcx
1097. and \$0x3c0,%rcx
1098. sub %rcx,%rsp
1099. sub \$32,%rsp
1100. mov %rsi,16(%rsp) # save out
1101. mov %rax,24(%rsp) # save original stack pointer
1102. .cfi_cfa_expression %rsp+24,deref,+8
1103. .Ldec_prologue:
1104. mov %rdx,$key
1105. mov 240($key),$rnds # load rounds
1106. mov 0(%rdi),$s0 # load input vector
1107. mov 4(%rdi),$s1
1108. mov 8(%rdi),$s2
1109. mov 12(%rdi),$s3
1110. shl \$4,$rnds
1111. lea ($key,$rnds),%rbp
1112. mov $key,(%rsp) # key schedule
1113. mov %rbp,8(%rsp) # end of key schedule
1114. # pick Td4 copy which can't "overlap" with stack frame or key schedule
1115. lea .LAES_Td+2048(%rip),$sbox
1116. lea 768(%rsp),%rbp
1117. sub $sbox,%rbp
1118. and \$0x300,%rbp
1119. lea ($sbox,%rbp),$sbox
1120. shr \$3,%rbp # recall "magic" constants!
1121. add %rbp,$sbox
1122. call _x86_64_AES_decrypt_compact
1123. mov 16(%rsp),$out # restore out
1124. mov 24(%rsp),%rsi # restore saved stack pointer
1125. .cfi_def_cfa %rsi,8
1126. mov $s0,0($out) # write output vector
1127. mov $s1,4($out)
1128. mov $s2,8($out)
1129. mov $s3,12($out)
1130. mov -48(%rsi),%r15
1131. .cfi_restore %r15
1132. mov -40(%rsi),%r14
1133. .cfi_restore %r14
1134. mov -32(%rsi),%r13
1135. .cfi_restore %r13
1136. mov -24(%rsi),%r12
1137. .cfi_restore %r12
1138. mov -16(%rsi),%rbp
1139. .cfi_restore %rbp
1140. mov -8(%rsi),%rbx
1141. .cfi_restore %rbx
1142. lea (%rsi),%rsp
1143. .cfi_def_cfa_register %rsp
1144. .Ldec_epilogue:
1145. ret
1146. .cfi_endproc
1147. .size AES_decrypt,.-AES_decrypt
1148. ___
1149. #------------------------------------------------------------------#
# enckey(): emit the key-expansion core, combining %eax (= rk[i-Nk])
# with SubWord(RotWord(prev)) ^ rcon.  On entry %edx holds the
# previous round-key word, %rbp points 128 bytes past Te4 (so
# -128(%rbp,...) indexes the S-box bytes) and %rcx is the rcon index.
# The RotWord is folded into the byte placement: the low byte of %edx
# lands in bits 24..31, the second byte in bits 0..7, and so on.
# Result left in %eax; clobbers %ebx, %esi and %edx.
1150. sub enckey()
1151. {
1152. $code.=<<___;
1153. movz %dl,%esi # rk[i]>>0
1154. movzb -128(%rbp,%rsi),%ebx
1155. movz %dh,%esi # rk[i]>>8
1156. shl \$24,%ebx
1157. xor %ebx,%eax
1158. movzb -128(%rbp,%rsi),%ebx
1159. shr \$16,%edx
1160. movz %dl,%esi # rk[i]>>16
1161. xor %ebx,%eax
1162. movzb -128(%rbp,%rsi),%ebx
1163. movz %dh,%esi # rk[i]>>24
1164. shl \$8,%ebx
1165. xor %ebx,%eax
1166. movzb -128(%rbp,%rsi),%ebx
1167. shl \$16,%ebx
1168. xor %ebx,%eax
1169. xor 1024-128(%rbp,%rcx,4),%eax # rcon
1170. ___
1171. }
1172. # int AES_set_encrypt_key(const unsigned char *userKey, const int bits,
1173. # AES_KEY *key)
# Public wrapper + worker.  The wrapper pushes all six callee-saved
# GPRs (deliberately more than the worker needs - see the "redundant"
# comment - so the frame matches AES_set_decrypt_key's) and restores
# only %rbp/%rbx, which are the callee-saved registers the worker
# actually touches.  The worker returns 0 on success, -1 for a NULL
# pointer, -2 for an unsupported bit count, and dispatches to the
# 10/12/14-round expansion loops.
1174. $code.=<<___;
1175. .globl AES_set_encrypt_key
1176. .type AES_set_encrypt_key,\@function,3
1177. .align 16
1178. AES_set_encrypt_key:
1179. .cfi_startproc
1180. push %rbx
1181. .cfi_push %rbx
1182. push %rbp
1183. .cfi_push %rbp
1184. push %r12 # redundant, but allows to share
1185. .cfi_push %r12
1186. push %r13 # exception handler...
1187. .cfi_push %r13
1188. push %r14
1189. .cfi_push %r14
1190. push %r15
1191. .cfi_push %r15
1192. sub \$8,%rsp
1193. .cfi_adjust_cfa_offset 8
1194. .Lenc_key_prologue:
1195. call _x86_64_AES_set_encrypt_key
1196. mov 40(%rsp),%rbp
1197. .cfi_restore %rbp
1198. mov 48(%rsp),%rbx
1199. .cfi_restore %rbx
1200. add \$56,%rsp
1201. .cfi_adjust_cfa_offset -56
1202. .Lenc_key_epilogue:
1203. ret
1204. .cfi_endproc
1205. .size AES_set_encrypt_key,.-AES_set_encrypt_key
1206. .type _x86_64_AES_set_encrypt_key,\@abi-omnipotent
1207. .align 16
1208. _x86_64_AES_set_encrypt_key:
1209. mov %esi,%ecx # %ecx=bits
1210. mov %rdi,%rsi # %rsi=userKey
1211. mov %rdx,%rdi # %rdi=key
1212. test \$-1,%rsi
1213. jz .Lbadpointer
1214. test \$-1,%rdi
1215. jz .Lbadpointer
1216. lea .LAES_Te(%rip),%rbp
1217. lea 2048+128(%rbp),%rbp
1218. # prefetch Te4
1219. mov 0-128(%rbp),%eax
1220. mov 32-128(%rbp),%ebx
1221. mov 64-128(%rbp),%r8d
1222. mov 96-128(%rbp),%edx
1223. mov 128-128(%rbp),%eax
1224. mov 160-128(%rbp),%ebx
1225. mov 192-128(%rbp),%r8d
1226. mov 224-128(%rbp),%edx
1227. cmp \$128,%ecx
1228. je .L10rounds
1229. cmp \$192,%ecx
1230. je .L12rounds
1231. cmp \$256,%ecx
1232. je .L14rounds
1233. mov \$-2,%rax # invalid number of bits
1234. jmp .Lexit
1235. .L10rounds:
1236. mov 0(%rsi),%rax # copy first 4 dwords
1237. mov 8(%rsi),%rdx
1238. mov %rax,0(%rdi)
1239. mov %rdx,8(%rdi)
1240. shr \$32,%rdx
1241. xor %ecx,%ecx
1242. jmp .L10shortcut
1243. .align 4
1244. .L10loop:
1245. mov 0(%rdi),%eax # rk[0]
1246. mov 12(%rdi),%edx # rk[3]
1247. .L10shortcut:
1248. ___
# one AES-128 schedule step: rk[4] = rk[0]^SubWord(RotWord(rk[3]))^rcon
1249. &enckey ();
1250. $code.=<<___;
1251. mov %eax,16(%rdi) # rk[4]
1252. xor 4(%rdi),%eax
1253. mov %eax,20(%rdi) # rk[5]
1254. xor 8(%rdi),%eax
1255. mov %eax,24(%rdi) # rk[6]
1256. xor 12(%rdi),%eax
1257. mov %eax,28(%rdi) # rk[7]
1258. add \$1,%ecx
1259. lea 16(%rdi),%rdi
1260. cmp \$10,%ecx
1261. jl .L10loop
1262. movl \$10,80(%rdi) # setup number of rounds
1263. xor %rax,%rax
1264. jmp .Lexit
1265. .L12rounds:
1266. mov 0(%rsi),%rax # copy first 6 dwords
1267. mov 8(%rsi),%rbx
1268. mov 16(%rsi),%rdx
1269. mov %rax,0(%rdi)
1270. mov %rbx,8(%rdi)
1271. mov %rdx,16(%rdi)
1272. shr \$32,%rdx
1273. xor %ecx,%ecx
1274. jmp .L12shortcut
1275. .align 4
1276. .L12loop:
1277. mov 0(%rdi),%eax # rk[0]
1278. mov 20(%rdi),%edx # rk[5]
1279. .L12shortcut:
1280. ___
# one AES-192 step: produces rk[6..9], and rk[10..11] unless this is
# the final (8th) iteration
1281. &enckey ();
1282. $code.=<<___;
1283. mov %eax,24(%rdi) # rk[6]
1284. xor 4(%rdi),%eax
1285. mov %eax,28(%rdi) # rk[7]
1286. xor 8(%rdi),%eax
1287. mov %eax,32(%rdi) # rk[8]
1288. xor 12(%rdi),%eax
1289. mov %eax,36(%rdi) # rk[9]
1290. cmp \$7,%ecx
1291. je .L12break
1292. add \$1,%ecx
1293. xor 16(%rdi),%eax
1294. mov %eax,40(%rdi) # rk[10]
1295. xor 20(%rdi),%eax
1296. mov %eax,44(%rdi) # rk[11]
1297. lea 24(%rdi),%rdi
1298. jmp .L12loop
1299. .L12break:
1300. movl \$12,72(%rdi) # setup number of rounds
1301. xor %rax,%rax
1302. jmp .Lexit
1303. .L14rounds:
1304. mov 0(%rsi),%rax # copy first 8 dwords
1305. mov 8(%rsi),%rbx
1306. mov 16(%rsi),%rcx
1307. mov 24(%rsi),%rdx
1308. mov %rax,0(%rdi)
1309. mov %rbx,8(%rdi)
1310. mov %rcx,16(%rdi)
1311. mov %rdx,24(%rdi)
1312. shr \$32,%rdx
1313. xor %ecx,%ecx
1314. jmp .L14shortcut
1315. .align 4
1316. .L14loop:
1317. mov 0(%rdi),%eax # rk[0]
1318. mov 28(%rdi),%edx # rk[4]
1319. .L14shortcut:
1320. ___
# first half of an AES-256 step (rk[8..11]); the second half below
# applies plain SubWord - note the shifts keep byte positions, i.e.
# no RotWord and no rcon for that half
1321. &enckey ();
1322. $code.=<<___;
1323. mov %eax,32(%rdi) # rk[8]
1324. xor 4(%rdi),%eax
1325. mov %eax,36(%rdi) # rk[9]
1326. xor 8(%rdi),%eax
1327. mov %eax,40(%rdi) # rk[10]
1328. xor 12(%rdi),%eax
1329. mov %eax,44(%rdi) # rk[11]
1330. cmp \$6,%ecx
1331. je .L14break
1332. add \$1,%ecx
1333. mov %eax,%edx
1334. mov 16(%rdi),%eax # rk[4]
1335. movz %dl,%esi # rk[11]>>0
1336. movzb -128(%rbp,%rsi),%ebx
1337. movz %dh,%esi # rk[11]>>8
1338. xor %ebx,%eax
1339. movzb -128(%rbp,%rsi),%ebx
1340. shr \$16,%edx
1341. shl \$8,%ebx
1342. movz %dl,%esi # rk[11]>>16
1343. xor %ebx,%eax
1344. movzb -128(%rbp,%rsi),%ebx
1345. movz %dh,%esi # rk[11]>>24
1346. shl \$16,%ebx
1347. xor %ebx,%eax
1348. movzb -128(%rbp,%rsi),%ebx
1349. shl \$24,%ebx
1350. xor %ebx,%eax
1351. mov %eax,48(%rdi) # rk[12]
1352. xor 20(%rdi),%eax
1353. mov %eax,52(%rdi) # rk[13]
1354. xor 24(%rdi),%eax
1355. mov %eax,56(%rdi) # rk[14]
1356. xor 28(%rdi),%eax
1357. mov %eax,60(%rdi) # rk[15]
1358. lea 32(%rdi),%rdi
1359. jmp .L14loop
1360. .L14break:
1361. movl \$14,48(%rdi) # setup number of rounds
1362. xor %rax,%rax
1363. jmp .Lexit
1364. .Lbadpointer:
1365. mov \$-1,%rax
1366. .Lexit:
1367. .byte 0xf3,0xc3 # rep ret
1368. .size _x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key
1369. ___
# deckey_ref(): emit the reference (one 32-bit word at a time)
# column-mixing transform, applied in place to the round-key word at
# $i($ptr).  Same scheme as dectransform(): tp2/tp4/tp8 are GF(2^8)
# doublings of tp1 (lea x+x, high bits via 0x80808080, cross-byte
# spill cleared with 0xfefefefe, reduction folded in with
# 0x1b1b1b1b), combined through the rotate/xor ladder annotated in
# the inline comments.  $te and $td are accepted for interface
# compatibility but unused here.  Clobbers %eax,%ebx,%edi,%edx,%r8d.
1370. sub deckey_ref()
1371. { my ($i,$ptr,$te,$td) = @_;
1372. my ($tp1,$tp2,$tp4,$tp8,$acc)=("%eax","%ebx","%edi","%edx","%r8d");
1373. $code.=<<___;
1374. mov $i($ptr),$tp1
1375. mov $tp1,$acc
1376. and \$0x80808080,$acc
1377. mov $acc,$tp4
1378. shr \$7,$tp4
1379. lea 0($tp1,$tp1),$tp2
1380. sub $tp4,$acc
1381. and \$0xfefefefe,$tp2
1382. and \$0x1b1b1b1b,$acc
1383. xor $tp2,$acc
1384. mov $acc,$tp2
1385. and \$0x80808080,$acc
1386. mov $acc,$tp8
1387. shr \$7,$tp8
1388. lea 0($tp2,$tp2),$tp4
1389. sub $tp8,$acc
1390. and \$0xfefefefe,$tp4
1391. and \$0x1b1b1b1b,$acc
1392. xor $tp1,$tp2 # tp2^tp1
1393. xor $tp4,$acc
1394. mov $acc,$tp4
1395. and \$0x80808080,$acc
1396. mov $acc,$tp8
1397. shr \$7,$tp8
1398. sub $tp8,$acc
1399. lea 0($tp4,$tp4),$tp8
1400. xor $tp1,$tp4 # tp4^tp1
1401. and \$0xfefefefe,$tp8
1402. and \$0x1b1b1b1b,$acc
1403. xor $acc,$tp8
1404. xor $tp8,$tp1 # tp1^tp8
1405. rol \$8,$tp1 # ROTATE(tp1^tp8,8)
1406. xor $tp8,$tp2 # tp2^tp1^tp8
1407. xor $tp8,$tp4 # tp4^tp1^tp8
1408. xor $tp2,$tp8
1409. xor $tp4,$tp8 # tp8^(tp8^tp4^tp1)^(tp8^tp2^tp1)=tp8^tp4^tp2
1410. xor $tp8,$tp1
1411. rol \$24,$tp2 # ROTATE(tp2^tp1^tp8,24)
1412. xor $tp2,$tp1
1413. rol \$16,$tp4 # ROTATE(tp4^tp1^tp8,16)
1414. xor $tp4,$tp1
1415. mov $tp1,$i($ptr)
1416. ___
1417. }
1418. # int AES_set_decrypt_key(const unsigned char *userKey, const int bits,
1419. # AES_KEY *key)
# Builds the decryption schedule in three phases: (1) run the
# encryption key schedule; (2) .Linvert - swap the round keys end for
# end in 16-byte chunks; (3) .Lpermute - apply the column-mixing
# transform (dectransform) to every round key except the first and
# last (rounds-1 iterations starting at rk[4]).  Propagates the
# worker's error code via .Labort; returns 0 on success.  The stack
# layout (6 pushes + saved %rdx) matches AES_set_encrypt_key's frame.
1420. $code.=<<___;
1421. .globl AES_set_decrypt_key
1422. .type AES_set_decrypt_key,\@function,3
1423. .align 16
1424. AES_set_decrypt_key:
1425. .cfi_startproc
1426. push %rbx
1427. .cfi_push %rbx
1428. push %rbp
1429. .cfi_push %rbp
1430. push %r12
1431. .cfi_push %r12
1432. push %r13
1433. .cfi_push %r13
1434. push %r14
1435. .cfi_push %r14
1436. push %r15
1437. .cfi_push %r15
1438. push %rdx # save key schedule
1439. .cfi_adjust_cfa_offset 8
1440. .Ldec_key_prologue:
1441. call _x86_64_AES_set_encrypt_key
1442. mov (%rsp),%r8 # restore key schedule
1443. cmp \$0,%eax
1444. jne .Labort
1445. mov 240(%r8),%r14d # pull number of rounds
1446. xor %rdi,%rdi
1447. lea (%rdi,%r14d,4),%rcx
1448. mov %r8,%rsi
1449. lea (%r8,%rcx,4),%rdi # pointer to last chunk
1450. .align 4
1451. .Linvert:
1452. mov 0(%rsi),%rax
1453. mov 8(%rsi),%rbx
1454. mov 0(%rdi),%rcx
1455. mov 8(%rdi),%rdx
1456. mov %rax,0(%rdi)
1457. mov %rbx,8(%rdi)
1458. mov %rcx,0(%rsi)
1459. mov %rdx,8(%rsi)
1460. lea 16(%rsi),%rsi
1461. lea -16(%rdi),%rdi
1462. cmp %rsi,%rdi
1463. jne .Linvert
1464. lea .LAES_Te+2048+1024(%rip),%rax # rcon
1465. mov 40(%rax),$mask80
1466. mov 48(%rax),$maskfe
1467. mov 56(%rax),$mask1b
1468. mov %r8,$key
1469. sub \$1,%r14d
1470. .align 4
1471. .Lpermute:
1472. lea 16($key),$key
1473. mov 0($key),%rax
1474. mov 8($key),%rcx
1475. ___
# transform one 16-byte round key per iteration; the mask constants
# were loaded from the table area next to rcon above (no prefetch arg,
# so the masks survive across iterations)
1476. &dectransform ();
1477. $code.=<<___;
1478. mov %eax,0($key)
1479. mov %ebx,4($key)
1480. mov %ecx,8($key)
1481. mov %edx,12($key)
1482. sub \$1,%r14d
1483. jnz .Lpermute
1484. xor %rax,%rax
1485. .Labort:
1486. mov 8(%rsp),%r15
1487. .cfi_restore %r15
1488. mov 16(%rsp),%r14
1489. .cfi_restore %r14
1490. mov 24(%rsp),%r13
1491. .cfi_restore %r13
1492. mov 32(%rsp),%r12
1493. .cfi_restore %r12
1494. mov 40(%rsp),%rbp
1495. .cfi_restore %rbp
1496. mov 48(%rsp),%rbx
1497. .cfi_restore %rbx
1498. add \$56,%rsp
1499. .cfi_adjust_cfa_offset -56
1500. .Ldec_key_epilogue:
1501. ret
1502. .cfi_endproc
1503. .size AES_set_decrypt_key,.-AES_set_decrypt_key
1504. ___
  1505. # void AES_cbc_encrypt (const void char *inp, unsigned char *out,
  1506. # size_t length, const AES_KEY *key,
  1507. # unsigned char *ivp,const int enc);
  1508. {
  1509. # stack frame layout
  1510. # -8(%rsp) return address
  1511. my $keyp="0(%rsp)"; # one to pass as $key
  1512. my $keyend="8(%rsp)"; # &(keyp->rd_key[4*keyp->rounds])
  1513. my $_rsp="16(%rsp)"; # saved %rsp
  1514. my $_inp="24(%rsp)"; # copy of 1st parameter, inp
  1515. my $_out="32(%rsp)"; # copy of 2nd parameter, out
  1516. my $_len="40(%rsp)"; # copy of 3rd parameter, length
  1517. my $_key="48(%rsp)"; # copy of 4th parameter, key
  1518. my $_ivp="56(%rsp)"; # copy of 5th parameter, ivp
  1519. my $ivec="64(%rsp)"; # ivec[16]
  1520. my $aes_key="80(%rsp)"; # copy of aes_key
  1521. my $mark="80+240(%rsp)"; # copy of aes_key->rounds
  1522. $code.=<<___;
  1523. .globl AES_cbc_encrypt
  1524. .type AES_cbc_encrypt,\@function,6
  1525. .align 16
  1526. .extern OPENSSL_ia32cap_P
  1527. .globl asm_AES_cbc_encrypt
  1528. .hidden asm_AES_cbc_encrypt
  1529. asm_AES_cbc_encrypt:
  1530. AES_cbc_encrypt:
  1531. .cfi_startproc
  1532. cmp \$0,%rdx # check length
  1533. je .Lcbc_epilogue
  1534. pushfq
  1535. .cfi_push 49 # %rflags
  1536. push %rbx
  1537. .cfi_push %rbx
  1538. push %rbp
  1539. .cfi_push %rbp
  1540. push %r12
  1541. .cfi_push %r12
  1542. push %r13
  1543. .cfi_push %r13
  1544. push %r14
  1545. .cfi_push %r14
  1546. push %r15
  1547. .cfi_push %r15
  1548. .Lcbc_prologue:
  1549. cld
  1550. mov %r9d,%r9d # clear upper half of enc
  1551. lea .LAES_Te(%rip),$sbox
  1552. lea .LAES_Td(%rip),%r10
  1553. cmp \$0,%r9
  1554. cmoveq %r10,$sbox
  1555. mov OPENSSL_ia32cap_P(%rip),%r10d
  1556. cmp \$$speed_limit,%rdx
  1557. jb .Lcbc_slow_prologue
  1558. test \$15,%rdx
  1559. jnz .Lcbc_slow_prologue
  1560. bt \$28,%r10d
  1561. jc .Lcbc_slow_prologue
  1562. # allocate aligned stack frame...
  1563. lea -88-248(%rsp),$key
  1564. and \$-64,$key
  1565. # ... and make sure it doesn't alias with AES_T[ed] modulo 4096
  1566. mov $sbox,%r10
  1567. lea 2304($sbox),%r11
  1568. mov $key,%r12
  1569. and \$0xFFF,%r10 # s = $sbox&0xfff
  1570. and \$0xFFF,%r11 # e = ($sbox+2048)&0xfff
  1571. and \$0xFFF,%r12 # p = %rsp&0xfff
  1572. cmp %r11,%r12 # if (p=>e) %rsp =- (p-e);
  1573. jb .Lcbc_te_break_out
  1574. sub %r11,%r12
  1575. sub %r12,$key
  1576. jmp .Lcbc_te_ok
  1577. .Lcbc_te_break_out: # else %rsp -= (p-s)&0xfff + framesz
  1578. sub %r10,%r12
  1579. and \$0xFFF,%r12
  1580. add \$320,%r12
  1581. sub %r12,$key
  1582. .align 4
  1583. .Lcbc_te_ok:
  1584. xchg %rsp,$key
  1585. .cfi_def_cfa_register $key
  1586. #add \$8,%rsp # reserve for return address!
  1587. mov $key,$_rsp # save %rsp
  1588. .cfi_cfa_expression $_rsp,deref,+64
  1589. .Lcbc_fast_body:
  1590. mov %rdi,$_inp # save copy of inp
  1591. mov %rsi,$_out # save copy of out
  1592. mov %rdx,$_len # save copy of len
  1593. mov %rcx,$_key # save copy of key
  1594. mov %r8,$_ivp # save copy of ivp
  1595. movl \$0,$mark # copy of aes_key->rounds = 0;
  1596. mov %r8,%rbp # rearrange input arguments
  1597. mov %r9,%rbx
  1598. mov %rsi,$out
  1599. mov %rdi,$inp
  1600. mov %rcx,$key
  1601. mov 240($key),%eax # key->rounds
  1602. # do we copy key schedule to stack?
  1603. mov $key,%r10
  1604. sub $sbox,%r10
  1605. and \$0xfff,%r10
  1606. cmp \$2304,%r10
  1607. jb .Lcbc_do_ecopy
  1608. cmp \$4096-248,%r10
  1609. jb .Lcbc_skip_ecopy
  1610. .align 4
  1611. .Lcbc_do_ecopy:
  1612. mov $key,%rsi
  1613. lea $aes_key,%rdi
  1614. lea $aes_key,$key
  1615. mov \$240/8,%ecx
  1616. .long 0x90A548F3 # rep movsq
  1617. mov %eax,(%rdi) # copy aes_key->rounds
  1618. .Lcbc_skip_ecopy:
  1619. mov $key,$keyp # save key pointer
  1620. mov \$18,%ecx
  1621. .align 4
  1622. .Lcbc_prefetch_te:
  1623. mov 0($sbox),%r10
  1624. mov 32($sbox),%r11
  1625. mov 64($sbox),%r12
  1626. mov 96($sbox),%r13
  1627. lea 128($sbox),$sbox
  1628. sub \$1,%ecx
  1629. jnz .Lcbc_prefetch_te
  1630. lea -2304($sbox),$sbox
  1631. cmp \$0,%rbx
  1632. je .LFAST_DECRYPT
  1633. #----------------------------- ENCRYPT -----------------------------#
  1634. mov 0(%rbp),$s0 # load iv
  1635. mov 4(%rbp),$s1
  1636. mov 8(%rbp),$s2
  1637. mov 12(%rbp),$s3
  1638. .align 4
  1639. .Lcbc_fast_enc_loop:
  1640. xor 0($inp),$s0
  1641. xor 4($inp),$s1
  1642. xor 8($inp),$s2
  1643. xor 12($inp),$s3
  1644. mov $keyp,$key # restore key
  1645. mov $inp,$_inp # if ($verticalspin) save inp
  1646. call _x86_64_AES_encrypt
  1647. mov $_inp,$inp # if ($verticalspin) restore inp
  1648. mov $_len,%r10
  1649. mov $s0,0($out)
  1650. mov $s1,4($out)
  1651. mov $s2,8($out)
  1652. mov $s3,12($out)
  1653. lea 16($inp),$inp
  1654. lea 16($out),$out
  1655. sub \$16,%r10
  1656. test \$-16,%r10
  1657. mov %r10,$_len
  1658. jnz .Lcbc_fast_enc_loop
  1659. mov $_ivp,%rbp # restore ivp
  1660. mov $s0,0(%rbp) # save ivec
  1661. mov $s1,4(%rbp)
  1662. mov $s2,8(%rbp)
  1663. mov $s3,12(%rbp)
  1664. jmp .Lcbc_fast_cleanup
  1665. #----------------------------- DECRYPT -----------------------------#
  1666. .align 16
  1667. .LFAST_DECRYPT:
  1668. cmp $inp,$out
  1669. je .Lcbc_fast_dec_in_place
  1670. mov %rbp,$ivec
  1671. .align 4
  1672. .Lcbc_fast_dec_loop:
  1673. mov 0($inp),$s0 # read input
  1674. mov 4($inp),$s1
  1675. mov 8($inp),$s2
  1676. mov 12($inp),$s3
  1677. mov $keyp,$key # restore key
  1678. mov $inp,$_inp # if ($verticalspin) save inp
  1679. call _x86_64_AES_decrypt
  1680. mov $ivec,%rbp # load ivp
  1681. mov $_inp,$inp # if ($verticalspin) restore inp
  1682. mov $_len,%r10 # load len
  1683. xor 0(%rbp),$s0 # xor iv
  1684. xor 4(%rbp),$s1
  1685. xor 8(%rbp),$s2
  1686. xor 12(%rbp),$s3
  1687. mov $inp,%rbp # current input, next iv
  1688. sub \$16,%r10
  1689. mov %r10,$_len # update len
  1690. mov %rbp,$ivec # update ivp
  1691. mov $s0,0($out) # write output
  1692. mov $s1,4($out)
  1693. mov $s2,8($out)
  1694. mov $s3,12($out)
  1695. lea 16($inp),$inp
  1696. lea 16($out),$out
  1697. jnz .Lcbc_fast_dec_loop
  1698. mov $_ivp,%r12 # load user ivp
  1699. mov 0(%rbp),%r10 # load iv
  1700. mov 8(%rbp),%r11
  1701. mov %r10,0(%r12) # copy back to user
  1702. mov %r11,8(%r12)
  1703. jmp .Lcbc_fast_cleanup
  1704. .align 16
  1705. .Lcbc_fast_dec_in_place:
  1706. mov 0(%rbp),%r10 # copy iv to stack
  1707. mov 8(%rbp),%r11
  1708. mov %r10,0+$ivec
  1709. mov %r11,8+$ivec
  1710. .align 4
  1711. .Lcbc_fast_dec_in_place_loop:
  1712. mov 0($inp),$s0 # load input
  1713. mov 4($inp),$s1
  1714. mov 8($inp),$s2
  1715. mov 12($inp),$s3
  1716. mov $keyp,$key # restore key
  1717. mov $inp,$_inp # if ($verticalspin) save inp
  1718. call _x86_64_AES_decrypt
  1719. mov $_inp,$inp # if ($verticalspin) restore inp
  1720. mov $_len,%r10
  1721. xor 0+$ivec,$s0
  1722. xor 4+$ivec,$s1
  1723. xor 8+$ivec,$s2
  1724. xor 12+$ivec,$s3
  1725. mov 0($inp),%r11 # load input
  1726. mov 8($inp),%r12
  1727. sub \$16,%r10
  1728. jz .Lcbc_fast_dec_in_place_done
  1729. mov %r11,0+$ivec # copy input to iv
  1730. mov %r12,8+$ivec
  1731. mov $s0,0($out) # save output [zaps input]
  1732. mov $s1,4($out)
  1733. mov $s2,8($out)
  1734. mov $s3,12($out)
  1735. lea 16($inp),$inp
  1736. lea 16($out),$out
  1737. mov %r10,$_len
  1738. jmp .Lcbc_fast_dec_in_place_loop
  1739. .Lcbc_fast_dec_in_place_done:
  1740. mov $_ivp,%rdi
  1741. mov %r11,0(%rdi) # copy iv back to user
  1742. mov %r12,8(%rdi)
  1743. mov $s0,0($out) # save output [zaps input]
  1744. mov $s1,4($out)
  1745. mov $s2,8($out)
  1746. mov $s3,12($out)
  1747. .align 4
  1748. .Lcbc_fast_cleanup:
  1749. cmpl \$0,$mark # was the key schedule copied?
  1750. lea $aes_key,%rdi
  1751. je .Lcbc_exit
  1752. mov \$240/8,%ecx
  1753. xor %rax,%rax
  1754. .long 0x90AB48F3 # rep stosq
  1755. jmp .Lcbc_exit
  1756. #--------------------------- SLOW ROUTINE ---------------------------#
  1757. .align 16
  1758. .Lcbc_slow_prologue:
  1759. # allocate aligned stack frame...
  1760. lea -88(%rsp),%rbp
  1761. and \$-64,%rbp
  1762. # ... just "above" key schedule
  1763. lea -88-63(%rcx),%r10
  1764. sub %rbp,%r10
  1765. neg %r10
  1766. and \$0x3c0,%r10
  1767. sub %r10,%rbp
  1768. xchg %rsp,%rbp
  1769. #add \$8,%rsp # reserve for return address!
  1770. mov %rbp,$_rsp # save %rsp
  1771. .Lcbc_slow_body:
  1772. #mov %rdi,$_inp # save copy of inp
  1773. #mov %rsi,$_out # save copy of out
  1774. #mov %rdx,$_len # save copy of len
  1775. #mov %rcx,$_key # save copy of key
  1776. mov %r8,$_ivp # save copy of ivp
  1777. mov %r8,%rbp # rearrange input arguments
  1778. mov %r9,%rbx
  1779. mov %rsi,$out
  1780. mov %rdi,$inp
  1781. mov %rcx,$key
  1782. mov %rdx,%r10
  1783. mov 240($key),%eax
  1784. mov $key,$keyp # save key pointer
  1785. shl \$4,%eax
  1786. lea ($key,%rax),%rax
  1787. mov %rax,$keyend
  1788. # pick Te4 copy which can't "overlap" with stack frame or key schedule
  1789. lea 2048($sbox),$sbox
  1790. lea 768-8(%rsp),%rax
  1791. sub $sbox,%rax
  1792. and \$0x300,%rax
  1793. lea ($sbox,%rax),$sbox
  1794. cmp \$0,%rbx
  1795. je .LSLOW_DECRYPT
  1796. #--------------------------- SLOW ENCRYPT ---------------------------#
  1797. test \$-16,%r10 # check upon length
  1798. mov 0(%rbp),$s0 # load iv
  1799. mov 4(%rbp),$s1
  1800. mov 8(%rbp),$s2
  1801. mov 12(%rbp),$s3
  1802. jz .Lcbc_slow_enc_tail # short input...
  1803. .align 4
  1804. .Lcbc_slow_enc_loop:
  1805. xor 0($inp),$s0
  1806. xor 4($inp),$s1
  1807. xor 8($inp),$s2
  1808. xor 12($inp),$s3
  1809. mov $keyp,$key # restore key
  1810. mov $inp,$_inp # save inp
  1811. mov $out,$_out # save out
  1812. mov %r10,$_len # save len
  1813. call _x86_64_AES_encrypt_compact
  1814. mov $_inp,$inp # restore inp
  1815. mov $_out,$out # restore out
  1816. mov $_len,%r10 # restore len
  1817. mov $s0,0($out)
  1818. mov $s1,4($out)
  1819. mov $s2,8($out)
  1820. mov $s3,12($out)
  1821. lea 16($inp),$inp
  1822. lea 16($out),$out
  1823. sub \$16,%r10
  1824. test \$-16,%r10
  1825. jnz .Lcbc_slow_enc_loop
  1826. test \$15,%r10
  1827. jnz .Lcbc_slow_enc_tail
  1828. mov $_ivp,%rbp # restore ivp
  1829. mov $s0,0(%rbp) # save ivec
  1830. mov $s1,4(%rbp)
  1831. mov $s2,8(%rbp)
  1832. mov $s3,12(%rbp)
  1833. jmp .Lcbc_exit
  1834. .align 4
  1835. .Lcbc_slow_enc_tail:
  1836. mov %rax,%r11
  1837. mov %rcx,%r12
  1838. mov %r10,%rcx
  1839. mov $inp,%rsi
  1840. mov $out,%rdi
  1841. .long 0x9066A4F3 # rep movsb
  1842. mov \$16,%rcx # zero tail
  1843. sub %r10,%rcx
  1844. xor %rax,%rax
  1845. .long 0x9066AAF3 # rep stosb
  1846. mov $out,$inp # this is not a mistake!
  1847. mov \$16,%r10 # len=16
  1848. mov %r11,%rax
  1849. mov %r12,%rcx
  1850. jmp .Lcbc_slow_enc_loop # one more spin...
  1851. #--------------------------- SLOW DECRYPT ---------------------------#
  1852. .align 16
  1853. .LSLOW_DECRYPT:
  1854. shr \$3,%rax
  1855. add %rax,$sbox # recall "magic" constants!
  1856. mov 0(%rbp),%r11 # copy iv to stack
  1857. mov 8(%rbp),%r12
  1858. mov %r11,0+$ivec
  1859. mov %r12,8+$ivec
  1860. .align 4
  1861. .Lcbc_slow_dec_loop:
  1862. mov 0($inp),$s0 # load input
  1863. mov 4($inp),$s1
  1864. mov 8($inp),$s2
  1865. mov 12($inp),$s3
  1866. mov $keyp,$key # restore key
  1867. mov $inp,$_inp # save inp
  1868. mov $out,$_out # save out
  1869. mov %r10,$_len # save len
  1870. call _x86_64_AES_decrypt_compact
  1871. mov $_inp,$inp # restore inp
  1872. mov $_out,$out # restore out
  1873. mov $_len,%r10
  1874. xor 0+$ivec,$s0
  1875. xor 4+$ivec,$s1
  1876. xor 8+$ivec,$s2
  1877. xor 12+$ivec,$s3
  1878. mov 0($inp),%r11 # load input
  1879. mov 8($inp),%r12
  1880. sub \$16,%r10
  1881. jc .Lcbc_slow_dec_partial
  1882. jz .Lcbc_slow_dec_done
  1883. mov %r11,0+$ivec # copy input to iv
  1884. mov %r12,8+$ivec
  1885. mov $s0,0($out) # save output [can zap input]
  1886. mov $s1,4($out)
  1887. mov $s2,8($out)
  1888. mov $s3,12($out)
  1889. lea 16($inp),$inp
  1890. lea 16($out),$out
  1891. jmp .Lcbc_slow_dec_loop
  1892. .Lcbc_slow_dec_done:
  1893. mov $_ivp,%rdi
  1894. mov %r11,0(%rdi) # copy iv back to user
  1895. mov %r12,8(%rdi)
  1896. mov $s0,0($out) # save output [can zap input]
  1897. mov $s1,4($out)
  1898. mov $s2,8($out)
  1899. mov $s3,12($out)
  1900. jmp .Lcbc_exit
  1901. .align 4
  1902. .Lcbc_slow_dec_partial:
  1903. mov $_ivp,%rdi
  1904. mov %r11,0(%rdi) # copy iv back to user
  1905. mov %r12,8(%rdi)
  1906. mov $s0,0+$ivec # save output to stack
  1907. mov $s1,4+$ivec
  1908. mov $s2,8+$ivec
  1909. mov $s3,12+$ivec
  1910. mov $out,%rdi
  1911. lea $ivec,%rsi
  1912. lea 16(%r10),%rcx
  1913. .long 0x9066A4F3 # rep movsb
  1914. jmp .Lcbc_exit
  1915. .align 16
  1916. .Lcbc_exit:
  1917. mov $_rsp,%rsi
  1918. .cfi_def_cfa %rsi,64
  1919. mov (%rsi),%r15
  1920. .cfi_restore %r15
  1921. mov 8(%rsi),%r14
  1922. .cfi_restore %r14
  1923. mov 16(%rsi),%r13
  1924. .cfi_restore %r13
  1925. mov 24(%rsi),%r12
  1926. .cfi_restore %r12
  1927. mov 32(%rsi),%rbp
  1928. .cfi_restore %rbp
  1929. mov 40(%rsi),%rbx
  1930. .cfi_restore %rbx
  1931. lea 48(%rsi),%rsp
  1932. .cfi_def_cfa %rsp,16
  1933. .Lcbc_popfq:
  1934. popfq
  1935. .cfi_pop 49 # %rflags
  1936. .Lcbc_epilogue:
  1937. ret
  1938. .cfi_endproc
  1939. .size AES_cbc_encrypt,.-AES_cbc_encrypt
  1940. ___
  1941. }
# .LAES_Te: the forward AES "T-table", 256 32-bit entries (64 calls of
# four words each), 64-byte aligned.  These are the standard Te0 values
# (SubBytes combined with the MixColumns multiplication, one word per
# input byte).  NOTE(review): the round code that indexes this table
# lies earlier in the file; the Te4-copy selection visible above
# (lea 2048($sbox) / and \$0x300) skips past these 2048 bytes to reach
# the S-box copies that follow.
$code.=<<___;
.align 64
.LAES_Te:
___
&_data_word(0xa56363c6, 0x847c7cf8, 0x997777ee, 0x8d7b7bf6);
&_data_word(0x0df2f2ff, 0xbd6b6bd6, 0xb16f6fde, 0x54c5c591);
&_data_word(0x50303060, 0x03010102, 0xa96767ce, 0x7d2b2b56);
&_data_word(0x19fefee7, 0x62d7d7b5, 0xe6abab4d, 0x9a7676ec);
&_data_word(0x45caca8f, 0x9d82821f, 0x40c9c989, 0x877d7dfa);
&_data_word(0x15fafaef, 0xeb5959b2, 0xc947478e, 0x0bf0f0fb);
&_data_word(0xecadad41, 0x67d4d4b3, 0xfda2a25f, 0xeaafaf45);
&_data_word(0xbf9c9c23, 0xf7a4a453, 0x967272e4, 0x5bc0c09b);
&_data_word(0xc2b7b775, 0x1cfdfde1, 0xae93933d, 0x6a26264c);
&_data_word(0x5a36366c, 0x413f3f7e, 0x02f7f7f5, 0x4fcccc83);
&_data_word(0x5c343468, 0xf4a5a551, 0x34e5e5d1, 0x08f1f1f9);
&_data_word(0x937171e2, 0x73d8d8ab, 0x53313162, 0x3f15152a);
&_data_word(0x0c040408, 0x52c7c795, 0x65232346, 0x5ec3c39d);
&_data_word(0x28181830, 0xa1969637, 0x0f05050a, 0xb59a9a2f);
&_data_word(0x0907070e, 0x36121224, 0x9b80801b, 0x3de2e2df);
&_data_word(0x26ebebcd, 0x6927274e, 0xcdb2b27f, 0x9f7575ea);
&_data_word(0x1b090912, 0x9e83831d, 0x742c2c58, 0x2e1a1a34);
&_data_word(0x2d1b1b36, 0xb26e6edc, 0xee5a5ab4, 0xfba0a05b);
&_data_word(0xf65252a4, 0x4d3b3b76, 0x61d6d6b7, 0xceb3b37d);
&_data_word(0x7b292952, 0x3ee3e3dd, 0x712f2f5e, 0x97848413);
&_data_word(0xf55353a6, 0x68d1d1b9, 0x00000000, 0x2cededc1);
&_data_word(0x60202040, 0x1ffcfce3, 0xc8b1b179, 0xed5b5bb6);
&_data_word(0xbe6a6ad4, 0x46cbcb8d, 0xd9bebe67, 0x4b393972);
&_data_word(0xde4a4a94, 0xd44c4c98, 0xe85858b0, 0x4acfcf85);
&_data_word(0x6bd0d0bb, 0x2aefefc5, 0xe5aaaa4f, 0x16fbfbed);
&_data_word(0xc5434386, 0xd74d4d9a, 0x55333366, 0x94858511);
&_data_word(0xcf45458a, 0x10f9f9e9, 0x06020204, 0x817f7ffe);
&_data_word(0xf05050a0, 0x443c3c78, 0xba9f9f25, 0xe3a8a84b);
&_data_word(0xf35151a2, 0xfea3a35d, 0xc0404080, 0x8a8f8f05);
&_data_word(0xad92923f, 0xbc9d9d21, 0x48383870, 0x04f5f5f1);
&_data_word(0xdfbcbc63, 0xc1b6b677, 0x75dadaaf, 0x63212142);
&_data_word(0x30101020, 0x1affffe5, 0x0ef3f3fd, 0x6dd2d2bf);
&_data_word(0x4ccdcd81, 0x140c0c18, 0x35131326, 0x2fececc3);
&_data_word(0xe15f5fbe, 0xa2979735, 0xcc444488, 0x3917172e);
&_data_word(0x57c4c493, 0xf2a7a755, 0x827e7efc, 0x473d3d7a);
&_data_word(0xac6464c8, 0xe75d5dba, 0x2b191932, 0x957373e6);
&_data_word(0xa06060c0, 0x98818119, 0xd14f4f9e, 0x7fdcdca3);
&_data_word(0x66222244, 0x7e2a2a54, 0xab90903b, 0x8388880b);
&_data_word(0xca46468c, 0x29eeeec7, 0xd3b8b86b, 0x3c141428);
&_data_word(0x79dedea7, 0xe25e5ebc, 0x1d0b0b16, 0x76dbdbad);
&_data_word(0x3be0e0db, 0x56323264, 0x4e3a3a74, 0x1e0a0a14);
&_data_word(0xdb494992, 0x0a06060c, 0x6c242448, 0xe45c5cb8);
&_data_word(0x5dc2c29f, 0x6ed3d3bd, 0xefacac43, 0xa66262c4);
&_data_word(0xa8919139, 0xa4959531, 0x37e4e4d3, 0x8b7979f2);
&_data_word(0x32e7e7d5, 0x43c8c88b, 0x5937376e, 0xb76d6dda);
&_data_word(0x8c8d8d01, 0x64d5d5b1, 0xd24e4e9c, 0xe0a9a949);
&_data_word(0xb46c6cd8, 0xfa5656ac, 0x07f4f4f3, 0x25eaeacf);
&_data_word(0xaf6565ca, 0x8e7a7af4, 0xe9aeae47, 0x18080810);
&_data_word(0xd5baba6f, 0x887878f0, 0x6f25254a, 0x722e2e5c);
&_data_word(0x241c1c38, 0xf1a6a657, 0xc7b4b473, 0x51c6c697);
&_data_word(0x23e8e8cb, 0x7cdddda1, 0x9c7474e8, 0x211f1f3e);
&_data_word(0xdd4b4b96, 0xdcbdbd61, 0x868b8b0d, 0x858a8a0f);
&_data_word(0x907070e0, 0x423e3e7c, 0xc4b5b571, 0xaa6666cc);
&_data_word(0xd8484890, 0x05030306, 0x01f6f6f7, 0x120e0e1c);
&_data_word(0xa36161c2, 0x5f35356a, 0xf95757ae, 0xd0b9b969);
&_data_word(0x91868617, 0x58c1c199, 0x271d1d3a, 0xb99e9e27);
&_data_word(0x38e1e1d9, 0x13f8f8eb, 0xb398982b, 0x33111122);
&_data_word(0xbb6969d2, 0x70d9d9a9, 0x898e8e07, 0xa7949433);
&_data_word(0xb69b9b2d, 0x221e1e3c, 0x92878715, 0x20e9e9c9);
&_data_word(0x49cece87, 0xff5555aa, 0x78282850, 0x7adfdfa5);
&_data_word(0x8f8c8c03, 0xf8a1a159, 0x80898909, 0x170d0d1a);
&_data_word(0xdabfbf65, 0x31e6e6d7, 0xc6424284, 0xb86868d0);
&_data_word(0xc3414182, 0xb0999929, 0x772d2d5a, 0x110f0f1e);
&_data_word(0xcbb0b07b, 0xfc5454a8, 0xd6bbbb6d, 0x3a16162c);
  2010. #Te4 # four copies of Te4 to choose from to avoid L1 aliasing
  2011. &data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
  2012. &data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
  2013. &data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
  2014. &data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
  2015. &data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
  2016. &data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
  2017. &data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
  2018. &data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
  2019. &data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
  2020. &data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
  2021. &data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
  2022. &data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
  2023. &data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
  2024. &data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
  2025. &data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
  2026. &data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
  2027. &data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
  2028. &data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
  2029. &data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
  2030. &data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
  2031. &data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
  2032. &data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
  2033. &data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
  2034. &data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
  2035. &data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
  2036. &data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
  2037. &data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
  2038. &data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
  2039. &data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
  2040. &data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
  2041. &data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
  2042. &data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
  2043. &data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
  2044. &data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
  2045. &data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
  2046. &data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
  2047. &data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
  2048. &data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
  2049. &data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
  2050. &data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
  2051. &data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
  2052. &data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
  2053. &data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
  2054. &data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
  2055. &data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
  2056. &data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
  2057. &data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
  2058. &data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
  2059. &data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
  2060. &data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
  2061. &data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
  2062. &data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
  2063. &data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
  2064. &data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
  2065. &data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
  2066. &data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
  2067. &data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
  2068. &data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
  2069. &data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
  2070. &data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
  2071. &data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
  2072. &data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
  2073. &data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
  2074. &data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
  2075. &data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
  2076. &data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
  2077. &data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
  2078. &data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
  2079. &data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
  2080. &data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
  2081. &data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
  2082. &data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
  2083. &data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
  2084. &data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
  2085. &data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
  2086. &data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
  2087. &data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
  2088. &data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
  2089. &data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
  2090. &data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
  2091. &data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
  2092. &data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
  2093. &data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
  2094. &data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
  2095. &data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
  2096. &data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
  2097. &data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
  2098. &data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
  2099. &data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
  2100. &data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
  2101. &data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
  2102. &data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
  2103. &data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
  2104. &data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
  2105. &data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
  2106. &data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
  2107. &data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
  2108. &data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
  2109. &data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
  2110. &data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
  2111. &data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
  2112. &data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
  2113. &data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
  2114. &data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
  2115. &data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
  2116. &data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
  2117. &data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
  2118. &data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
  2119. &data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
  2120. &data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
  2121. &data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
  2122. &data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
  2123. &data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
  2124. &data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
  2125. &data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
  2126. &data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
  2127. &data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
  2128. &data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
  2129. &data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
  2130. &data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
  2131. &data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
  2132. &data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
  2133. &data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
  2134. &data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
  2135. &data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
  2136. &data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
  2137. &data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
  2138. &data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
#rcon:
# Key-schedule round constants: 0x01..0x80,0x1b,0x36 are the ten rcon
# values for AES key expansion, followed by 0x80808080/0xfefefefe/
# 0x1b1b1b1b word pairs — presumably the high-bit/carry masks for the
# SIMD xtime (GF(2^8) doubling) trick; the code that loads these words
# is outside this chunk — TODO confirm against the key-setup routine
# earlier in the file.
$code.=<<___;
.long 0x00000001, 0x00000002, 0x00000004, 0x00000008
.long 0x00000010, 0x00000020, 0x00000040, 0x00000080
.long 0x0000001b, 0x00000036, 0x80808080, 0x80808080
.long 0xfefefefe, 0xfefefefe, 0x1b1b1b1b, 0x1b1b1b1b
___
# .LAES_Td: the AES decrypt "T-table" (inverse S-box combined with the
# InvMixColumns multiplication), 256 32-bit entries, 64-byte aligned.
# Laid out like .LAES_Te above.  NOTE(review): the decrypt rounds that
# index it are earlier in the file; .LSLOW_DECRYPT above adjusts $sbox
# by a key-length-derived offset ("recall magic constants") to reach
# data interleaved with the Td4 copies that follow.
$code.=<<___;
.align 64
.LAES_Td:
___
&_data_word(0x50a7f451, 0x5365417e, 0xc3a4171a, 0x965e273a);
&_data_word(0xcb6bab3b, 0xf1459d1f, 0xab58faac, 0x9303e34b);
&_data_word(0x55fa3020, 0xf66d76ad, 0x9176cc88, 0x254c02f5);
&_data_word(0xfcd7e54f, 0xd7cb2ac5, 0x80443526, 0x8fa362b5);
&_data_word(0x495ab1de, 0x671bba25, 0x980eea45, 0xe1c0fe5d);
&_data_word(0x02752fc3, 0x12f04c81, 0xa397468d, 0xc6f9d36b);
&_data_word(0xe75f8f03, 0x959c9215, 0xeb7a6dbf, 0xda595295);
&_data_word(0x2d83bed4, 0xd3217458, 0x2969e049, 0x44c8c98e);
&_data_word(0x6a89c275, 0x78798ef4, 0x6b3e5899, 0xdd71b927);
&_data_word(0xb64fe1be, 0x17ad88f0, 0x66ac20c9, 0xb43ace7d);
&_data_word(0x184adf63, 0x82311ae5, 0x60335197, 0x457f5362);
&_data_word(0xe07764b1, 0x84ae6bbb, 0x1ca081fe, 0x942b08f9);
&_data_word(0x58684870, 0x19fd458f, 0x876cde94, 0xb7f87b52);
&_data_word(0x23d373ab, 0xe2024b72, 0x578f1fe3, 0x2aab5566);
&_data_word(0x0728ebb2, 0x03c2b52f, 0x9a7bc586, 0xa50837d3);
&_data_word(0xf2872830, 0xb2a5bf23, 0xba6a0302, 0x5c8216ed);
&_data_word(0x2b1ccf8a, 0x92b479a7, 0xf0f207f3, 0xa1e2694e);
&_data_word(0xcdf4da65, 0xd5be0506, 0x1f6234d1, 0x8afea6c4);
&_data_word(0x9d532e34, 0xa055f3a2, 0x32e18a05, 0x75ebf6a4);
&_data_word(0x39ec830b, 0xaaef6040, 0x069f715e, 0x51106ebd);
&_data_word(0xf98a213e, 0x3d06dd96, 0xae053edd, 0x46bde64d);
&_data_word(0xb58d5491, 0x055dc471, 0x6fd40604, 0xff155060);
&_data_word(0x24fb9819, 0x97e9bdd6, 0xcc434089, 0x779ed967);
&_data_word(0xbd42e8b0, 0x888b8907, 0x385b19e7, 0xdbeec879);
&_data_word(0x470a7ca1, 0xe90f427c, 0xc91e84f8, 0x00000000);
&_data_word(0x83868009, 0x48ed2b32, 0xac70111e, 0x4e725a6c);
&_data_word(0xfbff0efd, 0x5638850f, 0x1ed5ae3d, 0x27392d36);
&_data_word(0x64d90f0a, 0x21a65c68, 0xd1545b9b, 0x3a2e3624);
&_data_word(0xb1670a0c, 0x0fe75793, 0xd296eeb4, 0x9e919b1b);
&_data_word(0x4fc5c080, 0xa220dc61, 0x694b775a, 0x161a121c);
&_data_word(0x0aba93e2, 0xe52aa0c0, 0x43e0223c, 0x1d171b12);
&_data_word(0x0b0d090e, 0xadc78bf2, 0xb9a8b62d, 0xc8a91e14);
&_data_word(0x8519f157, 0x4c0775af, 0xbbdd99ee, 0xfd607fa3);
&_data_word(0x9f2601f7, 0xbcf5725c, 0xc53b6644, 0x347efb5b);
&_data_word(0x7629438b, 0xdcc623cb, 0x68fcedb6, 0x63f1e4b8);
&_data_word(0xcadc31d7, 0x10856342, 0x40229713, 0x2011c684);
&_data_word(0x7d244a85, 0xf83dbbd2, 0x1132f9ae, 0x6da129c7);
&_data_word(0x4b2f9e1d, 0xf330b2dc, 0xec52860d, 0xd0e3c177);
&_data_word(0x6c16b32b, 0x99b970a9, 0xfa489411, 0x2264e947);
&_data_word(0xc48cfca8, 0x1a3ff0a0, 0xd82c7d56, 0xef903322);
&_data_word(0xc74e4987, 0xc1d138d9, 0xfea2ca8c, 0x360bd498);
&_data_word(0xcf81f5a6, 0x28de7aa5, 0x268eb7da, 0xa4bfad3f);
&_data_word(0xe49d3a2c, 0x0d927850, 0x9bcc5f6a, 0x62467e54);
&_data_word(0xc2138df6, 0xe8b8d890, 0x5ef7392e, 0xf5afc382);
&_data_word(0xbe805d9f, 0x7c93d069, 0xa92dd56f, 0xb31225cf);
&_data_word(0x3b99acc8, 0xa77d1810, 0x6e639ce8, 0x7bbb3bdb);
&_data_word(0x097826cd, 0xf418596e, 0x01b79aec, 0xa89a4f83);
&_data_word(0x656e95e6, 0x7ee6ffaa, 0x08cfbc21, 0xe6e815ef);
&_data_word(0xd99be7ba, 0xce366f4a, 0xd4099fea, 0xd67cb029);
&_data_word(0xafb2a431, 0x31233f2a, 0x3094a5c6, 0xc066a235);
&_data_word(0x37bc4e74, 0xa6ca82fc, 0xb0d090e0, 0x15d8a733);
&_data_word(0x4a9804f1, 0xf7daec41, 0x0e50cd7f, 0x2ff69117);
&_data_word(0x8dd64d76, 0x4db0ef43, 0x544daacc, 0xdf0496e4);
&_data_word(0xe3b5d19e, 0x1b886a4c, 0xb81f2cc1, 0x7f516546);
&_data_word(0x04ea5e9d, 0x5d358c01, 0x737487fa, 0x2e410bfb);
&_data_word(0x5a1d67b3, 0x52d2db92, 0x335610e9, 0x1347d66d);
&_data_word(0x8c61d79a, 0x7a0ca137, 0x8e14f859, 0x893c13eb);
&_data_word(0xee27a9ce, 0x35c961b7, 0xede51ce1, 0x3cb1477a);
&_data_word(0x59dfd29c, 0x3f73f255, 0x79ce1418, 0xbf37c773);
&_data_word(0xeacdf753, 0x5baafd5f, 0x146f3ddf, 0x86db4478);
&_data_word(0x81f3afca, 0x3ec468b9, 0x2c342438, 0x5f40a3c2);
&_data_word(0x72c31d16, 0x0c25e2bc, 0x8b493c28, 0x41950dff);
&_data_word(0x7101a839, 0xdeb30c08, 0x9ce4b4d8, 0x90c15664);
&_data_word(0x6184cb7b, 0x70b632d5, 0x745c6c48, 0x4257b8d0);
  2214. #Td4: # four copies of Td4 to choose from to avoid L1 aliasing
  2215. &data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
  2216. &data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
  2217. &data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
  2218. &data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
  2219. &data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
  2220. &data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
  2221. &data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
  2222. &data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
  2223. &data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
  2224. &data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
  2225. &data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
  2226. &data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
  2227. &data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
  2228. &data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
  2229. &data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
  2230. &data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
  2231. &data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
  2232. &data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
  2233. &data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
  2234. &data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
  2235. &data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
  2236. &data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
  2237. &data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
  2238. &data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
  2239. &data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
  2240. &data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
  2241. &data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
  2242. &data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
  2243. &data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
  2244. &data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
  2245. &data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
  2246. &data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
  2247. $code.=<<___;
  2248. .long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
  2249. .long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
  2250. ___
  2251. &data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
  2252. &data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
  2253. &data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
  2254. &data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
  2255. &data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
  2256. &data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
  2257. &data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
  2258. &data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
  2259. &data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
  2260. &data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
  2261. &data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
  2262. &data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
  2263. &data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
  2264. &data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
  2265. &data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
  2266. &data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
  2267. &data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
  2268. &data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
  2269. &data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
  2270. &data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
  2271. &data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
  2272. &data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
  2273. &data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
  2274. &data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
  2275. &data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
  2276. &data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
  2277. &data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
  2278. &data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
  2279. &data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
  2280. &data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
  2281. &data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
  2282. &data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
  2283. $code.=<<___;
  2284. .long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
  2285. .long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
  2286. ___
  2287. &data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
  2288. &data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
  2289. &data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
  2290. &data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
  2291. &data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
  2292. &data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
  2293. &data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
  2294. &data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
  2295. &data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
  2296. &data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
  2297. &data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
  2298. &data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
  2299. &data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
  2300. &data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
  2301. &data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
  2302. &data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
  2303. &data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
  2304. &data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
  2305. &data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
  2306. &data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
  2307. &data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
  2308. &data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
  2309. &data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
  2310. &data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
  2311. &data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
  2312. &data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
  2313. &data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
  2314. &data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
  2315. &data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
  2316. &data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
  2317. &data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
  2318. &data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
  2319. $code.=<<___;
  2320. .long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
  2321. .long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
  2322. ___
  2323. &data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
  2324. &data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
  2325. &data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
  2326. &data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
  2327. &data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
  2328. &data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
  2329. &data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
  2330. &data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
  2331. &data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
  2332. &data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
  2333. &data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
  2334. &data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
  2335. &data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
  2336. &data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
  2337. &data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
  2338. &data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
  2339. &data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
  2340. &data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
  2341. &data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
  2342. &data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
  2343. &data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
  2344. &data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
  2345. &data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
  2346. &data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
  2347. &data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
  2348. &data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
  2349. &data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
  2350. &data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
  2351. &data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
  2352. &data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
  2353. &data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
  2354. &data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
  2355. $code.=<<___;
  2356. .long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
  2357. .long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
  2358. .asciz "AES for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
  2359. .align 64
  2360. ___
# EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
# CONTEXT *context,DISPATCHER_CONTEXT *disp)
#
# Win64-only structured-exception-handling (SEH) support.  Each handler
# below is invoked by the OS dispatcher when an exception occurs inside
# one of the AES entry points; its job is to locate the saved
# non-volatile registers on the stack and patch them back into the
# CONTEXT record so the unwinder can step past these hand-written
# frames.
if ($win64) {
# Microsoft x64 argument registers, named for readability in the asm
# heredocs below.
$rec="%rcx";
$frame="%rdx";
$context="%r8";
$disp="%r9";
$code.=<<___;
.extern __imp_RtlVirtualUnwind
.type block_se_handler,\@abi-omnipotent
.align 16
# Handler for AES_encrypt/AES_decrypt.  HandlerData holds the RVAs of
# the prologue and epilogue labels (see the .xdata entries below); if
# Rip is outside that range the frame is not yet, or no longer, set up
# and only Rsp/Rsi/Rdi need restoring.
block_se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp # outgoing argument area for RtlVirtualUnwind
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
mov 8($disp),%rsi # disp->ImageBase
mov 56($disp),%r11 # disp->HandlerData
mov 0(%r11),%r10d # HandlerData[0]
lea (%rsi,%r10),%r10 # prologue label
cmp %r10,%rbx # context->Rip<prologue label
jb .Lin_block_prologue
mov 152($context),%rax # pull context->Rsp
mov 4(%r11),%r10d # HandlerData[1]
lea (%rsi,%r10),%r10 # epilogue label
cmp %r10,%rbx # context->Rip>=epilogue label
jae .Lin_block_prologue
mov 24(%rax),%rax # pull saved real stack pointer
# the six non-volatile registers sit immediately below the saved
# real stack pointer; copy them back into the CONTEXT record
mov -8(%rax),%rbx
mov -16(%rax),%rbp
mov -24(%rax),%r12
mov -32(%rax),%r13
mov -40(%rax),%r14
mov -48(%rax),%r15
mov %rbx,144($context) # restore context->Rbx
mov %rbp,160($context) # restore context->Rbp
mov %r12,216($context) # restore context->R12
mov %r13,224($context) # restore context->R13
mov %r14,232($context) # restore context->R14
mov %r15,240($context) # restore context->R15
.Lin_block_prologue:
mov 8(%rax),%rdi
mov 16(%rax),%rsi
mov %rax,152($context) # restore context->Rsp
mov %rsi,168($context) # restore context->Rsi
mov %rdi,176($context) # restore context->Rdi
jmp .Lcommon_seh_exit
.size block_se_handler,.-block_se_handler
.type key_se_handler,\@abi-omnipotent
.align 16
# Handler for AES_set_encrypt_key/AES_set_decrypt_key.  Same shape as
# block_se_handler, but the key-schedule routines use a fixed-size
# frame: the six saved registers sit directly below rsp+56, so rax is
# biased by 56 and the same negative-offset loads are reused.
key_se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp # outgoing argument area for RtlVirtualUnwind
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
mov 8($disp),%rsi # disp->ImageBase
mov 56($disp),%r11 # disp->HandlerData
mov 0(%r11),%r10d # HandlerData[0]
lea (%rsi,%r10),%r10 # prologue label
cmp %r10,%rbx # context->Rip<prologue label
jb .Lin_key_prologue
mov 152($context),%rax # pull context->Rsp
mov 4(%r11),%r10d # HandlerData[1]
lea (%rsi,%r10),%r10 # epilogue label
cmp %r10,%rbx # context->Rip>=epilogue label
jae .Lin_key_prologue
lea 56(%rax),%rax # point just past the register save area
mov -8(%rax),%rbx
mov -16(%rax),%rbp
mov -24(%rax),%r12
mov -32(%rax),%r13
mov -40(%rax),%r14
mov -48(%rax),%r15
mov %rbx,144($context) # restore context->Rbx
mov %rbp,160($context) # restore context->Rbp
mov %r12,216($context) # restore context->R12
mov %r13,224($context) # restore context->R13
mov %r14,232($context) # restore context->R14
mov %r15,240($context) # restore context->R15
.Lin_key_prologue:
mov 8(%rax),%rdi
mov 16(%rax),%rsi
mov %rax,152($context) # restore context->Rsp
mov %rsi,168($context) # restore context->Rsi
mov %rdi,176($context) # restore context->Rdi
jmp .Lcommon_seh_exit
.size key_se_handler,.-key_se_handler
.type cbc_se_handler,\@abi-omnipotent
.align 16
# Handler for AES_cbc_encrypt.  That routine has several distinct frame
# layouts (fast vs. slow paths, plus a pushfq window), so instead of a
# single HandlerData range this handler compares context->Rip against
# hard-coded labels to decide how much state needs restoring.
cbc_se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp # outgoing argument area for RtlVirtualUnwind
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
lea .Lcbc_prologue(%rip),%r10
cmp %r10,%rbx # context->Rip<.Lcbc_prologue
jb .Lin_cbc_prologue
lea .Lcbc_fast_body(%rip),%r10
cmp %r10,%rbx # context->Rip<.Lcbc_fast_body
jb .Lin_cbc_frame_setup
lea .Lcbc_slow_prologue(%rip),%r10
cmp %r10,%rbx # context->Rip<.Lcbc_slow_prologue
jb .Lin_cbc_body
lea .Lcbc_slow_body(%rip),%r10
cmp %r10,%rbx # context->Rip<.Lcbc_slow_body
jb .Lin_cbc_frame_setup
.Lin_cbc_body:
mov 152($context),%rax # pull context->Rsp
lea .Lcbc_epilogue(%rip),%r10
cmp %r10,%rbx # context->Rip>=.Lcbc_epilogue
jae .Lin_cbc_prologue
lea 8(%rax),%rax # account for flags still on the stack
lea .Lcbc_popfq(%rip),%r10
cmp %r10,%rbx # context->Rip>=.Lcbc_popfq
jae .Lin_cbc_prologue
mov `16-8`(%rax),%rax # biased $_rsp
lea 56(%rax),%rax # point just past the register save area
.Lin_cbc_frame_setup:
mov -16(%rax),%rbx
mov -24(%rax),%rbp
mov -32(%rax),%r12
mov -40(%rax),%r13
mov -48(%rax),%r14
mov -56(%rax),%r15
mov %rbx,144($context) # restore context->Rbx
mov %rbp,160($context) # restore context->Rbp
mov %r12,216($context) # restore context->R12
mov %r13,224($context) # restore context->R13
mov %r14,232($context) # restore context->R14
mov %r15,240($context) # restore context->R15
.Lin_cbc_prologue:
mov 8(%rax),%rdi
mov 16(%rax),%rsi
mov %rax,152($context) # restore context->Rsp
mov %rsi,168($context) # restore context->Rsi
mov %rdi,176($context) # restore context->Rdi
# Common tail for all three handlers: copy the patched CONTEXT over the
# dispatcher's ContextRecord, let RtlVirtualUnwind continue the unwind,
# then report ExceptionContinueSearch so dispatch proceeds normally.
.Lcommon_seh_exit:
mov 40($disp),%rdi # disp->ContextRecord
mov $context,%rsi # context
mov \$`1232/8`,%ecx # sizeof(CONTEXT)
.long 0xa548f3fc # cld; rep movsq
mov $disp,%rsi
xor %rcx,%rcx # arg1, UNW_FLAG_NHANDLER
mov 8(%rsi),%rdx # arg2, disp->ImageBase
mov 0(%rsi),%r8 # arg3, disp->ControlPc
mov 16(%rsi),%r9 # arg4, disp->FunctionEntry
mov 40(%rsi),%r10 # disp->ContextRecord
lea 56(%rsi),%r11 # &disp->HandlerData
lea 24(%rsi),%r12 # &disp->EstablisherFrame
mov %r10,32(%rsp) # arg5
mov %r11,40(%rsp) # arg6
mov %r12,48(%rsp) # arg7
mov %rcx,56(%rsp) # arg8, (NULL)
call *__imp_RtlVirtualUnwind(%rip)
mov \$1,%eax # ExceptionContinueSearch
add \$64,%rsp
popfq
pop %r15
pop %r14
pop %r13
pop %r12
pop %rbp
pop %rbx
pop %rdi
pop %rsi
ret
.size cbc_se_handler,.-cbc_se_handler
.section .pdata
.align 4
# One RUNTIME_FUNCTION entry (begin, end, unwind-info RVAs) per public
# AES entry point, so the OS dispatcher can find the handlers above.
.rva .LSEH_begin_AES_encrypt
.rva .LSEH_end_AES_encrypt
.rva .LSEH_info_AES_encrypt
.rva .LSEH_begin_AES_decrypt
.rva .LSEH_end_AES_decrypt
.rva .LSEH_info_AES_decrypt
.rva .LSEH_begin_AES_set_encrypt_key
.rva .LSEH_end_AES_set_encrypt_key
.rva .LSEH_info_AES_set_encrypt_key
.rva .LSEH_begin_AES_set_decrypt_key
.rva .LSEH_end_AES_set_decrypt_key
.rva .LSEH_info_AES_set_decrypt_key
.rva .LSEH_begin_AES_cbc_encrypt
.rva .LSEH_end_AES_cbc_encrypt
.rva .LSEH_info_AES_cbc_encrypt
.section .xdata
.align 8
# UNWIND_INFO records: .byte 9,0,0,0 encodes version 1 with
# UNW_FLAG_EHANDLER and no unwind codes, followed by the handler RVA
# and, for the block/key handlers, the prologue/epilogue label pair the
# handler reads back as HandlerData.
.LSEH_info_AES_encrypt:
.byte 9,0,0,0
.rva block_se_handler
.rva .Lenc_prologue,.Lenc_epilogue # HandlerData[]
.LSEH_info_AES_decrypt:
.byte 9,0,0,0
.rva block_se_handler
.rva .Ldec_prologue,.Ldec_epilogue # HandlerData[]
.LSEH_info_AES_set_encrypt_key:
.byte 9,0,0,0
.rva key_se_handler
.rva .Lenc_key_prologue,.Lenc_key_epilogue # HandlerData[]
.LSEH_info_AES_set_decrypt_key:
.byte 9,0,0,0
.rva key_se_handler
.rva .Ldec_key_prologue,.Ldec_key_epilogue # HandlerData[]
.LSEH_info_AES_cbc_encrypt:
.byte 9,0,0,0
.rva cbc_se_handler # no HandlerData; labels are hard-coded in the handler
___
}
  2592. $code =~ s/\`([^\`]*)\`/eval($1)/gem;
  2593. print $code;
  2594. close STDOUT;