  1. #! /usr/bin/env perl
  2. # Copyright 2005-2020 The OpenSSL Project Authors. All Rights Reserved.
  3. #
  4. # Licensed under the Apache License 2.0 (the "License"). You may not use
  5. # this file except in compliance with the License. You can obtain a copy
  6. # in the file LICENSE in the source distribution or at
  7. # https://www.openssl.org/source/license.html
  8. #
  9. # ====================================================================
  10. # Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
  11. # project. The module is, however, dual licensed under OpenSSL and
  12. # CRYPTOGAMS licenses depending on where you obtain it. For further
  13. # details see http://www.openssl.org/~appro/cryptogams/.
  14. # ====================================================================
  15. #
  16. # Version 2.1.
  17. #
  18. # aes-*-cbc benchmarks are improved by >70% [compared to code generated by
  19. # gcc 3.3.2 for an Opteron 240 CPU], plus the module carries over all the
  20. # bells-n-whistles from the 32-bit version [you'll notice a lot of
  21. # resemblance]: compressed S-boxes in little-endian byte order, prefetch of
  22. # these tables in CBC mode, avoidance of L1 cache aliasing between the stack
  23. # frame, the key schedule and the already mentioned tables, compressed Td4...
  24. #
  25. # Performance in number of cycles per processed byte for 128-bit key:
  26. #
  27. #              ECB encrypt    ECB decrypt    CBC large chunk
  28. # AMD64        33             43             13.0
  29. # EM64T        38             56             18.6(*)
  30. # Core 2       30             42             14.5(*)
  31. # Atom         65             86             32.1(*)
  32. #
  33. # (*) with hyper-threading off
  34. # $output is the last argument if it looks like a file (it has an extension)
  35. # $flavour is the first argument if it doesn't look like a file
  36. $output = $#ARGV >= 0 && $ARGV[$#ARGV] =~ m|\.\w+$| ? pop : undef;
  37. $flavour = $#ARGV >= 0 && $ARGV[0] !~ m|\.| ? shift : undef;
  38. $win64=0; $win64=1 if ($flavour =~ /[nm]asm|mingw64/ || $output =~ /\.asm$/);
  39. $0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
  40. ( $xlate="${dir}x86_64-xlate.pl" and -f $xlate ) or
  41. ( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or
  42. die "can't locate x86_64-xlate.pl";
  43. open OUT,"| \"$^X\" \"$xlate\" $flavour \"$output\""
  44. or die "can't call $xlate: $!";
  45. *STDOUT=*OUT;
  46. $verticalspin=1; # unlike 32-bit version $verticalspin performs
  47. # ~15% better on both AMD and Intel cores
  48. $speed_limit=512; # see aes-586.pl for details
  49. $code=".text\n";
  50. $s0="%eax";
  51. $s1="%ebx";
  52. $s2="%ecx";
  53. $s3="%edx";
  54. $acc0="%esi"; $mask80="%rsi";
  55. $acc1="%edi"; $maskfe="%rdi";
  56. $acc2="%ebp"; $mask1b="%rbp";
  57. $inp="%r8";
  58. $out="%r9";
  59. $t0="%r10d";
  60. $t1="%r11d";
  61. $t2="%r12d";
  62. $rnds="%r13d";
  63. $sbox="%r14";
  64. $key="%r15";
  65. sub hi() { my $r=shift; $r =~ s/%[er]([a-d])x/%\1h/; $r; }
  66. sub lo() { my $r=shift; $r =~ s/%[er]([a-d])x/%\1l/;
  67. $r =~ s/%[er]([sd]i)/%\1l/;
  68. $r =~ s/%(r[0-9]+)[d]?/%\1b/; $r; }
  69. sub LO() { my $r=shift; $r =~ s/%r([a-z]+)/%e\1/;
  70. $r =~ s/%r([0-9]+)/%r\1d/; $r; }
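# For example (not executed by the generator), these helpers map register names
# to their partial-register aliases:
#   &lo("%eax") -> "%al"     &hi("%eax") -> "%ah"
#   &lo("%esi") -> "%sil"    &lo("%r10d") -> "%r10b"
#   &LO("%rax") -> "%eax"    &LO("%r10")  -> "%r10d"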
  71. sub _data_word()
  72. { my $i;
  73. while(defined($i=shift)) { $code.=sprintf".long\t0x%08x,0x%08x\n",$i,$i; }
  74. }
  75. sub data_word()
  76. { my $i;
  77. my $last=pop(@_);
  78. $code.=".long\t";
  79. while(defined($i=shift)) { $code.=sprintf"0x%08x,",$i; }
  80. $code.=sprintf"0x%08x\n",$last;
  81. }
  82. sub data_byte()
  83. { my $i;
  84. my $last=pop(@_);
  85. $code.=".byte\t";
  86. while(defined($i=shift)) { $code.=sprintf"0x%02x,",$i&0xff; }
  87. $code.=sprintf"0x%02x\n",$last&0xff;
  88. }
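# For illustration: &data_word(1,2) appends ".long 0x00000001,0x00000002",
# whereas &_data_word(1) appends ".long 0x00000001,0x00000001" -- _data_word()
# stores every table value twice, which is what makes each .LAES_Te/.LAES_Td
# entry 8 bytes wide.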
  89. sub encvert()
  90. { my $t3="%r8d"; # zaps $inp!
  91. $code.=<<___;
  92. # favor 3-way issue Opteron pipeline...
  93. movzb `&lo("$s0")`,$acc0
  94. movzb `&lo("$s1")`,$acc1
  95. movzb `&lo("$s2")`,$acc2
  96. mov 0($sbox,$acc0,8),$t0
  97. mov 0($sbox,$acc1,8),$t1
  98. mov 0($sbox,$acc2,8),$t2
  99. movzb `&hi("$s1")`,$acc0
  100. movzb `&hi("$s2")`,$acc1
  101. movzb `&lo("$s3")`,$acc2
  102. xor 3($sbox,$acc0,8),$t0
  103. xor 3($sbox,$acc1,8),$t1
  104. mov 0($sbox,$acc2,8),$t3
  105. movzb `&hi("$s3")`,$acc0
  106. shr \$16,$s2
  107. movzb `&hi("$s0")`,$acc2
  108. xor 3($sbox,$acc0,8),$t2
  109. shr \$16,$s3
  110. xor 3($sbox,$acc2,8),$t3
  111. shr \$16,$s1
  112. lea 16($key),$key
  113. shr \$16,$s0
  114. movzb `&lo("$s2")`,$acc0
  115. movzb `&lo("$s3")`,$acc1
  116. movzb `&lo("$s0")`,$acc2
  117. xor 2($sbox,$acc0,8),$t0
  118. xor 2($sbox,$acc1,8),$t1
  119. xor 2($sbox,$acc2,8),$t2
  120. movzb `&hi("$s3")`,$acc0
  121. movzb `&hi("$s0")`,$acc1
  122. movzb `&lo("$s1")`,$acc2
  123. xor 1($sbox,$acc0,8),$t0
  124. xor 1($sbox,$acc1,8),$t1
  125. xor 2($sbox,$acc2,8),$t3
  126. mov 12($key),$s3
  127. movzb `&hi("$s1")`,$acc1
  128. movzb `&hi("$s2")`,$acc2
  129. mov 0($key),$s0
  130. xor 1($sbox,$acc1,8),$t2
  131. xor 1($sbox,$acc2,8),$t3
  132. mov 4($key),$s1
  133. mov 8($key),$s2
  134. xor $t0,$s0
  135. xor $t1,$s1
  136. xor $t2,$s2
  137. xor $t3,$s3
  138. ___
  139. }
  140. sub enclastvert()
  141. { my $t3="%r8d"; # zaps $inp!
  142. $code.=<<___;
  143. movzb `&lo("$s0")`,$acc0
  144. movzb `&lo("$s1")`,$acc1
  145. movzb `&lo("$s2")`,$acc2
  146. movzb 2($sbox,$acc0,8),$t0
  147. movzb 2($sbox,$acc1,8),$t1
  148. movzb 2($sbox,$acc2,8),$t2
  149. movzb `&lo("$s3")`,$acc0
  150. movzb `&hi("$s1")`,$acc1
  151. movzb `&hi("$s2")`,$acc2
  152. movzb 2($sbox,$acc0,8),$t3
  153. mov 0($sbox,$acc1,8),$acc1 #$t0
  154. mov 0($sbox,$acc2,8),$acc2 #$t1
  155. and \$0x0000ff00,$acc1
  156. and \$0x0000ff00,$acc2
  157. xor $acc1,$t0
  158. xor $acc2,$t1
  159. shr \$16,$s2
  160. movzb `&hi("$s3")`,$acc0
  161. movzb `&hi("$s0")`,$acc1
  162. shr \$16,$s3
  163. mov 0($sbox,$acc0,8),$acc0 #$t2
  164. mov 0($sbox,$acc1,8),$acc1 #$t3
  165. and \$0x0000ff00,$acc0
  166. and \$0x0000ff00,$acc1
  167. shr \$16,$s1
  168. xor $acc0,$t2
  169. xor $acc1,$t3
  170. shr \$16,$s0
  171. movzb `&lo("$s2")`,$acc0
  172. movzb `&lo("$s3")`,$acc1
  173. movzb `&lo("$s0")`,$acc2
  174. mov 0($sbox,$acc0,8),$acc0 #$t0
  175. mov 0($sbox,$acc1,8),$acc1 #$t1
  176. mov 0($sbox,$acc2,8),$acc2 #$t2
  177. and \$0x00ff0000,$acc0
  178. and \$0x00ff0000,$acc1
  179. and \$0x00ff0000,$acc2
  180. xor $acc0,$t0
  181. xor $acc1,$t1
  182. xor $acc2,$t2
  183. movzb `&lo("$s1")`,$acc0
  184. movzb `&hi("$s3")`,$acc1
  185. movzb `&hi("$s0")`,$acc2
  186. mov 0($sbox,$acc0,8),$acc0 #$t3
  187. mov 2($sbox,$acc1,8),$acc1 #$t0
  188. mov 2($sbox,$acc2,8),$acc2 #$t1
  189. and \$0x00ff0000,$acc0
  190. and \$0xff000000,$acc1
  191. and \$0xff000000,$acc2
  192. xor $acc0,$t3
  193. xor $acc1,$t0
  194. xor $acc2,$t1
  195. movzb `&hi("$s1")`,$acc0
  196. movzb `&hi("$s2")`,$acc1
  197. mov 16+12($key),$s3
  198. mov 2($sbox,$acc0,8),$acc0 #$t2
  199. mov 2($sbox,$acc1,8),$acc1 #$t3
  200. mov 16+0($key),$s0
  201. and \$0xff000000,$acc0
  202. and \$0xff000000,$acc1
  203. xor $acc0,$t2
  204. xor $acc1,$t3
  205. mov 16+4($key),$s1
  206. mov 16+8($key),$s2
  207. xor $t0,$s0
  208. xor $t1,$s1
  209. xor $t2,$s2
  210. xor $t3,$s3
  211. ___
  212. }
  213. sub encstep()
  214. { my ($i,@s) = @_;
  215. my $tmp0=$acc0;
  216. my $tmp1=$acc1;
  217. my $tmp2=$acc2;
  218. my $out=($t0,$t1,$t2,$s[0])[$i];
  219. if ($i==3) {
  220. $tmp0=$s[1];
  221. $tmp1=$s[2];
  222. $tmp2=$s[3];
  223. }
  224. $code.=" movzb ".&lo($s[0]).",$out\n";
  225. $code.=" mov $s[2],$tmp1\n" if ($i!=3);
  226. $code.=" lea 16($key),$key\n" if ($i==0);
  227. $code.=" movzb ".&hi($s[1]).",$tmp0\n";
  228. $code.=" mov 0($sbox,$out,8),$out\n";
  229. $code.=" shr \$16,$tmp1\n";
  230. $code.=" mov $s[3],$tmp2\n" if ($i!=3);
  231. $code.=" xor 3($sbox,$tmp0,8),$out\n";
  232. $code.=" movzb ".&lo($tmp1).",$tmp1\n";
  233. $code.=" shr \$24,$tmp2\n";
  234. $code.=" xor 4*$i($key),$out\n";
  235. $code.=" xor 2($sbox,$tmp1,8),$out\n";
  236. $code.=" xor 1($sbox,$tmp2,8),$out\n";
  237. $code.=" mov $t0,$s[1]\n" if ($i==3);
  238. $code.=" mov $t1,$s[2]\n" if ($i==3);
  239. $code.=" mov $t2,$s[3]\n" if ($i==3);
  240. $code.="\n";
  241. }
  242. sub enclast()
  243. { my ($i,@s)=@_;
  244. my $tmp0=$acc0;
  245. my $tmp1=$acc1;
  246. my $tmp2=$acc2;
  247. my $out=($t0,$t1,$t2,$s[0])[$i];
  248. if ($i==3) {
  249. $tmp0=$s[1];
  250. $tmp1=$s[2];
  251. $tmp2=$s[3];
  252. }
  253. $code.=" movzb ".&lo($s[0]).",$out\n";
  254. $code.=" mov $s[2],$tmp1\n" if ($i!=3);
  255. $code.=" mov 2($sbox,$out,8),$out\n";
  256. $code.=" shr \$16,$tmp1\n";
  257. $code.=" mov $s[3],$tmp2\n" if ($i!=3);
  258. $code.=" and \$0x000000ff,$out\n";
  259. $code.=" movzb ".&hi($s[1]).",$tmp0\n";
  260. $code.=" movzb ".&lo($tmp1).",$tmp1\n";
  261. $code.=" shr \$24,$tmp2\n";
  262. $code.=" mov 0($sbox,$tmp0,8),$tmp0\n";
  263. $code.=" mov 0($sbox,$tmp1,8),$tmp1\n";
  264. $code.=" mov 2($sbox,$tmp2,8),$tmp2\n";
  265. $code.=" and \$0x0000ff00,$tmp0\n";
  266. $code.=" and \$0x00ff0000,$tmp1\n";
  267. $code.=" and \$0xff000000,$tmp2\n";
  268. $code.=" xor $tmp0,$out\n";
  269. $code.=" mov $t0,$s[1]\n" if ($i==3);
  270. $code.=" xor $tmp1,$out\n";
  271. $code.=" mov $t1,$s[2]\n" if ($i==3);
  272. $code.=" xor $tmp2,$out\n";
  273. $code.=" mov $t2,$s[3]\n" if ($i==3);
  274. $code.="\n";
  275. }
  276. $code.=<<___;
  277. .type _x86_64_AES_encrypt,\@abi-omnipotent
  278. .align 16
  279. _x86_64_AES_encrypt:
  280. .cfi_startproc
  281. xor 0($key),$s0 # xor with key
  282. xor 4($key),$s1
  283. xor 8($key),$s2
  284. xor 12($key),$s3
  285. mov 240($key),$rnds # load key->rounds
  286. sub \$1,$rnds
  287. jmp .Lenc_loop
  288. .align 16
  289. .Lenc_loop:
  290. ___
  291. if ($verticalspin) { &encvert(); }
  292. else { &encstep(0,$s0,$s1,$s2,$s3);
  293. &encstep(1,$s1,$s2,$s3,$s0);
  294. &encstep(2,$s2,$s3,$s0,$s1);
  295. &encstep(3,$s3,$s0,$s1,$s2);
  296. }
  297. $code.=<<___;
  298. sub \$1,$rnds
  299. jnz .Lenc_loop
  300. ___
  301. if ($verticalspin) { &enclastvert(); }
  302. else { &enclast(0,$s0,$s1,$s2,$s3);
  303. &enclast(1,$s1,$s2,$s3,$s0);
  304. &enclast(2,$s2,$s3,$s0,$s1);
  305. &enclast(3,$s3,$s0,$s1,$s2);
  306. $code.=<<___;
  307. xor 16+0($key),$s0 # xor with key
  308. xor 16+4($key),$s1
  309. xor 16+8($key),$s2
  310. xor 16+12($key),$s3
  311. ___
  312. }
  313. $code.=<<___;
  314. .byte 0xf3,0xc3 # rep ret
  315. .cfi_endproc
  316. .size _x86_64_AES_encrypt,.-_x86_64_AES_encrypt
  317. ___
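# Illustrative summary (not part of the generated code): every 8-byte entry of
# the compressed .LAES_Te table stores the same 32-bit word twice, so a 4-byte
# load at byte offset k yields that word rotated right by 8*k bits.  Each pass
# of .Lenc_loop therefore computes, per output word j (indices mod 4):
#
#   t[j] = Te[ s[j] & 0xff ]                            # offset 0
#        ^ ROTR24( Te[ (s[j+1] >>  8) & 0xff ] )        # offset 3
#        ^ ROTR16( Te[ (s[j+2] >> 16) & 0xff ] )        # offset 2
#        ^ ROTR8 ( Te[  s[j+3] >> 24         ] )        # offset 1
#        ^ rk[j]
#
# i.e. the usual combined SubBytes/ShiftRows/MixColumns/AddRoundKey T-table
# round, with the rotations obtained for free from the duplicated entries.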
  318. # it's possible to implement this by shifting tN by 8, filling least
  319. # significant byte with byte load and finally bswap-ing at the end,
  320. # but such partial register load kills Core 2...
  321. sub enccompactvert()
  322. { my ($t3,$t4,$t5)=("%r8d","%r9d","%r13d");
  323. $code.=<<___;
  324. movzb `&lo("$s0")`,$t0
  325. movzb `&lo("$s1")`,$t1
  326. movzb `&lo("$s2")`,$t2
  327. movzb `&lo("$s3")`,$t3
  328. movzb `&hi("$s1")`,$acc0
  329. movzb `&hi("$s2")`,$acc1
  330. shr \$16,$s2
  331. movzb `&hi("$s3")`,$acc2
  332. movzb ($sbox,$t0,1),$t0
  333. movzb ($sbox,$t1,1),$t1
  334. movzb ($sbox,$t2,1),$t2
  335. movzb ($sbox,$t3,1),$t3
  336. movzb ($sbox,$acc0,1),$t4 #$t0
  337. movzb `&hi("$s0")`,$acc0
  338. movzb ($sbox,$acc1,1),$t5 #$t1
  339. movzb `&lo("$s2")`,$acc1
  340. movzb ($sbox,$acc2,1),$acc2 #$t2
  341. movzb ($sbox,$acc0,1),$acc0 #$t3
  342. shl \$8,$t4
  343. shr \$16,$s3
  344. shl \$8,$t5
  345. xor $t4,$t0
  346. shr \$16,$s0
  347. movzb `&lo("$s3")`,$t4
  348. shr \$16,$s1
  349. xor $t5,$t1
  350. shl \$8,$acc2
  351. movzb `&lo("$s0")`,$t5
  352. movzb ($sbox,$acc1,1),$acc1 #$t0
  353. xor $acc2,$t2
  354. shl \$8,$acc0
  355. movzb `&lo("$s1")`,$acc2
  356. shl \$16,$acc1
  357. xor $acc0,$t3
  358. movzb ($sbox,$t4,1),$t4 #$t1
  359. movzb `&hi("$s3")`,$acc0
  360. movzb ($sbox,$t5,1),$t5 #$t2
  361. xor $acc1,$t0
  362. shr \$8,$s2
  363. movzb `&hi("$s0")`,$acc1
  364. shl \$16,$t4
  365. shr \$8,$s1
  366. shl \$16,$t5
  367. xor $t4,$t1
  368. movzb ($sbox,$acc2,1),$acc2 #$t3
  369. movzb ($sbox,$acc0,1),$acc0 #$t0
  370. movzb ($sbox,$acc1,1),$acc1 #$t1
  371. movzb ($sbox,$s2,1),$s3 #$t3
  372. movzb ($sbox,$s1,1),$s2 #$t2
  373. shl \$16,$acc2
  374. xor $t5,$t2
  375. shl \$24,$acc0
  376. xor $acc2,$t3
  377. shl \$24,$acc1
  378. xor $acc0,$t0
  379. shl \$24,$s3
  380. xor $acc1,$t1
  381. shl \$24,$s2
  382. mov $t0,$s0
  383. mov $t1,$s1
  384. xor $t2,$s2
  385. xor $t3,$s3
  386. ___
  387. }
  388. sub enctransform_ref()
  389. { my $sn = shift;
  390. my ($acc,$r2,$tmp)=("%r8d","%r9d","%r13d");
  391. $code.=<<___;
  392. mov $sn,$acc
  393. and \$0x80808080,$acc
  394. mov $acc,$tmp
  395. shr \$7,$tmp
  396. lea ($sn,$sn),$r2
  397. sub $tmp,$acc
  398. and \$0xfefefefe,$r2
  399. and \$0x1b1b1b1b,$acc
  400. mov $sn,$tmp
  401. xor $acc,$r2
  402. xor $r2,$sn
  403. rol \$24,$sn
  404. xor $r2,$sn
  405. ror \$16,$tmp
  406. xor $tmp,$sn
  407. ror \$8,$tmp
  408. xor $tmp,$sn
  409. ___
  410. }
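# A plain-Perl model of the transform above (illustrative only; the sub name is
# hypothetical and nothing in the generator calls it).  It mirrors the
# SIMD-within-a-register trick used here: double every byte with lea/and, xor
# 0x1b into bytes whose top bit was set (byte-wise xtime), then combine rotated
# copies to get one MixColumns column:
sub _mixcolumn_model {
    my $s   = shift;                            # packed column, 4 bytes in a 32-bit value
    my $rol = sub { my ($v,$n) = @_; (($v << $n) | ($v >> (32 - $n))) & 0xffffffff };
    my $msb = $s & 0x80808080;                  # top bit of every byte
    my $r2  = (($s << 1) & 0xfefefefe)          # byte-wise doubling, no cross-byte carry
            ^ (($msb - ($msb >> 7)) & 0x1b1b1b1b);      # reduce bytes that overflowed
    return $rol->($s ^ $r2, 24) ^ $r2           # same xor/rotate chain as above
         ^ $rol->($s, 16) ^ $rol->($s, 8);
}
# The sub enctransform() below applies the same arithmetic to all four state
# words, interleaving two at a time to keep more of the pipeline busy.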
  411. # unlike the decrypt case, it does not pay off to parallelize enctransform
  412. sub enctransform()
  413. { my ($t3,$r20,$r21)=($acc2,"%r8d","%r9d");
  414. $code.=<<___;
  415. mov \$0x80808080,$t0
  416. mov \$0x80808080,$t1
  417. and $s0,$t0
  418. and $s1,$t1
  419. mov $t0,$acc0
  420. mov $t1,$acc1
  421. shr \$7,$t0
  422. lea ($s0,$s0),$r20
  423. shr \$7,$t1
  424. lea ($s1,$s1),$r21
  425. sub $t0,$acc0
  426. sub $t1,$acc1
  427. and \$0xfefefefe,$r20
  428. and \$0xfefefefe,$r21
  429. and \$0x1b1b1b1b,$acc0
  430. and \$0x1b1b1b1b,$acc1
  431. mov $s0,$t0
  432. mov $s1,$t1
  433. xor $acc0,$r20
  434. xor $acc1,$r21
  435. xor $r20,$s0
  436. xor $r21,$s1
  437. mov \$0x80808080,$t2
  438. rol \$24,$s0
  439. mov \$0x80808080,$t3
  440. rol \$24,$s1
  441. and $s2,$t2
  442. and $s3,$t3
  443. xor $r20,$s0
  444. xor $r21,$s1
  445. mov $t2,$acc0
  446. ror \$16,$t0
  447. mov $t3,$acc1
  448. ror \$16,$t1
  449. lea ($s2,$s2),$r20
  450. shr \$7,$t2
  451. xor $t0,$s0
  452. shr \$7,$t3
  453. xor $t1,$s1
  454. ror \$8,$t0
  455. lea ($s3,$s3),$r21
  456. ror \$8,$t1
  457. sub $t2,$acc0
  458. sub $t3,$acc1
  459. xor $t0,$s0
  460. xor $t1,$s1
  461. and \$0xfefefefe,$r20
  462. and \$0xfefefefe,$r21
  463. and \$0x1b1b1b1b,$acc0
  464. and \$0x1b1b1b1b,$acc1
  465. mov $s2,$t2
  466. mov $s3,$t3
  467. xor $acc0,$r20
  468. xor $acc1,$r21
  469. ror \$16,$t2
  470. xor $r20,$s2
  471. ror \$16,$t3
  472. xor $r21,$s3
  473. rol \$24,$s2
  474. mov 0($sbox),$acc0 # prefetch Te4
  475. rol \$24,$s3
  476. xor $r20,$s2
  477. mov 64($sbox),$acc1
  478. xor $r21,$s3
  479. mov 128($sbox),$r20
  480. xor $t2,$s2
  481. ror \$8,$t2
  482. xor $t3,$s3
  483. ror \$8,$t3
  484. xor $t2,$s2
  485. mov 192($sbox),$r21
  486. xor $t3,$s3
  487. ___
  488. }
  489. $code.=<<___;
  490. .type _x86_64_AES_encrypt_compact,\@abi-omnipotent
  491. .align 16
  492. _x86_64_AES_encrypt_compact:
  493. .cfi_startproc
  494. lea 128($sbox),$inp # size optimization
  495. mov 0-128($inp),$acc1 # prefetch Te4
  496. mov 32-128($inp),$acc2
  497. mov 64-128($inp),$t0
  498. mov 96-128($inp),$t1
  499. mov 128-128($inp),$acc1
  500. mov 160-128($inp),$acc2
  501. mov 192-128($inp),$t0
  502. mov 224-128($inp),$t1
  503. jmp .Lenc_loop_compact
  504. .align 16
  505. .Lenc_loop_compact:
  506. xor 0($key),$s0 # xor with key
  507. xor 4($key),$s1
  508. xor 8($key),$s2
  509. xor 12($key),$s3
  510. lea 16($key),$key
  511. ___
  512. &enccompactvert();
  513. $code.=<<___;
  514. cmp 16(%rsp),$key
  515. je .Lenc_compact_done
  516. ___
  517. &enctransform();
  518. $code.=<<___;
  519. jmp .Lenc_loop_compact
  520. .align 16
  521. .Lenc_compact_done:
  522. xor 0($key),$s0
  523. xor 4($key),$s1
  524. xor 8($key),$s2
  525. xor 12($key),$s3
  526. .byte 0xf3,0xc3 # rep ret
  527. .cfi_endproc
  528. .size _x86_64_AES_encrypt_compact,.-_x86_64_AES_encrypt_compact
  529. ___
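# Note: unlike _x86_64_AES_encrypt above, the "compact" path reads only the
# 256-byte Te4 S-box (plus a handful of prefetched cache lines) and performs
# MixColumns arithmetically via enctransform(), trading speed for a much
# smaller table footprint.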
  530. # void AES_encrypt (const void *inp,void *out,const AES_KEY *key);
  531. $code.=<<___;
  532. .globl AES_encrypt
  533. .type AES_encrypt,\@function,3
  534. .align 16
  535. .globl asm_AES_encrypt
  536. .hidden asm_AES_encrypt
  537. asm_AES_encrypt:
  538. AES_encrypt:
  539. .cfi_startproc
  540. endbranch
  541. mov %rsp,%rax
  542. .cfi_def_cfa_register %rax
  543. push %rbx
  544. .cfi_push %rbx
  545. push %rbp
  546. .cfi_push %rbp
  547. push %r12
  548. .cfi_push %r12
  549. push %r13
  550. .cfi_push %r13
  551. push %r14
  552. .cfi_push %r14
  553. push %r15
  554. .cfi_push %r15
  555. # allocate frame "above" key schedule
  556. lea -63(%rdx),%rcx # %rdx is key argument
  557. and \$-64,%rsp
  558. sub %rsp,%rcx
  559. neg %rcx
  560. and \$0x3c0,%rcx
  561. sub %rcx,%rsp
  562. sub \$32,%rsp
  563. mov %rsi,16(%rsp) # save out
  564. mov %rax,24(%rsp) # save original stack pointer
  565. .cfi_cfa_expression %rsp+24,deref,+8
  566. .Lenc_prologue:
  567. mov %rdx,$key
  568. mov 240($key),$rnds # load rounds
  569. mov 0(%rdi),$s0 # load input vector
  570. mov 4(%rdi),$s1
  571. mov 8(%rdi),$s2
  572. mov 12(%rdi),$s3
  573. shl \$4,$rnds
  574. lea ($key,$rnds),%rbp
  575. mov $key,(%rsp) # key schedule
  576. mov %rbp,8(%rsp) # end of key schedule
  577. # pick Te4 copy which can't "overlap" with stack frame or key schedule
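# (.LAES_Te keeps four 256-byte copies of Te4 right after the 2KB main table;
#  the arithmetic below uses low bits of the stack address to pick one of them,
#  so that the hot S-box bytes and the stack frame/key schedule are unlikely to
#  fall into the same L1 cache sets -- the "L1 cache aliasing" point from the
#  header comment.)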
  578. lea .LAES_Te+2048(%rip),$sbox
  579. lea 768(%rsp),%rbp
  580. sub $sbox,%rbp
  581. and \$0x300,%rbp
  582. lea ($sbox,%rbp),$sbox
  583. call _x86_64_AES_encrypt_compact
  584. mov 16(%rsp),$out # restore out
  585. mov 24(%rsp),%rsi # restore saved stack pointer
  586. .cfi_def_cfa %rsi,8
  587. mov $s0,0($out) # write output vector
  588. mov $s1,4($out)
  589. mov $s2,8($out)
  590. mov $s3,12($out)
  591. mov -48(%rsi),%r15
  592. .cfi_restore %r15
  593. mov -40(%rsi),%r14
  594. .cfi_restore %r14
  595. mov -32(%rsi),%r13
  596. .cfi_restore %r13
  597. mov -24(%rsi),%r12
  598. .cfi_restore %r12
  599. mov -16(%rsi),%rbp
  600. .cfi_restore %rbp
  601. mov -8(%rsi),%rbx
  602. .cfi_restore %rbx
  603. lea (%rsi),%rsp
  604. .cfi_def_cfa_register %rsp
  605. .Lenc_epilogue:
  606. ret
  607. .cfi_endproc
  608. .size AES_encrypt,.-AES_encrypt
  609. ___
  610. #------------------------------------------------------------------#
  611. sub decvert()
  612. { my $t3="%r8d"; # zaps $inp!
  613. $code.=<<___;
  614. # favor 3-way issue Opteron pipeline...
  615. movzb `&lo("$s0")`,$acc0
  616. movzb `&lo("$s1")`,$acc1
  617. movzb `&lo("$s2")`,$acc2
  618. mov 0($sbox,$acc0,8),$t0
  619. mov 0($sbox,$acc1,8),$t1
  620. mov 0($sbox,$acc2,8),$t2
  621. movzb `&hi("$s3")`,$acc0
  622. movzb `&hi("$s0")`,$acc1
  623. movzb `&lo("$s3")`,$acc2
  624. xor 3($sbox,$acc0,8),$t0
  625. xor 3($sbox,$acc1,8),$t1
  626. mov 0($sbox,$acc2,8),$t3
  627. movzb `&hi("$s1")`,$acc0
  628. shr \$16,$s0
  629. movzb `&hi("$s2")`,$acc2
  630. xor 3($sbox,$acc0,8),$t2
  631. shr \$16,$s3
  632. xor 3($sbox,$acc2,8),$t3
  633. shr \$16,$s1
  634. lea 16($key),$key
  635. shr \$16,$s2
  636. movzb `&lo("$s2")`,$acc0
  637. movzb `&lo("$s3")`,$acc1
  638. movzb `&lo("$s0")`,$acc2
  639. xor 2($sbox,$acc0,8),$t0
  640. xor 2($sbox,$acc1,8),$t1
  641. xor 2($sbox,$acc2,8),$t2
  642. movzb `&hi("$s1")`,$acc0
  643. movzb `&hi("$s2")`,$acc1
  644. movzb `&lo("$s1")`,$acc2
  645. xor 1($sbox,$acc0,8),$t0
  646. xor 1($sbox,$acc1,8),$t1
  647. xor 2($sbox,$acc2,8),$t3
  648. movzb `&hi("$s3")`,$acc0
  649. mov 12($key),$s3
  650. movzb `&hi("$s0")`,$acc2
  651. xor 1($sbox,$acc0,8),$t2
  652. mov 0($key),$s0
  653. xor 1($sbox,$acc2,8),$t3
  654. xor $t0,$s0
  655. mov 4($key),$s1
  656. mov 8($key),$s2
  657. xor $t2,$s2
  658. xor $t1,$s1
  659. xor $t3,$s3
  660. ___
  661. }
  662. sub declastvert()
  663. { my $t3="%r8d"; # zaps $inp!
  664. $code.=<<___;
  665. lea 2048($sbox),$sbox # size optimization
  666. movzb `&lo("$s0")`,$acc0
  667. movzb `&lo("$s1")`,$acc1
  668. movzb `&lo("$s2")`,$acc2
  669. movzb ($sbox,$acc0,1),$t0
  670. movzb ($sbox,$acc1,1),$t1
  671. movzb ($sbox,$acc2,1),$t2
  672. movzb `&lo("$s3")`,$acc0
  673. movzb `&hi("$s3")`,$acc1
  674. movzb `&hi("$s0")`,$acc2
  675. movzb ($sbox,$acc0,1),$t3
  676. movzb ($sbox,$acc1,1),$acc1 #$t0
  677. movzb ($sbox,$acc2,1),$acc2 #$t1
  678. shl \$8,$acc1
  679. shl \$8,$acc2
  680. xor $acc1,$t0
  681. xor $acc2,$t1
  682. shr \$16,$s3
  683. movzb `&hi("$s1")`,$acc0
  684. movzb `&hi("$s2")`,$acc1
  685. shr \$16,$s0
  686. movzb ($sbox,$acc0,1),$acc0 #$t2
  687. movzb ($sbox,$acc1,1),$acc1 #$t3
  688. shl \$8,$acc0
  689. shl \$8,$acc1
  690. shr \$16,$s1
  691. xor $acc0,$t2
  692. xor $acc1,$t3
  693. shr \$16,$s2
  694. movzb `&lo("$s2")`,$acc0
  695. movzb `&lo("$s3")`,$acc1
  696. movzb `&lo("$s0")`,$acc2
  697. movzb ($sbox,$acc0,1),$acc0 #$t0
  698. movzb ($sbox,$acc1,1),$acc1 #$t1
  699. movzb ($sbox,$acc2,1),$acc2 #$t2
  700. shl \$16,$acc0
  701. shl \$16,$acc1
  702. shl \$16,$acc2
  703. xor $acc0,$t0
  704. xor $acc1,$t1
  705. xor $acc2,$t2
  706. movzb `&lo("$s1")`,$acc0
  707. movzb `&hi("$s1")`,$acc1
  708. movzb `&hi("$s2")`,$acc2
  709. movzb ($sbox,$acc0,1),$acc0 #$t3
  710. movzb ($sbox,$acc1,1),$acc1 #$t0
  711. movzb ($sbox,$acc2,1),$acc2 #$t1
  712. shl \$16,$acc0
  713. shl \$24,$acc1
  714. shl \$24,$acc2
  715. xor $acc0,$t3
  716. xor $acc1,$t0
  717. xor $acc2,$t1
  718. movzb `&hi("$s3")`,$acc0
  719. movzb `&hi("$s0")`,$acc1
  720. mov 16+12($key),$s3
  721. movzb ($sbox,$acc0,1),$acc0 #$t2
  722. movzb ($sbox,$acc1,1),$acc1 #$t3
  723. mov 16+0($key),$s0
  724. shl \$24,$acc0
  725. shl \$24,$acc1
  726. xor $acc0,$t2
  727. xor $acc1,$t3
  728. mov 16+4($key),$s1
  729. mov 16+8($key),$s2
  730. lea -2048($sbox),$sbox
  731. xor $t0,$s0
  732. xor $t1,$s1
  733. xor $t2,$s2
  734. xor $t3,$s3
  735. ___
  736. }
  737. sub decstep()
  738. { my ($i,@s) = @_;
  739. my $tmp0=$acc0;
  740. my $tmp1=$acc1;
  741. my $tmp2=$acc2;
  742. my $out=($t0,$t1,$t2,$s[0])[$i];
  743. $code.=" mov $s[0],$out\n" if ($i!=3);
  744. $tmp1=$s[2] if ($i==3);
  745. $code.=" mov $s[2],$tmp1\n" if ($i!=3);
  746. $code.=" and \$0xFF,$out\n";
  747. $code.=" mov 0($sbox,$out,8),$out\n";
  748. $code.=" shr \$16,$tmp1\n";
  749. $tmp2=$s[3] if ($i==3);
  750. $code.=" mov $s[3],$tmp2\n" if ($i!=3);
  751. $tmp0=$s[1] if ($i==3);
  752. $code.=" movzb ".&hi($s[1]).",$tmp0\n";
  753. $code.=" and \$0xFF,$tmp1\n";
  754. $code.=" shr \$24,$tmp2\n";
  755. $code.=" xor 3($sbox,$tmp0,8),$out\n";
  756. $code.=" xor 2($sbox,$tmp1,8),$out\n";
  757. $code.=" xor 1($sbox,$tmp2,8),$out\n";
  758. $code.=" mov $t2,$s[1]\n" if ($i==3);
  759. $code.=" mov $t1,$s[2]\n" if ($i==3);
  760. $code.=" mov $t0,$s[3]\n" if ($i==3);
  761. $code.="\n";
  762. }
  763. sub declast()
  764. { my ($i,@s)=@_;
  765. my $tmp0=$acc0;
  766. my $tmp1=$acc1;
  767. my $tmp2=$acc2;
  768. my $out=($t0,$t1,$t2,$s[0])[$i];
  769. $code.=" mov $s[0],$out\n" if ($i!=3);
  770. $tmp1=$s[2] if ($i==3);
  771. $code.=" mov $s[2],$tmp1\n" if ($i!=3);
  772. $code.=" and \$0xFF,$out\n";
  773. $code.=" movzb 2048($sbox,$out,1),$out\n";
  774. $code.=" shr \$16,$tmp1\n";
  775. $tmp2=$s[3] if ($i==3);
  776. $code.=" mov $s[3],$tmp2\n" if ($i!=3);
  777. $tmp0=$s[1] if ($i==3);
  778. $code.=" movzb ".&hi($s[1]).",$tmp0\n";
  779. $code.=" and \$0xFF,$tmp1\n";
  780. $code.=" shr \$24,$tmp2\n";
  781. $code.=" movzb 2048($sbox,$tmp0,1),$tmp0\n";
  782. $code.=" movzb 2048($sbox,$tmp1,1),$tmp1\n";
  783. $code.=" movzb 2048($sbox,$tmp2,1),$tmp2\n";
  784. $code.=" shl \$8,$tmp0\n";
  785. $code.=" shl \$16,$tmp1\n";
  786. $code.=" shl \$24,$tmp2\n";
  787. $code.=" xor $tmp0,$out\n";
  788. $code.=" mov $t2,$s[1]\n" if ($i==3);
  789. $code.=" xor $tmp1,$out\n";
  790. $code.=" mov $t1,$s[2]\n" if ($i==3);
  791. $code.=" xor $tmp2,$out\n";
  792. $code.=" mov $t0,$s[3]\n" if ($i==3);
  793. $code.="\n";
  794. }
  795. $code.=<<___;
  796. .type _x86_64_AES_decrypt,\@abi-omnipotent
  797. .align 16
  798. _x86_64_AES_decrypt:
  799. .cfi_startproc
  800. xor 0($key),$s0 # xor with key
  801. xor 4($key),$s1
  802. xor 8($key),$s2
  803. xor 12($key),$s3
  804. mov 240($key),$rnds # load key->rounds
  805. sub \$1,$rnds
  806. jmp .Ldec_loop
  807. .align 16
  808. .Ldec_loop:
  809. ___
  810. if ($verticalspin) { &decvert(); }
  811. else { &decstep(0,$s0,$s3,$s2,$s1);
  812. &decstep(1,$s1,$s0,$s3,$s2);
  813. &decstep(2,$s2,$s1,$s0,$s3);
  814. &decstep(3,$s3,$s2,$s1,$s0);
  815. $code.=<<___;
  816. lea 16($key),$key
  817. xor 0($key),$s0 # xor with key
  818. xor 4($key),$s1
  819. xor 8($key),$s2
  820. xor 12($key),$s3
  821. ___
  822. }
  823. $code.=<<___;
  824. sub \$1,$rnds
  825. jnz .Ldec_loop
  826. ___
  827. if ($verticalspin) { &declastvert(); }
  828. else { &declast(0,$s0,$s3,$s2,$s1);
  829. &declast(1,$s1,$s0,$s3,$s2);
  830. &declast(2,$s2,$s1,$s0,$s3);
  831. &declast(3,$s3,$s2,$s1,$s0);
  832. $code.=<<___;
  833. xor 16+0($key),$s0 # xor with key
  834. xor 16+4($key),$s1
  835. xor 16+8($key),$s2
  836. xor 16+12($key),$s3
  837. ___
  838. }
  839. $code.=<<___;
  840. .byte 0xf3,0xc3 # rep ret
  841. .cfi_endproc
  842. .size _x86_64_AES_decrypt,.-_x86_64_AES_decrypt
  843. ___
  844. sub deccompactvert()
  845. { my ($t3,$t4,$t5)=("%r8d","%r9d","%r13d");
  846. $code.=<<___;
  847. movzb `&lo("$s0")`,$t0
  848. movzb `&lo("$s1")`,$t1
  849. movzb `&lo("$s2")`,$t2
  850. movzb `&lo("$s3")`,$t3
  851. movzb `&hi("$s3")`,$acc0
  852. movzb `&hi("$s0")`,$acc1
  853. shr \$16,$s3
  854. movzb `&hi("$s1")`,$acc2
  855. movzb ($sbox,$t0,1),$t0
  856. movzb ($sbox,$t1,1),$t1
  857. movzb ($sbox,$t2,1),$t2
  858. movzb ($sbox,$t3,1),$t3
  859. movzb ($sbox,$acc0,1),$t4 #$t0
  860. movzb `&hi("$s2")`,$acc0
  861. movzb ($sbox,$acc1,1),$t5 #$t1
  862. movzb ($sbox,$acc2,1),$acc2 #$t2
  863. movzb ($sbox,$acc0,1),$acc0 #$t3
  864. shr \$16,$s2
  865. shl \$8,$t5
  866. shl \$8,$t4
  867. movzb `&lo("$s2")`,$acc1
  868. shr \$16,$s0
  869. xor $t4,$t0
  870. shr \$16,$s1
  871. movzb `&lo("$s3")`,$t4
  872. shl \$8,$acc2
  873. xor $t5,$t1
  874. shl \$8,$acc0
  875. movzb `&lo("$s0")`,$t5
  876. movzb ($sbox,$acc1,1),$acc1 #$t0
  877. xor $acc2,$t2
  878. movzb `&lo("$s1")`,$acc2
  879. shl \$16,$acc1
  880. xor $acc0,$t3
  881. movzb ($sbox,$t4,1),$t4 #$t1
  882. movzb `&hi("$s1")`,$acc0
  883. movzb ($sbox,$acc2,1),$acc2 #$t3
  884. xor $acc1,$t0
  885. movzb ($sbox,$t5,1),$t5 #$t2
  886. movzb `&hi("$s2")`,$acc1
  887. shl \$16,$acc2
  888. shl \$16,$t4
  889. shl \$16,$t5
  890. xor $acc2,$t3
  891. movzb `&hi("$s3")`,$acc2
  892. xor $t4,$t1
  893. shr \$8,$s0
  894. xor $t5,$t2
  895. movzb ($sbox,$acc0,1),$acc0 #$t0
  896. movzb ($sbox,$acc1,1),$s1 #$t1
  897. movzb ($sbox,$acc2,1),$s2 #$t2
  898. movzb ($sbox,$s0,1),$s3 #$t3
  899. mov $t0,$s0
  900. shl \$24,$acc0
  901. shl \$24,$s1
  902. shl \$24,$s2
  903. xor $acc0,$s0
  904. shl \$24,$s3
  905. xor $t1,$s1
  906. xor $t2,$s2
  907. xor $t3,$s3
  908. ___
  909. }
  910. # parallelized version! input is pair of 64-bit values: %rax=s1.s0
  911. # and %rcx=s3.s2, output is four 32-bit values in %eax=s0, %ebx=s1,
  912. # %ecx=s2 and %edx=s3.
  913. sub dectransform()
  914. { my ($tp10,$tp20,$tp40,$tp80,$acc0)=("%rax","%r8", "%r9", "%r10","%rbx");
  915. my ($tp18,$tp28,$tp48,$tp88,$acc8)=("%rcx","%r11","%r12","%r13","%rdx");
  916. my $prefetch = shift;
  917. $code.=<<___;
  918. mov $mask80,$tp40
  919. mov $mask80,$tp48
  920. and $tp10,$tp40
  921. and $tp18,$tp48
  922. mov $tp40,$acc0
  923. mov $tp48,$acc8
  924. shr \$7,$tp40
  925. lea ($tp10,$tp10),$tp20
  926. shr \$7,$tp48
  927. lea ($tp18,$tp18),$tp28
  928. sub $tp40,$acc0
  929. sub $tp48,$acc8
  930. and $maskfe,$tp20
  931. and $maskfe,$tp28
  932. and $mask1b,$acc0
  933. and $mask1b,$acc8
  934. xor $acc0,$tp20
  935. xor $acc8,$tp28
  936. mov $mask80,$tp80
  937. mov $mask80,$tp88
  938. and $tp20,$tp80
  939. and $tp28,$tp88
  940. mov $tp80,$acc0
  941. mov $tp88,$acc8
  942. shr \$7,$tp80
  943. lea ($tp20,$tp20),$tp40
  944. shr \$7,$tp88
  945. lea ($tp28,$tp28),$tp48
  946. sub $tp80,$acc0
  947. sub $tp88,$acc8
  948. and $maskfe,$tp40
  949. and $maskfe,$tp48
  950. and $mask1b,$acc0
  951. and $mask1b,$acc8
  952. xor $acc0,$tp40
  953. xor $acc8,$tp48
  954. mov $mask80,$tp80
  955. mov $mask80,$tp88
  956. and $tp40,$tp80
  957. and $tp48,$tp88
  958. mov $tp80,$acc0
  959. mov $tp88,$acc8
  960. shr \$7,$tp80
  961. xor $tp10,$tp20 # tp2^=tp1
  962. shr \$7,$tp88
  963. xor $tp18,$tp28 # tp2^=tp1
  964. sub $tp80,$acc0
  965. sub $tp88,$acc8
  966. lea ($tp40,$tp40),$tp80
  967. lea ($tp48,$tp48),$tp88
  968. xor $tp10,$tp40 # tp4^=tp1
  969. xor $tp18,$tp48 # tp4^=tp1
  970. and $maskfe,$tp80
  971. and $maskfe,$tp88
  972. and $mask1b,$acc0
  973. and $mask1b,$acc8
  974. xor $acc0,$tp80
  975. xor $acc8,$tp88
  976. xor $tp80,$tp10 # tp1^=tp8
  977. xor $tp88,$tp18 # tp1^=tp8
  978. xor $tp80,$tp20 # tp2^tp1^=tp8
  979. xor $tp88,$tp28 # tp2^tp1^=tp8
  980. mov $tp10,$acc0
  981. mov $tp18,$acc8
  982. xor $tp80,$tp40 # tp4^tp1^=tp8
  983. shr \$32,$acc0
  984. xor $tp88,$tp48 # tp4^tp1^=tp8
  985. shr \$32,$acc8
  986. xor $tp20,$tp80 # tp8^=tp8^tp2^tp1=tp2^tp1
  987. rol \$8,`&LO("$tp10")` # ROTATE(tp1^tp8,8)
  988. xor $tp28,$tp88 # tp8^=tp8^tp2^tp1=tp2^tp1
  989. rol \$8,`&LO("$tp18")` # ROTATE(tp1^tp8,8)
  990. xor $tp40,$tp80 # tp2^tp1^=tp8^tp4^tp1=tp8^tp4^tp2
  991. rol \$8,`&LO("$acc0")` # ROTATE(tp1^tp8,8)
  992. xor $tp48,$tp88 # tp2^tp1^=tp8^tp4^tp1=tp8^tp4^tp2
  993. rol \$8,`&LO("$acc8")` # ROTATE(tp1^tp8,8)
  994. xor `&LO("$tp80")`,`&LO("$tp10")`
  995. shr \$32,$tp80
  996. xor `&LO("$tp88")`,`&LO("$tp18")`
  997. shr \$32,$tp88
  998. xor `&LO("$tp80")`,`&LO("$acc0")`
  999. xor `&LO("$tp88")`,`&LO("$acc8")`
  1000. mov $tp20,$tp80
  1001. rol \$24,`&LO("$tp20")` # ROTATE(tp2^tp1^tp8,24)
  1002. mov $tp28,$tp88
  1003. rol \$24,`&LO("$tp28")` # ROTATE(tp2^tp1^tp8,24)
  1004. shr \$32,$tp80
  1005. xor `&LO("$tp20")`,`&LO("$tp10")`
  1006. shr \$32,$tp88
  1007. xor `&LO("$tp28")`,`&LO("$tp18")`
  1008. rol \$24,`&LO("$tp80")` # ROTATE(tp2^tp1^tp8,24)
  1009. mov $tp40,$tp20
  1010. rol \$24,`&LO("$tp88")` # ROTATE(tp2^tp1^tp8,24)
  1011. mov $tp48,$tp28
  1012. shr \$32,$tp20
  1013. xor `&LO("$tp80")`,`&LO("$acc0")`
  1014. shr \$32,$tp28
  1015. xor `&LO("$tp88")`,`&LO("$acc8")`
  1016. `"mov 0($sbox),$mask80" if ($prefetch)`
  1017. rol \$16,`&LO("$tp40")` # ROTATE(tp4^tp1^tp8,16)
  1018. `"mov 64($sbox),$maskfe" if ($prefetch)`
  1019. rol \$16,`&LO("$tp48")` # ROTATE(tp4^tp1^tp8,16)
  1020. `"mov 128($sbox),$mask1b" if ($prefetch)`
  1021. rol \$16,`&LO("$tp20")` # ROTATE(tp4^tp1^tp8,16)
  1022. `"mov 192($sbox),$tp80" if ($prefetch)`
  1023. xor `&LO("$tp40")`,`&LO("$tp10")`
  1024. rol \$16,`&LO("$tp28")` # ROTATE(tp4^tp1^tp8,16)
  1025. xor `&LO("$tp48")`,`&LO("$tp18")`
  1026. `"mov 256($sbox),$tp88" if ($prefetch)`
  1027. xor `&LO("$tp20")`,`&LO("$acc0")`
  1028. xor `&LO("$tp28")`,`&LO("$acc8")`
  1029. ___
  1030. }
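# A one-column, plain-Perl model of the transform above (illustrative only; the
# helper names are hypothetical and nothing in the generator calls them).
# dectransform() itself carries two columns per 64-bit register pair; per
# 32-bit column the math is:
sub _invmixcolumn_model {
    my $tp1 = shift;                            # packed column, 4 bytes in a 32-bit value
    my $rol = sub { my ($v,$n) = @_; (($v << $n) | ($v >> (32 - $n))) & 0xffffffff };
    my $xt  = sub {                             # byte-wise xtime(), same mask trick as above
        my $v   = shift;
        my $msb = $v & 0x80808080;
        (($v << 1) & 0xfefefefe) ^ (($msb - ($msb >> 7)) & 0x1b1b1b1b);
    };
    my $tp2 = $xt->($tp1);                      # 2*a, 4*a and 8*a per byte
    my $tp4 = $xt->($tp2);
    my $tp8 = $xt->($tp4);
    return ($tp2 ^ $tp4 ^ $tp8)                         # 0x0e coefficient
         ^ $rol->($tp1 ^ $tp2 ^ $tp8, 24)               # 0x0b
         ^ $rol->($tp1 ^ $tp4 ^ $tp8, 16)               # 0x0d
         ^ $rol->($tp1 ^ $tp8,         8);              # 0x09
}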
  1031. $code.=<<___;
  1032. .type _x86_64_AES_decrypt_compact,\@abi-omnipotent
  1033. .align 16
  1034. _x86_64_AES_decrypt_compact:
  1035. .cfi_startproc
  1036. lea 128($sbox),$inp # size optimization
  1037. mov 0-128($inp),$acc1 # prefetch Td4
  1038. mov 32-128($inp),$acc2
  1039. mov 64-128($inp),$t0
  1040. mov 96-128($inp),$t1
  1041. mov 128-128($inp),$acc1
  1042. mov 160-128($inp),$acc2
  1043. mov 192-128($inp),$t0
  1044. mov 224-128($inp),$t1
  1045. jmp .Ldec_loop_compact
  1046. .align 16
  1047. .Ldec_loop_compact:
  1048. xor 0($key),$s0 # xor with key
  1049. xor 4($key),$s1
  1050. xor 8($key),$s2
  1051. xor 12($key),$s3
  1052. lea 16($key),$key
  1053. ___
  1054. &deccompactvert();
  1055. $code.=<<___;
  1056. cmp 16(%rsp),$key
  1057. je .Ldec_compact_done
  1058. mov 256+0($sbox),$mask80
  1059. shl \$32,%rbx
  1060. shl \$32,%rdx
  1061. mov 256+8($sbox),$maskfe
  1062. or %rbx,%rax
  1063. or %rdx,%rcx
  1064. mov 256+16($sbox),$mask1b
  1065. ___
  1066. &dectransform(1);
  1067. $code.=<<___;
  1068. jmp .Ldec_loop_compact
  1069. .align 16
  1070. .Ldec_compact_done:
  1071. xor 0($key),$s0
  1072. xor 4($key),$s1
  1073. xor 8($key),$s2
  1074. xor 12($key),$s3
  1075. .byte 0xf3,0xc3 # rep ret
  1076. .cfi_endproc
  1077. .size _x86_64_AES_decrypt_compact,.-_x86_64_AES_decrypt_compact
  1078. ___
  1079. # void AES_decrypt (const void *inp,void *out,const AES_KEY *key);
  1080. $code.=<<___;
  1081. .globl AES_decrypt
  1082. .type AES_decrypt,\@function,3
  1083. .align 16
  1084. .globl asm_AES_decrypt
  1085. .hidden asm_AES_decrypt
  1086. asm_AES_decrypt:
  1087. AES_decrypt:
  1088. .cfi_startproc
  1089. endbranch
  1090. mov %rsp,%rax
  1091. .cfi_def_cfa_register %rax
  1092. push %rbx
  1093. .cfi_push %rbx
  1094. push %rbp
  1095. .cfi_push %rbp
  1096. push %r12
  1097. .cfi_push %r12
  1098. push %r13
  1099. .cfi_push %r13
  1100. push %r14
  1101. .cfi_push %r14
  1102. push %r15
  1103. .cfi_push %r15
  1104. # allocate frame "above" key schedule
  1105. lea -63(%rdx),%rcx # %rdx is key argument
  1106. and \$-64,%rsp
  1107. sub %rsp,%rcx
  1108. neg %rcx
  1109. and \$0x3c0,%rcx
  1110. sub %rcx,%rsp
  1111. sub \$32,%rsp
  1112. mov %rsi,16(%rsp) # save out
  1113. mov %rax,24(%rsp) # save original stack pointer
  1114. .cfi_cfa_expression %rsp+24,deref,+8
  1115. .Ldec_prologue:
  1116. mov %rdx,$key
  1117. mov 240($key),$rnds # load rounds
  1118. mov 0(%rdi),$s0 # load input vector
  1119. mov 4(%rdi),$s1
  1120. mov 8(%rdi),$s2
  1121. mov 12(%rdi),$s3
  1122. shl \$4,$rnds
  1123. lea ($key,$rnds),%rbp
  1124. mov $key,(%rsp) # key schedule
  1125. mov %rbp,8(%rsp) # end of key schedule
  1126. # pick Td4 copy which can't "overlap" with stack frame or key schedule
  1127. lea .LAES_Td+2048(%rip),$sbox
  1128. lea 768(%rsp),%rbp
  1129. sub $sbox,%rbp
  1130. and \$0x300,%rbp
  1131. lea ($sbox,%rbp),$sbox
  1132. shr \$3,%rbp # recall "magic" constants!
  1133. add %rbp,$sbox
  1134. call _x86_64_AES_decrypt_compact
  1135. mov 16(%rsp),$out # restore out
  1136. mov 24(%rsp),%rsi # restore saved stack pointer
  1137. .cfi_def_cfa %rsi,8
  1138. mov $s0,0($out) # write output vector
  1139. mov $s1,4($out)
  1140. mov $s2,8($out)
  1141. mov $s3,12($out)
  1142. mov -48(%rsi),%r15
  1143. .cfi_restore %r15
  1144. mov -40(%rsi),%r14
  1145. .cfi_restore %r14
  1146. mov -32(%rsi),%r13
  1147. .cfi_restore %r13
  1148. mov -24(%rsi),%r12
  1149. .cfi_restore %r12
  1150. mov -16(%rsi),%rbp
  1151. .cfi_restore %rbp
  1152. mov -8(%rsi),%rbx
  1153. .cfi_restore %rbx
  1154. lea (%rsi),%rsp
  1155. .cfi_def_cfa_register %rsp
  1156. .Ldec_epilogue:
  1157. ret
  1158. .cfi_endproc
  1159. .size AES_decrypt,.-AES_decrypt
  1160. ___
  1161. #------------------------------------------------------------------#
  1162. sub enckey()
  1163. {
  1164. $code.=<<___;
  1165. movz %dl,%esi # rk[i]>>0
  1166. movzb -128(%rbp,%rsi),%ebx
  1167. movz %dh,%esi # rk[i]>>8
  1168. shl \$24,%ebx
  1169. xor %ebx,%eax
  1170. movzb -128(%rbp,%rsi),%ebx
  1171. shr \$16,%edx
  1172. movz %dl,%esi # rk[i]>>16
  1173. xor %ebx,%eax
  1174. movzb -128(%rbp,%rsi),%ebx
  1175. movz %dh,%esi # rk[i]>>24
  1176. shl \$8,%ebx
  1177. xor %ebx,%eax
  1178. movzb -128(%rbp,%rsi),%ebx
  1179. shl \$16,%ebx
  1180. xor %ebx,%eax
  1181. xor 1024-128(%rbp,%rcx,4),%eax # rcon
  1182. ___
  1183. }
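# A plain-Perl model of one expansion step emitted by enckey() above
# (illustrative only; the sub name and arguments are hypothetical and nothing
# in the generator calls it).  The caller has %eax = rk[0] and %edx = the last
# word of the previous round key; the step computes
# rk[next] = rk[0] ^ SubWord(RotWord(prev)) ^ rcon in the little-endian word
# packing used throughout this file:
sub _expand_word_model {
    my ($rk0, $prev, $rcon, $sbox_ref) = @_;    # $sbox_ref: ref to 256-entry S-box array
    my @b = map { ($prev >> (8*$_)) & 0xff } 0..3;
    my $t =  $sbox_ref->[$b[1]]                 # RotWord, then SubWord,
          | ($sbox_ref->[$b[2]] <<  8)          # repacked little-endian
          | ($sbox_ref->[$b[3]] << 16)
          | ($sbox_ref->[$b[0]] << 24);
    return $rk0 ^ $t ^ $rcon;
}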
  1184. # int AES_set_encrypt_key(const unsigned char *userKey, const int bits,
  1185. # AES_KEY *key)
  1186. $code.=<<___;
  1187. .globl AES_set_encrypt_key
  1188. .type AES_set_encrypt_key,\@function,3
  1189. .align 16
  1190. AES_set_encrypt_key:
  1191. .cfi_startproc
  1192. endbranch
  1193. push %rbx
  1194. .cfi_push %rbx
  1195. push %rbp
  1196. .cfi_push %rbp
  1197. push %r12 # redundant, but allows to share
  1198. .cfi_push %r12
  1199. push %r13 # exception handler...
  1200. .cfi_push %r13
  1201. push %r14
  1202. .cfi_push %r14
  1203. push %r15
  1204. .cfi_push %r15
  1205. sub \$8,%rsp
  1206. .cfi_adjust_cfa_offset 8
  1207. .Lenc_key_prologue:
  1208. call _x86_64_AES_set_encrypt_key
  1209. mov 40(%rsp),%rbp
  1210. .cfi_restore %rbp
  1211. mov 48(%rsp),%rbx
  1212. .cfi_restore %rbx
  1213. add \$56,%rsp
  1214. .cfi_adjust_cfa_offset -56
  1215. .Lenc_key_epilogue:
  1216. ret
  1217. .cfi_endproc
  1218. .size AES_set_encrypt_key,.-AES_set_encrypt_key
  1219. .type _x86_64_AES_set_encrypt_key,\@abi-omnipotent
  1220. .align 16
  1221. _x86_64_AES_set_encrypt_key:
  1222. .cfi_startproc
  1223. mov %esi,%ecx # %ecx=bits
  1224. mov %rdi,%rsi # %rsi=userKey
  1225. mov %rdx,%rdi # %rdi=key
  1226. test \$-1,%rsi
  1227. jz .Lbadpointer
  1228. test \$-1,%rdi
  1229. jz .Lbadpointer
  1230. lea .LAES_Te(%rip),%rbp
  1231. lea 2048+128(%rbp),%rbp
  1232. # prefetch Te4
  1233. mov 0-128(%rbp),%eax
  1234. mov 32-128(%rbp),%ebx
  1235. mov 64-128(%rbp),%r8d
  1236. mov 96-128(%rbp),%edx
  1237. mov 128-128(%rbp),%eax
  1238. mov 160-128(%rbp),%ebx
  1239. mov 192-128(%rbp),%r8d
  1240. mov 224-128(%rbp),%edx
  1241. cmp \$128,%ecx
  1242. je .L10rounds
  1243. cmp \$192,%ecx
  1244. je .L12rounds
  1245. cmp \$256,%ecx
  1246. je .L14rounds
  1247. mov \$-2,%rax # invalid number of bits
  1248. jmp .Lexit
  1249. .L10rounds:
  1250. mov 0(%rsi),%rax # copy first 4 dwords
  1251. mov 8(%rsi),%rdx
  1252. mov %rax,0(%rdi)
  1253. mov %rdx,8(%rdi)
  1254. shr \$32,%rdx
  1255. xor %ecx,%ecx
  1256. jmp .L10shortcut
  1257. .align 4
  1258. .L10loop:
  1259. mov 0(%rdi),%eax # rk[0]
  1260. mov 12(%rdi),%edx # rk[3]
  1261. .L10shortcut:
  1262. ___
  1263. &enckey ();
  1264. $code.=<<___;
  1265. mov %eax,16(%rdi) # rk[4]
  1266. xor 4(%rdi),%eax
  1267. mov %eax,20(%rdi) # rk[5]
  1268. xor 8(%rdi),%eax
  1269. mov %eax,24(%rdi) # rk[6]
  1270. xor 12(%rdi),%eax
  1271. mov %eax,28(%rdi) # rk[7]
  1272. add \$1,%ecx
  1273. lea 16(%rdi),%rdi
  1274. cmp \$10,%ecx
  1275. jl .L10loop
  1276. movl \$10,80(%rdi) # setup number of rounds
  1277. xor %rax,%rax
  1278. jmp .Lexit
  1279. .L12rounds:
  1280. mov 0(%rsi),%rax # copy first 6 dwords
  1281. mov 8(%rsi),%rbx
  1282. mov 16(%rsi),%rdx
  1283. mov %rax,0(%rdi)
  1284. mov %rbx,8(%rdi)
  1285. mov %rdx,16(%rdi)
  1286. shr \$32,%rdx
  1287. xor %ecx,%ecx
  1288. jmp .L12shortcut
  1289. .align 4
  1290. .L12loop:
  1291. mov 0(%rdi),%eax # rk[0]
  1292. mov 20(%rdi),%edx # rk[5]
  1293. .L12shortcut:
  1294. ___
  1295. &enckey ();
  1296. $code.=<<___;
  1297. mov %eax,24(%rdi) # rk[6]
  1298. xor 4(%rdi),%eax
  1299. mov %eax,28(%rdi) # rk[7]
  1300. xor 8(%rdi),%eax
  1301. mov %eax,32(%rdi) # rk[8]
  1302. xor 12(%rdi),%eax
  1303. mov %eax,36(%rdi) # rk[9]
  1304. cmp \$7,%ecx
  1305. je .L12break
  1306. add \$1,%ecx
  1307. xor 16(%rdi),%eax
  1308. mov %eax,40(%rdi) # rk[10]
  1309. xor 20(%rdi),%eax
  1310. mov %eax,44(%rdi) # rk[11]
  1311. lea 24(%rdi),%rdi
  1312. jmp .L12loop
  1313. .L12break:
  1314. movl \$12,72(%rdi) # setup number of rounds
  1315. xor %rax,%rax
  1316. jmp .Lexit
  1317. .L14rounds:
  1318. mov 0(%rsi),%rax # copy first 8 dwords
  1319. mov 8(%rsi),%rbx
  1320. mov 16(%rsi),%rcx
  1321. mov 24(%rsi),%rdx
  1322. mov %rax,0(%rdi)
  1323. mov %rbx,8(%rdi)
  1324. mov %rcx,16(%rdi)
  1325. mov %rdx,24(%rdi)
  1326. shr \$32,%rdx
  1327. xor %ecx,%ecx
  1328. jmp .L14shortcut
  1329. .align 4
  1330. .L14loop:
  1331. mov 0(%rdi),%eax # rk[0]
  1332. mov 28(%rdi),%edx # rk[4]
  1333. .L14shortcut:
  1334. ___
  1335. &enckey ();
  1336. $code.=<<___;
  1337. mov %eax,32(%rdi) # rk[8]
  1338. xor 4(%rdi),%eax
  1339. mov %eax,36(%rdi) # rk[9]
  1340. xor 8(%rdi),%eax
  1341. mov %eax,40(%rdi) # rk[10]
  1342. xor 12(%rdi),%eax
  1343. mov %eax,44(%rdi) # rk[11]
  1344. cmp \$6,%ecx
  1345. je .L14break
  1346. add \$1,%ecx
  1347. mov %eax,%edx
  1348. mov 16(%rdi),%eax # rk[4]
  1349. movz %dl,%esi # rk[11]>>0
  1350. movzb -128(%rbp,%rsi),%ebx
  1351. movz %dh,%esi # rk[11]>>8
  1352. xor %ebx,%eax
  1353. movzb -128(%rbp,%rsi),%ebx
  1354. shr \$16,%edx
  1355. shl \$8,%ebx
  1356. movz %dl,%esi # rk[11]>>16
  1357. xor %ebx,%eax
  1358. movzb -128(%rbp,%rsi),%ebx
  1359. movz %dh,%esi # rk[11]>>24
  1360. shl \$16,%ebx
  1361. xor %ebx,%eax
  1362. movzb -128(%rbp,%rsi),%ebx
  1363. shl \$24,%ebx
  1364. xor %ebx,%eax
  1365. mov %eax,48(%rdi) # rk[12]
  1366. xor 20(%rdi),%eax
  1367. mov %eax,52(%rdi) # rk[13]
  1368. xor 24(%rdi),%eax
  1369. mov %eax,56(%rdi) # rk[14]
  1370. xor 28(%rdi),%eax
  1371. mov %eax,60(%rdi) # rk[15]
  1372. lea 32(%rdi),%rdi
  1373. jmp .L14loop
  1374. .L14break:
  1375. movl \$14,48(%rdi) # setup number of rounds
  1376. xor %rax,%rax
  1377. jmp .Lexit
  1378. .Lbadpointer:
  1379. mov \$-1,%rax
  1380. .Lexit:
  1381. .byte 0xf3,0xc3 # rep ret
  1382. .cfi_endproc
  1383. .size _x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key
  1384. ___
  1385. sub deckey_ref()
  1386. { my ($i,$ptr,$te,$td) = @_;
  1387. my ($tp1,$tp2,$tp4,$tp8,$acc)=("%eax","%ebx","%edi","%edx","%r8d");
  1388. $code.=<<___;
  1389. mov $i($ptr),$tp1
  1390. mov $tp1,$acc
  1391. and \$0x80808080,$acc
  1392. mov $acc,$tp4
  1393. shr \$7,$tp4
  1394. lea 0($tp1,$tp1),$tp2
  1395. sub $tp4,$acc
  1396. and \$0xfefefefe,$tp2
  1397. and \$0x1b1b1b1b,$acc
  1398. xor $tp2,$acc
  1399. mov $acc,$tp2
  1400. and \$0x80808080,$acc
  1401. mov $acc,$tp8
  1402. shr \$7,$tp8
  1403. lea 0($tp2,$tp2),$tp4
  1404. sub $tp8,$acc
  1405. and \$0xfefefefe,$tp4
  1406. and \$0x1b1b1b1b,$acc
  1407. xor $tp1,$tp2 # tp2^tp1
  1408. xor $tp4,$acc
  1409. mov $acc,$tp4
  1410. and \$0x80808080,$acc
  1411. mov $acc,$tp8
  1412. shr \$7,$tp8
  1413. sub $tp8,$acc
  1414. lea 0($tp4,$tp4),$tp8
  1415. xor $tp1,$tp4 # tp4^tp1
  1416. and \$0xfefefefe,$tp8
  1417. and \$0x1b1b1b1b,$acc
  1418. xor $acc,$tp8
  1419. xor $tp8,$tp1 # tp1^tp8
  1420. rol \$8,$tp1 # ROTATE(tp1^tp8,8)
  1421. xor $tp8,$tp2 # tp2^tp1^tp8
  1422. xor $tp8,$tp4 # tp4^tp1^tp8
  1423. xor $tp2,$tp8
  1424. xor $tp4,$tp8 # tp8^(tp8^tp4^tp1)^(tp8^tp2^tp1)=tp8^tp4^tp2
  1425. xor $tp8,$tp1
  1426. rol \$24,$tp2 # ROTATE(tp2^tp1^tp8,24)
  1427. xor $tp2,$tp1
  1428. rol \$16,$tp4 # ROTATE(tp4^tp1^tp8,16)
  1429. xor $tp4,$tp1
  1430. mov $tp1,$i($ptr)
  1431. ___
  1432. }
  1433. # int AES_set_decrypt_key(const unsigned char *userKey, const int bits,
  1434. # AES_KEY *key)
  1435. $code.=<<___;
  1436. .globl AES_set_decrypt_key
  1437. .type AES_set_decrypt_key,\@function,3
  1438. .align 16
  1439. AES_set_decrypt_key:
  1440. .cfi_startproc
  1441. endbranch
  1442. push %rbx
  1443. .cfi_push %rbx
  1444. push %rbp
  1445. .cfi_push %rbp
  1446. push %r12
  1447. .cfi_push %r12
  1448. push %r13
  1449. .cfi_push %r13
  1450. push %r14
  1451. .cfi_push %r14
  1452. push %r15
  1453. .cfi_push %r15
  1454. push %rdx # save key schedule
  1455. .cfi_adjust_cfa_offset 8
  1456. .Ldec_key_prologue:
  1457. call _x86_64_AES_set_encrypt_key
  1458. mov (%rsp),%r8 # restore key schedule
  1459. cmp \$0,%eax
  1460. jne .Labort
  1461. mov 240(%r8),%r14d # pull number of rounds
  1462. xor %rdi,%rdi
  1463. lea (%rdi,%r14d,4),%rcx
  1464. mov %r8,%rsi
  1465. lea (%r8,%rcx,4),%rdi # pointer to last chunk
  1466. .align 4
  1467. .Linvert:
  1468. mov 0(%rsi),%rax
  1469. mov 8(%rsi),%rbx
  1470. mov 0(%rdi),%rcx
  1471. mov 8(%rdi),%rdx
  1472. mov %rax,0(%rdi)
  1473. mov %rbx,8(%rdi)
  1474. mov %rcx,0(%rsi)
  1475. mov %rdx,8(%rsi)
  1476. lea 16(%rsi),%rsi
  1477. lea -16(%rdi),%rdi
  1478. cmp %rsi,%rdi
  1479. jne .Linvert
  1480. lea .LAES_Te+2048+1024(%rip),%rax # rcon
  1481. mov 40(%rax),$mask80
  1482. mov 48(%rax),$maskfe
  1483. mov 56(%rax),$mask1b
  1484. mov %r8,$key
  1485. sub \$1,%r14d
  1486. .align 4
  1487. .Lpermute:
  1488. lea 16($key),$key
  1489. mov 0($key),%rax
  1490. mov 8($key),%rcx
  1491. ___
  1492. &dectransform ();
  1493. $code.=<<___;
  1494. mov %eax,0($key)
  1495. mov %ebx,4($key)
  1496. mov %ecx,8($key)
  1497. mov %edx,12($key)
  1498. sub \$1,%r14d
  1499. jnz .Lpermute
  1500. xor %rax,%rax
  1501. .Labort:
  1502. mov 8(%rsp),%r15
  1503. .cfi_restore %r15
  1504. mov 16(%rsp),%r14
  1505. .cfi_restore %r14
  1506. mov 24(%rsp),%r13
  1507. .cfi_restore %r13
  1508. mov 32(%rsp),%r12
  1509. .cfi_restore %r12
  1510. mov 40(%rsp),%rbp
  1511. .cfi_restore %rbp
  1512. mov 48(%rsp),%rbx
  1513. .cfi_restore %rbx
  1514. add \$56,%rsp
  1515. .cfi_adjust_cfa_offset -56
  1516. .Ldec_key_epilogue:
  1517. ret
  1518. .cfi_endproc
  1519. .size AES_set_decrypt_key,.-AES_set_decrypt_key
  1520. ___
  1521. # void AES_cbc_encrypt (const unsigned char *inp, unsigned char *out,
  1522. # size_t length, const AES_KEY *key,
  1523. # unsigned char *ivp,const int enc);
  1524. {
  1525. # stack frame layout
  1526. # -8(%rsp) return address
  1527. my $keyp="0(%rsp)"; # one to pass as $key
  1528. my $keyend="8(%rsp)"; # &(keyp->rd_key[4*keyp->rounds])
  1529. my $_rsp="16(%rsp)"; # saved %rsp
  1530. my $_inp="24(%rsp)"; # copy of 1st parameter, inp
  1531. my $_out="32(%rsp)"; # copy of 2nd parameter, out
  1532. my $_len="40(%rsp)"; # copy of 3rd parameter, length
  1533. my $_key="48(%rsp)"; # copy of 4th parameter, key
  1534. my $_ivp="56(%rsp)"; # copy of 5th parameter, ivp
  1535. my $ivec="64(%rsp)"; # ivec[16]
  1536. my $aes_key="80(%rsp)"; # copy of aes_key
  1537. my $mark="80+240(%rsp)"; # copy of aes_key->rounds
  1538. $code.=<<___;
  1539. .globl AES_cbc_encrypt
  1540. .type AES_cbc_encrypt,\@function,6
  1541. .align 16
  1542. .extern OPENSSL_ia32cap_P
  1543. .globl asm_AES_cbc_encrypt
  1544. .hidden asm_AES_cbc_encrypt
  1545. asm_AES_cbc_encrypt:
  1546. AES_cbc_encrypt:
  1547. .cfi_startproc
  1548. endbranch
  1549. cmp \$0,%rdx # check length
  1550. je .Lcbc_epilogue
  1551. pushfq
  1552. # This could be .cfi_push 49, but libunwind fails on registers it does not
  1553. # recognize. See https://bugzilla.redhat.com/show_bug.cgi?id=217087.
  1554. .cfi_adjust_cfa_offset 8
  1555. push %rbx
  1556. .cfi_push %rbx
  1557. push %rbp
  1558. .cfi_push %rbp
  1559. push %r12
  1560. .cfi_push %r12
  1561. push %r13
  1562. .cfi_push %r13
  1563. push %r14
  1564. .cfi_push %r14
  1565. push %r15
  1566. .cfi_push %r15
  1567. .Lcbc_prologue:
  1568. cld
  1569. mov %r9d,%r9d # clear upper half of enc
  1570. lea .LAES_Te(%rip),$sbox
  1571. lea .LAES_Td(%rip),%r10
  1572. cmp \$0,%r9
  1573. cmoveq %r10,$sbox
  1574. .cfi_remember_state
  1575. mov OPENSSL_ia32cap_P(%rip),%r10d
  1576. cmp \$$speed_limit,%rdx
  1577. jb .Lcbc_slow_prologue
  1578. test \$15,%rdx
  1579. jnz .Lcbc_slow_prologue
  1580. bt \$28,%r10d
  1581. jc .Lcbc_slow_prologue
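	# bit 28 of OPENSSL_ia32cap_P effectively signals hyper-threading
	# (logical processors sharing L1 cache); if set, fall back to the
	# compact, less table-hungry code path.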
  1582. # allocate aligned stack frame...
  1583. lea -88-248(%rsp),$key
  1584. and \$-64,$key
  1585. # ... and make sure it doesn't alias with AES_T[ed] modulo 4096
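	# i.e. with s, e and p as annotated below, nudge the frame so that
	# the 2304 prefetched bytes (Te plus one Te4 copy) and the stack
	# frame never collide modulo 4096:
	#	if (p >= e)	%rsp -= (p - e);
	#	else		%rsp -= ((p - s) & 0xfff) + 320;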
  1586. mov $sbox,%r10
  1587. lea 2304($sbox),%r11
  1588. mov $key,%r12
  1589. and \$0xFFF,%r10 # s = $sbox&0xfff
  1590. and \$0xFFF,%r11 # e = ($sbox+2304)&0xfff
  1591. and \$0xFFF,%r12 # p = %rsp&0xfff
  1592. cmp %r11,%r12 # if (p>=e) %rsp -= (p-e);
  1593. jb .Lcbc_te_break_out
  1594. sub %r11,%r12
  1595. sub %r12,$key
  1596. jmp .Lcbc_te_ok
  1597. .Lcbc_te_break_out: # else %rsp -= (p-s)&0xfff + framesz
  1598. sub %r10,%r12
  1599. and \$0xFFF,%r12
  1600. add \$320,%r12
  1601. sub %r12,$key
  1602. .align 4
  1603. .Lcbc_te_ok:
  1604. xchg %rsp,$key
  1605. .cfi_def_cfa_register $key
  1606. #add \$8,%rsp # reserve for return address!
  1607. mov $key,$_rsp # save %rsp
  1608. .cfi_cfa_expression $_rsp,deref,+64
  1609. .Lcbc_fast_body:
  1610. mov %rdi,$_inp # save copy of inp
  1611. mov %rsi,$_out # save copy of out
  1612. mov %rdx,$_len # save copy of len
  1613. mov %rcx,$_key # save copy of key
  1614. mov %r8,$_ivp # save copy of ivp
  1615. movl \$0,$mark # copy of aes_key->rounds = 0;
  1616. mov %r8,%rbp # rearrange input arguments
  1617. mov %r9,%rbx
  1618. mov %rsi,$out
  1619. mov %rdi,$inp
  1620. mov %rcx,$key
  1621. mov 240($key),%eax # key->rounds
  1622. # do we copy key schedule to stack?
  1623. mov $key,%r10
  1624. sub $sbox,%r10
  1625. and \$0xfff,%r10
  1626. cmp \$2304,%r10
  1627. jb .Lcbc_do_ecopy
  1628. cmp \$4096-248,%r10
  1629. jb .Lcbc_skip_ecopy
  1630. .align 4
  1631. .Lcbc_do_ecopy:
  1632. mov $key,%rsi
  1633. lea $aes_key,%rdi
  1634. lea $aes_key,$key
  1635. mov \$240/8,%ecx
  1636. .long 0x90A548F3 # rep movsq
  1637. mov %eax,(%rdi) # copy aes_key->rounds
  1638. .Lcbc_skip_ecopy:
  1639. mov $key,$keyp # save key pointer
  1640. mov \$18,%ecx
  1641. .align 4
  1642. .Lcbc_prefetch_te:
  1643. mov 0($sbox),%r10
  1644. mov 32($sbox),%r11
  1645. mov 64($sbox),%r12
  1646. mov 96($sbox),%r13
  1647. lea 128($sbox),$sbox
  1648. sub \$1,%ecx
  1649. jnz .Lcbc_prefetch_te
  1650. lea -2304($sbox),$sbox
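	# the prefetch loop touches 18*128 = 2304 bytes, i.e. the whole Te
	# table (2048 bytes) plus the first 256-byte Te4 copy; the lea above
	# rewinds the table pointer back to .LAES_Te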
  1651. cmp \$0,%rbx
  1652. je .LFAST_DECRYPT
  1653. #----------------------------- ENCRYPT -----------------------------#
  1654. mov 0(%rbp),$s0 # load iv
  1655. mov 4(%rbp),$s1
  1656. mov 8(%rbp),$s2
  1657. mov 12(%rbp),$s3
  1658. .align 4
  1659. .Lcbc_fast_enc_loop:
  1660. xor 0($inp),$s0
  1661. xor 4($inp),$s1
  1662. xor 8($inp),$s2
  1663. xor 12($inp),$s3
  1664. mov $keyp,$key # restore key
  1665. mov $inp,$_inp # if ($verticalspin) save inp
  1666. call _x86_64_AES_encrypt
  1667. mov $_inp,$inp # if ($verticalspin) restore inp
  1668. mov $_len,%r10
  1669. mov $s0,0($out)
  1670. mov $s1,4($out)
  1671. mov $s2,8($out)
  1672. mov $s3,12($out)
  1673. lea 16($inp),$inp
  1674. lea 16($out),$out
  1675. sub \$16,%r10
  1676. test \$-16,%r10
  1677. mov %r10,$_len
  1678. jnz .Lcbc_fast_enc_loop
  1679. mov $_ivp,%rbp # restore ivp
  1680. mov $s0,0(%rbp) # save ivec
  1681. mov $s1,4(%rbp)
  1682. mov $s2,8(%rbp)
  1683. mov $s3,12(%rbp)
  1684. jmp .Lcbc_fast_cleanup
  1685. #----------------------------- DECRYPT -----------------------------#
  1686. .align 16
  1687. .LFAST_DECRYPT:
  1688. cmp $inp,$out
  1689. je .Lcbc_fast_dec_in_place
  1690. mov %rbp,$ivec
  1691. .align 4
  1692. .Lcbc_fast_dec_loop:
  1693. mov 0($inp),$s0 # read input
  1694. mov 4($inp),$s1
  1695. mov 8($inp),$s2
  1696. mov 12($inp),$s3
  1697. mov $keyp,$key # restore key
  1698. mov $inp,$_inp # if ($verticalspin) save inp
  1699. call _x86_64_AES_decrypt
  1700. mov $ivec,%rbp # load ivp
  1701. mov $_inp,$inp # if ($verticalspin) restore inp
  1702. mov $_len,%r10 # load len
  1703. xor 0(%rbp),$s0 # xor iv
  1704. xor 4(%rbp),$s1
  1705. xor 8(%rbp),$s2
  1706. xor 12(%rbp),$s3
  1707. mov $inp,%rbp # current input, next iv
  1708. sub \$16,%r10
  1709. mov %r10,$_len # update len
  1710. mov %rbp,$ivec # update ivp
  1711. mov $s0,0($out) # write output
  1712. mov $s1,4($out)
  1713. mov $s2,8($out)
  1714. mov $s3,12($out)
  1715. lea 16($inp),$inp
  1716. lea 16($out),$out
  1717. jnz .Lcbc_fast_dec_loop
  1718. mov $_ivp,%r12 # load user ivp
  1719. mov 0(%rbp),%r10 # load iv
  1720. mov 8(%rbp),%r11
  1721. mov %r10,0(%r12) # copy back to user
  1722. mov %r11,8(%r12)
  1723. jmp .Lcbc_fast_cleanup
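	# in-place decryption: the IV, and the ciphertext block that becomes
	# the next IV, are staged on the stack because the decrypted
	# plaintext is about to overwrite the very same buffer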
  1724. .align 16
  1725. .Lcbc_fast_dec_in_place:
  1726. mov 0(%rbp),%r10 # copy iv to stack
  1727. mov 8(%rbp),%r11
  1728. mov %r10,0+$ivec
  1729. mov %r11,8+$ivec
  1730. .align 4
  1731. .Lcbc_fast_dec_in_place_loop:
  1732. mov 0($inp),$s0 # load input
  1733. mov 4($inp),$s1
  1734. mov 8($inp),$s2
  1735. mov 12($inp),$s3
  1736. mov $keyp,$key # restore key
  1737. mov $inp,$_inp # if ($verticalspin) save inp
  1738. call _x86_64_AES_decrypt
  1739. mov $_inp,$inp # if ($verticalspin) restore inp
  1740. mov $_len,%r10
  1741. xor 0+$ivec,$s0
  1742. xor 4+$ivec,$s1
  1743. xor 8+$ivec,$s2
  1744. xor 12+$ivec,$s3
  1745. mov 0($inp),%r11 # load input
  1746. mov 8($inp),%r12
  1747. sub \$16,%r10
  1748. jz .Lcbc_fast_dec_in_place_done
  1749. mov %r11,0+$ivec # copy input to iv
  1750. mov %r12,8+$ivec
  1751. mov $s0,0($out) # save output [zaps input]
  1752. mov $s1,4($out)
  1753. mov $s2,8($out)
  1754. mov $s3,12($out)
  1755. lea 16($inp),$inp
  1756. lea 16($out),$out
  1757. mov %r10,$_len
  1758. jmp .Lcbc_fast_dec_in_place_loop
  1759. .Lcbc_fast_dec_in_place_done:
  1760. mov $_ivp,%rdi
  1761. mov %r11,0(%rdi) # copy iv back to user
  1762. mov %r12,8(%rdi)
  1763. mov $s0,0($out) # save output [zaps input]
  1764. mov $s1,4($out)
  1765. mov $s2,8($out)
  1766. mov $s3,12($out)
  1767. .align 4
  1768. .Lcbc_fast_cleanup:
  1769. cmpl \$0,$mark # was the key schedule copied?
  1770. lea $aes_key,%rdi
  1771. je .Lcbc_exit
  1772. mov \$240/8,%ecx
  1773. xor %rax,%rax
  1774. .long 0x90AB48F3 # rep stosq
  1775. jmp .Lcbc_exit
  1776. #--------------------------- SLOW ROUTINE ---------------------------#
  1777. .align 16
  1778. .Lcbc_slow_prologue:
  1779. .cfi_restore_state
  1780. # allocate aligned stack frame...
  1781. lea -88(%rsp),%rbp
  1782. and \$-64,%rbp
  1783. # ... just "above" key schedule
  1784. lea -88-63(%rcx),%r10
  1785. sub %rbp,%r10
  1786. neg %r10
  1787. and \$0x3c0,%r10
  1788. sub %r10,%rbp
  1789. xchg %rsp,%rbp
  1790. .cfi_def_cfa_register %rbp
  1791. #add \$8,%rsp # reserve for return address!
  1792. mov %rbp,$_rsp # save %rsp
  1793. .cfi_cfa_expression $_rsp,deref,+64
  1794. .Lcbc_slow_body:
  1795. #mov %rdi,$_inp # save copy of inp
  1796. #mov %rsi,$_out # save copy of out
  1797. #mov %rdx,$_len # save copy of len
  1798. #mov %rcx,$_key # save copy of key
  1799. mov %r8,$_ivp # save copy of ivp
  1800. mov %r8,%rbp # rearrange input arguments
  1801. mov %r9,%rbx
  1802. mov %rsi,$out
  1803. mov %rdi,$inp
  1804. mov %rcx,$key
  1805. mov %rdx,%r10
  1806. mov 240($key),%eax
  1807. mov $key,$keyp # save key pointer
  1808. shl \$4,%eax
  1809. lea ($key,%rax),%rax
  1810. mov %rax,$keyend
  1811. # pick Te4 copy which can't "overlap" with stack frame or key schedule
  1812. lea 2048($sbox),$sbox
  1813. lea 768-8(%rsp),%rax
  1814. sub $sbox,%rax
  1815. and \$0x300,%rax
  1816. lea ($sbox,%rax),$sbox
  1817. cmp \$0,%rbx
  1818. je .LSLOW_DECRYPT
  1819. #--------------------------- SLOW ENCRYPT ---------------------------#
  1820. test \$-16,%r10 # any full 16-byte blocks in the length?
  1821. mov 0(%rbp),$s0 # load iv
  1822. mov 4(%rbp),$s1
  1823. mov 8(%rbp),$s2
  1824. mov 12(%rbp),$s3
  1825. jz .Lcbc_slow_enc_tail # short input...
  1826. .align 4
  1827. .Lcbc_slow_enc_loop:
  1828. xor 0($inp),$s0
  1829. xor 4($inp),$s1
  1830. xor 8($inp),$s2
  1831. xor 12($inp),$s3
  1832. mov $keyp,$key # restore key
  1833. mov $inp,$_inp # save inp
  1834. mov $out,$_out # save out
  1835. mov %r10,$_len # save len
  1836. call _x86_64_AES_encrypt_compact
  1837. mov $_inp,$inp # restore inp
  1838. mov $_out,$out # restore out
  1839. mov $_len,%r10 # restore len
  1840. mov $s0,0($out)
  1841. mov $s1,4($out)
  1842. mov $s2,8($out)
  1843. mov $s3,12($out)
  1844. lea 16($inp),$inp
  1845. lea 16($out),$out
  1846. sub \$16,%r10
  1847. test \$-16,%r10
  1848. jnz .Lcbc_slow_enc_loop
  1849. test \$15,%r10
  1850. jnz .Lcbc_slow_enc_tail
  1851. mov $_ivp,%rbp # restore ivp
  1852. mov $s0,0(%rbp) # save ivec
  1853. mov $s1,4(%rbp)
  1854. mov $s2,8(%rbp)
  1855. mov $s3,12(%rbp)
  1856. jmp .Lcbc_exit
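	# ragged tail (encrypt only): copy the remaining 1..15 input bytes to
	# the output buffer, zero-pad them to a full 16-byte block and take
	# one more spin through the loop to encrypt that padded block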
  1857. .align 4
  1858. .Lcbc_slow_enc_tail:
  1859. mov %rax,%r11
  1860. mov %rcx,%r12
  1861. mov %r10,%rcx
  1862. mov $inp,%rsi
  1863. mov $out,%rdi
  1864. .long 0x9066A4F3 # rep movsb
  1865. mov \$16,%rcx # zero tail
  1866. sub %r10,%rcx
  1867. xor %rax,%rax
  1868. .long 0x9066AAF3 # rep stosb
  1869. mov $out,$inp # this is not a mistake!
  1870. mov \$16,%r10 # len=16
  1871. mov %r11,%rax
  1872. mov %r12,%rcx
  1873. jmp .Lcbc_slow_enc_loop # one more spin...
  1874. #--------------------------- SLOW DECRYPT ---------------------------#
  1875. .align 16
  1876. .LSLOW_DECRYPT:
  1877. shr \$3,%rax
  1878. add %rax,$sbox # recall "magic" constants!
  1879. mov 0(%rbp),%r11 # copy iv to stack
  1880. mov 8(%rbp),%r12
  1881. mov %r11,0+$ivec
  1882. mov %r12,8+$ivec
  1883. .align 4
  1884. .Lcbc_slow_dec_loop:
  1885. mov 0($inp),$s0 # load input
  1886. mov 4($inp),$s1
  1887. mov 8($inp),$s2
  1888. mov 12($inp),$s3
  1889. mov $keyp,$key # restore key
  1890. mov $inp,$_inp # save inp
  1891. mov $out,$_out # save out
  1892. mov %r10,$_len # save len
  1893. call _x86_64_AES_decrypt_compact
  1894. mov $_inp,$inp # restore inp
  1895. mov $_out,$out # restore out
  1896. mov $_len,%r10
  1897. xor 0+$ivec,$s0
  1898. xor 4+$ivec,$s1
  1899. xor 8+$ivec,$s2
  1900. xor 12+$ivec,$s3
  1901. mov 0($inp),%r11 # load input
  1902. mov 8($inp),%r12
  1903. sub \$16,%r10
  1904. jc .Lcbc_slow_dec_partial
  1905. jz .Lcbc_slow_dec_done
  1906. mov %r11,0+$ivec # copy input to iv
  1907. mov %r12,8+$ivec
  1908. mov $s0,0($out) # save output [can zap input]
  1909. mov $s1,4($out)
  1910. mov $s2,8($out)
  1911. mov $s3,12($out)
  1912. lea 16($inp),$inp
  1913. lea 16($out),$out
  1914. jmp .Lcbc_slow_dec_loop
  1915. .Lcbc_slow_dec_done:
  1916. mov $_ivp,%rdi
  1917. mov %r11,0(%rdi) # copy iv back to user
  1918. mov %r12,8(%rdi)
  1919. mov $s0,0($out) # save output [can zap input]
  1920. mov $s1,4($out)
  1921. mov $s2,8($out)
  1922. mov $s3,12($out)
  1923. jmp .Lcbc_exit
  1924. .align 4
  1925. .Lcbc_slow_dec_partial:
  1926. mov $_ivp,%rdi
  1927. mov %r11,0(%rdi) # copy iv back to user
  1928. mov %r12,8(%rdi)
  1929. mov $s0,0+$ivec # save output to stack
  1930. mov $s1,4+$ivec
  1931. mov $s2,8+$ivec
  1932. mov $s3,12+$ivec
  1933. mov $out,%rdi
  1934. lea $ivec,%rsi
  1935. lea 16(%r10),%rcx
  1936. .long 0x9066A4F3 # rep movsb
  1937. jmp .Lcbc_exit
  1938. .align 16
  1939. .Lcbc_exit:
  1940. mov $_rsp,%rsi
  1941. .cfi_def_cfa %rsi,64
  1942. mov (%rsi),%r15
  1943. .cfi_restore %r15
  1944. mov 8(%rsi),%r14
  1945. .cfi_restore %r14
  1946. mov 16(%rsi),%r13
  1947. .cfi_restore %r13
  1948. mov 24(%rsi),%r12
  1949. .cfi_restore %r12
  1950. mov 32(%rsi),%rbp
  1951. .cfi_restore %rbp
  1952. mov 40(%rsi),%rbx
  1953. .cfi_restore %rbx
  1954. lea 48(%rsi),%rsp
  1955. .cfi_def_cfa %rsp,16
  1956. .Lcbc_popfq:
  1957. popfq
  1958. # This could be .cfi_pop 49, but libunwind fails on registers it does not
  1959. # recognize. See https://bugzilla.redhat.com/show_bug.cgi?id=217087.
  1960. .cfi_adjust_cfa_offset -8
  1961. .Lcbc_epilogue:
  1962. ret
  1963. .cfi_endproc
  1964. .size AES_cbc_encrypt,.-AES_cbc_encrypt
  1965. ___
  1966. }
  1967. $code.=<<___;
  1968. .align 64
  1969. .LAES_Te:
  1970. ___
  1971. &_data_word(0xa56363c6, 0x847c7cf8, 0x997777ee, 0x8d7b7bf6);
  1972. &_data_word(0x0df2f2ff, 0xbd6b6bd6, 0xb16f6fde, 0x54c5c591);
  1973. &_data_word(0x50303060, 0x03010102, 0xa96767ce, 0x7d2b2b56);
  1974. &_data_word(0x19fefee7, 0x62d7d7b5, 0xe6abab4d, 0x9a7676ec);
  1975. &_data_word(0x45caca8f, 0x9d82821f, 0x40c9c989, 0x877d7dfa);
  1976. &_data_word(0x15fafaef, 0xeb5959b2, 0xc947478e, 0x0bf0f0fb);
  1977. &_data_word(0xecadad41, 0x67d4d4b3, 0xfda2a25f, 0xeaafaf45);
  1978. &_data_word(0xbf9c9c23, 0xf7a4a453, 0x967272e4, 0x5bc0c09b);
  1979. &_data_word(0xc2b7b775, 0x1cfdfde1, 0xae93933d, 0x6a26264c);
  1980. &_data_word(0x5a36366c, 0x413f3f7e, 0x02f7f7f5, 0x4fcccc83);
  1981. &_data_word(0x5c343468, 0xf4a5a551, 0x34e5e5d1, 0x08f1f1f9);
  1982. &_data_word(0x937171e2, 0x73d8d8ab, 0x53313162, 0x3f15152a);
  1983. &_data_word(0x0c040408, 0x52c7c795, 0x65232346, 0x5ec3c39d);
  1984. &_data_word(0x28181830, 0xa1969637, 0x0f05050a, 0xb59a9a2f);
  1985. &_data_word(0x0907070e, 0x36121224, 0x9b80801b, 0x3de2e2df);
  1986. &_data_word(0x26ebebcd, 0x6927274e, 0xcdb2b27f, 0x9f7575ea);
  1987. &_data_word(0x1b090912, 0x9e83831d, 0x742c2c58, 0x2e1a1a34);
  1988. &_data_word(0x2d1b1b36, 0xb26e6edc, 0xee5a5ab4, 0xfba0a05b);
  1989. &_data_word(0xf65252a4, 0x4d3b3b76, 0x61d6d6b7, 0xceb3b37d);
  1990. &_data_word(0x7b292952, 0x3ee3e3dd, 0x712f2f5e, 0x97848413);
  1991. &_data_word(0xf55353a6, 0x68d1d1b9, 0x00000000, 0x2cededc1);
  1992. &_data_word(0x60202040, 0x1ffcfce3, 0xc8b1b179, 0xed5b5bb6);
  1993. &_data_word(0xbe6a6ad4, 0x46cbcb8d, 0xd9bebe67, 0x4b393972);
  1994. &_data_word(0xde4a4a94, 0xd44c4c98, 0xe85858b0, 0x4acfcf85);
  1995. &_data_word(0x6bd0d0bb, 0x2aefefc5, 0xe5aaaa4f, 0x16fbfbed);
  1996. &_data_word(0xc5434386, 0xd74d4d9a, 0x55333366, 0x94858511);
  1997. &_data_word(0xcf45458a, 0x10f9f9e9, 0x06020204, 0x817f7ffe);
  1998. &_data_word(0xf05050a0, 0x443c3c78, 0xba9f9f25, 0xe3a8a84b);
  1999. &_data_word(0xf35151a2, 0xfea3a35d, 0xc0404080, 0x8a8f8f05);
  2000. &_data_word(0xad92923f, 0xbc9d9d21, 0x48383870, 0x04f5f5f1);
  2001. &_data_word(0xdfbcbc63, 0xc1b6b677, 0x75dadaaf, 0x63212142);
  2002. &_data_word(0x30101020, 0x1affffe5, 0x0ef3f3fd, 0x6dd2d2bf);
  2003. &_data_word(0x4ccdcd81, 0x140c0c18, 0x35131326, 0x2fececc3);
  2004. &_data_word(0xe15f5fbe, 0xa2979735, 0xcc444488, 0x3917172e);
  2005. &_data_word(0x57c4c493, 0xf2a7a755, 0x827e7efc, 0x473d3d7a);
  2006. &_data_word(0xac6464c8, 0xe75d5dba, 0x2b191932, 0x957373e6);
  2007. &_data_word(0xa06060c0, 0x98818119, 0xd14f4f9e, 0x7fdcdca3);
  2008. &_data_word(0x66222244, 0x7e2a2a54, 0xab90903b, 0x8388880b);
  2009. &_data_word(0xca46468c, 0x29eeeec7, 0xd3b8b86b, 0x3c141428);
  2010. &_data_word(0x79dedea7, 0xe25e5ebc, 0x1d0b0b16, 0x76dbdbad);
  2011. &_data_word(0x3be0e0db, 0x56323264, 0x4e3a3a74, 0x1e0a0a14);
  2012. &_data_word(0xdb494992, 0x0a06060c, 0x6c242448, 0xe45c5cb8);
  2013. &_data_word(0x5dc2c29f, 0x6ed3d3bd, 0xefacac43, 0xa66262c4);
  2014. &_data_word(0xa8919139, 0xa4959531, 0x37e4e4d3, 0x8b7979f2);
  2015. &_data_word(0x32e7e7d5, 0x43c8c88b, 0x5937376e, 0xb76d6dda);
  2016. &_data_word(0x8c8d8d01, 0x64d5d5b1, 0xd24e4e9c, 0xe0a9a949);
  2017. &_data_word(0xb46c6cd8, 0xfa5656ac, 0x07f4f4f3, 0x25eaeacf);
  2018. &_data_word(0xaf6565ca, 0x8e7a7af4, 0xe9aeae47, 0x18080810);
  2019. &_data_word(0xd5baba6f, 0x887878f0, 0x6f25254a, 0x722e2e5c);
  2020. &_data_word(0x241c1c38, 0xf1a6a657, 0xc7b4b473, 0x51c6c697);
  2021. &_data_word(0x23e8e8cb, 0x7cdddda1, 0x9c7474e8, 0x211f1f3e);
  2022. &_data_word(0xdd4b4b96, 0xdcbdbd61, 0x868b8b0d, 0x858a8a0f);
  2023. &_data_word(0x907070e0, 0x423e3e7c, 0xc4b5b571, 0xaa6666cc);
  2024. &_data_word(0xd8484890, 0x05030306, 0x01f6f6f7, 0x120e0e1c);
  2025. &_data_word(0xa36161c2, 0x5f35356a, 0xf95757ae, 0xd0b9b969);
  2026. &_data_word(0x91868617, 0x58c1c199, 0x271d1d3a, 0xb99e9e27);
  2027. &_data_word(0x38e1e1d9, 0x13f8f8eb, 0xb398982b, 0x33111122);
  2028. &_data_word(0xbb6969d2, 0x70d9d9a9, 0x898e8e07, 0xa7949433);
  2029. &_data_word(0xb69b9b2d, 0x221e1e3c, 0x92878715, 0x20e9e9c9);
  2030. &_data_word(0x49cece87, 0xff5555aa, 0x78282850, 0x7adfdfa5);
  2031. &_data_word(0x8f8c8c03, 0xf8a1a159, 0x80898909, 0x170d0d1a);
  2032. &_data_word(0xdabfbf65, 0x31e6e6d7, 0xc6424284, 0xb86868d0);
  2033. &_data_word(0xc3414182, 0xb0999929, 0x772d2d5a, 0x110f0f1e);
  2034. &_data_word(0xcbb0b07b, 0xfc5454a8, 0xd6bbbb6d, 0x3a16162c);
  2035. #Te4 # four copies of Te4 to choose from to avoid L1 aliasing
  2036. &data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
  2037. &data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
  2038. &data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
  2039. &data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
  2040. &data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
  2041. &data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
  2042. &data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
  2043. &data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
  2044. &data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
  2045. &data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
  2046. &data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
  2047. &data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
  2048. &data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
  2049. &data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
  2050. &data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
  2051. &data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
  2052. &data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
  2053. &data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
  2054. &data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
  2055. &data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
  2056. &data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
  2057. &data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
  2058. &data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
  2059. &data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
  2060. &data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
  2061. &data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
  2062. &data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
  2063. &data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
  2064. &data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
  2065. &data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
  2066. &data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
  2067. &data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
  2068. &data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
  2069. &data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
  2070. &data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
  2071. &data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
  2072. &data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
  2073. &data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
  2074. &data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
  2075. &data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
  2076. &data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
  2077. &data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
  2078. &data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
  2079. &data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
  2080. &data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
  2081. &data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
  2082. &data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
  2083. &data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
  2084. &data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
  2085. &data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
  2086. &data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
  2087. &data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
  2088. &data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
  2089. &data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
  2090. &data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
  2091. &data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
  2092. &data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
  2093. &data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
  2094. &data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
  2095. &data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
  2096. &data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
  2097. &data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
  2098. &data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
  2099. &data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
  2100. &data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
  2101. &data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
  2102. &data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
  2103. &data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
  2104. &data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
  2105. &data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
  2106. &data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
  2107. &data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
  2108. &data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
  2109. &data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
  2110. &data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
  2111. &data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
  2112. &data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
  2113. &data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
  2114. &data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
  2115. &data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
  2116. &data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
  2117. &data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
  2118. &data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
  2119. &data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
  2120. &data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
  2121. &data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
  2122. &data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
  2123. &data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
  2124. &data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
  2125. &data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
  2126. &data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
  2127. &data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
  2128. &data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
  2129. &data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
  2130. &data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
  2131. &data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
  2132. &data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
  2133. &data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
  2134. &data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
  2135. &data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
  2136. &data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
  2137. &data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
  2138. &data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
  2139. &data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
  2140. &data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
  2141. &data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
  2142. &data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
  2143. &data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
  2144. &data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
  2145. &data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
  2146. &data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
  2147. &data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
  2148. &data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
  2149. &data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
  2150. &data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
  2151. &data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
  2152. &data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
  2153. &data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
  2154. &data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
  2155. &data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
  2156. &data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
  2157. &data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
  2158. &data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
  2159. &data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
  2160. &data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
  2161. &data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
  2162. &data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
  2163. &data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
  2164. #rcon:
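# The first ten words are the AES round constants rcon[1..10]; the trailing
# 0x80808080/0xfefefefe/0x1b1b1b1b pairs are the byte-replicated masks that
# AES_set_decrypt_key picks up at rcon+40/+48/+56 ($mask80/$maskfe/$mask1b)
# for the key-schedule inverse transform.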
  2165. $code.=<<___;
  2166. .long 0x00000001, 0x00000002, 0x00000004, 0x00000008
  2167. .long 0x00000010, 0x00000020, 0x00000040, 0x00000080
  2168. .long 0x0000001b, 0x00000036, 0x80808080, 0x80808080
  2169. .long 0xfefefefe, 0xfefefefe, 0x1b1b1b1b, 0x1b1b1b1b
  2170. ___
  2171. $code.=<<___;
  2172. .align 64
  2173. .LAES_Td:
  2174. ___
  2175. &_data_word(0x50a7f451, 0x5365417e, 0xc3a4171a, 0x965e273a);
  2176. &_data_word(0xcb6bab3b, 0xf1459d1f, 0xab58faac, 0x9303e34b);
  2177. &_data_word(0x55fa3020, 0xf66d76ad, 0x9176cc88, 0x254c02f5);
  2178. &_data_word(0xfcd7e54f, 0xd7cb2ac5, 0x80443526, 0x8fa362b5);
  2179. &_data_word(0x495ab1de, 0x671bba25, 0x980eea45, 0xe1c0fe5d);
  2180. &_data_word(0x02752fc3, 0x12f04c81, 0xa397468d, 0xc6f9d36b);
  2181. &_data_word(0xe75f8f03, 0x959c9215, 0xeb7a6dbf, 0xda595295);
  2182. &_data_word(0x2d83bed4, 0xd3217458, 0x2969e049, 0x44c8c98e);
  2183. &_data_word(0x6a89c275, 0x78798ef4, 0x6b3e5899, 0xdd71b927);
  2184. &_data_word(0xb64fe1be, 0x17ad88f0, 0x66ac20c9, 0xb43ace7d);
  2185. &_data_word(0x184adf63, 0x82311ae5, 0x60335197, 0x457f5362);
  2186. &_data_word(0xe07764b1, 0x84ae6bbb, 0x1ca081fe, 0x942b08f9);
  2187. &_data_word(0x58684870, 0x19fd458f, 0x876cde94, 0xb7f87b52);
  2188. &_data_word(0x23d373ab, 0xe2024b72, 0x578f1fe3, 0x2aab5566);
  2189. &_data_word(0x0728ebb2, 0x03c2b52f, 0x9a7bc586, 0xa50837d3);
  2190. &_data_word(0xf2872830, 0xb2a5bf23, 0xba6a0302, 0x5c8216ed);
  2191. &_data_word(0x2b1ccf8a, 0x92b479a7, 0xf0f207f3, 0xa1e2694e);
  2192. &_data_word(0xcdf4da65, 0xd5be0506, 0x1f6234d1, 0x8afea6c4);
  2193. &_data_word(0x9d532e34, 0xa055f3a2, 0x32e18a05, 0x75ebf6a4);
  2194. &_data_word(0x39ec830b, 0xaaef6040, 0x069f715e, 0x51106ebd);
  2195. &_data_word(0xf98a213e, 0x3d06dd96, 0xae053edd, 0x46bde64d);
  2196. &_data_word(0xb58d5491, 0x055dc471, 0x6fd40604, 0xff155060);
  2197. &_data_word(0x24fb9819, 0x97e9bdd6, 0xcc434089, 0x779ed967);
  2198. &_data_word(0xbd42e8b0, 0x888b8907, 0x385b19e7, 0xdbeec879);
  2199. &_data_word(0x470a7ca1, 0xe90f427c, 0xc91e84f8, 0x00000000);
  2200. &_data_word(0x83868009, 0x48ed2b32, 0xac70111e, 0x4e725a6c);
  2201. &_data_word(0xfbff0efd, 0x5638850f, 0x1ed5ae3d, 0x27392d36);
  2202. &_data_word(0x64d90f0a, 0x21a65c68, 0xd1545b9b, 0x3a2e3624);
  2203. &_data_word(0xb1670a0c, 0x0fe75793, 0xd296eeb4, 0x9e919b1b);
  2204. &_data_word(0x4fc5c080, 0xa220dc61, 0x694b775a, 0x161a121c);
  2205. &_data_word(0x0aba93e2, 0xe52aa0c0, 0x43e0223c, 0x1d171b12);
  2206. &_data_word(0x0b0d090e, 0xadc78bf2, 0xb9a8b62d, 0xc8a91e14);
  2207. &_data_word(0x8519f157, 0x4c0775af, 0xbbdd99ee, 0xfd607fa3);
  2208. &_data_word(0x9f2601f7, 0xbcf5725c, 0xc53b6644, 0x347efb5b);
  2209. &_data_word(0x7629438b, 0xdcc623cb, 0x68fcedb6, 0x63f1e4b8);
  2210. &_data_word(0xcadc31d7, 0x10856342, 0x40229713, 0x2011c684);
  2211. &_data_word(0x7d244a85, 0xf83dbbd2, 0x1132f9ae, 0x6da129c7);
  2212. &_data_word(0x4b2f9e1d, 0xf330b2dc, 0xec52860d, 0xd0e3c177);
  2213. &_data_word(0x6c16b32b, 0x99b970a9, 0xfa489411, 0x2264e947);
  2214. &_data_word(0xc48cfca8, 0x1a3ff0a0, 0xd82c7d56, 0xef903322);
  2215. &_data_word(0xc74e4987, 0xc1d138d9, 0xfea2ca8c, 0x360bd498);
  2216. &_data_word(0xcf81f5a6, 0x28de7aa5, 0x268eb7da, 0xa4bfad3f);
  2217. &_data_word(0xe49d3a2c, 0x0d927850, 0x9bcc5f6a, 0x62467e54);
  2218. &_data_word(0xc2138df6, 0xe8b8d890, 0x5ef7392e, 0xf5afc382);
  2219. &_data_word(0xbe805d9f, 0x7c93d069, 0xa92dd56f, 0xb31225cf);
  2220. &_data_word(0x3b99acc8, 0xa77d1810, 0x6e639ce8, 0x7bbb3bdb);
  2221. &_data_word(0x097826cd, 0xf418596e, 0x01b79aec, 0xa89a4f83);
  2222. &_data_word(0x656e95e6, 0x7ee6ffaa, 0x08cfbc21, 0xe6e815ef);
  2223. &_data_word(0xd99be7ba, 0xce366f4a, 0xd4099fea, 0xd67cb029);
  2224. &_data_word(0xafb2a431, 0x31233f2a, 0x3094a5c6, 0xc066a235);
  2225. &_data_word(0x37bc4e74, 0xa6ca82fc, 0xb0d090e0, 0x15d8a733);
  2226. &_data_word(0x4a9804f1, 0xf7daec41, 0x0e50cd7f, 0x2ff69117);
  2227. &_data_word(0x8dd64d76, 0x4db0ef43, 0x544daacc, 0xdf0496e4);
  2228. &_data_word(0xe3b5d19e, 0x1b886a4c, 0xb81f2cc1, 0x7f516546);
  2229. &_data_word(0x04ea5e9d, 0x5d358c01, 0x737487fa, 0x2e410bfb);
  2230. &_data_word(0x5a1d67b3, 0x52d2db92, 0x335610e9, 0x1347d66d);
  2231. &_data_word(0x8c61d79a, 0x7a0ca137, 0x8e14f859, 0x893c13eb);
  2232. &_data_word(0xee27a9ce, 0x35c961b7, 0xede51ce1, 0x3cb1477a);
  2233. &_data_word(0x59dfd29c, 0x3f73f255, 0x79ce1418, 0xbf37c773);
  2234. &_data_word(0xeacdf753, 0x5baafd5f, 0x146f3ddf, 0x86db4478);
  2235. &_data_word(0x81f3afca, 0x3ec468b9, 0x2c342438, 0x5f40a3c2);
  2236. &_data_word(0x72c31d16, 0x0c25e2bc, 0x8b493c28, 0x41950dff);
  2237. &_data_word(0x7101a839, 0xdeb30c08, 0x9ce4b4d8, 0x90c15664);
  2238. &_data_word(0x6184cb7b, 0x70b632d5, 0x745c6c48, 0x4257b8d0);
  2239. #Td4: # four copies of Td4 to choose from to avoid L1 aliasing
  2240. &data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
  2241. &data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
  2242. &data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
  2243. &data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
  2244. &data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
  2245. &data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
  2246. &data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
  2247. &data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
  2248. &data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
  2249. &data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
  2250. &data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
  2251. &data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
  2252. &data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
  2253. &data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
  2254. &data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
  2255. &data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
  2256. &data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
  2257. &data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
  2258. &data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
  2259. &data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
  2260. &data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
  2261. &data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
  2262. &data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
  2263. &data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
  2264. &data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
  2265. &data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
  2266. &data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
  2267. &data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
  2268. &data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
  2269. &data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
  2270. &data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
  2271. &data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
  2272. $code.=<<___;
  2273. .long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
  2274. .long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
  2275. ___
  2276. &data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
  2277. &data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
  2278. &data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
  2279. &data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
  2280. &data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
  2281. &data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
  2282. &data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
  2283. &data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
  2284. &data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
  2285. &data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
  2286. &data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
  2287. &data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
  2288. &data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
  2289. &data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
  2290. &data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
  2291. &data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
  2292. &data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
  2293. &data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
  2294. &data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
  2295. &data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
  2296. &data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
  2297. &data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
  2298. &data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
  2299. &data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
  2300. &data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
  2301. &data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
  2302. &data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
  2303. &data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
  2304. &data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
  2305. &data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
  2306. &data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
  2307. &data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
  2308. $code.=<<___;
  2309. .long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
  2310. .long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
  2311. ___
  2312. &data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
  2313. &data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
  2314. &data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
  2315. &data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
  2316. &data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
  2317. &data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
  2318. &data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
  2319. &data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
  2320. &data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
  2321. &data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
  2322. &data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
  2323. &data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
  2324. &data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
  2325. &data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
  2326. &data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
  2327. &data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
  2328. &data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
  2329. &data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
  2330. &data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
  2331. &data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
  2332. &data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
  2333. &data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
  2334. &data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
  2335. &data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
  2336. &data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
  2337. &data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
  2338. &data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
  2339. &data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
  2340. &data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
  2341. &data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
  2342. &data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
  2343. &data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
  2344. $code.=<<___;
  2345. .long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
  2346. .long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
  2347. ___
  2348. &data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
  2349. &data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
  2350. &data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
  2351. &data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
  2352. &data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
  2353. &data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
  2354. &data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
  2355. &data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
  2356. &data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
  2357. &data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
  2358. &data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
  2359. &data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
  2360. &data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
  2361. &data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
  2362. &data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
  2363. &data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
  2364. &data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
  2365. &data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
  2366. &data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
  2367. &data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
  2368. &data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
  2369. &data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
  2370. &data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
  2371. &data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
  2372. &data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
  2373. &data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
  2374. &data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
  2375. &data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
  2376. &data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
  2377. &data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
  2378. &data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
  2379. &data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
  2380. $code.=<<___;
  2381. .long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
  2382. .long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
  2383. .asciz "AES for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
  2384. .align 64
  2385. ___
  2386. # EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
  2387. # CONTEXT *context,DISPATCHER_CONTEXT *disp)
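# The handlers below follow the usual perlasm SEH pattern: block_se_handler
# and key_se_handler read the prologue/epilogue label RVAs from
# HandlerData[], while cbc_se_handler locates its labels with RIP-relative
# leas; whenever the faulting RIP lies inside a function body, the
# non-volatile registers are recovered from the saved frame before control
# reaches .Lcommon_seh_exit, which calls RtlVirtualUnwind and returns
# ExceptionContinueSearch.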
  2388. if ($win64) {
  2389. $rec="%rcx";
  2390. $frame="%rdx";
  2391. $context="%r8";
  2392. $disp="%r9";
  2393. $code.=<<___;
  2394. .extern __imp_RtlVirtualUnwind
  2395. .type block_se_handler,\@abi-omnipotent
  2396. .align 16
  2397. block_se_handler:
  2398. push %rsi
  2399. push %rdi
  2400. push %rbx
  2401. push %rbp
  2402. push %r12
  2403. push %r13
  2404. push %r14
  2405. push %r15
  2406. pushfq
  2407. sub \$64,%rsp
  2408. mov 120($context),%rax # pull context->Rax
  2409. mov 248($context),%rbx # pull context->Rip
  2410. mov 8($disp),%rsi # disp->ImageBase
  2411. mov 56($disp),%r11 # disp->HandlerData
  2412. mov 0(%r11),%r10d # HandlerData[0]
  2413. lea (%rsi,%r10),%r10 # prologue label
  2414. cmp %r10,%rbx # context->Rip<prologue label
  2415. jb .Lin_block_prologue
  2416. mov 152($context),%rax # pull context->Rsp
  2417. mov 4(%r11),%r10d # HandlerData[1]
  2418. lea (%rsi,%r10),%r10 # epilogue label
  2419. cmp %r10,%rbx # context->Rip>=epilogue label
  2420. jae .Lin_block_prologue
  2421. mov 24(%rax),%rax # pull saved real stack pointer
  2422. mov -8(%rax),%rbx
  2423. mov -16(%rax),%rbp
  2424. mov -24(%rax),%r12
  2425. mov -32(%rax),%r13
  2426. mov -40(%rax),%r14
  2427. mov -48(%rax),%r15
  2428. mov %rbx,144($context) # restore context->Rbx
  2429. mov %rbp,160($context) # restore context->Rbp
  2430. mov %r12,216($context) # restore context->R12
  2431. mov %r13,224($context) # restore context->R13
  2432. mov %r14,232($context) # restore context->R14
  2433. mov %r15,240($context) # restore context->R15
  2434. .Lin_block_prologue:
  2435. mov 8(%rax),%rdi
  2436. mov 16(%rax),%rsi
  2437. mov %rax,152($context) # restore context->Rsp
  2438. mov %rsi,168($context) # restore context->Rsi
  2439. mov %rdi,176($context) # restore context->Rdi
  2440. jmp .Lcommon_seh_exit
  2441. .size block_se_handler,.-block_se_handler
  2442. .type key_se_handler,\@abi-omnipotent
  2443. .align 16
  2444. key_se_handler:
  2445. push %rsi
  2446. push %rdi
  2447. push %rbx
  2448. push %rbp
  2449. push %r12
  2450. push %r13
  2451. push %r14
  2452. push %r15
  2453. pushfq
  2454. sub \$64,%rsp
  2455. mov 120($context),%rax # pull context->Rax
  2456. mov 248($context),%rbx # pull context->Rip
  2457. mov 8($disp),%rsi # disp->ImageBase
  2458. mov 56($disp),%r11 # disp->HandlerData
  2459. mov 0(%r11),%r10d # HandlerData[0]
  2460. lea (%rsi,%r10),%r10 # prologue label
  2461. cmp %r10,%rbx # context->Rip<prologue label
  2462. jb .Lin_key_prologue
  2463. mov 152($context),%rax # pull context->Rsp
  2464. mov 4(%r11),%r10d # HandlerData[1]
  2465. lea (%rsi,%r10),%r10 # epilogue label
  2466. cmp %r10,%rbx # context->Rip>=epilogue label
  2467. jae .Lin_key_prologue
  2468. lea 56(%rax),%rax
  2469. mov -8(%rax),%rbx
  2470. mov -16(%rax),%rbp
  2471. mov -24(%rax),%r12
  2472. mov -32(%rax),%r13
  2473. mov -40(%rax),%r14
  2474. mov -48(%rax),%r15
  2475. mov %rbx,144($context) # restore context->Rbx
  2476. mov %rbp,160($context) # restore context->Rbp
  2477. mov %r12,216($context) # restore context->R12
  2478. mov %r13,224($context) # restore context->R13
  2479. mov %r14,232($context) # restore context->R14
  2480. mov %r15,240($context) # restore context->R15
  2481. .Lin_key_prologue:
  2482. mov 8(%rax),%rdi
  2483. mov 16(%rax),%rsi
  2484. mov %rax,152($context) # restore context->Rsp
  2485. mov %rsi,168($context) # restore context->Rsi
  2486. mov %rdi,176($context) # restore context->Rdi
  2487. jmp .Lcommon_seh_exit
  2488. .size key_se_handler,.-key_se_handler
  2489. .type cbc_se_handler,\@abi-omnipotent
  2490. .align 16
  2491. cbc_se_handler:
  2492. push %rsi
  2493. push %rdi
  2494. push %rbx
  2495. push %rbp
  2496. push %r12
  2497. push %r13
  2498. push %r14
  2499. push %r15
  2500. pushfq
  2501. sub \$64,%rsp
  2502. mov 120($context),%rax # pull context->Rax
  2503. mov 248($context),%rbx # pull context->Rip
  2504. lea .Lcbc_prologue(%rip),%r10
  2505. cmp %r10,%rbx # context->Rip<.Lcbc_prologue
  2506. jb .Lin_cbc_prologue
  2507. lea .Lcbc_fast_body(%rip),%r10
  2508. cmp %r10,%rbx # context->Rip<.Lcbc_fast_body
  2509. jb .Lin_cbc_frame_setup
  2510. lea .Lcbc_slow_prologue(%rip),%r10
  2511. cmp %r10,%rbx # context->Rip<.Lcbc_slow_prologue
  2512. jb .Lin_cbc_body
  2513. lea .Lcbc_slow_body(%rip),%r10
  2514. cmp %r10,%rbx # context->Rip<.Lcbc_slow_body
  2515. jb .Lin_cbc_frame_setup
  2516. .Lin_cbc_body:
  2517. mov 152($context),%rax # pull context->Rsp
  2518. lea .Lcbc_epilogue(%rip),%r10
  2519. cmp %r10,%rbx # context->Rip>=.Lcbc_epilogue
  2520. jae .Lin_cbc_prologue
  2521. lea 8(%rax),%rax
  2522. lea .Lcbc_popfq(%rip),%r10
  2523. cmp %r10,%rbx # context->Rip>=.Lcbc_popfq
  2524. jae .Lin_cbc_prologue
  2525. mov `16-8`(%rax),%rax # biased $_rsp
  2526. lea 56(%rax),%rax
  2527. .Lin_cbc_frame_setup:
  2528. mov -16(%rax),%rbx
  2529. mov -24(%rax),%rbp
  2530. mov -32(%rax),%r12
  2531. mov -40(%rax),%r13
  2532. mov -48(%rax),%r14
  2533. mov -56(%rax),%r15
  2534. mov %rbx,144($context) # restore context->Rbx
  2535. mov %rbp,160($context) # restore context->Rbp
  2536. mov %r12,216($context) # restore context->R12
  2537. mov %r13,224($context) # restore context->R13
  2538. mov %r14,232($context) # restore context->R14
  2539. mov %r15,240($context) # restore context->R15
  2540. .Lin_cbc_prologue:
  2541. mov 8(%rax),%rdi
  2542. mov 16(%rax),%rsi
  2543. mov %rax,152($context) # restore context->Rsp
  2544. mov %rsi,168($context) # restore context->Rsi
  2545. mov %rdi,176($context) # restore context->Rdi
  2546. .Lcommon_seh_exit:
  2547. mov 40($disp),%rdi # disp->ContextRecord
  2548. mov $context,%rsi # context
  2549. mov \$`1232/8`,%ecx # sizeof(CONTEXT)
  2550. .long 0xa548f3fc # cld; rep movsq
  2551. mov $disp,%rsi
  2552. xor %rcx,%rcx # arg1, UNW_FLAG_NHANDLER
  2553. mov 8(%rsi),%rdx # arg2, disp->ImageBase
  2554. mov 0(%rsi),%r8 # arg3, disp->ControlPc
  2555. mov 16(%rsi),%r9 # arg4, disp->FunctionEntry
  2556. mov 40(%rsi),%r10 # disp->ContextRecord
  2557. lea 56(%rsi),%r11 # &disp->HandlerData
  2558. lea 24(%rsi),%r12 # &disp->EstablisherFrame
  2559. mov %r10,32(%rsp) # arg5
  2560. mov %r11,40(%rsp) # arg6
  2561. mov %r12,48(%rsp) # arg7
  2562. mov %rcx,56(%rsp) # arg8, (NULL)
  2563. call *__imp_RtlVirtualUnwind(%rip)
  2564. mov \$1,%eax # ExceptionContinueSearch
  2565. add \$64,%rsp
  2566. popfq
  2567. pop %r15
  2568. pop %r14
  2569. pop %r13
  2570. pop %r12
  2571. pop %rbp
  2572. pop %rbx
  2573. pop %rdi
  2574. pop %rsi
  2575. ret
  2576. .size cbc_se_handler,.-cbc_se_handler
  2577. .section .pdata
  2578. .align 4
  2579. .rva .LSEH_begin_AES_encrypt
  2580. .rva .LSEH_end_AES_encrypt
  2581. .rva .LSEH_info_AES_encrypt
  2582. .rva .LSEH_begin_AES_decrypt
  2583. .rva .LSEH_end_AES_decrypt
  2584. .rva .LSEH_info_AES_decrypt
  2585. .rva .LSEH_begin_AES_set_encrypt_key
  2586. .rva .LSEH_end_AES_set_encrypt_key
  2587. .rva .LSEH_info_AES_set_encrypt_key
  2588. .rva .LSEH_begin_AES_set_decrypt_key
  2589. .rva .LSEH_end_AES_set_decrypt_key
  2590. .rva .LSEH_info_AES_set_decrypt_key
  2591. .rva .LSEH_begin_AES_cbc_encrypt
  2592. .rva .LSEH_end_AES_cbc_encrypt
  2593. .rva .LSEH_info_AES_cbc_encrypt
  2594. .section .xdata
  2595. .align 8
  2596. .LSEH_info_AES_encrypt:
  2597. .byte 9,0,0,0
  2598. .rva block_se_handler
  2599. .rva .Lenc_prologue,.Lenc_epilogue # HandlerData[]
  2600. .LSEH_info_AES_decrypt:
  2601. .byte 9,0,0,0
  2602. .rva block_se_handler
  2603. .rva .Ldec_prologue,.Ldec_epilogue # HandlerData[]
  2604. .LSEH_info_AES_set_encrypt_key:
  2605. .byte 9,0,0,0
  2606. .rva key_se_handler
  2607. .rva .Lenc_key_prologue,.Lenc_key_epilogue # HandlerData[]
  2608. .LSEH_info_AES_set_decrypt_key:
  2609. .byte 9,0,0,0
  2610. .rva key_se_handler
  2611. .rva .Ldec_key_prologue,.Ldec_key_epilogue # HandlerData[]
  2612. .LSEH_info_AES_cbc_encrypt:
  2613. .byte 9,0,0,0
  2614. .rva cbc_se_handler
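	# no HandlerData[] entries here: cbc_se_handler finds its labels
	# directly via RIP-relative leas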
  2615. ___
  2616. }
  2617. $code =~ s/\`([^\`]*)\`/eval($1)/gem;
  2618. print $code;
  2619. close STDOUT or die "error closing STDOUT: $!";