mips.pl 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234
  1. #!/usr/bin/env perl
  2. #
  3. # ====================================================================
  4. # Written by Andy Polyakov <appro@fy.chalmers.se> for the OpenSSL
  5. # project.
  6. #
  7. # Rights for redistribution and usage in source and binary forms are
  8. # granted according to the OpenSSL license. Warranty of any kind is
  9. # disclaimed.
  10. # ====================================================================
  11. # July 1999
  12. #
  13. # This is drop-in MIPS III/IV ISA replacement for crypto/bn/bn_asm.c.
  14. #
  15. # The module is designed to work with either of the "new" MIPS ABI(5),
# namely N32 or N64, offered by IRIX 6.x. It's not meant to work under
  17. # IRIX 5.x not only because it doesn't support new ABIs but also
  18. # because 5.x kernels put R4x00 CPU into 32-bit mode and all those
  19. # 64-bit instructions (daddu, dmultu, etc.) found below gonna only
  20. # cause illegal instruction exception:-(
  21. #
  22. # In addition the code depends on preprocessor flags set up by MIPSpro
  23. # compiler driver (either as or cc) and therefore (probably?) can't be
  24. # compiled by the GNU assembler. GNU C driver manages fine though...
  25. # I mean as long as -mmips-as is specified or is the default option,
  26. # because then it simply invokes /usr/bin/as which in turn takes
  27. # perfect care of the preprocessor definitions. Another neat feature
  28. # offered by the MIPSpro assembler is an optimization pass. This gave
  29. # me the opportunity to have the code looking more regular as all those
  30. # architecture dependent instruction rescheduling details were left to
  31. # the assembler. Cool, huh?
  32. #
  33. # Performance improvement is astonishing! 'apps/openssl speed rsa dsa'
  34. # goes way over 3 times faster!
  35. #
  36. # <appro@fy.chalmers.se>
  37. # October 2010
  38. #
  39. # Adapt the module even for 32-bit ABIs and other OSes. The former was
  40. # achieved by mechanical replacement of 64-bit arithmetic instructions
  41. # such as dmultu, daddu, etc. with their 32-bit counterparts and
  42. # adjusting offsets denoting multiples of BN_ULONG. Above mentioned
  43. # >3x performance improvement naturally does not apply to 32-bit code
  44. # [because there is no instruction 32-bit compiler can't use], one
# has to be content with 40-85% improvement depending on benchmark and
  46. # key length, more for longer keys.
  47. $flavour = shift || "o32";
  48. while (($output=shift) && ($output!~/^\w[\w\-]*\.\w+$/)) {}
  49. open STDOUT,">$output";
  50. if ($flavour =~ /64|n32/i) {
  51. $LD="ld";
  52. $ST="sd";
  53. $MULTU="dmultu";
  54. $DIVU="ddivu";
  55. $ADDU="daddu";
  56. $SUBU="dsubu";
  57. $SRL="dsrl";
  58. $SLL="dsll";
  59. $BNSZ=8;
  60. $PTR_ADD="daddu";
  61. $PTR_SUB="dsubu";
  62. $SZREG=8;
  63. $REG_S="sd";
  64. $REG_L="ld";
  65. } else {
  66. $LD="lw";
  67. $ST="sw";
  68. $MULTU="multu";
  69. $DIVU="divu";
  70. $ADDU="addu";
  71. $SUBU="subu";
  72. $SRL="srl";
  73. $SLL="sll";
  74. $BNSZ=4;
  75. $PTR_ADD="addu";
  76. $PTR_SUB="subu";
  77. $SZREG=4;
  78. $REG_S="sw";
  79. $REG_L="lw";
  80. $code=".set mips2\n";
  81. }
# Below is N32/64 register layout used in the original module.
#
# Symbolic names for the hardware registers, per the N32/N64
# convention: $a4..$a7 are the extra argument registers, aliased
# below as $ta0..$ta3 since this code uses them as temporaries.
($zero,$at,$v0,$v1)=map("\$$_",(0..3));
($a0,$a1,$a2,$a3,$a4,$a5,$a6,$a7)=map("\$$_",(4..11));
($t0,$t1,$t2,$t3,$t8,$t9)=map("\$$_",(12..15,24,25));
($s0,$s1,$s2,$s3,$s4,$s5,$s6,$s7)=map("\$$_",(16..23));
($gp,$sp,$fp,$ra)=map("\$$_",(28..31));
($ta0,$ta1,$ta2,$ta3)=($a4,$a5,$a6,$a7);
#
# No special adaptation is required for O32. NUBI on the other hand
# is treated by saving/restoring ($v1,$t0..$t3).
# Under NUBI the code substitutes $v1 wherever it would touch $gp,
# and every prologue/epilogue below saves/restores the NUBI
# callee-saved set.
$gp=$v1 if ($flavour =~ /nubi/i);
# Scratch register that holds the constant -4, used as the mask for
# the 4-way unrolled loops below ("and $ta0,$a2,$minus4").
$minus4=$v1;
# Version banners plus the bn_mul_add_words entry point
# (rp[]=$a0, ap[]=$a1, num=$a2, w=$a3).  The wrapper branches to the
# internal routine only for a positive word count; otherwise it
# returns 0 (the delay-slot "move" sets up the return value).
$code.=<<___;
.rdata
.asciiz	"mips3.s, Version 1.2"
.asciiz	"MIPS II/III/IV ISA artwork by Andy Polyakov <appro\@fy.chalmers.se>"
.text
	.set	noat
.align	5
.globl	bn_mul_add_words
.ent	bn_mul_add_words
bn_mul_add_words:
	.set	noreorder
	bgtz	$a2,bn_mul_add_words_internal
	move	$v0,$zero
	jr	$ra
	move	$a0,$v0
.end	bn_mul_add_words
.align	5
.ent	bn_mul_add_words_internal
bn_mul_add_words_internal:
___
# NUBI-only prologue: six-slot frame saving $ra, $t3..$t0 and $gp,
# which that convention treats as callee-saved.
$code.=<<___ if ($flavour =~ /nubi/i);
	.frame	$sp,6*$SZREG,$ra
	.mask	0x8000f008,-$SZREG
	.set	noreorder
	$PTR_SUB $sp,6*$SZREG
	$REG_S	$ra,5*$SZREG($sp)
	$REG_S	$t3,4*$SZREG($sp)
	$REG_S	$t2,3*$SZREG($sp)
	$REG_S	$t1,2*$SZREG($sp)
	$REG_S	$t0,1*$SZREG($sp)
	$REG_S	$gp,0*$SZREG($sp)
___
# bn_mul_add_words body: rp[i] += ap[i]*w with the running carry kept
# in $v0 (which is also the return value).  The main loop is unrolled
# four ways ($minus4 masks the count); the tail handles the remaining
# 0..3 words one at a time.  Each limb does MULTU, folds lo/hi into
# the destination word and carry, and detects overflow with sltu.
$code.=<<___;
	.set	reorder
	li	$minus4,-4
	and	$ta0,$a2,$minus4
	beqz	$ta0,.L_bn_mul_add_words_tail

.L_bn_mul_add_words_loop:
	$LD	$t0,0($a1)
	$MULTU	$t0,$a3
	$LD	$t1,0($a0)
	$LD	$t2,$BNSZ($a1)
	$LD	$t3,$BNSZ($a0)
	$LD	$ta0,2*$BNSZ($a1)
	$LD	$ta1,2*$BNSZ($a0)
	$ADDU	$t1,$v0
	sltu	$v0,$t1,$v0	# All manuals say it "compares 32-bit
				# values", but it seems to work fine
				# even on 64-bit registers.
	mflo	$at
	mfhi	$t0
	$ADDU	$t1,$at
	$ADDU	$v0,$t0
	$MULTU	$t2,$a3
	sltu	$at,$t1,$at
	$ST	$t1,0($a0)
	$ADDU	$v0,$at
	$LD	$ta2,3*$BNSZ($a1)
	$LD	$ta3,3*$BNSZ($a0)
	$ADDU	$t3,$v0
	sltu	$v0,$t3,$v0
	mflo	$at
	mfhi	$t2
	$ADDU	$t3,$at
	$ADDU	$v0,$t2
	$MULTU	$ta0,$a3
	sltu	$at,$t3,$at
	$ST	$t3,$BNSZ($a0)
	$ADDU	$v0,$at
	subu	$a2,4
	$PTR_ADD $a0,4*$BNSZ
	$PTR_ADD $a1,4*$BNSZ
	$ADDU	$ta1,$v0
	sltu	$v0,$ta1,$v0
	mflo	$at
	mfhi	$ta0
	$ADDU	$ta1,$at
	$ADDU	$v0,$ta0
	$MULTU	$ta2,$a3
	sltu	$at,$ta1,$at
	$ST	$ta1,-2*$BNSZ($a0)
	$ADDU	$v0,$at
	and	$ta0,$a2,$minus4
	$ADDU	$ta3,$v0
	sltu	$v0,$ta3,$v0
	mflo	$at
	mfhi	$ta2
	$ADDU	$ta3,$at
	$ADDU	$v0,$ta2
	sltu	$at,$ta3,$at
	$ST	$ta3,-$BNSZ($a0)
	.set	noreorder
	bgtz	$ta0,.L_bn_mul_add_words_loop
	$ADDU	$v0,$at

	beqz	$a2,.L_bn_mul_add_words_return
	nop

.L_bn_mul_add_words_tail:
	.set	reorder
	$LD	$t0,0($a1)
	$MULTU	$t0,$a3
	$LD	$t1,0($a0)
	subu	$a2,1
	$ADDU	$t1,$v0
	sltu	$v0,$t1,$v0
	mflo	$at
	mfhi	$t0
	$ADDU	$t1,$at
	$ADDU	$v0,$t0
	sltu	$at,$t1,$at
	$ST	$t1,0($a0)
	$ADDU	$v0,$at
	beqz	$a2,.L_bn_mul_add_words_return

	$LD	$t0,$BNSZ($a1)
	$MULTU	$t0,$a3
	$LD	$t1,$BNSZ($a0)
	subu	$a2,1
	$ADDU	$t1,$v0
	sltu	$v0,$t1,$v0
	mflo	$at
	mfhi	$t0
	$ADDU	$t1,$at
	$ADDU	$v0,$t0
	sltu	$at,$t1,$at
	$ST	$t1,$BNSZ($a0)
	$ADDU	$v0,$at
	beqz	$a2,.L_bn_mul_add_words_return

	$LD	$t0,2*$BNSZ($a1)
	$MULTU	$t0,$a3
	$LD	$t1,2*$BNSZ($a0)
	$ADDU	$t1,$v0
	sltu	$v0,$t1,$v0
	mflo	$at
	mfhi	$t0
	$ADDU	$t1,$at
	$ADDU	$v0,$t0
	sltu	$at,$t1,$at
	$ST	$t1,2*$BNSZ($a0)
	$ADDU	$v0,$at

.L_bn_mul_add_words_return:
	.set	noreorder
___
# NUBI-only epilogue: restore the registers saved in the prologue and
# pop the frame.
$code.=<<___ if ($flavour =~ /nubi/i);
	$REG_L	$t3,4*$SZREG($sp)
	$REG_L	$t2,3*$SZREG($sp)
	$REG_L	$t1,2*$SZREG($sp)
	$REG_L	$t0,1*$SZREG($sp)
	$REG_L	$gp,0*$SZREG($sp)
	$PTR_ADD $sp,6*$SZREG
___
# Return from bn_mul_add_words_internal (carry in $v0, mirrored into
# $a0 in the delay slot), followed by the bn_mul_words entry point
# (rp[]=$a0, ap[]=$a1, num=$a2, w=$a3): zero-count calls return 0.
$code.=<<___;
	jr	$ra
	move	$a0,$v0
.end	bn_mul_add_words_internal

.align	5
.globl	bn_mul_words
.ent	bn_mul_words
bn_mul_words:
	.set	noreorder
	bgtz	$a2,bn_mul_words_internal
	move	$v0,$zero
	jr	$ra
	move	$a0,$v0
.end	bn_mul_words

.align	5
.ent	bn_mul_words_internal
bn_mul_words_internal:
___
# NUBI-only prologue (same six-slot frame as bn_mul_add_words).
$code.=<<___ if ($flavour =~ /nubi/i);
	.frame	$sp,6*$SZREG,$ra
	.mask	0x8000f008,-$SZREG
	.set	noreorder
	$PTR_SUB $sp,6*$SZREG
	$REG_S	$ra,5*$SZREG($sp)
	$REG_S	$t3,4*$SZREG($sp)
	$REG_S	$t2,3*$SZREG($sp)
	$REG_S	$t1,2*$SZREG($sp)
	$REG_S	$t0,1*$SZREG($sp)
	$REG_S	$gp,0*$SZREG($sp)
___
# bn_mul_words body: rp[i] = ap[i]*w + carry, carry kept in $v0
# (also the return value).  Four-way unrolled main loop plus a
# 0..3-word tail; per limb the low product is added to the carry and
# stored, and the new carry is (overflow bit + high product).
$code.=<<___;
	.set	reorder
	li	$minus4,-4
	and	$ta0,$a2,$minus4
	beqz	$ta0,.L_bn_mul_words_tail

.L_bn_mul_words_loop:
	$LD	$t0,0($a1)
	$MULTU	$t0,$a3
	$LD	$t2,$BNSZ($a1)
	$LD	$ta0,2*$BNSZ($a1)
	$LD	$ta2,3*$BNSZ($a1)
	mflo	$at
	mfhi	$t0
	$ADDU	$v0,$at
	sltu	$t1,$v0,$at
	$MULTU	$t2,$a3
	$ST	$v0,0($a0)
	$ADDU	$v0,$t1,$t0
	subu	$a2,4
	$PTR_ADD $a0,4*$BNSZ
	$PTR_ADD $a1,4*$BNSZ
	mflo	$at
	mfhi	$t2
	$ADDU	$v0,$at
	sltu	$t3,$v0,$at
	$MULTU	$ta0,$a3
	$ST	$v0,-3*$BNSZ($a0)
	$ADDU	$v0,$t3,$t2
	mflo	$at
	mfhi	$ta0
	$ADDU	$v0,$at
	sltu	$ta1,$v0,$at
	$MULTU	$ta2,$a3
	$ST	$v0,-2*$BNSZ($a0)
	$ADDU	$v0,$ta1,$ta0
	and	$ta0,$a2,$minus4
	mflo	$at
	mfhi	$ta2
	$ADDU	$v0,$at
	sltu	$ta3,$v0,$at
	$ST	$v0,-$BNSZ($a0)
	.set	noreorder
	bgtz	$ta0,.L_bn_mul_words_loop
	$ADDU	$v0,$ta3,$ta2

	beqz	$a2,.L_bn_mul_words_return
	nop

.L_bn_mul_words_tail:
	.set	reorder
	$LD	$t0,0($a1)
	$MULTU	$t0,$a3
	subu	$a2,1
	mflo	$at
	mfhi	$t0
	$ADDU	$v0,$at
	sltu	$t1,$v0,$at
	$ST	$v0,0($a0)
	$ADDU	$v0,$t1,$t0
	beqz	$a2,.L_bn_mul_words_return

	$LD	$t0,$BNSZ($a1)
	$MULTU	$t0,$a3
	subu	$a2,1
	mflo	$at
	mfhi	$t0
	$ADDU	$v0,$at
	sltu	$t1,$v0,$at
	$ST	$v0,$BNSZ($a0)
	$ADDU	$v0,$t1,$t0
	beqz	$a2,.L_bn_mul_words_return

	$LD	$t0,2*$BNSZ($a1)
	$MULTU	$t0,$a3
	mflo	$at
	mfhi	$t0
	$ADDU	$v0,$at
	sltu	$t1,$v0,$at
	$ST	$v0,2*$BNSZ($a0)
	$ADDU	$v0,$t1,$t0

.L_bn_mul_words_return:
	.set	noreorder
___
# NUBI-only epilogue.
$code.=<<___ if ($flavour =~ /nubi/i);
	$REG_L	$t3,4*$SZREG($sp)
	$REG_L	$t2,3*$SZREG($sp)
	$REG_L	$t1,2*$SZREG($sp)
	$REG_L	$t0,1*$SZREG($sp)
	$REG_L	$gp,0*$SZREG($sp)
	$PTR_ADD $sp,6*$SZREG
___
# Return from bn_mul_words_internal, then the bn_sqr_words entry
# point (rp[]=$a0, ap[]=$a1, num=$a2); zero-count calls return 0.
$code.=<<___;
	jr	$ra
	move	$a0,$v0
.end	bn_mul_words_internal

.align	5
.globl	bn_sqr_words
.ent	bn_sqr_words
bn_sqr_words:
	.set	noreorder
	bgtz	$a2,bn_sqr_words_internal
	move	$v0,$zero
	jr	$ra
	move	$a0,$v0
.end	bn_sqr_words

.align	5
.ent	bn_sqr_words_internal
bn_sqr_words_internal:
___
# NUBI-only prologue (same six-slot frame as the routines above).
$code.=<<___ if ($flavour =~ /nubi/i);
	.frame	$sp,6*$SZREG,$ra
	.mask	0x8000f008,-$SZREG
	.set	noreorder
	$PTR_SUB $sp,6*$SZREG
	$REG_S	$ra,5*$SZREG($sp)
	$REG_S	$t3,4*$SZREG($sp)
	$REG_S	$t2,3*$SZREG($sp)
	$REG_S	$t1,2*$SZREG($sp)
	$REG_S	$t0,1*$SZREG($sp)
	$REG_S	$gp,0*$SZREG($sp)
___
# bn_sqr_words body: each input limb ap[i] produces two output limbs
# (lo, hi) of ap[i]^2, so the output pointer advances twice as fast
# as the input pointer (8*$BNSZ vs 4*$BNSZ per unrolled iteration).
# No carries propagate between limbs here.
$code.=<<___;
	.set	reorder
	li	$minus4,-4
	and	$ta0,$a2,$minus4
	beqz	$ta0,.L_bn_sqr_words_tail

.L_bn_sqr_words_loop:
	$LD	$t0,0($a1)
	$MULTU	$t0,$t0
	$LD	$t2,$BNSZ($a1)
	$LD	$ta0,2*$BNSZ($a1)
	$LD	$ta2,3*$BNSZ($a1)
	mflo	$t1
	mfhi	$t0
	$ST	$t1,0($a0)
	$ST	$t0,$BNSZ($a0)
	$MULTU	$t2,$t2
	subu	$a2,4
	$PTR_ADD $a0,8*$BNSZ
	$PTR_ADD $a1,4*$BNSZ
	mflo	$t3
	mfhi	$t2
	$ST	$t3,-6*$BNSZ($a0)
	$ST	$t2,-5*$BNSZ($a0)
	$MULTU	$ta0,$ta0
	mflo	$ta1
	mfhi	$ta0
	$ST	$ta1,-4*$BNSZ($a0)
	$ST	$ta0,-3*$BNSZ($a0)
	$MULTU	$ta2,$ta2
	and	$ta0,$a2,$minus4
	mflo	$ta3
	mfhi	$ta2
	$ST	$ta3,-2*$BNSZ($a0)
	.set	noreorder
	bgtz	$ta0,.L_bn_sqr_words_loop
	$ST	$ta2,-$BNSZ($a0)

	beqz	$a2,.L_bn_sqr_words_return
	nop

.L_bn_sqr_words_tail:
	.set	reorder
	$LD	$t0,0($a1)
	$MULTU	$t0,$t0
	subu	$a2,1
	mflo	$t1
	mfhi	$t0
	$ST	$t1,0($a0)
	$ST	$t0,$BNSZ($a0)
	beqz	$a2,.L_bn_sqr_words_return

	$LD	$t0,$BNSZ($a1)
	$MULTU	$t0,$t0
	subu	$a2,1
	mflo	$t1
	mfhi	$t0
	$ST	$t1,2*$BNSZ($a0)
	$ST	$t0,3*$BNSZ($a0)
	beqz	$a2,.L_bn_sqr_words_return

	$LD	$t0,2*$BNSZ($a1)
	$MULTU	$t0,$t0
	mflo	$t1
	mfhi	$t0
	$ST	$t1,4*$BNSZ($a0)
	$ST	$t0,5*$BNSZ($a0)

.L_bn_sqr_words_return:
	.set	noreorder
___
# NUBI-only epilogue.
$code.=<<___ if ($flavour =~ /nubi/i);
	$REG_L	$t3,4*$SZREG($sp)
	$REG_L	$t2,3*$SZREG($sp)
	$REG_L	$t1,2*$SZREG($sp)
	$REG_L	$t0,1*$SZREG($sp)
	$REG_L	$gp,0*$SZREG($sp)
	$PTR_ADD $sp,6*$SZREG
___
# Return from bn_sqr_words_internal, then the bn_add_words entry
# point (rp[]=$a0, ap[]=$a1, bp[]=$a2, num=$a3 — note the count is in
# $a3 here); zero-count calls return 0 carry.
$code.=<<___;
	jr	$ra
	move	$a0,$v0
.end	bn_sqr_words_internal

.align	5
.globl	bn_add_words
.ent	bn_add_words
bn_add_words:
	.set	noreorder
	bgtz	$a3,bn_add_words_internal
	move	$v0,$zero
	jr	$ra
	move	$a0,$v0
.end	bn_add_words

.align	5
.ent	bn_add_words_internal
bn_add_words_internal:
___
# NUBI-only prologue.
$code.=<<___ if ($flavour =~ /nubi/i);
	.frame	$sp,6*$SZREG,$ra
	.mask	0x8000f008,-$SZREG
	.set	noreorder
	$PTR_SUB $sp,6*$SZREG
	$REG_S	$ra,5*$SZREG($sp)
	$REG_S	$t3,4*$SZREG($sp)
	$REG_S	$t2,3*$SZREG($sp)
	$REG_S	$t1,2*$SZREG($sp)
	$REG_S	$t0,1*$SZREG($sp)
	$REG_S	$gp,0*$SZREG($sp)
___
# bn_add_words body: rp[i] = ap[i] + bp[i] + carry, carry in $v0.
# Each limb needs two sltu overflow checks (one for a+b, one for
# adding the incoming carry), accumulated into $v0.  Four-way
# unrolled main loop plus 0..3-word tail.
$code.=<<___;
	.set	reorder
	li	$minus4,-4
	and	$at,$a3,$minus4
	beqz	$at,.L_bn_add_words_tail

.L_bn_add_words_loop:
	$LD	$t0,0($a1)
	$LD	$ta0,0($a2)
	subu	$a3,4
	$LD	$t1,$BNSZ($a1)
	and	$at,$a3,$minus4
	$LD	$t2,2*$BNSZ($a1)
	$PTR_ADD $a2,4*$BNSZ
	$LD	$t3,3*$BNSZ($a1)
	$PTR_ADD $a0,4*$BNSZ
	$LD	$ta1,-3*$BNSZ($a2)
	$PTR_ADD $a1,4*$BNSZ
	$LD	$ta2,-2*$BNSZ($a2)
	$LD	$ta3,-$BNSZ($a2)
	$ADDU	$ta0,$t0
	sltu	$t8,$ta0,$t0
	$ADDU	$t0,$ta0,$v0
	sltu	$v0,$t0,$ta0
	$ST	$t0,-4*$BNSZ($a0)
	$ADDU	$v0,$t8
	$ADDU	$ta1,$t1
	sltu	$t9,$ta1,$t1
	$ADDU	$t1,$ta1,$v0
	sltu	$v0,$t1,$ta1
	$ST	$t1,-3*$BNSZ($a0)
	$ADDU	$v0,$t9
	$ADDU	$ta2,$t2
	sltu	$t8,$ta2,$t2
	$ADDU	$t2,$ta2,$v0
	sltu	$v0,$t2,$ta2
	$ST	$t2,-2*$BNSZ($a0)
	$ADDU	$v0,$t8
	$ADDU	$ta3,$t3
	sltu	$t9,$ta3,$t3
	$ADDU	$t3,$ta3,$v0
	sltu	$v0,$t3,$ta3
	$ST	$t3,-$BNSZ($a0)
	.set	noreorder
	bgtz	$at,.L_bn_add_words_loop
	$ADDU	$v0,$t9

	beqz	$a3,.L_bn_add_words_return
	nop

.L_bn_add_words_tail:
	.set	reorder
	$LD	$t0,0($a1)
	$LD	$ta0,0($a2)
	$ADDU	$ta0,$t0
	subu	$a3,1
	sltu	$t8,$ta0,$t0
	$ADDU	$t0,$ta0,$v0
	sltu	$v0,$t0,$ta0
	$ST	$t0,0($a0)
	$ADDU	$v0,$t8
	beqz	$a3,.L_bn_add_words_return

	$LD	$t1,$BNSZ($a1)
	$LD	$ta1,$BNSZ($a2)
	$ADDU	$ta1,$t1
	subu	$a3,1
	sltu	$t9,$ta1,$t1
	$ADDU	$t1,$ta1,$v0
	sltu	$v0,$t1,$ta1
	$ST	$t1,$BNSZ($a0)
	$ADDU	$v0,$t9
	beqz	$a3,.L_bn_add_words_return

	$LD	$t2,2*$BNSZ($a1)
	$LD	$ta2,2*$BNSZ($a2)
	$ADDU	$ta2,$t2
	sltu	$t8,$ta2,$t2
	$ADDU	$t2,$ta2,$v0
	sltu	$v0,$t2,$ta2
	$ST	$t2,2*$BNSZ($a0)
	$ADDU	$v0,$t8

.L_bn_add_words_return:
	.set	noreorder
___
# NUBI-only epilogue.
$code.=<<___ if ($flavour =~ /nubi/i);
	$REG_L	$t3,4*$SZREG($sp)
	$REG_L	$t2,3*$SZREG($sp)
	$REG_L	$t1,2*$SZREG($sp)
	$REG_L	$t0,1*$SZREG($sp)
	$REG_L	$gp,0*$SZREG($sp)
	$PTR_ADD $sp,6*$SZREG
___
# Return from bn_add_words_internal, then the bn_sub_words entry
# point (rp[]=$a0, ap[]=$a1, bp[]=$a2, num=$a3).  Note the zero-count
# path returns with "move $a0,$zero" (0 borrow).
$code.=<<___;
	jr	$ra
	move	$a0,$v0
.end	bn_add_words_internal

.align	5
.globl	bn_sub_words
.ent	bn_sub_words
bn_sub_words:
	.set	noreorder
	bgtz	$a3,bn_sub_words_internal
	move	$v0,$zero
	jr	$ra
	move	$a0,$zero
.end	bn_sub_words

.align	5
.ent	bn_sub_words_internal
bn_sub_words_internal:
___
# NUBI-only prologue.
$code.=<<___ if ($flavour =~ /nubi/i);
	.frame	$sp,6*$SZREG,$ra
	.mask	0x8000f008,-$SZREG
	.set	noreorder
	$PTR_SUB $sp,6*$SZREG
	$REG_S	$ra,5*$SZREG($sp)
	$REG_S	$t3,4*$SZREG($sp)
	$REG_S	$t2,3*$SZREG($sp)
	$REG_S	$t1,2*$SZREG($sp)
	$REG_S	$t0,1*$SZREG($sp)
	$REG_S	$gp,0*$SZREG($sp)
___
# bn_sub_words body: rp[i] = ap[i] - bp[i] - borrow, borrow in $v0.
# Per limb: sltu detects the a<b borrow before subtracting, sgtu
# detects the borrow from subtracting the incoming carry, and both
# fold into $v0.  Four-way unrolled main loop plus 0..3-word tail.
$code.=<<___;
	.set	reorder
	li	$minus4,-4
	and	$at,$a3,$minus4
	beqz	$at,.L_bn_sub_words_tail

.L_bn_sub_words_loop:
	$LD	$t0,0($a1)
	$LD	$ta0,0($a2)
	subu	$a3,4
	$LD	$t1,$BNSZ($a1)
	and	$at,$a3,$minus4
	$LD	$t2,2*$BNSZ($a1)
	$PTR_ADD $a2,4*$BNSZ
	$LD	$t3,3*$BNSZ($a1)
	$PTR_ADD $a0,4*$BNSZ
	$LD	$ta1,-3*$BNSZ($a2)
	$PTR_ADD $a1,4*$BNSZ
	$LD	$ta2,-2*$BNSZ($a2)
	$LD	$ta3,-$BNSZ($a2)
	sltu	$t8,$t0,$ta0
	$SUBU	$ta0,$t0,$ta0
	$SUBU	$t0,$ta0,$v0
	sgtu	$v0,$t0,$ta0
	$ST	$t0,-4*$BNSZ($a0)
	$ADDU	$v0,$t8
	sltu	$t9,$t1,$ta1
	$SUBU	$ta1,$t1,$ta1
	$SUBU	$t1,$ta1,$v0
	sgtu	$v0,$t1,$ta1
	$ST	$t1,-3*$BNSZ($a0)
	$ADDU	$v0,$t9
	sltu	$t8,$t2,$ta2
	$SUBU	$ta2,$t2,$ta2
	$SUBU	$t2,$ta2,$v0
	sgtu	$v0,$t2,$ta2
	$ST	$t2,-2*$BNSZ($a0)
	$ADDU	$v0,$t8
	sltu	$t9,$t3,$ta3
	$SUBU	$ta3,$t3,$ta3
	$SUBU	$t3,$ta3,$v0
	sgtu	$v0,$t3,$ta3
	$ST	$t3,-$BNSZ($a0)
	.set	noreorder
	bgtz	$at,.L_bn_sub_words_loop
	$ADDU	$v0,$t9

	beqz	$a3,.L_bn_sub_words_return
	nop

.L_bn_sub_words_tail:
	.set	reorder
	$LD	$t0,0($a1)
	$LD	$ta0,0($a2)
	subu	$a3,1
	sltu	$t8,$t0,$ta0
	$SUBU	$ta0,$t0,$ta0
	$SUBU	$t0,$ta0,$v0
	sgtu	$v0,$t0,$ta0
	$ST	$t0,0($a0)
	$ADDU	$v0,$t8
	beqz	$a3,.L_bn_sub_words_return

	$LD	$t1,$BNSZ($a1)
	subu	$a3,1
	$LD	$ta1,$BNSZ($a2)
	sltu	$t9,$t1,$ta1
	$SUBU	$ta1,$t1,$ta1
	$SUBU	$t1,$ta1,$v0
	sgtu	$v0,$t1,$ta1
	$ST	$t1,$BNSZ($a0)
	$ADDU	$v0,$t9
	beqz	$a3,.L_bn_sub_words_return

	$LD	$t2,2*$BNSZ($a1)
	$LD	$ta2,2*$BNSZ($a2)
	sltu	$t8,$t2,$ta2
	$SUBU	$ta2,$t2,$ta2
	$SUBU	$t2,$ta2,$v0
	sgtu	$v0,$t2,$ta2
	$ST	$t2,2*$BNSZ($a0)
	$ADDU	$v0,$t8

.L_bn_sub_words_return:
	.set	noreorder
___
# NUBI-only epilogue.
$code.=<<___ if ($flavour =~ /nubi/i);
	$REG_L	$t3,4*$SZREG($sp)
	$REG_L	$t2,3*$SZREG($sp)
	$REG_L	$t1,2*$SZREG($sp)
	$REG_L	$t0,1*$SZREG($sp)
	$REG_L	$gp,0*$SZREG($sp)
	$PTR_ADD $sp,6*$SZREG
___
# Return from bn_sub_words_internal, then the bn_div_3_words entry
# point.  The wrapper stashes its arguments in registers that
# bn_div_words_internal is known to leave alone (see inline comment)
# and short-circuits to -1 (all-ones quotient) when the high word
# equals the divisor, which would overflow the division.
$code.=<<___;
	jr	$ra
	move	$a0,$v0
.end	bn_sub_words_internal

.align	5
.globl	bn_div_3_words
.ent	bn_div_3_words
bn_div_3_words:
	.set	noreorder
	move	$a3,$a0		# we know that bn_div_words does not
				# touch $a3, $ta2, $ta3 and preserves $a2
				# so that we can save two arguments
				# and return address in registers
				# instead of stack:-)
	$LD	$a0,($a3)
	move	$ta2,$a1
	bne	$a0,$a2,bn_div_3_words_internal
	$LD	$a1,-$BNSZ($a3)
	li	$v0,-1
	jr	$ra
	move	$a0,$v0
.end	bn_div_3_words

.align	5
.ent	bn_div_3_words_internal
bn_div_3_words_internal:
___
# NUBI-only prologue.
$code.=<<___ if ($flavour =~ /nubi/i);
	.frame	$sp,6*$SZREG,$ra
	.mask	0x8000f008,-$SZREG
	.set	noreorder
	$PTR_SUB $sp,6*$SZREG
	$REG_S	$ra,5*$SZREG($sp)
	$REG_S	$t3,4*$SZREG($sp)
	$REG_S	$t2,3*$SZREG($sp)
	$REG_S	$t1,2*$SZREG($sp)
	$REG_S	$t0,1*$SZREG($sp)
	$REG_S	$gp,0*$SZREG($sp)
___
# bn_div_3_words body: gets an initial quotient estimate from
# bn_div_words_internal (called with bal, return address preserved in
# $ta3), then corrects it downward in the inner loop by comparing the
# estimate*divisor product (hi:lo in $t1:$t0) against the dividend
# words.  The loop decrements then re-increments $v0, so the final
# quotient is off by the number of correction steps taken.
$code.=<<___;
	.set	reorder
	move	$ta3,$ra
	bal	bn_div_words_internal
	move	$ra,$ta3
	$MULTU	$ta2,$v0
	$LD	$t2,-2*$BNSZ($a3)
	move	$ta0,$zero
	mfhi	$t1
	mflo	$t0
	sltu	$t8,$t1,$a1

.L_bn_div_3_words_inner_loop:
	bnez	$t8,.L_bn_div_3_words_inner_loop_done
	sgeu	$at,$t2,$t0
	seq	$t9,$t1,$a1
	and	$at,$t9
	sltu	$t3,$t0,$ta2
	$ADDU	$a1,$a2
	$SUBU	$t1,$t3
	$SUBU	$t0,$ta2
	sltu	$t8,$t1,$a1
	sltu	$ta0,$a1,$a2
	or	$t8,$ta0
	.set	noreorder
	beqz	$at,.L_bn_div_3_words_inner_loop
	$SUBU	$v0,1
	$ADDU	$v0,1
	.set	reorder

.L_bn_div_3_words_inner_loop_done:
	.set	noreorder
___
# NUBI-only epilogue.
$code.=<<___ if ($flavour =~ /nubi/i);
	$REG_L	$t3,4*$SZREG($sp)
	$REG_L	$t2,3*$SZREG($sp)
	$REG_L	$t1,2*$SZREG($sp)
	$REG_L	$t0,1*$SZREG($sp)
	$REG_L	$gp,0*$SZREG($sp)
	$PTR_ADD $sp,6*$SZREG
___
# Return from bn_div_3_words_internal, then the bn_div_words entry
# point (h=$a0, l=$a1, d=$a2): a zero divisor returns -1 instead of
# trapping (see inline comment about 'break 7').
$code.=<<___;
	jr	$ra
	move	$a0,$v0
.end	bn_div_3_words_internal

.align	5
.globl	bn_div_words
.ent	bn_div_words
bn_div_words:
	.set	noreorder
	bnez	$a2,bn_div_words_internal
	li	$v0,-1		# I would rather signal div-by-zero
				# which can be done with 'break 7'
	jr	$ra
	move	$a0,$v0
.end	bn_div_words

.align	5
.ent	bn_div_words_internal
bn_div_words_internal:
___
# NUBI-only prologue.
$code.=<<___ if ($flavour =~ /nubi/i);
	.frame	$sp,6*$SZREG,$ra
	.mask	0x8000f008,-$SZREG
	.set	noreorder
	$PTR_SUB $sp,6*$SZREG
	$REG_S	$ra,5*$SZREG($sp)
	$REG_S	$t3,4*$SZREG($sp)
	$REG_S	$t2,3*$SZREG($sp)
	$REG_S	$t1,2*$SZREG($sp)
	$REG_S	$t0,1*$SZREG($sp)
	$REG_S	$gp,0*$SZREG($sp)
___
# bn_div_words normalization: count ($t9) how far the divisor must be
# shifted left until its top bit is set (the bltz check skips the
# loop when it already is; "bgtz $a2,.-4" branches back to the $SLL).
# If shifting the dividend high word would lose set bits, 'break 6'
# signals overflow; otherwise h:l are shifted up by the same amount.
$code.=<<___;
	move	$v1,$zero
	bltz	$a2,.L_bn_div_words_body
	move	$t9,$v1
	$SLL	$a2,1
	bgtz	$a2,.-4
	addu	$t9,1

	.set	reorder
	negu	$t1,$t9
	li	$t2,-1
	$SLL	$t2,$t1
	and	$t2,$a0
	$SRL	$at,$a1,$t1
	.set	noreorder
	beqz	$t2,.+12
	nop
	break	6		# signal overflow
	.set	reorder
	$SLL	$a0,$t9
	$SLL	$a1,$t9
	or	$a0,$at
___
# Register aliases for the division loop: $QT holds the current
# quotient estimate, $HH the dividend's high half, $DH the divisor's
# high half.
$QT=$ta0;
$HH=$ta1;
$DH=$v1;
# bn_div_words body: schoolbook two-digit division in half-limb
# ("4*$BNSZ bits") digits.  Each of the two rounds estimates a
# half-limb quotient digit by dividing the dividend's high part by
# the divisor's high half ($DH), then corrects the estimate downward
# in an inner loop by comparing estimate*divisor (hi:lo = $t1:$t0)
# against the current remainder.  The two digits are combined into
# $v0; the denormalized remainder is left in $v1/$a1.
$code.=<<___;
.L_bn_div_words_body:
	$SRL	$DH,$a2,4*$BNSZ	# bits
	sgeu	$at,$a0,$a2
	.set	noreorder
	beqz	$at,.+12
	nop
	$SUBU	$a0,$a2
	.set	reorder

	li	$QT,-1
	$SRL	$HH,$a0,4*$BNSZ	# bits
	$SRL	$QT,4*$BNSZ	# q=0xffffffff
	beq	$DH,$HH,.L_bn_div_words_skip_div1
	$DIVU	$zero,$a0,$DH
	mflo	$QT
.L_bn_div_words_skip_div1:
	$MULTU	$a2,$QT
	$SLL	$t3,$a0,4*$BNSZ	# bits
	$SRL	$at,$a1,4*$BNSZ	# bits
	or	$t3,$at
	mflo	$t0
	mfhi	$t1
.L_bn_div_words_inner_loop1:
	sltu	$t2,$t3,$t0
	seq	$t8,$HH,$t1
	sltu	$at,$HH,$t1
	and	$t2,$t8
	sltu	$v0,$t0,$a2
	or	$at,$t2
	.set	noreorder
	beqz	$at,.L_bn_div_words_inner_loop1_done
	$SUBU	$t1,$v0
	$SUBU	$t0,$a2
	b	.L_bn_div_words_inner_loop1
	$SUBU	$QT,1
	.set	reorder
.L_bn_div_words_inner_loop1_done:

	$SLL	$a1,4*$BNSZ	# bits
	$SUBU	$a0,$t3,$t0
	$SLL	$v0,$QT,4*$BNSZ	# bits

	li	$QT,-1
	$SRL	$HH,$a0,4*$BNSZ	# bits
	$SRL	$QT,4*$BNSZ	# q=0xffffffff
	beq	$DH,$HH,.L_bn_div_words_skip_div2
	$DIVU	$zero,$a0,$DH
	mflo	$QT
.L_bn_div_words_skip_div2:
	$MULTU	$a2,$QT
	$SLL	$t3,$a0,4*$BNSZ	# bits
	$SRL	$at,$a1,4*$BNSZ	# bits
	or	$t3,$at
	mflo	$t0
	mfhi	$t1
.L_bn_div_words_inner_loop2:
	sltu	$t2,$t3,$t0
	seq	$t8,$HH,$t1
	sltu	$at,$HH,$t1
	and	$t2,$t8
	sltu	$v1,$t0,$a2
	or	$at,$t2
	.set	noreorder
	beqz	$at,.L_bn_div_words_inner_loop2_done
	$SUBU	$t1,$v1
	$SUBU	$t0,$a2
	b	.L_bn_div_words_inner_loop2
	$SUBU	$QT,1
	.set	reorder
.L_bn_div_words_inner_loop2_done:

	$SUBU	$a0,$t3,$t0
	or	$v0,$QT
	$SRL	$v1,$a0,$t9	# $v1 contains remainder if anybody wants it
	$SRL	$a2,$t9	# restore $a2
	.set	noreorder
	move	$a1,$v1
___
# NUBI-only epilogue.
$code.=<<___ if ($flavour =~ /nubi/i);
	$REG_L	$t3,4*$SZREG($sp)
	$REG_L	$t2,3*$SZREG($sp)
	$REG_L	$t1,2*$SZREG($sp)
	$REG_L	$t0,1*$SZREG($sp)
	$REG_L	$gp,0*$SZREG($sp)
	$PTR_ADD $sp,6*$SZREG
___
# Return quotient in $v0 (mirrored into $a0 in the delay slot).
$code.=<<___;
	jr	$ra
	move	$a0,$v0
.end	bn_div_words_internal
___
  921. undef $HH; undef $QT; undef $DH;
  922. ($a_0,$a_1,$a_2,$a_3)=($t0,$t1,$t2,$t3);
  923. ($b_0,$b_1,$b_2,$b_3)=($ta0,$ta1,$ta2,$ta3);
  924. ($a_4,$a_5,$a_6,$a_7)=($s0,$s2,$s4,$a1); # once we load a[7], no use for $a1
  925. ($b_4,$b_5,$b_6,$b_7)=($s1,$s3,$s5,$a2); # once we load b[7], no use for $a2
  926. ($t_1,$t_2,$c_1,$c_2,$c_3)=($t8,$t9,$v0,$v1,$a3);
# bn_mul_comba8(r,a,b): r[0..15] = a[0..7] * b[0..7], fully unrolled
# schoolbook ("comba") multiplication, one result column at a time.
# Each mul_add_c step accumulates lo:hi of one product into the rotating
# column sums c1/c2/c3; the $MULTU for the *next* product is issued as
# early as possible so it overlaps the carry bookkeeping.
$code.=<<___;
.align 5
.globl bn_mul_comba8
.ent bn_mul_comba8
bn_mul_comba8:
.set noreorder
___
# nubi ABI prologue: larger frame, spills $ra/$s*/$t0-$t3/$gp.
$code.=<<___ if ($flavour =~ /nubi/i);
.frame $sp,12*$SZREG,$ra
.mask 0x803ff008,-$SZREG
$PTR_SUB $sp,12*$SZREG
$REG_S $ra,11*$SZREG($sp)
$REG_S $s5,10*$SZREG($sp)
$REG_S $s4,9*$SZREG($sp)
$REG_S $s3,8*$SZREG($sp)
$REG_S $s2,7*$SZREG($sp)
$REG_S $s1,6*$SZREG($sp)
$REG_S $s0,5*$SZREG($sp)
$REG_S $t3,4*$SZREG($sp)
$REG_S $t2,3*$SZREG($sp)
$REG_S $t1,2*$SZREG($sp)
$REG_S $t0,1*$SZREG($sp)
$REG_S $gp,0*$SZREG($sp)
___
# Standard ABI prologue: only the callee-saved $s0-$s5 need spilling.
$code.=<<___ if ($flavour !~ /nubi/i);
.frame $sp,6*$SZREG,$ra
.mask 0x003f0000,-$SZREG
$PTR_SUB $sp,6*$SZREG
$REG_S $s5,5*$SZREG($sp)
$REG_S $s4,4*$SZREG($sp)
$REG_S $s3,3*$SZREG($sp)
$REG_S $s2,2*$SZREG($sp)
$REG_S $s1,1*$SZREG($sp)
$REG_S $s0,0*$SZREG($sp)
___
# Load all 16 input words up front, interleaved with the first multiplies.
$code.=<<___;
.set reorder
$LD $a_0,0($a1) # If compiled with -mips3 option on
# R5000 box assembler barks on this
# line with "should not have mult/div
# as last instruction in bb (R10K
# bug)" warning. If anybody out there
# has a clue about how to circumvent
# this do send me a note.
# <appro\@fy.chalmers.se>
$LD $b_0,0($a2)
$LD $a_1,$BNSZ($a1)
$LD $a_2,2*$BNSZ($a1)
$MULTU $a_0,$b_0 # mul_add_c(a[0],b[0],c1,c2,c3);
$LD $a_3,3*$BNSZ($a1)
$LD $b_1,$BNSZ($a2)
$LD $b_2,2*$BNSZ($a2)
$LD $b_3,3*$BNSZ($a2)
mflo $c_1
mfhi $c_2
$LD $a_4,4*$BNSZ($a1)
$LD $a_5,5*$BNSZ($a1)
$MULTU $a_0,$b_1 # mul_add_c(a[0],b[1],c2,c3,c1);
$LD $a_6,6*$BNSZ($a1)
$LD $a_7,7*$BNSZ($a1)
$LD $b_4,4*$BNSZ($a2)
$LD $b_5,5*$BNSZ($a2)
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_1,$b_0 # mul_add_c(a[1],b[0],c2,c3,c1);
$ADDU $c_3,$t_2,$at
$LD $b_6,6*$BNSZ($a2)
$LD $b_7,7*$BNSZ($a2)
$ST $c_1,0($a0) # r[0]=c1;
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_2,$b_0 # mul_add_c(a[2],b[0],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $c_1,$c_3,$t_2
$ST $c_2,$BNSZ($a0) # r[1]=c2;
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_1,$b_1 # mul_add_c(a[1],b[1],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_0,$b_2 # mul_add_c(a[0],b[2],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $c_2,$c_1,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_0,$b_3 # mul_add_c(a[0],b[3],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
$ST $c_3,2*$BNSZ($a0) # r[2]=c3;
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_1,$b_2 # mul_add_c(a[1],b[2],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $c_3,$c_2,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_2,$b_1 # mul_add_c(a[2],b[1],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_3,$b_0 # mul_add_c(a[3],b[0],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_4,$b_0 # mul_add_c(a[4],b[0],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
$ST $c_1,3*$BNSZ($a0) # r[3]=c1;
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_3,$b_1 # mul_add_c(a[3],b[1],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $c_1,$c_3,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_2,$b_2 # mul_add_c(a[2],b[2],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_1,$b_3 # mul_add_c(a[1],b[3],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_0,$b_4 # mul_add_c(a[0],b[4],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_0,$b_5 # mul_add_c(a[0],b[5],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
$ST $c_2,4*$BNSZ($a0) # r[4]=c2;
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_1,$b_4 # mul_add_c(a[1],b[4],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $c_2,$c_1,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_2,$b_3 # mul_add_c(a[2],b[3],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_3,$b_2 # mul_add_c(a[3],b[2],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_4,$b_1 # mul_add_c(a[4],b[1],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_5,$b_0 # mul_add_c(a[5],b[0],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_6,$b_0 # mul_add_c(a[6],b[0],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
$ST $c_3,5*$BNSZ($a0) # r[5]=c3;
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_5,$b_1 # mul_add_c(a[5],b[1],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $c_3,$c_2,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_4,$b_2 # mul_add_c(a[4],b[2],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_3,$b_3 # mul_add_c(a[3],b[3],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_2,$b_4 # mul_add_c(a[2],b[4],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_1,$b_5 # mul_add_c(a[1],b[5],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_0,$b_6 # mul_add_c(a[0],b[6],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_0,$b_7 # mul_add_c(a[0],b[7],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
$ST $c_1,6*$BNSZ($a0) # r[6]=c1;
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_1,$b_6 # mul_add_c(a[1],b[6],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $c_1,$c_3,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_2,$b_5 # mul_add_c(a[2],b[5],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_3,$b_4 # mul_add_c(a[3],b[4],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_4,$b_3 # mul_add_c(a[4],b[3],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_5,$b_2 # mul_add_c(a[5],b[2],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_6,$b_1 # mul_add_c(a[6],b[1],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_7,$b_0 # mul_add_c(a[7],b[0],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_7,$b_1 # mul_add_c(a[7],b[1],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
$ST $c_2,7*$BNSZ($a0) # r[7]=c2;
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_6,$b_2 # mul_add_c(a[6],b[2],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $c_2,$c_1,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_5,$b_3 # mul_add_c(a[5],b[3],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_4,$b_4 # mul_add_c(a[4],b[4],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_3,$b_5 # mul_add_c(a[3],b[5],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_2,$b_6 # mul_add_c(a[2],b[6],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_1,$b_7 # mul_add_c(a[1],b[7],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_2,$b_7 # mul_add_c(a[2],b[7],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
$ST $c_3,8*$BNSZ($a0) # r[8]=c3;
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_3,$b_6 # mul_add_c(a[3],b[6],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $c_3,$c_2,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_4,$b_5 # mul_add_c(a[4],b[5],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_5,$b_4 # mul_add_c(a[5],b[4],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_6,$b_3 # mul_add_c(a[6],b[3],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_7,$b_2 # mul_add_c(a[7],b[2],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_7,$b_3 # mul_add_c(a[7],b[3],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
$ST $c_1,9*$BNSZ($a0) # r[9]=c1;
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_6,$b_4 # mul_add_c(a[6],b[4],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $c_1,$c_3,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_5,$b_5 # mul_add_c(a[5],b[5],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_4,$b_6 # mul_add_c(a[4],b[6],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_3,$b_7 # mul_add_c(a[3],b[7],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_4,$b_7 # mul_add_c(a[4],b[7],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
$ST $c_2,10*$BNSZ($a0) # r[10]=c2;
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_5,$b_6 # mul_add_c(a[5],b[6],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $c_2,$c_1,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_6,$b_5 # mul_add_c(a[6],b[5],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_7,$b_4 # mul_add_c(a[7],b[4],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_7,$b_5 # mul_add_c(a[7],b[5],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
$ST $c_3,11*$BNSZ($a0) # r[11]=c3;
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_6,$b_6 # mul_add_c(a[6],b[6],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $c_3,$c_2,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_5,$b_7 # mul_add_c(a[5],b[7],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_6,$b_7 # mul_add_c(a[6],b[7],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
$ST $c_1,12*$BNSZ($a0) # r[12]=c1;
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_7,$b_6 # mul_add_c(a[7],b[6],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $c_1,$c_3,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_7,$b_7 # mul_add_c(a[7],b[7],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
$ST $c_2,13*$BNSZ($a0) # r[13]=c2;
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$ADDU $t_2,$at
$ADDU $c_1,$t_2
$ST $c_3,14*$BNSZ($a0) # r[14]=c3;
$ST $c_1,15*$BNSZ($a0) # r[15]=c1;
.set noreorder
___
# nubi ABI epilogue: restore spilled registers, pop frame in jr delay slot.
$code.=<<___ if ($flavour =~ /nubi/i);
$REG_L $s5,10*$SZREG($sp)
$REG_L $s4,9*$SZREG($sp)
$REG_L $s3,8*$SZREG($sp)
$REG_L $s2,7*$SZREG($sp)
$REG_L $s1,6*$SZREG($sp)
$REG_L $s0,5*$SZREG($sp)
$REG_L $t3,4*$SZREG($sp)
$REG_L $t2,3*$SZREG($sp)
$REG_L $t1,2*$SZREG($sp)
$REG_L $t0,1*$SZREG($sp)
$REG_L $gp,0*$SZREG($sp)
jr $ra
$PTR_ADD $sp,12*$SZREG
___
# Standard ABI epilogue.
$code.=<<___ if ($flavour !~ /nubi/i);
$REG_L $s5,5*$SZREG($sp)
$REG_L $s4,4*$SZREG($sp)
$REG_L $s3,3*$SZREG($sp)
$REG_L $s2,2*$SZREG($sp)
$REG_L $s1,1*$SZREG($sp)
$REG_L $s0,0*$SZREG($sp)
jr $ra
$PTR_ADD $sp,6*$SZREG
___
# End of bn_mul_comba8; declare bn_mul_comba4 (body emitted below).
$code.=<<___;
.end bn_mul_comba8
.align 5
.globl bn_mul_comba4
.ent bn_mul_comba4
bn_mul_comba4:
___
# bn_mul_comba4(r,a,b): r[0..7] = a[0..3] * b[0..3]. Same comba scheme
# as bn_mul_comba8, but only 16 partial products, so no $s* registers
# are needed and only the nubi flavour has anything to spill.
$code.=<<___ if ($flavour =~ /nubi/i);
.frame $sp,6*$SZREG,$ra
.mask 0x8000f008,-$SZREG
.set noreorder
$PTR_SUB $sp,6*$SZREG
$REG_S $ra,5*$SZREG($sp)
$REG_S $t3,4*$SZREG($sp)
$REG_S $t2,3*$SZREG($sp)
$REG_S $t1,2*$SZREG($sp)
$REG_S $t0,1*$SZREG($sp)
$REG_S $gp,0*$SZREG($sp)
___
$code.=<<___;
.set reorder
$LD $a_0,0($a1)
$LD $b_0,0($a2)
$LD $a_1,$BNSZ($a1)
$LD $a_2,2*$BNSZ($a1)
$MULTU $a_0,$b_0 # mul_add_c(a[0],b[0],c1,c2,c3);
$LD $a_3,3*$BNSZ($a1)
$LD $b_1,$BNSZ($a2)
$LD $b_2,2*$BNSZ($a2)
$LD $b_3,3*$BNSZ($a2)
mflo $c_1
mfhi $c_2
$ST $c_1,0($a0)
$MULTU $a_0,$b_1 # mul_add_c(a[0],b[1],c2,c3,c1);
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_1,$b_0 # mul_add_c(a[1],b[0],c2,c3,c1);
$ADDU $c_3,$t_2,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_2,$b_0 # mul_add_c(a[2],b[0],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $c_1,$c_3,$t_2
$ST $c_2,$BNSZ($a0)
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_1,$b_1 # mul_add_c(a[1],b[1],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_0,$b_2 # mul_add_c(a[0],b[2],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $c_2,$c_1,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_0,$b_3 # mul_add_c(a[0],b[3],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
$ST $c_3,2*$BNSZ($a0)
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_1,$b_2 # mul_add_c(a[1],b[2],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $c_3,$c_2,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_2,$b_1 # mul_add_c(a[2],b[1],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_3,$b_0 # mul_add_c(a[3],b[0],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_3,$b_1 # mul_add_c(a[3],b[1],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
$ST $c_1,3*$BNSZ($a0)
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_2,$b_2 # mul_add_c(a[2],b[2],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $c_1,$c_3,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_1,$b_3 # mul_add_c(a[1],b[3],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_2,$b_3 # mul_add_c(a[2],b[3],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
$ST $c_2,4*$BNSZ($a0)
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_3,$b_2 # mul_add_c(a[3],b[2],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $c_2,$c_1,$t_2
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_3,$b_3 # mul_add_c(a[3],b[3],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
$ST $c_3,5*$BNSZ($a0)
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$ADDU $t_2,$at
$ADDU $c_2,$t_2
$ST $c_1,6*$BNSZ($a0)
$ST $c_2,7*$BNSZ($a0)
.set noreorder
___
# nubi ABI epilogue (non-nubi flavours spilled nothing above).
$code.=<<___ if ($flavour =~ /nubi/i);
$REG_L $t3,4*$SZREG($sp)
$REG_L $t2,3*$SZREG($sp)
$REG_L $t1,2*$SZREG($sp)
$REG_L $t0,1*$SZREG($sp)
$REG_L $gp,0*$SZREG($sp)
$PTR_ADD $sp,6*$SZREG
___
$code.=<<___;
jr $ra
nop
.end bn_mul_comba4
___
# Squaring needs only one operand vector, so reuse the b-word registers
# for a[4..7] (the sqr routines never touch $b_*).
($a_4,$a_5,$a_6,$a_7)=($b_0,$b_1,$b_2,$b_3);
# add_c2: emit code for c2:c1:c0 += 2*(hi:lo), where hi:lo is the product
# currently sitting in HI/LO, and simultaneously kick off the *next*
# multiplication so it overlaps the carry bookkeeping. lo is added to c0
# twice (the doubling); hi collects both intermediate carries and the two
# carry chains into c1/c2 are emitted carefully so no carry is lost.
sub add_c2 () {
my ($hi,$lo,$c0,$c1,$c2,
$warm, # !$warm denotes first call with specific sequence of
# $c_[XYZ] when there is no Z-carry to accumulate yet;
$an,$bn # these two are arguments for multiplication which
# result is used in *next* step [which is why it's
# commented as "forward multiplication" below];
)=@_;
$code.=<<___;
mflo $lo
mfhi $hi
$ADDU $c0,$lo
sltu $at,$c0,$lo
$MULTU $an,$bn # forward multiplication
$ADDU $c0,$lo
$ADDU $at,$hi
sltu $lo,$c0,$lo
$ADDU $c1,$at
$ADDU $hi,$lo
___
$code.=<<___ if (!$warm);
sltu $c2,$c1,$at
$ADDU $c1,$hi
sltu $hi,$c1,$hi
$ADDU $c2,$hi
___
$code.=<<___ if ($warm);
sltu $at,$c1,$at
$ADDU $c1,$hi
$ADDU $c2,$at
sltu $hi,$c1,$hi
$ADDU $c2,$hi
___
}
# bn_sqr_comba8(r,a): r[0..15] = a[0..7]^2. Off-diagonal products are
# doubled: the first product of each column doubles hi:lo inline with
# slt/$SLL (capturing the shifted-out bits), subsequent ones go through
# add_c2; diagonal products a[i]*a[i] are accumulated once, inline.
$code.=<<___;
.align 5
.globl bn_sqr_comba8
.ent bn_sqr_comba8
bn_sqr_comba8:
___
# nubi ABI prologue; other flavours need no frame here.
$code.=<<___ if ($flavour =~ /nubi/i);
.frame $sp,6*$SZREG,$ra
.mask 0x8000f008,-$SZREG
.set noreorder
$PTR_SUB $sp,6*$SZREG
$REG_S $ra,5*$SZREG($sp)
$REG_S $t3,4*$SZREG($sp)
$REG_S $t2,3*$SZREG($sp)
$REG_S $t1,2*$SZREG($sp)
$REG_S $t0,1*$SZREG($sp)
$REG_S $gp,0*$SZREG($sp)
___
$code.=<<___;
.set reorder
$LD $a_0,0($a1)
$LD $a_1,$BNSZ($a1)
$LD $a_2,2*$BNSZ($a1)
$LD $a_3,3*$BNSZ($a1)
$MULTU $a_0,$a_0 # mul_add_c(a[0],b[0],c1,c2,c3);
$LD $a_4,4*$BNSZ($a1)
$LD $a_5,5*$BNSZ($a1)
$LD $a_6,6*$BNSZ($a1)
$LD $a_7,7*$BNSZ($a1)
mflo $c_1
mfhi $c_2
$ST $c_1,0($a0)
$MULTU $a_0,$a_1 # mul_add_c2(a[0],b[1],c2,c3,c1);
mflo $t_1
mfhi $t_2
slt $c_1,$t_2,$zero
$SLL $t_2,1
$MULTU $a_2,$a_0 # mul_add_c2(a[2],b[0],c3,c1,c2);
slt $a2,$t_1,$zero
$ADDU $t_2,$a2
$SLL $t_1,1
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$ADDU $c_3,$t_2,$at
$ST $c_2,$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
$a_1,$a_1); # mul_add_c(a[1],b[1],c3,c1,c2);
$code.=<<___;
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_0,$a_3 # mul_add_c2(a[0],b[3],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
$ST $c_3,2*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
$a_1,$a_2); # mul_add_c2(a[1],b[2],c1,c2,c3);
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
$a_4,$a_0); # mul_add_c2(a[4],b[0],c2,c3,c1);
$code.=<<___;
$ST $c_1,3*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
$a_3,$a_1); # mul_add_c2(a[3],b[1],c2,c3,c1);
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
$a_2,$a_2); # mul_add_c(a[2],b[2],c2,c3,c1);
$code.=<<___;
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_0,$a_5 # mul_add_c2(a[0],b[5],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
$ST $c_2,4*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
$a_1,$a_4); # mul_add_c2(a[1],b[4],c3,c1,c2);
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
$a_2,$a_3); # mul_add_c2(a[2],b[3],c3,c1,c2);
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
$a_6,$a_0); # mul_add_c2(a[6],b[0],c1,c2,c3);
$code.=<<___;
$ST $c_3,5*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
$a_5,$a_1); # mul_add_c2(a[5],b[1],c1,c2,c3);
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
$a_4,$a_2); # mul_add_c2(a[4],b[2],c1,c2,c3);
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
$a_3,$a_3); # mul_add_c(a[3],b[3],c1,c2,c3);
$code.=<<___;
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_0,$a_7 # mul_add_c2(a[0],b[7],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
$ST $c_1,6*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
$a_1,$a_6); # mul_add_c2(a[1],b[6],c2,c3,c1);
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
$a_2,$a_5); # mul_add_c2(a[2],b[5],c2,c3,c1);
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
$a_3,$a_4); # mul_add_c2(a[3],b[4],c2,c3,c1);
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
$a_7,$a_1); # mul_add_c2(a[7],b[1],c3,c1,c2);
$code.=<<___;
$ST $c_2,7*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
$a_6,$a_2); # mul_add_c2(a[6],b[2],c3,c1,c2);
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
$a_5,$a_3); # mul_add_c2(a[5],b[3],c3,c1,c2);
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
$a_4,$a_4); # mul_add_c(a[4],b[4],c3,c1,c2);
$code.=<<___;
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_2,$a_7 # mul_add_c2(a[2],b[7],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
$ST $c_3,8*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
$a_3,$a_6); # mul_add_c2(a[3],b[6],c1,c2,c3);
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
$a_4,$a_5); # mul_add_c2(a[4],b[5],c1,c2,c3);
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
$a_7,$a_3); # mul_add_c2(a[7],b[3],c2,c3,c1);
$code.=<<___;
$ST $c_1,9*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
$a_6,$a_4); # mul_add_c2(a[6],b[4],c2,c3,c1);
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
$a_5,$a_5); # mul_add_c(a[5],b[5],c2,c3,c1);
$code.=<<___;
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_4,$a_7 # mul_add_c2(a[4],b[7],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
$ST $c_2,10*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
$a_5,$a_6); # mul_add_c2(a[5],b[6],c3,c1,c2);
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
$a_7,$a_5); # mul_add_c2(a[7],b[5],c1,c2,c3);
$code.=<<___;
$ST $c_3,11*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
$a_6,$a_6); # mul_add_c(a[6],b[6],c1,c2,c3);
$code.=<<___;
mflo $t_1
mfhi $t_2
$ADDU $c_1,$t_1
sltu $at,$c_1,$t_1
$MULTU $a_6,$a_7 # mul_add_c2(a[6],b[7],c2,c3,c1);
$ADDU $t_2,$at
$ADDU $c_2,$t_2
sltu $at,$c_2,$t_2
$ADDU $c_3,$at
$ST $c_1,12*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
$a_7,$a_7); # mul_add_c(a[7],b[7],c3,c1,c2);
$code.=<<___;
$ST $c_2,13*$BNSZ($a0)
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$ADDU $t_2,$at
$ADDU $c_1,$t_2
$ST $c_3,14*$BNSZ($a0)
$ST $c_1,15*$BNSZ($a0)
.set noreorder
___
# nubi ABI epilogue.
$code.=<<___ if ($flavour =~ /nubi/i);
$REG_L $t3,4*$SZREG($sp)
$REG_L $t2,3*$SZREG($sp)
$REG_L $t1,2*$SZREG($sp)
$REG_L $t0,1*$SZREG($sp)
$REG_L $gp,0*$SZREG($sp)
$PTR_ADD $sp,6*$SZREG
___
# End of bn_sqr_comba8; declare bn_sqr_comba4 (body emitted below).
$code.=<<___;
jr $ra
nop
.end bn_sqr_comba8
.align 5
.globl bn_sqr_comba4
.ent bn_sqr_comba4
bn_sqr_comba4:
___
# bn_sqr_comba4(r,a): r[0..7] = a[0..3]^2, same doubling scheme as
# bn_sqr_comba8 (generation continues past this point in the file).
$code.=<<___ if ($flavour =~ /nubi/i);
.frame $sp,6*$SZREG,$ra
.mask 0x8000f008,-$SZREG
.set noreorder
$PTR_SUB $sp,6*$SZREG
$REG_S $ra,5*$SZREG($sp)
$REG_S $t3,4*$SZREG($sp)
$REG_S $t2,3*$SZREG($sp)
$REG_S $t1,2*$SZREG($sp)
$REG_S $t0,1*$SZREG($sp)
$REG_S $gp,0*$SZREG($sp)
___
$code.=<<___;
.set reorder
$LD $a_0,0($a1)
$LD $a_1,$BNSZ($a1)
$MULTU $a_0,$a_0 # mul_add_c(a[0],b[0],c1,c2,c3);
$LD $a_2,2*$BNSZ($a1)
$LD $a_3,3*$BNSZ($a1)
mflo $c_1
mfhi $c_2
$ST $c_1,0($a0)
$MULTU $a_0,$a_1 # mul_add_c2(a[0],b[1],c2,c3,c1);
mflo $t_1
mfhi $t_2
slt $c_1,$t_2,$zero
$SLL $t_2,1
$MULTU $a_2,$a_0 # mul_add_c2(a[2],b[0],c3,c1,c2);
slt $a2,$t_1,$zero
$ADDU $t_2,$a2
$SLL $t_1,1
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$ADDU $c_3,$t_2,$at
$ST $c_2,$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
$a_1,$a_1); # mul_add_c(a[1],b[1],c3,c1,c2);
$code.=<<___;
mflo $t_1
mfhi $t_2
$ADDU $c_3,$t_1
sltu $at,$c_3,$t_1
$MULTU $a_0,$a_3 # mul_add_c2(a[0],b[3],c1,c2,c3);
$ADDU $t_2,$at
$ADDU $c_1,$t_2
sltu $at,$c_1,$t_2
$ADDU $c_2,$at
$ST $c_3,2*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
$a_1,$a_2); # mul_add_c2(a[1],b[2],c1,c2,c3);
&add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
$a_3,$a_1); # mul_add_c2(a[3],b[1],c2,c3,c1);
$code.=<<___;
$ST $c_1,3*$BNSZ($a0)
___
&add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
$a_2,$a_2); # mul_add_c(a[2],b[2],c2,c3,c1);
$code.=<<___;
mflo $t_1
mfhi $t_2
$ADDU $c_2,$t_1
sltu $at,$c_2,$t_1
$MULTU $a_2,$a_3 # mul_add_c2(a[2],b[3],c3,c1,c2);
$ADDU $t_2,$at
$ADDU $c_3,$t_2
sltu $at,$c_3,$t_2
$ADDU $c_1,$at
$ST $c_2,4*$BNSZ($a0)
___
  2080. &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
  2081. $a_3,$a_3); # mul_add_c(a[3],b[3],c1,c2,c3);
  2082. $code.=<<___;
  2083. $ST $c_3,5*$BNSZ($a0)
  2084. mflo $t_1
  2085. mfhi $t_2
  2086. $ADDU $c_1,$t_1
  2087. sltu $at,$c_1,$t_1
  2088. $ADDU $t_2,$at
  2089. $ADDU $c_2,$t_2
  2090. $ST $c_1,6*$BNSZ($a0)
  2091. $ST $c_2,7*$BNSZ($a0)
  2092. .set noreorder
  2093. ___
  2094. $code.=<<___ if ($flavour =~ /nubi/i);
  2095. $REG_L $t3,4*$SZREG($sp)
  2096. $REG_L $t2,3*$SZREG($sp)
  2097. $REG_L $t1,2*$SZREG($sp)
  2098. $REG_L $t0,1*$SZREG($sp)
  2099. $REG_L $gp,0*$SZREG($sp)
  2100. $PTR_ADD $sp,6*$SZREG
  2101. ___
  2102. $code.=<<___;
  2103. jr $ra
  2104. nop
  2105. .end bn_sqr_comba4
  2106. ___
  2107. print $code;
  2108. close STDOUT;