12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234 |
- #!/usr/bin/env perl
- #
- # ====================================================================
- # Written by Andy Polyakov <appro@fy.chalmers.se> for the OpenSSL
- # project.
- #
- # Rights for redistribution and usage in source and binary forms are
- # granted according to the OpenSSL license. Warranty of any kind is
- # disclaimed.
- # ====================================================================
- # July 1999
- #
- # This is drop-in MIPS III/IV ISA replacement for crypto/bn/bn_asm.c.
- #
- # The module is designed to work with either of the "new" MIPS ABI(5),
- # namely N32 or N64, offered by IRIX 6.x. It's not ment to work under
- # IRIX 5.x not only because it doesn't support new ABIs but also
- # because 5.x kernels put R4x00 CPU into 32-bit mode and all those
- # 64-bit instructions (daddu, dmultu, etc.) found below gonna only
- # cause illegal instruction exception:-(
- #
- # In addition the code depends on preprocessor flags set up by MIPSpro
- # compiler driver (either as or cc) and therefore (probably?) can't be
- # compiled by the GNU assembler. GNU C driver manages fine though...
- # I mean as long as -mmips-as is specified or is the default option,
- # because then it simply invokes /usr/bin/as which in turn takes
- # perfect care of the preprocessor definitions. Another neat feature
- # offered by the MIPSpro assembler is an optimization pass. This gave
- # me the opportunity to have the code looking more regular as all those
- # architecture dependent instruction rescheduling details were left to
- # the assembler. Cool, huh?
- #
- # Performance improvement is astonishing! 'apps/openssl speed rsa dsa'
- # goes way over 3 times faster!
- #
- # <appro@fy.chalmers.se>
- # October 2010
- #
- # Adapt the module even for 32-bit ABIs and other OSes. The former was
- # achieved by mechanical replacement of 64-bit arithmetic instructions
- # such as dmultu, daddu, etc. with their 32-bit counterparts and
- # adjusting offsets denoting multiples of BN_ULONG. Above mentioned
- # >3x performance improvement naturally does not apply to 32-bit code
- # [because there is no instruction 32-bit compiler can't use], one
- # has to content with 40-85% improvement depending on benchmark and
- # key length, more for longer keys.
- $flavour = shift || "o32";
- while (($output=shift) && ($output!~/^\w[\w\-]*\.\w+$/)) {}
- open STDOUT,">$output";
- if ($flavour =~ /64|n32/i) {
- $LD="ld";
- $ST="sd";
- $MULTU="dmultu";
- $DIVU="ddivu";
- $ADDU="daddu";
- $SUBU="dsubu";
- $SRL="dsrl";
- $SLL="dsll";
- $BNSZ=8;
- $PTR_ADD="daddu";
- $PTR_SUB="dsubu";
- $SZREG=8;
- $REG_S="sd";
- $REG_L="ld";
- } else {
- $LD="lw";
- $ST="sw";
- $MULTU="multu";
- $DIVU="divu";
- $ADDU="addu";
- $SUBU="subu";
- $SRL="srl";
- $SLL="sll";
- $BNSZ=4;
- $PTR_ADD="addu";
- $PTR_SUB="subu";
- $SZREG=4;
- $REG_S="sw";
- $REG_L="lw";
- $code=".set mips2\n";
- }
- # Below is N32/64 register layout used in the original module.
- #
- ($zero,$at,$v0,$v1)=map("\$$_",(0..3));
- ($a0,$a1,$a2,$a3,$a4,$a5,$a6,$a7)=map("\$$_",(4..11));
- ($t0,$t1,$t2,$t3,$t8,$t9)=map("\$$_",(12..15,24,25));
- ($s0,$s1,$s2,$s3,$s4,$s5,$s6,$s7)=map("\$$_",(16..23));
- ($gp,$sp,$fp,$ra)=map("\$$_",(28..31));
- ($ta0,$ta1,$ta2,$ta3)=($a4,$a5,$a6,$a7);
- #
- # No special adaptation is required for O32. NUBI on the other hand
- # is treated by saving/restoring ($v1,$t0..$t3).
- $gp=$v1 if ($flavour =~ /nubi/i);
- $minus4=$v1;
- $code.=<<___;
- .rdata
- .asciiz "mips3.s, Version 1.2"
- .asciiz "MIPS II/III/IV ISA artwork by Andy Polyakov <appro\@fy.chalmers.se>"
- .text
- .set noat
- .align 5
- .globl bn_mul_add_words
- .ent bn_mul_add_words
- bn_mul_add_words:
- .set noreorder
- bgtz $a2,bn_mul_add_words_internal
- move $v0,$zero
- jr $ra
- move $a0,$v0
- .end bn_mul_add_words
- .align 5
- .ent bn_mul_add_words_internal
- bn_mul_add_words_internal:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___;
- .set reorder
- li $minus4,-4
- and $ta0,$a2,$minus4
- beqz $ta0,.L_bn_mul_add_words_tail
- .L_bn_mul_add_words_loop:
- $LD $t0,0($a1)
- $MULTU $t0,$a3
- $LD $t1,0($a0)
- $LD $t2,$BNSZ($a1)
- $LD $t3,$BNSZ($a0)
- $LD $ta0,2*$BNSZ($a1)
- $LD $ta1,2*$BNSZ($a0)
- $ADDU $t1,$v0
- sltu $v0,$t1,$v0 # All manuals say it "compares 32-bit
- # values", but it seems to work fine
- # even on 64-bit registers.
- mflo $at
- mfhi $t0
- $ADDU $t1,$at
- $ADDU $v0,$t0
- $MULTU $t2,$a3
- sltu $at,$t1,$at
- $ST $t1,0($a0)
- $ADDU $v0,$at
- $LD $ta2,3*$BNSZ($a1)
- $LD $ta3,3*$BNSZ($a0)
- $ADDU $t3,$v0
- sltu $v0,$t3,$v0
- mflo $at
- mfhi $t2
- $ADDU $t3,$at
- $ADDU $v0,$t2
- $MULTU $ta0,$a3
- sltu $at,$t3,$at
- $ST $t3,$BNSZ($a0)
- $ADDU $v0,$at
- subu $a2,4
- $PTR_ADD $a0,4*$BNSZ
- $PTR_ADD $a1,4*$BNSZ
- $ADDU $ta1,$v0
- sltu $v0,$ta1,$v0
- mflo $at
- mfhi $ta0
- $ADDU $ta1,$at
- $ADDU $v0,$ta0
- $MULTU $ta2,$a3
- sltu $at,$ta1,$at
- $ST $ta1,-2*$BNSZ($a0)
- $ADDU $v0,$at
- and $ta0,$a2,$minus4
- $ADDU $ta3,$v0
- sltu $v0,$ta3,$v0
- mflo $at
- mfhi $ta2
- $ADDU $ta3,$at
- $ADDU $v0,$ta2
- sltu $at,$ta3,$at
- $ST $ta3,-$BNSZ($a0)
- .set noreorder
- bgtz $ta0,.L_bn_mul_add_words_loop
- $ADDU $v0,$at
- beqz $a2,.L_bn_mul_add_words_return
- nop
- .L_bn_mul_add_words_tail:
- .set reorder
- $LD $t0,0($a1)
- $MULTU $t0,$a3
- $LD $t1,0($a0)
- subu $a2,1
- $ADDU $t1,$v0
- sltu $v0,$t1,$v0
- mflo $at
- mfhi $t0
- $ADDU $t1,$at
- $ADDU $v0,$t0
- sltu $at,$t1,$at
- $ST $t1,0($a0)
- $ADDU $v0,$at
- beqz $a2,.L_bn_mul_add_words_return
- $LD $t0,$BNSZ($a1)
- $MULTU $t0,$a3
- $LD $t1,$BNSZ($a0)
- subu $a2,1
- $ADDU $t1,$v0
- sltu $v0,$t1,$v0
- mflo $at
- mfhi $t0
- $ADDU $t1,$at
- $ADDU $v0,$t0
- sltu $at,$t1,$at
- $ST $t1,$BNSZ($a0)
- $ADDU $v0,$at
- beqz $a2,.L_bn_mul_add_words_return
- $LD $t0,2*$BNSZ($a1)
- $MULTU $t0,$a3
- $LD $t1,2*$BNSZ($a0)
- $ADDU $t1,$v0
- sltu $v0,$t1,$v0
- mflo $at
- mfhi $t0
- $ADDU $t1,$at
- $ADDU $v0,$t0
- sltu $at,$t1,$at
- $ST $t1,2*$BNSZ($a0)
- $ADDU $v0,$at
- .L_bn_mul_add_words_return:
- .set noreorder
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- move $a0,$v0
- .end bn_mul_add_words_internal
- .align 5
- .globl bn_mul_words
- .ent bn_mul_words
- bn_mul_words:
- .set noreorder
- bgtz $a2,bn_mul_words_internal
- move $v0,$zero
- jr $ra
- move $a0,$v0
- .end bn_mul_words
- .align 5
- .ent bn_mul_words_internal
- bn_mul_words_internal:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___;
- .set reorder
- li $minus4,-4
- and $ta0,$a2,$minus4
- beqz $ta0,.L_bn_mul_words_tail
- .L_bn_mul_words_loop:
- $LD $t0,0($a1)
- $MULTU $t0,$a3
- $LD $t2,$BNSZ($a1)
- $LD $ta0,2*$BNSZ($a1)
- $LD $ta2,3*$BNSZ($a1)
- mflo $at
- mfhi $t0
- $ADDU $v0,$at
- sltu $t1,$v0,$at
- $MULTU $t2,$a3
- $ST $v0,0($a0)
- $ADDU $v0,$t1,$t0
- subu $a2,4
- $PTR_ADD $a0,4*$BNSZ
- $PTR_ADD $a1,4*$BNSZ
- mflo $at
- mfhi $t2
- $ADDU $v0,$at
- sltu $t3,$v0,$at
- $MULTU $ta0,$a3
- $ST $v0,-3*$BNSZ($a0)
- $ADDU $v0,$t3,$t2
- mflo $at
- mfhi $ta0
- $ADDU $v0,$at
- sltu $ta1,$v0,$at
- $MULTU $ta2,$a3
- $ST $v0,-2*$BNSZ($a0)
- $ADDU $v0,$ta1,$ta0
- and $ta0,$a2,$minus4
- mflo $at
- mfhi $ta2
- $ADDU $v0,$at
- sltu $ta3,$v0,$at
- $ST $v0,-$BNSZ($a0)
- .set noreorder
- bgtz $ta0,.L_bn_mul_words_loop
- $ADDU $v0,$ta3,$ta2
- beqz $a2,.L_bn_mul_words_return
- nop
- .L_bn_mul_words_tail:
- .set reorder
- $LD $t0,0($a1)
- $MULTU $t0,$a3
- subu $a2,1
- mflo $at
- mfhi $t0
- $ADDU $v0,$at
- sltu $t1,$v0,$at
- $ST $v0,0($a0)
- $ADDU $v0,$t1,$t0
- beqz $a2,.L_bn_mul_words_return
- $LD $t0,$BNSZ($a1)
- $MULTU $t0,$a3
- subu $a2,1
- mflo $at
- mfhi $t0
- $ADDU $v0,$at
- sltu $t1,$v0,$at
- $ST $v0,$BNSZ($a0)
- $ADDU $v0,$t1,$t0
- beqz $a2,.L_bn_mul_words_return
- $LD $t0,2*$BNSZ($a1)
- $MULTU $t0,$a3
- mflo $at
- mfhi $t0
- $ADDU $v0,$at
- sltu $t1,$v0,$at
- $ST $v0,2*$BNSZ($a0)
- $ADDU $v0,$t1,$t0
- .L_bn_mul_words_return:
- .set noreorder
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- move $a0,$v0
- .end bn_mul_words_internal
- .align 5
- .globl bn_sqr_words
- .ent bn_sqr_words
- bn_sqr_words:
- .set noreorder
- bgtz $a2,bn_sqr_words_internal
- move $v0,$zero
- jr $ra
- move $a0,$v0
- .end bn_sqr_words
- .align 5
- .ent bn_sqr_words_internal
- bn_sqr_words_internal:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___;
- .set reorder
- li $minus4,-4
- and $ta0,$a2,$minus4
- beqz $ta0,.L_bn_sqr_words_tail
- .L_bn_sqr_words_loop:
- $LD $t0,0($a1)
- $MULTU $t0,$t0
- $LD $t2,$BNSZ($a1)
- $LD $ta0,2*$BNSZ($a1)
- $LD $ta2,3*$BNSZ($a1)
- mflo $t1
- mfhi $t0
- $ST $t1,0($a0)
- $ST $t0,$BNSZ($a0)
- $MULTU $t2,$t2
- subu $a2,4
- $PTR_ADD $a0,8*$BNSZ
- $PTR_ADD $a1,4*$BNSZ
- mflo $t3
- mfhi $t2
- $ST $t3,-6*$BNSZ($a0)
- $ST $t2,-5*$BNSZ($a0)
- $MULTU $ta0,$ta0
- mflo $ta1
- mfhi $ta0
- $ST $ta1,-4*$BNSZ($a0)
- $ST $ta0,-3*$BNSZ($a0)
- $MULTU $ta2,$ta2
- and $ta0,$a2,$minus4
- mflo $ta3
- mfhi $ta2
- $ST $ta3,-2*$BNSZ($a0)
- .set noreorder
- bgtz $ta0,.L_bn_sqr_words_loop
- $ST $ta2,-$BNSZ($a0)
- beqz $a2,.L_bn_sqr_words_return
- nop
- .L_bn_sqr_words_tail:
- .set reorder
- $LD $t0,0($a1)
- $MULTU $t0,$t0
- subu $a2,1
- mflo $t1
- mfhi $t0
- $ST $t1,0($a0)
- $ST $t0,$BNSZ($a0)
- beqz $a2,.L_bn_sqr_words_return
- $LD $t0,$BNSZ($a1)
- $MULTU $t0,$t0
- subu $a2,1
- mflo $t1
- mfhi $t0
- $ST $t1,2*$BNSZ($a0)
- $ST $t0,3*$BNSZ($a0)
- beqz $a2,.L_bn_sqr_words_return
- $LD $t0,2*$BNSZ($a1)
- $MULTU $t0,$t0
- mflo $t1
- mfhi $t0
- $ST $t1,4*$BNSZ($a0)
- $ST $t0,5*$BNSZ($a0)
- .L_bn_sqr_words_return:
- .set noreorder
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- move $a0,$v0
- .end bn_sqr_words_internal
- .align 5
- .globl bn_add_words
- .ent bn_add_words
- bn_add_words:
- .set noreorder
- bgtz $a3,bn_add_words_internal
- move $v0,$zero
- jr $ra
- move $a0,$v0
- .end bn_add_words
- .align 5
- .ent bn_add_words_internal
- bn_add_words_internal:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___;
- .set reorder
- li $minus4,-4
- and $at,$a3,$minus4
- beqz $at,.L_bn_add_words_tail
- .L_bn_add_words_loop:
- $LD $t0,0($a1)
- $LD $ta0,0($a2)
- subu $a3,4
- $LD $t1,$BNSZ($a1)
- and $at,$a3,$minus4
- $LD $t2,2*$BNSZ($a1)
- $PTR_ADD $a2,4*$BNSZ
- $LD $t3,3*$BNSZ($a1)
- $PTR_ADD $a0,4*$BNSZ
- $LD $ta1,-3*$BNSZ($a2)
- $PTR_ADD $a1,4*$BNSZ
- $LD $ta2,-2*$BNSZ($a2)
- $LD $ta3,-$BNSZ($a2)
- $ADDU $ta0,$t0
- sltu $t8,$ta0,$t0
- $ADDU $t0,$ta0,$v0
- sltu $v0,$t0,$ta0
- $ST $t0,-4*$BNSZ($a0)
- $ADDU $v0,$t8
- $ADDU $ta1,$t1
- sltu $t9,$ta1,$t1
- $ADDU $t1,$ta1,$v0
- sltu $v0,$t1,$ta1
- $ST $t1,-3*$BNSZ($a0)
- $ADDU $v0,$t9
- $ADDU $ta2,$t2
- sltu $t8,$ta2,$t2
- $ADDU $t2,$ta2,$v0
- sltu $v0,$t2,$ta2
- $ST $t2,-2*$BNSZ($a0)
- $ADDU $v0,$t8
-
- $ADDU $ta3,$t3
- sltu $t9,$ta3,$t3
- $ADDU $t3,$ta3,$v0
- sltu $v0,$t3,$ta3
- $ST $t3,-$BNSZ($a0)
-
- .set noreorder
- bgtz $at,.L_bn_add_words_loop
- $ADDU $v0,$t9
- beqz $a3,.L_bn_add_words_return
- nop
- .L_bn_add_words_tail:
- .set reorder
- $LD $t0,0($a1)
- $LD $ta0,0($a2)
- $ADDU $ta0,$t0
- subu $a3,1
- sltu $t8,$ta0,$t0
- $ADDU $t0,$ta0,$v0
- sltu $v0,$t0,$ta0
- $ST $t0,0($a0)
- $ADDU $v0,$t8
- beqz $a3,.L_bn_add_words_return
- $LD $t1,$BNSZ($a1)
- $LD $ta1,$BNSZ($a2)
- $ADDU $ta1,$t1
- subu $a3,1
- sltu $t9,$ta1,$t1
- $ADDU $t1,$ta1,$v0
- sltu $v0,$t1,$ta1
- $ST $t1,$BNSZ($a0)
- $ADDU $v0,$t9
- beqz $a3,.L_bn_add_words_return
- $LD $t2,2*$BNSZ($a1)
- $LD $ta2,2*$BNSZ($a2)
- $ADDU $ta2,$t2
- sltu $t8,$ta2,$t2
- $ADDU $t2,$ta2,$v0
- sltu $v0,$t2,$ta2
- $ST $t2,2*$BNSZ($a0)
- $ADDU $v0,$t8
- .L_bn_add_words_return:
- .set noreorder
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- move $a0,$v0
- .end bn_add_words_internal
- .align 5
- .globl bn_sub_words
- .ent bn_sub_words
- bn_sub_words:
- .set noreorder
- bgtz $a3,bn_sub_words_internal
- move $v0,$zero
- jr $ra
- move $a0,$zero
- .end bn_sub_words
- .align 5
- .ent bn_sub_words_internal
- bn_sub_words_internal:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___;
- .set reorder
- li $minus4,-4
- and $at,$a3,$minus4
- beqz $at,.L_bn_sub_words_tail
- .L_bn_sub_words_loop:
- $LD $t0,0($a1)
- $LD $ta0,0($a2)
- subu $a3,4
- $LD $t1,$BNSZ($a1)
- and $at,$a3,$minus4
- $LD $t2,2*$BNSZ($a1)
- $PTR_ADD $a2,4*$BNSZ
- $LD $t3,3*$BNSZ($a1)
- $PTR_ADD $a0,4*$BNSZ
- $LD $ta1,-3*$BNSZ($a2)
- $PTR_ADD $a1,4*$BNSZ
- $LD $ta2,-2*$BNSZ($a2)
- $LD $ta3,-$BNSZ($a2)
- sltu $t8,$t0,$ta0
- $SUBU $ta0,$t0,$ta0
- $SUBU $t0,$ta0,$v0
- sgtu $v0,$t0,$ta0
- $ST $t0,-4*$BNSZ($a0)
- $ADDU $v0,$t8
- sltu $t9,$t1,$ta1
- $SUBU $ta1,$t1,$ta1
- $SUBU $t1,$ta1,$v0
- sgtu $v0,$t1,$ta1
- $ST $t1,-3*$BNSZ($a0)
- $ADDU $v0,$t9
- sltu $t8,$t2,$ta2
- $SUBU $ta2,$t2,$ta2
- $SUBU $t2,$ta2,$v0
- sgtu $v0,$t2,$ta2
- $ST $t2,-2*$BNSZ($a0)
- $ADDU $v0,$t8
- sltu $t9,$t3,$ta3
- $SUBU $ta3,$t3,$ta3
- $SUBU $t3,$ta3,$v0
- sgtu $v0,$t3,$ta3
- $ST $t3,-$BNSZ($a0)
- .set noreorder
- bgtz $at,.L_bn_sub_words_loop
- $ADDU $v0,$t9
- beqz $a3,.L_bn_sub_words_return
- nop
- .L_bn_sub_words_tail:
- .set reorder
- $LD $t0,0($a1)
- $LD $ta0,0($a2)
- subu $a3,1
- sltu $t8,$t0,$ta0
- $SUBU $ta0,$t0,$ta0
- $SUBU $t0,$ta0,$v0
- sgtu $v0,$t0,$ta0
- $ST $t0,0($a0)
- $ADDU $v0,$t8
- beqz $a3,.L_bn_sub_words_return
- $LD $t1,$BNSZ($a1)
- subu $a3,1
- $LD $ta1,$BNSZ($a2)
- sltu $t9,$t1,$ta1
- $SUBU $ta1,$t1,$ta1
- $SUBU $t1,$ta1,$v0
- sgtu $v0,$t1,$ta1
- $ST $t1,$BNSZ($a0)
- $ADDU $v0,$t9
- beqz $a3,.L_bn_sub_words_return
- $LD $t2,2*$BNSZ($a1)
- $LD $ta2,2*$BNSZ($a2)
- sltu $t8,$t2,$ta2
- $SUBU $ta2,$t2,$ta2
- $SUBU $t2,$ta2,$v0
- sgtu $v0,$t2,$ta2
- $ST $t2,2*$BNSZ($a0)
- $ADDU $v0,$t8
- .L_bn_sub_words_return:
- .set noreorder
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- move $a0,$v0
- .end bn_sub_words_internal
- .align 5
- .globl bn_div_3_words
- .ent bn_div_3_words
- bn_div_3_words:
- .set noreorder
- move $a3,$a0 # we know that bn_div_words does not
- # touch $a3, $ta2, $ta3 and preserves $a2
- # so that we can save two arguments
- # and return address in registers
- # instead of stack:-)
-
- $LD $a0,($a3)
- move $ta2,$a1
- bne $a0,$a2,bn_div_3_words_internal
- $LD $a1,-$BNSZ($a3)
- li $v0,-1
- jr $ra
- move $a0,$v0
- .end bn_div_3_words
- .align 5
- .ent bn_div_3_words_internal
- bn_div_3_words_internal:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___;
- .set reorder
- move $ta3,$ra
- bal bn_div_words_internal
- move $ra,$ta3
- $MULTU $ta2,$v0
- $LD $t2,-2*$BNSZ($a3)
- move $ta0,$zero
- mfhi $t1
- mflo $t0
- sltu $t8,$t1,$a1
- .L_bn_div_3_words_inner_loop:
- bnez $t8,.L_bn_div_3_words_inner_loop_done
- sgeu $at,$t2,$t0
- seq $t9,$t1,$a1
- and $at,$t9
- sltu $t3,$t0,$ta2
- $ADDU $a1,$a2
- $SUBU $t1,$t3
- $SUBU $t0,$ta2
- sltu $t8,$t1,$a1
- sltu $ta0,$a1,$a2
- or $t8,$ta0
- .set noreorder
- beqz $at,.L_bn_div_3_words_inner_loop
- $SUBU $v0,1
- $ADDU $v0,1
- .set reorder
- .L_bn_div_3_words_inner_loop_done:
- .set noreorder
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- move $a0,$v0
- .end bn_div_3_words_internal
- .align 5
- .globl bn_div_words
- .ent bn_div_words
- bn_div_words:
- .set noreorder
- bnez $a2,bn_div_words_internal
- li $v0,-1 # I would rather signal div-by-zero
- # which can be done with 'break 7'
- jr $ra
- move $a0,$v0
- .end bn_div_words
- .align 5
- .ent bn_div_words_internal
- bn_div_words_internal:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___;
- move $v1,$zero
- bltz $a2,.L_bn_div_words_body
- move $t9,$v1
- $SLL $a2,1
- bgtz $a2,.-4
- addu $t9,1
- .set reorder
- negu $t1,$t9
- li $t2,-1
- $SLL $t2,$t1
- and $t2,$a0
- $SRL $at,$a1,$t1
- .set noreorder
- beqz $t2,.+12
- nop
- break 6 # signal overflow
- .set reorder
- $SLL $a0,$t9
- $SLL $a1,$t9
- or $a0,$at
- ___
- $QT=$ta0;
- $HH=$ta1;
- $DH=$v1;
- $code.=<<___;
- .L_bn_div_words_body:
- $SRL $DH,$a2,4*$BNSZ # bits
- sgeu $at,$a0,$a2
- .set noreorder
- beqz $at,.+12
- nop
- $SUBU $a0,$a2
- .set reorder
- li $QT,-1
- $SRL $HH,$a0,4*$BNSZ # bits
- $SRL $QT,4*$BNSZ # q=0xffffffff
- beq $DH,$HH,.L_bn_div_words_skip_div1
- $DIVU $zero,$a0,$DH
- mflo $QT
- .L_bn_div_words_skip_div1:
- $MULTU $a2,$QT
- $SLL $t3,$a0,4*$BNSZ # bits
- $SRL $at,$a1,4*$BNSZ # bits
- or $t3,$at
- mflo $t0
- mfhi $t1
- .L_bn_div_words_inner_loop1:
- sltu $t2,$t3,$t0
- seq $t8,$HH,$t1
- sltu $at,$HH,$t1
- and $t2,$t8
- sltu $v0,$t0,$a2
- or $at,$t2
- .set noreorder
- beqz $at,.L_bn_div_words_inner_loop1_done
- $SUBU $t1,$v0
- $SUBU $t0,$a2
- b .L_bn_div_words_inner_loop1
- $SUBU $QT,1
- .set reorder
- .L_bn_div_words_inner_loop1_done:
- $SLL $a1,4*$BNSZ # bits
- $SUBU $a0,$t3,$t0
- $SLL $v0,$QT,4*$BNSZ # bits
- li $QT,-1
- $SRL $HH,$a0,4*$BNSZ # bits
- $SRL $QT,4*$BNSZ # q=0xffffffff
- beq $DH,$HH,.L_bn_div_words_skip_div2
- $DIVU $zero,$a0,$DH
- mflo $QT
- .L_bn_div_words_skip_div2:
- $MULTU $a2,$QT
- $SLL $t3,$a0,4*$BNSZ # bits
- $SRL $at,$a1,4*$BNSZ # bits
- or $t3,$at
- mflo $t0
- mfhi $t1
- .L_bn_div_words_inner_loop2:
- sltu $t2,$t3,$t0
- seq $t8,$HH,$t1
- sltu $at,$HH,$t1
- and $t2,$t8
- sltu $v1,$t0,$a2
- or $at,$t2
- .set noreorder
- beqz $at,.L_bn_div_words_inner_loop2_done
- $SUBU $t1,$v1
- $SUBU $t0,$a2
- b .L_bn_div_words_inner_loop2
- $SUBU $QT,1
- .set reorder
- .L_bn_div_words_inner_loop2_done:
- $SUBU $a0,$t3,$t0
- or $v0,$QT
- $SRL $v1,$a0,$t9 # $v1 contains remainder if anybody wants it
- $SRL $a2,$t9 # restore $a2
- .set noreorder
- move $a1,$v1
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- move $a0,$v0
- .end bn_div_words_internal
- ___
- undef $HH; undef $QT; undef $DH;
- ($a_0,$a_1,$a_2,$a_3)=($t0,$t1,$t2,$t3);
- ($b_0,$b_1,$b_2,$b_3)=($ta0,$ta1,$ta2,$ta3);
- ($a_4,$a_5,$a_6,$a_7)=($s0,$s2,$s4,$a1); # once we load a[7], no use for $a1
- ($b_4,$b_5,$b_6,$b_7)=($s1,$s3,$s5,$a2); # once we load b[7], no use for $a2
- ($t_1,$t_2,$c_1,$c_2,$c_3)=($t8,$t9,$v0,$v1,$a3);
- $code.=<<___;
- .align 5
- .globl bn_mul_comba8
- .ent bn_mul_comba8
- bn_mul_comba8:
- .set noreorder
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,12*$SZREG,$ra
- .mask 0x803ff008,-$SZREG
- $PTR_SUB $sp,12*$SZREG
- $REG_S $ra,11*$SZREG($sp)
- $REG_S $s5,10*$SZREG($sp)
- $REG_S $s4,9*$SZREG($sp)
- $REG_S $s3,8*$SZREG($sp)
- $REG_S $s2,7*$SZREG($sp)
- $REG_S $s1,6*$SZREG($sp)
- $REG_S $s0,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___ if ($flavour !~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x003f0000,-$SZREG
- $PTR_SUB $sp,6*$SZREG
- $REG_S $s5,5*$SZREG($sp)
- $REG_S $s4,4*$SZREG($sp)
- $REG_S $s3,3*$SZREG($sp)
- $REG_S $s2,2*$SZREG($sp)
- $REG_S $s1,1*$SZREG($sp)
- $REG_S $s0,0*$SZREG($sp)
- ___
- $code.=<<___;
- .set reorder
- $LD $a_0,0($a1) # If compiled with -mips3 option on
- # R5000 box assembler barks on this
- # 1ine with "should not have mult/div
- # as last instruction in bb (R10K
- # bug)" warning. If anybody out there
- # has a clue about how to circumvent
- # this do send me a note.
- # <appro\@fy.chalmers.se>
- $LD $b_0,0($a2)
- $LD $a_1,$BNSZ($a1)
- $LD $a_2,2*$BNSZ($a1)
- $MULTU $a_0,$b_0 # mul_add_c(a[0],b[0],c1,c2,c3);
- $LD $a_3,3*$BNSZ($a1)
- $LD $b_1,$BNSZ($a2)
- $LD $b_2,2*$BNSZ($a2)
- $LD $b_3,3*$BNSZ($a2)
- mflo $c_1
- mfhi $c_2
- $LD $a_4,4*$BNSZ($a1)
- $LD $a_5,5*$BNSZ($a1)
- $MULTU $a_0,$b_1 # mul_add_c(a[0],b[1],c2,c3,c1);
- $LD $a_6,6*$BNSZ($a1)
- $LD $a_7,7*$BNSZ($a1)
- $LD $b_4,4*$BNSZ($a2)
- $LD $b_5,5*$BNSZ($a2)
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_1,$b_0 # mul_add_c(a[1],b[0],c2,c3,c1);
- $ADDU $c_3,$t_2,$at
- $LD $b_6,6*$BNSZ($a2)
- $LD $b_7,7*$BNSZ($a2)
- $ST $c_1,0($a0) # r[0]=c1;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_2,$b_0 # mul_add_c(a[2],b[0],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $c_1,$c_3,$t_2
- $ST $c_2,$BNSZ($a0) # r[1]=c2;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_1,$b_1 # mul_add_c(a[1],b[1],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_0,$b_2 # mul_add_c(a[0],b[2],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $c_2,$c_1,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_0,$b_3 # mul_add_c(a[0],b[3],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- $ST $c_3,2*$BNSZ($a0) # r[2]=c3;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_1,$b_2 # mul_add_c(a[1],b[2],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $c_3,$c_2,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_2,$b_1 # mul_add_c(a[2],b[1],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_3,$b_0 # mul_add_c(a[3],b[0],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_4,$b_0 # mul_add_c(a[4],b[0],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- $ST $c_1,3*$BNSZ($a0) # r[3]=c1;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_3,$b_1 # mul_add_c(a[3],b[1],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $c_1,$c_3,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_2,$b_2 # mul_add_c(a[2],b[2],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_1,$b_3 # mul_add_c(a[1],b[3],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_0,$b_4 # mul_add_c(a[0],b[4],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_0,$b_5 # mul_add_c(a[0],b[5],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- $ST $c_2,4*$BNSZ($a0) # r[4]=c2;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_1,$b_4 # mul_add_c(a[1],b[4],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $c_2,$c_1,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_2,$b_3 # mul_add_c(a[2],b[3],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_3,$b_2 # mul_add_c(a[3],b[2],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_4,$b_1 # mul_add_c(a[4],b[1],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_5,$b_0 # mul_add_c(a[5],b[0],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_6,$b_0 # mul_add_c(a[6],b[0],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- $ST $c_3,5*$BNSZ($a0) # r[5]=c3;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_5,$b_1 # mul_add_c(a[5],b[1],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $c_3,$c_2,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_4,$b_2 # mul_add_c(a[4],b[2],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_3,$b_3 # mul_add_c(a[3],b[3],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_2,$b_4 # mul_add_c(a[2],b[4],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_1,$b_5 # mul_add_c(a[1],b[5],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_0,$b_6 # mul_add_c(a[0],b[6],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_0,$b_7 # mul_add_c(a[0],b[7],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- $ST $c_1,6*$BNSZ($a0) # r[6]=c1;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_1,$b_6 # mul_add_c(a[1],b[6],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $c_1,$c_3,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_2,$b_5 # mul_add_c(a[2],b[5],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_3,$b_4 # mul_add_c(a[3],b[4],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_4,$b_3 # mul_add_c(a[4],b[3],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_5,$b_2 # mul_add_c(a[5],b[2],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_6,$b_1 # mul_add_c(a[6],b[1],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_7,$b_0 # mul_add_c(a[7],b[0],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_7,$b_1 # mul_add_c(a[7],b[1],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- $ST $c_2,7*$BNSZ($a0) # r[7]=c2;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_6,$b_2 # mul_add_c(a[6],b[2],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $c_2,$c_1,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_5,$b_3 # mul_add_c(a[5],b[3],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_4,$b_4 # mul_add_c(a[4],b[4],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_3,$b_5 # mul_add_c(a[3],b[5],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_2,$b_6 # mul_add_c(a[2],b[6],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_1,$b_7 # mul_add_c(a[1],b[7],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_2,$b_7 # mul_add_c(a[2],b[7],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- $ST $c_3,8*$BNSZ($a0) # r[8]=c3;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_3,$b_6 # mul_add_c(a[3],b[6],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $c_3,$c_2,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_4,$b_5 # mul_add_c(a[4],b[5],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_5,$b_4 # mul_add_c(a[5],b[4],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_6,$b_3 # mul_add_c(a[6],b[3],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_7,$b_2 # mul_add_c(a[7],b[2],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_7,$b_3 # mul_add_c(a[7],b[3],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- $ST $c_1,9*$BNSZ($a0) # r[9]=c1;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_6,$b_4 # mul_add_c(a[6],b[4],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $c_1,$c_3,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_5,$b_5 # mul_add_c(a[5],b[5],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_4,$b_6 # mul_add_c(a[4],b[6],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_3,$b_7 # mul_add_c(a[3],b[7],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_4,$b_7 # mul_add_c(a[4],b[7],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- $ST $c_2,10*$BNSZ($a0) # r[10]=c2;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_5,$b_6 # mul_add_c(a[5],b[6],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $c_2,$c_1,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_6,$b_5 # mul_add_c(a[6],b[5],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_7,$b_4 # mul_add_c(a[7],b[4],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_7,$b_5 # mul_add_c(a[7],b[5],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- $ST $c_3,11*$BNSZ($a0) # r[11]=c3;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_6,$b_6 # mul_add_c(a[6],b[6],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $c_3,$c_2,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_5,$b_7 # mul_add_c(a[5],b[7],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_6,$b_7 # mul_add_c(a[6],b[7],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- $ST $c_1,12*$BNSZ($a0) # r[12]=c1;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_7,$b_6 # mul_add_c(a[7],b[6],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $c_1,$c_3,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_7,$b_7 # mul_add_c(a[7],b[7],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- $ST $c_2,13*$BNSZ($a0) # r[13]=c2;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- $ST $c_3,14*$BNSZ($a0) # r[14]=c3;
- $ST $c_1,15*$BNSZ($a0) # r[15]=c1;
- .set noreorder
- ___
- # bn_mul_comba8 epilogue: restore callee-saved registers per ABI flavour and
- # return; the frame pop rides in the jr delay slot (.set noreorder is active).
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $s5,10*$SZREG($sp)
- $REG_L $s4,9*$SZREG($sp)
- $REG_L $s3,8*$SZREG($sp)
- $REG_L $s2,7*$SZREG($sp)
- $REG_L $s1,6*$SZREG($sp)
- $REG_L $s0,5*$SZREG($sp)
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- jr $ra
- $PTR_ADD $sp,12*$SZREG
- ___
- $code.=<<___ if ($flavour !~ /nubi/i);
- $REG_L $s5,5*$SZREG($sp)
- $REG_L $s4,4*$SZREG($sp)
- $REG_L $s3,3*$SZREG($sp)
- $REG_L $s2,2*$SZREG($sp)
- $REG_L $s1,1*$SZREG($sp)
- $REG_L $s0,0*$SZREG($sp)
- jr $ra
- $PTR_ADD $sp,6*$SZREG
- ___
- # bn_mul_comba4: 4x4-limb Comba multiplication, r[0..7] = a[0..3]*b[0..3].
- $code.=<<___;
- .end bn_mul_comba8
- .align 5
- .globl bn_mul_comba4
- .ent bn_mul_comba4
- bn_mul_comba4:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- # Column-by-column products with explicit carry propagation: each sltu after
- # an $ADDU recovers the carry-out, and the next $MULTU is issued early to
- # overlap with the carry bookkeeping.
- $code.=<<___;
- .set reorder
- $LD $a_0,0($a1)
- $LD $b_0,0($a2)
- $LD $a_1,$BNSZ($a1)
- $LD $a_2,2*$BNSZ($a1)
- $MULTU $a_0,$b_0 # mul_add_c(a[0],b[0],c1,c2,c3);
- $LD $a_3,3*$BNSZ($a1)
- $LD $b_1,$BNSZ($a2)
- $LD $b_2,2*$BNSZ($a2)
- $LD $b_3,3*$BNSZ($a2)
- mflo $c_1
- mfhi $c_2
- $ST $c_1,0($a0)
- $MULTU $a_0,$b_1 # mul_add_c(a[0],b[1],c2,c3,c1);
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_1,$b_0 # mul_add_c(a[1],b[0],c2,c3,c1);
- $ADDU $c_3,$t_2,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_2,$b_0 # mul_add_c(a[2],b[0],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $c_1,$c_3,$t_2
- $ST $c_2,$BNSZ($a0)
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_1,$b_1 # mul_add_c(a[1],b[1],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_0,$b_2 # mul_add_c(a[0],b[2],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $c_2,$c_1,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_0,$b_3 # mul_add_c(a[0],b[3],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- $ST $c_3,2*$BNSZ($a0)
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_1,$b_2 # mul_add_c(a[1],b[2],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $c_3,$c_2,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_2,$b_1 # mul_add_c(a[2],b[1],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_3,$b_0 # mul_add_c(a[3],b[0],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_3,$b_1 # mul_add_c(a[3],b[1],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- $ST $c_1,3*$BNSZ($a0)
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_2,$b_2 # mul_add_c(a[2],b[2],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $c_1,$c_3,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_1,$b_3 # mul_add_c(a[1],b[3],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_2,$b_3 # mul_add_c(a[2],b[3],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- $ST $c_2,4*$BNSZ($a0)
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_3,$b_2 # mul_add_c(a[3],b[2],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $c_2,$c_1,$t_2
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_3,$b_3 # mul_add_c(a[3],b[3],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- $ST $c_3,5*$BNSZ($a0)
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- $ST $c_1,6*$BNSZ($a0)
- $ST $c_2,7*$BNSZ($a0)
- .set noreorder
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- nop
- .end bn_mul_comba4
- ___
- # The squaring routines below take a single input vector, so the registers
- # that held b[0..3] are re-aliased as a[4..7].
- ($a_4,$a_5,$a_6,$a_7)=($b_0,$b_1,$b_2,$b_3);
- # add_c2() emits the assembly for one mul_add_c2() step of the squaring
- # routines: it folds 2*(hi:lo) -- the doubled off-diagonal product sitting in
- # HI/LO -- into the running column accumulators ($c0,$c1,$c2) with full carry
- # propagation, and issues the *next* multiplication ($an x $bn) early so it
- # executes in parallel with the carry bookkeeping.
- sub add_c2 () {
- my ($hi,$lo,$c0,$c1,$c2,
- $warm, # !$warm denotes first call with specific sequence of
- # $c_[XYZ] when there is no Z-carry to accumulate yet;
- $an,$bn # these two are arguments for multiplication which
- # result is used in *next* step [which is why it's
- # commented as "forward multiplication" below];
- )=@_;
- $code.=<<___;
- mflo $lo
- mfhi $hi
- $ADDU $c0,$lo
- sltu $at,$c0,$lo
- $MULTU $an,$bn # forward multiplication
- $ADDU $c0,$lo
- $ADDU $at,$hi
- sltu $lo,$c0,$lo
- $ADDU $c1,$at
- $ADDU $hi,$lo
- ___
- $code.=<<___ if (!$warm);
- sltu $c2,$c1,$at
- $ADDU $c1,$hi
- sltu $hi,$c1,$hi
- $ADDU $c2,$hi
- ___
- $code.=<<___ if ($warm);
- sltu $at,$c1,$at
- $ADDU $c1,$hi
- $ADDU $c2,$at
- sltu $hi,$c1,$hi
- $ADDU $c2,$hi
- ___
- }
- # bn_sqr_comba8: 8-limb Comba squaring, r[0..15] = a[0..7]^2.  Off-diagonal
- # products a[i]*a[j] (i!=j) occur twice and are doubled (inline via $SLL/slt
- # sign-bit capture for the first column, via add_c2() thereafter); diagonal
- # a[i]*a[i] terms are added exactly once.
- $code.=<<___;
- .align 5
- .globl bn_sqr_comba8
- .ent bn_sqr_comba8
- bn_sqr_comba8:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___;
- .set reorder
- $LD $a_0,0($a1)
- $LD $a_1,$BNSZ($a1)
- $LD $a_2,2*$BNSZ($a1)
- $LD $a_3,3*$BNSZ($a1)
- $MULTU $a_0,$a_0 # mul_add_c(a[0],b[0],c1,c2,c3);
- $LD $a_4,4*$BNSZ($a1)
- $LD $a_5,5*$BNSZ($a1)
- $LD $a_6,6*$BNSZ($a1)
- $LD $a_7,7*$BNSZ($a1)
- mflo $c_1
- mfhi $c_2
- $ST $c_1,0($a0)
- $MULTU $a_0,$a_1 # mul_add_c2(a[0],b[1],c2,c3,c1);
- mflo $t_1
- mfhi $t_2
- slt $c_1,$t_2,$zero
- $SLL $t_2,1
- $MULTU $a_2,$a_0 # mul_add_c2(a[2],b[0],c3,c1,c2);
- slt $a2,$t_1,$zero
- $ADDU $t_2,$a2
- $SLL $t_1,1
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $ADDU $c_3,$t_2,$at
- $ST $c_2,$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
- $a_1,$a_1); # mul_add_c(a[1],b[1],c3,c1,c2);
- $code.=<<___;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_0,$a_3 # mul_add_c2(a[0],b[3],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- $ST $c_3,2*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
- $a_1,$a_2); # mul_add_c2(a[1],b[2],c1,c2,c3);
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
- $a_4,$a_0); # mul_add_c2(a[4],b[0],c2,c3,c1);
- $code.=<<___;
- $ST $c_1,3*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
- $a_3,$a_1); # mul_add_c2(a[3],b[1],c2,c3,c1);
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
- $a_2,$a_2); # mul_add_c(a[2],b[2],c2,c3,c1);
- $code.=<<___;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_0,$a_5 # mul_add_c2(a[0],b[5],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- $ST $c_2,4*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
- $a_1,$a_4); # mul_add_c2(a[1],b[4],c3,c1,c2);
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
- $a_2,$a_3); # mul_add_c2(a[2],b[3],c3,c1,c2);
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
- $a_6,$a_0); # mul_add_c2(a[6],b[0],c1,c2,c3);
- $code.=<<___;
- $ST $c_3,5*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
- $a_5,$a_1); # mul_add_c2(a[5],b[1],c1,c2,c3);
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
- $a_4,$a_2); # mul_add_c2(a[4],b[2],c1,c2,c3);
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
- $a_3,$a_3); # mul_add_c(a[3],b[3],c1,c2,c3);
- $code.=<<___;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_0,$a_7 # mul_add_c2(a[0],b[7],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- $ST $c_1,6*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
- $a_1,$a_6); # mul_add_c2(a[1],b[6],c2,c3,c1);
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
- $a_2,$a_5); # mul_add_c2(a[2],b[5],c2,c3,c1);
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
- $a_3,$a_4); # mul_add_c2(a[3],b[4],c2,c3,c1);
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
- $a_7,$a_1); # mul_add_c2(a[7],b[1],c3,c1,c2);
- $code.=<<___;
- $ST $c_2,7*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
- $a_6,$a_2); # mul_add_c2(a[6],b[2],c3,c1,c2);
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
- $a_5,$a_3); # mul_add_c2(a[5],b[3],c3,c1,c2);
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
- $a_4,$a_4); # mul_add_c(a[4],b[4],c3,c1,c2);
- $code.=<<___;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_2,$a_7 # mul_add_c2(a[2],b[7],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- $ST $c_3,8*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
- $a_3,$a_6); # mul_add_c2(a[3],b[6],c1,c2,c3);
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
- $a_4,$a_5); # mul_add_c2(a[4],b[5],c1,c2,c3);
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
- $a_7,$a_3); # mul_add_c2(a[7],b[3],c2,c3,c1);
- $code.=<<___;
- $ST $c_1,9*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
- $a_6,$a_4); # mul_add_c2(a[6],b[4],c2,c3,c1);
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,1,
- $a_5,$a_5); # mul_add_c(a[5],b[5],c2,c3,c1);
- $code.=<<___;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_4,$a_7 # mul_add_c2(a[4],b[7],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- $ST $c_2,10*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
- $a_5,$a_6); # mul_add_c2(a[5],b[6],c3,c1,c2);
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,1,
- $a_7,$a_5); # mul_add_c2(a[7],b[5],c1,c2,c3);
- $code.=<<___;
- $ST $c_3,11*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
- $a_6,$a_6); # mul_add_c(a[6],b[6],c1,c2,c3);
- $code.=<<___;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $MULTU $a_6,$a_7 # mul_add_c2(a[6],b[7],c2,c3,c1);
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- sltu $at,$c_2,$t_2
- $ADDU $c_3,$at
- $ST $c_1,12*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
- $a_7,$a_7); # mul_add_c(a[7],b[7],c3,c1,c2);
- $code.=<<___;
- $ST $c_2,13*$BNSZ($a0)
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- $ST $c_3,14*$BNSZ($a0)
- $ST $c_1,15*$BNSZ($a0)
- .set noreorder
- ___
- # bn_sqr_comba8 epilogue (NUBI only restores the extra temporaries it saved),
- # followed by bn_sqr_comba4: 4-limb Comba squaring, r[0..7] = a[0..3]^2,
- # using the same doubled-cross-product scheme as bn_sqr_comba8.
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- nop
- .end bn_sqr_comba8
- .align 5
- .globl bn_sqr_comba4
- .ent bn_sqr_comba4
- bn_sqr_comba4:
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- .frame $sp,6*$SZREG,$ra
- .mask 0x8000f008,-$SZREG
- .set noreorder
- $PTR_SUB $sp,6*$SZREG
- $REG_S $ra,5*$SZREG($sp)
- $REG_S $t3,4*$SZREG($sp)
- $REG_S $t2,3*$SZREG($sp)
- $REG_S $t1,2*$SZREG($sp)
- $REG_S $t0,1*$SZREG($sp)
- $REG_S $gp,0*$SZREG($sp)
- ___
- $code.=<<___;
- .set reorder
- $LD $a_0,0($a1)
- $LD $a_1,$BNSZ($a1)
- $MULTU $a_0,$a_0 # mul_add_c(a[0],b[0],c1,c2,c3);
- $LD $a_2,2*$BNSZ($a1)
- $LD $a_3,3*$BNSZ($a1)
- mflo $c_1
- mfhi $c_2
- $ST $c_1,0($a0)
- $MULTU $a_0,$a_1 # mul_add_c2(a[0],b[1],c2,c3,c1);
- mflo $t_1
- mfhi $t_2
- slt $c_1,$t_2,$zero
- $SLL $t_2,1
- $MULTU $a_2,$a_0 # mul_add_c2(a[2],b[0],c3,c1,c2);
- slt $a2,$t_1,$zero
- $ADDU $t_2,$a2
- $SLL $t_1,1
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $ADDU $c_3,$t_2,$at
- $ST $c_2,$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
- $a_1,$a_1); # mul_add_c(a[1],b[1],c3,c1,c2);
- $code.=<<___;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_3,$t_1
- sltu $at,$c_3,$t_1
- $MULTU $a_0,$a_3 # mul_add_c2(a[0],b[3],c1,c2,c3);
- $ADDU $t_2,$at
- $ADDU $c_1,$t_2
- sltu $at,$c_1,$t_2
- $ADDU $c_2,$at
- $ST $c_3,2*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,0,
- $a_1,$a_2); # mul_add_c2(a[1],b[2],c1,c2,c3);
- &add_c2($t_2,$t_1,$c_1,$c_2,$c_3,1,
- $a_3,$a_1); # mul_add_c2(a[3],b[1],c2,c3,c1);
- $code.=<<___;
- $ST $c_1,3*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_2,$c_3,$c_1,0,
- $a_2,$a_2); # mul_add_c(a[2],b[2],c2,c3,c1);
- $code.=<<___;
- mflo $t_1
- mfhi $t_2
- $ADDU $c_2,$t_1
- sltu $at,$c_2,$t_1
- $MULTU $a_2,$a_3 # mul_add_c2(a[2],b[3],c3,c1,c2);
- $ADDU $t_2,$at
- $ADDU $c_3,$t_2
- sltu $at,$c_3,$t_2
- $ADDU $c_1,$at
- $ST $c_2,4*$BNSZ($a0)
- ___
- &add_c2($t_2,$t_1,$c_3,$c_1,$c_2,0,
- $a_3,$a_3); # mul_add_c(a[3],b[3],c1,c2,c3);
- $code.=<<___;
- $ST $c_3,5*$BNSZ($a0)
- mflo $t_1
- mfhi $t_2
- $ADDU $c_1,$t_1
- sltu $at,$c_1,$t_1
- $ADDU $t_2,$at
- $ADDU $c_2,$t_2
- $ST $c_1,6*$BNSZ($a0)
- $ST $c_2,7*$BNSZ($a0)
- .set noreorder
- ___
- $code.=<<___ if ($flavour =~ /nubi/i);
- $REG_L $t3,4*$SZREG($sp)
- $REG_L $t2,3*$SZREG($sp)
- $REG_L $t1,2*$SZREG($sp)
- $REG_L $t0,1*$SZREG($sp)
- $REG_L $gp,0*$SZREG($sp)
- $PTR_ADD $sp,6*$SZREG
- ___
- $code.=<<___;
- jr $ra
- nop
- .end bn_sqr_comba4
- ___
- # Emit the accumulated assembly.  Check the result of closing STDOUT: a
- # failed flush (full disk, broken pipe) would otherwise silently truncate
- # the generated file, producing subtly broken assembly downstream.
- print $code;
- close STDOUT or die "error closing STDOUT: $!";
|