/* aes.c
 *
 * Copyright (C) 2006-2022 wolfSSL Inc.
 *
 * This file is part of wolfSSL.
 *
 * wolfSSL is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * wolfSSL is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
 */
/*
DESCRIPTION
This library provides the interfaces to the Advanced Encryption Standard (AES)
for encrypting and decrypting data. AES is a symmetric block cipher standard
that operates on 128-bit data blocks and accepts key sizes of 128, 192, and
256 bits.
*/
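
/* Usage sketch (editorial addition, not part of wolfSSL): a one-shot
 * AES-128-CBC encryption using the public API declared in
 * <wolfssl/wolfcrypt/aes.h>. The zero-filled key and IV are placeholders;
 * real callers must supply random secrets, and CBC input must be a multiple
 * of AES_BLOCK_SIZE. Kept under #if 0 so it does not affect the build. */
#if 0
static int example_cbc_encrypt(byte* out, const byte* in, word32 sz)
{
    Aes  aes;
    byte key[16] = {0};             /* placeholder 128-bit key */
    byte iv[AES_BLOCK_SIZE] = {0};  /* placeholder IV */
    int  ret;

    ret = wc_AesInit(&aes, NULL, INVALID_DEVID);
    if (ret == 0)
        ret = wc_AesSetKey(&aes, key, sizeof(key), iv, AES_ENCRYPTION);
    if (ret == 0)
        ret = wc_AesCbcEncrypt(&aes, out, in, sz); /* sz % AES_BLOCK_SIZE == 0 */
    wc_AesFree(&aes);
    return ret;
}
#endif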
#ifdef HAVE_CONFIG_H
    #include <config.h>
#endif

#include <wolfssl/wolfcrypt/settings.h>
#include <wolfssl/wolfcrypt/error-crypt.h>

#if !defined(NO_AES)

/* Tip: Locate the software cipher modes by searching for "Software AES" */

#if defined(HAVE_FIPS) && \
    defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
    /* set NO_WRAPPERS before headers, use direct internal f()s not wrappers */
    #define FIPS_NO_WRAPPERS

    #ifdef USE_WINDOWS_API
        #pragma code_seg(".fipsA$g")
        #pragma const_seg(".fipsB$g")
    #endif
#endif

#include <wolfssl/wolfcrypt/aes.h>

#ifdef WOLFSSL_AESNI
    #include <wmmintrin.h>
    #include <emmintrin.h>
    #include <smmintrin.h>
#endif /* WOLFSSL_AESNI */

#include <wolfssl/wolfcrypt/cpuid.h>

#ifdef WOLF_CRYPTO_CB
    #include <wolfssl/wolfcrypt/cryptocb.h>
#endif

#ifdef WOLFSSL_SECO_CAAM
    #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
#endif

#ifdef WOLFSSL_IMXRT_DCP
    #include <wolfssl/wolfcrypt/port/nxp/dcp_port.h>
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    #include <wolfssl/wolfcrypt/port/nxp/se050_port.h>
#endif

#ifdef WOLFSSL_AES_SIV
    #include <wolfssl/wolfcrypt/cmac.h>
#endif

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    #include <wolfssl/wolfcrypt/port/psa/psa.h>
#endif

/* fips wrapper calls, user can call direct */
#if defined(HAVE_FIPS) && \
    (!defined(HAVE_FIPS_VERSION) || (HAVE_FIPS_VERSION < 2))

    int wc_AesSetKey(Aes* aes, const byte* key, word32 len, const byte* iv,
                     int dir)
    {
        if (aes == NULL || !((len == 16) || (len == 24) || (len == 32))) {
            return BAD_FUNC_ARG;
        }
        return AesSetKey_fips(aes, key, len, iv, dir);
    }
    int wc_AesSetIV(Aes* aes, const byte* iv)
    {
        if (aes == NULL) {
            return BAD_FUNC_ARG;
        }
        return AesSetIV_fips(aes, iv);
    }

    #ifdef HAVE_AES_CBC
    int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
    {
        if (aes == NULL || out == NULL || in == NULL) {
            return BAD_FUNC_ARG;
        }
        return AesCbcEncrypt_fips(aes, out, in, sz);
    }

    #ifdef HAVE_AES_DECRYPT
    int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
    {
        if (aes == NULL || out == NULL || in == NULL
                || sz % AES_BLOCK_SIZE != 0) {
            return BAD_FUNC_ARG;
        }
        return AesCbcDecrypt_fips(aes, out, in, sz);
    }
    #endif /* HAVE_AES_DECRYPT */
    #endif /* HAVE_AES_CBC */

    /* AES-CTR */
    #ifdef WOLFSSL_AES_COUNTER
    int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
    {
        if (aes == NULL || out == NULL || in == NULL) {
            return BAD_FUNC_ARG;
        }
        return AesCtrEncrypt(aes, out, in, sz);
    }
    #endif
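
    /* Usage sketch (editorial addition, not part of wolfSSL): AES-CTR is a
     * stream mode, so sz need not be block-aligned and the same call both
     * encrypts and decrypts. wolfSSL keys CTR via wc_AesSetKey() with
     * AES_ENCRYPTION for both directions. */
    #if 0
    static int example_ctr_crypt(Aes* aes, byte* out, const byte* in,
                                 word32 sz)
    {
        /* aes is assumed already keyed with
         * wc_AesSetKey(aes, key, keySz, iv, AES_ENCRYPTION);
         * applying the same call to the output reverses it */
        return wc_AesCtrEncrypt(aes, out, in, sz);
    }
    #endif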
    /* AES-DIRECT */
    #if defined(WOLFSSL_AES_DIRECT)
    void wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in)
    {
        AesEncryptDirect(aes, out, in);
    }

    #ifdef HAVE_AES_DECRYPT
    void wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in)
    {
        AesDecryptDirect(aes, out, in);
    }
    #endif /* HAVE_AES_DECRYPT */

    int wc_AesSetKeyDirect(Aes* aes, const byte* key, word32 len,
                           const byte* iv, int dir)
    {
        return AesSetKeyDirect(aes, key, len, iv, dir);
    }
    #endif /* WOLFSSL_AES_DIRECT */
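
    /* Usage sketch (editorial addition, not part of wolfSSL): the direct API
     * transforms exactly one 16-byte block with no IV or chaining; out and
     * in must each point to AES_BLOCK_SIZE bytes. */
    #if 0
    static void example_encrypt_one_block(Aes* aes,
                                          byte out[AES_BLOCK_SIZE],
                                          const byte in[AES_BLOCK_SIZE])
    {
        /* aes is assumed already keyed via wc_AesSetKeyDirect() */
        wc_AesEncryptDirect(aes, out, in);
    }
    #endif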
  134. /* AES-GCM */
  135. #ifdef HAVE_AESGCM
  136. int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len)
  137. {
  138. if (aes == NULL || !( (len == 16) || (len == 24) || (len == 32)) ) {
  139. return BAD_FUNC_ARG;
  140. }
  141. return AesGcmSetKey_fips(aes, key, len);
  142. }
  143. int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  144. const byte* iv, word32 ivSz,
  145. byte* authTag, word32 authTagSz,
  146. const byte* authIn, word32 authInSz)
  147. {
  148. if (aes == NULL || authTagSz > AES_BLOCK_SIZE ||
  149. authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ ||
  150. ivSz == 0 || ivSz > AES_BLOCK_SIZE) {
  151. return BAD_FUNC_ARG;
  152. }
  153. return AesGcmEncrypt_fips(aes, out, in, sz, iv, ivSz, authTag,
  154. authTagSz, authIn, authInSz);
  155. }
  156. #ifdef HAVE_AES_DECRYPT
  157. int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  158. const byte* iv, word32 ivSz,
  159. const byte* authTag, word32 authTagSz,
  160. const byte* authIn, word32 authInSz)
  161. {
  162. if (aes == NULL || out == NULL || in == NULL || iv == NULL
  163. || authTag == NULL || authTagSz > AES_BLOCK_SIZE ||
  164. ivSz == 0 || ivSz > AES_BLOCK_SIZE) {
  165. return BAD_FUNC_ARG;
  166. }
  167. return AesGcmDecrypt_fips(aes, out, in, sz, iv, ivSz, authTag,
  168. authTagSz, authIn, authInSz);
  169. }
  170. #endif /* HAVE_AES_DECRYPT */
  171. int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len)
  172. {
  173. if (gmac == NULL || key == NULL || !((len == 16) ||
  174. (len == 24) || (len == 32)) ) {
  175. return BAD_FUNC_ARG;
  176. }
  177. return GmacSetKey(gmac, key, len);
  178. }
  179. int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz,
  180. const byte* authIn, word32 authInSz,
  181. byte* authTag, word32 authTagSz)
  182. {
  183. if (gmac == NULL || authTagSz > AES_BLOCK_SIZE ||
  184. authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
  185. return BAD_FUNC_ARG;
  186. }
  187. return GmacUpdate(gmac, iv, ivSz, authIn, authInSz,
  188. authTag, authTagSz);
  189. }
  190. #endif /* HAVE_AESGCM */
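
/* Example use of the GCM wrappers above (an illustrative sketch only; the
 * 12-byte IV and the buffer sizes are conventional choices rather than
 * requirements of this file, and error handling is abbreviated):
 *
 *     Aes  aes;
 *     byte key[32], iv[12];          // 96-bit IV is the common choice
 *     byte tag[AES_BLOCK_SIZE];
 *     byte aad[13], plain[64], cipher[64];
 *
 *     if (wc_AesGcmSetKey(&aes, key, sizeof(key)) == 0 &&
 *         wc_AesGcmEncrypt(&aes, cipher, plain, sizeof(plain),
 *                          iv, sizeof(iv), tag, sizeof(tag),
 *                          aad, sizeof(aad)) == 0) {
 *         // cipher holds the ciphertext, tag the authentication tag
 *     }
 */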

/* AES-CCM */
#if defined(HAVE_AESCCM) && \
    defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz)
{
    return AesCcmSetKey(aes, key, keySz);
}

int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
    const byte* nonce, word32 nonceSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    /* sanity check on arguments */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }
    AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz, authTag,
        authTagSz, authIn, authInSz);
    return 0;
}

#ifdef HAVE_AES_DECRYPT
int wc_AesCcmDecrypt(Aes* aes, byte* out,
    const byte* in, word32 inSz,
    const byte* nonce, word32 nonceSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }
    return AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz,
        authTag, authTagSz, authIn, authInSz);
}
#endif /* HAVE_AES_DECRYPT */
#endif /* HAVE_AESCCM && HAVE_FIPS_VERSION 2 */
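
/* Note on the nonce bounds checked above: CCM (RFC 3610 / NIST SP 800-38C)
 * defines the nonce length n with 7 <= n <= 13 octets; the remaining
 * 15 - n octets of the first block carry the message-length counter, so a
 * shorter nonce allows a longer message. The 7..13 range is a property of
 * the mode itself, not a local implementation limit. */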

int wc_AesInit(Aes* aes, void* h, int i)
{
    if (aes == NULL)
        return BAD_FUNC_ARG;
    (void)h;
    (void)i;
    /* the FIPS module doesn't provide AesInit; it is only needed here for
     * the KCAPI backend */
#ifdef WOLFSSL_KCAPI_AES
    return AesInit(aes, h, i);
#else
    return 0;
#endif
}

void wc_AesFree(Aes* aes)
{
    (void)aes;
    /* the FIPS module doesn't provide AesFree; it is only needed here for
     * the KCAPI backend */
#ifdef WOLFSSL_KCAPI_AES
    AesFree(aes);
#endif
}

#else /* else build without fips, or for FIPS v2+ */

#if defined(WOLFSSL_TI_CRYPT)
    #include <wolfcrypt/src/port/ti/ti-aes.c>
#else

#include <wolfssl/wolfcrypt/logging.h>

#ifdef NO_INLINE
    #include <wolfssl/wolfcrypt/misc.h>
#else
    #define WOLFSSL_MISC_INCLUDED
    #include <wolfcrypt/src/misc.c>
#endif

#if !defined(WOLFSSL_ARMASM)

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    /* AES itself may not be hardware accelerated in this build, but the
     * CAAM key-blob support is still wanted */
    #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
#endif

#ifdef DEBUG_AESNI
    #include <stdio.h>
#endif

#ifdef _MSC_VER
    /* silence MSVC warning 4127: conditional expression is constant,
     * e.g. while(1) */
    #pragma warning(disable: 4127)
#endif

/* Define AES implementation includes and functions */
#if defined(STM32_CRYPTO)
/* STM32F2/F4/F7/L4/L5/H7/WB55 hardware AES support for ECB, CBC, CTR and
 * GCM modes */

#if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESGCM) || defined(HAVE_AESCCM)

static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#else
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
#endif

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode  = CRYP_CHAINMODE_AES_ECB;
    hcryp.Init.KeyWriteFlag  = CRYP_KEY_WRITE_ENABLE;
#endif
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
        (uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESECB_Encrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;

    /* reset registers to their default values */
    CRYP_DeInit();

    /* setup key */
    CRYP_KeyInit(&keyInit);

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir  = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();

    CRYP_DataIn(*(uint32_t*)&inBlock[0]);
    CRYP_DataIn(*(uint32_t*)&inBlock[4]);
    CRYP_DataIn(*(uint32_t*)&inBlock[8]);
    CRYP_DataIn(*(uint32_t*)&inBlock[12]);

    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    *(uint32_t*)&outBlock[0]  = CRYP_DataOut();
    *(uint32_t*)&outBlock[4]  = CRYP_DataOut();
    *(uint32_t*)&outBlock[8]  = CRYP_DataOut();
    *(uint32_t*)&outBlock[12] = CRYP_DataOut();

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */

    wolfSSL_CryptHwMutexUnLock();
    return ret;
}
#endif /* WOLFSSL_AES_DIRECT || HAVE_AESGCM || HAVE_AESCCM */

#ifdef HAVE_AES_DECRYPT
#if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESCCM)
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#else
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
#endif

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
    hcryp.Init.ChainingMode  = CRYP_CHAINMODE_AES_ECB;
    hcryp.Init.KeyWriteFlag  = CRYP_KEY_WRITE_ENABLE;
#endif
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
        (uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESECB_Decrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set direction and key */
    CRYP_KeyInit(&keyInit);
    cryptInit.CRYP_AlgoDir  = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* wait until decrypt key has been initialized */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir  = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();

    CRYP_DataIn(*(uint32_t*)&inBlock[0]);
    CRYP_DataIn(*(uint32_t*)&inBlock[4]);
    CRYP_DataIn(*(uint32_t*)&inBlock[8]);
    CRYP_DataIn(*(uint32_t*)&inBlock[12]);

    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    *(uint32_t*)&outBlock[0]  = CRYP_DataOut();
    *(uint32_t*)&outBlock[4]  = CRYP_DataOut();
    *(uint32_t*)&outBlock[8]  = CRYP_DataOut();
    *(uint32_t*)&outBlock[12] = CRYP_DataOut();

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */

    wolfSSL_CryptHwMutexUnLock();
    return ret;
}
#endif /* WOLFSSL_AES_DIRECT || HAVE_AESCCM */
#endif /* HAVE_AES_DECRYPT */

#elif defined(HAVE_COLDFIRE_SEC)
    /* Freescale Coldfire SEC support for CBC mode.
     * NOTE: no support for AES-CTR/GCM/CCM/Direct */
    #include <wolfssl/wolfcrypt/types.h>
    #include "sec.h"
    #include "mcf5475_sec.h"
    #include "mcf5475_siu.h"

#elif defined(FREESCALE_LTC)
    #include "fsl_ltc.h"
    #if defined(FREESCALE_LTC_AES_GCM)
        #undef NEED_AES_TABLES
        #undef GCM_TABLE
    #endif

/* if LTC doesn't have GCM, use software with LTC AES ECB mode */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 keySize = 0;
    byte* key = (byte*)aes->key;
    int ret = wc_AesGetKeySize(aes, &keySize);
    if (ret != 0)
        return ret;

    /* propagate a mutex failure instead of silently skipping the
     * encryption */
    ret = wolfSSL_CryptHwMutexLock();
    if (ret == 0) {
        LTC_AES_EncryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
            key, keySize);
        wolfSSL_CryptHwMutexUnLock();
    }
    return ret;
}

#ifdef HAVE_AES_DECRYPT
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 keySize = 0;
    byte* key = (byte*)aes->key;
    int ret = wc_AesGetKeySize(aes, &keySize);
    if (ret != 0)
        return ret;

    /* propagate a mutex failure instead of silently skipping the
     * decryption */
    ret = wolfSSL_CryptHwMutexLock();
    if (ret == 0) {
        LTC_AES_DecryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
            key, keySize, kLTC_EncryptKey);
        wolfSSL_CryptHwMutexUnLock();
    }
    return ret;
}
#endif

#elif defined(FREESCALE_MMCAU)
    /* Freescale mmCAU hardware AES support for Direct, CBC, CCM, GCM modes
     * through the CAU/mmCAU library. Documentation located in
     * ColdFire/ColdFire+ CAU and Kinetis mmCAU Software Library User
     * Guide (See note in README). */
    #ifdef FREESCALE_MMCAU_CLASSIC
        /* MMCAU 1.4 library used with non-KSDK / classic MQX builds */
        #include "cau_api.h"
    #else
        #include "fsl_mmcau.h"
    #endif

static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    if (wolfSSL_CryptHwMutexLock() == 0) {
    #ifdef FREESCALE_MMCAU_CLASSIC
        if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
            WOLFSSL_MSG("Bad cau_aes_encrypt alignment");
            /* release the mutex before the early error return */
            wolfSSL_CryptHwMutexUnLock();
            return BAD_ALIGN_E;
        }
        cau_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
    #else
        MMCAU_AES_EncryptEcb(inBlock, (byte*)aes->key, aes->rounds,
            outBlock);
    #endif
        wolfSSL_CryptHwMutexUnLock();
    }
    return 0;
}

#ifdef HAVE_AES_DECRYPT
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    if (wolfSSL_CryptHwMutexLock() == 0) {
    #ifdef FREESCALE_MMCAU_CLASSIC
        if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
            WOLFSSL_MSG("Bad cau_aes_decrypt alignment");
            /* release the mutex before the early error return */
            wolfSSL_CryptHwMutexUnLock();
            return BAD_ALIGN_E;
        }
        cau_aes_decrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
    #else
        MMCAU_AES_DecryptEcb(inBlock, (byte*)aes->key, aes->rounds,
            outBlock);
    #endif
        wolfSSL_CryptHwMutexUnLock();
    }
    return 0;
}
#endif /* HAVE_AES_DECRYPT */

#elif defined(WOLFSSL_PIC32MZ_CRYPT)
    #include <wolfssl/wolfcrypt/port/pic32/pic32mz-crypt.h>

#if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    /* thread mutex protection handled in Pic32Crypto */
    return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
        outBlock, inBlock, AES_BLOCK_SIZE,
        PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
}
#endif

#if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    /* thread mutex protection handled in Pic32Crypto */
    return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
        outBlock, inBlock, AES_BLOCK_SIZE,
        PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
}
#endif

#elif defined(WOLFSSL_NRF51_AES)
    /* use built-in AES hardware - AES-128 ECB encrypt only */
    #include "wolfssl/wolfcrypt/port/nrf51.h"

static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    int ret;
    ret = wolfSSL_CryptHwMutexLock();
    if (ret == 0) {
        ret = nrf51_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds,
            outBlock);
        wolfSSL_CryptHwMutexUnLock();
    }
    return ret;
}

#ifdef HAVE_AES_DECRYPT
    #error nRF51 AES Hardware does not support decrypt
#endif /* HAVE_AES_DECRYPT */

#elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
    !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES)
    #include "wolfssl/wolfcrypt/port/Espressif/esp32-crypt.h"

#if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    /* thread mutex protection handled in esp_aes_hw_InUse */
    return wc_esp32AesEncrypt(aes, inBlock, outBlock);
}
#endif

#if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    /* thread mutex protection handled in esp_aes_hw_InUse */
    return wc_esp32AesDecrypt(aes, inBlock, outBlock);
}
#endif

#elif defined(WOLFSSL_AESNI)

#define NEED_AES_TABLES

/* Each platform needs to query cpuid info type 1 to see if AES-NI is
 * supported. Also set up a macro for proper assembler linkage, so the ABI
 * underscore prefix doesn't cause symbol conflicts. */
#ifndef AESNI_ALIGN
    #define AESNI_ALIGN 16
#endif

#ifdef _MSC_VER
    #define XASM_LINK(f)
#elif defined(__APPLE__)
    #define XASM_LINK(f) asm("_" f)
#else
    #define XASM_LINK(f) asm(f)
#endif /* _MSC_VER */

static int checkAESNI = 0;
static int haveAESNI  = 0;
static word32 intel_flags = 0;

static WARN_UNUSED_RESULT int Check_CPU_support_AES(void)
{
    intel_flags = cpuid_get_flags();
    return IS_INTEL_AESNI(intel_flags) != 0;
}
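
/* Detection sketch: callers in this file are expected to do a lazy,
 * one-time check along these lines before taking an AES-NI code path
 * (the actual call sites live in the key-setup code further below):
 *
 *     if (checkAESNI == 0) {
 *         haveAESNI  = Check_CPU_support_AES();
 *         checkAESNI = 1;
 *     }
 *     if (haveAESNI) {
 *         // safe to call the AES-NI assembler routines declared below
 *     }
 */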

/* tell the C compiler these are asm functions, in case the ABI underscore
 * prefix differs between clang/gcc/llvm etc. */
#ifdef HAVE_AES_CBC
void AES_CBC_encrypt(const unsigned char* in, unsigned char* out,
                     unsigned char* ivec, unsigned long length,
                     const unsigned char* KS, int nr)
                     XASM_LINK("AES_CBC_encrypt");

#ifdef HAVE_AES_DECRYPT
#if defined(WOLFSSL_AESNI_BY4)
void AES_CBC_decrypt_by4(const unsigned char* in, unsigned char* out,
                         unsigned char* ivec, unsigned long length,
                         const unsigned char* KS, int nr)
                         XASM_LINK("AES_CBC_decrypt_by4");
#elif defined(WOLFSSL_AESNI_BY6)
void AES_CBC_decrypt_by6(const unsigned char* in, unsigned char* out,
                         unsigned char* ivec, unsigned long length,
                         const unsigned char* KS, int nr)
                         XASM_LINK("AES_CBC_decrypt_by6");
#else /* WOLFSSL_AESNI_BYx */
void AES_CBC_decrypt_by8(const unsigned char* in, unsigned char* out,
                         unsigned char* ivec, unsigned long length,
                         const unsigned char* KS, int nr)
                         XASM_LINK("AES_CBC_decrypt_by8");
#endif /* WOLFSSL_AESNI_BYx */
#endif /* HAVE_AES_DECRYPT */
#endif /* HAVE_AES_CBC */

void AES_ECB_encrypt(const unsigned char* in, unsigned char* out,
                     unsigned long length, const unsigned char* KS, int nr)
                     XASM_LINK("AES_ECB_encrypt");

#ifdef HAVE_AES_DECRYPT
void AES_ECB_decrypt(const unsigned char* in, unsigned char* out,
                     unsigned long length, const unsigned char* KS, int nr)
                     XASM_LINK("AES_ECB_decrypt");
#endif

void AES_128_Key_Expansion(const unsigned char* userkey,
                           unsigned char* key_schedule)
                           XASM_LINK("AES_128_Key_Expansion");

void AES_192_Key_Expansion(const unsigned char* userkey,
                           unsigned char* key_schedule)
                           XASM_LINK("AES_192_Key_Expansion");

void AES_256_Key_Expansion(const unsigned char* userkey,
                           unsigned char* key_schedule)
                           XASM_LINK("AES_256_Key_Expansion");

static WARN_UNUSED_RESULT int AES_set_encrypt_key(
    const unsigned char *userKey, const int bits, Aes* aes)
{
    int ret;

    if (!userKey || !aes)
        return BAD_FUNC_ARG;

    switch (bits) {
        case 128:
            AES_128_Key_Expansion(userKey, (byte*)aes->key);
            aes->rounds = 10;
            return 0;
        case 192:
            AES_192_Key_Expansion(userKey, (byte*)aes->key);
            aes->rounds = 12;
            return 0;
        case 256:
            AES_256_Key_Expansion(userKey, (byte*)aes->key);
            aes->rounds = 14;
            return 0;
        default:
            ret = BAD_FUNC_ARG;
    }
    return ret;
}

#ifdef HAVE_AES_DECRYPT
static WARN_UNUSED_RESULT int AES_set_decrypt_key(
    const unsigned char* userKey, const int bits, Aes* aes)
{
    int nr;
#ifdef WOLFSSL_SMALL_STACK
    Aes *temp_key;
#else
    Aes temp_key[1];
#endif
    __m128i *Key_Schedule;
    __m128i *Temp_Key_Schedule;

    if (!userKey || !aes)
        return BAD_FUNC_ARG;

#ifdef WOLFSSL_SMALL_STACK
    if ((temp_key = (Aes *)XMALLOC(sizeof *aes, aes->heap,
                                   DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    if (AES_set_encrypt_key(userKey, bits, temp_key) == BAD_FUNC_ARG) {
#ifdef WOLFSSL_SMALL_STACK
        XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
#endif
        return BAD_FUNC_ARG;
    }

    Key_Schedule = (__m128i*)aes->key;
    Temp_Key_Schedule = (__m128i*)temp_key->key;

    nr = temp_key->rounds;
    aes->rounds = nr;

#ifdef WOLFSSL_SMALL_STACK
    SAVE_VECTOR_REGISTERS(XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
                          return _svr_ret;);
#else
    SAVE_VECTOR_REGISTERS(return _svr_ret;);
#endif

    Key_Schedule[nr]   = Temp_Key_Schedule[0];
    Key_Schedule[nr-1] = _mm_aesimc_si128(Temp_Key_Schedule[1]);
    Key_Schedule[nr-2] = _mm_aesimc_si128(Temp_Key_Schedule[2]);
    Key_Schedule[nr-3] = _mm_aesimc_si128(Temp_Key_Schedule[3]);
    Key_Schedule[nr-4] = _mm_aesimc_si128(Temp_Key_Schedule[4]);
    Key_Schedule[nr-5] = _mm_aesimc_si128(Temp_Key_Schedule[5]);
    Key_Schedule[nr-6] = _mm_aesimc_si128(Temp_Key_Schedule[6]);
    Key_Schedule[nr-7] = _mm_aesimc_si128(Temp_Key_Schedule[7]);
    Key_Schedule[nr-8] = _mm_aesimc_si128(Temp_Key_Schedule[8]);
    Key_Schedule[nr-9] = _mm_aesimc_si128(Temp_Key_Schedule[9]);
    if (nr > 10) {
        Key_Schedule[nr-10] = _mm_aesimc_si128(Temp_Key_Schedule[10]);
        Key_Schedule[nr-11] = _mm_aesimc_si128(Temp_Key_Schedule[11]);
    }
    if (nr > 12) {
        Key_Schedule[nr-12] = _mm_aesimc_si128(Temp_Key_Schedule[12]);
        Key_Schedule[nr-13] = _mm_aesimc_si128(Temp_Key_Schedule[13]);
    }
    Key_Schedule[0] = Temp_Key_Schedule[nr];

    RESTORE_VECTOR_REGISTERS();

#ifdef WOLFSSL_SMALL_STACK
    XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
#endif
    return 0;
}
#endif /* HAVE_AES_DECRYPT */
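
/* Why the AESIMC dance above: the AESDEC instruction implements the
 * "equivalent inverse cipher" of FIPS-197, which expects the decryption
 * round keys in reverse order and, for the inner rounds, transformed by
 * InvMixColumns. AESIMC applies exactly that transform, so the loop copies
 * the encryption schedule backwards and runs AESIMC over every round key
 * except the first and last. */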

#elif (defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
        && !defined(WOLFSSL_QNX_CAAM)) || \
      ((defined(WOLFSSL_AFALG) || defined(WOLFSSL_DEVCRYPTO_AES)) && \
        defined(HAVE_AESCCM))
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    return wc_AesEncryptDirect(aes, outBlock, inBlock);
}

#elif defined(WOLFSSL_AFALG)
    /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */

#elif defined(WOLFSSL_DEVCRYPTO_AES)
    /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */

#elif defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
static WARN_UNUSED_RESULT int AES_ECB_encrypt(
    Aes* aes, const byte* inBlock, byte* outBlock, int sz)
{
    return se050_aes_crypt(aes, inBlock, outBlock, sz, AES_ENCRYPTION,
        kAlgorithm_SSS_AES_ECB);
}

static WARN_UNUSED_RESULT int AES_ECB_decrypt(
    Aes* aes, const byte* inBlock, byte* outBlock, int sz)
{
    return se050_aes_crypt(aes, inBlock, outBlock, sz, AES_DECRYPTION,
        kAlgorithm_SSS_AES_ECB);
}

static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    return AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
}

static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
}

#elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    #include "hal_data.h"

#ifndef WOLFSSL_SCE_AES256_HANDLE
    #define WOLFSSL_SCE_AES256_HANDLE g_sce_aes_256
#endif
#ifndef WOLFSSL_SCE_AES192_HANDLE
    #define WOLFSSL_SCE_AES192_HANDLE g_sce_aes_192
#endif
#ifndef WOLFSSL_SCE_AES128_HANDLE
    #define WOLFSSL_SCE_AES128_HANDLE g_sce_aes_128
#endif

static WARN_UNUSED_RESULT int AES_ECB_encrypt(
    Aes* aes, const byte* inBlock, byte* outBlock, int sz)
{
    word32 ret;

    if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
            CRYPTO_WORD_ENDIAN_BIG) {
        ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
    }

    switch (aes->keylen) {
    #ifdef WOLFSSL_AES_128
        case AES_128_KEY_SIZE:
            ret = WOLFSSL_SCE_AES128_HANDLE.p_api->encrypt(
                WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key,
                NULL, (sz / sizeof(word32)), (word32*)inBlock,
                (word32*)outBlock);
            break;
    #endif
    #ifdef WOLFSSL_AES_192
        case AES_192_KEY_SIZE:
            ret = WOLFSSL_SCE_AES192_HANDLE.p_api->encrypt(
                WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key,
                NULL, (sz / sizeof(word32)), (word32*)inBlock,
                (word32*)outBlock);
            break;
    #endif
    #ifdef WOLFSSL_AES_256
        case AES_256_KEY_SIZE:
            ret = WOLFSSL_SCE_AES256_HANDLE.p_api->encrypt(
                WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key,
                NULL, (sz / sizeof(word32)), (word32*)inBlock,
                (word32*)outBlock);
            break;
    #endif
        default:
            WOLFSSL_MSG("Unknown key size");
            return BAD_FUNC_ARG;
    }

    if (ret != SSP_SUCCESS) {
        /* revert input */
        ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
        return WC_HW_E;
    }

    if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
            CRYPTO_WORD_ENDIAN_BIG) {
        ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
        if (inBlock != outBlock) {
            /* revert input */
            ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
        }
    }
    return 0;
}

#if defined(HAVE_AES_DECRYPT)
static WARN_UNUSED_RESULT int AES_ECB_decrypt(
    Aes* aes, const byte* inBlock, byte* outBlock, int sz)
{
    word32 ret;

    if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
            CRYPTO_WORD_ENDIAN_BIG) {
        ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
    }

    switch (aes->keylen) {
    #ifdef WOLFSSL_AES_128
        case AES_128_KEY_SIZE:
            ret = WOLFSSL_SCE_AES128_HANDLE.p_api->decrypt(
                WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key, aes->reg,
                (sz / sizeof(word32)), (word32*)inBlock,
                (word32*)outBlock);
            break;
    #endif
    #ifdef WOLFSSL_AES_192
        case AES_192_KEY_SIZE:
            ret = WOLFSSL_SCE_AES192_HANDLE.p_api->decrypt(
                WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key, aes->reg,
                (sz / sizeof(word32)), (word32*)inBlock,
                (word32*)outBlock);
            break;
    #endif
    #ifdef WOLFSSL_AES_256
        case AES_256_KEY_SIZE:
            ret = WOLFSSL_SCE_AES256_HANDLE.p_api->decrypt(
                WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key, aes->reg,
                (sz / sizeof(word32)), (word32*)inBlock,
                (word32*)outBlock);
            break;
    #endif
        default:
            WOLFSSL_MSG("Unknown key size");
            return BAD_FUNC_ARG;
    }

    if (ret != SSP_SUCCESS) {
        return WC_HW_E;
    }

    if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
            CRYPTO_WORD_ENDIAN_BIG) {
        ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
        if (inBlock != outBlock) {
            /* revert input */
            ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
        }
    }
    return 0;
}
#endif /* HAVE_AES_DECRYPT */

#if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    return AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
}
#endif

#if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
}
#endif

#elif defined(WOLFSSL_KCAPI_AES)
    /* Only CBC and GCM are implemented in
     * wolfcrypt/src/port/kcapi/kcapi_aes.c; the other modes still need the
     * software tables. */
    #if defined(WOLFSSL_AES_COUNTER) || defined(HAVE_AESCCM) || \
        defined(WOLFSSL_CMAC) || defined(WOLFSSL_AES_OFB) || \
        defined(WOLFSSL_AES_CFB) || defined(HAVE_AES_ECB) || \
        defined(WOLFSSL_AES_DIRECT) || \
        (defined(HAVE_AES_CBC) && defined(WOLFSSL_NO_KCAPI_AES_CBC))
        #define NEED_AES_TABLES
    #endif

#elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    /* implemented in wolfcrypt/src/port/psa/psa_aes.c */

#else
    /* using wolfCrypt software implementation */
    #define NEED_AES_TABLES
#endif

#ifdef NEED_AES_TABLES

static const FLASH_QUALIFIER word32 rcon[] = {
    0x01000000, 0x02000000, 0x04000000, 0x08000000,
    0x10000000, 0x20000000, 0x40000000, 0x80000000,
    0x1B000000, 0x36000000,
    /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
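
/* The rcon values are the round constants of the AES key schedule placed
 * in the high byte of each word: rcon[i] = x^i in GF(2^8) modulo the AES
 * polynomial x^8 + x^4 + x^3 + x + 1 (0x11B). A sketch of how the table
 * could be generated, for reference only:
 *
 *     byte r = 0x01;
 *     for (i = 0; i < 10; i++) {
 *         rcon[i] = (word32)r << 24;
 *         r = (byte)((r << 1) ^ ((r & 0x80) ? 0x1B : 0x00));
 *     }
 */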

#ifndef WOLFSSL_AES_SMALL_TABLES
static const FLASH_QUALIFIER word32 Te[4][256] = {
{
    0xc66363a5U, 0xf87c7c84U, 0xee777799U, 0xf67b7b8dU,
    0xfff2f20dU, 0xd66b6bbdU, 0xde6f6fb1U, 0x91c5c554U,
    0x60303050U, 0x02010103U, 0xce6767a9U, 0x562b2b7dU,
    0xe7fefe19U, 0xb5d7d762U, 0x4dababe6U, 0xec76769aU,
    0x8fcaca45U, 0x1f82829dU, 0x89c9c940U, 0xfa7d7d87U,
    0xeffafa15U, 0xb25959ebU, 0x8e4747c9U, 0xfbf0f00bU,
    0x41adadecU, 0xb3d4d467U, 0x5fa2a2fdU, 0x45afafeaU,
    0x239c9cbfU, 0x53a4a4f7U, 0xe4727296U, 0x9bc0c05bU,
    0x75b7b7c2U, 0xe1fdfd1cU, 0x3d9393aeU, 0x4c26266aU,
    0x6c36365aU, 0x7e3f3f41U, 0xf5f7f702U, 0x83cccc4fU,
    0x6834345cU, 0x51a5a5f4U, 0xd1e5e534U, 0xf9f1f108U,
    0xe2717193U, 0xabd8d873U, 0x62313153U, 0x2a15153fU,
    0x0804040cU, 0x95c7c752U, 0x46232365U, 0x9dc3c35eU,
    0x30181828U, 0x379696a1U, 0x0a05050fU, 0x2f9a9ab5U,
    0x0e070709U, 0x24121236U, 0x1b80809bU, 0xdfe2e23dU,
    0xcdebeb26U, 0x4e272769U, 0x7fb2b2cdU, 0xea75759fU,
    0x1209091bU, 0x1d83839eU, 0x582c2c74U, 0x341a1a2eU,
    0x361b1b2dU, 0xdc6e6eb2U, 0xb45a5aeeU, 0x5ba0a0fbU,
    0xa45252f6U, 0x763b3b4dU, 0xb7d6d661U, 0x7db3b3ceU,
    0x5229297bU, 0xdde3e33eU, 0x5e2f2f71U, 0x13848497U,
    0xa65353f5U, 0xb9d1d168U, 0x00000000U, 0xc1eded2cU,
    0x40202060U, 0xe3fcfc1fU, 0x79b1b1c8U, 0xb65b5bedU,
    0xd46a6abeU, 0x8dcbcb46U, 0x67bebed9U, 0x7239394bU,
    0x944a4adeU, 0x984c4cd4U, 0xb05858e8U, 0x85cfcf4aU,
    0xbbd0d06bU, 0xc5efef2aU, 0x4faaaae5U, 0xedfbfb16U,
    0x864343c5U, 0x9a4d4dd7U, 0x66333355U, 0x11858594U,
    0x8a4545cfU, 0xe9f9f910U, 0x04020206U, 0xfe7f7f81U,
    0xa05050f0U, 0x783c3c44U, 0x259f9fbaU, 0x4ba8a8e3U,
    0xa25151f3U, 0x5da3a3feU, 0x804040c0U, 0x058f8f8aU,
    0x3f9292adU, 0x219d9dbcU, 0x70383848U, 0xf1f5f504U,
    0x63bcbcdfU, 0x77b6b6c1U, 0xafdada75U, 0x42212163U,
    0x20101030U, 0xe5ffff1aU, 0xfdf3f30eU, 0xbfd2d26dU,
    0x81cdcd4cU, 0x180c0c14U, 0x26131335U, 0xc3ecec2fU,
    0xbe5f5fe1U, 0x359797a2U, 0x884444ccU, 0x2e171739U,
    0x93c4c457U, 0x55a7a7f2U, 0xfc7e7e82U, 0x7a3d3d47U,
    0xc86464acU, 0xba5d5de7U, 0x3219192bU, 0xe6737395U,
    0xc06060a0U, 0x19818198U, 0x9e4f4fd1U, 0xa3dcdc7fU,
    0x44222266U, 0x542a2a7eU, 0x3b9090abU, 0x0b888883U,
    0x8c4646caU, 0xc7eeee29U, 0x6bb8b8d3U, 0x2814143cU,
    0xa7dede79U, 0xbc5e5ee2U, 0x160b0b1dU, 0xaddbdb76U,
    0xdbe0e03bU, 0x64323256U, 0x743a3a4eU, 0x140a0a1eU,
    0x924949dbU, 0x0c06060aU, 0x4824246cU, 0xb85c5ce4U,
    0x9fc2c25dU, 0xbdd3d36eU, 0x43acacefU, 0xc46262a6U,
    0x399191a8U, 0x319595a4U, 0xd3e4e437U, 0xf279798bU,
    0xd5e7e732U, 0x8bc8c843U, 0x6e373759U, 0xda6d6db7U,
    0x018d8d8cU, 0xb1d5d564U, 0x9c4e4ed2U, 0x49a9a9e0U,
    0xd86c6cb4U, 0xac5656faU, 0xf3f4f407U, 0xcfeaea25U,
    0xca6565afU, 0xf47a7a8eU, 0x47aeaee9U, 0x10080818U,
    0x6fbabad5U, 0xf0787888U, 0x4a25256fU, 0x5c2e2e72U,
    0x381c1c24U, 0x57a6a6f1U, 0x73b4b4c7U, 0x97c6c651U,
    0xcbe8e823U, 0xa1dddd7cU, 0xe874749cU, 0x3e1f1f21U,
    0x964b4bddU, 0x61bdbddcU, 0x0d8b8b86U, 0x0f8a8a85U,
    0xe0707090U, 0x7c3e3e42U, 0x71b5b5c4U, 0xcc6666aaU,
    0x904848d8U, 0x06030305U, 0xf7f6f601U, 0x1c0e0e12U,
    0xc26161a3U, 0x6a35355fU, 0xae5757f9U, 0x69b9b9d0U,
    0x17868691U, 0x99c1c158U, 0x3a1d1d27U, 0x279e9eb9U,
    0xd9e1e138U, 0xebf8f813U, 0x2b9898b3U, 0x22111133U,
    0xd26969bbU, 0xa9d9d970U, 0x078e8e89U, 0x339494a7U,
    0x2d9b9bb6U, 0x3c1e1e22U, 0x15878792U, 0xc9e9e920U,
    0x87cece49U, 0xaa5555ffU, 0x50282878U, 0xa5dfdf7aU,
    0x038c8c8fU, 0x59a1a1f8U, 0x09898980U, 0x1a0d0d17U,
    0x65bfbfdaU, 0xd7e6e631U, 0x844242c6U, 0xd06868b8U,
    0x824141c3U, 0x299999b0U, 0x5a2d2d77U, 0x1e0f0f11U,
    0x7bb0b0cbU, 0xa85454fcU, 0x6dbbbbd6U, 0x2c16163aU,
},
{
    0xa5c66363U, 0x84f87c7cU, 0x99ee7777U, 0x8df67b7bU,
    0x0dfff2f2U, 0xbdd66b6bU, 0xb1de6f6fU, 0x5491c5c5U,
    0x50603030U, 0x03020101U, 0xa9ce6767U, 0x7d562b2bU,
    0x19e7fefeU, 0x62b5d7d7U, 0xe64dababU, 0x9aec7676U,
    0x458fcacaU, 0x9d1f8282U, 0x4089c9c9U, 0x87fa7d7dU,
    0x15effafaU, 0xebb25959U, 0xc98e4747U, 0x0bfbf0f0U,
    0xec41adadU, 0x67b3d4d4U, 0xfd5fa2a2U, 0xea45afafU,
    0xbf239c9cU, 0xf753a4a4U, 0x96e47272U, 0x5b9bc0c0U,
    0xc275b7b7U, 0x1ce1fdfdU, 0xae3d9393U, 0x6a4c2626U,
    0x5a6c3636U, 0x417e3f3fU, 0x02f5f7f7U, 0x4f83ccccU,
    0x5c683434U, 0xf451a5a5U, 0x34d1e5e5U, 0x08f9f1f1U,
    0x93e27171U, 0x73abd8d8U, 0x53623131U, 0x3f2a1515U,
    0x0c080404U, 0x5295c7c7U, 0x65462323U, 0x5e9dc3c3U,
    0x28301818U, 0xa1379696U, 0x0f0a0505U, 0xb52f9a9aU,
    0x090e0707U, 0x36241212U, 0x9b1b8080U, 0x3ddfe2e2U,
    0x26cdebebU, 0x694e2727U, 0xcd7fb2b2U, 0x9fea7575U,
    0x1b120909U, 0x9e1d8383U, 0x74582c2cU, 0x2e341a1aU,
    0x2d361b1bU, 0xb2dc6e6eU, 0xeeb45a5aU, 0xfb5ba0a0U,
    0xf6a45252U, 0x4d763b3bU, 0x61b7d6d6U, 0xce7db3b3U,
    0x7b522929U, 0x3edde3e3U, 0x715e2f2fU, 0x97138484U,
    0xf5a65353U, 0x68b9d1d1U, 0x00000000U, 0x2cc1ededU,
    0x60402020U, 0x1fe3fcfcU, 0xc879b1b1U, 0xedb65b5bU,
    0xbed46a6aU, 0x468dcbcbU, 0xd967bebeU, 0x4b723939U,
    0xde944a4aU, 0xd4984c4cU, 0xe8b05858U, 0x4a85cfcfU,
    0x6bbbd0d0U, 0x2ac5efefU, 0xe54faaaaU, 0x16edfbfbU,
    0xc5864343U, 0xd79a4d4dU, 0x55663333U, 0x94118585U,
    0xcf8a4545U, 0x10e9f9f9U, 0x06040202U, 0x81fe7f7fU,
    0xf0a05050U, 0x44783c3cU, 0xba259f9fU, 0xe34ba8a8U,
    0xf3a25151U, 0xfe5da3a3U, 0xc0804040U, 0x8a058f8fU,
    0xad3f9292U, 0xbc219d9dU, 0x48703838U, 0x04f1f5f5U,
    0xdf63bcbcU, 0xc177b6b6U, 0x75afdadaU, 0x63422121U,
    0x30201010U, 0x1ae5ffffU, 0x0efdf3f3U, 0x6dbfd2d2U,
    0x4c81cdcdU, 0x14180c0cU, 0x35261313U, 0x2fc3ececU,
    0xe1be5f5fU, 0xa2359797U, 0xcc884444U, 0x392e1717U,
    0x5793c4c4U, 0xf255a7a7U, 0x82fc7e7eU, 0x477a3d3dU,
    0xacc86464U, 0xe7ba5d5dU, 0x2b321919U, 0x95e67373U,
    0xa0c06060U, 0x98198181U, 0xd19e4f4fU, 0x7fa3dcdcU,
    0x66442222U, 0x7e542a2aU, 0xab3b9090U, 0x830b8888U,
    0xca8c4646U, 0x29c7eeeeU, 0xd36bb8b8U, 0x3c281414U,
    0x79a7dedeU, 0xe2bc5e5eU, 0x1d160b0bU, 0x76addbdbU,
    0x3bdbe0e0U, 0x56643232U, 0x4e743a3aU, 0x1e140a0aU,
    0xdb924949U, 0x0a0c0606U, 0x6c482424U, 0xe4b85c5cU,
    0x5d9fc2c2U, 0x6ebdd3d3U, 0xef43acacU, 0xa6c46262U,
    0xa8399191U, 0xa4319595U, 0x37d3e4e4U, 0x8bf27979U,
    0x32d5e7e7U, 0x438bc8c8U, 0x596e3737U, 0xb7da6d6dU,
    0x8c018d8dU, 0x64b1d5d5U, 0xd29c4e4eU, 0xe049a9a9U,
    0xb4d86c6cU, 0xfaac5656U, 0x07f3f4f4U, 0x25cfeaeaU,
    0xafca6565U, 0x8ef47a7aU, 0xe947aeaeU, 0x18100808U,
    0xd56fbabaU, 0x88f07878U, 0x6f4a2525U, 0x725c2e2eU,
    0x24381c1cU, 0xf157a6a6U, 0xc773b4b4U, 0x5197c6c6U,
    0x23cbe8e8U, 0x7ca1ddddU, 0x9ce87474U, 0x213e1f1fU,
    0xdd964b4bU, 0xdc61bdbdU, 0x860d8b8bU, 0x850f8a8aU,
    0x90e07070U, 0x427c3e3eU, 0xc471b5b5U, 0xaacc6666U,
    0xd8904848U, 0x05060303U, 0x01f7f6f6U, 0x121c0e0eU,
    0xa3c26161U, 0x5f6a3535U, 0xf9ae5757U, 0xd069b9b9U,
    0x91178686U, 0x5899c1c1U, 0x273a1d1dU, 0xb9279e9eU,
    0x38d9e1e1U, 0x13ebf8f8U, 0xb32b9898U, 0x33221111U,
    0xbbd26969U, 0x70a9d9d9U, 0x89078e8eU, 0xa7339494U,
    0xb62d9b9bU, 0x223c1e1eU, 0x92158787U, 0x20c9e9e9U,
    0x4987ceceU, 0xffaa5555U, 0x78502828U, 0x7aa5dfdfU,
    0x8f038c8cU, 0xf859a1a1U, 0x80098989U, 0x171a0d0dU,
    0xda65bfbfU, 0x31d7e6e6U, 0xc6844242U, 0xb8d06868U,
    0xc3824141U, 0xb0299999U, 0x775a2d2dU, 0x111e0f0fU,
    0xcb7bb0b0U, 0xfca85454U, 0xd66dbbbbU, 0x3a2c1616U,
},
{
    0x63a5c663U, 0x7c84f87cU, 0x7799ee77U, 0x7b8df67bU,
    0xf20dfff2U, 0x6bbdd66bU, 0x6fb1de6fU, 0xc55491c5U,
    0x30506030U, 0x01030201U, 0x67a9ce67U, 0x2b7d562bU,
    0xfe19e7feU, 0xd762b5d7U, 0xabe64dabU, 0x769aec76U,
    0xca458fcaU, 0x829d1f82U, 0xc94089c9U, 0x7d87fa7dU,
    0xfa15effaU, 0x59ebb259U, 0x47c98e47U, 0xf00bfbf0U,
    0xadec41adU, 0xd467b3d4U, 0xa2fd5fa2U, 0xafea45afU,
    0x9cbf239cU, 0xa4f753a4U, 0x7296e472U, 0xc05b9bc0U,
    0xb7c275b7U, 0xfd1ce1fdU, 0x93ae3d93U, 0x266a4c26U,
    0x365a6c36U, 0x3f417e3fU, 0xf702f5f7U, 0xcc4f83ccU,
    0x345c6834U, 0xa5f451a5U, 0xe534d1e5U, 0xf108f9f1U,
    0x7193e271U, 0xd873abd8U, 0x31536231U, 0x153f2a15U,
    0x040c0804U, 0xc75295c7U, 0x23654623U, 0xc35e9dc3U,
    0x18283018U, 0x96a13796U, 0x050f0a05U, 0x9ab52f9aU,
    0x07090e07U, 0x12362412U, 0x809b1b80U, 0xe23ddfe2U,
    0xeb26cdebU, 0x27694e27U, 0xb2cd7fb2U, 0x759fea75U,
    0x091b1209U, 0x839e1d83U, 0x2c74582cU, 0x1a2e341aU,
    0x1b2d361bU, 0x6eb2dc6eU, 0x5aeeb45aU, 0xa0fb5ba0U,
    0x52f6a452U, 0x3b4d763bU, 0xd661b7d6U, 0xb3ce7db3U,
    0x297b5229U, 0xe33edde3U, 0x2f715e2fU, 0x84971384U,
    0x53f5a653U, 0xd168b9d1U, 0x00000000U, 0xed2cc1edU,
    0x20604020U, 0xfc1fe3fcU, 0xb1c879b1U, 0x5bedb65bU,
    0x6abed46aU, 0xcb468dcbU, 0xbed967beU, 0x394b7239U,
    0x4ade944aU, 0x4cd4984cU, 0x58e8b058U, 0xcf4a85cfU,
    0xd06bbbd0U, 0xef2ac5efU, 0xaae54faaU, 0xfb16edfbU,
    0x43c58643U, 0x4dd79a4dU, 0x33556633U, 0x85941185U,
    0x45cf8a45U, 0xf910e9f9U, 0x02060402U, 0x7f81fe7fU,
    0x50f0a050U, 0x3c44783cU, 0x9fba259fU, 0xa8e34ba8U,
    0x51f3a251U, 0xa3fe5da3U, 0x40c08040U, 0x8f8a058fU,
    0x92ad3f92U, 0x9dbc219dU, 0x38487038U, 0xf504f1f5U,
    0xbcdf63bcU, 0xb6c177b6U, 0xda75afdaU, 0x21634221U,
    0x10302010U, 0xff1ae5ffU, 0xf30efdf3U, 0xd26dbfd2U,
    0xcd4c81cdU, 0x0c14180cU, 0x13352613U, 0xec2fc3ecU,
    0x5fe1be5fU, 0x97a23597U, 0x44cc8844U, 0x17392e17U,
    0xc45793c4U, 0xa7f255a7U, 0x7e82fc7eU, 0x3d477a3dU,
    0x64acc864U, 0x5de7ba5dU, 0x192b3219U, 0x7395e673U,
    0x60a0c060U, 0x81981981U, 0x4fd19e4fU, 0xdc7fa3dcU,
    0x22664422U, 0x2a7e542aU, 0x90ab3b90U, 0x88830b88U,
    0x46ca8c46U, 0xee29c7eeU, 0xb8d36bb8U, 0x143c2814U,
    0xde79a7deU, 0x5ee2bc5eU, 0x0b1d160bU, 0xdb76addbU,
    0xe03bdbe0U, 0x32566432U, 0x3a4e743aU, 0x0a1e140aU,
    0x49db9249U, 0x060a0c06U, 0x246c4824U, 0x5ce4b85cU,
    0xc25d9fc2U, 0xd36ebdd3U, 0xacef43acU, 0x62a6c462U,
    0x91a83991U, 0x95a43195U, 0xe437d3e4U, 0x798bf279U,
    0xe732d5e7U, 0xc8438bc8U, 0x37596e37U, 0x6db7da6dU,
    0x8d8c018dU, 0xd564b1d5U, 0x4ed29c4eU, 0xa9e049a9U,
    0x6cb4d86cU, 0x56faac56U, 0xf407f3f4U, 0xea25cfeaU,
    0x65afca65U, 0x7a8ef47aU, 0xaee947aeU, 0x08181008U,
    0xbad56fbaU, 0x7888f078U, 0x256f4a25U, 0x2e725c2eU,
    0x1c24381cU, 0xa6f157a6U, 0xb4c773b4U, 0xc65197c6U,
    0xe823cbe8U, 0xdd7ca1ddU, 0x749ce874U, 0x1f213e1fU,
    0x4bdd964bU, 0xbddc61bdU, 0x8b860d8bU, 0x8a850f8aU,
    0x7090e070U, 0x3e427c3eU, 0xb5c471b5U, 0x66aacc66U,
    0x48d89048U, 0x03050603U, 0xf601f7f6U, 0x0e121c0eU,
    0x61a3c261U, 0x355f6a35U, 0x57f9ae57U, 0xb9d069b9U,
    0x86911786U, 0xc15899c1U, 0x1d273a1dU, 0x9eb9279eU,
    0xe138d9e1U, 0xf813ebf8U, 0x98b32b98U, 0x11332211U,
    0x69bbd269U, 0xd970a9d9U, 0x8e89078eU, 0x94a73394U,
    0x9bb62d9bU, 0x1e223c1eU, 0x87921587U, 0xe920c9e9U,
    0xce4987ceU, 0x55ffaa55U, 0x28785028U, 0xdf7aa5dfU,
    0x8c8f038cU, 0xa1f859a1U, 0x89800989U, 0x0d171a0dU,
    0xbfda65bfU, 0xe631d7e6U, 0x42c68442U, 0x68b8d068U,
    0x41c38241U, 0x99b02999U, 0x2d775a2dU, 0x0f111e0fU,
    0xb0cb7bb0U, 0x54fca854U, 0xbbd66dbbU, 0x163a2c16U,
},
{
    0x6363a5c6U, 0x7c7c84f8U, 0x777799eeU, 0x7b7b8df6U,
    0xf2f20dffU, 0x6b6bbdd6U, 0x6f6fb1deU, 0xc5c55491U,
    0x30305060U, 0x01010302U, 0x6767a9ceU, 0x2b2b7d56U,
    0xfefe19e7U, 0xd7d762b5U, 0xababe64dU, 0x76769aecU,
    0xcaca458fU, 0x82829d1fU, 0xc9c94089U, 0x7d7d87faU,
    0xfafa15efU, 0x5959ebb2U, 0x4747c98eU, 0xf0f00bfbU,
    0xadadec41U, 0xd4d467b3U, 0xa2a2fd5fU, 0xafafea45U,
    0x9c9cbf23U, 0xa4a4f753U, 0x727296e4U, 0xc0c05b9bU,
    0xb7b7c275U, 0xfdfd1ce1U, 0x9393ae3dU, 0x26266a4cU,
    0x36365a6cU, 0x3f3f417eU, 0xf7f702f5U, 0xcccc4f83U,
    0x34345c68U, 0xa5a5f451U, 0xe5e534d1U, 0xf1f108f9U,
    0x717193e2U, 0xd8d873abU, 0x31315362U, 0x15153f2aU,
    0x04040c08U, 0xc7c75295U, 0x23236546U, 0xc3c35e9dU,
    0x18182830U, 0x9696a137U, 0x05050f0aU, 0x9a9ab52fU,
    0x0707090eU, 0x12123624U, 0x80809b1bU, 0xe2e23ddfU,
    0xebeb26cdU, 0x2727694eU, 0xb2b2cd7fU, 0x75759feaU,
    0x09091b12U, 0x83839e1dU, 0x2c2c7458U, 0x1a1a2e34U,
    0x1b1b2d36U, 0x6e6eb2dcU, 0x5a5aeeb4U, 0xa0a0fb5bU,
    0x5252f6a4U, 0x3b3b4d76U, 0xd6d661b7U, 0xb3b3ce7dU,
    0x29297b52U, 0xe3e33eddU, 0x2f2f715eU, 0x84849713U,
    0x5353f5a6U, 0xd1d168b9U, 0x00000000U, 0xeded2cc1U,
    0x20206040U, 0xfcfc1fe3U, 0xb1b1c879U, 0x5b5bedb6U,
    0x6a6abed4U, 0xcbcb468dU, 0xbebed967U, 0x39394b72U,
    0x4a4ade94U, 0x4c4cd498U, 0x5858e8b0U, 0xcfcf4a85U,
    0xd0d06bbbU, 0xefef2ac5U, 0xaaaae54fU, 0xfbfb16edU,
    0x4343c586U, 0x4d4dd79aU, 0x33335566U, 0x85859411U,
    0x4545cf8aU, 0xf9f910e9U, 0x02020604U, 0x7f7f81feU,
    0x5050f0a0U, 0x3c3c4478U, 0x9f9fba25U, 0xa8a8e34bU,
    0x5151f3a2U, 0xa3a3fe5dU, 0x4040c080U, 0x8f8f8a05U,
    0x9292ad3fU, 0x9d9dbc21U, 0x38384870U, 0xf5f504f1U,
    0xbcbcdf63U, 0xb6b6c177U, 0xdada75afU, 0x21216342U,
    0x10103020U, 0xffff1ae5U, 0xf3f30efdU, 0xd2d26dbfU,
    0xcdcd4c81U, 0x0c0c1418U, 0x13133526U, 0xecec2fc3U,
    0x5f5fe1beU, 0x9797a235U, 0x4444cc88U, 0x1717392eU,
    0xc4c45793U, 0xa7a7f255U, 0x7e7e82fcU, 0x3d3d477aU,
    0x6464acc8U, 0x5d5de7baU, 0x19192b32U, 0x737395e6U,
    0x6060a0c0U, 0x81819819U, 0x4f4fd19eU, 0xdcdc7fa3U,
    0x22226644U, 0x2a2a7e54U, 0x9090ab3bU, 0x8888830bU,
    0x4646ca8cU, 0xeeee29c7U, 0xb8b8d36bU, 0x14143c28U,
    0xdede79a7U, 0x5e5ee2bcU, 0x0b0b1d16U, 0xdbdb76adU,
    0xe0e03bdbU, 0x32325664U, 0x3a3a4e74U, 0x0a0a1e14U,
    0x4949db92U, 0x06060a0cU, 0x24246c48U, 0x5c5ce4b8U,
    0xc2c25d9fU, 0xd3d36ebdU, 0xacacef43U, 0x6262a6c4U,
    0x9191a839U, 0x9595a431U, 0xe4e437d3U, 0x79798bf2U,
    0xe7e732d5U, 0xc8c8438bU, 0x3737596eU, 0x6d6db7daU,
    0x8d8d8c01U, 0xd5d564b1U, 0x4e4ed29cU, 0xa9a9e049U,
    0x6c6cb4d8U, 0x5656faacU, 0xf4f407f3U, 0xeaea25cfU,
    0x6565afcaU, 0x7a7a8ef4U, 0xaeaee947U, 0x08081810U,
    0xbabad56fU, 0x787888f0U, 0x25256f4aU, 0x2e2e725cU,
    0x1c1c2438U, 0xa6a6f157U, 0xb4b4c773U, 0xc6c65197U,
    0xe8e823cbU, 0xdddd7ca1U, 0x74749ce8U, 0x1f1f213eU,
    0x4b4bdd96U, 0xbdbddc61U, 0x8b8b860dU, 0x8a8a850fU,
    0x707090e0U, 0x3e3e427cU, 0xb5b5c471U, 0x6666aaccU,
    0x4848d890U, 0x03030506U, 0xf6f601f7U, 0x0e0e121cU,
    0x6161a3c2U, 0x35355f6aU, 0x5757f9aeU, 0xb9b9d069U,
    0x86869117U, 0xc1c15899U, 0x1d1d273aU, 0x9e9eb927U,
    0xe1e138d9U, 0xf8f813ebU, 0x9898b32bU, 0x11113322U,
    0x6969bbd2U, 0xd9d970a9U, 0x8e8e8907U, 0x9494a733U,
    0x9b9bb62dU, 0x1e1e223cU, 0x87879215U, 0xe9e920c9U,
    0xcece4987U, 0x5555ffaaU, 0x28287850U, 0xdfdf7aa5U,
    0x8c8c8f03U, 0xa1a1f859U, 0x89898009U, 0x0d0d171aU,
    0xbfbfda65U, 0xe6e631d7U, 0x4242c684U, 0x6868b8d0U,
    0x4141c382U, 0x9999b029U, 0x2d2d775aU, 0x0f0f111eU,
    0xb0b0cb7bU, 0x5454fca8U, 0xbbbbd66dU, 0x16163a2cU,
}
};
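
/* Structure of the Te tables above: Te[0][x] packs the four bytes
 * (2*S[x], S[x], S[x], 3*S[x]) of the combined SubBytes/MixColumns step
 * into one word (multiplication in GF(2^8)), and Te[1..3] hold the same
 * words rotated by 8, 16 and 24 bits. A full round then costs four table
 * lookups and four XORs per output word, roughly (sketch):
 *
 *     t0 = Te[0][s0 >> 24] ^ Te[1][(s1 >> 16) & 0xff] ^
 *          Te[2][(s2 >>  8) & 0xff] ^ Te[3][s3 & 0xff] ^ rk[0];
 */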

#ifdef HAVE_AES_DECRYPT
static const FLASH_QUALIFIER word32 Td[4][256] = {
{
    0x51f4a750U, 0x7e416553U, 0x1a17a4c3U, 0x3a275e96U,
    0x3bab6bcbU, 0x1f9d45f1U, 0xacfa58abU, 0x4be30393U,
    0x2030fa55U, 0xad766df6U, 0x88cc7691U, 0xf5024c25U,
    0x4fe5d7fcU, 0xc52acbd7U, 0x26354480U, 0xb562a38fU,
    0xdeb15a49U, 0x25ba1b67U, 0x45ea0e98U, 0x5dfec0e1U,
    0xc32f7502U, 0x814cf012U, 0x8d4697a3U, 0x6bd3f9c6U,
    0x038f5fe7U, 0x15929c95U, 0xbf6d7aebU, 0x955259daU,
    0xd4be832dU, 0x587421d3U, 0x49e06929U, 0x8ec9c844U,
    0x75c2896aU, 0xf48e7978U, 0x99583e6bU, 0x27b971ddU,
    0xbee14fb6U, 0xf088ad17U, 0xc920ac66U, 0x7dce3ab4U,
    0x63df4a18U, 0xe51a3182U, 0x97513360U, 0x62537f45U,
    0xb16477e0U, 0xbb6bae84U, 0xfe81a01cU, 0xf9082b94U,
    0x70486858U, 0x8f45fd19U, 0x94de6c87U, 0x527bf8b7U,
    0xab73d323U, 0x724b02e2U, 0xe31f8f57U, 0x6655ab2aU,
    0xb2eb2807U, 0x2fb5c203U, 0x86c57b9aU, 0xd33708a5U,
    0x302887f2U, 0x23bfa5b2U, 0x02036abaU, 0xed16825cU,
    0x8acf1c2bU, 0xa779b492U, 0xf307f2f0U, 0x4e69e2a1U,
    0x65daf4cdU, 0x0605bed5U, 0xd134621fU, 0xc4a6fe8aU,
    0x342e539dU, 0xa2f355a0U, 0x058ae132U, 0xa4f6eb75U,
    0x0b83ec39U, 0x4060efaaU, 0x5e719f06U, 0xbd6e1051U,
    0x3e218af9U, 0x96dd063dU, 0xdd3e05aeU, 0x4de6bd46U,
    0x91548db5U, 0x71c45d05U, 0x0406d46fU, 0x605015ffU,
    0x1998fb24U, 0xd6bde997U, 0x894043ccU, 0x67d99e77U,
    0xb0e842bdU, 0x07898b88U, 0xe7195b38U, 0x79c8eedbU,
    0xa17c0a47U, 0x7c420fe9U, 0xf8841ec9U, 0x00000000U,
    0x09808683U, 0x322bed48U, 0x1e1170acU, 0x6c5a724eU,
    0xfd0efffbU, 0x0f853856U, 0x3daed51eU, 0x362d3927U,
    0x0a0fd964U, 0x685ca621U, 0x9b5b54d1U, 0x24362e3aU,
    0x0c0a67b1U, 0x9357e70fU, 0xb4ee96d2U, 0x1b9b919eU,
    0x80c0c54fU, 0x61dc20a2U, 0x5a774b69U, 0x1c121a16U,
    0xe293ba0aU, 0xc0a02ae5U, 0x3c22e043U, 0x121b171dU,
    0x0e090d0bU, 0xf28bc7adU, 0x2db6a8b9U, 0x141ea9c8U,
    0x57f11985U, 0xaf75074cU, 0xee99ddbbU, 0xa37f60fdU,
    0xf701269fU, 0x5c72f5bcU, 0x44663bc5U, 0x5bfb7e34U,
    0x8b432976U, 0xcb23c6dcU, 0xb6edfc68U, 0xb8e4f163U,
    0xd731dccaU, 0x42638510U, 0x13972240U, 0x84c61120U,
    0x854a247dU, 0xd2bb3df8U, 0xaef93211U, 0xc729a16dU,
    0x1d9e2f4bU, 0xdcb230f3U, 0x0d8652ecU, 0x77c1e3d0U,
    0x2bb3166cU, 0xa970b999U, 0x119448faU, 0x47e96422U,
    0xa8fc8cc4U, 0xa0f03f1aU, 0x567d2cd8U, 0x223390efU,
    0x87494ec7U, 0xd938d1c1U, 0x8ccaa2feU, 0x98d40b36U,
    0xa6f581cfU, 0xa57ade28U, 0xdab78e26U, 0x3fadbfa4U,
    0x2c3a9de4U, 0x5078920dU, 0x6a5fcc9bU, 0x547e4662U,
    0xf68d13c2U, 0x90d8b8e8U, 0x2e39f75eU, 0x82c3aff5U,
    0x9f5d80beU, 0x69d0937cU, 0x6fd52da9U, 0xcf2512b3U,
    0xc8ac993bU, 0x10187da7U, 0xe89c636eU, 0xdb3bbb7bU,
    0xcd267809U, 0x6e5918f4U, 0xec9ab701U, 0x834f9aa8U,
    0xe6956e65U, 0xaaffe67eU, 0x21bccf08U, 0xef15e8e6U,
    0xbae79bd9U, 0x4a6f36ceU, 0xea9f09d4U, 0x29b07cd6U,
    0x31a4b2afU, 0x2a3f2331U, 0xc6a59430U, 0x35a266c0U,
    0x744ebc37U, 0xfc82caa6U, 0xe090d0b0U, 0x33a7d815U,
    0xf104984aU, 0x41ecdaf7U, 0x7fcd500eU, 0x1791f62fU,
    0x764dd68dU, 0x43efb04dU, 0xccaa4d54U, 0xe49604dfU,
    0x9ed1b5e3U, 0x4c6a881bU, 0xc12c1fb8U, 0x4665517fU,
    0x9d5eea04U, 0x018c355dU, 0xfa877473U, 0xfb0b412eU,
    0xb3671d5aU, 0x92dbd252U, 0xe9105633U, 0x6dd64713U,
    0x9ad7618cU, 0x37a10c7aU, 0x59f8148eU, 0xeb133c89U,
    0xcea927eeU, 0xb761c935U, 0xe11ce5edU, 0x7a47b13cU,
    0x9cd2df59U, 0x55f2733fU, 0x1814ce79U, 0x73c737bfU,
    0x53f7cdeaU, 0x5ffdaa5bU, 0xdf3d6f14U, 0x7844db86U,
    0xcaaff381U, 0xb968c43eU, 0x3824342cU, 0xc2a3405fU,
    0x161dc372U, 0xbce2250cU, 0x283c498bU, 0xff0d9541U,
    0x39a80171U, 0x080cb3deU, 0xd8b4e49cU, 0x6456c190U,
    0x7bcb8461U, 0xd532b670U, 0x486c5c74U, 0xd0b85742U,
},
{
    0x5051f4a7U, 0x537e4165U, 0xc31a17a4U, 0x963a275eU,
    0xcb3bab6bU, 0xf11f9d45U, 0xabacfa58U, 0x934be303U,
    0x552030faU, 0xf6ad766dU, 0x9188cc76U, 0x25f5024cU,
    0xfc4fe5d7U, 0xd7c52acbU, 0x80263544U, 0x8fb562a3U,
    0x49deb15aU, 0x6725ba1bU, 0x9845ea0eU, 0xe15dfec0U,
    0x02c32f75U, 0x12814cf0U, 0xa38d4697U, 0xc66bd3f9U,
    0xe7038f5fU, 0x9515929cU, 0xebbf6d7aU, 0xda955259U,
    0x2dd4be83U, 0xd3587421U, 0x2949e069U, 0x448ec9c8U,
    0x6a75c289U, 0x78f48e79U, 0x6b99583eU, 0xdd27b971U,
    0xb6bee14fU, 0x17f088adU, 0x66c920acU, 0xb47dce3aU,
    0x1863df4aU, 0x82e51a31U, 0x60975133U, 0x4562537fU,
    0xe0b16477U, 0x84bb6baeU, 0x1cfe81a0U, 0x94f9082bU,
    0x58704868U, 0x198f45fdU, 0x8794de6cU, 0xb7527bf8U,
    0x23ab73d3U, 0xe2724b02U, 0x57e31f8fU, 0x2a6655abU,
    0x07b2eb28U, 0x032fb5c2U, 0x9a86c57bU, 0xa5d33708U,
    0xf2302887U, 0xb223bfa5U, 0xba02036aU, 0x5ced1682U,
    0x2b8acf1cU, 0x92a779b4U, 0xf0f307f2U, 0xa14e69e2U,
    0xcd65daf4U, 0xd50605beU, 0x1fd13462U, 0x8ac4a6feU,
    0x9d342e53U, 0xa0a2f355U, 0x32058ae1U, 0x75a4f6ebU,
    0x390b83ecU, 0xaa4060efU, 0x065e719fU, 0x51bd6e10U,
    0xf93e218aU, 0x3d96dd06U, 0xaedd3e05U, 0x464de6bdU,
    0xb591548dU, 0x0571c45dU, 0x6f0406d4U, 0xff605015U,
    0x241998fbU, 0x97d6bde9U, 0xcc894043U, 0x7767d99eU,
    0xbdb0e842U, 0x8807898bU, 0x38e7195bU, 0xdb79c8eeU,
    0x47a17c0aU, 0xe97c420fU, 0xc9f8841eU, 0x00000000U,
    0x83098086U, 0x48322bedU, 0xac1e1170U, 0x4e6c5a72U,
    0xfbfd0effU, 0x560f8538U, 0x1e3daed5U, 0x27362d39U,
    0x640a0fd9U, 0x21685ca6U, 0xd19b5b54U, 0x3a24362eU,
    0xb10c0a67U, 0x0f9357e7U, 0xd2b4ee96U, 0x9e1b9b91U,
    0x4f80c0c5U, 0xa261dc20U, 0x695a774bU, 0x161c121aU,
    0x0ae293baU, 0xe5c0a02aU, 0x433c22e0U, 0x1d121b17U,
    0x0b0e090dU, 0xadf28bc7U, 0xb92db6a8U, 0xc8141ea9U,
    0x8557f119U, 0x4caf7507U, 0xbbee99ddU, 0xfda37f60U,
    0x9ff70126U, 0xbc5c72f5U, 0xc544663bU, 0x345bfb7eU,
    0x768b4329U, 0xdccb23c6U, 0x68b6edfcU, 0x63b8e4f1U,
    0xcad731dcU, 0x10426385U, 0x40139722U, 0x2084c611U,
    0x7d854a24U, 0xf8d2bb3dU, 0x11aef932U, 0x6dc729a1U,
    0x4b1d9e2fU, 0xf3dcb230U, 0xec0d8652U, 0xd077c1e3U,
    0x6c2bb316U, 0x99a970b9U, 0xfa119448U, 0x2247e964U,
    0xc4a8fc8cU, 0x1aa0f03fU, 0xd8567d2cU, 0xef223390U,
    0xc787494eU, 0xc1d938d1U, 0xfe8ccaa2U, 0x3698d40bU,
    0xcfa6f581U, 0x28a57adeU, 0x26dab78eU, 0xa43fadbfU,
    0xe42c3a9dU, 0x0d507892U, 0x9b6a5fccU, 0x62547e46U,
    0xc2f68d13U, 0xe890d8b8U, 0x5e2e39f7U, 0xf582c3afU,
    0xbe9f5d80U, 0x7c69d093U, 0xa96fd52dU, 0xb3cf2512U,
    0x3bc8ac99U, 0xa710187dU, 0x6ee89c63U, 0x7bdb3bbbU,
    0x09cd2678U, 0xf46e5918U, 0x01ec9ab7U, 0xa8834f9aU,
    0x65e6956eU, 0x7eaaffe6U, 0x0821bccfU, 0xe6ef15e8U,
    0xd9bae79bU, 0xce4a6f36U, 0xd4ea9f09U, 0xd629b07cU,
    0xaf31a4b2U, 0x312a3f23U, 0x30c6a594U, 0xc035a266U,
    0x37744ebcU, 0xa6fc82caU, 0xb0e090d0U, 0x1533a7d8U,
    0x4af10498U, 0xf741ecdaU, 0x0e7fcd50U, 0x2f1791f6U,
    0x8d764dd6U, 0x4d43efb0U, 0x54ccaa4dU, 0xdfe49604U,
    0xe39ed1b5U, 0x1b4c6a88U, 0xb8c12c1fU, 0x7f466551U,
    0x049d5eeaU, 0x5d018c35U, 0x73fa8774U, 0x2efb0b41U,
    0x5ab3671dU, 0x5292dbd2U, 0x33e91056U, 0x136dd647U,
    0x8c9ad761U, 0x7a37a10cU, 0x8e59f814U, 0x89eb133cU,
    0xeecea927U, 0x35b761c9U, 0xede11ce5U, 0x3c7a47b1U,
    0x599cd2dfU, 0x3f55f273U, 0x791814ceU, 0xbf73c737U,
    0xea53f7cdU, 0x5b5ffdaaU, 0x14df3d6fU, 0x867844dbU,
    0x81caaff3U, 0x3eb968c4U, 0x2c382434U, 0x5fc2a340U,
    0x72161dc3U, 0x0cbce225U, 0x8b283c49U, 0x41ff0d95U,
    0x7139a801U, 0xde080cb3U, 0x9cd8b4e4U, 0x906456c1U,
    0x617bcb84U, 0x70d532b6U, 0x74486c5cU, 0x42d0b857U,
},
{
    0xa75051f4U, 0x65537e41U, 0xa4c31a17U, 0x5e963a27U,
    0x6bcb3babU, 0x45f11f9dU, 0x58abacfaU, 0x03934be3U,
    0xfa552030U, 0x6df6ad76U, 0x769188ccU, 0x4c25f502U,
    0xd7fc4fe5U, 0xcbd7c52aU, 0x44802635U, 0xa38fb562U,
    0x5a49deb1U, 0x1b6725baU, 0x0e9845eaU, 0xc0e15dfeU,
    0x7502c32fU, 0xf012814cU, 0x97a38d46U, 0xf9c66bd3U,
    0x5fe7038fU, 0x9c951592U, 0x7aebbf6dU, 0x59da9552U,
    0x832dd4beU, 0x21d35874U, 0x692949e0U, 0xc8448ec9U,
    0x896a75c2U, 0x7978f48eU, 0x3e6b9958U, 0x71dd27b9U,
    0x4fb6bee1U, 0xad17f088U, 0xac66c920U, 0x3ab47dceU,
    0x4a1863dfU, 0x3182e51aU, 0x33609751U, 0x7f456253U,
    0x77e0b164U, 0xae84bb6bU, 0xa01cfe81U, 0x2b94f908U,
    0x68587048U, 0xfd198f45U, 0x6c8794deU, 0xf8b7527bU,
    0xd323ab73U, 0x02e2724bU, 0x8f57e31fU, 0xab2a6655U,
    0x2807b2ebU, 0xc2032fb5U, 0x7b9a86c5U, 0x08a5d337U,
    0x87f23028U, 0xa5b223bfU, 0x6aba0203U, 0x825ced16U,
    0x1c2b8acfU, 0xb492a779U, 0xf2f0f307U, 0xe2a14e69U,
    0xf4cd65daU, 0xbed50605U, 0x621fd134U, 0xfe8ac4a6U,
    0x539d342eU, 0x55a0a2f3U, 0xe132058aU, 0xeb75a4f6U,
    0xec390b83U, 0xefaa4060U, 0x9f065e71U, 0x1051bd6eU,
    0x8af93e21U, 0x063d96ddU, 0x05aedd3eU, 0xbd464de6U,
    0x8db59154U, 0x5d0571c4U, 0xd46f0406U, 0x15ff6050U,
    0xfb241998U, 0xe997d6bdU, 0x43cc8940U, 0x9e7767d9U,
    0x42bdb0e8U, 0x8b880789U, 0x5b38e719U, 0xeedb79c8U,
    0x0a47a17cU, 0x0fe97c42U, 0x1ec9f884U, 0x00000000U,
    0x86830980U, 0xed48322bU, 0x70ac1e11U, 0x724e6c5aU,
    0xfffbfd0eU, 0x38560f85U, 0xd51e3daeU, 0x3927362dU,
    0xd9640a0fU, 0xa621685cU, 0x54d19b5bU, 0x2e3a2436U,
    0x67b10c0aU, 0xe70f9357U, 0x96d2b4eeU, 0x919e1b9bU,
    0xc54f80c0U, 0x20a261dcU, 0x4b695a77U, 0x1a161c12U,
    0xba0ae293U, 0x2ae5c0a0U, 0xe0433c22U, 0x171d121bU,
    0x0d0b0e09U, 0xc7adf28bU, 0xa8b92db6U, 0xa9c8141eU,
    0x198557f1U, 0x074caf75U, 0xddbbee99U, 0x60fda37fU,
    0x269ff701U, 0xf5bc5c72U, 0x3bc54466U, 0x7e345bfbU,
    0x29768b43U, 0xc6dccb23U, 0xfc68b6edU, 0xf163b8e4U,
    0xdccad731U, 0x85104263U, 0x22401397U, 0x112084c6U,
    0x247d854aU, 0x3df8d2bbU, 0x3211aef9U, 0xa16dc729U,
    0x2f4b1d9eU, 0x30f3dcb2U, 0x52ec0d86U, 0xe3d077c1U,
    0x166c2bb3U, 0xb999a970U, 0x48fa1194U, 0x642247e9U,
    0x8cc4a8fcU, 0x3f1aa0f0U, 0x2cd8567dU, 0x90ef2233U,
    0x4ec78749U, 0xd1c1d938U, 0xa2fe8ccaU, 0x0b3698d4U,
    0x81cfa6f5U, 0xde28a57aU, 0x8e26dab7U, 0xbfa43fadU,
    0x9de42c3aU, 0x920d5078U, 0xcc9b6a5fU, 0x4662547eU,
    0x13c2f68dU, 0xb8e890d8U, 0xf75e2e39U, 0xaff582c3U,
    0x80be9f5dU, 0x937c69d0U, 0x2da96fd5U, 0x12b3cf25U,
    0x993bc8acU, 0x7da71018U, 0x636ee89cU, 0xbb7bdb3bU,
    0x7809cd26U, 0x18f46e59U, 0xb701ec9aU, 0x9aa8834fU,
    0x6e65e695U, 0xe67eaaffU, 0xcf0821bcU, 0xe8e6ef15U,
    0x9bd9bae7U, 0x36ce4a6fU, 0x09d4ea9fU, 0x7cd629b0U,
    0xb2af31a4U, 0x23312a3fU, 0x9430c6a5U, 0x66c035a2U,
    0xbc37744eU, 0xcaa6fc82U, 0xd0b0e090U, 0xd81533a7U,
    0x984af104U, 0xdaf741ecU, 0x500e7fcdU, 0xf62f1791U,
    0xd68d764dU, 0xb04d43efU, 0x4d54ccaaU, 0x04dfe496U,
    0xb5e39ed1U, 0x881b4c6aU, 0x1fb8c12cU, 0x517f4665U,
    0xea049d5eU, 0x355d018cU, 0x7473fa87U, 0x412efb0bU,
    0x1d5ab367U, 0xd25292dbU, 0x5633e910U, 0x47136dd6U,
    0x618c9ad7U, 0x0c7a37a1U, 0x148e59f8U, 0x3c89eb13U,
    0x27eecea9U, 0xc935b761U, 0xe5ede11cU, 0xb13c7a47U,
    0xdf599cd2U, 0x733f55f2U, 0xce791814U, 0x37bf73c7U,
    0xcdea53f7U, 0xaa5b5ffdU, 0x6f14df3dU, 0xdb867844U,
    0xf381caafU, 0xc43eb968U, 0x342c3824U, 0x405fc2a3U,
    0xc372161dU, 0x250cbce2U, 0x498b283cU, 0x9541ff0dU,
    0x017139a8U, 0xb3de080cU, 0xe49cd8b4U, 0xc1906456U,
    0x84617bcbU, 0xb670d532U, 0x5c74486cU, 0x5742d0b8U,
},
{
    0xf4a75051U, 0x4165537eU, 0x17a4c31aU, 0x275e963aU,
    0xab6bcb3bU, 0x9d45f11fU, 0xfa58abacU, 0xe303934bU,
    0x30fa5520U, 0x766df6adU, 0xcc769188U, 0x024c25f5U,
    0xe5d7fc4fU, 0x2acbd7c5U, 0x35448026U, 0x62a38fb5U,
    0xb15a49deU, 0xba1b6725U, 0xea0e9845U, 0xfec0e15dU,
    0x2f7502c3U, 0x4cf01281U, 0x4697a38dU, 0xd3f9c66bU,
    0x8f5fe703U, 0x929c9515U, 0x6d7aebbfU, 0x5259da95U,
    0xbe832dd4U, 0x7421d358U, 0xe0692949U, 0xc9c8448eU,
    0xc2896a75U, 0x8e7978f4U, 0x583e6b99U, 0xb971dd27U,
    0xe14fb6beU, 0x88ad17f0U, 0x20ac66c9U, 0xce3ab47dU,
    0xdf4a1863U, 0x1a3182e5U, 0x51336097U, 0x537f4562U,
    0x6477e0b1U, 0x6bae84bbU, 0x81a01cfeU, 0x082b94f9U,
    0x48685870U, 0x45fd198fU, 0xde6c8794U, 0x7bf8b752U,
    0x73d323abU, 0x4b02e272U, 0x1f8f57e3U, 0x55ab2a66U,
    0xeb2807b2U, 0xb5c2032fU, 0xc57b9a86U, 0x3708a5d3U,
    0x2887f230U, 0xbfa5b223U, 0x036aba02U, 0x16825cedU,
    0xcf1c2b8aU, 0x79b492a7U, 0x07f2f0f3U, 0x69e2a14eU,
    0xdaf4cd65U, 0x05bed506U, 0x34621fd1U, 0xa6fe8ac4U,
    0x2e539d34U, 0xf355a0a2U, 0x8ae13205U, 0xf6eb75a4U,
    0x83ec390bU, 0x60efaa40U, 0x719f065eU, 0x6e1051bdU,
    0x218af93eU, 0xdd063d96U, 0x3e05aeddU, 0xe6bd464dU,
    0x548db591U, 0xc45d0571U, 0x06d46f04U, 0x5015ff60U,
    0x98fb2419U, 0xbde997d6U, 0x4043cc89U, 0xd99e7767U,
    0xe842bdb0U, 0x898b8807U, 0x195b38e7U, 0xc8eedb79U,
    0x7c0a47a1U, 0x420fe97cU, 0x841ec9f8U, 0x00000000U,
    0x80868309U, 0x2bed4832U, 0x1170ac1eU, 0x5a724e6cU,
    0x0efffbfdU, 0x8538560fU, 0xaed51e3dU, 0x2d392736U,
    0x0fd9640aU, 0x5ca62168U, 0x5b54d19bU, 0x362e3a24U,
    0x0a67b10cU, 0x57e70f93U, 0xee96d2b4U, 0x9b919e1bU,
    0xc0c54f80U, 0xdc20a261U, 0x774b695aU, 0x121a161cU,
    0x93ba0ae2U, 0xa02ae5c0U, 0x22e0433cU, 0x1b171d12U,
    0x090d0b0eU, 0x8bc7adf2U, 0xb6a8b92dU, 0x1ea9c814U,
    0xf1198557U, 0x75074cafU, 0x99ddbbeeU, 0x7f60fda3U,
    0x01269ff7U, 0x72f5bc5cU, 0x663bc544U, 0xfb7e345bU,
    0x4329768bU, 0x23c6dccbU, 0xedfc68b6U, 0xe4f163b8U,
    0x31dccad7U, 0x63851042U, 0x97224013U, 0xc6112084U,
    0x4a247d85U, 0xbb3df8d2U, 0xf93211aeU, 0x29a16dc7U,
    0x9e2f4b1dU, 0xb230f3dcU, 0x8652ec0dU, 0xc1e3d077U,
    0xb3166c2bU, 0x70b999a9U, 0x9448fa11U, 0xe9642247U,
    0xfc8cc4a8U, 0xf03f1aa0U, 0x7d2cd856U, 0x3390ef22U,
    0x494ec787U, 0x38d1c1d9U, 0xcaa2fe8cU, 0xd40b3698U,
    0xf581cfa6U, 0x7ade28a5U, 0xb78e26daU, 0xadbfa43fU,
    0x3a9de42cU, 0x78920d50U, 0x5fcc9b6aU, 0x7e466254U,
    0x8d13c2f6U, 0xd8b8e890U, 0x39f75e2eU, 0xc3aff582U,
    0x5d80be9fU, 0xd0937c69U, 0xd52da96fU, 0x2512b3cfU,
    0xac993bc8U, 0x187da710U, 0x9c636ee8U, 0x3bbb7bdbU,
    0x267809cdU, 0x5918f46eU, 0x9ab701ecU, 0x4f9aa883U,
    0x956e65e6U, 0xffe67eaaU, 0xbccf0821U, 0x15e8e6efU,
    0xe79bd9baU, 0x6f36ce4aU, 0x9f09d4eaU, 0xb07cd629U,
    0xa4b2af31U, 0x3f23312aU, 0xa59430c6U, 0xa266c035U,
    0x4ebc3774U, 0x82caa6fcU, 0x90d0b0e0U, 0xa7d81533U,
    0x04984af1U, 0xecdaf741U, 0xcd500e7fU, 0x91f62f17U,
    0x4dd68d76U, 0xefb04d43U, 0xaa4d54ccU, 0x9604dfe4U,
    0xd1b5e39eU, 0x6a881b4cU, 0x2c1fb8c1U, 0x65517f46U,
    0x5eea049dU, 0x8c355d01U, 0x877473faU, 0x0b412efbU,
    0x671d5ab3U, 0xdbd25292U, 0x105633e9U, 0xd647136dU,
    0xd7618c9aU, 0xa10c7a37U, 0xf8148e59U, 0x133c89ebU,
    0xa927eeceU, 0x61c935b7U, 0x1ce5ede1U, 0x47b13c7aU,
    0xd2df599cU, 0xf2733f55U, 0x14ce7918U, 0xc737bf73U,
    0xf7cdea53U, 0xfdaa5b5fU, 0x3d6f14dfU, 0x44db8678U,
    0xaff381caU, 0x68c43eb9U, 0x24342c38U, 0xa3405fc2U,
    0x1dc37216U, 0xe2250cbcU, 0x3c498b28U, 0x0d9541ffU,
    0xa8017139U, 0x0cb3de08U, 0xb4e49cd8U, 0x56c19064U,
    0xcb84617bU, 0x32b670d5U, 0x6c5c7448U, 0xb85742d0U,
}
};
#endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_AES_SMALL_TABLES */
  1460. #ifdef HAVE_AES_DECRYPT
  1461. #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC)) \
  1462. || defined(WOLFSSL_AES_DIRECT)
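/* Td4 is the plain inverse S-box (InvSubBytes only). The final decryption
 * round omits InvMixColumns, so it cannot reuse the combined Td tables and
 * instead substitutes bytes directly through this table. */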
static const FLASH_QUALIFIER byte Td4[256] =
{
    0x52U, 0x09U, 0x6aU, 0xd5U, 0x30U, 0x36U, 0xa5U, 0x38U,
    0xbfU, 0x40U, 0xa3U, 0x9eU, 0x81U, 0xf3U, 0xd7U, 0xfbU,
    0x7cU, 0xe3U, 0x39U, 0x82U, 0x9bU, 0x2fU, 0xffU, 0x87U,
    0x34U, 0x8eU, 0x43U, 0x44U, 0xc4U, 0xdeU, 0xe9U, 0xcbU,
    0x54U, 0x7bU, 0x94U, 0x32U, 0xa6U, 0xc2U, 0x23U, 0x3dU,
    0xeeU, 0x4cU, 0x95U, 0x0bU, 0x42U, 0xfaU, 0xc3U, 0x4eU,
    0x08U, 0x2eU, 0xa1U, 0x66U, 0x28U, 0xd9U, 0x24U, 0xb2U,
    0x76U, 0x5bU, 0xa2U, 0x49U, 0x6dU, 0x8bU, 0xd1U, 0x25U,
    0x72U, 0xf8U, 0xf6U, 0x64U, 0x86U, 0x68U, 0x98U, 0x16U,
    0xd4U, 0xa4U, 0x5cU, 0xccU, 0x5dU, 0x65U, 0xb6U, 0x92U,
    0x6cU, 0x70U, 0x48U, 0x50U, 0xfdU, 0xedU, 0xb9U, 0xdaU,
    0x5eU, 0x15U, 0x46U, 0x57U, 0xa7U, 0x8dU, 0x9dU, 0x84U,
    0x90U, 0xd8U, 0xabU, 0x00U, 0x8cU, 0xbcU, 0xd3U, 0x0aU,
    0xf7U, 0xe4U, 0x58U, 0x05U, 0xb8U, 0xb3U, 0x45U, 0x06U,
    0xd0U, 0x2cU, 0x1eU, 0x8fU, 0xcaU, 0x3fU, 0x0fU, 0x02U,
    0xc1U, 0xafU, 0xbdU, 0x03U, 0x01U, 0x13U, 0x8aU, 0x6bU,
    0x3aU, 0x91U, 0x11U, 0x41U, 0x4fU, 0x67U, 0xdcU, 0xeaU,
    0x97U, 0xf2U, 0xcfU, 0xceU, 0xf0U, 0xb4U, 0xe6U, 0x73U,
    0x96U, 0xacU, 0x74U, 0x22U, 0xe7U, 0xadU, 0x35U, 0x85U,
    0xe2U, 0xf9U, 0x37U, 0xe8U, 0x1cU, 0x75U, 0xdfU, 0x6eU,
    0x47U, 0xf1U, 0x1aU, 0x71U, 0x1dU, 0x29U, 0xc5U, 0x89U,
    0x6fU, 0xb7U, 0x62U, 0x0eU, 0xaaU, 0x18U, 0xbeU, 0x1bU,
    0xfcU, 0x56U, 0x3eU, 0x4bU, 0xc6U, 0xd2U, 0x79U, 0x20U,
    0x9aU, 0xdbU, 0xc0U, 0xfeU, 0x78U, 0xcdU, 0x5aU, 0xf4U,
    0x1fU, 0xddU, 0xa8U, 0x33U, 0x88U, 0x07U, 0xc7U, 0x31U,
    0xb1U, 0x12U, 0x10U, 0x59U, 0x27U, 0x80U, 0xecU, 0x5fU,
    0x60U, 0x51U, 0x7fU, 0xa9U, 0x19U, 0xb5U, 0x4aU, 0x0dU,
    0x2dU, 0xe5U, 0x7aU, 0x9fU, 0x93U, 0xc9U, 0x9cU, 0xefU,
    0xa0U, 0xe0U, 0x3bU, 0x4dU, 0xaeU, 0x2aU, 0xf5U, 0xb0U,
    0xc8U, 0xebU, 0xbbU, 0x3cU, 0x83U, 0x53U, 0x99U, 0x61U,
    0x17U, 0x2bU, 0x04U, 0x7eU, 0xbaU, 0x77U, 0xd6U, 0x26U,
    0xe1U, 0x69U, 0x14U, 0x63U, 0x55U, 0x21U, 0x0cU, 0x7dU,
};
#endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
#endif /* HAVE_AES_DECRYPT */
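/* GETBYTE(x, y) extracts byte y of the word32 x, with y == 0 selecting the
 * least-significant byte. All table lookups below are indexed this way. */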
#define GETBYTE(x, y) (word32)((byte)((x) >> (8 * (y))))

#ifdef WOLFSSL_AES_SMALL_TABLES
static const byte Tsbox[256] = {
    0x63U, 0x7cU, 0x77U, 0x7bU, 0xf2U, 0x6bU, 0x6fU, 0xc5U,
    0x30U, 0x01U, 0x67U, 0x2bU, 0xfeU, 0xd7U, 0xabU, 0x76U,
    0xcaU, 0x82U, 0xc9U, 0x7dU, 0xfaU, 0x59U, 0x47U, 0xf0U,
    0xadU, 0xd4U, 0xa2U, 0xafU, 0x9cU, 0xa4U, 0x72U, 0xc0U,
    0xb7U, 0xfdU, 0x93U, 0x26U, 0x36U, 0x3fU, 0xf7U, 0xccU,
    0x34U, 0xa5U, 0xe5U, 0xf1U, 0x71U, 0xd8U, 0x31U, 0x15U,
    0x04U, 0xc7U, 0x23U, 0xc3U, 0x18U, 0x96U, 0x05U, 0x9aU,
    0x07U, 0x12U, 0x80U, 0xe2U, 0xebU, 0x27U, 0xb2U, 0x75U,
    0x09U, 0x83U, 0x2cU, 0x1aU, 0x1bU, 0x6eU, 0x5aU, 0xa0U,
    0x52U, 0x3bU, 0xd6U, 0xb3U, 0x29U, 0xe3U, 0x2fU, 0x84U,
    0x53U, 0xd1U, 0x00U, 0xedU, 0x20U, 0xfcU, 0xb1U, 0x5bU,
    0x6aU, 0xcbU, 0xbeU, 0x39U, 0x4aU, 0x4cU, 0x58U, 0xcfU,
    0xd0U, 0xefU, 0xaaU, 0xfbU, 0x43U, 0x4dU, 0x33U, 0x85U,
    0x45U, 0xf9U, 0x02U, 0x7fU, 0x50U, 0x3cU, 0x9fU, 0xa8U,
    0x51U, 0xa3U, 0x40U, 0x8fU, 0x92U, 0x9dU, 0x38U, 0xf5U,
    0xbcU, 0xb6U, 0xdaU, 0x21U, 0x10U, 0xffU, 0xf3U, 0xd2U,
    0xcdU, 0x0cU, 0x13U, 0xecU, 0x5fU, 0x97U, 0x44U, 0x17U,
    0xc4U, 0xa7U, 0x7eU, 0x3dU, 0x64U, 0x5dU, 0x19U, 0x73U,
    0x60U, 0x81U, 0x4fU, 0xdcU, 0x22U, 0x2aU, 0x90U, 0x88U,
    0x46U, 0xeeU, 0xb8U, 0x14U, 0xdeU, 0x5eU, 0x0bU, 0xdbU,
    0xe0U, 0x32U, 0x3aU, 0x0aU, 0x49U, 0x06U, 0x24U, 0x5cU,
    0xc2U, 0xd3U, 0xacU, 0x62U, 0x91U, 0x95U, 0xe4U, 0x79U,
    0xe7U, 0xc8U, 0x37U, 0x6dU, 0x8dU, 0xd5U, 0x4eU, 0xa9U,
    0x6cU, 0x56U, 0xf4U, 0xeaU, 0x65U, 0x7aU, 0xaeU, 0x08U,
    0xbaU, 0x78U, 0x25U, 0x2eU, 0x1cU, 0xa6U, 0xb4U, 0xc6U,
    0xe8U, 0xddU, 0x74U, 0x1fU, 0x4bU, 0xbdU, 0x8bU, 0x8aU,
    0x70U, 0x3eU, 0xb5U, 0x66U, 0x48U, 0x03U, 0xf6U, 0x0eU,
    0x61U, 0x35U, 0x57U, 0xb9U, 0x86U, 0xc1U, 0x1dU, 0x9eU,
    0xe1U, 0xf8U, 0x98U, 0x11U, 0x69U, 0xd9U, 0x8eU, 0x94U,
    0x9bU, 0x1eU, 0x87U, 0xe9U, 0xceU, 0x55U, 0x28U, 0xdfU,
    0x8cU, 0xa1U, 0x89U, 0x0dU, 0xbfU, 0xe6U, 0x42U, 0x68U,
    0x41U, 0x99U, 0x2dU, 0x0fU, 0xb0U, 0x54U, 0xbbU, 0x16U
};
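/* AES_XTIME multiplies a field element by x (i.e. by 2) in GF(2^8) with the
 * AES reduction polynomial x^8 + x^4 + x^3 + x + 1 (0x11b). The expression
 * (0 - ((x) >> 7)) is 0xff exactly when the top bit is set, so the
 * conditional XOR with 0x1b is branchless and constant-time. */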
#define AES_XTIME(x) ((byte)((byte)((x) << 1) ^ ((0 - ((x) >> 7)) & 0x1b)))
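
/* col_mul computes one output byte of the MixColumns transform:
 * 2*s[i2] ^ 3*s[i3] ^ s[ia] ^ s[ib]. Since 3*b = xtime(b) ^ b, the term
 * xtime(s[i2] ^ s[i3]) ^ s[i3] folds both multiplications into a single
 * AES_XTIME call. */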
static WARN_UNUSED_RESULT word32 col_mul(
    word32 t, int i2, int i3, int ia, int ib)
{
    byte t3 = GETBYTE(t, i3);
    byte tm = AES_XTIME(GETBYTE(t, i2) ^ t3);

    return GETBYTE(t, ia) ^ GETBYTE(t, ib) ^ t3 ^ tm;
}
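
/* inv_col_mul computes one output byte of the InvMixColumns transform:
 * 9*s[i9] ^ 11*s[ib] ^ 13*s[id] ^ 14*s[ie]. The nested AES_XTIME calls
 * build the factors 9 = 8+1, 11 = 8+2+1, 13 = 8+4+1 and 14 = 8+4+2 from
 * repeated doublings; it is only needed for decryption, hence the guard. */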
#if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)
static WARN_UNUSED_RESULT word32 inv_col_mul(
    word32 t, int i9, int ib, int id, int ie)
{
    byte t9 = GETBYTE(t, i9);
    byte tb = GETBYTE(t, ib);
    byte td = GETBYTE(t, id);
    byte te = GETBYTE(t, ie);
    byte t0 = t9 ^ tb ^ td;

    return t0 ^ AES_XTIME(AES_XTIME(AES_XTIME(t0 ^ te) ^ td ^ te) ^ tb ^ te);
}
#endif
#endif

#if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) || \
    defined(HAVE_AESCCM) || defined(HAVE_AESGCM)

#ifndef WC_CACHE_LINE_SZ
#if defined(__x86_64__) || defined(_M_X64) || \
    (defined(__ILP32__) && (__ILP32__ >= 1))
#define WC_CACHE_LINE_SZ 64
#else
/* default cache line size */
#define WC_CACHE_LINE_SZ 32
#endif
#endif
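
/* Cache-timing countermeasure: the PreFetch* helpers below read one entry
 * from every cache line of a lookup table before it is used, so that all
 * lines are resident and a secret-dependent index does not change which
 * lines get fetched. The running value x starts at 0 and stays 0 (x &= ...);
 * callers fold it into the state with |=, a no-op that still creates a data
 * dependency and keeps the compiler from eliminating the loads. */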
#ifndef WC_NO_CACHE_RESISTANT
#ifndef WOLFSSL_AES_SMALL_TABLES
/* load 4 Te Tables into cache by cache line stride */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTe(void)
{
    word32 x = 0;
    int i, j;

    for (i = 0; i < 4; i++) {
        /* 256 elements, each one is 4 bytes */
        for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
            x &= Te[i][j];
        }
    }
    return x;
}
#else
/* load sbox into cache by cache line stride */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchSBox(void)
{
    word32 x = 0;
    int i;

    for (i = 0; i < 256; i += WC_CACHE_LINE_SZ/4) {
        x &= Tsbox[i];
    }
    return x;
}
#endif
#endif

/* Software AES - ECB Encrypt */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 s0, s1, s2, s3;
    word32 t0, t1, t2, t3;
    word32 r = aes->rounds >> 1;
    const word32* rk = aes->key;

    if (r > 7 || r == 0) {
        WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
        return KEYUSAGE_E;
    }

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
#ifdef DEBUG_AESNI
        printf("about to aes encrypt\n");
        printf("in = %p\n", inBlock);
        printf("out = %p\n", outBlock);
        printf("aes->key = %p\n", aes->key);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", AES_BLOCK_SIZE);
#endif

        /* check alignment, decrypt doesn't need alignment */
        if ((wc_ptr_t)inBlock % AESNI_ALIGN) {
#ifndef NO_WOLFSSL_ALLOC_ALIGN
            byte* tmp = (byte*)XMALLOC(AES_BLOCK_SIZE + AESNI_ALIGN, aes->heap,
                                       DYNAMIC_TYPE_TMP_BUFFER);
            byte* tmp_align;
            if (tmp == NULL)
                return MEMORY_E;

            tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));
            XMEMCPY(tmp_align, inBlock, AES_BLOCK_SIZE);
            AES_ECB_encrypt(tmp_align, tmp_align, AES_BLOCK_SIZE,
                            (byte*)aes->key, aes->rounds);
            XMEMCPY(outBlock, tmp_align, AES_BLOCK_SIZE);
            XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            return 0;
#else
            WOLFSSL_MSG("AES-ECB encrypt with bad alignment");
            WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
            return BAD_ALIGN_E;
#endif
        }

        AES_ECB_encrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
                        aes->rounds);
        return 0;
    }
    else {
#ifdef DEBUG_AESNI
        printf("Skipping AES-NI\n");
#endif
    }
#endif

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
    return 0;
#endif

#if defined(WOLFSSL_IMXRT_DCP)
    if (aes->keylen == 16) {
        DCPAesEcbEncrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
        return 0;
    }
#endif

    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    XMEMCPY(&s0, inBlock, sizeof(s0));
    XMEMCPY(&s1, inBlock + sizeof(s0), sizeof(s1));
    XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
    XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));

#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    /* AddRoundKey */
    s0 ^= rk[0];
    s1 ^= rk[1];
    s2 ^= rk[2];
    s3 ^= rk[3];

#ifndef WOLFSSL_AES_SMALL_TABLES
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchTe();
#endif

#ifndef WOLFSSL_AES_NO_UNROLL
/* Unroll the loop. */
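/* Each Te[k] entry combines SubBytes, ShiftRows and MixColumns for one byte
 * lane, so a full round is just four table lookups and four XORs per output
 * word plus the round key. The T_S/S_T macro pair below therefore covers two
 * rounds per expansion. */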
#define ENC_ROUND_T_S(o) \
    t0 = Te[0][GETBYTE(s0, 3)] ^ Te[1][GETBYTE(s1, 2)] ^ \
         Te[2][GETBYTE(s2, 1)] ^ Te[3][GETBYTE(s3, 0)] ^ rk[(o)+4]; \
    t1 = Te[0][GETBYTE(s1, 3)] ^ Te[1][GETBYTE(s2, 2)] ^ \
         Te[2][GETBYTE(s3, 1)] ^ Te[3][GETBYTE(s0, 0)] ^ rk[(o)+5]; \
    t2 = Te[0][GETBYTE(s2, 3)] ^ Te[1][GETBYTE(s3, 2)] ^ \
         Te[2][GETBYTE(s0, 1)] ^ Te[3][GETBYTE(s1, 0)] ^ rk[(o)+6]; \
    t3 = Te[0][GETBYTE(s3, 3)] ^ Te[1][GETBYTE(s0, 2)] ^ \
         Te[2][GETBYTE(s1, 1)] ^ Te[3][GETBYTE(s2, 0)] ^ rk[(o)+7]
#define ENC_ROUND_S_T(o) \
    s0 = Te[0][GETBYTE(t0, 3)] ^ Te[1][GETBYTE(t1, 2)] ^ \
         Te[2][GETBYTE(t2, 1)] ^ Te[3][GETBYTE(t3, 0)] ^ rk[(o)+0]; \
    s1 = Te[0][GETBYTE(t1, 3)] ^ Te[1][GETBYTE(t2, 2)] ^ \
         Te[2][GETBYTE(t3, 1)] ^ Te[3][GETBYTE(t0, 0)] ^ rk[(o)+1]; \
    s2 = Te[0][GETBYTE(t2, 3)] ^ Te[1][GETBYTE(t3, 2)] ^ \
         Te[2][GETBYTE(t0, 1)] ^ Te[3][GETBYTE(t1, 0)] ^ rk[(o)+2]; \
    s3 = Te[0][GETBYTE(t3, 3)] ^ Te[1][GETBYTE(t0, 2)] ^ \
         Te[2][GETBYTE(t1, 1)] ^ Te[3][GETBYTE(t2, 0)] ^ rk[(o)+3]

    ENC_ROUND_T_S( 0);
    ENC_ROUND_S_T( 8); ENC_ROUND_T_S( 8);
    ENC_ROUND_S_T(16); ENC_ROUND_T_S(16);
    ENC_ROUND_S_T(24); ENC_ROUND_T_S(24);
    ENC_ROUND_S_T(32); ENC_ROUND_T_S(32);
    if (r > 5) {
        ENC_ROUND_S_T(40); ENC_ROUND_T_S(40);
        if (r > 6) {
            ENC_ROUND_S_T(48); ENC_ROUND_T_S(48);
        }
    }
    rk += r * 8;
#else
    /*
     * Nr - 1 full rounds:
     */
    for (;;) {
        t0 =
            Te[0][GETBYTE(s0, 3)] ^
            Te[1][GETBYTE(s1, 2)] ^
            Te[2][GETBYTE(s2, 1)] ^
            Te[3][GETBYTE(s3, 0)] ^
            rk[4];
        t1 =
            Te[0][GETBYTE(s1, 3)] ^
            Te[1][GETBYTE(s2, 2)] ^
            Te[2][GETBYTE(s3, 1)] ^
            Te[3][GETBYTE(s0, 0)] ^
            rk[5];
        t2 =
            Te[0][GETBYTE(s2, 3)] ^
            Te[1][GETBYTE(s3, 2)] ^
            Te[2][GETBYTE(s0, 1)] ^
            Te[3][GETBYTE(s1, 0)] ^
            rk[6];
        t3 =
            Te[0][GETBYTE(s3, 3)] ^
            Te[1][GETBYTE(s0, 2)] ^
            Te[2][GETBYTE(s1, 1)] ^
            Te[3][GETBYTE(s2, 0)] ^
            rk[7];

        rk += 8;
        if (--r == 0) {
            break;
        }

        s0 =
            Te[0][GETBYTE(t0, 3)] ^
            Te[1][GETBYTE(t1, 2)] ^
            Te[2][GETBYTE(t2, 1)] ^
            Te[3][GETBYTE(t3, 0)] ^
            rk[0];
        s1 =
            Te[0][GETBYTE(t1, 3)] ^
            Te[1][GETBYTE(t2, 2)] ^
            Te[2][GETBYTE(t3, 1)] ^
            Te[3][GETBYTE(t0, 0)] ^
            rk[1];
        s2 =
            Te[0][GETBYTE(t2, 3)] ^
            Te[1][GETBYTE(t3, 2)] ^
            Te[2][GETBYTE(t0, 1)] ^
            Te[3][GETBYTE(t1, 0)] ^
            rk[2];
        s3 =
            Te[0][GETBYTE(t3, 3)] ^
            Te[1][GETBYTE(t0, 2)] ^
            Te[2][GETBYTE(t1, 1)] ^
            Te[3][GETBYTE(t2, 0)] ^
            rk[3];
    }
#endif

    /*
     * apply last round and
     * map cipher state to byte array block:
     */
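    /* The last round skips MixColumns, so plain S-box outputs are needed.
     * Rather than keeping a separate S-box table, each mask below selects
     * the one byte lane of a Te entry that holds the raw S[x] value. */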
    s0 =
        (Te[2][GETBYTE(t0, 3)] & 0xff000000) ^
        (Te[3][GETBYTE(t1, 2)] & 0x00ff0000) ^
        (Te[0][GETBYTE(t2, 1)] & 0x0000ff00) ^
        (Te[1][GETBYTE(t3, 0)] & 0x000000ff) ^
        rk[0];
    s1 =
        (Te[2][GETBYTE(t1, 3)] & 0xff000000) ^
        (Te[3][GETBYTE(t2, 2)] & 0x00ff0000) ^
        (Te[0][GETBYTE(t3, 1)] & 0x0000ff00) ^
        (Te[1][GETBYTE(t0, 0)] & 0x000000ff) ^
        rk[1];
    s2 =
        (Te[2][GETBYTE(t2, 3)] & 0xff000000) ^
        (Te[3][GETBYTE(t3, 2)] & 0x00ff0000) ^
        (Te[0][GETBYTE(t0, 1)] & 0x0000ff00) ^
        (Te[1][GETBYTE(t1, 0)] & 0x000000ff) ^
        rk[2];
    s3 =
        (Te[2][GETBYTE(t3, 3)] & 0xff000000) ^
        (Te[3][GETBYTE(t0, 2)] & 0x00ff0000) ^
        (Te[0][GETBYTE(t1, 1)] & 0x0000ff00) ^
        (Te[1][GETBYTE(t2, 0)] & 0x000000ff) ^
        rk[3];
#else
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchSBox();
#endif

    r *= 2;
    /* One full round per iteration; the final (MixColumns-free) round is
     * applied after the loop. */
    for (rk += 4; r > 1; r--, rk += 4) {
        t0 =
            ((word32)Tsbox[GETBYTE(s0, 3)] << 24) ^
            ((word32)Tsbox[GETBYTE(s1, 2)] << 16) ^
            ((word32)Tsbox[GETBYTE(s2, 1)] << 8) ^
            ((word32)Tsbox[GETBYTE(s3, 0)]);
        t1 =
            ((word32)Tsbox[GETBYTE(s1, 3)] << 24) ^
            ((word32)Tsbox[GETBYTE(s2, 2)] << 16) ^
            ((word32)Tsbox[GETBYTE(s3, 1)] << 8) ^
            ((word32)Tsbox[GETBYTE(s0, 0)]);
        t2 =
            ((word32)Tsbox[GETBYTE(s2, 3)] << 24) ^
            ((word32)Tsbox[GETBYTE(s3, 2)] << 16) ^
            ((word32)Tsbox[GETBYTE(s0, 1)] << 8) ^
            ((word32)Tsbox[GETBYTE(s1, 0)]);
        t3 =
            ((word32)Tsbox[GETBYTE(s3, 3)] << 24) ^
            ((word32)Tsbox[GETBYTE(s0, 2)] << 16) ^
            ((word32)Tsbox[GETBYTE(s1, 1)] << 8) ^
            ((word32)Tsbox[GETBYTE(s2, 0)]);

        s0 =
            (col_mul(t0, 3, 2, 0, 1) << 24) ^
            (col_mul(t0, 2, 1, 0, 3) << 16) ^
            (col_mul(t0, 1, 0, 2, 3) << 8) ^
            (col_mul(t0, 0, 3, 2, 1) ) ^
            rk[0];
        s1 =
            (col_mul(t1, 3, 2, 0, 1) << 24) ^
            (col_mul(t1, 2, 1, 0, 3) << 16) ^
            (col_mul(t1, 1, 0, 2, 3) << 8) ^
            (col_mul(t1, 0, 3, 2, 1) ) ^
            rk[1];
        s2 =
            (col_mul(t2, 3, 2, 0, 1) << 24) ^
            (col_mul(t2, 2, 1, 0, 3) << 16) ^
            (col_mul(t2, 1, 0, 2, 3) << 8) ^
            (col_mul(t2, 0, 3, 2, 1) ) ^
            rk[2];
        s3 =
            (col_mul(t3, 3, 2, 0, 1) << 24) ^
            (col_mul(t3, 2, 1, 0, 3) << 16) ^
            (col_mul(t3, 1, 0, 2, 3) << 8) ^
            (col_mul(t3, 0, 3, 2, 1) ) ^
            rk[3];
    }

    t0 =
        ((word32)Tsbox[GETBYTE(s0, 3)] << 24) ^
        ((word32)Tsbox[GETBYTE(s1, 2)] << 16) ^
        ((word32)Tsbox[GETBYTE(s2, 1)] << 8) ^
        ((word32)Tsbox[GETBYTE(s3, 0)]);
    t1 =
        ((word32)Tsbox[GETBYTE(s1, 3)] << 24) ^
        ((word32)Tsbox[GETBYTE(s2, 2)] << 16) ^
        ((word32)Tsbox[GETBYTE(s3, 1)] << 8) ^
        ((word32)Tsbox[GETBYTE(s0, 0)]);
    t2 =
        ((word32)Tsbox[GETBYTE(s2, 3)] << 24) ^
        ((word32)Tsbox[GETBYTE(s3, 2)] << 16) ^
        ((word32)Tsbox[GETBYTE(s0, 1)] << 8) ^
        ((word32)Tsbox[GETBYTE(s1, 0)]);
    t3 =
        ((word32)Tsbox[GETBYTE(s3, 3)] << 24) ^
        ((word32)Tsbox[GETBYTE(s0, 2)] << 16) ^
        ((word32)Tsbox[GETBYTE(s1, 1)] << 8) ^
        ((word32)Tsbox[GETBYTE(s2, 0)]);
    s0 = t0 ^ rk[0];
    s1 = t1 ^ rk[1];
    s2 = t2 ^ rk[2];
    s3 = t3 ^ rk[3];
#endif

    /* write out */
#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    XMEMCPY(outBlock, &s0, sizeof(s0));
    XMEMCPY(outBlock + sizeof(s0), &s1, sizeof(s1));
    XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
    XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));

    return 0;
}
#endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT || HAVE_AESCCM || HAVE_AESGCM */
#if defined(HAVE_AES_DECRYPT)
#if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC)) || \
    defined(WOLFSSL_AES_DIRECT)

#ifndef WC_NO_CACHE_RESISTANT
#ifndef WOLFSSL_AES_SMALL_TABLES
/* load 4 Td Tables into cache by cache line stride */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd(void)
{
    word32 x = 0;
    int i, j;

    for (i = 0; i < 4; i++) {
        /* 256 elements, each one is 4 bytes */
        for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
            x &= Td[i][j];
        }
    }
    return x;
}
#endif

/* load Td Table4 into cache by cache line stride */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd4(void)
{
    word32 x = 0;
    int i;

    for (i = 0; i < 256; i += WC_CACHE_LINE_SZ) {
        x &= (word32)Td4[i];
    }
    return x;
}
#endif

/* Software AES - ECB Decrypt */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 s0, s1, s2, s3;
    word32 t0, t1, t2, t3;
    word32 r = aes->rounds >> 1;
    const word32* rk = aes->key;

    if (r > 7 || r == 0) {
        WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
        return KEYUSAGE_E;
    }

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
#ifdef DEBUG_AESNI
        printf("about to aes decrypt\n");
        printf("in = %p\n", inBlock);
        printf("out = %p\n", outBlock);
        printf("aes->key = %p\n", aes->key);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", AES_BLOCK_SIZE);
#endif

        /* if input and output are the same buffer, decrypting in place would
         * overwrite the input block a CBC caller still needs as the next IV,
         * so stash it first */
        if ((const byte*)aes->tmp != inBlock)
            XMEMCPY(aes->tmp, inBlock, AES_BLOCK_SIZE);
        AES_ECB_decrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
                        aes->rounds);
        return 0;
    }
    else {
#ifdef DEBUG_AESNI
        printf("Skipping AES-NI\n");
#endif
    }
#endif /* WOLFSSL_AESNI */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    if (aes->keylen == 16) {
        DCPAesEcbDecrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
        return 0;
    }
#endif

    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    XMEMCPY(&s0, inBlock, sizeof(s0));
    XMEMCPY(&s1, inBlock + sizeof(s0), sizeof(s1));
    XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
    XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));

#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    s0 ^= rk[0];
    s1 ^= rk[1];
    s2 ^= rk[2];
    s3 ^= rk[3];

#ifndef WOLFSSL_AES_SMALL_TABLES
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchTd();
#endif

#ifndef WOLFSSL_AES_NO_UNROLL
/* Unroll the loop. */
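/* Mirror of the encryption macros using the Td tables, which combine
 * InvSubBytes, InvShiftRows and InvMixColumns. The byte order of the source
 * words (s0, s3, s2, s1, ...) rotates the opposite way, matching
 * InvShiftRows. */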
#define DEC_ROUND_T_S(o) \
    t0 = Td[0][GETBYTE(s0, 3)] ^ Td[1][GETBYTE(s3, 2)] ^ \
         Td[2][GETBYTE(s2, 1)] ^ Td[3][GETBYTE(s1, 0)] ^ rk[(o)+4]; \
    t1 = Td[0][GETBYTE(s1, 3)] ^ Td[1][GETBYTE(s0, 2)] ^ \
         Td[2][GETBYTE(s3, 1)] ^ Td[3][GETBYTE(s2, 0)] ^ rk[(o)+5]; \
    t2 = Td[0][GETBYTE(s2, 3)] ^ Td[1][GETBYTE(s1, 2)] ^ \
         Td[2][GETBYTE(s0, 1)] ^ Td[3][GETBYTE(s3, 0)] ^ rk[(o)+6]; \
    t3 = Td[0][GETBYTE(s3, 3)] ^ Td[1][GETBYTE(s2, 2)] ^ \
         Td[2][GETBYTE(s1, 1)] ^ Td[3][GETBYTE(s0, 0)] ^ rk[(o)+7]
#define DEC_ROUND_S_T(o) \
    s0 = Td[0][GETBYTE(t0, 3)] ^ Td[1][GETBYTE(t3, 2)] ^ \
         Td[2][GETBYTE(t2, 1)] ^ Td[3][GETBYTE(t1, 0)] ^ rk[(o)+0]; \
    s1 = Td[0][GETBYTE(t1, 3)] ^ Td[1][GETBYTE(t0, 2)] ^ \
         Td[2][GETBYTE(t3, 1)] ^ Td[3][GETBYTE(t2, 0)] ^ rk[(o)+1]; \
    s2 = Td[0][GETBYTE(t2, 3)] ^ Td[1][GETBYTE(t1, 2)] ^ \
         Td[2][GETBYTE(t0, 1)] ^ Td[3][GETBYTE(t3, 0)] ^ rk[(o)+2]; \
    s3 = Td[0][GETBYTE(t3, 3)] ^ Td[1][GETBYTE(t2, 2)] ^ \
         Td[2][GETBYTE(t1, 1)] ^ Td[3][GETBYTE(t0, 0)] ^ rk[(o)+3]

    DEC_ROUND_T_S( 0);
    DEC_ROUND_S_T( 8); DEC_ROUND_T_S( 8);
    DEC_ROUND_S_T(16); DEC_ROUND_T_S(16);
    DEC_ROUND_S_T(24); DEC_ROUND_T_S(24);
    DEC_ROUND_S_T(32); DEC_ROUND_T_S(32);
    if (r > 5) {
        DEC_ROUND_S_T(40); DEC_ROUND_T_S(40);
        if (r > 6) {
            DEC_ROUND_S_T(48); DEC_ROUND_T_S(48);
        }
    }
    rk += r * 8;
#else
    /*
     * Nr - 1 full rounds:
     */
    for (;;) {
        t0 =
            Td[0][GETBYTE(s0, 3)] ^
            Td[1][GETBYTE(s3, 2)] ^
            Td[2][GETBYTE(s2, 1)] ^
            Td[3][GETBYTE(s1, 0)] ^
            rk[4];
        t1 =
            Td[0][GETBYTE(s1, 3)] ^
            Td[1][GETBYTE(s0, 2)] ^
            Td[2][GETBYTE(s3, 1)] ^
            Td[3][GETBYTE(s2, 0)] ^
            rk[5];
        t2 =
            Td[0][GETBYTE(s2, 3)] ^
            Td[1][GETBYTE(s1, 2)] ^
            Td[2][GETBYTE(s0, 1)] ^
            Td[3][GETBYTE(s3, 0)] ^
            rk[6];
        t3 =
            Td[0][GETBYTE(s3, 3)] ^
            Td[1][GETBYTE(s2, 2)] ^
            Td[2][GETBYTE(s1, 1)] ^
            Td[3][GETBYTE(s0, 0)] ^
            rk[7];

        rk += 8;
        if (--r == 0) {
            break;
        }

        s0 =
            Td[0][GETBYTE(t0, 3)] ^
            Td[1][GETBYTE(t3, 2)] ^
            Td[2][GETBYTE(t2, 1)] ^
            Td[3][GETBYTE(t1, 0)] ^
            rk[0];
        s1 =
            Td[0][GETBYTE(t1, 3)] ^
            Td[1][GETBYTE(t0, 2)] ^
            Td[2][GETBYTE(t3, 1)] ^
            Td[3][GETBYTE(t2, 0)] ^
            rk[1];
        s2 =
            Td[0][GETBYTE(t2, 3)] ^
            Td[1][GETBYTE(t1, 2)] ^
            Td[2][GETBYTE(t0, 1)] ^
            Td[3][GETBYTE(t3, 0)] ^
            rk[2];
        s3 =
            Td[0][GETBYTE(t3, 3)] ^
            Td[1][GETBYTE(t2, 2)] ^
            Td[2][GETBYTE(t1, 1)] ^
            Td[3][GETBYTE(t0, 0)] ^
            rk[3];
    }
#endif

    /*
     * apply last round and
     * map cipher state to byte array block:
     */
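    /* As in encryption, the last round omits the (Inv)MixColumns step; here
     * the dedicated inverse S-box table Td4 supplies the raw byte values. */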
#ifndef WC_NO_CACHE_RESISTANT
    t0 |= PreFetchTd4();
#endif

    s0 =
        ((word32)Td4[GETBYTE(t0, 3)] << 24) ^
        ((word32)Td4[GETBYTE(t3, 2)] << 16) ^
        ((word32)Td4[GETBYTE(t2, 1)] << 8) ^
        ((word32)Td4[GETBYTE(t1, 0)]) ^
        rk[0];
    s1 =
        ((word32)Td4[GETBYTE(t1, 3)] << 24) ^
        ((word32)Td4[GETBYTE(t0, 2)] << 16) ^
        ((word32)Td4[GETBYTE(t3, 1)] << 8) ^
        ((word32)Td4[GETBYTE(t2, 0)]) ^
        rk[1];
    s2 =
        ((word32)Td4[GETBYTE(t2, 3)] << 24) ^
        ((word32)Td4[GETBYTE(t1, 2)] << 16) ^
        ((word32)Td4[GETBYTE(t0, 1)] << 8) ^
        ((word32)Td4[GETBYTE(t3, 0)]) ^
        rk[2];
    s3 =
        ((word32)Td4[GETBYTE(t3, 3)] << 24) ^
        ((word32)Td4[GETBYTE(t2, 2)] << 16) ^
        ((word32)Td4[GETBYTE(t1, 1)] << 8) ^
        ((word32)Td4[GETBYTE(t0, 0)]) ^
        rk[3];
#else
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchTd4();
#endif

    r *= 2;
    for (rk += 4; r > 1; r--, rk += 4) {
        t0 =
            ((word32)Td4[GETBYTE(s0, 3)] << 24) ^
            ((word32)Td4[GETBYTE(s3, 2)] << 16) ^
            ((word32)Td4[GETBYTE(s2, 1)] << 8) ^
            ((word32)Td4[GETBYTE(s1, 0)]) ^
            rk[0];
        t1 =
            ((word32)Td4[GETBYTE(s1, 3)] << 24) ^
            ((word32)Td4[GETBYTE(s0, 2)] << 16) ^
            ((word32)Td4[GETBYTE(s3, 1)] << 8) ^
            ((word32)Td4[GETBYTE(s2, 0)]) ^
            rk[1];
        t2 =
            ((word32)Td4[GETBYTE(s2, 3)] << 24) ^
            ((word32)Td4[GETBYTE(s1, 2)] << 16) ^
            ((word32)Td4[GETBYTE(s0, 1)] << 8) ^
            ((word32)Td4[GETBYTE(s3, 0)]) ^
            rk[2];
        t3 =
            ((word32)Td4[GETBYTE(s3, 3)] << 24) ^
            ((word32)Td4[GETBYTE(s2, 2)] << 16) ^
            ((word32)Td4[GETBYTE(s1, 1)] << 8) ^
            ((word32)Td4[GETBYTE(s0, 0)]) ^
            rk[3];

        s0 =
            (inv_col_mul(t0, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t0, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t0, 2, 0, 3, 1) << 8) ^
            (inv_col_mul(t0, 1, 3, 2, 0) );
        s1 =
            (inv_col_mul(t1, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t1, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t1, 2, 0, 3, 1) << 8) ^
            (inv_col_mul(t1, 1, 3, 2, 0) );
        s2 =
            (inv_col_mul(t2, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t2, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t2, 2, 0, 3, 1) << 8) ^
            (inv_col_mul(t2, 1, 3, 2, 0) );
        s3 =
            (inv_col_mul(t3, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t3, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t3, 2, 0, 3, 1) << 8) ^
            (inv_col_mul(t3, 1, 3, 2, 0) );
    }

    t0 =
        ((word32)Td4[GETBYTE(s0, 3)] << 24) ^
        ((word32)Td4[GETBYTE(s3, 2)] << 16) ^
        ((word32)Td4[GETBYTE(s2, 1)] << 8) ^
        ((word32)Td4[GETBYTE(s1, 0)]);
    t1 =
        ((word32)Td4[GETBYTE(s1, 3)] << 24) ^
        ((word32)Td4[GETBYTE(s0, 2)] << 16) ^
        ((word32)Td4[GETBYTE(s3, 1)] << 8) ^
        ((word32)Td4[GETBYTE(s2, 0)]);
    t2 =
        ((word32)Td4[GETBYTE(s2, 3)] << 24) ^
        ((word32)Td4[GETBYTE(s1, 2)] << 16) ^
        ((word32)Td4[GETBYTE(s0, 1)] << 8) ^
        ((word32)Td4[GETBYTE(s3, 0)]);
    t3 =
        ((word32)Td4[GETBYTE(s3, 3)] << 24) ^
        ((word32)Td4[GETBYTE(s2, 2)] << 16) ^
        ((word32)Td4[GETBYTE(s1, 1)] << 8) ^
        ((word32)Td4[GETBYTE(s0, 0)]);
    s0 = t0 ^ rk[0];
    s1 = t1 ^ rk[1];
    s2 = t2 ^ rk[2];
    s3 = t3 ^ rk[3];
#endif

    /* write out */
#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    XMEMCPY(outBlock, &s0, sizeof(s0));
    XMEMCPY(outBlock + sizeof(s0), &s1, sizeof(s1));
    XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
    XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));

    return 0;
}
#endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
#endif /* HAVE_AES_DECRYPT */
#endif /* NEED_AES_TABLES */
/* wc_AesSetKey */
#if defined(STM32_CRYPTO)

int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
                 const byte* iv, int dir)
{
    word32 *rk;

    (void)dir;

    if (aes == NULL || (keylen != 16 &&
#ifdef WOLFSSL_AES_192
        keylen != 24 &&
#endif
        keylen != 32)) {
        return BAD_FUNC_ARG;
    }

    rk = aes->key;
    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;
    XMEMCPY(rk, userKey, keylen);
#if !defined(WOLFSSL_STM32_CUBEMX) || defined(STM32_HAL_V2)
    ByteReverseWords(rk, rk, keylen);
#endif
#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif

    return wc_AesSetIV(aes, iv);
}

#if defined(WOLFSSL_AES_DIRECT)
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
#endif

#elif defined(HAVE_COLDFIRE_SEC)
#if defined (HAVE_THREADX)
#include "memory_pools.h"
extern TX_BYTE_POOL mp_ncached;  /* Non Cached memory pool */
#endif

#define AES_BUFFER_SIZE (AES_BLOCK_SIZE * 64)
static unsigned char *AESBuffIn = NULL;
static unsigned char *AESBuffOut = NULL;
static byte *secReg;
static byte *secKey;
static volatile SECdescriptorType *secDesc;

static wolfSSL_Mutex Mutex_AesSEC;

#define SEC_DESC_AES_CBC_ENCRYPT 0x60300010
#define SEC_DESC_AES_CBC_DECRYPT 0x60200010

extern volatile unsigned char __MBAR[];

int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
                 const byte* iv, int dir)
{
    if (AESBuffIn == NULL) {
#if defined (HAVE_THREADX)
        int s1, s2, s3, s4, s5;

        s5 = tx_byte_allocate(&mp_ncached, (void *)&secDesc,
                              sizeof(SECdescriptorType), TX_NO_WAIT);
        s1 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffIn,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s2 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffOut,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s3 = tx_byte_allocate(&mp_ncached, (void *)&secKey,
                              AES_BLOCK_SIZE*2, TX_NO_WAIT);
        s4 = tx_byte_allocate(&mp_ncached, (void *)&secReg,
                              AES_BLOCK_SIZE, TX_NO_WAIT);

        if (s1 || s2 || s3 || s4 || s5)
            return BAD_FUNC_ARG;
#else
#warning "Allocate non-Cache buffers"
#endif

        wc_InitMutex(&Mutex_AesSEC);
    }

    if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
        return BAD_FUNC_ARG;
    if (aes == NULL)
        return BAD_FUNC_ARG;

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;
    XMEMCPY(aes->key, userKey, keylen);
    if (iv)
        XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif
    return 0;
}
#elif defined(FREESCALE_LTC)
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
                 int dir)
{
    if (aes == NULL || !((keylen == 16) || (keylen == 24) || (keylen == 32)))
        return BAD_FUNC_ARG;

    aes->rounds = keylen/4 + 6;
    XMEMCPY(aes->key, userKey, keylen);
#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif

    return wc_AesSetIV(aes, iv);
}

int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}

#elif defined(FREESCALE_MMCAU)
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
                 const byte* iv, int dir)
{
    int ret;
    byte* rk;
    byte* tmpKey = (byte*)userKey;
    int tmpKeyDynamic = 0;
    word32 alignOffset = 0;

    (void)dir;

    if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
        return BAD_FUNC_ARG;
    if (aes == NULL)
        return BAD_FUNC_ARG;

    rk = (byte*)aes->key;
    if (rk == NULL)
        return BAD_FUNC_ARG;

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif

    aes->rounds = keylen/4 + 6;

#ifdef FREESCALE_MMCAU_CLASSIC
    if ((wc_ptr_t)userKey % WOLFSSL_MMCAU_ALIGNMENT) {
#ifndef NO_WOLFSSL_ALLOC_ALIGN
        byte* tmp = (byte*)XMALLOC(keylen + WOLFSSL_MMCAU_ALIGNMENT,
                                   aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
        if (tmp == NULL) {
            return MEMORY_E;
        }
        alignOffset = WOLFSSL_MMCAU_ALIGNMENT -
                      ((wc_ptr_t)tmp % WOLFSSL_MMCAU_ALIGNMENT);
        tmpKey = tmp + alignOffset;
        XMEMCPY(tmpKey, userKey, keylen);
        tmpKeyDynamic = 1;
#else
        WOLFSSL_MSG("Bad cau_aes_set_key alignment");
        return BAD_ALIGN_E;
#endif
    }
#endif

    ret = wolfSSL_CryptHwMutexLock();
    if (ret == 0) {
#ifdef FREESCALE_MMCAU_CLASSIC
        cau_aes_set_key(tmpKey, keylen*8, rk);
#else
        MMCAU_AES_SetKey(tmpKey, keylen, rk);
#endif
        wolfSSL_CryptHwMutexUnLock();

        ret = wc_AesSetIV(aes, iv);
    }

    if (tmpKeyDynamic == 1) {
        XFREE(tmpKey - alignOffset, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
    }

    return ret;
}

int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}

#elif defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
                 int dir)
{
    int ret;

    if (aes == NULL || (keylen != 16 && keylen != 24 && keylen != 32)) {
        return BAD_FUNC_ARG;
    }
    aes->ctxInitDone = 0;
#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif
    ret = se050_aes_set_key(aes, userKey, keylen, iv, dir);
    if (ret == 0) {
        ret = wc_AesSetIV(aes, iv);
    }
    return ret;
}

int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}

#elif defined(WOLFSSL_NRF51_AES)
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
                 const byte* iv, int dir)
{
    int ret;

    (void)dir;
    (void)iv;

    if (aes == NULL || keylen != 16)
        return BAD_FUNC_ARG;

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;
    XMEMCPY(aes->key, userKey, keylen);
    ret = nrf51_aes_set_key(userKey);
#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif

    return ret;
}

int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}

#elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
    !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES)

int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
                 const byte* iv, int dir)
{
    (void)dir;
    (void)iv;

    if (aes == NULL || (keylen != 16 && keylen != 24 && keylen != 32)) {
        return BAD_FUNC_ARG;
    }

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;

    XMEMCPY(aes->key, userKey, keylen);
#if defined(WOLFSSL_AES_COUNTER)
    aes->left = 0;
#endif
    return wc_AesSetIV(aes, iv);
}

int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
#elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
                 int dir)
{
    SaSiError_t ret = SASI_OK;
    SaSiAesIv_t iv_aes;

    if (aes == NULL ||
        (keylen != AES_128_KEY_SIZE &&
         keylen != AES_192_KEY_SIZE &&
         keylen != AES_256_KEY_SIZE)) {
        return BAD_FUNC_ARG;
    }
#if defined(AES_MAX_KEY_SIZE)
    if (keylen > (AES_MAX_KEY_SIZE/8)) {
        return BAD_FUNC_ARG;
    }
#endif
    if (dir != AES_ENCRYPTION &&
        dir != AES_DECRYPTION) {
        return BAD_FUNC_ARG;
    }

    if (dir == AES_ENCRYPTION) {
        aes->ctx.mode = SASI_AES_ENCRYPT;
        SaSi_AesInit(&aes->ctx.user_ctx,
                     SASI_AES_ENCRYPT,
                     SASI_AES_MODE_CBC,
                     SASI_AES_PADDING_NONE);
    }
    else {
        aes->ctx.mode = SASI_AES_DECRYPT;
        SaSi_AesInit(&aes->ctx.user_ctx,
                     SASI_AES_DECRYPT,
                     SASI_AES_MODE_CBC,
                     SASI_AES_PADDING_NONE);
    }

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;
    XMEMCPY(aes->key, userKey, keylen);

    aes->ctx.key.pKey = (byte*)aes->key;
    aes->ctx.key.keySize = keylen;

    ret = SaSi_AesSetKey(&aes->ctx.user_ctx,
                         SASI_AES_USER_KEY,
                         &aes->ctx.key,
                         sizeof(aes->ctx.key));
    if (ret != SASI_OK) {
        return BAD_FUNC_ARG;
    }

    ret = wc_AesSetIV(aes, iv);

    if (iv)
        XMEMCPY(iv_aes, iv, AES_BLOCK_SIZE);
    else
        XMEMSET(iv_aes, 0, AES_BLOCK_SIZE);

    ret = SaSi_AesSetIv(&aes->ctx.user_ctx, iv_aes);
    if (ret != SASI_OK) {
        return ret;
    }
    return ret;
}
#if defined(WOLFSSL_AES_DIRECT)
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
#endif

#elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
    && !defined(WOLFSSL_QNX_CAAM)
    /* implemented in wolfcrypt/src/port/caam/caam_aes.c */

#elif defined(WOLFSSL_AFALG)
    /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */

#elif defined(WOLFSSL_DEVCRYPTO_AES)
    /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */

#elif defined(WOLFSSL_SILABS_SE_ACCEL)
    /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
#else

/* Software AES - SetKey */
static WARN_UNUSED_RESULT int wc_AesSetKeyLocal(
    Aes* aes, const byte* userKey, word32 keylen, const byte* iv, int dir,
    int checkKeyLen)
{
    int ret;
    word32 *rk;
#ifdef NEED_AES_TABLES
    word32 temp;
    unsigned int i = 0;
#endif
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    byte   local[32];
    word32 localSz = 32;
#endif

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    if (keylen == (16 + WC_CAAM_BLOB_SZ) ||
        keylen == (24 + WC_CAAM_BLOB_SZ) ||
        keylen == (32 + WC_CAAM_BLOB_SZ)) {
        if (wc_caamOpenBlob((byte*)userKey, keylen, local, &localSz) != 0) {
            return BAD_FUNC_ARG;
        }

        /* set local values */
        userKey = local;
        keylen = localSz;
    }
#endif

#ifdef WOLFSSL_SECO_CAAM
    /* if set to use hardware then import the key */
    if (aes->devId == WOLFSSL_SECO_DEVID) {
        int keyGroup = 1; /* group one was chosen arbitrarily */
        unsigned int keyIdOut;
        byte importiv[GCM_NONCE_MID_SZ];
        int importivSz = GCM_NONCE_MID_SZ;
        int keyType = 0;
        WC_RNG rng;

        if (wc_InitRng(&rng) != 0) {
            WOLFSSL_MSG("RNG init for IV failed");
            return WC_HW_E;
        }

        if (wc_RNG_GenerateBlock(&rng, importiv, importivSz) != 0) {
            WOLFSSL_MSG("Generate IV failed");
            wc_FreeRng(&rng);
            return WC_HW_E;
        }
        wc_FreeRng(&rng);

        switch (keylen) {
            case AES_128_KEY_SIZE: keyType = CAAM_KEYTYPE_AES128; break;
            case AES_192_KEY_SIZE: keyType = CAAM_KEYTYPE_AES192; break;
            case AES_256_KEY_SIZE: keyType = CAAM_KEYTYPE_AES256; break;
        }

        keyIdOut = wc_SECO_WrapKey(0, (byte*)userKey, keylen, importiv,
                                   importivSz, keyType, CAAM_KEY_TRANSIENT,
                                   keyGroup);
        if (keyIdOut == 0) {
            return WC_HW_E;
        }
        aes->blackKey = keyIdOut;
        return 0;
    }
#endif

#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
    (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
#ifdef WOLF_CRYPTO_CB
    if (aes->devId != INVALID_DEVID)
#endif
    {
        if (keylen > sizeof(aes->devKey)) {
            return BAD_FUNC_ARG;
        }
        XMEMCPY(aes->devKey, userKey, keylen);
    }
#endif

    if (checkKeyLen) {
        if (keylen != 16 && keylen != 24 && keylen != 32) {
            return BAD_FUNC_ARG;
        }
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE < 256
        /* Check key length only when AES_MAX_KEY_SIZE doesn't allow
         * all key sizes. Otherwise this condition is never true. */
        if (keylen > (AES_MAX_KEY_SIZE / 8)) {
            return BAD_FUNC_ARG;
        }
#endif
    }

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif

    aes->keylen = keylen;
    aes->rounds = (keylen/4) + 6;

#ifdef WOLFSSL_AESNI
    aes->use_aesni = 0;
    if (checkAESNI == 0) {
        haveAESNI = Check_CPU_support_AES();
        checkAESNI = 1;
    }
    if (haveAESNI) {
        aes->use_aesni = 1;
        if (iv)
            XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
        else
            XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);
        if (dir == AES_ENCRYPTION)
            return AES_set_encrypt_key(userKey, keylen * 8, aes);
#ifdef HAVE_AES_DECRYPT
        else
            return AES_set_decrypt_key(userKey, keylen * 8, aes);
#endif
    }
#endif /* WOLFSSL_AESNI */

#ifdef WOLFSSL_KCAPI_AES
    XMEMCPY(aes->devKey, userKey, keylen);
    if (aes->init != 0) {
        kcapi_cipher_destroy(aes->handle);
        aes->handle = NULL;
        aes->init = 0;
    }
    (void)dir;
#endif

    if (keylen > sizeof(aes->key)) {
        return BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    return wc_psa_aes_set_key(aes, userKey, keylen, (uint8_t*)iv,
                              ((psa_algorithm_t)0), dir);
#endif

    rk = aes->key;
    XMEMCPY(rk, userKey, keylen);
#if defined(LITTLE_ENDIAN_ORDER) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
    (!defined(WOLFSSL_ESP32WROOM32_CRYPT) || \
     defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES))
    ByteReverseWords(rk, rk, keylen);
#endif

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    temp = 0;
    if (keylen == 16)
        temp = DCPAesSetKey(aes, userKey, keylen, iv, dir);
    if (temp != 0)
        return WC_HW_E;
#endif

#ifdef NEED_AES_TABLES
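    /* Software key expansion (FIPS 197, section 5.2): each new round-key
     * word is the XOR of the word Nk positions back with, every Nk words, a
     * transformed copy of the previous word. The byte order used in the
     * lookups (2, 1, 0, 3) performs the RotWord rotation while the Te table
     * masks (or Tsbox entries) perform SubWord; rcon[i] supplies the round
     * constant. AES-256 additionally applies SubWord without rotation at
     * the halfway point of each iteration. */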
    switch (keylen) {
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 128 && \
    defined(WOLFSSL_AES_128)
    case 16:
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
#endif
        while (1)
        {
            temp = rk[3];
            rk[4] = rk[0] ^
#ifndef WOLFSSL_AES_SMALL_TABLES
                (Te[2][GETBYTE(temp, 2)] & 0xff000000) ^
                (Te[3][GETBYTE(temp, 1)] & 0x00ff0000) ^
                (Te[0][GETBYTE(temp, 0)] & 0x0000ff00) ^
                (Te[1][GETBYTE(temp, 3)] & 0x000000ff) ^
#else
                ((word32)Tsbox[GETBYTE(temp, 2)] << 24) ^
                ((word32)Tsbox[GETBYTE(temp, 1)] << 16) ^
                ((word32)Tsbox[GETBYTE(temp, 0)] << 8) ^
                ((word32)Tsbox[GETBYTE(temp, 3)]) ^
#endif
                rcon[i];
            rk[5] = rk[1] ^ rk[4];
            rk[6] = rk[2] ^ rk[5];
            rk[7] = rk[3] ^ rk[6];
            if (++i == 10)
                break;
            rk += 4;
        }
        break;
#endif /* 128 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 192 && \
    defined(WOLFSSL_AES_192)
    case 24:
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
#endif
        /* for (;;) here triggers a bug in VC60 SP4 w/ Pro Pack */
        while (1)
        {
            temp = rk[ 5];
            rk[ 6] = rk[ 0] ^
#ifndef WOLFSSL_AES_SMALL_TABLES
                (Te[2][GETBYTE(temp, 2)] & 0xff000000) ^
                (Te[3][GETBYTE(temp, 1)] & 0x00ff0000) ^
                (Te[0][GETBYTE(temp, 0)] & 0x0000ff00) ^
                (Te[1][GETBYTE(temp, 3)] & 0x000000ff) ^
#else
                ((word32)Tsbox[GETBYTE(temp, 2)] << 24) ^
                ((word32)Tsbox[GETBYTE(temp, 1)] << 16) ^
                ((word32)Tsbox[GETBYTE(temp, 0)] << 8) ^
                ((word32)Tsbox[GETBYTE(temp, 3)]) ^
#endif
                rcon[i];
            rk[ 7] = rk[ 1] ^ rk[ 6];
            rk[ 8] = rk[ 2] ^ rk[ 7];
            rk[ 9] = rk[ 3] ^ rk[ 8];
            if (++i == 8)
                break;
            rk[10] = rk[ 4] ^ rk[ 9];
            rk[11] = rk[ 5] ^ rk[10];
            rk += 6;
        }
        break;
#endif /* 192 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 256 && \
    defined(WOLFSSL_AES_256)
    case 32:
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
#endif
        while (1)
        {
            temp = rk[ 7];
            rk[ 8] = rk[ 0] ^
#ifndef WOLFSSL_AES_SMALL_TABLES
                (Te[2][GETBYTE(temp, 2)] & 0xff000000) ^
                (Te[3][GETBYTE(temp, 1)] & 0x00ff0000) ^
                (Te[0][GETBYTE(temp, 0)] & 0x0000ff00) ^
                (Te[1][GETBYTE(temp, 3)] & 0x000000ff) ^
#else
                ((word32)Tsbox[GETBYTE(temp, 2)] << 24) ^
                ((word32)Tsbox[GETBYTE(temp, 1)] << 16) ^
                ((word32)Tsbox[GETBYTE(temp, 0)] << 8) ^
                ((word32)Tsbox[GETBYTE(temp, 3)]) ^
#endif
                rcon[i];
            rk[ 9] = rk[ 1] ^ rk[ 8];
            rk[10] = rk[ 2] ^ rk[ 9];
            rk[11] = rk[ 3] ^ rk[10];
            if (++i == 7)
                break;
            temp = rk[11];
            rk[12] = rk[ 4] ^
#ifndef WOLFSSL_AES_SMALL_TABLES
                (Te[2][GETBYTE(temp, 3)] & 0xff000000) ^
                (Te[3][GETBYTE(temp, 2)] & 0x00ff0000) ^
                (Te[0][GETBYTE(temp, 1)] & 0x0000ff00) ^
                (Te[1][GETBYTE(temp, 0)] & 0x000000ff);
#else
                ((word32)Tsbox[GETBYTE(temp, 3)] << 24) ^
                ((word32)Tsbox[GETBYTE(temp, 2)] << 16) ^
                ((word32)Tsbox[GETBYTE(temp, 1)] << 8) ^
                ((word32)Tsbox[GETBYTE(temp, 0)]);
#endif
            rk[13] = rk[ 5] ^ rk[12];
            rk[14] = rk[ 6] ^ rk[13];
            rk[15] = rk[ 7] ^ rk[14];

            rk += 8;
        }
        break;
#endif /* 256 */

    default:
        return BAD_FUNC_ARG;
    } /* switch */
    ForceZero(&temp, sizeof(temp));

#if defined(HAVE_AES_DECRYPT)
    if (dir == AES_DECRYPTION) {
        unsigned int j;
        rk = aes->key;

        /* invert the order of the round keys: */
        for (i = 0, j = 4 * aes->rounds; i < j; i += 4, j -= 4) {
            temp = rk[i    ]; rk[i    ] = rk[j    ]; rk[j    ] = temp;
            temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp;
            temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp;
            temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp;
        }
        ForceZero(&temp, sizeof(temp));
#if !defined(WOLFSSL_AES_SMALL_TABLES)
        /* apply the inverse MixColumn transform to all round keys but the
         * first and the last: */
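        /* Td[i][Te[1][b] & 0xff] first maps b through the forward S-box
         * (the low byte of a Te[1] entry is the raw S[b]) and then through
         * Td, whose entries bake in InvSubBytes; the two substitutions
         * cancel, leaving exactly the InvMixColumns of each round-key word. */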
        for (i = 1; i < aes->rounds; i++) {
            rk += 4;
            rk[0] =
                Td[0][Te[1][GETBYTE(rk[0], 3)] & 0xff] ^
                Td[1][Te[1][GETBYTE(rk[0], 2)] & 0xff] ^
                Td[2][Te[1][GETBYTE(rk[0], 1)] & 0xff] ^
                Td[3][Te[1][GETBYTE(rk[0], 0)] & 0xff];
            rk[1] =
                Td[0][Te[1][GETBYTE(rk[1], 3)] & 0xff] ^
                Td[1][Te[1][GETBYTE(rk[1], 2)] & 0xff] ^
                Td[2][Te[1][GETBYTE(rk[1], 1)] & 0xff] ^
                Td[3][Te[1][GETBYTE(rk[1], 0)] & 0xff];
            rk[2] =
                Td[0][Te[1][GETBYTE(rk[2], 3)] & 0xff] ^
                Td[1][Te[1][GETBYTE(rk[2], 2)] & 0xff] ^
                Td[2][Te[1][GETBYTE(rk[2], 1)] & 0xff] ^
                Td[3][Te[1][GETBYTE(rk[2], 0)] & 0xff];
            rk[3] =
                Td[0][Te[1][GETBYTE(rk[3], 3)] & 0xff] ^
                Td[1][Te[1][GETBYTE(rk[3], 2)] & 0xff] ^
                Td[2][Te[1][GETBYTE(rk[3], 1)] & 0xff] ^
                Td[3][Te[1][GETBYTE(rk[3], 0)] & 0xff];
        }
#endif
    }
#else
    (void)dir;
#endif /* HAVE_AES_DECRYPT */
    (void)temp;
#endif /* NEED_AES_TABLES */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    XMEMCPY((byte*)aes->key, userKey, keylen);
    if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == CRYPTO_WORD_ENDIAN_BIG) {
        ByteReverseWords(aes->key, aes->key, 32);
    }
#endif

    ret = wc_AesSetIV(aes, iv);

#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    aes->ctx.cfd = -1;
#endif
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    ForceZero(local, sizeof(local));
#endif
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(&temp, sizeof(temp));
#endif

    return ret;
}

int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
                 const byte* iv, int dir)
{
    if (aes == NULL) {
        return BAD_FUNC_ARG;
    }
    if (keylen > sizeof(aes->key)) {
        return BAD_FUNC_ARG;
    }

    return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 1);
}

#if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER)
/* AES-CTR and AES-DIRECT need to use this for key setup */
/* This function allows key sizes that are not 128/192/256 bits */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    if (aes == NULL) {
        return BAD_FUNC_ARG;
    }
    if (keylen > sizeof(aes->key)) {
        return BAD_FUNC_ARG;
    }

    return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 0);
}
#endif /* WOLFSSL_AES_DIRECT || WOLFSSL_AES_COUNTER */
#endif /* wc_AesSetKey block */

/* wc_AesSetIV is shared between software and hardware */
int wc_AesSetIV(Aes* aes, const byte* iv)
{
    if (aes == NULL)
        return BAD_FUNC_ARG;

    if (iv)
        XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
    else
        XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);

#if defined(WOLFSSL_AES_COUNTER) || defined(WOLFSSL_AES_CFB) || \
    defined(WOLFSSL_AES_OFB) || defined(WOLFSSL_AES_XTS)
    /* Clear any unused bytes from last cipher op. */
    aes->left = 0;
#endif

    return 0;
}
/* AES-DIRECT */
#if defined(WOLFSSL_AES_DIRECT)
#if defined(HAVE_COLDFIRE_SEC)
#error "Coldfire SEC doesn't yet support AES direct"

#elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
    !defined(WOLFSSL_QNX_CAAM)
    /* implemented in wolfcrypt/src/port/caam/caam_aes.c */

#elif defined(WOLFSSL_AFALG)
    /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */

#elif defined(WOLFSSL_DEVCRYPTO_AES)
    /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */

#elif defined(WOLFSSL_LINUXKM) && defined(WOLFSSL_AESNI)

WARN_UNUSED_RESULT int wc_AesEncryptDirect(
    Aes* aes, byte* out, const byte* in)
{
    int ret;
    if (haveAESNI && aes->use_aesni)
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = wc_AesEncrypt(aes, in, out);
    if (haveAESNI && aes->use_aesni)
        RESTORE_VECTOR_REGISTERS();
    return ret;
}

/* vector reg save/restore is explicit in all below calls to
 * wc_Aes{En,De}cryptDirect(), so bypass the public version with a
 * macro.
 */
#define wc_AesEncryptDirect(aes, out, in) wc_AesEncrypt(aes, in, out)

#ifdef HAVE_AES_DECRYPT
/* Allow direct access to one block decrypt */
WARN_UNUSED_RESULT int wc_AesDecryptDirect(
    Aes* aes, byte* out, const byte* in)
{
    int ret;
    if (haveAESNI && aes->use_aesni)
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = wc_AesDecrypt(aes, in, out);
    if (haveAESNI && aes->use_aesni)
        RESTORE_VECTOR_REGISTERS();
    return ret;
}
#define wc_AesDecryptDirect(aes, out, in) wc_AesDecrypt(aes, in, out)
#endif /* HAVE_AES_DECRYPT */

#else

/* Allow direct access to one block encrypt */
int wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in)
{
    return wc_AesEncrypt(aes, in, out);
}
#ifdef HAVE_AES_DECRYPT
/* Allow direct access to one block decrypt */
int wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in)
{
    return wc_AesDecrypt(aes, in, out);
}
#endif /* HAVE_AES_DECRYPT */
#endif /* AES direct block */
#endif /* WOLFSSL_AES_DIRECT */
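
/* Typical one-block use of the direct API (a sketch only; assumes
 * WOLFSSL_AES_DIRECT is defined, and omits error handling, key material
 * handling and wc_AesFree()):
 *
 *     Aes aes;
 *     byte key[16];                              // caller-supplied secret
 *     byte in[AES_BLOCK_SIZE], out[AES_BLOCK_SIZE];
 *     if (wc_AesInit(&aes, NULL, INVALID_DEVID) == 0 &&
 *         wc_AesSetKeyDirect(&aes, key, sizeof(key), NULL,
 *                            AES_ENCRYPTION) == 0) {
 *         (void)wc_AesEncryptDirect(&aes, out, in);
 *     }
 */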
/* AES-CBC */
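/* In CBC mode each plaintext block is XORed with the previous ciphertext
 * block (the IV for the first block) before encryption. The implementations
 * below therefore save the last ciphertext block into aes->reg so a
 * subsequent call continues the chain. */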
  2941. #ifdef HAVE_AES_CBC
  2942. #if defined(STM32_CRYPTO)
  2943. #ifdef WOLFSSL_STM32_CUBEMX
  2944. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  2945. {
  2946. int ret = 0;
  2947. CRYP_HandleTypeDef hcryp;
  2948. word32 blocks = (sz / AES_BLOCK_SIZE);
  2949. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  2950. if (sz % AES_BLOCK_SIZE) {
  2951. return BAD_LENGTH_E;
  2952. }
  2953. #endif
  2954. if (blocks == 0)
  2955. return 0;
  2956. ret = wc_Stm32_Aes_Init(aes, &hcryp);
  2957. if (ret != 0)
  2958. return ret;
  2959. ret = wolfSSL_CryptHwMutexLock();
  2960. if (ret != 0) {
  2961. return ret;
  2962. }
  2963. #if defined(STM32_HAL_V2)
  2964. hcryp.Init.Algorithm = CRYP_AES_CBC;
  2965. ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
  2966. #elif defined(STM32_CRYPTO_AES_ONLY)
  2967. hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
  2968. hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC;
  2969. hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
  2970. #endif
  2971. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
  2972. HAL_CRYP_Init(&hcryp);
  2973. #if defined(STM32_HAL_V2)
  2974. ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
  2975. (uint32_t*)out, STM32_HAL_TIMEOUT);
  2976. #elif defined(STM32_CRYPTO_AES_ONLY)
  2977. ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
  2978. out, STM32_HAL_TIMEOUT);
  2979. #else
  2980. ret = HAL_CRYP_AESCBC_Encrypt(&hcryp, (uint8_t*)in,
  2981. blocks * AES_BLOCK_SIZE,
  2982. out, STM32_HAL_TIMEOUT);
  2983. #endif
  2984. if (ret != HAL_OK) {
  2985. ret = WC_TIMEOUT_E;
  2986. }
  2987. /* store iv for next call */
  2988. XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  2989. HAL_CRYP_DeInit(&hcryp);
  2990. wolfSSL_CryptHwMutexUnLock();
  2991. return ret;
  2992. }
  2993. #ifdef HAVE_AES_DECRYPT
  2994. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  2995. {
  2996. int ret = 0;
  2997. CRYP_HandleTypeDef hcryp;
  2998. word32 blocks = (sz / AES_BLOCK_SIZE);
  2999. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3000. if (sz % AES_BLOCK_SIZE) {
  3001. return BAD_LENGTH_E;
  3002. }
  3003. #endif
  3004. if (blocks == 0)
  3005. return 0;
  3006. ret = wc_Stm32_Aes_Init(aes, &hcryp);
  3007. if (ret != 0)
  3008. return ret;
  3009. ret = wolfSSL_CryptHwMutexLock();
  3010. if (ret != 0) {
  3011. return ret;
  3012. }
3013. /* in and out may be the same buffer; save the last ciphertext block (the next IV) before it is overwritten */
  3014. XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3015. #if defined(STM32_HAL_V2)
  3016. hcryp.Init.Algorithm = CRYP_AES_CBC;
  3017. ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
  3018. #elif defined(STM32_CRYPTO_AES_ONLY)
  3019. hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
  3020. hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC;
  3021. hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
  3022. #endif
  3023. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
  3024. HAL_CRYP_Init(&hcryp);
  3025. #if defined(STM32_HAL_V2)
  3026. ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
  3027. (uint32_t*)out, STM32_HAL_TIMEOUT);
  3028. #elif defined(STM32_CRYPTO_AES_ONLY)
  3029. ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
  3030. out, STM32_HAL_TIMEOUT);
  3031. #else
  3032. ret = HAL_CRYP_AESCBC_Decrypt(&hcryp, (uint8_t*)in,
  3033. blocks * AES_BLOCK_SIZE,
  3034. out, STM32_HAL_TIMEOUT);
  3035. #endif
  3036. if (ret != HAL_OK) {
  3037. ret = WC_TIMEOUT_E;
  3038. }
  3039. /* store iv for next call */
  3040. XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
  3041. HAL_CRYP_DeInit(&hcryp);
  3042. wolfSSL_CryptHwMutexUnLock();
  3043. return ret;
  3044. }
  3045. #endif /* HAVE_AES_DECRYPT */
  3046. #else /* Standard Peripheral Library */
  3047. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3048. {
  3049. int ret;
  3050. word32 *iv;
  3051. CRYP_InitTypeDef cryptInit;
  3052. CRYP_KeyInitTypeDef keyInit;
  3053. CRYP_IVInitTypeDef ivInit;
  3054. word32 blocks = (sz / AES_BLOCK_SIZE);
  3055. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3056. if (sz % AES_BLOCK_SIZE) {
  3057. return BAD_LENGTH_E;
  3058. }
  3059. #endif
  3060. if (blocks == 0)
  3061. return 0;
  3062. ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
  3063. if (ret != 0)
  3064. return ret;
  3065. ret = wolfSSL_CryptHwMutexLock();
  3066. if (ret != 0) {
  3067. return ret;
  3068. }
  3069. /* reset registers to their default values */
  3070. CRYP_DeInit();
  3071. /* set key */
  3072. CRYP_KeyInit(&keyInit);
  3073. /* set iv */
  3074. iv = aes->reg;
  3075. CRYP_IVStructInit(&ivInit);
  3076. ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
  3077. ivInit.CRYP_IV0Left = iv[0];
  3078. ivInit.CRYP_IV0Right = iv[1];
  3079. ivInit.CRYP_IV1Left = iv[2];
  3080. ivInit.CRYP_IV1Right = iv[3];
  3081. CRYP_IVInit(&ivInit);
  3082. /* set direction and mode */
  3083. cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
  3084. cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
  3085. CRYP_Init(&cryptInit);
  3086. /* enable crypto processor */
  3087. CRYP_Cmd(ENABLE);
  3088. while (blocks--) {
  3089. /* flush IN/OUT FIFOs */
  3090. CRYP_FIFOFlush();
  3091. CRYP_DataIn(*(uint32_t*)&in[0]);
  3092. CRYP_DataIn(*(uint32_t*)&in[4]);
  3093. CRYP_DataIn(*(uint32_t*)&in[8]);
  3094. CRYP_DataIn(*(uint32_t*)&in[12]);
  3095. /* wait until the complete message has been processed */
  3096. while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
  3097. *(uint32_t*)&out[0] = CRYP_DataOut();
  3098. *(uint32_t*)&out[4] = CRYP_DataOut();
  3099. *(uint32_t*)&out[8] = CRYP_DataOut();
  3100. *(uint32_t*)&out[12] = CRYP_DataOut();
  3101. /* store iv for next call */
  3102. XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3103. sz -= AES_BLOCK_SIZE;
  3104. in += AES_BLOCK_SIZE;
  3105. out += AES_BLOCK_SIZE;
  3106. }
  3107. /* disable crypto processor */
  3108. CRYP_Cmd(DISABLE);
  3109. wolfSSL_CryptHwMutexUnLock();
  3110. return ret;
  3111. }
  3112. #ifdef HAVE_AES_DECRYPT
  3113. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3114. {
  3115. int ret;
  3116. word32 *iv;
  3117. CRYP_InitTypeDef cryptInit;
  3118. CRYP_KeyInitTypeDef keyInit;
  3119. CRYP_IVInitTypeDef ivInit;
  3120. word32 blocks = (sz / AES_BLOCK_SIZE);
  3121. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3122. if (sz % AES_BLOCK_SIZE) {
  3123. return BAD_LENGTH_E;
  3124. }
  3125. #endif
  3126. if (blocks == 0)
  3127. return 0;
  3128. ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
  3129. if (ret != 0)
  3130. return ret;
  3131. ret = wolfSSL_CryptHwMutexLock();
  3132. if (ret != 0) {
  3133. return ret;
  3134. }
3135. /* in and out may be the same buffer; save the last ciphertext block (the next IV) before it is overwritten */
  3136. XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3137. /* reset registers to their default values */
  3138. CRYP_DeInit();
  3139. /* set direction and key */
  3140. CRYP_KeyInit(&keyInit);
  3141. cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
  3142. cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
  3143. CRYP_Init(&cryptInit);
  3144. /* enable crypto processor */
  3145. CRYP_Cmd(ENABLE);
  3146. /* wait until key has been prepared */
  3147. while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
  3148. /* set direction and mode */
  3149. cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
  3150. cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
  3151. CRYP_Init(&cryptInit);
  3152. /* set iv */
  3153. iv = aes->reg;
  3154. CRYP_IVStructInit(&ivInit);
  3155. ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
  3156. ivInit.CRYP_IV0Left = iv[0];
  3157. ivInit.CRYP_IV0Right = iv[1];
  3158. ivInit.CRYP_IV1Left = iv[2];
  3159. ivInit.CRYP_IV1Right = iv[3];
  3160. CRYP_IVInit(&ivInit);
  3161. /* enable crypto processor */
  3162. CRYP_Cmd(ENABLE);
  3163. while (blocks--) {
  3164. /* flush IN/OUT FIFOs */
  3165. CRYP_FIFOFlush();
  3166. CRYP_DataIn(*(uint32_t*)&in[0]);
  3167. CRYP_DataIn(*(uint32_t*)&in[4]);
  3168. CRYP_DataIn(*(uint32_t*)&in[8]);
  3169. CRYP_DataIn(*(uint32_t*)&in[12]);
  3170. /* wait until the complete message has been processed */
  3171. while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
  3172. *(uint32_t*)&out[0] = CRYP_DataOut();
  3173. *(uint32_t*)&out[4] = CRYP_DataOut();
  3174. *(uint32_t*)&out[8] = CRYP_DataOut();
  3175. *(uint32_t*)&out[12] = CRYP_DataOut();
  3176. /* store iv for next call */
  3177. XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
  3178. in += AES_BLOCK_SIZE;
  3179. out += AES_BLOCK_SIZE;
  3180. }
  3181. /* disable crypto processor */
  3182. CRYP_Cmd(DISABLE);
  3183. wolfSSL_CryptHwMutexUnLock();
  3184. return ret;
  3185. }
  3186. #endif /* HAVE_AES_DECRYPT */
  3187. #endif /* WOLFSSL_STM32_CUBEMX */
  3188. #elif defined(HAVE_COLDFIRE_SEC)
  3189. static WARN_UNUSED_RESULT int wc_AesCbcCrypt(
  3190. Aes* aes, byte* po, const byte* pi, word32 sz, word32 descHeader)
  3191. {
  3192. #ifdef DEBUG_WOLFSSL
  3193. int i; int stat1, stat2; int ret;
  3194. #endif
  3195. int size;
  3196. volatile int v;
  3197. if ((pi == NULL) || (po == NULL))
3198. return BAD_FUNC_ARG; /* wrong pointer */
  3199. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3200. if (sz % AES_BLOCK_SIZE) {
  3201. return BAD_LENGTH_E;
  3202. }
  3203. #endif
  3204. wc_LockMutex(&Mutex_AesSEC);
  3205. /* Set descriptor for SEC */
  3206. secDesc->length1 = 0x0;
  3207. secDesc->pointer1 = NULL;
  3208. secDesc->length2 = AES_BLOCK_SIZE;
  3209. secDesc->pointer2 = (byte *)secReg; /* Initial Vector */
  3210. switch(aes->rounds) {
  3211. case 10: secDesc->length3 = 16; break;
  3212. case 12: secDesc->length3 = 24; break;
  3213. case 14: secDesc->length3 = 32; break;
  3214. }
  3215. XMEMCPY(secKey, aes->key, secDesc->length3);
  3216. secDesc->pointer3 = (byte *)secKey;
  3217. secDesc->pointer4 = AESBuffIn;
  3218. secDesc->pointer5 = AESBuffOut;
  3219. secDesc->length6 = 0x0;
  3220. secDesc->pointer6 = NULL;
  3221. secDesc->length7 = 0x0;
  3222. secDesc->pointer7 = NULL;
  3223. secDesc->nextDescriptorPtr = NULL;
  3224. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3225. size = AES_BUFFER_SIZE;
  3226. #endif
  3227. while (sz) {
  3228. secDesc->header = descHeader;
  3229. XMEMCPY(secReg, aes->reg, AES_BLOCK_SIZE);
  3230. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3231. sz -= AES_BUFFER_SIZE;
  3232. #else
  3233. if (sz < AES_BUFFER_SIZE) {
  3234. size = sz;
  3235. sz = 0;
  3236. } else {
  3237. size = AES_BUFFER_SIZE;
  3238. sz -= AES_BUFFER_SIZE;
  3239. }
  3240. #endif
  3241. secDesc->length4 = size;
  3242. secDesc->length5 = size;
  3243. XMEMCPY(AESBuffIn, pi, size);
  3244. if(descHeader == SEC_DESC_AES_CBC_DECRYPT) {
  3245. XMEMCPY((void*)aes->tmp, (void*)&(pi[size-AES_BLOCK_SIZE]),
  3246. AES_BLOCK_SIZE);
  3247. }
  3248. /* Point SEC to the location of the descriptor */
  3249. MCF_SEC_FR0 = (uint32)secDesc;
  3250. /* Initialize SEC and wait for encryption to complete */
  3251. MCF_SEC_CCCR0 = 0x0000001a;
  3252. /* poll SISR to determine when channel is complete */
  3253. v=0;
  3254. while ((secDesc->header>> 24) != 0xff) v++;
  3255. #ifdef DEBUG_WOLFSSL
  3256. ret = MCF_SEC_SISRH;
  3257. stat1 = MCF_SEC_AESSR;
  3258. stat2 = MCF_SEC_AESISR;
  3259. if (ret & 0xe0000000) {
  3260. db_printf("Aes_Cbc(i=%d):ISRH=%08x, AESSR=%08x, "
  3261. "AESISR=%08x\n", i, ret, stat1, stat2);
  3262. }
  3263. #endif
  3264. XMEMCPY(po, AESBuffOut, size);
  3265. if (descHeader == SEC_DESC_AES_CBC_ENCRYPT) {
  3266. XMEMCPY((void*)aes->reg, (void*)&(po[size-AES_BLOCK_SIZE]),
  3267. AES_BLOCK_SIZE);
  3268. } else {
  3269. XMEMCPY((void*)aes->reg, (void*)aes->tmp, AES_BLOCK_SIZE);
  3270. }
  3271. pi += size;
  3272. po += size;
  3273. }
  3274. wc_UnLockMutex(&Mutex_AesSEC);
  3275. return 0;
  3276. }
  3277. int wc_AesCbcEncrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
  3278. {
  3279. return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_ENCRYPT));
  3280. }
  3281. #ifdef HAVE_AES_DECRYPT
  3282. int wc_AesCbcDecrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
  3283. {
  3284. return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_DECRYPT));
  3285. }
  3286. #endif /* HAVE_AES_DECRYPT */
  3287. #elif defined(FREESCALE_LTC)
  3288. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3289. {
  3290. word32 keySize;
  3291. status_t status;
  3292. byte *iv, *enc_key;
  3293. word32 blocks = (sz / AES_BLOCK_SIZE);
  3294. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3295. if (sz % AES_BLOCK_SIZE) {
  3296. return BAD_LENGTH_E;
  3297. }
  3298. #endif
  3299. if (blocks == 0)
  3300. return 0;
  3301. iv = (byte*)aes->reg;
  3302. enc_key = (byte*)aes->key;
  3303. status = wc_AesGetKeySize(aes, &keySize);
  3304. if (status != 0) {
  3305. return status;
  3306. }
  3307. status = wolfSSL_CryptHwMutexLock();
  3308. if (status != 0)
  3309. return status;
  3310. status = LTC_AES_EncryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
  3311. iv, enc_key, keySize);
  3312. wolfSSL_CryptHwMutexUnLock();
  3313. /* store iv for next call */
  3314. if (status == kStatus_Success) {
  3315. XMEMCPY(iv, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3316. }
  3317. return (status == kStatus_Success) ? 0 : -1;
  3318. }
  3319. #ifdef HAVE_AES_DECRYPT
  3320. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3321. {
  3322. word32 keySize;
  3323. status_t status;
  3324. byte* iv, *dec_key;
  3325. byte temp_block[AES_BLOCK_SIZE];
  3326. word32 blocks = (sz / AES_BLOCK_SIZE);
  3327. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3328. if (sz % AES_BLOCK_SIZE) {
  3329. return BAD_LENGTH_E;
  3330. }
  3331. #endif
  3332. if (blocks == 0)
  3333. return 0;
  3334. iv = (byte*)aes->reg;
  3335. dec_key = (byte*)aes->key;
  3336. status = wc_AesGetKeySize(aes, &keySize);
  3337. if (status != 0) {
  3338. return status;
  3339. }
  3340. /* get IV for next call */
  3341. XMEMCPY(temp_block, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3342. status = wolfSSL_CryptHwMutexLock();
  3343. if (status != 0)
  3344. return status;
  3345. status = LTC_AES_DecryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
  3346. iv, dec_key, keySize, kLTC_EncryptKey);
  3347. wolfSSL_CryptHwMutexUnLock();
  3348. /* store IV for next call */
  3349. if (status == kStatus_Success) {
  3350. XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
  3351. }
  3352. return (status == kStatus_Success) ? 0 : -1;
  3353. }
  3354. #endif /* HAVE_AES_DECRYPT */
  3355. #elif defined(FREESCALE_MMCAU)
  3356. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3357. {
  3358. int i;
  3359. int offset = 0;
  3360. byte *iv;
  3361. byte temp_block[AES_BLOCK_SIZE];
  3362. word32 blocks = (sz / AES_BLOCK_SIZE);
  3363. int ret;
  3364. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3365. if (sz % AES_BLOCK_SIZE) {
  3366. return BAD_LENGTH_E;
  3367. }
  3368. #endif
  3369. if (blocks == 0)
  3370. return 0;
  3371. iv = (byte*)aes->reg;
  3372. while (blocks--) {
  3373. XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);
  3374. /* XOR block with IV for CBC */
  3375. for (i = 0; i < AES_BLOCK_SIZE; i++)
  3376. temp_block[i] ^= iv[i];
  3377. ret = wc_AesEncrypt(aes, temp_block, out + offset);
  3378. if (ret != 0)
  3379. return ret;
  3380. offset += AES_BLOCK_SIZE;
  3381. /* store IV for next block */
  3382. XMEMCPY(iv, out + offset - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3383. }
  3384. return 0;
  3385. }
  3386. #ifdef HAVE_AES_DECRYPT
  3387. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3388. {
3389. int i, ret;
  3390. int offset = 0;
  3391. byte* iv;
  3392. byte temp_block[AES_BLOCK_SIZE];
  3393. word32 blocks = (sz / AES_BLOCK_SIZE);
  3394. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3395. if (sz % AES_BLOCK_SIZE) {
  3396. return BAD_LENGTH_E;
  3397. }
  3398. #endif
  3399. if (blocks == 0)
  3400. return 0;
  3401. iv = (byte*)aes->reg;
  3402. while (blocks--) {
  3403. XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);
3404. ret = wc_AesDecrypt(aes, in + offset, out + offset);
if (ret != 0)
return ret;
  3405. /* XOR block with IV for CBC */
  3406. for (i = 0; i < AES_BLOCK_SIZE; i++)
  3407. (out + offset)[i] ^= iv[i];
  3408. /* store IV for next block */
  3409. XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
  3410. offset += AES_BLOCK_SIZE;
  3411. }
  3412. return 0;
  3413. }
  3414. #endif /* HAVE_AES_DECRYPT */
  3415. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  3416. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3417. {
  3418. int ret;
  3419. if (sz == 0)
  3420. return 0;
  3421. /* hardware fails on input that is not a multiple of AES block size */
  3422. if (sz % AES_BLOCK_SIZE != 0) {
  3423. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3424. return BAD_LENGTH_E;
  3425. #else
  3426. return BAD_FUNC_ARG;
  3427. #endif
  3428. }
  3429. ret = wc_Pic32AesCrypt(
  3430. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  3431. out, in, sz, PIC32_ENCRYPTION,
  3432. PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);
  3433. /* store iv for next call */
  3434. if (ret == 0) {
  3435. XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3436. }
  3437. return ret;
  3438. }
  3439. #ifdef HAVE_AES_DECRYPT
  3440. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3441. {
  3442. int ret;
  3443. byte scratch[AES_BLOCK_SIZE];
  3444. if (sz == 0)
  3445. return 0;
  3446. /* hardware fails on input that is not a multiple of AES block size */
  3447. if (sz % AES_BLOCK_SIZE != 0) {
  3448. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3449. return BAD_LENGTH_E;
  3450. #else
  3451. return BAD_FUNC_ARG;
  3452. #endif
  3453. }
  3454. XMEMCPY(scratch, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3455. ret = wc_Pic32AesCrypt(
  3456. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  3457. out, in, sz, PIC32_DECRYPTION,
  3458. PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);
  3459. /* store iv for next call */
  3460. if (ret == 0) {
  3461. XMEMCPY((byte*)aes->reg, scratch, AES_BLOCK_SIZE);
  3462. }
  3463. return ret;
  3464. }
  3465. #endif /* HAVE_AES_DECRYPT */
  3466. #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
  3467. !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES)
  3468. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3469. {
  3470. return wc_esp32AesCbcEncrypt(aes, out, in, sz);
  3471. }
  3472. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3473. {
  3474. return wc_esp32AesCbcDecrypt(aes, out, in, sz);
  3475. }
  3476. #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
  3477. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3478. {
  3479. return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
  3480. }
  3481. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3482. {
  3483. return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
  3484. }
  3485. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  3486. !defined(WOLFSSL_QNX_CAAM)
  3487. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  3488. #elif defined(WOLFSSL_AFALG)
  3489. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  3490. #elif defined(WOLFSSL_KCAPI_AES) && !defined(WOLFSSL_NO_KCAPI_AES_CBC)
  3491. /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  3492. #elif defined(WOLFSSL_DEVCRYPTO_CBC)
  3493. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  3494. #elif defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
  3495. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3496. {
  3497. return se050_aes_crypt(aes, in, out, sz, AES_ENCRYPTION,
  3498. kAlgorithm_SSS_AES_CBC);
  3499. }
  3500. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3501. {
  3502. return se050_aes_crypt(aes, in, out, sz, AES_DECRYPTION,
  3503. kAlgorithm_SSS_AES_CBC);
  3504. }
  3505. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
3506. /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
  3507. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  3508. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  3509. #else
  3510. /* Software AES - CBC Encrypt */
  3511. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3512. {
  3513. word32 blocks;
  3514. if (aes == NULL || out == NULL || in == NULL) {
  3515. return BAD_FUNC_ARG;
  3516. }
  3517. if (sz == 0) {
  3518. return 0;
  3519. }
  3520. blocks = sz / AES_BLOCK_SIZE;
  3521. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3522. if (sz % AES_BLOCK_SIZE) {
  3523. WOLFSSL_ERROR_VERBOSE(BAD_LENGTH_E);
  3524. return BAD_LENGTH_E;
  3525. }
  3526. #endif
  3527. #ifdef WOLFSSL_IMXRT_DCP
  3528. /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
  3529. if (aes->keylen == 16)
  3530. return DCPAesCbcEncrypt(aes, out, in, sz);
  3531. #endif
  3532. #ifdef WOLF_CRYPTO_CB
  3533. if (aes->devId != INVALID_DEVID) {
  3534. int crypto_cb_ret = wc_CryptoCb_AesCbcEncrypt(aes, out, in, sz);
  3535. if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
  3536. return crypto_cb_ret;
  3537. /* fall-through when unavailable */
  3538. }
  3539. #endif
  3540. #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
  3541. /* if async and byte count above threshold */
  3542. if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
  3543. sz >= WC_ASYNC_THRESH_AES_CBC) {
  3544. #if defined(HAVE_CAVIUM)
  3545. return NitroxAesCbcEncrypt(aes, out, in, sz);
  3546. #elif defined(HAVE_INTEL_QA)
  3547. return IntelQaSymAesCbcEncrypt(&aes->asyncDev, out, in, sz,
  3548. (const byte*)aes->devKey, aes->keylen,
  3549. (byte*)aes->reg, AES_BLOCK_SIZE);
  3550. #else /* WOLFSSL_ASYNC_CRYPT_TEST */
  3551. if (wc_AsyncTestInit(&aes->asyncDev, ASYNC_TEST_AES_CBC_ENCRYPT)) {
  3552. WC_ASYNC_TEST* testDev = &aes->asyncDev.test;
  3553. testDev->aes.aes = aes;
  3554. testDev->aes.out = out;
  3555. testDev->aes.in = in;
  3556. testDev->aes.sz = sz;
  3557. return WC_PENDING_E;
  3558. }
  3559. #endif
  3560. }
  3561. #endif /* WOLFSSL_ASYNC_CRYPT */
  3562. #ifdef WOLFSSL_AESNI
  3563. if (haveAESNI) {
  3564. #ifdef DEBUG_AESNI
  3565. printf("about to aes cbc encrypt\n");
  3566. printf("in = %p\n", in);
  3567. printf("out = %p\n", out);
  3568. printf("aes->key = %p\n", aes->key);
  3569. printf("aes->reg = %p\n", aes->reg);
  3570. printf("aes->rounds = %d\n", aes->rounds);
  3571. printf("sz = %d\n", sz);
  3572. #endif
  3573. /* check alignment, decrypt doesn't need alignment */
  3574. if ((wc_ptr_t)in % AESNI_ALIGN) {
  3575. #ifndef NO_WOLFSSL_ALLOC_ALIGN
  3576. byte* tmp = (byte*)XMALLOC(sz + AES_BLOCK_SIZE + AESNI_ALIGN,
  3577. aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  3578. byte* tmp_align;
  3579. if (tmp == NULL) return MEMORY_E;
  3580. tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));
  3581. XMEMCPY(tmp_align, in, sz);
  3582. SAVE_VECTOR_REGISTERS(XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); return _svr_ret;);
  3583. AES_CBC_encrypt(tmp_align, tmp_align, (byte*)aes->reg, sz,
  3584. (byte*)aes->key, aes->rounds);
  3585. RESTORE_VECTOR_REGISTERS();
  3586. /* store iv for next call */
  3587. XMEMCPY(aes->reg, tmp_align + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3588. XMEMCPY(out, tmp_align, sz);
  3589. XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  3590. return 0;
  3591. #else
  3592. WOLFSSL_MSG("AES-CBC encrypt with bad alignment");
  3593. WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
  3594. return BAD_ALIGN_E;
  3595. #endif
  3596. }
  3597. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  3598. AES_CBC_encrypt(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
  3599. aes->rounds);
  3600. RESTORE_VECTOR_REGISTERS();
  3601. /* store iv for next call */
  3602. XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3603. return 0;
  3604. }
  3605. #endif
  3606. while (blocks--) {
  3607. int ret;
  3608. xorbuf((byte*)aes->reg, in, AES_BLOCK_SIZE);
  3609. ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->reg);
  3610. if (ret != 0)
  3611. return ret;
  3612. XMEMCPY(out, aes->reg, AES_BLOCK_SIZE);
  3613. out += AES_BLOCK_SIZE;
  3614. in += AES_BLOCK_SIZE;
  3615. }
  3616. return 0;
  3617. }
  3618. #ifdef HAVE_AES_DECRYPT
  3619. /* Software AES - CBC Decrypt */
  3620. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3621. {
  3622. word32 blocks;
  3623. if (aes == NULL || out == NULL || in == NULL) {
  3624. return BAD_FUNC_ARG;
  3625. }
  3626. if (sz == 0) {
  3627. return 0;
  3628. }
  3629. blocks = sz / AES_BLOCK_SIZE;
  3630. if (sz % AES_BLOCK_SIZE) {
  3631. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3632. return BAD_LENGTH_E;
  3633. #else
  3634. return BAD_FUNC_ARG;
  3635. #endif
  3636. }
  3637. #ifdef WOLFSSL_IMXRT_DCP
  3638. /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
  3639. if (aes->keylen == 16)
  3640. return DCPAesCbcDecrypt(aes, out, in, sz);
  3641. #endif
  3642. #ifdef WOLF_CRYPTO_CB
  3643. if (aes->devId != INVALID_DEVID) {
  3644. int crypto_cb_ret = wc_CryptoCb_AesCbcDecrypt(aes, out, in, sz);
  3645. if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
  3646. return crypto_cb_ret;
  3647. /* fall-through when unavailable */
  3648. }
  3649. #endif
  3650. #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
  3651. /* if async and byte count above threshold */
  3652. if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
  3653. sz >= WC_ASYNC_THRESH_AES_CBC) {
  3654. #if defined(HAVE_CAVIUM)
  3655. return NitroxAesCbcDecrypt(aes, out, in, sz);
  3656. #elif defined(HAVE_INTEL_QA)
  3657. return IntelQaSymAesCbcDecrypt(&aes->asyncDev, out, in, sz,
  3658. (const byte*)aes->devKey, aes->keylen,
  3659. (byte*)aes->reg, AES_BLOCK_SIZE);
  3660. #else /* WOLFSSL_ASYNC_CRYPT_TEST */
  3661. if (wc_AsyncTestInit(&aes->asyncDev, ASYNC_TEST_AES_CBC_DECRYPT)) {
  3662. WC_ASYNC_TEST* testDev = &aes->asyncDev.test;
  3663. testDev->aes.aes = aes;
  3664. testDev->aes.out = out;
  3665. testDev->aes.in = in;
  3666. testDev->aes.sz = sz;
  3667. return WC_PENDING_E;
  3668. }
  3669. #endif
  3670. }
  3671. #endif
  3672. #ifdef WOLFSSL_AESNI
  3673. if (haveAESNI) {
  3674. #ifdef DEBUG_AESNI
  3675. printf("about to aes cbc decrypt\n");
  3676. printf("in = %p\n", in);
  3677. printf("out = %p\n", out);
  3678. printf("aes->key = %p\n", aes->key);
  3679. printf("aes->reg = %p\n", aes->reg);
  3680. printf("aes->rounds = %d\n", aes->rounds);
  3681. printf("sz = %d\n", sz);
  3682. #endif
3683. /* in and out may be the same buffer; save the last ciphertext block (the next IV) before it is overwritten */
  3684. XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3685. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  3686. #if defined(WOLFSSL_AESNI_BY4)
  3687. AES_CBC_decrypt_by4(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
  3688. aes->rounds);
  3689. #elif defined(WOLFSSL_AESNI_BY6)
  3690. AES_CBC_decrypt_by6(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
  3691. aes->rounds);
  3692. #else /* WOLFSSL_AESNI_BYx */
  3693. AES_CBC_decrypt_by8(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
  3694. aes->rounds);
  3695. #endif /* WOLFSSL_AESNI_BYx */
  3696. /* store iv for next call */
  3697. RESTORE_VECTOR_REGISTERS();
  3698. XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
  3699. return 0;
  3700. }
  3701. #endif
  3702. while (blocks--) {
  3703. int ret;
  3704. XMEMCPY(aes->tmp, in, AES_BLOCK_SIZE);
  3705. ret = wc_AesDecrypt(aes, (byte*)aes->tmp, out);
  3706. if (ret != 0)
  3707. return ret;
  3708. xorbuf(out, (byte*)aes->reg, AES_BLOCK_SIZE);
  3709. /* store iv for next call */
  3710. XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
  3711. out += AES_BLOCK_SIZE;
  3712. in += AES_BLOCK_SIZE;
  3713. }
  3714. return 0;
  3715. }
  3716. #endif
  3717. #endif /* AES-CBC block */
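/* Usage sketch (illustrative only, kept out of the build with #if 0):
 * CBC encrypt then decrypt of two blocks. Key and IV values are
 * placeholders. sz must be a multiple of AES_BLOCK_SIZE; the Aes object
 * chains the IV forward in aes->reg between calls. */
#if 0
static int aes_cbc_example(void)
{
    Aes enc, dec;
    byte key[16] = {0};                  /* hypothetical key */
    byte iv[AES_BLOCK_SIZE] = {0};       /* hypothetical IV */
    byte msg[2 * AES_BLOCK_SIZE] = {0};
    byte cipher[sizeof(msg)];
    byte plain[sizeof(msg)];
    int ret = wc_AesInit(&enc, NULL, INVALID_DEVID);
    if (ret == 0)
        ret = wc_AesInit(&dec, NULL, INVALID_DEVID);
    if (ret == 0)
        ret = wc_AesSetKey(&enc, key, sizeof(key), iv, AES_ENCRYPTION);
    if (ret == 0)
        ret = wc_AesCbcEncrypt(&enc, cipher, msg, sizeof(msg));
    if (ret == 0)
        ret = wc_AesSetKey(&dec, key, sizeof(key), iv, AES_DECRYPTION);
    if (ret == 0)
        ret = wc_AesCbcDecrypt(&dec, plain, cipher, sizeof(cipher));
    wc_AesFree(&enc);
    wc_AesFree(&dec);
    return ret;
}
#endif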
  3718. #endif /* HAVE_AES_CBC */
  3719. /* AES-CTR */
  3720. #if defined(WOLFSSL_AES_COUNTER)
  3721. #ifdef STM32_CRYPTO
  3722. #define NEED_AES_CTR_SOFT
  3723. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
  3724. int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
  3725. {
  3726. int ret = 0;
  3727. #ifdef WOLFSSL_STM32_CUBEMX
  3728. CRYP_HandleTypeDef hcryp;
  3729. #ifdef STM32_HAL_V2
  3730. word32 iv[AES_BLOCK_SIZE/sizeof(word32)];
  3731. #endif
  3732. #else
  3733. word32 *iv;
  3734. CRYP_InitTypeDef cryptInit;
  3735. CRYP_KeyInitTypeDef keyInit;
  3736. CRYP_IVInitTypeDef ivInit;
  3737. #endif
  3738. #ifdef WOLFSSL_STM32_CUBEMX
  3739. ret = wc_Stm32_Aes_Init(aes, &hcryp);
  3740. if (ret != 0) {
  3741. return ret;
  3742. }
  3743. ret = wolfSSL_CryptHwMutexLock();
  3744. if (ret != 0) {
  3745. return ret;
  3746. }
  3747. #if defined(STM32_HAL_V2)
  3748. hcryp.Init.Algorithm = CRYP_AES_CTR;
  3749. ByteReverseWords(iv, aes->reg, AES_BLOCK_SIZE);
  3750. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)iv;
  3751. #elif defined(STM32_CRYPTO_AES_ONLY)
  3752. hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
  3753. hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CTR;
  3754. hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
  3755. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
  3756. #else
  3757. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
  3758. #endif
  3759. HAL_CRYP_Init(&hcryp);
  3760. #if defined(STM32_HAL_V2)
  3761. ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, AES_BLOCK_SIZE,
  3762. (uint32_t*)out, STM32_HAL_TIMEOUT);
  3763. #elif defined(STM32_CRYPTO_AES_ONLY)
  3764. ret = HAL_CRYPEx_AES(&hcryp, (byte*)in, AES_BLOCK_SIZE,
  3765. out, STM32_HAL_TIMEOUT);
  3766. #else
  3767. ret = HAL_CRYP_AESCTR_Encrypt(&hcryp, (byte*)in, AES_BLOCK_SIZE,
  3768. out, STM32_HAL_TIMEOUT);
  3769. #endif
  3770. if (ret != HAL_OK) {
  3771. ret = WC_TIMEOUT_E;
  3772. }
  3773. HAL_CRYP_DeInit(&hcryp);
  3774. #else /* Standard Peripheral Library */
  3775. ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
  3776. if (ret != 0) {
  3777. return ret;
  3778. }
  3779. ret = wolfSSL_CryptHwMutexLock();
  3780. if (ret != 0) {
  3781. return ret;
  3782. }
  3783. /* reset registers to their default values */
  3784. CRYP_DeInit();
  3785. /* set key */
  3786. CRYP_KeyInit(&keyInit);
  3787. /* set iv */
  3788. iv = aes->reg;
  3789. CRYP_IVStructInit(&ivInit);
  3790. ivInit.CRYP_IV0Left = ByteReverseWord32(iv[0]);
  3791. ivInit.CRYP_IV0Right = ByteReverseWord32(iv[1]);
  3792. ivInit.CRYP_IV1Left = ByteReverseWord32(iv[2]);
  3793. ivInit.CRYP_IV1Right = ByteReverseWord32(iv[3]);
  3794. CRYP_IVInit(&ivInit);
  3795. /* set direction and mode */
  3796. cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
  3797. cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CTR;
  3798. CRYP_Init(&cryptInit);
  3799. /* enable crypto processor */
  3800. CRYP_Cmd(ENABLE);
  3801. /* flush IN/OUT FIFOs */
  3802. CRYP_FIFOFlush();
  3803. CRYP_DataIn(*(uint32_t*)&in[0]);
  3804. CRYP_DataIn(*(uint32_t*)&in[4]);
  3805. CRYP_DataIn(*(uint32_t*)&in[8]);
  3806. CRYP_DataIn(*(uint32_t*)&in[12]);
  3807. /* wait until the complete message has been processed */
  3808. while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
  3809. *(uint32_t*)&out[0] = CRYP_DataOut();
  3810. *(uint32_t*)&out[4] = CRYP_DataOut();
  3811. *(uint32_t*)&out[8] = CRYP_DataOut();
  3812. *(uint32_t*)&out[12] = CRYP_DataOut();
  3813. /* disable crypto processor */
  3814. CRYP_Cmd(DISABLE);
  3815. #endif /* WOLFSSL_STM32_CUBEMX */
  3816. wolfSSL_CryptHwMutexUnLock();
  3817. return ret;
  3818. }
  3819. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  3820. #define NEED_AES_CTR_SOFT
  3821. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
  3822. int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
  3823. {
  3824. word32 tmpIv[AES_BLOCK_SIZE / sizeof(word32)];
  3825. XMEMCPY(tmpIv, aes->reg, AES_BLOCK_SIZE);
  3826. return wc_Pic32AesCrypt(
  3827. aes->key, aes->keylen, tmpIv, AES_BLOCK_SIZE,
  3828. out, in, AES_BLOCK_SIZE,
  3829. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCTR);
  3830. }
  3831. #elif defined(HAVE_COLDFIRE_SEC)
  3832. #error "Coldfire SEC doesn't currently support AES-CTR mode"
  3833. #elif defined(FREESCALE_LTC)
  3834. int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3835. {
  3836. int ret = 0;
  3837. word32 keySize;
  3838. byte *iv, *enc_key;
  3839. byte* tmp;
  3840. if (aes == NULL || out == NULL || in == NULL) {
  3841. return BAD_FUNC_ARG;
  3842. }
  3843. /* consume any unused bytes left in aes->tmp */
  3844. tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left;
  3845. while (aes->left && sz) {
  3846. *(out++) = *(in++) ^ *(tmp++);
  3847. aes->left--;
  3848. sz--;
  3849. }
  3850. if (sz) {
  3851. iv = (byte*)aes->reg;
  3852. enc_key = (byte*)aes->key;
  3853. ret = wc_AesGetKeySize(aes, &keySize);
  3854. if (ret != 0)
  3855. return ret;
  3856. ret = wolfSSL_CryptHwMutexLock();
  3857. if (ret != 0)
  3858. return ret;
  3859. LTC_AES_CryptCtr(LTC_BASE, in, out, sz,
  3860. iv, enc_key, keySize, (byte*)aes->tmp,
  3861. (uint32_t*)&aes->left);
  3862. wolfSSL_CryptHwMutexUnLock();
  3863. }
  3864. return ret;
  3865. }
  3866. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  3867. !defined(WOLFSSL_QNX_CAAM)
  3868. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  3869. #elif defined(WOLFSSL_AFALG)
  3870. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  3871. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  3872. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  3873. #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
  3874. !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES)
3875. /* esp32 hardware doesn't support CTR mode, */
3876. /* so use hardware AES encryption plus a software CTR implementation */
  3877. #define NEED_AES_CTR_SOFT
  3878. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  3879. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  3880. #else
  3881. /* Use software based AES counter */
  3882. #define NEED_AES_CTR_SOFT
  3883. #endif
  3884. #ifdef NEED_AES_CTR_SOFT
  3885. /* Increment AES counter */
  3886. static WC_INLINE void IncrementAesCounter(byte* inOutCtr)
  3887. {
  3888. /* in network byte order so start at end and work back */
  3889. int i;
  3890. for (i = AES_BLOCK_SIZE - 1; i >= 0; i--) {
  3891. if (++inOutCtr[i]) /* we're done unless we overflow */
  3892. return;
  3893. }
  3894. }
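/* Example: a counter ending in ... 0x12 0xff 0xff increments to
 * ... 0x13 0x00 0x00; the loop stops at the first byte that does not
 * wrap around to zero. */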
  3895. /* Software AES - CTR Encrypt */
  3896. int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3897. {
  3898. byte* tmp;
  3899. byte scratch[AES_BLOCK_SIZE];
  3900. int ret;
  3901. if (aes == NULL || out == NULL || in == NULL) {
  3902. return BAD_FUNC_ARG;
  3903. }
  3904. #ifdef WOLF_CRYPTO_CB
  3905. if (aes->devId != INVALID_DEVID) {
  3906. int crypto_cb_ret = wc_CryptoCb_AesCtrEncrypt(aes, out, in, sz);
  3907. if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
  3908. return crypto_cb_ret;
  3909. /* fall-through when unavailable */
  3910. }
  3911. #endif
  3912. /* consume any unused bytes left in aes->tmp */
  3913. tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left;
  3914. while (aes->left && sz) {
  3915. *(out++) = *(in++) ^ *(tmp++);
  3916. aes->left--;
  3917. sz--;
  3918. }
  3919. #ifdef WOLFSSL_CHECK_MEM_ZERO
  3920. wc_MemZero_Add("wc_AesCtrEncrypt scratch", scratch, AES_BLOCK_SIZE);
  3921. #endif
  3922. #if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
  3923. !defined(XTRANSFORM_AESCTRBLOCK)
  3924. if (in != out && sz >= AES_BLOCK_SIZE) {
  3925. int blocks = sz / AES_BLOCK_SIZE;
  3926. byte* counter = (byte*)aes->reg;
  3927. byte* c = out;
  3928. while (blocks--) {
  3929. XMEMCPY(c, counter, AES_BLOCK_SIZE);
  3930. c += AES_BLOCK_SIZE;
  3931. IncrementAesCounter(counter);
  3932. }
  3933. /* reset number of blocks and then do encryption */
  3934. blocks = sz / AES_BLOCK_SIZE;
3935. ret = wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
if (ret != 0) {
ForceZero(scratch, AES_BLOCK_SIZE);
#ifdef WOLFSSL_CHECK_MEM_ZERO
wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
#endif
return ret;
}
  3936. xorbuf(out, in, AES_BLOCK_SIZE * blocks);
  3937. in += AES_BLOCK_SIZE * blocks;
  3938. out += AES_BLOCK_SIZE * blocks;
  3939. sz -= blocks * AES_BLOCK_SIZE;
  3940. }
  3941. else
  3942. #endif
  3943. {
  3944. /* do as many block size ops as possible */
  3945. while (sz >= AES_BLOCK_SIZE) {
  3946. #ifdef XTRANSFORM_AESCTRBLOCK
  3947. XTRANSFORM_AESCTRBLOCK(aes, out, in);
  3948. #else
  3949. ret = wc_AesEncrypt(aes, (byte*)aes->reg, scratch);
  3950. if (ret != 0) {
  3951. ForceZero(scratch, AES_BLOCK_SIZE);
  3952. #ifdef WOLFSSL_CHECK_MEM_ZERO
  3953. wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
  3954. #endif
  3955. return ret;
  3956. }
  3957. xorbuf(scratch, in, AES_BLOCK_SIZE);
  3958. XMEMCPY(out, scratch, AES_BLOCK_SIZE);
  3959. #endif
  3960. IncrementAesCounter((byte*)aes->reg);
  3961. out += AES_BLOCK_SIZE;
  3962. in += AES_BLOCK_SIZE;
  3963. sz -= AES_BLOCK_SIZE;
  3964. aes->left = 0;
  3965. }
  3966. ForceZero(scratch, AES_BLOCK_SIZE);
  3967. }
  3968. /* handle non block size remaining and store unused byte count in left */
  3969. if (sz) {
  3970. ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->tmp);
  3971. if (ret != 0) {
  3972. ForceZero(scratch, AES_BLOCK_SIZE);
  3973. #ifdef WOLFSSL_CHECK_MEM_ZERO
  3974. wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
  3975. #endif
  3976. return ret;
  3977. }
  3978. IncrementAesCounter((byte*)aes->reg);
  3979. aes->left = AES_BLOCK_SIZE;
  3980. tmp = (byte*)aes->tmp;
  3981. while (sz--) {
  3982. *(out++) = *(in++) ^ *(tmp++);
  3983. aes->left--;
  3984. }
  3985. }
  3986. #ifdef WOLFSSL_CHECK_MEM_ZERO
  3987. wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
  3988. #endif
  3989. return 0;
  3990. }
  3991. #endif /* NEED_AES_CTR_SOFT */
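/* Usage sketch (illustrative only, kept out of the build with #if 0):
 * CTR is a stream mode, so sz need not be block aligned; leftover
 * key-stream bytes are kept in aes->tmp/aes->left and consumed by the
 * next call. Key and nonce values are placeholders; decryption uses
 * the same call. */
#if 0
static int aes_ctr_example(void)
{
    Aes aes;
    byte key[16] = {0};                  /* hypothetical key */
    byte nonce[AES_BLOCK_SIZE] = {0};    /* hypothetical initial counter */
    byte msg[20] = {0};                  /* deliberately not block aligned */
    byte cipher[sizeof(msg)];
    int ret = wc_AesInit(&aes, NULL, INVALID_DEVID);
    if (ret == 0)    /* CTR uses the encrypt key schedule only */
        ret = wc_AesSetKey(&aes, key, sizeof(key), nonce, AES_ENCRYPTION);
    if (ret == 0)
        ret = wc_AesCtrEncrypt(&aes, cipher, msg, sizeof(msg));
    wc_AesFree(&aes);
    return ret;
}
#endif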
  3992. #endif /* WOLFSSL_AES_COUNTER */
  3993. #endif /* !WOLFSSL_ARMASM */
3994. /*
3995. * The IV for AES GCM and CCM, stored in struct Aes's member reg, consists
3996. * of two parts in order:
3997. * 1. The fixed field, which may be 0 or 4 bytes long. In TLS, this is set
3998. * to the implicit IV.
3999. * 2. The explicit IV, which is generated by wolfCrypt and must be managed
4000. * by wolfCrypt to ensure the IV is unique for each call to encrypt.
4001. * The IV may be a 96-bit random value, or the 32-bit fixed value and a
4002. * 64-bit set of 0 or random data. The final 32 bits of reg are used as a
4003. * block counter during the encryption.
4004. */
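/* Layout sketch of the 16-byte aes->reg under the scheme described above
 * (sizes shown for the 4-byte fixed-field case):
 *
 *   bytes  0..3    fixed field (e.g. TLS implicit IV)
 *   bytes  4..11   explicit IV managed by wolfCrypt
 *   bytes 12..15   32-bit block counter used during encryption
 */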
  4005. #if (defined(HAVE_AESGCM) && !defined(WC_NO_RNG)) || defined(HAVE_AESCCM)
  4006. static WC_INLINE void IncCtr(byte* ctr, word32 ctrSz)
  4007. {
  4008. int i;
  4009. for (i = ctrSz-1; i >= 0; i--) {
  4010. if (++ctr[i])
  4011. break;
  4012. }
  4013. }
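/* Example: with ctrSz = 4, {0x00, 0x00, 0x00, 0xff} becomes
 * {0x00, 0x00, 0x01, 0x00}; the increment is big-endian over the
 * ctrSz bytes passed in. */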
  4014. #endif /* HAVE_AESGCM || HAVE_AESCCM */
  4015. #ifdef HAVE_AESGCM
  4016. #ifdef WOLFSSL_AESGCM_STREAM
  4017. /* Access initialization counter data. */
  4018. #define AES_INITCTR(aes) ((aes)->streamData + 0 * AES_BLOCK_SIZE)
  4019. /* Access counter data. */
  4020. #define AES_COUNTER(aes) ((aes)->streamData + 1 * AES_BLOCK_SIZE)
  4021. /* Access tag data. */
  4022. #define AES_TAG(aes) ((aes)->streamData + 2 * AES_BLOCK_SIZE)
  4023. /* Access last GHASH block. */
  4024. #define AES_LASTGBLOCK(aes) ((aes)->streamData + 3 * AES_BLOCK_SIZE)
  4025. /* Access last encrypted block. */
  4026. #define AES_LASTBLOCK(aes) ((aes)->streamData + 4 * AES_BLOCK_SIZE)
  4027. #endif
  4028. #if defined(HAVE_COLDFIRE_SEC)
  4029. #error "Coldfire SEC doesn't currently support AES-GCM mode"
  4030. #endif
  4031. #ifdef WOLFSSL_ARMASM
  4032. /* implementation is located in wolfcrypt/src/port/arm/armv8-aes.c */
  4033. #elif defined(WOLFSSL_AFALG)
4034. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  4035. #elif defined(WOLFSSL_KCAPI_AES)
  4036. /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  4037. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  4038. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  4039. #else /* software + AESNI implementation */
  4040. #if !defined(FREESCALE_LTC_AES_GCM)
  4041. static WC_INLINE void IncrementGcmCounter(byte* inOutCtr)
  4042. {
  4043. int i;
  4044. /* in network byte order so start at end and work back */
  4045. for (i = AES_BLOCK_SIZE - 1; i >= AES_BLOCK_SIZE - CTR_SZ; i--) {
  4046. if (++inOutCtr[i]) /* we're done unless we overflow */
  4047. return;
  4048. }
  4049. }
  4050. #endif /* !FREESCALE_LTC_AES_GCM */
  4051. #if defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
  4052. static WC_INLINE void FlattenSzInBits(byte* buf, word32 sz)
  4053. {
  4054. /* Multiply the sz by 8 */
  4055. word32 szHi = (sz >> (8*sizeof(sz) - 3));
  4056. sz <<= 3;
  4057. /* copy over the words of the sz into the destination buffer */
  4058. buf[0] = (szHi >> 24) & 0xff;
  4059. buf[1] = (szHi >> 16) & 0xff;
  4060. buf[2] = (szHi >> 8) & 0xff;
  4061. buf[3] = szHi & 0xff;
  4062. buf[4] = (sz >> 24) & 0xff;
  4063. buf[5] = (sz >> 16) & 0xff;
  4064. buf[6] = (sz >> 8) & 0xff;
  4065. buf[7] = sz & 0xff;
  4066. }
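/* Example for FlattenSzInBits() above: sz = 0x20 bytes -> 0x100 bits,
 * written big-endian as 00 00 00 00 00 00 01 00. */
/* RIGHTSHIFTX() below multiplies a GHASH element by x in GF(2^128)
 * (bit-reflected convention): the 16 bytes shift right one bit and, if
 * a bit falls off the end, the result is reduced by XORing 0xE1 (the
 * reflected reduction constant for x^128 + x^7 + x^2 + x + 1) into the
 * first byte. */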
  4067. static WC_INLINE void RIGHTSHIFTX(byte* x)
  4068. {
  4069. int i;
  4070. int carryIn = 0;
  4071. byte borrow = (0x00 - (x[15] & 0x01)) & 0xE1;
  4072. for (i = 0; i < AES_BLOCK_SIZE; i++) {
  4073. int carryOut = (x[i] & 0x01) << 7;
  4074. x[i] = (byte) ((x[i] >> 1) | carryIn);
  4075. carryIn = carryOut;
  4076. }
  4077. x[0] ^= borrow;
  4078. }
  4079. #endif /* defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT) */
  4080. #ifdef GCM_TABLE
  4081. static void GenerateM0(Aes* aes)
  4082. {
  4083. int i, j;
  4084. byte (*m)[AES_BLOCK_SIZE] = aes->M0;
  4085. XMEMCPY(m[128], aes->H, AES_BLOCK_SIZE);
  4086. for (i = 64; i > 0; i /= 2) {
  4087. XMEMCPY(m[i], m[i*2], AES_BLOCK_SIZE);
  4088. RIGHTSHIFTX(m[i]);
  4089. }
  4090. for (i = 2; i < 256; i *= 2) {
  4091. for (j = 1; j < i; j++) {
  4092. XMEMCPY(m[i+j], m[i], AES_BLOCK_SIZE);
  4093. xorbuf(m[i+j], m[j], AES_BLOCK_SIZE);
  4094. }
  4095. }
  4096. XMEMSET(m[0], 0, AES_BLOCK_SIZE);
  4097. }
  4098. #elif defined(GCM_TABLE_4BIT)
  4099. static WC_INLINE void Shift4_M0(byte *r8, byte* z8)
  4100. {
  4101. int i;
  4102. for (i = 15; i > 0; i--)
  4103. r8[i] = (z8[i-1] << 4) | (z8[i] >> 4);
  4104. r8[0] = z8[0] >> 4;
  4105. }
  4106. static void GenerateM0(Aes* aes)
  4107. {
  4108. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  4109. int i;
  4110. #endif
  4111. byte (*m)[AES_BLOCK_SIZE] = aes->M0;
  4112. /* 0 times -> 0x0 */
  4113. XMEMSET(m[0x0], 0, AES_BLOCK_SIZE);
  4114. /* 1 times -> 0x8 */
  4115. XMEMCPY(m[0x8], aes->H, AES_BLOCK_SIZE);
  4116. /* 2 times -> 0x4 */
  4117. XMEMCPY(m[0x4], m[0x8], AES_BLOCK_SIZE);
  4118. RIGHTSHIFTX(m[0x4]);
  4119. /* 4 times -> 0x2 */
  4120. XMEMCPY(m[0x2], m[0x4], AES_BLOCK_SIZE);
  4121. RIGHTSHIFTX(m[0x2]);
  4122. /* 8 times -> 0x1 */
  4123. XMEMCPY(m[0x1], m[0x2], AES_BLOCK_SIZE);
  4124. RIGHTSHIFTX(m[0x1]);
  4125. /* 0x3 */
  4126. XMEMCPY(m[0x3], m[0x2], AES_BLOCK_SIZE);
  4127. xorbuf (m[0x3], m[0x1], AES_BLOCK_SIZE);
  4128. /* 0x5 -> 0x7 */
  4129. XMEMCPY(m[0x5], m[0x4], AES_BLOCK_SIZE);
  4130. xorbuf (m[0x5], m[0x1], AES_BLOCK_SIZE);
  4131. XMEMCPY(m[0x6], m[0x4], AES_BLOCK_SIZE);
  4132. xorbuf (m[0x6], m[0x2], AES_BLOCK_SIZE);
  4133. XMEMCPY(m[0x7], m[0x4], AES_BLOCK_SIZE);
  4134. xorbuf (m[0x7], m[0x3], AES_BLOCK_SIZE);
  4135. /* 0x9 -> 0xf */
  4136. XMEMCPY(m[0x9], m[0x8], AES_BLOCK_SIZE);
  4137. xorbuf (m[0x9], m[0x1], AES_BLOCK_SIZE);
  4138. XMEMCPY(m[0xa], m[0x8], AES_BLOCK_SIZE);
  4139. xorbuf (m[0xa], m[0x2], AES_BLOCK_SIZE);
  4140. XMEMCPY(m[0xb], m[0x8], AES_BLOCK_SIZE);
  4141. xorbuf (m[0xb], m[0x3], AES_BLOCK_SIZE);
  4142. XMEMCPY(m[0xc], m[0x8], AES_BLOCK_SIZE);
  4143. xorbuf (m[0xc], m[0x4], AES_BLOCK_SIZE);
  4144. XMEMCPY(m[0xd], m[0x8], AES_BLOCK_SIZE);
  4145. xorbuf (m[0xd], m[0x5], AES_BLOCK_SIZE);
  4146. XMEMCPY(m[0xe], m[0x8], AES_BLOCK_SIZE);
  4147. xorbuf (m[0xe], m[0x6], AES_BLOCK_SIZE);
  4148. XMEMCPY(m[0xf], m[0x8], AES_BLOCK_SIZE);
  4149. xorbuf (m[0xf], m[0x7], AES_BLOCK_SIZE);
  4150. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  4151. for (i = 0; i < 16; i++) {
  4152. Shift4_M0(m[16+i], m[i]);
  4153. }
  4154. #endif
  4155. }
4156. #endif /* GCM_TABLE / GCM_TABLE_4BIT */
  4157. /* Software AES - GCM SetKey */
  4158. int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len)
  4159. {
  4160. int ret;
  4161. byte iv[AES_BLOCK_SIZE];
  4162. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  4163. byte local[32];
  4164. word32 localSz = 32;
  4165. if (len == (16 + WC_CAAM_BLOB_SZ) ||
  4166. len == (24 + WC_CAAM_BLOB_SZ) ||
  4167. len == (32 + WC_CAAM_BLOB_SZ)) {
  4168. if (wc_caamOpenBlob((byte*)key, len, local, &localSz) != 0) {
  4169. return BAD_FUNC_ARG;
  4170. }
  4171. /* set local values */
  4172. key = local;
  4173. len = localSz;
  4174. }
  4175. #endif
  4176. if (!((len == 16) || (len == 24) || (len == 32)))
  4177. return BAD_FUNC_ARG;
  4178. if (aes == NULL) {
  4179. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  4180. ForceZero(local, sizeof(local));
  4181. #endif
  4182. return BAD_FUNC_ARG;
  4183. }
  4184. #ifdef OPENSSL_EXTRA
  4185. XMEMSET(aes->aadH, 0, sizeof(aes->aadH));
  4186. aes->aadLen = 0;
  4187. #endif
  4188. XMEMSET(iv, 0, AES_BLOCK_SIZE);
  4189. ret = wc_AesSetKey(aes, key, len, iv, AES_ENCRYPTION);
  4190. #ifdef WOLFSSL_AESGCM_STREAM
  4191. aes->gcmKeySet = 1;
  4192. #endif
  4193. #ifdef WOLFSSL_AESNI
  4194. /* AES-NI code generates its own H value. */
  4195. if (haveAESNI)
  4196. return ret;
  4197. #endif /* WOLFSSL_AESNI */
  4198. #if defined(WOLFSSL_SECO_CAAM)
  4199. if (aes->devId == WOLFSSL_SECO_DEVID) {
  4200. return ret;
  4201. }
  4202. #endif /* WOLFSSL_SECO_CAAM */
  4203. #if !defined(FREESCALE_LTC_AES_GCM)
  4204. if (ret == 0)
  4205. ret = wc_AesEncrypt(aes, iv, aes->H);
  4206. if (ret == 0) {
  4207. #if defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
  4208. GenerateM0(aes);
4209. #endif /* GCM_TABLE || GCM_TABLE_4BIT */
  4210. }
  4211. #endif /* FREESCALE_LTC_AES_GCM */
  4212. #if defined(WOLFSSL_XILINX_CRYPT)
  4213. wc_AesGcmSetKey_ex(aes, key, len, XSECURE_CSU_AES_KEY_SRC_KUP);
  4214. #elif defined(WOLFSSL_AFALG_XILINX_AES)
  4215. wc_AesGcmSetKey_ex(aes, key, len, 0);
  4216. #endif
  4217. #ifdef WOLF_CRYPTO_CB
  4218. if (aes->devId != INVALID_DEVID) {
  4219. XMEMCPY(aes->devKey, key, len);
  4220. }
  4221. #endif
  4222. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  4223. ForceZero(local, sizeof(local));
  4224. #endif
  4225. return ret;
  4226. }
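/* Usage sketch (illustrative only, kept out of the build with #if 0):
 * one-shot AES-GCM encrypt after wc_AesGcmSetKey(). Key, IV, and AAD
 * values are placeholders; a real caller must use a unique IV per key. */
#if 0
static int aes_gcm_example(void)
{
    Aes aes;
    byte key[16] = {0};                  /* hypothetical key */
    byte iv[GCM_NONCE_MID_SZ] = {0};     /* 12-byte nonce */
    byte aad[4] = {0};                   /* hypothetical additional data */
    byte msg[32] = {0};
    byte cipher[sizeof(msg)];
    byte tag[AES_BLOCK_SIZE];
    int ret = wc_AesInit(&aes, NULL, INVALID_DEVID);
    if (ret == 0)
        ret = wc_AesGcmSetKey(&aes, key, sizeof(key));
    if (ret == 0)
        ret = wc_AesGcmEncrypt(&aes, cipher, msg, sizeof(msg),
                               iv, sizeof(iv), tag, sizeof(tag),
                               aad, sizeof(aad));
    wc_AesFree(&aes);
    return ret;
}
#endif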
  4227. #ifdef WOLFSSL_AESNI
  4228. #if defined(USE_INTEL_SPEEDUP)
  4229. #define HAVE_INTEL_AVX1
  4230. #define HAVE_INTEL_AVX2
  4231. #endif /* USE_INTEL_SPEEDUP */
  4232. #ifndef _MSC_VER
  4233. void AES_GCM_encrypt(const unsigned char *in, unsigned char *out,
  4234. const unsigned char* addt, const unsigned char* ivec,
  4235. unsigned char *tag, word32 nbytes,
  4236. word32 abytes, word32 ibytes,
  4237. word32 tbytes, const unsigned char* key, int nr)
  4238. XASM_LINK("AES_GCM_encrypt");
  4239. #ifdef HAVE_INTEL_AVX1
  4240. void AES_GCM_encrypt_avx1(const unsigned char *in, unsigned char *out,
  4241. const unsigned char* addt, const unsigned char* ivec,
  4242. unsigned char *tag, word32 nbytes,
  4243. word32 abytes, word32 ibytes,
  4244. word32 tbytes, const unsigned char* key,
  4245. int nr)
  4246. XASM_LINK("AES_GCM_encrypt_avx1");
  4247. #ifdef HAVE_INTEL_AVX2
  4248. void AES_GCM_encrypt_avx2(const unsigned char *in, unsigned char *out,
  4249. const unsigned char* addt, const unsigned char* ivec,
  4250. unsigned char *tag, word32 nbytes,
  4251. word32 abytes, word32 ibytes,
  4252. word32 tbytes, const unsigned char* key,
  4253. int nr)
  4254. XASM_LINK("AES_GCM_encrypt_avx2");
  4255. #endif /* HAVE_INTEL_AVX2 */
  4256. #endif /* HAVE_INTEL_AVX1 */
  4257. #ifdef HAVE_AES_DECRYPT
  4258. void AES_GCM_decrypt(const unsigned char *in, unsigned char *out,
  4259. const unsigned char* addt, const unsigned char* ivec,
  4260. const unsigned char *tag, word32 nbytes, word32 abytes,
  4261. word32 ibytes, word32 tbytes, const unsigned char* key,
  4262. int nr, int* res)
  4263. XASM_LINK("AES_GCM_decrypt");
  4264. #ifdef HAVE_INTEL_AVX1
  4265. void AES_GCM_decrypt_avx1(const unsigned char *in, unsigned char *out,
  4266. const unsigned char* addt, const unsigned char* ivec,
  4267. const unsigned char *tag, word32 nbytes,
  4268. word32 abytes, word32 ibytes, word32 tbytes,
  4269. const unsigned char* key, int nr, int* res)
  4270. XASM_LINK("AES_GCM_decrypt_avx1");
  4271. #ifdef HAVE_INTEL_AVX2
  4272. void AES_GCM_decrypt_avx2(const unsigned char *in, unsigned char *out,
  4273. const unsigned char* addt, const unsigned char* ivec,
  4274. const unsigned char *tag, word32 nbytes,
  4275. word32 abytes, word32 ibytes, word32 tbytes,
  4276. const unsigned char* key, int nr, int* res)
  4277. XASM_LINK("AES_GCM_decrypt_avx2");
  4278. #endif /* HAVE_INTEL_AVX2 */
  4279. #endif /* HAVE_INTEL_AVX1 */
  4280. #endif /* HAVE_AES_DECRYPT */
  4281. #else /* _MSC_VER */
  4282. /* AESNI with Microsoft */
  4283. #ifdef __clang__
4284. /* With Clang, the __m128i in emmintrin.h is a union containing:
4285. * "unsigned __int64 m128i_u64[2];"
4286. * Notes: Must add "-maes -msse4.1 -mpclmul" to the compiler flags.
4287. * Must mark "aes_asm.asm" as an included/compiled C file.
4288. */
  4289. #define M128_INIT(x,y) { (long long)x, (long long)y }
  4290. #else
4291. /* Typically this is an array of 16 int8 values */
  4292. #define S(w,z) ((char)((unsigned long long)(w) >> (8*(7-(z))) & 0xFF))
  4293. #define M128_INIT(x,y) { S((x),7), S((x),6), S((x),5), S((x),4), \
  4294. S((x),3), S((x),2), S((x),1), S((x),0), \
  4295. S((y),7), S((y),6), S((y),5), S((y),4), \
  4296. S((y),3), S((y),2), S((y),1), S((y),0) }
  4297. #endif
  4298. static const __m128i MOD2_128 =
  4299. M128_INIT(0x1, (long long int)0xc200000000000000UL);
  4300. /* See Intel Carry-Less Multiplication Instruction
  4301. * and its Usage for Computing the GCM Mode White Paper
  4302. * by Shay Gueron, Intel Mobility Group, Israel Development Center;
  4303. * and Michael E. Kounavis, Intel Labs, Circuits and Systems Research */
  4304. /* Figure 9. AES-GCM - Encrypt With Single Block Ghash at a Time */
  4305. static const __m128i ONE = M128_INIT(0x0, 0x1);
  4306. #ifndef AES_GCM_AESNI_NO_UNROLL
  4307. static const __m128i TWO = M128_INIT(0x0, 0x2);
  4308. static const __m128i THREE = M128_INIT(0x0, 0x3);
  4309. static const __m128i FOUR = M128_INIT(0x0, 0x4);
  4310. static const __m128i FIVE = M128_INIT(0x0, 0x5);
  4311. static const __m128i SIX = M128_INIT(0x0, 0x6);
  4312. static const __m128i SEVEN = M128_INIT(0x0, 0x7);
  4313. static const __m128i EIGHT = M128_INIT(0x0, 0x8);
  4314. #endif
  4315. static const __m128i BSWAP_EPI64 =
  4316. M128_INIT(0x0001020304050607, 0x08090a0b0c0d0e0f);
  4317. static const __m128i BSWAP_MASK =
  4318. M128_INIT(0x08090a0b0c0d0e0f, 0x0001020304050607);
  4319. /* The following are for MSC based builds which do not allow
  4320. * inline assembly. Intrinsic functions are used instead. */
  4321. #define aes_gcm_calc_iv_12(KEY, ivec, nr, H, Y, T) \
  4322. do \
  4323. { \
  4324. word32 iv12[4]; \
  4325. iv12[0] = *(word32*)&ivec[0]; \
  4326. iv12[1] = *(word32*)&ivec[4]; \
  4327. iv12[2] = *(word32*)&ivec[8]; \
  4328. iv12[3] = 0x01000000; \
  4329. Y = _mm_loadu_si128((__m128i*)iv12); \
  4330. \
4331. /* Compute E[ZERO, KS] and E[Y0, KS] together */ \
  4332. tmp1 = _mm_load_si128(&KEY[0]); \
  4333. tmp2 = _mm_xor_si128(Y, KEY[0]); \
  4334. tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); \
  4335. tmp2 = _mm_aesenc_si128(tmp2, KEY[1]); \
  4336. tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); \
  4337. tmp2 = _mm_aesenc_si128(tmp2, KEY[2]); \
  4338. tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); \
  4339. tmp2 = _mm_aesenc_si128(tmp2, KEY[3]); \
  4340. tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); \
  4341. tmp2 = _mm_aesenc_si128(tmp2, KEY[4]); \
  4342. tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); \
  4343. tmp2 = _mm_aesenc_si128(tmp2, KEY[5]); \
  4344. tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); \
  4345. tmp2 = _mm_aesenc_si128(tmp2, KEY[6]); \
  4346. tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); \
  4347. tmp2 = _mm_aesenc_si128(tmp2, KEY[7]); \
  4348. tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); \
  4349. tmp2 = _mm_aesenc_si128(tmp2, KEY[8]); \
  4350. tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); \
  4351. tmp2 = _mm_aesenc_si128(tmp2, KEY[9]); \
  4352. lastKey = KEY[10]; \
  4353. if (nr > 10) { \
  4354. tmp1 = _mm_aesenc_si128(tmp1, lastKey); \
  4355. tmp2 = _mm_aesenc_si128(tmp2, lastKey); \
  4356. tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); \
  4357. tmp2 = _mm_aesenc_si128(tmp2, KEY[11]); \
  4358. lastKey = KEY[12]; \
  4359. if (nr > 12) { \
  4360. tmp1 = _mm_aesenc_si128(tmp1, lastKey); \
  4361. tmp2 = _mm_aesenc_si128(tmp2, lastKey); \
  4362. tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); \
  4363. tmp2 = _mm_aesenc_si128(tmp2, KEY[13]); \
  4364. lastKey = KEY[14]; \
  4365. } \
  4366. } \
  4367. H = _mm_aesenclast_si128(tmp1, lastKey); \
  4368. T = _mm_aesenclast_si128(tmp2, lastKey); \
  4369. H = _mm_shuffle_epi8(H, BSWAP_MASK); \
  4370. } \
  4371. while (0)
  4372. #ifdef _M_X64
  4373. /* 64-bit */
  4374. #define AES_GCM_INSERT_EPI(tmp1, a, b) \
  4375. tmp1 = _mm_insert_epi64(tmp1, ((word64)(a))*8, 0); \
  4376. tmp1 = _mm_insert_epi64(tmp1, ((word64)(b))*8, 1);
  4377. #else
  4378. /* 32-bit */
  4379. #define AES_GCM_INSERT_EPI(tmp1, a, b) \
  4380. tmp1 = _mm_insert_epi32(tmp1, ((int)(a))*8, 0); \
  4381. tmp1 = _mm_insert_epi32(tmp1, 0, 1); \
  4382. tmp1 = _mm_insert_epi32(tmp1, ((int)(b))*8, 2); \
  4383. tmp1 = _mm_insert_epi32(tmp1, 0, 3);
  4384. #endif
  4385. #define aes_gcm_calc_iv(KEY, ivec, ibytes, nr, H, Y, T) \
  4386. do \
  4387. { \
  4388. if (ibytes % 16) { \
  4389. i = ibytes / 16; \
  4390. for (j=0; j < (int)(ibytes%16); j++) \
  4391. ((unsigned char*)&last_block)[j] = ivec[i*16+j]; \
  4392. } \
  4393. tmp1 = _mm_load_si128(&KEY[0]); \
  4394. tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); \
  4395. tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); \
  4396. tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); \
  4397. tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); \
  4398. tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); \
  4399. tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); \
  4400. tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); \
  4401. tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); \
  4402. tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); \
  4403. lastKey = KEY[10]; \
  4404. if (nr > 10) { \
  4405. tmp1 = _mm_aesenc_si128(tmp1, lastKey); \
  4406. tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); \
  4407. lastKey = KEY[12]; \
  4408. if (nr > 12) { \
  4409. tmp1 = _mm_aesenc_si128(tmp1, lastKey); \
  4410. tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); \
  4411. lastKey = KEY[14]; \
  4412. } \
  4413. } \
  4414. H = _mm_aesenclast_si128(tmp1, lastKey); \
  4415. H = _mm_shuffle_epi8(H, BSWAP_MASK); \
  4416. Y = _mm_setzero_si128(); \
  4417. for (i=0; i < (int)(ibytes/16); i++) { \
  4418. tmp1 = _mm_loadu_si128(&((__m128i*)ivec)[i]); \
  4419. tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); \
  4420. Y = _mm_xor_si128(Y, tmp1); \
  4421. Y = gfmul_sw(Y, H); \
  4422. } \
  4423. if (ibytes % 16) { \
  4424. tmp1 = last_block; \
  4425. tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); \
  4426. Y = _mm_xor_si128(Y, tmp1); \
  4427. Y = gfmul_sw(Y, H); \
  4428. } \
  4429. AES_GCM_INSERT_EPI(tmp1, ibytes, 0); \
  4430. Y = _mm_xor_si128(Y, tmp1); \
  4431. Y = gfmul_sw(Y, H); \
  4432. Y = _mm_shuffle_epi8(Y, BSWAP_MASK); /* Compute E(K, Y0) */ \
  4433. tmp1 = _mm_xor_si128(Y, KEY[0]); \
  4434. tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); \
  4435. tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); \
  4436. tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); \
  4437. tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); \
  4438. tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); \
  4439. tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); \
  4440. tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); \
  4441. tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); \
  4442. tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); \
  4443. lastKey = KEY[10]; \
  4444. if (nr > 10) { \
  4445. tmp1 = _mm_aesenc_si128(tmp1, lastKey); \
  4446. tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); \
  4447. lastKey = KEY[12]; \
  4448. if (nr > 12) { \
  4449. tmp1 = _mm_aesenc_si128(tmp1, lastKey); \
  4450. tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); \
  4451. lastKey = KEY[14]; \
  4452. } \
  4453. } \
  4454. T = _mm_aesenclast_si128(tmp1, lastKey); \
  4455. } \
  4456. while (0)
  4457. #define AES_ENC_8(j) \
  4458. tmp1 = _mm_aesenc_si128(tmp1, KEY[j]); \
  4459. tmp2 = _mm_aesenc_si128(tmp2, KEY[j]); \
  4460. tmp3 = _mm_aesenc_si128(tmp3, KEY[j]); \
  4461. tmp4 = _mm_aesenc_si128(tmp4, KEY[j]); \
  4462. tmp5 = _mm_aesenc_si128(tmp5, KEY[j]); \
  4463. tmp6 = _mm_aesenc_si128(tmp6, KEY[j]); \
  4464. tmp7 = _mm_aesenc_si128(tmp7, KEY[j]); \
  4465. tmp8 = _mm_aesenc_si128(tmp8, KEY[j]);
#define AES_ENC_LAST_8() \
    tmp1 = _mm_aesenclast_si128(tmp1, lastKey); \
    tmp2 = _mm_aesenclast_si128(tmp2, lastKey); \
    tmp1 = _mm_xor_si128(tmp1, _mm_loadu_si128(&((__m128i*)in)[i*8+0])); \
    tmp2 = _mm_xor_si128(tmp2, _mm_loadu_si128(&((__m128i*)in)[i*8+1])); \
    _mm_storeu_si128(&((__m128i*)out)[i*8+0], tmp1); \
    _mm_storeu_si128(&((__m128i*)out)[i*8+1], tmp2); \
    tmp3 = _mm_aesenclast_si128(tmp3, lastKey); \
    tmp4 = _mm_aesenclast_si128(tmp4, lastKey); \
    tmp3 = _mm_xor_si128(tmp3, _mm_loadu_si128(&((__m128i*)in)[i*8+2])); \
    tmp4 = _mm_xor_si128(tmp4, _mm_loadu_si128(&((__m128i*)in)[i*8+3])); \
    _mm_storeu_si128(&((__m128i*)out)[i*8+2], tmp3); \
    _mm_storeu_si128(&((__m128i*)out)[i*8+3], tmp4); \
    tmp5 = _mm_aesenclast_si128(tmp5, lastKey); \
    tmp6 = _mm_aesenclast_si128(tmp6, lastKey); \
    tmp5 = _mm_xor_si128(tmp5, _mm_loadu_si128(&((__m128i*)in)[i*8+4])); \
    tmp6 = _mm_xor_si128(tmp6, _mm_loadu_si128(&((__m128i*)in)[i*8+5])); \
    _mm_storeu_si128(&((__m128i*)out)[i*8+4], tmp5); \
    _mm_storeu_si128(&((__m128i*)out)[i*8+5], tmp6); \
    tmp7 = _mm_aesenclast_si128(tmp7, lastKey); \
    tmp8 = _mm_aesenclast_si128(tmp8, lastKey); \
    tmp7 = _mm_xor_si128(tmp7, _mm_loadu_si128(&((__m128i*)in)[i*8+6])); \
    tmp8 = _mm_xor_si128(tmp8, _mm_loadu_si128(&((__m128i*)in)[i*8+7])); \
    _mm_storeu_si128(&((__m128i*)out)[i*8+6], tmp7); \
    _mm_storeu_si128(&((__m128i*)out)[i*8+7], tmp8);
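
/* Multiply a and b in GF(2^128) for GHASH: a Karatsuba carry-less multiply
 * (three PCLMULQDQ) forms the 256-bit product, which is then shifted left
 * one bit and reduced modulo x^128 + x^7 + x^2 + x + 1. Operands are in the
 * byte-reflected form produced by BSWAP_MASK. */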
static WARN_UNUSED_RESULT __m128i gfmul_sw(__m128i a, __m128i b)
{
    __m128i r, t1, t2, t3, t4, t5, t6, t7;
    t2 = _mm_shuffle_epi32(b, 78);
    t3 = _mm_shuffle_epi32(a, 78);
    t2 = _mm_xor_si128(t2, b);
    t3 = _mm_xor_si128(t3, a);
    t4 = _mm_clmulepi64_si128(b, a, 0x11);
    t1 = _mm_clmulepi64_si128(b, a, 0x00);
    t2 = _mm_clmulepi64_si128(t2, t3, 0x00);
    t2 = _mm_xor_si128(t2, t1);
    t2 = _mm_xor_si128(t2, t4);
    t3 = _mm_slli_si128(t2, 8);
    t2 = _mm_srli_si128(t2, 8);
    t1 = _mm_xor_si128(t1, t3);
    t4 = _mm_xor_si128(t4, t2);
    t5 = _mm_srli_epi32(t1, 31);
    t6 = _mm_srli_epi32(t4, 31);
    t1 = _mm_slli_epi32(t1, 1);
    t4 = _mm_slli_epi32(t4, 1);
    t7 = _mm_srli_si128(t5, 12);
    t5 = _mm_slli_si128(t5, 4);
    t6 = _mm_slli_si128(t6, 4);
    t4 = _mm_or_si128(t4, t7);
    t1 = _mm_or_si128(t1, t5);
    t4 = _mm_or_si128(t4, t6);
    t5 = _mm_slli_epi32(t1, 31);
    t6 = _mm_slli_epi32(t1, 30);
    t7 = _mm_slli_epi32(t1, 25);
    t5 = _mm_xor_si128(t5, t6);
    t5 = _mm_xor_si128(t5, t7);
    t6 = _mm_srli_si128(t5, 4);
    t5 = _mm_slli_si128(t5, 12);
    t1 = _mm_xor_si128(t1, t5);
    t7 = _mm_srli_epi32(t1, 1);
    t3 = _mm_srli_epi32(t1, 2);
    t2 = _mm_srli_epi32(t1, 7);
    t7 = _mm_xor_si128(t7, t3);
    t7 = _mm_xor_si128(t7, t2);
    t7 = _mm_xor_si128(t7, t6);
    t7 = _mm_xor_si128(t7, t1);
    r = _mm_xor_si128(t4, t7);
    return r;
}
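
/* Carry-less multiply a by b and accumulate the unreduced 256-bit product
 * into (*r0, *r1). Reduction is deferred so that several multiplies can be
 * folded together with a single ghash_red() call. */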
static void gfmul_only(__m128i a, __m128i b, __m128i* r0, __m128i* r1)
{
    __m128i t1, t2, t3, t4;

    /* 128 x 128 Carryless Multiply */
    t2 = _mm_shuffle_epi32(b, 78);
    t3 = _mm_shuffle_epi32(a, 78);
    t2 = _mm_xor_si128(t2, b);
    t3 = _mm_xor_si128(t3, a);
    t4 = _mm_clmulepi64_si128(b, a, 0x11);
    t1 = _mm_clmulepi64_si128(b, a, 0x00);
    t2 = _mm_clmulepi64_si128(t2, t3, 0x00);
    t2 = _mm_xor_si128(t2, t1);
    t2 = _mm_xor_si128(t2, t4);
    t3 = _mm_slli_si128(t2, 8);
    t2 = _mm_srli_si128(t2, 8);
    t1 = _mm_xor_si128(t1, t3);
    t4 = _mm_xor_si128(t4, t2);
    *r0 = _mm_xor_si128(t1, *r0);
    *r1 = _mm_xor_si128(t4, *r1);
}
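
/* Multiply a by x in GF(2^128): shift the 128-bit value left one bit and,
 * if the top bit was set, XOR in the reduction constant MOD2_128. The
 * conditional XOR is done branchlessly by broadcasting the sign of the top
 * word as a mask. Used to pre-shift H so later multiplies can skip the
 * per-multiply shift. */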
static WARN_UNUSED_RESULT __m128i gfmul_shl1(__m128i a)
{
    __m128i t1 = a, t2;
    t2 = _mm_srli_epi64(t1, 63);
    t1 = _mm_slli_epi64(t1, 1);
    t2 = _mm_slli_si128(t2, 8);
    t1 = _mm_or_si128(t1, t2);
    /* if (a[1] >> 63) t1 = _mm_xor_si128(t1, MOD2_128); */
    a = _mm_shuffle_epi32(a, 0xff);
    a = _mm_srai_epi32(a, 31);
    a = _mm_and_si128(a, MOD2_128);
    t1 = _mm_xor_si128(t1, a);
    return t1;
}
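
/* Reduce the 256-bit value (r1:r0) modulo x^128 + x^7 + x^2 + x + 1. The
 * shift-by-31/30/25 followed by shift-by-1/2/7 sequence is the standard
 * bit-reflected reduction described in Intel's carry-less multiplication
 * (GCM) white paper. */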
static WARN_UNUSED_RESULT __m128i ghash_red(__m128i r0, __m128i r1)
{
    __m128i t2, t3;
    __m128i t5, t6, t7;
    t5 = _mm_slli_epi32(r0, 31);
    t6 = _mm_slli_epi32(r0, 30);
    t7 = _mm_slli_epi32(r0, 25);
    t5 = _mm_xor_si128(t5, t6);
    t5 = _mm_xor_si128(t5, t7);
    t6 = _mm_srli_si128(t5, 4);
    t5 = _mm_slli_si128(t5, 12);
    r0 = _mm_xor_si128(r0, t5);
    t7 = _mm_srli_epi32(r0, 1);
    t3 = _mm_srli_epi32(r0, 2);
    t2 = _mm_srli_epi32(r0, 7);
    t7 = _mm_xor_si128(t7, t3);
    t7 = _mm_xor_si128(t7, t2);
    t7 = _mm_xor_si128(t7, t6);
    t7 = _mm_xor_si128(t7, r0);
    return _mm_xor_si128(r1, t7);
}
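
/* Multiply and reduce in one step; assumes the second operand has already
 * been pre-shifted with gfmul_shl1(). */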
static WARN_UNUSED_RESULT __m128i gfmul_shifted(__m128i a, __m128i b)
{
    __m128i t0 = _mm_setzero_si128(), t1 = _mm_setzero_si128();
    gfmul_only(a, b, &t0, &t1);
    return ghash_red(t0, t1);
}

#ifndef AES_GCM_AESNI_NO_UNROLL
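/* Fold eight hashed blocks into the tag at once: block a1 is multiplied by
 * the highest power of H (b8 = H^8) and a8 by the lowest (b1 = H); the
 * accumulated product is reduced once at the end. */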
static WARN_UNUSED_RESULT __m128i gfmul8(
    __m128i a1, __m128i a2, __m128i a3, __m128i a4,
    __m128i a5, __m128i a6, __m128i a7, __m128i a8,
    __m128i b1, __m128i b2, __m128i b3, __m128i b4,
    __m128i b5, __m128i b6, __m128i b7, __m128i b8)
{
    __m128i t0 = _mm_setzero_si128(), t1 = _mm_setzero_si128();
    gfmul_only(a1, b8, &t0, &t1);
    gfmul_only(a2, b7, &t0, &t1);
    gfmul_only(a3, b6, &t0, &t1);
    gfmul_only(a4, b5, &t0, &t1);
    gfmul_only(a5, b4, &t0, &t1);
    gfmul_only(a6, b3, &t0, &t1);
    gfmul_only(a7, b2, &t0, &t1);
    gfmul_only(a8, b1, &t0, &t1);
    return ghash_red(t0, t1);
}
#endif
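
/* AES-GCM encrypt using AES-NI and PCLMULQDQ. Derives the hash key H and
 * E(K, Y0) from the IV, hashes the AAD, then runs CTR-mode encryption
 * (eight blocks per iteration unless AES_GCM_AESNI_NO_UNROLL) while folding
 * the ciphertext into the GHASH state, and finally hashes in the bit
 * lengths of the AAD and ciphertext to form the tag. */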
static WARN_UNUSED_RESULT int AES_GCM_encrypt(
    const unsigned char *in, unsigned char *out,
    const unsigned char* addt,
    const unsigned char* ivec, unsigned char *tag,
    word32 nbytes, word32 abytes, word32 ibytes,
    word32 tbytes, const unsigned char* key, int nr)
{
    int i, j, k;
    __m128i ctr1;
    __m128i H, Y, T;
    __m128i X = _mm_setzero_si128();
    __m128i *KEY = (__m128i*)key, lastKey;
    __m128i last_block = _mm_setzero_si128();
    __m128i tmp1, tmp2;
#ifndef AES_GCM_AESNI_NO_UNROLL
    __m128i HT[8];
    __m128i r0, r1;
    __m128i XV;
    __m128i tmp3, tmp4, tmp5, tmp6, tmp7, tmp8;
#endif

    if (ibytes == GCM_NONCE_MID_SZ)
        aes_gcm_calc_iv_12(KEY, ivec, nr, H, Y, T);
    else
        aes_gcm_calc_iv(KEY, ivec, ibytes, nr, H, Y, T);

    for (i=0; i < (int)(abytes/16); i++) {
        tmp1 = _mm_loadu_si128(&((__m128i*)addt)[i]);
        tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK);
        X = _mm_xor_si128(X, tmp1);
        X = gfmul_sw(X, H);
    }
    if (abytes % 16) {
        last_block = _mm_setzero_si128();
        for (j=0; j < (int)(abytes%16); j++)
            ((unsigned char*)&last_block)[j] = addt[i*16+j];
        tmp1 = last_block;
        tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK);
        X = _mm_xor_si128(X, tmp1);
        X = gfmul_sw(X, H);
    }

    tmp1 = _mm_shuffle_epi8(Y, BSWAP_EPI64);
    ctr1 = _mm_add_epi32(tmp1, ONE);
    H = gfmul_shl1(H);

#ifndef AES_GCM_AESNI_NO_UNROLL
    i = 0;
    if (nbytes >= 16*8) {
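        /* HT[n] = H^(n+1) (in pre-shifted form): eight powers of H let
         * eight blocks be multiplied in parallel and reduced once. */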
        HT[0] = H;
        HT[1] = gfmul_shifted(H, H);
        HT[2] = gfmul_shifted(H, HT[1]);
        HT[3] = gfmul_shifted(HT[1], HT[1]);
        HT[4] = gfmul_shifted(HT[1], HT[2]);
        HT[5] = gfmul_shifted(HT[2], HT[2]);
        HT[6] = gfmul_shifted(HT[2], HT[3]);
        HT[7] = gfmul_shifted(HT[3], HT[3]);

        tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64);
        tmp2 = _mm_add_epi32(ctr1, ONE);
        tmp2 = _mm_shuffle_epi8(tmp2, BSWAP_EPI64);
        tmp3 = _mm_add_epi32(ctr1, TWO);
        tmp3 = _mm_shuffle_epi8(tmp3, BSWAP_EPI64);
        tmp4 = _mm_add_epi32(ctr1, THREE);
        tmp4 = _mm_shuffle_epi8(tmp4, BSWAP_EPI64);
        tmp5 = _mm_add_epi32(ctr1, FOUR);
        tmp5 = _mm_shuffle_epi8(tmp5, BSWAP_EPI64);
        tmp6 = _mm_add_epi32(ctr1, FIVE);
        tmp6 = _mm_shuffle_epi8(tmp6, BSWAP_EPI64);
        tmp7 = _mm_add_epi32(ctr1, SIX);
        tmp7 = _mm_shuffle_epi8(tmp7, BSWAP_EPI64);
        tmp8 = _mm_add_epi32(ctr1, SEVEN);
        tmp8 = _mm_shuffle_epi8(tmp8, BSWAP_EPI64);
        ctr1 = _mm_add_epi32(ctr1, EIGHT);
        tmp1 = _mm_xor_si128(tmp1, KEY[0]);
        tmp2 = _mm_xor_si128(tmp2, KEY[0]);
        tmp3 = _mm_xor_si128(tmp3, KEY[0]);
        tmp4 = _mm_xor_si128(tmp4, KEY[0]);
        tmp5 = _mm_xor_si128(tmp5, KEY[0]);
        tmp6 = _mm_xor_si128(tmp6, KEY[0]);
        tmp7 = _mm_xor_si128(tmp7, KEY[0]);
        tmp8 = _mm_xor_si128(tmp8, KEY[0]);
        AES_ENC_8(1);
        AES_ENC_8(2);
        AES_ENC_8(3);
        AES_ENC_8(4);
        AES_ENC_8(5);
        AES_ENC_8(6);
        AES_ENC_8(7);
        AES_ENC_8(8);
        AES_ENC_8(9);
        lastKey = KEY[10];
        if (nr > 10) {
            AES_ENC_8(10);
            AES_ENC_8(11);
            lastKey = KEY[12];
            if (nr > 12) {
                AES_ENC_8(12);
                AES_ENC_8(13);
                lastKey = KEY[14];
            }
        }
        AES_ENC_LAST_8();
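
        /* From here on, the GHASH of the previous eight ciphertext blocks
         * is interleaved with the AES rounds of the next eight counter
         * blocks to overlap the multiply and encrypt latencies. */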
        for (i=1; i < (int)(nbytes/16/8); i++) {
            r0 = _mm_setzero_si128();
            r1 = _mm_setzero_si128();
            tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64);
            tmp2 = _mm_add_epi32(ctr1, ONE);
            tmp2 = _mm_shuffle_epi8(tmp2, BSWAP_EPI64);
            tmp3 = _mm_add_epi32(ctr1, TWO);
            tmp3 = _mm_shuffle_epi8(tmp3, BSWAP_EPI64);
            tmp4 = _mm_add_epi32(ctr1, THREE);
            tmp4 = _mm_shuffle_epi8(tmp4, BSWAP_EPI64);
            tmp5 = _mm_add_epi32(ctr1, FOUR);
            tmp5 = _mm_shuffle_epi8(tmp5, BSWAP_EPI64);
            tmp6 = _mm_add_epi32(ctr1, FIVE);
            tmp6 = _mm_shuffle_epi8(tmp6, BSWAP_EPI64);
            tmp7 = _mm_add_epi32(ctr1, SIX);
            tmp7 = _mm_shuffle_epi8(tmp7, BSWAP_EPI64);
            tmp8 = _mm_add_epi32(ctr1, SEVEN);
            tmp8 = _mm_shuffle_epi8(tmp8, BSWAP_EPI64);
            ctr1 = _mm_add_epi32(ctr1, EIGHT);
            tmp1 = _mm_xor_si128(tmp1, KEY[0]);
            tmp2 = _mm_xor_si128(tmp2, KEY[0]);
            tmp3 = _mm_xor_si128(tmp3, KEY[0]);
            tmp4 = _mm_xor_si128(tmp4, KEY[0]);
            tmp5 = _mm_xor_si128(tmp5, KEY[0]);
            tmp6 = _mm_xor_si128(tmp6, KEY[0]);
            tmp7 = _mm_xor_si128(tmp7, KEY[0]);
            tmp8 = _mm_xor_si128(tmp8, KEY[0]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+0]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            XV = _mm_xor_si128(XV, X);
            gfmul_only(XV, HT[7], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[1]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[1]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[1]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[1]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[1]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[1]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[1]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[1]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+1]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[6], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[2]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[2]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[2]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[2]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[2]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[2]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[2]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[2]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+2]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[5], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[3]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[3]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[3]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[3]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[3]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[3]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[3]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[3]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+3]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[4], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[4]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[4]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[4]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[4]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[4]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[4]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[4]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[4]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+4]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[3], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[5]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[5]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[5]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[5]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[5]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[5]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[5]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[5]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+5]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[2], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[6]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[6]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[6]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[6]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[6]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[6]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[6]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[6]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+6]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[1], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[7]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[7]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[7]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[7]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[7]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[7]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[7]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[7]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+7]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[0], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[8]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[8]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[8]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[8]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[8]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[8]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[8]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[8]);
            /* Reduction */
            X = ghash_red(r0, r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[9]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[9]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[9]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[9]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[9]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[9]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[9]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[9]);
            lastKey = KEY[10];
            if (nr > 10) {
                tmp1 = _mm_aesenc_si128(tmp1, KEY[10]);
                tmp2 = _mm_aesenc_si128(tmp2, KEY[10]);
                tmp3 = _mm_aesenc_si128(tmp3, KEY[10]);
                tmp4 = _mm_aesenc_si128(tmp4, KEY[10]);
                tmp5 = _mm_aesenc_si128(tmp5, KEY[10]);
                tmp6 = _mm_aesenc_si128(tmp6, KEY[10]);
                tmp7 = _mm_aesenc_si128(tmp7, KEY[10]);
                tmp8 = _mm_aesenc_si128(tmp8, KEY[10]);
                tmp1 = _mm_aesenc_si128(tmp1, KEY[11]);
                tmp2 = _mm_aesenc_si128(tmp2, KEY[11]);
                tmp3 = _mm_aesenc_si128(tmp3, KEY[11]);
                tmp4 = _mm_aesenc_si128(tmp4, KEY[11]);
                tmp5 = _mm_aesenc_si128(tmp5, KEY[11]);
                tmp6 = _mm_aesenc_si128(tmp6, KEY[11]);
                tmp7 = _mm_aesenc_si128(tmp7, KEY[11]);
                tmp8 = _mm_aesenc_si128(tmp8, KEY[11]);
                lastKey = KEY[12];
                if (nr > 12) {
                    tmp1 = _mm_aesenc_si128(tmp1, KEY[12]);
                    tmp2 = _mm_aesenc_si128(tmp2, KEY[12]);
                    tmp3 = _mm_aesenc_si128(tmp3, KEY[12]);
                    tmp4 = _mm_aesenc_si128(tmp4, KEY[12]);
                    tmp5 = _mm_aesenc_si128(tmp5, KEY[12]);
                    tmp6 = _mm_aesenc_si128(tmp6, KEY[12]);
                    tmp7 = _mm_aesenc_si128(tmp7, KEY[12]);
                    tmp8 = _mm_aesenc_si128(tmp8, KEY[12]);
                    tmp1 = _mm_aesenc_si128(tmp1, KEY[13]);
                    tmp2 = _mm_aesenc_si128(tmp2, KEY[13]);
                    tmp3 = _mm_aesenc_si128(tmp3, KEY[13]);
                    tmp4 = _mm_aesenc_si128(tmp4, KEY[13]);
                    tmp5 = _mm_aesenc_si128(tmp5, KEY[13]);
                    tmp6 = _mm_aesenc_si128(tmp6, KEY[13]);
                    tmp7 = _mm_aesenc_si128(tmp7, KEY[13]);
                    tmp8 = _mm_aesenc_si128(tmp8, KEY[13]);
                    lastKey = KEY[14];
                }
            }
            AES_ENC_LAST_8();
        }
        tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK);
        tmp2 = _mm_shuffle_epi8(tmp2, BSWAP_MASK);
        tmp3 = _mm_shuffle_epi8(tmp3, BSWAP_MASK);
        tmp4 = _mm_shuffle_epi8(tmp4, BSWAP_MASK);
        tmp5 = _mm_shuffle_epi8(tmp5, BSWAP_MASK);
        tmp6 = _mm_shuffle_epi8(tmp6, BSWAP_MASK);
        tmp7 = _mm_shuffle_epi8(tmp7, BSWAP_MASK);
        tmp8 = _mm_shuffle_epi8(tmp8, BSWAP_MASK);
        tmp1 = _mm_xor_si128(X, tmp1);
        X = gfmul8(tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, tmp8,
                   HT[0], HT[1], HT[2], HT[3], HT[4], HT[5], HT[6], HT[7]);
    }
    for (k = i*8; k < (int)(nbytes/16); k++) {
        tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64);
        ctr1 = _mm_add_epi32(ctr1, ONE);
        tmp1 = _mm_xor_si128(tmp1, KEY[0]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[1]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[2]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[3]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[4]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[5]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[6]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[7]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[8]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[9]);
        lastKey = KEY[10];
        if (nr > 10) {
            tmp1 = _mm_aesenc_si128(tmp1, lastKey);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[11]);
            lastKey = KEY[12];
            if (nr > 12) {
                tmp1 = _mm_aesenc_si128(tmp1, lastKey);
                tmp1 = _mm_aesenc_si128(tmp1, KEY[13]);
                lastKey = KEY[14];
            }
        }
        tmp1 = _mm_aesenclast_si128(tmp1, lastKey);
        tmp1 = _mm_xor_si128(tmp1, _mm_loadu_si128(&((__m128i*)in)[k]));
        _mm_storeu_si128(&((__m128i*)out)[k], tmp1);
        tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK);
        X = _mm_xor_si128(X, tmp1);
        X = gfmul_shifted(X, H);
    }
#else /* AES_GCM_AESNI_NO_UNROLL */
    for (k = 0; k < (int)(nbytes/16) && k < 1; k++) {
        tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64);
        ctr1 = _mm_add_epi32(ctr1, ONE);
        tmp1 = _mm_xor_si128(tmp1, KEY[0]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[1]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[2]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[3]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[4]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[5]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[6]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[7]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[8]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[9]);
        lastKey = KEY[10];
        if (nr > 10) {
            tmp1 = _mm_aesenc_si128(tmp1, lastKey);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[11]);
            lastKey = KEY[12];
            if (nr > 12) {
                tmp1 = _mm_aesenc_si128(tmp1, lastKey);
                tmp1 = _mm_aesenc_si128(tmp1, KEY[13]);
                lastKey = KEY[14];
            }
        }
        tmp1 = _mm_aesenclast_si128(tmp1, lastKey);
        tmp1 = _mm_xor_si128(tmp1, _mm_loadu_si128(&((__m128i*)in)[k]));
        _mm_storeu_si128(&((__m128i*)out)[k], tmp1);
        tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK);
        X = _mm_xor_si128(X, tmp1);
    }
    for (; k < (int)(nbytes/16); k++) {
        tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64);
        ctr1 = _mm_add_epi32(ctr1, ONE);
        tmp1 = _mm_xor_si128(tmp1, KEY[0]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[1]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[2]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[3]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[4]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[5]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[6]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[7]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[8]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[9]);
        X = gfmul_shifted(X, H);
        lastKey = KEY[10];
        if (nr > 10) {
            tmp1 = _mm_aesenc_si128(tmp1, lastKey);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[11]);
            lastKey = KEY[12];
            if (nr > 12) {
                tmp1 = _mm_aesenc_si128(tmp1, lastKey);
                tmp1 = _mm_aesenc_si128(tmp1, KEY[13]);
                lastKey = KEY[14];
            }
        }
        tmp1 = _mm_aesenclast_si128(tmp1, lastKey);
        tmp1 = _mm_xor_si128(tmp1, _mm_loadu_si128(&((__m128i*)in)[k]));
        _mm_storeu_si128(&((__m128i*)out)[k], tmp1);
        tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK);
        X = _mm_xor_si128(X, tmp1);
    }
    if (k > 0) {
        X = gfmul_shifted(X, H);
    }
#endif /* AES_GCM_AESNI_NO_UNROLL */
    /* If one partial block remains */
    if (nbytes % 16) {
        tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64);
        tmp1 = _mm_xor_si128(tmp1, KEY[0]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[1]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[2]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[3]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[4]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[5]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[6]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[7]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[8]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[9]);
        lastKey = KEY[10];
        if (nr > 10) {
            tmp1 = _mm_aesenc_si128(tmp1, lastKey);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[11]);
            lastKey = KEY[12];
            if (nr > 12) {
                tmp1 = _mm_aesenc_si128(tmp1, lastKey);
                tmp1 = _mm_aesenc_si128(tmp1, KEY[13]);
                lastKey = KEY[14];
            }
        }
        tmp1 = _mm_aesenclast_si128(tmp1, lastKey);
        last_block = tmp1;
        for (j=0; j < (int)(nbytes%16); j++)
            ((unsigned char*)&last_block)[j] = in[k*16+j];
        tmp1 = _mm_xor_si128(tmp1, last_block);
        last_block = tmp1;
        for (j=0; j < (int)(nbytes%16); j++)
            out[k*16+j] = ((unsigned char*)&last_block)[j];
        tmp1 = last_block;
        tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK);
        X = _mm_xor_si128(X, tmp1);
        X = gfmul_shifted(X, H);
    }
    AES_GCM_INSERT_EPI(tmp1, nbytes, abytes);
    X = _mm_xor_si128(X, tmp1);
    X = gfmul_shifted(X, H);
    X = _mm_shuffle_epi8(X, BSWAP_MASK);
    T = _mm_xor_si128(X, T);
    /*_mm_storeu_si128((__m128i*)tag, T);*/
    XMEMCPY(tag, &T, tbytes);
    ForceZero(&lastKey, sizeof(lastKey));
    return 0;
}
#ifdef HAVE_AES_DECRYPT
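/* AES-GCM decrypt using AES-NI and PCLMULQDQ. Mirrors AES_GCM_encrypt,
 * except the GHASH input is the incoming ciphertext, and the computed tag
 * is compared against the supplied one: *res is set to 1 on a match and 0
 * on failure. */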
static WARN_UNUSED_RESULT int AES_GCM_decrypt(
    const unsigned char *in, unsigned char *out,
    const unsigned char* addt,
    const unsigned char* ivec, const unsigned char *tag,
    word32 nbytes, word32 abytes, word32 ibytes,
    word32 tbytes, const unsigned char* key, int nr,
    int* res)
{
    int i, j, k;
    __m128i H, Y, T;
    __m128i *KEY = (__m128i*)key, lastKey;
    __m128i ctr1;
    __m128i last_block = _mm_setzero_si128();
    __m128i X = _mm_setzero_si128();
    __m128i tmp1, tmp2, XV;
#ifndef AES_GCM_AESNI_NO_UNROLL
    __m128i HT[8];
    __m128i r0, r1;
    __m128i tmp3, tmp4, tmp5, tmp6, tmp7, tmp8;
#endif /* AES_GCM_AESNI_NO_UNROLL */

    if (ibytes == GCM_NONCE_MID_SZ)
        aes_gcm_calc_iv_12(KEY, ivec, nr, H, Y, T);
    else
        aes_gcm_calc_iv(KEY, ivec, ibytes, nr, H, Y, T);

    for (i=0; i < (int)(abytes/16); i++) {
        tmp1 = _mm_loadu_si128(&((__m128i*)addt)[i]);
        tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK);
        X = _mm_xor_si128(X, tmp1);
        X = gfmul_sw(X, H);
    }
    if (abytes % 16) {
        last_block = _mm_setzero_si128();
        for (j=0; j < (int)(abytes%16); j++)
            ((unsigned char*)&last_block)[j] = addt[i*16+j];
        tmp1 = last_block;
        tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK);
        X = _mm_xor_si128(X, tmp1);
        X = gfmul_sw(X, H);
    }

    tmp1 = _mm_shuffle_epi8(Y, BSWAP_EPI64);
    ctr1 = _mm_add_epi32(tmp1, ONE);
    H = gfmul_shl1(H);
    i = 0;

#ifndef AES_GCM_AESNI_NO_UNROLL
    if (0 < nbytes/16/8) {
        HT[0] = H;
        HT[1] = gfmul_shifted(H, H);
        HT[2] = gfmul_shifted(H, HT[1]);
        HT[3] = gfmul_shifted(HT[1], HT[1]);
        HT[4] = gfmul_shifted(HT[1], HT[2]);
        HT[5] = gfmul_shifted(HT[2], HT[2]);
        HT[6] = gfmul_shifted(HT[2], HT[3]);
        HT[7] = gfmul_shifted(HT[3], HT[3]);
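
        /* The GHASH input is the ciphertext itself, so hashing the eight
         * blocks is interleaved with their decryption in the same
         * iteration (no one-iteration lag as in the encrypt path). */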
        for (; i < (int)(nbytes/16/8); i++) {
            r0 = _mm_setzero_si128();
            r1 = _mm_setzero_si128();
            tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64);
            tmp2 = _mm_add_epi32(ctr1, ONE);
            tmp2 = _mm_shuffle_epi8(tmp2, BSWAP_EPI64);
            tmp3 = _mm_add_epi32(ctr1, TWO);
            tmp3 = _mm_shuffle_epi8(tmp3, BSWAP_EPI64);
            tmp4 = _mm_add_epi32(ctr1, THREE);
            tmp4 = _mm_shuffle_epi8(tmp4, BSWAP_EPI64);
            tmp5 = _mm_add_epi32(ctr1, FOUR);
            tmp5 = _mm_shuffle_epi8(tmp5, BSWAP_EPI64);
            tmp6 = _mm_add_epi32(ctr1, FIVE);
            tmp6 = _mm_shuffle_epi8(tmp6, BSWAP_EPI64);
            tmp7 = _mm_add_epi32(ctr1, SIX);
            tmp7 = _mm_shuffle_epi8(tmp7, BSWAP_EPI64);
            tmp8 = _mm_add_epi32(ctr1, SEVEN);
            tmp8 = _mm_shuffle_epi8(tmp8, BSWAP_EPI64);
            ctr1 = _mm_add_epi32(ctr1, EIGHT);
            tmp1 = _mm_xor_si128(tmp1, KEY[0]);
            tmp2 = _mm_xor_si128(tmp2, KEY[0]);
            tmp3 = _mm_xor_si128(tmp3, KEY[0]);
            tmp4 = _mm_xor_si128(tmp4, KEY[0]);
            tmp5 = _mm_xor_si128(tmp5, KEY[0]);
            tmp6 = _mm_xor_si128(tmp6, KEY[0]);
            tmp7 = _mm_xor_si128(tmp7, KEY[0]);
            tmp8 = _mm_xor_si128(tmp8, KEY[0]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)in)[i*8+0]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            XV = _mm_xor_si128(XV, X);
            gfmul_only(XV, HT[7], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[1]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[1]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[1]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[1]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[1]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[1]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[1]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[1]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)in)[i*8+1]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[6], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[2]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[2]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[2]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[2]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[2]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[2]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[2]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[2]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)in)[i*8+2]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[5], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[3]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[3]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[3]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[3]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[3]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[3]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[3]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[3]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)in)[i*8+3]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[4], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[4]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[4]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[4]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[4]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[4]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[4]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[4]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[4]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)in)[i*8+4]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[3], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[5]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[5]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[5]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[5]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[5]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[5]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[5]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[5]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)in)[i*8+5]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[2], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[6]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[6]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[6]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[6]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[6]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[6]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[6]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[6]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)in)[i*8+6]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[1], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[7]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[7]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[7]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[7]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[7]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[7]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[7]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[7]);
            /* 128 x 128 Carryless Multiply */
            XV = _mm_loadu_si128(&((__m128i*)in)[i*8+7]);
            XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
            gfmul_only(XV, HT[0], &r0, &r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[8]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[8]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[8]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[8]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[8]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[8]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[8]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[8]);
            /* Reduction */
            X = ghash_red(r0, r1);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[9]);
            tmp2 = _mm_aesenc_si128(tmp2, KEY[9]);
            tmp3 = _mm_aesenc_si128(tmp3, KEY[9]);
            tmp4 = _mm_aesenc_si128(tmp4, KEY[9]);
            tmp5 = _mm_aesenc_si128(tmp5, KEY[9]);
            tmp6 = _mm_aesenc_si128(tmp6, KEY[9]);
            tmp7 = _mm_aesenc_si128(tmp7, KEY[9]);
            tmp8 = _mm_aesenc_si128(tmp8, KEY[9]);
            lastKey = KEY[10];
            if (nr > 10) {
                tmp1 = _mm_aesenc_si128(tmp1, KEY[10]);
                tmp2 = _mm_aesenc_si128(tmp2, KEY[10]);
                tmp3 = _mm_aesenc_si128(tmp3, KEY[10]);
                tmp4 = _mm_aesenc_si128(tmp4, KEY[10]);
                tmp5 = _mm_aesenc_si128(tmp5, KEY[10]);
                tmp6 = _mm_aesenc_si128(tmp6, KEY[10]);
                tmp7 = _mm_aesenc_si128(tmp7, KEY[10]);
                tmp8 = _mm_aesenc_si128(tmp8, KEY[10]);
                tmp1 = _mm_aesenc_si128(tmp1, KEY[11]);
                tmp2 = _mm_aesenc_si128(tmp2, KEY[11]);
                tmp3 = _mm_aesenc_si128(tmp3, KEY[11]);
                tmp4 = _mm_aesenc_si128(tmp4, KEY[11]);
                tmp5 = _mm_aesenc_si128(tmp5, KEY[11]);
                tmp6 = _mm_aesenc_si128(tmp6, KEY[11]);
                tmp7 = _mm_aesenc_si128(tmp7, KEY[11]);
                tmp8 = _mm_aesenc_si128(tmp8, KEY[11]);
                lastKey = KEY[12];
                if (nr > 12) {
                    tmp1 = _mm_aesenc_si128(tmp1, KEY[12]);
                    tmp2 = _mm_aesenc_si128(tmp2, KEY[12]);
                    tmp3 = _mm_aesenc_si128(tmp3, KEY[12]);
                    tmp4 = _mm_aesenc_si128(tmp4, KEY[12]);
                    tmp5 = _mm_aesenc_si128(tmp5, KEY[12]);
                    tmp6 = _mm_aesenc_si128(tmp6, KEY[12]);
                    tmp7 = _mm_aesenc_si128(tmp7, KEY[12]);
                    tmp8 = _mm_aesenc_si128(tmp8, KEY[12]);
                    tmp1 = _mm_aesenc_si128(tmp1, KEY[13]);
                    tmp2 = _mm_aesenc_si128(tmp2, KEY[13]);
                    tmp3 = _mm_aesenc_si128(tmp3, KEY[13]);
                    tmp4 = _mm_aesenc_si128(tmp4, KEY[13]);
                    tmp5 = _mm_aesenc_si128(tmp5, KEY[13]);
                    tmp6 = _mm_aesenc_si128(tmp6, KEY[13]);
                    tmp7 = _mm_aesenc_si128(tmp7, KEY[13]);
                    tmp8 = _mm_aesenc_si128(tmp8, KEY[13]);
                    lastKey = KEY[14];
                }
            }
            AES_ENC_LAST_8();
        }
    }
#endif /* AES_GCM_AESNI_NO_UNROLL */
    for (k = i*8; k < (int)(nbytes/16); k++) {
        tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64);
        ctr1 = _mm_add_epi32(ctr1, ONE);
        tmp1 = _mm_xor_si128(tmp1, KEY[0]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[1]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[2]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[3]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[4]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[5]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[6]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[7]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[8]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[9]);
        /* 128 x 128 Carryless Multiply */
        XV = _mm_loadu_si128(&((__m128i*)in)[k]);
        XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
        XV = _mm_xor_si128(XV, X);
        X = gfmul_shifted(XV, H);
        lastKey = KEY[10];
        if (nr > 10) {
            tmp1 = _mm_aesenc_si128(tmp1, lastKey);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[11]);
            lastKey = KEY[12];
            if (nr > 12) {
                tmp1 = _mm_aesenc_si128(tmp1, lastKey);
                tmp1 = _mm_aesenc_si128(tmp1, KEY[13]);
                lastKey = KEY[14];
            }
        }
        tmp1 = _mm_aesenclast_si128(tmp1, lastKey);
        tmp2 = _mm_loadu_si128(&((__m128i*)in)[k]);
        tmp1 = _mm_xor_si128(tmp1, tmp2);
        _mm_storeu_si128(&((__m128i*)out)[k], tmp1);
    }
    /* If one partial block remains */
    if (nbytes % 16) {
        tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64);
        tmp1 = _mm_xor_si128(tmp1, KEY[0]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[1]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[2]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[3]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[4]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[5]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[6]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[7]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[8]);
        tmp1 = _mm_aesenc_si128(tmp1, KEY[9]);
        lastKey = KEY[10];
        if (nr > 10) {
            tmp1 = _mm_aesenc_si128(tmp1, lastKey);
            tmp1 = _mm_aesenc_si128(tmp1, KEY[11]);
            lastKey = KEY[12];
            if (nr > 12) {
                tmp1 = _mm_aesenc_si128(tmp1, lastKey);
                tmp1 = _mm_aesenc_si128(tmp1, KEY[13]);
                lastKey = KEY[14];
            }
        }
        tmp1 = _mm_aesenclast_si128(tmp1, lastKey);
        last_block = _mm_setzero_si128();
        for (j=0; j < (int)(nbytes%16); j++)
            ((unsigned char*)&last_block)[j] = in[k*16+j];
        XV = last_block;
        tmp1 = _mm_xor_si128(tmp1, last_block);
        last_block = tmp1;
        for (j=0; j < (int)(nbytes%16); j++)
            out[k*16+j] = ((unsigned char*)&last_block)[j];
        XV = _mm_shuffle_epi8(XV, BSWAP_MASK);
        XV = _mm_xor_si128(XV, X);
        X = gfmul_shifted(XV, H);
    }
    AES_GCM_INSERT_EPI(tmp1, nbytes, abytes);
    /* 128 x 128 Carryless Multiply */
    X = _mm_xor_si128(X, tmp1);
    X = gfmul_shifted(X, H);
    X = _mm_shuffle_epi8(X, BSWAP_MASK);
    T = _mm_xor_si128(X, T);
    /* if (0xffff !=
           _mm_movemask_epi8(_mm_cmpeq_epi8(T, _mm_loadu_si128((__m128i*)tag)))) */
    if (XMEMCMP(tag, &T, tbytes) != 0)
        *res = 0; /* authentication failed */
    else
        *res = 1; /* authentication succeeded */
    ForceZero(&lastKey, sizeof(lastKey));
    return 0;
}
#endif /* HAVE_AES_DECRYPT */
#endif /* _MSC_VER */
#endif /* WOLFSSL_AESNI */
#if defined(GCM_SMALL)
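/* Multiply X by Y in GF(2^128) one bit at a time (shift-and-add): for each
 * set bit of Y, XOR the running value V into Z, then shift V right
 * (RIGHTSHIFTX folds the reduction polynomial back in). Minimal code size
 * at the cost of speed. */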
static void GMULT(byte* X, byte* Y)
{
    byte Z[AES_BLOCK_SIZE];
    byte V[AES_BLOCK_SIZE];
    int i, j;

    XMEMSET(Z, 0, AES_BLOCK_SIZE);
    XMEMCPY(V, X, AES_BLOCK_SIZE);
    for (i = 0; i < AES_BLOCK_SIZE; i++)
    {
        byte y = Y[i];
        for (j = 0; j < 8; j++)
        {
            if (y & 0x80) {
                xorbuf(Z, V, AES_BLOCK_SIZE);
            }
            RIGHTSHIFTX(V);
            y = y << 1;
        }
    }
    XMEMCPY(X, Z, AES_BLOCK_SIZE);
}

void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    byte x[AES_BLOCK_SIZE];
    byte scratch[AES_BLOCK_SIZE];
    word32 blocks, partial;
    byte* h;

    if (aes == NULL) {
        return;
    }

    h = aes->H;
    XMEMSET(x, 0, AES_BLOCK_SIZE);

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, a, AES_BLOCK_SIZE);
            GMULT(x, h);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, a, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, h);
        }
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, c, AES_BLOCK_SIZE);
            GMULT(x, h);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, c, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, h);
        }
    }

    /* Hash in the lengths of A and C in bits */
    FlattenSzInBits(&scratch[0], aSz);
    FlattenSzInBits(&scratch[8], cSz);
    xorbuf(x, scratch, AES_BLOCK_SIZE);
    GMULT(x, h);

    /* Copy the result into s. */
    XMEMCPY(s, x, sSz);
}
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for small implementation.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block) \
    do { \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \
        GMULT(AES_TAG(aes), aes->H); \
    } \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
/* end GCM_SMALL */
#elif defined(GCM_TABLE)
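/* Per-byte reduction constants for the table-driven GMULT below: when a
 * byte a is shifted out at Z[15] during the one-byte shift, R[a] supplies
 * the two bytes folded back into the top of the state (a times the
 * reduction polynomial, in GHASH's bit-reflected representation). */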
static const byte R[256][2] = {
    {0x00, 0x00}, {0x01, 0xc2}, {0x03, 0x84}, {0x02, 0x46},
    {0x07, 0x08}, {0x06, 0xca}, {0x04, 0x8c}, {0x05, 0x4e},
    {0x0e, 0x10}, {0x0f, 0xd2}, {0x0d, 0x94}, {0x0c, 0x56},
    {0x09, 0x18}, {0x08, 0xda}, {0x0a, 0x9c}, {0x0b, 0x5e},
    {0x1c, 0x20}, {0x1d, 0xe2}, {0x1f, 0xa4}, {0x1e, 0x66},
    {0x1b, 0x28}, {0x1a, 0xea}, {0x18, 0xac}, {0x19, 0x6e},
    {0x12, 0x30}, {0x13, 0xf2}, {0x11, 0xb4}, {0x10, 0x76},
    {0x15, 0x38}, {0x14, 0xfa}, {0x16, 0xbc}, {0x17, 0x7e},
    {0x38, 0x40}, {0x39, 0x82}, {0x3b, 0xc4}, {0x3a, 0x06},
    {0x3f, 0x48}, {0x3e, 0x8a}, {0x3c, 0xcc}, {0x3d, 0x0e},
    {0x36, 0x50}, {0x37, 0x92}, {0x35, 0xd4}, {0x34, 0x16},
    {0x31, 0x58}, {0x30, 0x9a}, {0x32, 0xdc}, {0x33, 0x1e},
    {0x24, 0x60}, {0x25, 0xa2}, {0x27, 0xe4}, {0x26, 0x26},
    {0x23, 0x68}, {0x22, 0xaa}, {0x20, 0xec}, {0x21, 0x2e},
    {0x2a, 0x70}, {0x2b, 0xb2}, {0x29, 0xf4}, {0x28, 0x36},
    {0x2d, 0x78}, {0x2c, 0xba}, {0x2e, 0xfc}, {0x2f, 0x3e},
    {0x70, 0x80}, {0x71, 0x42}, {0x73, 0x04}, {0x72, 0xc6},
    {0x77, 0x88}, {0x76, 0x4a}, {0x74, 0x0c}, {0x75, 0xce},
    {0x7e, 0x90}, {0x7f, 0x52}, {0x7d, 0x14}, {0x7c, 0xd6},
    {0x79, 0x98}, {0x78, 0x5a}, {0x7a, 0x1c}, {0x7b, 0xde},
    {0x6c, 0xa0}, {0x6d, 0x62}, {0x6f, 0x24}, {0x6e, 0xe6},
    {0x6b, 0xa8}, {0x6a, 0x6a}, {0x68, 0x2c}, {0x69, 0xee},
    {0x62, 0xb0}, {0x63, 0x72}, {0x61, 0x34}, {0x60, 0xf6},
    {0x65, 0xb8}, {0x64, 0x7a}, {0x66, 0x3c}, {0x67, 0xfe},
    {0x48, 0xc0}, {0x49, 0x02}, {0x4b, 0x44}, {0x4a, 0x86},
    {0x4f, 0xc8}, {0x4e, 0x0a}, {0x4c, 0x4c}, {0x4d, 0x8e},
    {0x46, 0xd0}, {0x47, 0x12}, {0x45, 0x54}, {0x44, 0x96},
    {0x41, 0xd8}, {0x40, 0x1a}, {0x42, 0x5c}, {0x43, 0x9e},
    {0x54, 0xe0}, {0x55, 0x22}, {0x57, 0x64}, {0x56, 0xa6},
    {0x53, 0xe8}, {0x52, 0x2a}, {0x50, 0x6c}, {0x51, 0xae},
    {0x5a, 0xf0}, {0x5b, 0x32}, {0x59, 0x74}, {0x58, 0xb6},
    {0x5d, 0xf8}, {0x5c, 0x3a}, {0x5e, 0x7c}, {0x5f, 0xbe},
    {0xe1, 0x00}, {0xe0, 0xc2}, {0xe2, 0x84}, {0xe3, 0x46},
    {0xe6, 0x08}, {0xe7, 0xca}, {0xe5, 0x8c}, {0xe4, 0x4e},
    {0xef, 0x10}, {0xee, 0xd2}, {0xec, 0x94}, {0xed, 0x56},
    {0xe8, 0x18}, {0xe9, 0xda}, {0xeb, 0x9c}, {0xea, 0x5e},
    {0xfd, 0x20}, {0xfc, 0xe2}, {0xfe, 0xa4}, {0xff, 0x66},
    {0xfa, 0x28}, {0xfb, 0xea}, {0xf9, 0xac}, {0xf8, 0x6e},
    {0xf3, 0x30}, {0xf2, 0xf2}, {0xf0, 0xb4}, {0xf1, 0x76},
    {0xf4, 0x38}, {0xf5, 0xfa}, {0xf7, 0xbc}, {0xf6, 0x7e},
    {0xd9, 0x40}, {0xd8, 0x82}, {0xda, 0xc4}, {0xdb, 0x06},
    {0xde, 0x48}, {0xdf, 0x8a}, {0xdd, 0xcc}, {0xdc, 0x0e},
    {0xd7, 0x50}, {0xd6, 0x92}, {0xd4, 0xd4}, {0xd5, 0x16},
    {0xd0, 0x58}, {0xd1, 0x9a}, {0xd3, 0xdc}, {0xd2, 0x1e},
    {0xc5, 0x60}, {0xc4, 0xa2}, {0xc6, 0xe4}, {0xc7, 0x26},
    {0xc2, 0x68}, {0xc3, 0xaa}, {0xc1, 0xec}, {0xc0, 0x2e},
    {0xcb, 0x70}, {0xca, 0xb2}, {0xc8, 0xf4}, {0xc9, 0x36},
    {0xcc, 0x78}, {0xcd, 0xba}, {0xcf, 0xfc}, {0xce, 0x3e},
    {0x91, 0x80}, {0x90, 0x42}, {0x92, 0x04}, {0x93, 0xc6},
    {0x96, 0x88}, {0x97, 0x4a}, {0x95, 0x0c}, {0x94, 0xce},
    {0x9f, 0x90}, {0x9e, 0x52}, {0x9c, 0x14}, {0x9d, 0xd6},
    {0x98, 0x98}, {0x99, 0x5a}, {0x9b, 0x1c}, {0x9a, 0xde},
    {0x8d, 0xa0}, {0x8c, 0x62}, {0x8e, 0x24}, {0x8f, 0xe6},
    {0x8a, 0xa8}, {0x8b, 0x6a}, {0x89, 0x2c}, {0x88, 0xee},
    {0x83, 0xb0}, {0x82, 0x72}, {0x80, 0x34}, {0x81, 0xf6},
    {0x84, 0xb8}, {0x85, 0x7a}, {0x87, 0x3c}, {0x86, 0xfe},
    {0xa9, 0xc0}, {0xa8, 0x02}, {0xaa, 0x44}, {0xab, 0x86},
    {0xae, 0xc8}, {0xaf, 0x0a}, {0xad, 0x4c}, {0xac, 0x8e},
    {0xa7, 0xd0}, {0xa6, 0x12}, {0xa4, 0x54}, {0xa5, 0x96},
    {0xa0, 0xd8}, {0xa1, 0x1a}, {0xa3, 0x5c}, {0xa2, 0x9e},
    {0xb5, 0xe0}, {0xb4, 0x22}, {0xb6, 0x64}, {0xb7, 0xa6},
    {0xb2, 0xe8}, {0xb3, 0x2a}, {0xb1, 0x6c}, {0xb0, 0xae},
    {0xbb, 0xf0}, {0xba, 0x32}, {0xb8, 0x74}, {0xb9, 0xb6},
    {0xbc, 0xf8}, {0xbd, 0x3a}, {0xbf, 0x7c}, {0xbe, 0xbe} };
static void GMULT(byte *x, byte m[256][AES_BLOCK_SIZE])
{
#if !defined(WORD64_AVAILABLE) || defined(BIG_ENDIAN_ORDER)
    int i, j;
    byte Z[AES_BLOCK_SIZE];
    byte a;

    XMEMSET(Z, 0, sizeof(Z));

    for (i = 15; i > 0; i--) {
        xorbuf(Z, m[x[i]], AES_BLOCK_SIZE);
        a = Z[15];

        for (j = 15; j > 0; j--) {
            Z[j] = Z[j-1];
        }

        Z[0] = R[a][0];
        Z[1] ^= R[a][1];
    }

    xorbuf(Z, m[x[0]], AES_BLOCK_SIZE);
    XMEMCPY(x, Z, AES_BLOCK_SIZE);
#else
    byte Z[AES_BLOCK_SIZE + AES_BLOCK_SIZE];
    byte a;
    word64* pZ;
    word64* pm;
    word64* px = (word64*)(x);
    int i;

    pZ = (word64*)(Z + 15 + 1);
    pm = (word64*)(m[x[15]]);
    pZ[0] = pm[0];
    pZ[1] = pm[1];
    a = Z[16 + 15];
    Z[15] = R[a][0];
    Z[16] ^= R[a][1];

    for (i = 14; i > 0; i--) {
        pZ = (word64*)(Z + i + 1);
        pm = (word64*)(m[x[i]]);
        pZ[0] ^= pm[0];
        pZ[1] ^= pm[1];
        a = Z[16 + i];
        Z[i] = R[a][0];
        Z[i+1] ^= R[a][1];
    }

    pZ = (word64*)(Z + 1);
    pm = (word64*)(m[x[0]]);
    px[0] = pZ[0] ^ pm[0]; px[1] = pZ[1] ^ pm[1];
#endif
}
void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    byte x[AES_BLOCK_SIZE];
    byte scratch[AES_BLOCK_SIZE];
    word32 blocks, partial;

    if (aes == NULL) {
        return;
    }

    XMEMSET(x, 0, AES_BLOCK_SIZE);

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, a, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, a, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
        }
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, c, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, c, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
        }
    }

    /* Hash in the lengths of A and C in bits */
    FlattenSzInBits(&scratch[0], aSz);
    FlattenSzInBits(&scratch[8], cSz);
    xorbuf(x, scratch, AES_BLOCK_SIZE);
    GMULT(x, aes->M0);

    /* Copy the result into s. */
    XMEMCPY(s, x, sSz);
}
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for table implementation.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H using pre-computed table.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block) \
    do { \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \
        GMULT(AES_TAG(aes), aes->M0); \
    } \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
/* end GCM_TABLE */
#elif defined(GCM_TABLE_4BIT)

/* remainder = x^7 + x^2 + x^1 + 1 => 0xe1
 * R shifts right a reversed bit pair of bytes such that:
 *     R(b0, b1) => b1 = (b1 >> 1) | (b0 << 7); b0 >>= 1
 *     0 => 0, 0, 0, 0 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ 00,00 = 00,00
 *     8 => 0, 0, 0, 1 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ e1,00 = e1,00
 *     4 => 0, 0, 1, 0 => R(R(R(00,00) ^ 00,00) ^ e1,00) ^ 00,00 = 70,80
 *     2 => 0, 1, 0, 0 => R(R(R(00,00) ^ e1,00) ^ 00,00) ^ 00,00 = 38,40
 *     1 => 1, 0, 0, 0 => R(R(R(e1,00) ^ 00,00) ^ 00,00) ^ 00,00 = 1c,20
 * To calculate the rest, XOR the results for each set bit:
 *     e.g. 6 = 4 ^ 2 => 48,c0
 *
 * The second half of the table holds the same values rotated by 4 bits.
 */
#if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
static const byte R[16][2] = {
    {0x00, 0x00}, {0x1c, 0x20}, {0x38, 0x40}, {0x24, 0x60},
    {0x70, 0x80}, {0x6c, 0xa0}, {0x48, 0xc0}, {0x54, 0xe0},
    {0xe1, 0x00}, {0xfd, 0x20}, {0xd9, 0x40}, {0xc5, 0x60},
    {0x91, 0x80}, {0x8d, 0xa0}, {0xa9, 0xc0}, {0xb5, 0xe0},
};
#else
static const word16 R[32] = {
    0x0000, 0x201c, 0x4038, 0x6024,
    0x8070, 0xa06c, 0xc048, 0xe054,
    0x00e1, 0x20fd, 0x40d9, 0x60c5,
    0x8091, 0xa08d, 0xc0a9, 0xe0b5,
    0x0000, 0xc201, 0x8403, 0x4602,
    0x0807, 0xca06, 0x8c04, 0x4e05,
    0x100e, 0xd20f, 0x940d, 0x560c,
    0x1809, 0xda08, 0x9c0a, 0x5e0b,
};
#endif
/* Multiply in GF(2^128) defined by polynomial:
 *     x^128 + x^7 + x^2 + x^1 + 1.
 *
 * H: hash key = encrypt(key, 0)
 * x = x * H in field
 *
 * x: cumulative result
 * m: 4-bit table
 *    [0..15] * H
 */
#if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
static void GMULT(byte *x, byte m[16][AES_BLOCK_SIZE])
{
    int i, j, n;
    byte Z[AES_BLOCK_SIZE];
    byte a;

    XMEMSET(Z, 0, sizeof(Z));

    for (i = 15; i >= 0; i--) {
        for (n = 0; n < 2; n++) {
            if (n == 0)
                xorbuf(Z, m[x[i] & 0xf], AES_BLOCK_SIZE);
            else {
                xorbuf(Z, m[x[i] >> 4], AES_BLOCK_SIZE);
                if (i == 0)
                    break;
            }
            a = Z[15] & 0xf;

            for (j = 15; j > 0; j--)
                Z[j] = (Z[j-1] << 4) | (Z[j] >> 4);
            Z[0] >>= 4;

            Z[0] ^= R[a][0];
            Z[1] ^= R[a][1];
        }
    }

    XMEMCPY(x, Z, AES_BLOCK_SIZE);
}
#elif defined(WC_32BIT_CPU)
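/* 4-bit table GMULT for 32-bit CPUs: one byte (two nibbles) of x is folded
 * in per iteration using 32-bit words. Entries m[0..15] hold n*H and, per
 * the comments below, m[16..31] hold the same products pre-rotated by 4
 * bits so both nibbles can be handled around a single byte rotation of Z. */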
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word32 z8[4] = {0, 0, 0, 0};
    byte a;
    word32* x8 = (word32*)x;
    word32* m8;
    byte xi;
    word32 n7, n6, n5, n4, n3, n2, n1, n0;

    for (i = 15; i > 0; i--) {
        xi = x[i];

        /* XOR in (msn * H) */
        m8 = (word32*)m[xi & 0xf];
        z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

        /* Cache top byte for remainder calculations - lost in rotate. */
        a = z8[3] >> 24;

        /* Rotate Z by 8-bits */
        z8[3] = (z8[2] >> 24) | (z8[3] << 8);
        z8[2] = (z8[1] >> 24) | (z8[2] << 8);
        z8[1] = (z8[0] >> 24) | (z8[1] << 8);
        z8[0] <<= 8;

        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word32)R[16 + (a & 0xf)];

        xi >>= 4;
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word32*)m[xi];
        a ^= (byte)(m8[3] >> 20);
        z8[0] ^= (word32)R[a >> 4];

        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word32*)m[16 + xi];
        z8[0] ^= m8[0]; z8[1] ^= m8[1];
        z8[2] ^= m8[2]; z8[3] ^= m8[3];
    }

    xi = x[0];

    /* XOR in most significant nibble * H */
    m8 = (word32*)m[xi & 0xf];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[3] >> 24) & 0xf;

    /* Rotate z by 4-bits */
    n7 = z8[3] & 0xf0f0f0f0ULL;
    n6 = z8[3] & 0x0f0f0f0fULL;
    n5 = z8[2] & 0xf0f0f0f0ULL;
    n4 = z8[2] & 0x0f0f0f0fULL;
    n3 = z8[1] & 0xf0f0f0f0ULL;
    n2 = z8[1] & 0x0f0f0f0fULL;
    n1 = z8[0] & 0xf0f0f0f0ULL;
    n0 = z8[0] & 0x0f0f0f0fULL;
    z8[3] = (n7 >> 4) | (n6 << 12) | (n4 >> 20);
    z8[2] = (n5 >> 4) | (n4 << 12) | (n2 >> 20);
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 20);
    z8[0] = (n1 >> 4) | (n0 << 12);

    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word32)R[a];
    /* XOR in next significant nibble * H */
    m8 = (word32*)m[xi >> 4];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

    /* Write back result. */
    x8[0] = z8[0]; x8[1] = z8[1]; x8[2] = z8[2]; x8[3] = z8[3];
}
#else
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word64 z8[2] = {0, 0};
    byte a;
    word64* x8 = (word64*)x;
    word64* m8;
    word64 n0, n1, n2, n3;
    byte xi;

    for (i = 15; i > 0; i--) {
        xi = x[i];

        /* XOR in (msn * H) */
        m8 = (word64*)m[xi & 0xf];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];

        /* Cache top byte for remainder calculations - lost in rotate. */
        a = z8[1] >> 56;

        /* Rotate Z by 8-bits */
        z8[1] = (z8[0] >> 56) | (z8[1] << 8);
        z8[0] <<= 8;

        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word64*)m[16 + (xi >> 4)];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];

        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word64)R[16 + (a & 0xf)];
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word64*)m[xi >> 4];
        a ^= (byte)(m8[1] >> 52);
        z8[0] ^= (word64)R[a >> 4];
    }

    xi = x[0];

    /* XOR in most significant nibble * H */
    m8 = (word64*)m[xi & 0xf];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];

    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[1] >> 56) & 0xf;

    /* Rotate z by 4-bits */
    n3 = z8[1] & 0xf0f0f0f0f0f0f0f0ULL;
    n2 = z8[1] & 0x0f0f0f0f0f0f0f0fULL;
    n1 = z8[0] & 0xf0f0f0f0f0f0f0f0ULL;
    n0 = z8[0] & 0x0f0f0f0f0f0f0f0fULL;
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 52);
    z8[0] = (n1 >> 4) | (n0 << 12);

    /* XOR in next significant nibble * H */
    m8 = (word64*)m[xi >> 4];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];
    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word64)R[a];

    /* Write back result. */
    x8[0] = z8[0];
    x8[1] = z8[1];
}
#endif
void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    byte x[AES_BLOCK_SIZE];
    byte scratch[AES_BLOCK_SIZE];
    word32 blocks, partial;

    if (aes == NULL) {
        return;
    }

    XMEMSET(x, 0, AES_BLOCK_SIZE);

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, a, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, a, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
        }
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, c, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, c, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
        }
    }

    /* Hash in the lengths of A and C in bits */
    FlattenSzInBits(&scratch[0], aSz);
    FlattenSzInBits(&scratch[8], cSz);
    xorbuf(x, scratch, AES_BLOCK_SIZE);
    GMULT(x, aes->M0);

    /* Copy the result into s. */
    XMEMCPY(s, x, sSz);
}
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for 4-bit table implementation.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H using pre-computed table.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block) \
    do { \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \
        GMULT(AES_TAG(aes), (aes)->M0); \
    } \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
#elif defined(WORD64_AVAILABLE) && !defined(GCM_WORD32)

#if !defined(FREESCALE_LTC_AES_GCM)
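/* Bit-serial GMULT over two 64-bit words. By default the conditional XOR
 * uses an all-ones/all-zeros mask derived from the top bit of y, avoiding a
 * data-dependent branch; defining AES_GCM_GMULT_NCT selects the branching
 * form instead. */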
  5901. static void GMULT(word64* X, word64* Y)
  5902. {
  5903. word64 Z[2] = {0,0};
  5904. word64 V[2];
  5905. int i, j;
  5906. word64 v1;
  5907. V[0] = X[0]; V[1] = X[1];
  5908. for (i = 0; i < 2; i++)
  5909. {
  5910. word64 y = Y[i];
  5911. for (j = 0; j < 64; j++)
  5912. {
  5913. #ifndef AES_GCM_GMULT_NCT
  5914. word64 mask = 0 - (y >> 63);
  5915. Z[0] ^= V[0] & mask;
  5916. Z[1] ^= V[1] & mask;
  5917. #else
  5918. if (y & 0x8000000000000000ULL) {
  5919. Z[0] ^= V[0];
  5920. Z[1] ^= V[1];
  5921. }
  5922. #endif
  5923. v1 = (0 - (V[1] & 1)) & 0xE100000000000000ULL;
  5924. V[1] >>= 1;
  5925. V[1] |= V[0] << 63;
  5926. V[0] >>= 1;
  5927. V[0] ^= v1;
  5928. y <<= 1;
  5929. }
  5930. }
  5931. X[0] = Z[0];
  5932. X[1] = Z[1];
  5933. }
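/* A minimal standalone sketch (assumed illustration, not part of the build)
 * of the branch-free conditional XOR used in the default GMULT path above:
 * when the top bit of y is set, mask becomes all ones and the XOR takes
 * effect; otherwise mask is zero and the XOR is a no-op, so execution time
 * does not depend on the secret bit. */
#if 0
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint64_t y = 0x8000000000000000ULL; /* secret bit in the MSB */
    uint64_t v = 0x00000000DEADBEEFULL;
    uint64_t z = 0;
    uint64_t mask = 0 - (y >> 63);      /* all ones if MSB set, else zero */

    z ^= v & mask;                      /* conditional XOR, no branch */
    printf("z = %016llx\n", (unsigned long long)z);
    return 0;
}
#endif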
void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    word64 x[2] = {0,0};
    word32 blocks, partial;
    word64 bigH[2];

    if (aes == NULL) {
        return;
    }

    XMEMCPY(bigH, aes->H, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(bigH, bigH, AES_BLOCK_SIZE);
#endif

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        word64 bigA[2];
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigA, a, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(bigA, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigA, a, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
        }
#ifdef OPENSSL_EXTRA
        /* store AAD partial tag for next call */
        aes->aadH[0] = (word32)((x[0] & 0xFFFFFFFF00000000ULL) >> 32);
        aes->aadH[1] = (word32)(x[0] & 0xFFFFFFFF);
        aes->aadH[2] = (word32)((x[1] & 0xFFFFFFFF00000000ULL) >> 32);
        aes->aadH[3] = (word32)(x[1] & 0xFFFFFFFF);
#endif
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        word64 bigC[2];
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
#ifdef OPENSSL_EXTRA
        /* Start from last AAD partial tag */
        if (aes->aadLen) {
            x[0] = ((word64)aes->aadH[0]) << 32 | aes->aadH[1];
            x[1] = ((word64)aes->aadH[2]) << 32 | aes->aadH[3];
        }
#endif
        while (blocks--) {
            XMEMCPY(bigC, c, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(bigC, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigC, c, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
        }
    }

    /* Hash in the lengths in bits of A and C */
    {
        word64 len[2];
        len[0] = aSz; len[1] = cSz;
#ifdef OPENSSL_EXTRA
        if (aes->aadLen)
            len[0] = (word64)aes->aadLen;
#endif
        /* Lengths are in bytes. Convert to bits. */
        len[0] *= 8;
        len[1] *= 8;

        x[0] ^= len[0];
        x[1] ^= len[1];
        GMULT(x, bigH);
    }
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(x, x, AES_BLOCK_SIZE);
#endif
    XMEMCPY(s, x, sSz);
}
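/* A minimal usage sketch (assumed illustration, not part of the build):
 * one-shot GHASH over AAD only, e.g. to compute the GMAC hash input.
 * Assumes 'aes' already has its key set so that aes->H = E_K(0^128) is
 * valid; 'aad' and 'aadSz' are hypothetical caller values. */
#if 0
byte digest[AES_BLOCK_SIZE];
GHASH(aes, aad, aadSz, NULL, 0, digest, sizeof(digest));
#endif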
#endif /* !FREESCALE_LTC_AES_GCM */

#ifdef WOLFSSL_AESGCM_STREAM
#ifdef LITTLE_ENDIAN_ORDER
/* Little-endian 64-bit word implementation requires byte reversal of H.
 *
 * H is all-zeros block encrypted with key.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords64((word64*)aes->H, (word64*)aes->H, AES_BLOCK_SIZE)
/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                             \
    do {                                                        \
        word64* x = (word64*)AES_TAG(aes);                      \
        word64* h = (word64*)aes->H;                            \
        word64 block64[2];                                      \
        XMEMCPY(block64, block, AES_BLOCK_SIZE);                \
        ByteReverseWords64(block64, block64, AES_BLOCK_SIZE);   \
        x[0] ^= block64[0];                                     \
        x[1] ^= block64[1];                                     \
        GMULT(x, h);                                            \
    }                                                           \
    while (0)

#ifdef OPENSSL_EXTRA
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                            \
    do {                                                \
        word64* x = (word64*)AES_TAG(aes);              \
        word64* h = (word64*)aes->H;                    \
        word64 len[2];                                  \
        len[0] = aes->aSz; len[1] = aes->cSz;           \
        if (aes->aadLen)                                \
            len[0] = (word64)aes->aadLen;               \
        /* Lengths are in bytes. Convert to bits. */    \
        len[0] *= 8;                                    \
        len[1] *= 8;                                    \
                                                        \
        x[0] ^= len[0];                                 \
        x[1] ^= len[1];                                 \
        GMULT(x, h);                                    \
        ByteReverseWords64(x, x, AES_BLOCK_SIZE);       \
    }                                                   \
    while (0)
#else
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                            \
    do {                                                \
        word64* x = (word64*)AES_TAG(aes);              \
        word64* h = (word64*)aes->H;                    \
        word64 len[2];                                  \
        len[0] = aes->aSz; len[1] = aes->cSz;           \
        /* Lengths are in bytes. Convert to bits. */    \
        len[0] *= 8;                                    \
        len[1] *= 8;                                    \
                                                        \
        x[0] ^= len[0];                                 \
        x[1] ^= len[1];                                 \
        GMULT(x, h);                                    \
        ByteReverseWords64(x, x, AES_BLOCK_SIZE);       \
    }                                                   \
    while (0)
#endif
#else
/* No extra initialization for big-endian 64-bit word implementation.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                 \
    do {                                            \
        word64* x = (word64*)AES_TAG(aes);          \
        word64* h = (word64*)aes->H;                \
        word64 block64[2];                          \
        XMEMCPY(block64, block, AES_BLOCK_SIZE);    \
        x[0] ^= block64[0];                         \
        x[1] ^= block64[1];                         \
        GMULT(x, h);                                \
    }                                               \
    while (0)
#ifdef OPENSSL_EXTRA
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Tag is already big-endian; no conversion needed.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                            \
    do {                                                \
        word64* x = (word64*)AES_TAG(aes);              \
        word64* h = (word64*)aes->H;                    \
        word64 len[2];                                  \
        len[0] = aes->aSz; len[1] = aes->cSz;           \
        if (aes->aadLen)                                \
            len[0] = (word64)aes->aadLen;               \
        /* Lengths are in bytes. Convert to bits. */    \
        len[0] *= 8;                                    \
        len[1] *= 8;                                    \
                                                        \
        x[0] ^= len[0];                                 \
        x[1] ^= len[1];                                 \
        GMULT(x, h);                                    \
    }                                                   \
    while (0)
#else
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Tag is already big-endian; no conversion needed.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                            \
    do {                                                \
        word64* x = (word64*)AES_TAG(aes);              \
        word64* h = (word64*)aes->H;                    \
        word64 len[2];                                  \
        len[0] = aes->aSz; len[1] = aes->cSz;           \
        /* Lengths are in bytes. Convert to bits. */    \
        len[0] *= 8;                                    \
        len[1] *= 8;                                    \
                                                        \
        x[0] ^= len[0];                                 \
        x[1] ^= len[1];                                 \
        GMULT(x, h);                                    \
    }                                                   \
    while (0)
#endif
#endif /* !LITTLE_ENDIAN_ORDER */
#endif /* WOLFSSL_AESGCM_STREAM */
/* end defined(WORD64_AVAILABLE) && !defined(GCM_WORD32) */

#else /* GCM_WORD32 */
static void GMULT(word32* X, word32* Y)
{
    word32 Z[4] = {0,0,0,0};
    word32 V[4];
    int i, j;

    V[0] = X[0]; V[1] = X[1]; V[2] = X[2]; V[3] = X[3];

    for (i = 0; i < 4; i++)
    {
        word32 y = Y[i];
        for (j = 0; j < 32; j++)
        {
            if (y & 0x80000000) {
                Z[0] ^= V[0];
                Z[1] ^= V[1];
                Z[2] ^= V[2];
                Z[3] ^= V[3];
            }

            if (V[3] & 0x00000001) {
                V[3] >>= 1;
                V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
                V[2] >>= 1;
                V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
                V[1] >>= 1;
                V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
                V[0] >>= 1;
                V[0] ^= 0xE1000000;
            } else {
                V[3] >>= 1;
                V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
                V[2] >>= 1;
                V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
                V[1] >>= 1;
                V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
                V[0] >>= 1;
            }
            y <<= 1;
        }
    }
    X[0] = Z[0];
    X[1] = Z[1];
    X[2] = Z[2];
    X[3] = Z[3];
}
void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    word32 x[4] = {0,0,0,0};
    word32 blocks, partial;
    word32 bigH[4];

    if (aes == NULL) {
        return;
    }

    XMEMCPY(bigH, aes->H, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords(bigH, bigH, AES_BLOCK_SIZE);
#endif

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        word32 bigA[4];
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigA, a, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            x[2] ^= bigA[2];
            x[3] ^= bigA[3];
            GMULT(x, bigH);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(bigA, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigA, a, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            x[2] ^= bigA[2];
            x[3] ^= bigA[3];
            GMULT(x, bigH);
        }
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        word32 bigC[4];
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigC, c, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            x[2] ^= bigC[2];
            x[3] ^= bigC[3];
            GMULT(x, bigH);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(bigC, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigC, c, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            x[2] ^= bigC[2];
            x[3] ^= bigC[3];
            GMULT(x, bigH);
        }
    }

    /* Hash in the lengths in bits of A and C */
    {
        word32 len[4];
        /* Lengths are in bytes. Convert to bits. */
        len[0] = (aSz >> (8*sizeof(aSz) - 3));
        len[1] = aSz << 3;
        len[2] = (cSz >> (8*sizeof(cSz) - 3));
        len[3] = cSz << 3;

        x[0] ^= len[0];
        x[1] ^= len[1];
        x[2] ^= len[2];
        x[3] ^= len[3];
        GMULT(x, bigH);
    }
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords(x, x, AES_BLOCK_SIZE);
#endif
    XMEMCPY(s, x, sSz);
}
#ifdef WOLFSSL_AESGCM_STREAM
#ifdef LITTLE_ENDIAN_ORDER
/* Little-endian 32-bit word implementation requires byte reversal of H.
 *
 * H is all-zeros block encrypted with key.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords((word32*)aes->H, (word32*)aes->H, AES_BLOCK_SIZE)

/* GHASH one block of data.
 *
 * XOR block, in big-endian form, into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                         \
    do {                                                    \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->H;                        \
        word32 bigEnd[4];                                   \
        XMEMCPY(bigEnd, block, AES_BLOCK_SIZE);             \
        ByteReverseWords(bigEnd, bigEnd, AES_BLOCK_SIZE);   \
        x[0] ^= bigEnd[0];                                  \
        x[1] ^= bigEnd[1];                                  \
        x[2] ^= bigEnd[2];                                  \
        x[3] ^= bigEnd[3];                                  \
        GMULT(x, h);                                        \
    }                                                       \
    while (0)

/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                                \
    do {                                                    \
        word32 len[4];                                      \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->H;                        \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3));    \
        len[1] = aes->aSz << 3;                             \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3));    \
        len[3] = aes->cSz << 3;                             \
        x[0] ^= len[0];                                     \
        x[1] ^= len[1];                                     \
        x[2] ^= len[2];                                     \
        x[3] ^= len[3];                                     \
        GMULT(x, h);                                        \
        ByteReverseWords(x, x, AES_BLOCK_SIZE);             \
    }                                                       \
    while (0)
#else
/* No extra initialization for 32-bit word implementation.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                 \
    do {                                            \
        word32* x = (word32*)AES_TAG(aes);          \
        word32* h = (word32*)aes->H;                \
        word32 block32[4];                          \
        XMEMCPY(block32, block, AES_BLOCK_SIZE);    \
        x[0] ^= block32[0];                         \
        x[1] ^= block32[1];                         \
        x[2] ^= block32[2];                         \
        x[3] ^= block32[3];                         \
        GMULT(x, h);                                \
    }                                               \
    while (0)

/* GHASH in AAD and cipher text lengths in bits.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                                \
    do {                                                    \
        word32 len[4];                                      \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->H;                        \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3));    \
        len[1] = aes->aSz << 3;                             \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3));    \
        len[3] = aes->cSz << 3;                             \
        x[0] ^= len[0];                                     \
        x[1] ^= len[1];                                     \
        x[2] ^= len[2];                                     \
        x[3] ^= len[3];                                     \
        GMULT(x, h);                                        \
    }                                                       \
    while (0)
#endif /* LITTLE_ENDIAN_ORDER */
#endif /* WOLFSSL_AESGCM_STREAM */
#endif /* end GCM_WORD32 */
#if !defined(WOLFSSL_XILINX_CRYPT) && !defined(WOLFSSL_AFALG_XILINX_AES)

#ifdef WOLFSSL_AESGCM_STREAM
#ifndef GHASH_LEN_BLOCK
/* Hash in the lengths of the AAD and cipher text in bits.
 *
 * Default implementation.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                        \
    do {                                            \
        byte scratch[AES_BLOCK_SIZE];               \
        FlattenSzInBits(&scratch[0], (aes)->aSz);   \
        FlattenSzInBits(&scratch[8], (aes)->cSz);   \
        GHASH_ONE_BLOCK(aes, scratch);              \
    }                                               \
    while (0)
#endif

/* Initialize a GHASH for streaming operations.
 *
 * @param [in, out] aes  AES GCM object.
 */
static void GHASH_INIT(Aes* aes) {
    /* Set tag to all zeros as initial value. */
    XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
    /* Reset counts of AAD and cipher text. */
    aes->aOver = 0;
    aes->cOver = 0;
    /* Extra initialization based on implementation. */
    GHASH_INIT_EXTRA(aes);
}
/* Update the GHASH with AAD and/or cipher text.
 *
 * @param [in, out] aes  AES GCM object.
 * @param [in]      a    Additional authentication data buffer.
 * @param [in]      aSz  Size of data in AAD buffer.
 * @param [in]      c    Cipher text buffer.
 * @param [in]      cSz  Size of data in cipher text buffer.
 */
static void GHASH_UPDATE(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz)
{
    word32 blocks;
    word32 partial;

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        /* Update count of AAD we have hashed. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }

        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, a);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, partial);
            aes->aOver = (byte)partial;
        }
    }
    if (aes->aOver > 0 && cSz > 0 && c != NULL) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        byte sz = AES_BLOCK_SIZE - aes->aOver;
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0, sz);
        /* GHASH last AAD block. */
        GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        /* Update count of cipher text we have hashed. */
        aes->cSz += cSz;
        if (aes->cOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->cOver;
            if (sz > cSz) {
                sz = cSz;
            }
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
            /* Update count of unprocessed cipher text. */
            aes->cOver += sz;
            if (aes->cOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->cOver = 0;
            }
            /* Used up some data. */
            cSz -= sz;
            c += sz;
        }

        /* Calculate number of blocks of cipher text and the leftover. */
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, c);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), c, partial);
            aes->cOver = (byte)partial;
        }
    }
}
/* Finalize the GHASH calculation.
 *
 * Complete hashing cipher text and hash the AAD and cipher text lengths.
 *
 * @param [in, out] aes  AES GCM object.
 * @param [out]     s    Authentication tag.
 * @param [in]      sSz  Size of authentication tag required.
 */
static void GHASH_FINAL(Aes* aes, byte* s, word32 sSz)
{
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;

    if (aes->cOver > 0) {
        /* Cipher text block incomplete. */
        over = aes->cOver;
    }
    if (over > 0) {
        /* Zeroize the unused part of the block. */
        XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
        /* Hash the last block of cipher text. */
        GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
    }
    /* Hash in the lengths of AAD and cipher text in bits */
    GHASH_LEN_BLOCK(aes);
    /* Copy the result into s. */
    XMEMCPY(s, AES_TAG(aes), sSz);
}
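/* A minimal usage sketch (assumed illustration, not part of the build) of
 * the streaming GHASH flow above. Data may arrive in arbitrary-sized
 * chunks; the partial-block buffering in GHASH_UPDATE() makes the result
 * identical to a one-shot GHASH over the concatenated input. 'aad', 'ct'
 * and their sizes are hypothetical caller values. */
#if 0
byte tag[AES_BLOCK_SIZE];
GHASH_INIT(aes);
GHASH_UPDATE(aes, aad, aadSz, NULL, 0);          /* all AAD first */
GHASH_UPDATE(aes, NULL, 0, ct, ctSz1);           /* then cipher text... */
GHASH_UPDATE(aes, NULL, 0, ct + ctSz1, ctSz2);   /* ...in any chunking */
GHASH_FINAL(aes, tag, sizeof(tag));
#endif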
#endif /* WOLFSSL_AESGCM_STREAM */
#ifdef FREESCALE_LTC_AES_GCM
int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    status_t status;
    word32 keySize;

    /* argument checks */
    if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
        return BAD_FUNC_ARG;
    }

    if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
        WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
        return BAD_FUNC_ARG;
    }

    status = wc_AesGetKeySize(aes, &keySize);
    if (status)
        return status;

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;

    status = LTC_AES_EncryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
        authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
}
#else

#ifdef STM32_CRYPTO_AES_GCM
/* This function supports inline (in-place) encrypt.
 * Define STM32_AESGCM_PARTIAL for newer STM32 Cube HALs that include a
 * workaround for handling partial packets, improving auth tag calculation
 * performance by using hardware. */
static WARN_UNUSED_RESULT int wc_AesGcmEncrypt_STM32(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#else
    word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
#endif
    word32 keySize;
#ifdef WOLFSSL_STM32_CUBEMX
    int status = HAL_OK;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
#else
    int status = SUCCESS;
#endif
    word32 partial = sz % AES_BLOCK_SIZE;
    word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
    word32 ctrInit[AES_BLOCK_SIZE/sizeof(word32)];
    word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
    word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
    byte* authInPadded = NULL;
    int authPadSz, wasAlloc = 0, useSwGhash = 0;
    ret = wc_AesGetKeySize(aes, &keySize);
    if (ret != 0)
        return ret;

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;
#endif

    XMEMSET(ctr, 0, AES_BLOCK_SIZE);
    if (ivSz == GCM_NONCE_MID_SZ) {
        byte* pCtr = (byte*)ctr;
        XMEMCPY(ctr, iv, ivSz);
        pCtr[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        GHASH(aes, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
    }
    XMEMCPY(ctrInit, ctr, sizeof(ctr)); /* save off initial counter for GMAC */

    /* Authentication buffer - must be 4-byte multiple zero padded */
    authPadSz = authInSz % sizeof(word32);
    if (authPadSz != 0) {
        authPadSz = authInSz + sizeof(word32) - authPadSz;
        if (authPadSz <= sizeof(authhdr)) {
            authInPadded = (byte*)authhdr;
        }
        else {
            authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
                DYNAMIC_TYPE_TMP_BUFFER);
            if (authInPadded == NULL) {
                wolfSSL_CryptHwMutexUnLock();
                return MEMORY_E;
            }
            wasAlloc = 1;
        }
        XMEMSET(authInPadded, 0, authPadSz);
        XMEMCPY(authInPadded, authIn, authInSz);
    } else {
        authPadSz = authInSz;
        authInPadded = (byte*)authIn;
    }

    /* For cases where the hardware cannot produce the authTag, calculate it
     * in software: if the IV is not 12 bytes, compute GHASH in software */
    if (ivSz != GCM_NONCE_MID_SZ
    #ifndef CRYP_HEADERWIDTHUNIT_BYTE
        /* or hardware that does not support partial block */
        || sz == 0 || partial != 0
    #endif
    #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
        /* or authIn is not a multiple of 4 */
        || authPadSz != authInSz
    #endif
    ) {
        useSwGhash = 1;
    }
    /* Hardware requires counter + 1 */
    IncrementGcmCounter((byte*)ctr);

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }
#ifdef WOLFSSL_STM32_CUBEMX
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
    hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_GCM;
    #ifdef CRYP_HEADERWIDTHUNIT_BYTE
    /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
    hcryp.Init.HeaderSize = authInSz;
    #else
    hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
    #endif
    #ifdef STM32_AESGCM_PARTIAL
    hcryp.Init.HeaderPadSize = authPadSz - authInSz;
    #endif
    #ifdef CRYP_KEYIVCONFIG_ONCE
    /* allows repeated calls to HAL_CRYP_Encrypt */
    hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
    #endif
    ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
    HAL_CRYP_Init(&hcryp);

    #ifndef CRYP_KEYIVCONFIG_ONCE
    /* GCM payload phase - can handle partial blocks */
    status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
        (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
    #else
    /* GCM payload phase - blocks */
    if (blocks) {
        status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
            (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
    }
    /* GCM payload phase - partial remainder */
    if (status == HAL_OK && (partial != 0 || blocks == 0)) {
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)partialBlock, partial,
            (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    #endif
    if (status == HAL_OK && !useSwGhash) {
        /* Compute the authTag */
        status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
            STM32_HAL_TIMEOUT);
    }
#elif defined(STM32_CRYPTO_AES_ONLY)
    /* Set the CRYP parameters */
    hcryp.Init.HeaderSize = authPadSz;
    if (authPadSz == 0)
        hcryp.Init.Header = NULL; /* cannot pass pointer here when authIn == 0 */
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
    HAL_CRYP_Init(&hcryp);

    /* GCM init phase */
    status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
    if (status == HAL_OK) {
        /* GCM header phase */
        hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
        status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
    }
    if (status == HAL_OK) {
        /* GCM payload phase - blocks */
        hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
        if (blocks) {
            status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
                (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
        }
    }
    if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
        /* GCM payload phase - partial remainder */
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYPEx_AES_Auth(&hcryp, (uint8_t*)partialBlock, partial,
            (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    if (status == HAL_OK && !useSwGhash) {
        /* GCM final phase */
        hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
        status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
    }
#else
    hcryp.Init.HeaderSize = authPadSz;
    HAL_CRYP_Init(&hcryp);
    if (blocks) {
        /* GCM payload phase - blocks */
        status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (byte*)in,
            (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
    }
    if (status == HAL_OK && (partial != 0 || blocks == 0)) {
        /* GCM payload phase - partial remainder */
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (uint8_t*)partialBlock, partial,
            (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    if (status == HAL_OK && !useSwGhash) {
        /* Compute the authTag */
        status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
    }
#endif

    if (status != HAL_OK)
        ret = AES_GCM_AUTH_E;
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ByteReverseWords(keyCopy, (word32*)aes->key, keySize);
    status = CRYP_AES_GCM(MODE_ENCRYPT, (uint8_t*)ctr,
        (uint8_t*)keyCopy, keySize * 8,
        (uint8_t*)in, sz,
        (uint8_t*)authInPadded, authInSz,
        (uint8_t*)out, (uint8_t*)tag);
    if (status != SUCCESS)
        ret = AES_GCM_AUTH_E;
#endif /* WOLFSSL_STM32_CUBEMX */
    wolfSSL_CryptHwMutexUnLock();

    if (ret == 0) {
        /* return authTag */
        if (authTag) {
            if (useSwGhash) {
                GHASH(aes, authIn, authInSz, out, sz, authTag, authTagSz);
                ret = wc_AesEncrypt(aes, (byte*)ctrInit, (byte*)tag);
                if (ret == 0) {
                    xorbuf(authTag, tag, authTagSz);
                }
            }
            else {
                /* use hardware calculated tag */
                XMEMCPY(authTag, tag, authTagSz);
            }
        }
    }

    /* Free memory */
    if (wasAlloc) {
        XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
    }

    return ret;
}
#endif /* STM32_CRYPTO_AES_GCM */

#ifdef WOLFSSL_AESNI
/* For performance reasons, this code must not be inlined. */
WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz);
#else
static
#endif
WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partial = sz % AES_BLOCK_SIZE;
    const byte* p = in;
    byte* c = out;
    ALIGN32 byte counter[AES_BLOCK_SIZE];
    ALIGN32 byte initialCounter[AES_BLOCK_SIZE];
    ALIGN32 byte scratch[AES_BLOCK_SIZE];

    if (ivSz == GCM_NONCE_MID_SZ) {
        /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
        XMEMCPY(counter, iv, ivSz);
        XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
            AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
        counter[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Counter is GHASH of IV. */
#ifdef OPENSSL_EXTRA
        word32 aadTemp = aes->aadLen;
        aes->aadLen = 0;
#endif
        GHASH(aes, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
#ifdef OPENSSL_EXTRA
        aes->aadLen = aadTemp;
#endif
    }
    XMEMCPY(initialCounter, counter, AES_BLOCK_SIZE);

#ifdef WOLFSSL_PIC32MZ_CRYPT
    if (blocks) {
        /* use initial IV for HW, but don't use it below */
        XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
        ret = wc_Pic32AesCrypt(
            aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
            out, in, (blocks * AES_BLOCK_SIZE),
            PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
        if (ret != 0)
            return ret;
    }
    /* process remainder using partial handling */
#endif

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
    /* some hardware acceleration can gain performance from doing AES encryption
     * of the whole buffer at once */
    if (c != p && blocks > 0) { /* cannot handle inline encryption */
        while (blocks--) {
            IncrementGcmCounter(counter);
            XMEMCPY(c, counter, AES_BLOCK_SIZE);
            c += AES_BLOCK_SIZE;
        }

        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, p, AES_BLOCK_SIZE * blocks);
        p += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB && !WOLFSSL_PIC32MZ_CRYPT */
    {
        while (blocks--) {
            IncrementGcmCounter(counter);
#if !defined(WOLFSSL_PIC32MZ_CRYPT)
            ret = wc_AesEncrypt(aes, counter, scratch);
            if (ret != 0)
                return ret;
            xorbufout(c, scratch, p, AES_BLOCK_SIZE);
#endif
            p += AES_BLOCK_SIZE;
            c += AES_BLOCK_SIZE;
        }
    }

    if (partial != 0) {
        IncrementGcmCounter(counter);
        ret = wc_AesEncrypt(aes, counter, scratch);
        if (ret != 0)
            return ret;
        xorbufout(c, scratch, p, partial);
    }
    if (authTag) {
        GHASH(aes, authIn, authInSz, out, sz, authTag, authTagSz);
        ret = wc_AesEncrypt(aes, initialCounter, scratch);
        if (ret != 0)
            return ret;
        xorbuf(authTag, scratch, authTagSz);
#ifdef OPENSSL_EXTRA
        if (!in && !sz)
            /* store AAD size for next call */
            aes->aadLen = authInSz;
#endif
    }

    return ret;
}
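/* A minimal standalone sketch (assumed illustration, not part of the build)
 * of the counter arithmetic driving the loop above: GCM's inc32 increments
 * only the final 32 bits of the counter block, big-endian, which is what
 * IncrementGcmCounter() is assumed to do in this file. */
#if 0
#include <stdint.h>

static void ctr32_inc(uint8_t ctr[16])
{
    int i;
    /* Work from the last byte toward byte 12; stop once a byte does not
     * wrap around to zero. */
    for (i = 15; i >= 12; i--) {
        if (++ctr[i] != 0)
            break;
    }
}
#endif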
/* Software AES - GCM Encrypt */
int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    /* argument checks */
    if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
        return BAD_FUNC_ARG;
    }

    if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
        WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    if (aes->devId != INVALID_DEVID) {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmEncrypt(aes, out, in, sz, iv, ivSz, authTag,
                authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
            sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
    #if defined(HAVE_CAVIUM)
        #ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM only works with 20-byte AAD */
            return NitroxAesGcmEncrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
        #endif
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmEncrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
    #else /* WOLFSSL_ASYNC_CRYPT_TEST */
        if (wc_AsyncTestInit(&aes->asyncDev, ASYNC_TEST_AES_GCM_ENCRYPT)) {
            WC_ASYNC_TEST* testDev = &aes->asyncDev.test;
            testDev->aes.aes = aes;
            testDev->aes.out = out;
            testDev->aes.in = in;
            testDev->aes.sz = sz;
            testDev->aes.iv = iv;
            testDev->aes.ivSz = ivSz;
            testDev->aes.authTag = authTag;
            testDev->aes.authTagSz = authTagSz;
            testDev->aes.authIn = authIn;
            testDev->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */
#ifdef WOLFSSL_SILABS_SE_ACCEL
    return wc_AesGcmEncrypt_silabs(
        aes, out, in, sz,
        iv, ivSz,
        authTag, authTagSz,
        authIn, authInSz);
#endif

#ifdef STM32_CRYPTO_AES_GCM
    return wc_AesGcmEncrypt_STM32(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif /* STM32_CRYPTO_AES_GCM */

#ifdef WOLFSSL_AESNI
    #ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_encrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (const byte*)aes->key, aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
    #endif
    #ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_encrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (const byte*)aes->key, aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
    #endif
    if (haveAESNI) {
        AES_GCM_encrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (const byte*)aes->key, aes->rounds);
        return 0;
    }
    else
#endif
    {
        return AES_GCM_encrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz,
            authIn, authInSz);
    }
}
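/* A minimal usage sketch (assumed illustration, not part of the build):
 * one-shot AES-128-GCM encryption through the public API. Error handling
 * is trimmed and the key/IV values are placeholders, not real secrets. */
#if 0
Aes aesEnc;
byte key[16] = {0};
byte iv[GCM_NONCE_MID_SZ] = {0};
byte plain[32] = {0};
byte cipher[32];
byte tag[AES_BLOCK_SIZE];

wc_AesInit(&aesEnc, NULL, INVALID_DEVID);
wc_AesGcmSetKey(&aesEnc, key, sizeof(key));
wc_AesGcmEncrypt(&aesEnc, cipher, plain, sizeof(plain), iv, sizeof(iv),
    tag, sizeof(tag), NULL, 0);
wc_AesFree(&aesEnc);
#endif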
#endif

/* AES GCM Decrypt */
#if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
#ifdef FREESCALE_LTC_AES_GCM
int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
    word32 keySize;
    status_t status;

    /* argument checks */
    /* If the sz is non-zero, both in and out must be set. If sz is 0,
     * in and out are don't cares, as this is the GMAC case. */
    if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
        authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
        ivSz == 0) {
        return BAD_FUNC_ARG;
    }

    ret = wc_AesGetKeySize(aes, &keySize);
    if (ret != 0) {
        return ret;
    }

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_DecryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
        authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
}
#else

#ifdef STM32_CRYPTO_AES_GCM
/* This function supports inline (in-place) decrypt. */
static WARN_UNUSED_RESULT int wc_AesGcmDecrypt_STM32(
    Aes* aes, byte* out,
    const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
#ifdef WOLFSSL_STM32_CUBEMX
    int status = HAL_OK;
    CRYP_HandleTypeDef hcryp;
    word32 blocks = sz / AES_BLOCK_SIZE;
#else
    int status = SUCCESS;
    word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
#endif
    word32 keySize;
    word32 partial = sz % AES_BLOCK_SIZE;
    word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
    word32 tagExpected[AES_BLOCK_SIZE/sizeof(word32)];
    word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
    word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
    word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
    byte* authInPadded = NULL;
    int authPadSz, wasAlloc = 0, tagComputed = 0;

    ret = wc_AesGetKeySize(aes, &keySize);
    if (ret != 0)
        return ret;

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;
#endif

    XMEMSET(ctr, 0, AES_BLOCK_SIZE);
    if (ivSz == GCM_NONCE_MID_SZ) {
        byte* pCtr = (byte*)ctr;
        XMEMCPY(ctr, iv, ivSz);
        pCtr[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        GHASH(aes, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
    }

    /* Make a copy of the expected authTag, which could get corrupted in some
     * Cube HAL versions without proper partial-block support.
     * For TLS blocks the authTag is after the output buffer, so save it. */
    XMEMCPY(tagExpected, authTag, authTagSz);

    /* Authentication buffer - must be 4-byte multiple zero padded */
    authPadSz = authInSz % sizeof(word32);
    if (authPadSz != 0) {
        authPadSz = authInSz + sizeof(word32) - authPadSz;
    }
    else {
        authPadSz = authInSz;
    }
    /* For cases where the hardware cannot produce the authTag, calculate it
     * in software: if the IV is not 12 bytes, compute GHASH in software */
    if (ivSz != GCM_NONCE_MID_SZ
    #ifndef CRYP_HEADERWIDTHUNIT_BYTE
        /* or hardware that does not support partial block */
        || sz == 0 || partial != 0
    #endif
    #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
        /* or authIn is not a multiple of 4 */
        || authPadSz != authInSz
    #endif
    ) {
        GHASH(aes, authIn, authInSz, in, sz, (byte*)tag, sizeof(tag));
        ret = wc_AesEncrypt(aes, (byte*)ctr, (byte*)partialBlock);
        if (ret != 0)
            return ret;
        xorbuf(tag, partialBlock, sizeof(tag));
        tagComputed = 1;
    }

    /* If using hardware for the authentication tag, make sure the AAD is
     * aligned and zero padded */
    if (authPadSz != authInSz && !tagComputed) {
        if (authPadSz <= sizeof(authhdr)) {
            authInPadded = (byte*)authhdr;
        }
        else {
            authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
                DYNAMIC_TYPE_TMP_BUFFER);
            if (authInPadded == NULL) {
                wolfSSL_CryptHwMutexUnLock();
                return MEMORY_E;
            }
            wasAlloc = 1;
        }
        XMEMSET(authInPadded, 0, authPadSz);
        XMEMCPY(authInPadded, authIn, authInSz);
    } else {
        authInPadded = (byte*)authIn;
    }

    /* Hardware requires counter + 1 */
    IncrementGcmCounter((byte*)ctr);

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }
#ifdef WOLFSSL_STM32_CUBEMX
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
    hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_GCM;
    #ifdef CRYP_HEADERWIDTHUNIT_BYTE
    /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
    hcryp.Init.HeaderSize = authInSz;
    #else
    hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
    #endif
    #ifdef STM32_AESGCM_PARTIAL
    hcryp.Init.HeaderPadSize = authPadSz - authInSz;
    #endif
    #ifdef CRYP_KEYIVCONFIG_ONCE
    /* allows repeated calls to HAL_CRYP_Decrypt */
    hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
    #endif
    ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
    HAL_CRYP_Init(&hcryp);

    #ifndef CRYP_KEYIVCONFIG_ONCE
    status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
        (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
    #else
    /* GCM payload phase - blocks */
    if (blocks) {
        status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
            (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
    }
    /* GCM payload phase - partial remainder */
    if (status == HAL_OK && (partial != 0 || blocks == 0)) {
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)partialBlock, partial,
            (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    #endif
    if (status == HAL_OK && !tagComputed) {
        /* Compute the authTag */
        status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
            STM32_HAL_TIMEOUT);
    }
#elif defined(STM32_CRYPTO_AES_ONLY)
    /* Set the CRYP parameters */
    hcryp.Init.HeaderSize = authPadSz;
    if (authPadSz == 0)
        hcryp.Init.Header = NULL; /* cannot pass pointer when authIn == 0 */
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_DECRYPT;
    hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
    HAL_CRYP_Init(&hcryp);

    /* GCM init phase */
    status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
    if (status == HAL_OK) {
        /* GCM header phase */
        hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
        status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
    }
    if (status == HAL_OK) {
        /* GCM payload phase - blocks */
        hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
        if (blocks) {
            status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
                (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
        }
    }
    if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
        /* GCM payload phase - partial remainder */
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)partialBlock, partial,
            (byte*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    if (status == HAL_OK && tagComputed == 0) {
        /* GCM final phase */
        hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
        status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (byte*)tag, STM32_HAL_TIMEOUT);
    }
#else
    hcryp.Init.HeaderSize = authPadSz;
    HAL_CRYP_Init(&hcryp);
    if (blocks) {
        /* GCM payload phase - blocks */
        status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)in,
            (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
    }
    if (status == HAL_OK && (partial != 0 || blocks == 0)) {
        /* GCM payload phase - partial remainder */
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)partialBlock, partial,
            (byte*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    if (status == HAL_OK && tagComputed == 0) {
        /* Compute the authTag */
        status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (byte*)tag, STM32_HAL_TIMEOUT);
    }
#endif

    if (status != HAL_OK)
        ret = AES_GCM_AUTH_E;
    HAL_CRYP_DeInit(&hcryp);
#else /* Standard Peripheral Library */
    ByteReverseWords(keyCopy, (word32*)aes->key, aes->keylen);
    /* Input size and auth size need to be the actual sizes, even though
     * they are not block aligned, because this length (in bits) is used
     * in the final GHASH. */
    XMEMSET(partialBlock, 0, sizeof(partialBlock)); /* use this to get tag */
    status = CRYP_AES_GCM(MODE_DECRYPT, (uint8_t*)ctr,
        (uint8_t*)keyCopy, keySize * 8,
        (uint8_t*)in, sz,
        (uint8_t*)authInPadded, authInSz,
        (uint8_t*)out, (uint8_t*)partialBlock);
    if (status != SUCCESS)
        ret = AES_GCM_AUTH_E;
    if (tagComputed == 0)
        XMEMCPY(tag, partialBlock, authTagSz);
#endif /* WOLFSSL_STM32_CUBEMX */
    wolfSSL_CryptHwMutexUnLock();

    /* Check authentication tag */
    if (ConstantCompare((const byte*)tagExpected, (byte*)tag, authTagSz) != 0) {
        ret = AES_GCM_AUTH_E;
    }

    /* Free memory */
    if (wasAlloc) {
        XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
    }

    return ret;
}
#endif /* STM32_CRYPTO_AES_GCM */

#ifdef WOLFSSL_AESNI
/* For performance reasons, this code must not be inlined. */
int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz);
#else
static
#endif
int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partial = sz % AES_BLOCK_SIZE;
    const byte* c = in;
    byte* p = out;
    ALIGN32 byte counter[AES_BLOCK_SIZE];
    ALIGN32 byte scratch[AES_BLOCK_SIZE];
    ALIGN32 byte Tprime[AES_BLOCK_SIZE];
    ALIGN32 byte EKY0[AES_BLOCK_SIZE];
    sword32 res;

    if (ivSz == GCM_NONCE_MID_SZ) {
        /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
        XMEMCPY(counter, iv, ivSz);
        XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
            AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
        counter[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Counter is GHASH of IV. */
#ifdef OPENSSL_EXTRA
        word32 aadTemp = aes->aadLen;
        aes->aadLen = 0;
#endif
        GHASH(aes, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
#ifdef OPENSSL_EXTRA
        aes->aadLen = aadTemp;
#endif
    }

    /* Calc the authTag again using received auth data and the cipher text */
    GHASH(aes, authIn, authInSz, in, sz, Tprime, sizeof(Tprime));
    ret = wc_AesEncrypt(aes, counter, EKY0);
    if (ret != 0)
        return ret;
    xorbuf(Tprime, EKY0, sizeof(Tprime));

#ifdef OPENSSL_EXTRA
    if (!out) {
        /* authenticated, non-confidential data */
        /* store AAD size for next call */
        aes->aadLen = authInSz;
    }
#endif

#if defined(WOLFSSL_PIC32MZ_CRYPT)
    if (blocks) {
        /* use initial IV for HW, but don't use it below */
        XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
        ret = wc_Pic32AesCrypt(
            aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
            out, in, (blocks * AES_BLOCK_SIZE),
            PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
        if (ret != 0)
            return ret;
    }
    /* process remainder using partial handling */
#endif

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
    /* some hardware acceleration can gain performance from doing AES encryption
     * of the whole buffer at once */
    if (c != p && blocks > 0) { /* cannot handle inline decryption */
        while (blocks--) {
            IncrementGcmCounter(counter);
            XMEMCPY(p, counter, AES_BLOCK_SIZE);
            p += AES_BLOCK_SIZE;
        }

        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, c, AES_BLOCK_SIZE * blocks);
        c += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB && !WOLFSSL_PIC32MZ_CRYPT */
    {
        while (blocks--) {
            IncrementGcmCounter(counter);
#if !defined(WOLFSSL_PIC32MZ_CRYPT)
            ret = wc_AesEncrypt(aes, counter, scratch);
            if (ret != 0)
                return ret;
            xorbufout(p, scratch, c, AES_BLOCK_SIZE);
#endif
            p += AES_BLOCK_SIZE;
            c += AES_BLOCK_SIZE;
        }
    }

    if (partial != 0) {
        IncrementGcmCounter(counter);
        ret = wc_AesEncrypt(aes, counter, scratch);
        if (ret != 0)
            return ret;
        xorbuf(scratch, c, partial);
        XMEMCPY(p, scratch, partial);
    }

    /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of
     * the pairwise bytes in the strings.
     */
    res = ConstantCompare(authTag, Tprime, authTagSz);
    /* convert positive retval from ConstantCompare() to all-1s word, in
     * constant time.
     */
    res = 0 - (sword32)(((word32)(0 - res)) >> 31U);
    /* now use res as a mask for constant time return of ret, unless tag
     * mismatch, whereupon AES_GCM_AUTH_E is returned.
     */
    ret = (ret & ~res) | (res & AES_GCM_AUTH_E);
    return ret;
}
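/* A short worked example of the constant-time masking above (illustrative,
 * not part of the build). Suppose ConstantCompare() returns r:
 *   - mismatch, r > 0: (word32)(0 - r) has its top bit set, so the shift
 *     yields 1 and res = 0 - 1 = all ones; ret becomes AES_GCM_AUTH_E.
 *   - match, r == 0: the shift yields 0 and res = 0, so ret is unchanged.
 * Either way, no branch depends on the comparison result. */
#if 0
#include <stdint.h>

static int32_t mask_result(int32_t ret, int32_t r, int32_t err)
{
    int32_t res = 0 - (int32_t)(((uint32_t)(0 - r)) >> 31U);
    return (ret & ~res) | (res & err); /* ret on match, err on mismatch */
}
#endif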
/* Software AES - GCM Decrypt */
int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
#ifdef WOLFSSL_AESNI
    int res = AES_GCM_AUTH_E;
#endif

    /* argument checks */
    /* If the sz is non-zero, both in and out must be set. If sz is 0,
     * in and out are don't cares, as this is the GMAC case. */
    if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
        authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
        ivSz == 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    if (aes->devId != INVALID_DEVID) {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmDecrypt(aes, out, in, sz, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
            sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
    #if defined(HAVE_CAVIUM)
        #ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM only works with 20-byte AAD */
            return NitroxAesGcmDecrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
        #endif
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmDecrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
    #else /* WOLFSSL_ASYNC_CRYPT_TEST */
        if (wc_AsyncTestInit(&aes->asyncDev, ASYNC_TEST_AES_GCM_DECRYPT)) {
            WC_ASYNC_TEST* testDev = &aes->asyncDev.test;
            testDev->aes.aes = aes;
            testDev->aes.out = out;
            testDev->aes.in = in;
            testDev->aes.sz = sz;
            testDev->aes.iv = iv;
            testDev->aes.ivSz = ivSz;
            testDev->aes.authTag = (byte*)authTag;
            testDev->aes.authTagSz = authTagSz;
            testDev->aes.authIn = authIn;
            testDev->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */
  7502. #ifdef WOLFSSL_SILABS_SE_ACCEL
  7503. return wc_AesGcmDecrypt_silabs(
  7504. aes, out, in, sz, iv, ivSz,
  7505. authTag, authTagSz, authIn, authInSz);
  7506. #endif
  7507. #ifdef STM32_CRYPTO_AES_GCM
  7508. /* The STM standard peripheral library API's doesn't support partial blocks */
  7509. return wc_AesGcmDecrypt_STM32(
  7510. aes, out, in, sz, iv, ivSz,
  7511. authTag, authTagSz, authIn, authInSz);
  7512. #endif /* STM32_CRYPTO_AES_GCM */
#ifdef WOLFSSL_AESNI
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_decrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                             authTagSz, (byte*)aes->key, aes->rounds, &res);
        RESTORE_VECTOR_REGISTERS();
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_decrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                             authTagSz, (byte*)aes->key, aes->rounds, &res);
        RESTORE_VECTOR_REGISTERS();
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
#endif
    if (haveAESNI) {
        AES_GCM_decrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                        authTagSz, (byte*)aes->key, aes->rounds, &res);
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
#endif
    {
        return AES_GCM_decrypt_C(aes, out, in, sz, iv, ivSz, authTag,
            authTagSz, authIn, authInSz);
    }
}
#endif
#endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
#ifdef WOLFSSL_AESGCM_STREAM

/* Initialize the AES GCM cipher with an IV. C implementation.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      iv    IV/nonce buffer.
 * @param [in]      ivSz  Length of IV/nonce data.
 */
static WARN_UNUSED_RESULT int AesGcmInit_C(Aes* aes, const byte* iv, word32 ivSz)
{
    ALIGN32 byte counter[AES_BLOCK_SIZE];
    int ret;

    if (ivSz == GCM_NONCE_MID_SZ) {
        /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
        XMEMCPY(counter, iv, ivSz);
        XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
            AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
        counter[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Counter is GHASH of IV. */
#ifdef OPENSSL_EXTRA
        word32 aadTemp = aes->aadLen;
        aes->aadLen = 0;
#endif
        GHASH(aes, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
#ifdef OPENSSL_EXTRA
        aes->aadLen = aadTemp;
#endif
    }

    /* Copy in the counter for use with cipher. */
    XMEMCPY(AES_COUNTER(aes), counter, AES_BLOCK_SIZE);
    /* Encrypt initial counter into a buffer for GCM. */
    ret = wc_AesEncrypt(aes, counter, AES_INITCTR(aes));
    if (ret != 0)
        return ret;
    /* Reset state fields. */
    aes->over = 0;
    aes->aSz = 0;
    aes->cSz = 0;
    /* Initialization for GHASH. */
    GHASH_INIT(aes);

    return 0;
}
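
/* For reference, the initial counter block computed above is J0 from
 * SP 800-38D (illustrative summary, not build code):
 *
 *   ivSz == 12 bytes:  J0 = IV || 0x00000001
 *   any other ivSz:    J0 = GHASH_H(IV zero-padded to a 16-byte multiple
 *                           || 0^64 || [bit length of IV]_64)
 *
 * The GHASH() call handles the zero padding and the length block for the
 * second case internally.
 */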
/* Update the AES GCM cipher with data. C implementation.
 *
 * Only enciphers data.
 *
 * @param [in, out] aes  AES object.
 * @param [out]     out  Cipher text or plaintext buffer.
 * @param [in]      in   Plaintext or cipher text buffer.
 * @param [in]      sz   Length of data.
 */
static WARN_UNUSED_RESULT int AesGcmCryptUpdate_C(
    Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;
    word32 partial;
    int ret;

    /* Check if previous encrypted block was not used up. */
    if (aes->over > 0) {
        byte pSz = AES_BLOCK_SIZE - aes->over;
        if (pSz > sz) pSz = (byte)sz;

        /* Use some/all of last encrypted block. */
        xorbufout(out, AES_LASTBLOCK(aes) + aes->over, in, pSz);
        aes->over = (aes->over + pSz) & (AES_BLOCK_SIZE - 1);

        /* Some data used. */
        sz -= pSz;
        in += pSz;
        out += pSz;
    }

    /* Calculate the number of blocks needing to be encrypted and any leftover.
     */
    blocks = sz / AES_BLOCK_SIZE;
    partial = sz & (AES_BLOCK_SIZE - 1);

#if defined(HAVE_AES_ECB)
    /* Some hardware acceleration can gain performance from doing AES
     * encryption of the whole buffer at once.
     * Overwrites the cipher text before using plaintext - no inline
     * encryption. */
    if ((out != in) && blocks > 0) {
        word32 b;
        /* Place incrementing counter blocks into cipher text. */
        for (b = 0; b < blocks; b++) {
            IncrementGcmCounter(AES_COUNTER(aes));
            XMEMCPY(out + b * AES_BLOCK_SIZE, AES_COUNTER(aes), AES_BLOCK_SIZE);
        }

        /* Encrypt counter blocks. */
        ret = wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        if (ret != 0)
            return ret;
        /* XOR in plaintext. */
        xorbuf(out, in, AES_BLOCK_SIZE * blocks);
        /* Skip over processed data. */
        in += AES_BLOCK_SIZE * blocks;
        out += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB */
    {
        /* Encrypt block by block. */
        while (blocks--) {
            ALIGN32 byte scratch[AES_BLOCK_SIZE];
            IncrementGcmCounter(AES_COUNTER(aes));
            /* Encrypt counter into a buffer. */
            ret = wc_AesEncrypt(aes, AES_COUNTER(aes), scratch);
            if (ret != 0)
                return ret;
            /* XOR plaintext into encrypted counter, into cipher text buffer. */
            xorbufout(out, scratch, in, AES_BLOCK_SIZE);
            /* Data complete. */
            in += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
        }
    }

    if (partial != 0) {
        /* Generate an extra block and use up as much as needed. */
        IncrementGcmCounter(AES_COUNTER(aes));
        /* Encrypt counter into cache. */
        ret = wc_AesEncrypt(aes, AES_COUNTER(aes), AES_LASTBLOCK(aes));
        if (ret != 0)
            return ret;
        /* XOR plaintext into encrypted counter, into cipher text buffer. */
        xorbufout(out, AES_LASTBLOCK(aes), in, partial);
        /* Keep amount of encrypted block used. */
        aes->over = (byte)partial;
    }

    return 0;
}
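
/* Usage note (illustrative, with hypothetical buffers): because the unused
 * tail of the last keystream block is cached in AES_LASTBLOCK(aes) and
 * aes->over, a split update matches a single call, e.g.
 *
 *   AesGcmCryptUpdate_C(aes, out,      in,      10);  // partial block
 *   AesGcmCryptUpdate_C(aes, out + 10, in + 10, 22);  // completes block 1
 *
 * yields the same cipher text as one 32-byte update.
 */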
/* Calculates authentication tag for AES GCM. C implementation.
 *
 * @param [in, out] aes        AES object.
 * @param [out]     authTag    Buffer to store authentication tag in.
 * @param [in]      authTagSz  Length of tag to create.
 */
static WARN_UNUSED_RESULT int AesGcmFinal_C(
    Aes* aes, byte* authTag, word32 authTagSz)
{
    /* Calculate authentication tag. */
    GHASH_FINAL(aes, authTag, authTagSz);
    /* XOR in as much of encrypted counter as is required. */
    xorbuf(authTag, AES_INITCTR(aes), authTagSz);
#ifdef OPENSSL_EXTRA
    /* store AAD size for next call */
    aes->aadLen = aes->aSz;
#endif
    /* Zeroize last block to protect sensitive data. */
    ForceZero(AES_LASTBLOCK(aes), AES_BLOCK_SIZE);

    return 0;
}
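
/* For reference (SP 800-38D), the two steps above compute the tag as
 *
 *   T = MSB_t(GHASH_H(A, C) XOR E(K, J0))
 *
 * where GHASH_FINAL() supplies GHASH_H(A, C) and AES_INITCTR(aes) holds
 * E(K, J0) cached by AesGcmInit_C().
 */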
#ifdef WOLFSSL_AESNI

#ifdef __cplusplus
    extern "C" {
#endif

/* Assembly code implementations in: aes_gcm_asm.S */

#ifdef HAVE_INTEL_AVX2
extern void AES_GCM_init_avx2(const unsigned char* key, int nr,
    const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
    unsigned char* counter, unsigned char* initCtr);
extern void AES_GCM_aad_update_avx2(const unsigned char* addt,
    unsigned int abytes, unsigned char* tag, unsigned char* h);
extern void AES_GCM_encrypt_block_avx2(const unsigned char* key, int nr,
    unsigned char* out, const unsigned char* in, unsigned char* counter);
extern void AES_GCM_ghash_block_avx2(const unsigned char* data,
    unsigned char* tag, unsigned char* h);
extern void AES_GCM_encrypt_update_avx2(const unsigned char* key, int nr,
    unsigned char* out, const unsigned char* in, unsigned int nbytes,
    unsigned char* tag, unsigned char* h, unsigned char* counter);
extern void AES_GCM_encrypt_final_avx2(unsigned char* tag,
    unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
    unsigned int abytes, unsigned char* h, unsigned char* initCtr);
#endif

#ifdef HAVE_INTEL_AVX1
extern void AES_GCM_init_avx1(const unsigned char* key, int nr,
    const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
    unsigned char* counter, unsigned char* initCtr);
extern void AES_GCM_aad_update_avx1(const unsigned char* addt,
    unsigned int abytes, unsigned char* tag, unsigned char* h);
extern void AES_GCM_encrypt_block_avx1(const unsigned char* key, int nr,
    unsigned char* out, const unsigned char* in, unsigned char* counter);
extern void AES_GCM_ghash_block_avx1(const unsigned char* data,
    unsigned char* tag, unsigned char* h);
extern void AES_GCM_encrypt_update_avx1(const unsigned char* key, int nr,
    unsigned char* out, const unsigned char* in, unsigned int nbytes,
    unsigned char* tag, unsigned char* h, unsigned char* counter);
extern void AES_GCM_encrypt_final_avx1(unsigned char* tag,
    unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
    unsigned int abytes, unsigned char* h, unsigned char* initCtr);
#endif

extern void AES_GCM_init_aesni(const unsigned char* key, int nr,
    const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
    unsigned char* counter, unsigned char* initCtr);
extern void AES_GCM_aad_update_aesni(const unsigned char* addt,
    unsigned int abytes, unsigned char* tag, unsigned char* h);
extern void AES_GCM_encrypt_block_aesni(const unsigned char* key, int nr,
    unsigned char* out, const unsigned char* in, unsigned char* counter);
extern void AES_GCM_ghash_block_aesni(const unsigned char* data,
    unsigned char* tag, unsigned char* h);
extern void AES_GCM_encrypt_update_aesni(const unsigned char* key, int nr,
    unsigned char* out, const unsigned char* in, unsigned int nbytes,
    unsigned char* tag, unsigned char* h, unsigned char* counter);
extern void AES_GCM_encrypt_final_aesni(unsigned char* tag,
    unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
    unsigned int abytes, unsigned char* h, unsigned char* initCtr);

#ifdef __cplusplus
    } /* extern "C" */
#endif
/* Initialize the AES GCM cipher with an IV. AES-NI implementations.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      iv    IV/nonce buffer.
 * @param [in]      ivSz  Length of IV/nonce data.
 */
static WARN_UNUSED_RESULT int AesGcmInit_aesni(
    Aes* aes, const byte* iv, word32 ivSz)
{
    /* Reset state fields. */
    aes->aSz = 0;
    aes->cSz = 0;
    /* Set tag to all zeros as initial value. */
    XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
    /* Reset counts of AAD and cipher text. */
    aes->aOver = 0;
    aes->cOver = 0;

#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_avx2((byte*)aes->key, aes->rounds, iv, ivSz, aes->H,
                          AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_avx1((byte*)aes->key, aes->rounds, iv, ivSz, aes->H,
                          AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    else
#endif
    {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_aesni((byte*)aes->key, aes->rounds, iv, ivSz, aes->H,
                           AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    return 0;
}
/* Update the AES GCM for encryption with authentication data.
 *
 * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      a     Buffer holding authentication data.
 * @param [in]      aSz   Length of authentication data in bytes.
 * @param [in]      endA  Whether no more authentication data is expected.
 */
static WARN_UNUSED_RESULT int AesGcmAadUpdate_aesni(
    Aes* aes, const byte* a, word32 aSz, int endA)
{
    word32 blocks;
    int partial;

    ASSERT_SAVED_VECTOR_REGISTERS();

    if (aSz != 0 && a != NULL) {
        /* Total count of AAD updated. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = (byte)aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
            #ifdef HAVE_INTEL_AVX2
                if (IS_INTEL_AVX2(intel_flags)) {
                    AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->H);
                }
                else
            #endif
            #ifdef HAVE_INTEL_AVX1
                if (IS_INTEL_AVX1(intel_flags)) {
                    AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->H);
                }
                else
            #endif
                {
                    AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                              aes->H);
                }
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }

        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        if (blocks > 0) {
            /* GHASH full blocks now. */
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_aad_update_avx2(a, blocks * AES_BLOCK_SIZE,
                                        AES_TAG(aes), aes->H);
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_aad_update_avx1(a, blocks * AES_BLOCK_SIZE,
                                        AES_TAG(aes), aes->H);
            }
            else
        #endif
            {
                AES_GCM_aad_update_aesni(a, blocks * AES_BLOCK_SIZE,
                                         AES_TAG(aes), aes->H);
            }
            /* Skip over to end of AAD blocks. */
            a += blocks * AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, partial);
            aes->aOver = (byte)partial;
        }
    }
    if (endA && (aes->aOver > 0)) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0,
                AES_BLOCK_SIZE - aes->aOver);
        /* GHASH last AAD block. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                      aes->H);
        }
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }

    return 0;
}
/* Update the AES GCM for encryption with data and/or authentication data.
 *
 * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * @param [in, out] aes  AES object.
 * @param [out]     c    Buffer to hold cipher text.
 * @param [in]      p    Buffer holding plaintext.
 * @param [in]      cSz  Length of cipher text/plaintext in bytes.
 * @param [in]      a    Buffer holding authentication data.
 * @param [in]      aSz  Length of authentication data in bytes.
 */
static WARN_UNUSED_RESULT int AesGcmEncryptUpdate_aesni(
    Aes* aes, byte* c, const byte* p, word32 cSz, const byte* a, word32 aSz)
{
    word32 blocks;
    int partial;
    int ret;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    /* Hash in A, the Authentication Data */
    ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
    if (ret != 0)
        return ret;

    /* Encrypt plaintext and hash in C, the Cipher text */
    if (cSz != 0 && c != NULL) {
        /* Update count of cipher text we have hashed. */
        aes->cSz += cSz;
        if (aes->cOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->cOver;
            if (sz > cSz) {
                sz = (byte)cSz;
            }
            /* Encrypt some of the plaintext. */
            xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, p, sz);
            XMEMCPY(c, AES_LASTGBLOCK(aes) + aes->cOver, sz);
            /* Update count of unused encrypted counter. */
            aes->cOver += sz;
            if (aes->cOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
            #ifdef HAVE_INTEL_AVX2
                if (IS_INTEL_AVX2(intel_flags)) {
                    AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->H);
                }
                else
            #endif
            #ifdef HAVE_INTEL_AVX1
                if (IS_INTEL_AVX1(intel_flags)) {
                    AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->H);
                }
                else
            #endif
                {
                    AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                              aes->H);
                }
                /* Reset count. */
                aes->cOver = 0;
            }
            /* Used up some data. */
            cSz -= sz;
            p += sz;
            c += sz;
        }

        /* Calculate number of blocks of plaintext and the leftover. */
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        if (blocks > 0) {
            /* Encrypt and GHASH full blocks now. */
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_encrypt_update_avx2((byte*)aes->key, aes->rounds, c, p,
                    blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
                    AES_COUNTER(aes));
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_encrypt_update_avx1((byte*)aes->key, aes->rounds, c, p,
                    blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
                    AES_COUNTER(aes));
            }
            else
        #endif
            {
                AES_GCM_encrypt_update_aesni((byte*)aes->key, aes->rounds, c, p,
                    blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
                    AES_COUNTER(aes));
            }
            /* Skip over to end of blocks. */
            p += blocks * AES_BLOCK_SIZE;
            c += blocks * AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Encrypt the counter - XOR in zeros as proxy for plaintext. */
            XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_encrypt_block_avx2((byte*)aes->key, aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_encrypt_block_avx1((byte*)aes->key, aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            else
        #endif
            {
                AES_GCM_encrypt_block_aesni((byte*)aes->key, aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            /* XOR the remaining plaintext to calculate cipher text.
             * Keep cipher text for GHASH of last partial block.
             */
            xorbuf(AES_LASTGBLOCK(aes), p, partial);
            XMEMCPY(c, AES_LASTGBLOCK(aes), partial);
            /* Update count of the block used. */
            aes->cOver = (byte)partial;
        }
    }
    RESTORE_VECTOR_REGISTERS();

    return 0;
}
/* Finalize the AES GCM for encryption and calculate the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * @param [in, out] aes        AES object.
 * @param [out]     authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 */
static WARN_UNUSED_RESULT int AesGcmEncryptFinal_aesni(
    Aes* aes, byte* authTag, word32 authTagSz)
{
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    if (aes->cOver > 0) {
        /* Cipher text block incomplete. */
        over = aes->cOver;
    }
    if (over > 0) {
        /* Fill the rest of the block with zeros. */
        XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
        /* GHASH last cipher block. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                      aes->H);
        }
    }
    /* Calculate the authentication tag. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_encrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes));
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_encrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes));
    }
    else
#endif
    {
        AES_GCM_encrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes));
    }
    RESTORE_VECTOR_REGISTERS();

    return 0;
}
#if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)

#ifdef __cplusplus
    extern "C" {
#endif

/* Assembly code implementations in: aes_gcm_asm.S */

#ifdef HAVE_INTEL_AVX2
extern void AES_GCM_decrypt_update_avx2(const unsigned char* key, int nr,
    unsigned char* out, const unsigned char* in, unsigned int nbytes,
    unsigned char* tag, unsigned char* h, unsigned char* counter);
extern void AES_GCM_decrypt_final_avx2(unsigned char* tag,
    const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
    unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
#endif

#ifdef HAVE_INTEL_AVX1
extern void AES_GCM_decrypt_update_avx1(const unsigned char* key, int nr,
    unsigned char* out, const unsigned char* in, unsigned int nbytes,
    unsigned char* tag, unsigned char* h, unsigned char* counter);
extern void AES_GCM_decrypt_final_avx1(unsigned char* tag,
    const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
    unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
#endif

extern void AES_GCM_decrypt_update_aesni(const unsigned char* key, int nr,
    unsigned char* out, const unsigned char* in, unsigned int nbytes,
    unsigned char* tag, unsigned char* h, unsigned char* counter);
extern void AES_GCM_decrypt_final_aesni(unsigned char* tag,
    const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
    unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);

#ifdef __cplusplus
    } /* extern "C" */
#endif
/* Update the AES GCM for decryption with data and/or authentication data.
 *
 * @param [in, out] aes  AES object.
 * @param [out]     p    Buffer to hold plaintext.
 * @param [in]      c    Buffer holding cipher text.
 * @param [in]      cSz  Length of cipher text/plaintext in bytes.
 * @param [in]      a    Buffer holding authentication data.
 * @param [in]      aSz  Length of authentication data in bytes.
 */
static WARN_UNUSED_RESULT int AesGcmDecryptUpdate_aesni(
    Aes* aes, byte* p, const byte* c, word32 cSz, const byte* a, word32 aSz)
{
    word32 blocks;
    int partial;
    int ret;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    /* Hash in A, the Authentication Data */
    ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
    if (ret != 0)
        return ret;

    /* Hash in C, the Cipher text, and decrypt. */
    if (cSz != 0 && p != NULL) {
        /* Update count of cipher text we have hashed. */
        aes->cSz += cSz;
        if (aes->cOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->cOver;
            if (sz > cSz) {
                sz = (byte)cSz;
            }
            /* Keep a copy of the cipher text for GHASH. */
            XMEMCPY(AES_LASTBLOCK(aes) + aes->cOver, c, sz);
            /* Decrypt some of the cipher text. */
            xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
            XMEMCPY(p, AES_LASTGBLOCK(aes) + aes->cOver, sz);
            /* Update count of unused encrypted counter. */
            aes->cOver += sz;
            if (aes->cOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
            #ifdef HAVE_INTEL_AVX2
                if (IS_INTEL_AVX2(intel_flags)) {
                    AES_GCM_ghash_block_avx2(AES_LASTBLOCK(aes), AES_TAG(aes),
                                             aes->H);
                }
                else
            #endif
            #ifdef HAVE_INTEL_AVX1
                if (IS_INTEL_AVX1(intel_flags)) {
                    AES_GCM_ghash_block_avx1(AES_LASTBLOCK(aes), AES_TAG(aes),
                                             aes->H);
                }
                else
            #endif
                {
                    AES_GCM_ghash_block_aesni(AES_LASTBLOCK(aes), AES_TAG(aes),
                                              aes->H);
                }
                /* Reset count. */
                aes->cOver = 0;
            }
            /* Used up some data. */
            cSz -= sz;
            c += sz;
            p += sz;
        }

        /* Calculate number of blocks of plaintext and the leftover. */
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        if (blocks > 0) {
            /* Decrypt and GHASH full blocks now. */
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_decrypt_update_avx2((byte*)aes->key, aes->rounds, p, c,
                    blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
                    AES_COUNTER(aes));
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_decrypt_update_avx1((byte*)aes->key, aes->rounds, p, c,
                    blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
                    AES_COUNTER(aes));
            }
            else
        #endif
            {
                AES_GCM_decrypt_update_aesni((byte*)aes->key, aes->rounds, p, c,
                    blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
                    AES_COUNTER(aes));
            }
            /* Skip over to end of blocks. */
            c += blocks * AES_BLOCK_SIZE;
            p += blocks * AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Encrypt the counter - XOR in zeros as proxy for cipher text. */
            XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_encrypt_block_avx2((byte*)aes->key, aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_encrypt_block_avx1((byte*)aes->key, aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            else
        #endif
            {
                AES_GCM_encrypt_block_aesni((byte*)aes->key, aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            /* Keep cipher text for GHASH of last partial block. */
            XMEMCPY(AES_LASTBLOCK(aes), c, partial);
            /* XOR the remaining cipher text to calculate plaintext. */
            xorbuf(AES_LASTGBLOCK(aes), c, partial);
            XMEMCPY(p, AES_LASTGBLOCK(aes), partial);
            /* Update count of the block used. */
            aes->cOver = (byte)partial;
        }
    }
    RESTORE_VECTOR_REGISTERS();

    return 0;
}
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 * @return  AES_GCM_AUTH_E when authentication tag doesn't match calculated
 *          value.
 */
static WARN_UNUSED_RESULT int AesGcmDecryptFinal_aesni(
    Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;
    int res;
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;
    byte *lastBlock = AES_LASTGBLOCK(aes);

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    if (aes->cOver > 0) {
        /* Cipher text block incomplete. */
        over = aes->cOver;
        lastBlock = AES_LASTBLOCK(aes);
    }
    if (over > 0) {
        /* Zeroize the unused part of the block. */
        XMEMSET(lastBlock + over, 0, AES_BLOCK_SIZE - over);
        /* Hash the last block of cipher text. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(lastBlock, AES_TAG(aes), aes->H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(lastBlock, AES_TAG(aes), aes->H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(lastBlock, AES_TAG(aes), aes->H);
        }
    }
    /* Calculate and compare the authentication tag. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_decrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes), &res);
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_decrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes), &res);
    }
    else
#endif
    {
        AES_GCM_decrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes), &res);
    }
    RESTORE_VECTOR_REGISTERS();
    /* Return error code when calculated doesn't match input. */
    if (res == 0) {
        ret = AES_GCM_AUTH_E;
    }

    return ret;
}
#endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
#endif /* WOLFSSL_AESNI */
/* Initialize an AES GCM cipher for encryption or decryption.
 *
 * Must call wc_AesInit() before calling this function.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      key   Buffer holding key.
 * @param [in]      len   Length of key in bytes.
 * @param [in]      iv    Buffer holding IV/nonce.
 * @param [in]      ivSz  Length of IV/nonce in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 * @return  MEMORY_E when dynamic memory allocation fails. (WOLFSSL_SMALL_STACK)
 */
int wc_AesGcmInit(Aes* aes, const byte* key, word32 len, const byte* iv,
    word32 ivSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((len > 0) && (key == NULL)) ||
            ((ivSz == 0) && (iv != NULL)) || (ivSz > AES_BLOCK_SIZE) ||
            ((ivSz > 0) && (iv == NULL))) {
        ret = BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    if ((ret == 0) && (aes->streamData == NULL)) {
        /* Allocate buffers for streaming. */
        aes->streamData = (byte*)XMALLOC(5 * AES_BLOCK_SIZE, aes->heap,
            DYNAMIC_TYPE_AES);
        if (aes->streamData == NULL) {
            ret = MEMORY_E;
        }
    }
#endif

    /* Set the key if passed in. */
    if ((ret == 0) && (key != NULL)) {
        ret = wc_AesGcmSetKey(aes, key, len);
    }

    if (ret == 0) {
        /* Setup with IV if needed. */
        if (iv != NULL) {
            /* Cache the IV in AES GCM object. */
            XMEMCPY((byte*)aes->reg, iv, ivSz);
            aes->nonceSz = ivSz;
        }
        else if (aes->nonceSz != 0) {
            /* Copy out the cached copy. */
            iv = (byte*)aes->reg;
            ivSz = aes->nonceSz;
        }

        if (iv != NULL) {
            /* Initialize with the IV. */
        #ifdef WOLFSSL_AESNI
            if (haveAESNI
            #ifdef HAVE_INTEL_AVX2
                || IS_INTEL_AVX2(intel_flags)
            #endif
            #ifdef HAVE_INTEL_AVX1
                || IS_INTEL_AVX1(intel_flags)
            #endif
                ) {
                ret = AesGcmInit_aesni(aes, iv, ivSz);
            }
            else
        #endif
            {
                ret = AesGcmInit_C(aes, iv, ivSz);
            }

            aes->nonceSet = 1;
        }
    }

    return ret;
}
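
/* Usage note (illustrative): the key and IV may be supplied in separate
 * calls; passing a NULL iv reuses the nonce cached by an earlier call, e.g.
 *
 *   wc_AesGcmInit(&aes, key, 16, NULL, 0);               // key now
 *   wc_AesGcmInit(&aes, NULL, 0, iv, GCM_NONCE_MID_SZ);  // IV later
 *
 * (error handling elided; key and iv are hypothetical caller buffers)
 */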
/* Initialize an AES GCM cipher for encryption.
 *
 * Must call wc_AesInit() before calling this function.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      key   Buffer holding key.
 * @param [in]      len   Length of key in bytes.
 * @param [in]      iv    Buffer holding IV/nonce.
 * @param [in]      ivSz  Length of IV/nonce in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 */
int wc_AesGcmEncryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
    word32 ivSz)
{
    return wc_AesGcmInit(aes, key, len, iv, ivSz);
}

/* Initialize an AES GCM cipher for encryption or decryption, copying out the
 * cached IV.
 *
 * Must call wc_AesInit() before calling this function.
 *
 * @param [in, out] aes      AES object.
 * @param [in]      key      Buffer holding key.
 * @param [in]      len      Length of key in bytes.
 * @param [out]     ivOut    Buffer to receive the cached IV/nonce.
 * @param [in]      ivOutSz  Length of IV/nonce to copy out in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 */
int wc_AesGcmEncryptInit_ex(Aes* aes, const byte* key, word32 len, byte* ivOut,
    word32 ivOutSz)
{
    XMEMCPY(ivOut, aes->reg, ivOutSz);
    return wc_AesGcmInit(aes, key, len, NULL, 0);
}
/* Update the AES GCM for encryption with data and/or authentication data.
 *
 * All the AAD must be passed to update before the plaintext.
 * Last part of AAD can be passed with first part of plaintext.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     out       Buffer to hold cipher text.
 * @param [in]      in        Buffer holding plaintext.
 * @param [in]      sz        Length of plaintext in bytes.
 * @param [in]      authIn    Buffer holding authentication data.
 * @param [in]      authInSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL.
 */
int wc_AesGcmEncryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
            ((out == NULL) || (in == NULL)))) {
        ret = BAD_FUNC_ARG;
    }

    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }

    if ((ret == 0) && aes->ctrSet && (aes->aSz == 0) && (aes->cSz == 0)) {
        aes->invokeCtr[0]++;
        if (aes->invokeCtr[0] == 0) {
            aes->invokeCtr[1]++;
            if (aes->invokeCtr[1] == 0)
                ret = AES_GCM_OVERFLOW_E;
        }
    }

    if (ret == 0) {
        /* Encrypt with AAD and/or plaintext. */
    #if defined(WOLFSSL_AESNI)
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmEncryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
        }
        else
    #endif
        {
            /* Encrypt the plaintext. */
            ret = AesGcmCryptUpdate_C(aes, out, in, sz);
            if (ret != 0)
                return ret;
            /* Update the authentication tag with any authentication data and
             * the new cipher text. */
            GHASH_UPDATE(aes, authIn, authInSz, out, sz);
        }
    }

    return ret;
}
/* Finalize the AES GCM for encryption and return the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [out]     authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 */
int wc_AesGcmEncryptFinal(Aes* aes, byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }

    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }

    if (ret == 0) {
        /* Calculate authentication tag. */
    #ifdef WOLFSSL_AESNI
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmEncryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
    #endif
        {
            ret = AesGcmFinal_C(aes, authTag, authTagSz);
        }
    }

    if ((ret == 0) && aes->ctrSet) {
        IncCtr((byte*)aes->reg, aes->nonceSz);
    }

    return ret;
}
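
/* Minimal end-to-end streaming encrypt sketch (illustrative only; error
 * handling elided; key, iv, aad and msg are hypothetical caller buffers):
 *
 *   Aes aes;
 *   byte tag[AES_BLOCK_SIZE];
 *   wc_AesInit(&aes, NULL, INVALID_DEVID);
 *   wc_AesGcmEncryptInit(&aes, key, 32, iv, GCM_NONCE_MID_SZ);
 *   wc_AesGcmEncryptUpdate(&aes, NULL, NULL, 0, aad, aadSz);  // AAD first
 *   wc_AesGcmEncryptUpdate(&aes, c, msg, n1, NULL, 0);
 *   wc_AesGcmEncryptUpdate(&aes, c + n1, msg + n1, n2, NULL, 0);
 *   wc_AesGcmEncryptFinal(&aes, tag, sizeof(tag));
 *   wc_AesFree(&aes);
 */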
#if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
/* Initialize an AES GCM cipher for decryption.
 *
 * Must call wc_AesInit() before calling this function.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      key   Buffer holding key.
 * @param [in]      len   Length of key in bytes.
 * @param [in]      iv    Buffer holding IV/nonce.
 * @param [in]      ivSz  Length of IV/nonce in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 */
int wc_AesGcmDecryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
    word32 ivSz)
{
    return wc_AesGcmInit(aes, key, len, iv, ivSz);
}

/* Update the AES GCM for decryption with data and/or authentication data.
 *
 * All the AAD must be passed to update before the cipher text.
 * Last part of AAD can be passed with first part of cipher text.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     out       Buffer to hold plaintext.
 * @param [in]      in        Buffer holding cipher text.
 * @param [in]      sz        Length of cipher text in bytes.
 * @param [in]      authIn    Buffer holding authentication data.
 * @param [in]      authInSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL.
 */
int wc_AesGcmDecryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
            ((out == NULL) || (in == NULL)))) {
        ret = BAD_FUNC_ARG;
    }

    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }

    if (ret == 0) {
        /* Decrypt with AAD and/or cipher text. */
    #if defined(WOLFSSL_AESNI)
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmDecryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
        }
        else
    #endif
        {
            /* Update the authentication tag with any authentication data and
             * cipher text. */
            GHASH_UPDATE(aes, authIn, authInSz, in, sz);
            /* Decrypt the cipher text. */
            ret = AesGcmCryptUpdate_C(aes, out, in, sz);
        }
    }

    return ret;
}
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 */
int wc_AesGcmDecryptFinal(Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }

    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }

    if (ret == 0) {
        /* Calculate authentication tag and compare with the one passed in. */
    #ifdef WOLFSSL_AESNI
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmDecryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
    #endif
        {
            ALIGN32 byte calcTag[AES_BLOCK_SIZE];
            /* Calculate authentication tag. */
            ret = AesGcmFinal_C(aes, calcTag, authTagSz);
            if (ret == 0) {
                /* Check calculated tag matches the one passed in. */
                if (ConstantCompare(authTag, calcTag, authTagSz) != 0) {
                    ret = AES_GCM_AUTH_E;
                }
            }
        }
    }

    /* reset the state */
    if (ret == 0)
        wc_AesFree(aes);

    return ret;
}
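
/* Streaming decrypt sketch (illustrative only; error handling elided),
 * mirroring the encrypt flow with the received tag checked at the end:
 *
 *   wc_AesGcmDecryptInit(&aes, key, 32, iv, GCM_NONCE_MID_SZ);
 *   wc_AesGcmDecryptUpdate(&aes, m, c, cSz, aad, aadSz);
 *   if (wc_AesGcmDecryptFinal(&aes, tag, tagSz) == AES_GCM_AUTH_E) {
 *       // authentication failed: discard the plaintext in m
 *   }
 */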
#endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
#endif /* WOLFSSL_AESGCM_STREAM */
#endif /* WOLFSSL_XILINX_CRYPT */
#endif /* end of block for AESGCM implementation selection */
/* Common to all, abstract functions that build off of lower level AESGCM
 * functions */
#ifndef WC_NO_RNG

static WARN_UNUSED_RESULT WC_INLINE int CheckAesGcmIvSize(int ivSz) {
    return (ivSz == GCM_NONCE_MIN_SZ ||
            ivSz == GCM_NONCE_MID_SZ ||
            ivSz == GCM_NONCE_MAX_SZ);
}

int wc_AesGcmSetExtIV(Aes* aes, const byte* iv, word32 ivSz)
{
    int ret = 0;

    if (aes == NULL || iv == NULL || !CheckAesGcmIvSize(ivSz)) {
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        XMEMCPY((byte*)aes->reg, iv, ivSz);

        /* If the IV is 96 bits, allow for a 2^64 invocation counter.
         * For any other nonce size, limit the invocation counter to
         * 32 bits. (SP 800-38D 8.3) */
        aes->invokeCtr[0] = 0;
        aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
    #ifdef WOLFSSL_AESGCM_STREAM
        aes->ctrSet = 1;
    #endif
        aes->nonceSz = ivSz;
    }

    return ret;
}
int wc_AesGcmSetIV(Aes* aes, word32 ivSz,
                   const byte* ivFixed, word32 ivFixedSz,
                   WC_RNG* rng)
{
    int ret = 0;

    if (aes == NULL || rng == NULL || !CheckAesGcmIvSize(ivSz) ||
        (ivFixed == NULL && ivFixedSz != 0) ||
        (ivFixed != NULL && ivFixedSz != AES_IV_FIXED_SZ)) {
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        byte* iv = (byte*)aes->reg;

        if (ivFixedSz)
            XMEMCPY(iv, ivFixed, ivFixedSz);

        ret = wc_RNG_GenerateBlock(rng, iv + ivFixedSz, ivSz - ivFixedSz);
    }

    if (ret == 0) {
        /* If the IV is 96 bits, allow for a 2^64 invocation counter.
         * For any other nonce size, limit the invocation counter to
         * 32 bits. (SP 800-38D 8.3) */
        aes->invokeCtr[0] = 0;
        aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
    #ifdef WOLFSSL_AESGCM_STREAM
        aes->ctrSet = 1;
    #endif
        aes->nonceSz = ivSz;
    }

    return ret;
}
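
/* Illustrative call for an RFC 5288 style split nonce, a fixed implicit
 * part plus random bytes filling the rest (fixed and rng are hypothetical):
 *
 *   wc_AesGcmSetIV(&aes, GCM_NONCE_MID_SZ, fixed, AES_IV_FIXED_SZ, rng);
 */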
int wc_AesGcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
                        byte* ivOut, word32 ivOutSz,
                        byte* authTag, word32 authTagSz,
                        const byte* authIn, word32 authInSz)
{
    int ret = 0;

    if (aes == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
        ivOut == NULL || ivOutSz != aes->nonceSz ||
        (authIn == NULL && authInSz != 0)) {
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        aes->invokeCtr[0]++;
        if (aes->invokeCtr[0] == 0) {
            aes->invokeCtr[1]++;
            if (aes->invokeCtr[1] == 0)
                ret = AES_GCM_OVERFLOW_E;
        }
    }

    if (ret == 0) {
        XMEMCPY(ivOut, aes->reg, ivOutSz);
        ret = wc_AesGcmEncrypt(aes, out, in, sz,
                               (byte*)aes->reg, ivOutSz,
                               authTag, authTagSz,
                               authIn, authInSz);
        if (ret == 0)
            IncCtr((byte*)aes->reg, ivOutSz);
    }

    return ret;
}
int wc_Gmac(const byte* key, word32 keySz, byte* iv, word32 ivSz,
            const byte* authIn, word32 authInSz,
            byte* authTag, word32 authTagSz, WC_RNG* rng)
{
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif
    int ret;

    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
        authTag == NULL || authTagSz == 0 || rng == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            ret = wc_AesGcmSetIV(aes, ivSz, NULL, 0, rng);
        if (ret == 0)
            ret = wc_AesGcmEncrypt_ex(aes, NULL, NULL, 0, iv, ivSz,
                                      authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    ForceZero(aes, sizeof *aes);

#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}
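
/* GMAC usage sketch (illustrative; error handling elided): authenticate
 * aad without encrypting a payload. wc_Gmac() generates the nonce into the
 * caller's iv buffer; both iv and tag must accompany the message.
 *
 *   byte iv[GCM_NONCE_MID_SZ];
 *   byte tag[16];
 *   wc_Gmac(key, 16, iv, sizeof(iv), aad, aadSz, tag, sizeof(tag), rng);
 *   ...
 *   wc_GmacVerify(key, 16, iv, sizeof(iv), aad, aadSz, tag, sizeof(tag));
 */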
int wc_GmacVerify(const byte* key, word32 keySz,
                  const byte* iv, word32 ivSz,
                  const byte* authIn, word32 authInSz,
                  const byte* authTag, word32 authTagSz)
{
    int ret;
#ifdef HAVE_AES_DECRYPT
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif

    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
        authTag == NULL || authTagSz == 0 || authTagSz > AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            ret = wc_AesGcmDecrypt(aes, NULL, NULL, 0, iv, ivSz,
                                   authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    ForceZero(aes, sizeof *aes);

#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif
#else
    (void)key;
    (void)keySz;
    (void)iv;
    (void)ivSz;
    (void)authIn;
    (void)authInSz;
    (void)authTag;
    (void)authTagSz;
    ret = NOT_COMPILED_IN;
#endif
    return ret;
}
#endif /* WC_NO_RNG */

WOLFSSL_API int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len)
{
    if (gmac == NULL || key == NULL) {
        return BAD_FUNC_ARG;
    }
    return wc_AesGcmSetKey(&gmac->aes, key, len);
}

WOLFSSL_API int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz,
                              const byte* authIn, word32 authInSz,
                              byte* authTag, word32 authTagSz)
{
    if (gmac == NULL) {
        return BAD_FUNC_ARG;
    }

    return wc_AesGcmEncrypt(&gmac->aes, NULL, NULL, 0, iv, ivSz,
                            authTag, authTagSz, authIn, authInSz);
}
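
/* Gmac object usage sketch (illustrative; error handling elided; key, iv
 * and aad are hypothetical caller buffers):
 *
 *   Gmac gmac;
 *   byte tag[16];
 *   wc_AesInit(&gmac.aes, NULL, INVALID_DEVID);
 *   wc_GmacSetKey(&gmac, key, 16);
 *   wc_GmacUpdate(&gmac, iv, GCM_NONCE_MID_SZ, aad, aadSz, tag, sizeof(tag));
 *   wc_AesFree(&gmac.aes);
 */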
#endif /* HAVE_AESGCM */

#ifdef HAVE_AESCCM

int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz)
{
    if (!((keySz == 16) || (keySz == 24) || (keySz == 32)))
        return BAD_FUNC_ARG;

    return wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
}

/* Checks if the tag size is an accepted value based on RFC 3610 section 2.
 * Returns 0 if the tag size is OK.
 */
int wc_AesCcmCheckTagSize(int sz)
{
    /* values here are from RFC 3610 section 2 */
    if (sz != 4 && sz != 6 && sz != 8 && sz != 10 && sz != 12 && sz != 14
            && sz != 16) {
        WOLFSSL_MSG("Bad auth tag size AES-CCM");
        return BAD_FUNC_ARG;
    }
    return 0;
}
#ifdef WOLFSSL_ARMASM
    /* implementation located in wolfcrypt/src/port/arm/armv8-aes.c */

#elif defined(HAVE_COLDFIRE_SEC)
    #error "Coldfire SEC doesn't currently support AES-CCM mode"

#elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
    !defined(WOLFSSL_QNX_CAAM)
    /* implemented in wolfcrypt/src/port/caam_aes.c */

#elif defined(WOLFSSL_SILABS_SE_ACCEL)
    /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    return wc_AesCcmEncrypt_silabs(
        aes, out, in, inSz,
        nonce, nonceSz,
        authTag, authTagSz,
        authIn, authInSz);
}

#ifdef HAVE_AES_DECRYPT
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     const byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    return wc_AesCcmDecrypt_silabs(
        aes, out, in, inSz,
        nonce, nonceSz,
        authTag, authTagSz,
        authIn, authInSz);
}
#endif
#elif defined(FREESCALE_LTC)

/* return 0 on success */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    byte *key;
    word32 keySize;
    status_t status;

    /* sanity check on arguments */
    /* note, LTC_AES_EncryptTagCcm() doesn't allow null src or dst
     * ptrs even if inSz is zero (ltc_aes_ccm_check_input_args()), so
     * don't allow it here either.
     */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }

    if (wc_AesCcmCheckTagSize(authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

    key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_EncryptTagCcm(LTC_BASE, in, out, inSz,
        nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (kStatus_Success == status) ? 0 : BAD_FUNC_ARG;
}

#ifdef HAVE_AES_DECRYPT
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     const byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    byte *key;
    word32 keySize;
    status_t status;

    /* sanity check on arguments */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }

    key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_DecryptTagCcm(LTC_BASE, in, out, inSz,
        nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    if (status != kStatus_Success) {
        XMEMSET(out, 0, inSz);
        return AES_CCM_AUTH_E;
    }
    return 0;
}
#endif /* HAVE_AES_DECRYPT */
  9014. #else
  9015. /* Software CCM */
  9016. static WARN_UNUSED_RESULT int roll_x(
  9017. Aes* aes, const byte* in, word32 inSz, byte* out)
  9018. {
  9019. int ret;
  9020. /* process the bulk of the data */
  9021. while (inSz >= AES_BLOCK_SIZE) {
  9022. xorbuf(out, in, AES_BLOCK_SIZE);
  9023. in += AES_BLOCK_SIZE;
  9024. inSz -= AES_BLOCK_SIZE;
  9025. ret = wc_AesEncrypt(aes, out, out);
  9026. if (ret != 0)
  9027. return ret;
  9028. }
  9029. /* process remainder of the data */
  9030. if (inSz > 0) {
  9031. xorbuf(out, in, inSz);
  9032. ret = wc_AesEncrypt(aes, out, out);
  9033. if (ret != 0)
  9034. return ret;
  9035. }
  9036. return 0;
  9037. }
  9038. static WARN_UNUSED_RESULT int roll_auth(
  9039. Aes* aes, const byte* in, word32 inSz, byte* out)
  9040. {
  9041. word32 authLenSz;
  9042. word32 remainder;
  9043. int ret;
  9044. /* encode the length in */
  9045. if (inSz <= 0xFEFF) {
  9046. authLenSz = 2;
  9047. out[0] ^= ((inSz & 0xFF00) >> 8);
  9048. out[1] ^= (inSz & 0x00FF);
  9049. }
  9050. else if (inSz <= 0xFFFFFFFF) {
  9051. authLenSz = 6;
  9052. out[0] ^= 0xFF; out[1] ^= 0xFE;
  9053. out[2] ^= ((inSz & 0xFF000000) >> 24);
  9054. out[3] ^= ((inSz & 0x00FF0000) >> 16);
  9055. out[4] ^= ((inSz & 0x0000FF00) >> 8);
  9056. out[5] ^= (inSz & 0x000000FF);
  9057. }
  9058. /* Note, the protocol handles auth data up to 2^64, but we are
  9059. * using 32-bit sizes right now, so the bigger data isn't handled
  9060. * else if (inSz <= 0xFFFFFFFFFFFFFFFF) {} */
  9061. else
  9062. return BAD_LENGTH_E;
    /* start filling out the rest of the first block */
    remainder = AES_BLOCK_SIZE - authLenSz;
    if (inSz >= remainder) {
        /* plenty of bulk data to fill the remainder of this block */
        xorbuf(out + authLenSz, in, remainder);
        inSz -= remainder;
        in += remainder;
    }
    else {
        /* not enough bulk data; consume what is available and leave the
         * implicit zero padding in place */
        xorbuf(out + authLenSz, in, inSz);
        inSz = 0;
    }

    ret = wc_AesEncrypt(aes, out, out);

    if ((ret == 0) && (inSz > 0)) {
        ret = roll_x(aes, in, inSz, out);
    }

    return ret;
}
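
/* Illustrative sketch (excluded from the build): the two AAD length
 * encodings roll_auth uses above, shown for concrete, hypothetical sizes.
 * Short AAD lengths (< 0xFF00) get a two-byte big-endian prefix; larger
 * 32-bit lengths get the 0xFF 0xFE marker plus a four-byte length. */
#if 0
static void example_ccm_aad_len_encoding(void)
{
    byte hdr[6];

    /* authInSz = 0x1234 (< 0xFF00): two-byte big-endian length */
    hdr[0] = 0x12; hdr[1] = 0x34;

    /* authInSz = 0x00010000 (>= 0xFF00): 0xFF 0xFE marker followed by a
     * four-byte big-endian length */
    hdr[0] = 0xFF; hdr[1] = 0xFE;
    hdr[2] = 0x00; hdr[3] = 0x01; hdr[4] = 0x00; hdr[5] = 0x00;

    (void)hdr;
}
#endif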
static WC_INLINE void AesCcmCtrInc(byte* B, word32 lenSz)
{
    word32 i;

    for (i = 0; i < lenSz; i++) {
        if (++B[AES_BLOCK_SIZE - 1 - i] != 0) return;
    }
}

#ifdef WOLFSSL_AESNI
static WC_INLINE void AesCcmCtrIncSet4(byte* B, word32 lenSz)
{
    word32 i;

    /* B+1 = B */
    XMEMCPY(B + AES_BLOCK_SIZE * 1, B, AES_BLOCK_SIZE);
    /* B+2,B+3 = B,B+1 */
    XMEMCPY(B + AES_BLOCK_SIZE * 2, B, AES_BLOCK_SIZE * 2);

    for (i = 0; i < lenSz; i++) {
        if (++B[AES_BLOCK_SIZE * 2 - 1 - i] != 0) break;
    }
    B[AES_BLOCK_SIZE * 3 - 1] += 2;
    if (B[AES_BLOCK_SIZE * 3 - 1] < 2) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 3 - 1 - i] != 0) break;
        }
    }
    B[AES_BLOCK_SIZE * 4 - 1] += 3;
    if (B[AES_BLOCK_SIZE * 4 - 1] < 3) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 4 - 1 - i] != 0) break;
        }
    }
}

static WC_INLINE void AesCcmCtrInc4(byte* B, word32 lenSz)
{
    word32 i;

    B[AES_BLOCK_SIZE - 1] += 4;
    if (B[AES_BLOCK_SIZE - 1] < 4) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE - 1 - i] != 0) break;
        }
    }
}
#endif

/* Software AES - CCM Encrypt */
/* return 0 on success */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
#ifndef WOLFSSL_AESNI
    byte A[AES_BLOCK_SIZE];
    byte B[AES_BLOCK_SIZE];
#else
    ALIGN128 byte A[AES_BLOCK_SIZE * 4];
    ALIGN128 byte B[AES_BLOCK_SIZE * 4];
#endif
    byte lenSz;
    word32 i;
    byte mask = 0xFF;
    const word32 wordSz = (word32)sizeof(word32);
    int ret;

    /* sanity check on arguments */
    if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
            nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
            authTagSz > AES_BLOCK_SIZE)
        return BAD_FUNC_ARG;

    /* sanity check on tag size */
    if (wc_AesCcmCheckTagSize(authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    if (aes->devId != INVALID_DEVID) {
        int crypto_cb_ret =
            wc_CryptoCb_AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz,
                                      authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    XMEMSET(A, 0, sizeof(A));
    XMEMCPY(B+1, nonce, nonceSz);
    lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;
    B[0] = (authInSz > 0 ? 64 : 0)
         + (8 * (((byte)authTagSz - 2) / 2))
         + (lenSz - 1);
    for (i = 0; i < lenSz; i++) {
        if (mask && i >= wordSz)
            mask = 0x00;
        B[AES_BLOCK_SIZE - 1 - i] = (inSz >> ((8 * i) & mask)) & mask;
    }

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B));
#endif

    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(B, sizeof(B));
#endif
        return ret;
    }
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A));
#endif

    if (authInSz > 0) {
        ret = roll_auth(aes, authIn, authInSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
    }
    if (inSz > 0) {
        ret = roll_x(aes, in, inSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
    }
    XMEMCPY(authTag, A, authTagSz);

    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
#endif
        return ret;
    }
    xorbuf(authTag, A, authTagSz);

    B[15] = 1;
#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        while (inSz >= AES_BLOCK_SIZE * 4) {
            AesCcmCtrIncSet4(B, lenSz);

            AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
                            aes->rounds);

            xorbuf(A, in, AES_BLOCK_SIZE * 4);
            XMEMCPY(out, A, AES_BLOCK_SIZE * 4);

            inSz -= AES_BLOCK_SIZE * 4;
            in += AES_BLOCK_SIZE * 4;
            out += AES_BLOCK_SIZE * 4;

            AesCcmCtrInc4(B, lenSz);
        }
        RESTORE_VECTOR_REGISTERS();
    }
#endif
    while (inSz >= AES_BLOCK_SIZE) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
        xorbuf(A, in, AES_BLOCK_SIZE);
        XMEMCPY(out, A, AES_BLOCK_SIZE);

        AesCcmCtrInc(B, lenSz);
        inSz -= AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }
    if (inSz > 0) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
        xorbuf(A, in, inSz);
        XMEMCPY(out, A, inSz);
    }

    ForceZero(A, sizeof(A));
    ForceZero(B, sizeof(B));

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(A, sizeof(A));
    wc_MemZero_Check(B, sizeof(B));
#endif

    return 0;
}
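
/* Illustrative sketch (excluded from the build): how the CCM B0 flags byte
 * built above decomposes, using a hypothetical 13-byte nonce and 16-byte
 * tag with AAD present. */
#if 0
static void example_ccm_b0_flags(void)
{
    byte lenSz = 16 /* AES_BLOCK_SIZE */ - 1 - 13; /* = 2 */
    byte flags = 64                  /* Adata bit: AAD is present      */
               + 8 * ((16 - 2) / 2)  /* (M-2)/2 in bits 5..3, M = tag  */
               + (lenSz - 1);        /* L-1 in bits 2..0               */
    /* flags == 0x79 for this example */
    (void)flags;
}
#endif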
#ifdef HAVE_AES_DECRYPT
/* Software AES - CCM Decrypt */
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     const byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
#ifndef WOLFSSL_AESNI
    byte A[AES_BLOCK_SIZE];
    byte B[AES_BLOCK_SIZE];
#else
    ALIGN128 byte B[AES_BLOCK_SIZE * 4];
    ALIGN128 byte A[AES_BLOCK_SIZE * 4];
#endif
    byte* o;
    byte lenSz;
    word32 i, oSz;
    byte mask = 0xFF;
    const word32 wordSz = (word32)sizeof(word32);
    int ret;

    /* sanity check on arguments */
    if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
            nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
            authTagSz > AES_BLOCK_SIZE)
        return BAD_FUNC_ARG;

    /* sanity check on tag size */
    if (wc_AesCcmCheckTagSize(authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    if (aes->devId != INVALID_DEVID) {
        int crypto_cb_ret =
            wc_CryptoCb_AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz,
                                      authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    o = out;
    oSz = inSz;

    XMEMSET(A, 0, sizeof(A));
    XMEMCPY(B+1, nonce, nonceSz);
    lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;

    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    B[15] = 1;

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmDecrypt A", A, sizeof(A));
    wc_MemZero_Add("wc_AesCcmDecrypt B", B, sizeof(B));
#endif

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        while (oSz >= AES_BLOCK_SIZE * 4) {
            AesCcmCtrIncSet4(B, lenSz);

            AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
                            aes->rounds);

            xorbuf(A, in, AES_BLOCK_SIZE * 4);
            XMEMCPY(o, A, AES_BLOCK_SIZE * 4);

            oSz -= AES_BLOCK_SIZE * 4;
            in += AES_BLOCK_SIZE * 4;
            o += AES_BLOCK_SIZE * 4;

            AesCcmCtrInc4(B, lenSz);
        }
        RESTORE_VECTOR_REGISTERS();
    }
#endif
    while (oSz >= AES_BLOCK_SIZE) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
        xorbuf(A, in, AES_BLOCK_SIZE);
        XMEMCPY(o, A, AES_BLOCK_SIZE);

        AesCcmCtrInc(B, lenSz);
        oSz -= AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        o += AES_BLOCK_SIZE;
    }
    if (inSz > 0) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
        xorbuf(A, in, oSz);
        XMEMCPY(o, A, oSz);
    }

    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
#endif
        return ret;
    }

    o = out;
    oSz = inSz;

    B[0] = (authInSz > 0 ? 64 : 0)
         + (8 * (((byte)authTagSz - 2) / 2))
         + (lenSz - 1);
    for (i = 0; i < lenSz; i++) {
        if (mask && i >= wordSz)
            mask = 0x00;
        B[AES_BLOCK_SIZE - 1 - i] = (inSz >> ((8 * i) & mask)) & mask;
    }

    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
#endif
        return ret;
    }

    if (authInSz > 0) {
        ret = roll_auth(aes, authIn, authInSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
    }
    if (inSz > 0) {
        ret = roll_x(aes, o, oSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
    }

    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    ret = wc_AesEncrypt(aes, B, B);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
#endif
        return ret;
    }
    xorbuf(A, B, authTagSz);

    if (ConstantCompare(A, authTag, authTagSz) != 0) {
        /* If the authTag check fails, don't keep the decrypted data.
         * Unfortunately, you need the decrypted data to calculate the
         * check value. */
#if defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2) && \
    defined(ACVP_VECTOR_TESTING)
        WOLFSSL_MSG("Preserve output for vector responses");
#else
        if (inSz > 0)
            XMEMSET(out, 0, inSz);
#endif
        ret = AES_CCM_AUTH_E;
    }

    ForceZero(A, sizeof(A));
    ForceZero(B, sizeof(B));
    o = NULL;

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(A, sizeof(A));
    wc_MemZero_Check(B, sizeof(B));
#endif

    return ret;
}
#endif /* HAVE_AES_DECRYPT */
#endif /* software CCM */

/* abstract functions that call lower level AES-CCM functions */
#ifndef WC_NO_RNG

int wc_AesCcmSetNonce(Aes* aes, const byte* nonce, word32 nonceSz)
{
    int ret = 0;

    if (aes == NULL || nonce == NULL ||
            nonceSz < CCM_NONCE_MIN_SZ || nonceSz > CCM_NONCE_MAX_SZ) {
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        XMEMCPY(aes->reg, nonce, nonceSz);
        aes->nonceSz = nonceSz;
        /* set the invocation counter so that at most 2^61 further
         * invocations are allowed before it overflows */
        aes->invokeCtr[0] = 0;
        aes->invokeCtr[1] = 0xE0000000;
    }

    return ret;
}

int wc_AesCcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
                        byte* ivOut, word32 ivOutSz,
                        byte* authTag, word32 authTagSz,
                        const byte* authIn, word32 authInSz)
{
    int ret = 0;

    if (aes == NULL || out == NULL ||
            (in == NULL && sz != 0) ||
            ivOut == NULL ||
            (authIn == NULL && authInSz != 0) ||
            (ivOutSz != aes->nonceSz)) {
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        aes->invokeCtr[0]++;
        if (aes->invokeCtr[0] == 0) {
            aes->invokeCtr[1]++;
            if (aes->invokeCtr[1] == 0)
                ret = AES_CCM_OVERFLOW_E;
        }
    }

    if (ret == 0) {
        ret = wc_AesCcmEncrypt(aes, out, in, sz,
                               (byte*)aes->reg, aes->nonceSz,
                               authTag, authTagSz,
                               authIn, authInSz);
        if (ret == 0) {
            XMEMCPY(ivOut, aes->reg, aes->nonceSz);
            IncCtr((byte*)aes->reg, aes->nonceSz);
        }
    }

    return ret;
}
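
/* Illustrative usage sketch (excluded from the build) for the
 * internally-managed-nonce API above. The 16-byte key, 13-byte nonce, and
 * buffer sizes are hypothetical; msgSz is assumed to fit in cipher[]. */
#if 0
static int example_ccm_encrypt_ex(Aes* aes, const byte* key,
                                  const byte* nonce13, const byte* msg,
                                  word32 msgSz)
{
    byte cipher[128]; /* assumes msgSz <= sizeof(cipher) */
    byte ivOut[13];
    byte tag[16];
    int ret = wc_AesSetKey(aes, key, 16, NULL, AES_ENCRYPTION);
    if (ret == 0)
        ret = wc_AesCcmSetNonce(aes, nonce13, 13);
    if (ret == 0) {
        /* the nonce actually used is returned in ivOut and must be
         * conveyed to the recipient; it auto-increments per call */
        ret = wc_AesCcmEncrypt_ex(aes, cipher, msg, msgSz,
                                  ivOut, sizeof(ivOut),
                                  tag, sizeof(tag), NULL, 0);
    }
    return ret;
}
#endif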
#endif /* WC_NO_RNG */

#endif /* HAVE_AESCCM */

/* Initialize Aes for use with async hardware */
int wc_AesInit(Aes* aes, void* heap, int devId)
{
    int ret = 0;

    if (aes == NULL)
        return BAD_FUNC_ARG;

    aes->heap = heap;

#ifdef WOLF_CRYPTO_CB
    aes->devId = devId;
    aes->devCtx = NULL;
#else
    (void)devId;
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    ret = wolfAsync_DevCtxInit(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES,
                               aes->heap, devId);
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_AFALG
    aes->alFd = -1;
    aes->rdFd = -1;
#endif
#ifdef WOLFSSL_KCAPI_AES
    aes->handle = NULL;
    aes->init = 0;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    aes->ctx.cfd = -1;
#endif
#if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
    XMEMSET(&aes->ctx, 0, sizeof(aes->ctx));
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    DCPAesInit(aes);
#endif

#ifdef HAVE_AESGCM
#ifdef OPENSSL_EXTRA
    XMEMSET(aes->aadH, 0, sizeof(aes->aadH));
    aes->aadLen = 0;
#endif
#endif

#ifdef WOLFSSL_AESGCM_STREAM
#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    aes->streamData = NULL;
#endif
    aes->keylen = 0;
    aes->nonceSz = 0;
    aes->gcmKeySet = 0;
    aes->nonceSet = 0;
    aes->ctrSet = 0;
#endif

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    ret = wc_psa_aes_init(aes);
#endif

    return ret;
}

#ifdef WOLF_PRIVATE_KEY_ID
int wc_AesInit_Id(Aes* aes, unsigned char* id, int len, void* heap, int devId)
{
    int ret = 0;

    if (aes == NULL)
        ret = BAD_FUNC_ARG;
    if (ret == 0 && (len < 0 || len > AES_MAX_ID_LEN))
        ret = BUFFER_E;

    if (ret == 0)
        ret = wc_AesInit(aes, heap, devId);
    if (ret == 0) {
        XMEMCPY(aes->id, id, len);
        aes->idLen = len;
        aes->labelLen = 0;
    }

    return ret;
}

int wc_AesInit_Label(Aes* aes, const char* label, void* heap, int devId)
{
    int ret = 0;
    int labelLen = 0;

    if (aes == NULL || label == NULL)
        ret = BAD_FUNC_ARG;
    if (ret == 0) {
        labelLen = (int)XSTRLEN(label);
        if (labelLen == 0 || labelLen > AES_MAX_LABEL_LEN)
            ret = BUFFER_E;
    }

    if (ret == 0)
        ret = wc_AesInit(aes, heap, devId);
    if (ret == 0) {
        XMEMCPY(aes->label, label, labelLen);
        aes->labelLen = labelLen;
        aes->idLen = 0;
    }

    return ret;
}
#endif

/* Free Aes from use with async hardware */
void wc_AesFree(Aes* aes)
{
    if (aes == NULL)
        return;

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    wolfAsync_DevCtxFree(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES);
#endif /* WOLFSSL_ASYNC_CRYPT */
#if defined(WOLFSSL_AFALG) || defined(WOLFSSL_AFALG_XILINX_AES)
    if (aes->rdFd > 0) { /* negative is error case */
        close(aes->rdFd);
    }
    if (aes->alFd > 0) {
        close(aes->alFd);
    }
#endif /* WOLFSSL_AFALG */
#ifdef WOLFSSL_KCAPI_AES
    ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
    if (aes->init == 1) {
        kcapi_cipher_destroy(aes->handle);
    }
    aes->init = 0;
    aes->handle = NULL;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    wc_DevCryptoFree(&aes->ctx);
#endif
#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
    (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
    ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    DCPAesFree(aes);
#endif
#if defined(WOLFSSL_AESGCM_STREAM) && defined(WOLFSSL_SMALL_STACK) && \
    !defined(WOLFSSL_AESNI)
    if (aes->streamData != NULL) {
        XFREE(aes->streamData, aes->heap, DYNAMIC_TYPE_AES);
        aes->streamData = NULL;
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    se050_aes_free(aes);
#endif

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    wc_psa_aes_free(aes);
#endif

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(aes, sizeof(Aes));
#endif
}
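
/* Illustrative lifecycle sketch (excluded from the build): every Aes object
 * should be paired wc_AesInit()/wc_AesFree(), whichever backend is compiled
 * in, so handles are released and key copies scrubbed. */
#if 0
static int example_aes_lifecycle(void)
{
    Aes aes;
    int ret = wc_AesInit(&aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        /* ... wc_AesSetKey() and encrypt/decrypt calls here ... */
        wc_AesFree(&aes); /* releases backend handles, zeroizes key copies */
    }
    return ret;
}
#endif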
int wc_AesGetKeySize(Aes* aes, word32* keySize)
{
    int ret = 0;

    if (aes == NULL || keySize == NULL) {
        return BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    return wc_psa_aes_get_key_size(aes, keySize);
#endif
#if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
    *keySize = aes->ctx.key.keySize;
    return ret;
#endif

    switch (aes->rounds) {
#ifdef WOLFSSL_AES_128
    case 10:
        *keySize = 16;
        break;
#endif
#ifdef WOLFSSL_AES_192
    case 12:
        *keySize = 24;
        break;
#endif
#ifdef WOLFSSL_AES_256
    case 14:
        *keySize = 32;
        break;
#endif
    default:
        *keySize = 0;
        ret = BAD_FUNC_ARG;
    }

    return ret;
}

#endif /* !WOLFSSL_TI_CRYPT */

#ifdef HAVE_AES_ECB
#if defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
    !defined(WOLFSSL_QNX_CAAM)
    /* implemented in wolfcrypt/src/port/caam/caam_aes.c */

#elif defined(WOLFSSL_AFALG)
    /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */

#elif defined(WOLFSSL_DEVCRYPTO_AES)
    /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */

#elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)

/* Software AES - ECB */
int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    if ((in == NULL) || (out == NULL) || (aes == NULL))
        return BAD_FUNC_ARG;

    return AES_ECB_encrypt(aes, in, out, sz);
}

int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    if ((in == NULL) || (out == NULL) || (aes == NULL))
        return BAD_FUNC_ARG;

    return AES_ECB_decrypt(aes, in, out, sz);
}

#else

/* Software AES - ECB */
static WARN_UNUSED_RESULT int _AesEcbEncrypt(
    Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks = sz / AES_BLOCK_SIZE;

#ifdef WOLF_CRYPTO_CB
    if (aes->devId != INVALID_DEVID) {
        int ret = wc_CryptoCb_AesEcbEncrypt(aes, out, in, sz);
        if (ret != CRYPTOCB_UNAVAILABLE)
            return ret;
        /* fall-through when unavailable */
    }
#endif
#ifdef WOLFSSL_IMXRT_DCP
    if (aes->keylen == 16)
        return DCPAesEcbEncrypt(aes, out, in, sz);
#endif
    while (blocks > 0) {
        int ret = wc_AesEncryptDirect(aes, out, in);
        if (ret != 0)
            return ret;
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        blocks--;
    }
    return 0;
}

static WARN_UNUSED_RESULT int _AesEcbDecrypt(
    Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks = sz / AES_BLOCK_SIZE;

#ifdef WOLF_CRYPTO_CB
    if (aes->devId != INVALID_DEVID) {
        int ret = wc_CryptoCb_AesEcbDecrypt(aes, out, in, sz);
        if (ret != CRYPTOCB_UNAVAILABLE)
            return ret;
        /* fall-through when unavailable */
    }
#endif
#ifdef WOLFSSL_IMXRT_DCP
    if (aes->keylen == 16)
        return DCPAesEcbDecrypt(aes, out, in, sz);
#endif
    while (blocks > 0) {
        int ret = wc_AesDecryptDirect(aes, out, in);
        if (ret != 0)
            return ret;
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        blocks--;
    }
    return 0;
}

int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;

    if ((in == NULL) || (out == NULL) || (aes == NULL))
        return BAD_FUNC_ARG;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = _AesEcbEncrypt(aes, out, in, sz);
    RESTORE_VECTOR_REGISTERS();

    return ret;
}

int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;

    if ((in == NULL) || (out == NULL) || (aes == NULL))
        return BAD_FUNC_ARG;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = _AesEcbDecrypt(aes, out, in, sz);
    RESTORE_VECTOR_REGISTERS();

    return ret;
}
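
/* Illustrative usage sketch (excluded from the build): the ECB block loop
 * above advances one AES_BLOCK_SIZE at a time, so sz is expected to be a
 * multiple of AES_BLOCK_SIZE (a trailing partial block is ignored). The
 * key and data here are hypothetical. */
#if 0
static int example_aes_ecb(Aes* aes, const byte* key)
{
    byte pt[AES_BLOCK_SIZE * 2] = {0};
    byte ct[AES_BLOCK_SIZE * 2];
    int ret = wc_AesSetKey(aes, key, 16, NULL, AES_ENCRYPTION);
    if (ret == 0)
        ret = wc_AesEcbEncrypt(aes, ct, pt, sizeof(pt));
    return ret;
}
#endif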
#endif
#endif /* HAVE_AES_ECB */

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_OFB)
/* Feedback AES mode
 *
 * aes  structure holding key to use for encryption
 * out  buffer to hold result of encryption (must be at least as large as
 *      input buffer)
 * in   buffer to encrypt
 * sz   size of input buffer
 * mode flag to specify AES mode
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - CFB/OFB Encrypt */
static WARN_UNUSED_RESULT int wc_AesFeedbackEncrypt(
    Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
    byte* tmp = NULL;
#ifdef WOLFSSL_AES_CFB
    byte* reg = NULL;
#endif
    int ret = 0;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_AES_CFB
    if (aes->left && sz) {
        reg = (byte*)aes->reg + AES_BLOCK_SIZE - aes->left;
    }
#endif

    /* consume any unused bytes left in aes->tmp */
    tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left;
    while (aes->left && sz) {
        *(out) = *(in++) ^ *(tmp++);
#ifdef WOLFSSL_AES_CFB
        if (mode == AES_CFB_MODE) {
            *(reg++) = *out;
        }
#endif
        out++;
        aes->left--;
        sz--;
    }

    SAVE_VECTOR_REGISTERS(return _svr_ret;);

    while (sz >= AES_BLOCK_SIZE) {
        /* Using aes->tmp here for inline case i.e. in=out */
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
        if (mode == AES_CFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        XMEMCPY(out, aes->tmp, AES_BLOCK_SIZE);
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        aes->left = 0;
    }

    /* encrypt left over data */
    if ((ret == 0) && sz) {
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
    }
    if ((ret == 0) && sz) {
        aes->left = AES_BLOCK_SIZE;
        tmp = (byte*)aes->tmp;
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
#ifdef WOLFSSL_AES_CFB
        reg = (byte*)aes->reg;
#endif

        while (sz--) {
            *(out) = *(in++) ^ *(tmp++);
#ifdef WOLFSSL_AES_CFB
            if (mode == AES_CFB_MODE) {
                *(reg++) = *out;
            }
#endif
            out++;
            aes->left--;
        }
    }
    RESTORE_VECTOR_REGISTERS();

    return ret;
}

#ifdef HAVE_AES_DECRYPT
/* Feedback AES mode
 *
 * aes  structure holding key to use for decryption
 * out  buffer to hold result of decryption (must be at least as large as
 *      input buffer)
 * in   buffer to decrypt
 * sz   size of input buffer
 * mode flag to specify AES mode
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - CFB/OFB Decrypt */
static WARN_UNUSED_RESULT int wc_AesFeedbackDecrypt(
    Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
    byte* tmp;
    int ret = 0;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_AES_CFB
    /* check if more input needs copied over to aes->reg */
    if (aes->left && sz && mode == AES_CFB_MODE) {
        int size = min(aes->left, sz);
        XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, in, size);
    }
#endif

    /* consume any unused bytes left in aes->tmp */
    tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left;
    while (aes->left && sz) {
        *(out++) = *(in++) ^ *(tmp++);
        aes->left--;
        sz--;
    }

    SAVE_VECTOR_REGISTERS(return _svr_ret;);

    while (sz > AES_BLOCK_SIZE) {
        /* Using aes->tmp here for inline case i.e. in=out */
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            XMEMCPY((byte*)aes->reg, (byte*)aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
        if (mode == AES_CFB_MODE) {
            XMEMCPY(aes->reg, in, AES_BLOCK_SIZE);
        }
#endif
        XMEMCPY(out, (byte*)aes->tmp, AES_BLOCK_SIZE);
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        aes->left = 0;
    }

    /* decrypt left over data */
    if ((ret == 0) && sz) {
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
    }
    if ((ret == 0) && sz) {
#ifdef WOLFSSL_AES_CFB
        if (mode == AES_CFB_MODE) {
            XMEMCPY(aes->reg, in, sz);
        }
#endif
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif

        aes->left = AES_BLOCK_SIZE;
        tmp = (byte*)aes->tmp;

        while (sz--) {
            *(out++) = *(in++) ^ *(tmp++);
            aes->left--;
        }
    }
    RESTORE_VECTOR_REGISTERS();

    return ret;
}
#endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_AES_CFB || WOLFSSL_AES_OFB */
#ifdef WOLFSSL_AES_CFB
/* CFB 128
 *
 * aes structure holding key to use for encryption
 * out buffer to hold result of encryption (must be at least as large as
 *     input buffer)
 * in  buffer to encrypt
 * sz  size of input buffer
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - CFB Encrypt */
int wc_AesCfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_CFB_MODE);
}

#ifdef HAVE_AES_DECRYPT
/* CFB 128
 *
 * aes structure holding key to use for decryption
 * out buffer to hold result of decryption (must be at least as large as
 *     input buffer)
 * in  buffer to decrypt
 * sz  size of input buffer
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - CFB Decrypt */
int wc_AesCfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_CFB_MODE);
}
#endif /* HAVE_AES_DECRYPT */
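
/* Illustrative streaming sketch (excluded from the build): because the
 * feedback routines track leftover keystream in aes->left, CFB input may be
 * fed in arbitrary chunk sizes and the concatenated output matches a
 * single-shot call. The chunk sizes here are hypothetical, and out is
 * assumed to hold at least 32 bytes. */
#if 0
static int example_cfb_streaming(Aes* aes, byte* out, const byte* in)
{
    int ret = wc_AesCfbEncrypt(aes, out, in, 5);          /* partial block */
    if (ret == 0)
        ret = wc_AesCfbEncrypt(aes, out + 5, in + 5, 27); /* remainder */
    return ret;
}
#endif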
/* shift the whole AES_BLOCK_SIZE array left by 8 or 1 bits */
static void shiftLeftArray(byte* ary, byte shift)
{
    int i;

    if (shift == WOLFSSL_BIT_SIZE) {
        /* shifting over by 8 bits */
        for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
            ary[i] = ary[i+1];
        }
        ary[i] = 0;
    }
    else {
        /* shifting over by 7 or less bits */
        for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
            byte carry = ary[i+1] & (0XFF << (WOLFSSL_BIT_SIZE - shift));
            carry >>= (WOLFSSL_BIT_SIZE - shift);
            ary[i] = (ary[i] << shift) + carry;
        }
        ary[i] = ary[i] << shift;
    }
}

/* returns 0 on success and negative values on failure */
static WARN_UNUSED_RESULT int wc_AesFeedbackCFB8(
    Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
{
    byte *pt;
    int ret = 0;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (sz == 0) {
        return 0;
    }

    SAVE_VECTOR_REGISTERS(return _svr_ret;);

    while (sz > 0) {
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
        if (dir == AES_DECRYPTION) {
            pt = (byte*)aes->reg;

            /* LSB + CAT */
            shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
            pt[AES_BLOCK_SIZE - 1] = in[0];
        }

        /* MSB + XOR */
#ifdef BIG_ENDIAN_ORDER
        ByteReverseWords(aes->tmp, aes->tmp, AES_BLOCK_SIZE);
#endif
        out[0] = (byte)(aes->tmp[0] ^ in[0]);
        if (dir == AES_ENCRYPTION) {
            pt = (byte*)aes->reg;

            /* LSB + CAT */
            shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
            pt[AES_BLOCK_SIZE - 1] = out[0];
        }

        out += 1;
        in  += 1;
        sz  -= 1;
    }

    RESTORE_VECTOR_REGISTERS();

    return ret;
}

/* returns 0 on success and negative values on failure */
static WARN_UNUSED_RESULT int wc_AesFeedbackCFB1(
    Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
{
    byte tmp;
    byte cur = 0; /* hold current work in order to handle inline in=out */
    byte* pt;
    int bit = 7;
    int ret = 0;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (sz == 0) {
        return 0;
    }

    SAVE_VECTOR_REGISTERS(return _svr_ret;);

    while (sz > 0) {
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
        if (dir == AES_DECRYPTION) {
            pt = (byte*)aes->reg;

            /* LSB + CAT */
            tmp = (0X01 << bit) & in[0];
            tmp = tmp >> bit;
            tmp &= 0x01;
            shiftLeftArray((byte*)aes->reg, 1);
            pt[AES_BLOCK_SIZE - 1] |= tmp;
        }

        /* MSB + XOR */
        tmp = (0X01 << bit) & in[0];
        pt = (byte*)aes->tmp;
        tmp = (pt[0] >> 7) ^ (tmp >> bit);
        tmp &= 0x01;
        cur |= (tmp << bit);

        if (dir == AES_ENCRYPTION) {
            pt = (byte*)aes->reg;

            /* LSB + CAT */
            shiftLeftArray((byte*)aes->reg, 1);
            pt[AES_BLOCK_SIZE - 1] |= tmp;
        }

        bit--;
        if (bit < 0) {
            out[0] = cur;
            out += 1;
            in  += 1;
            sz  -= 1;
            bit = 7;
            cur = 0;
        }
        else {
            sz -= 1;
        }
    }

    if (ret == 0) {
        if (bit >= 0 && bit < 7) {
            /* flush any partial byte; bit can legitimately reach 0 here,
             * with seven bits still pending in cur */
            out[0] = cur;
        }
    }

    RESTORE_VECTOR_REGISTERS();

    return ret;
}

/* CFB 1
 *
 * aes structure holding key to use for encryption
 * out buffer to hold result of encryption (must be at least as large as
 *     input buffer)
 * in  buffer to encrypt (packed to the left, i.e. the bits 101 are passed
 *     as 0xA0)
 * sz  size of input buffer in bits, not bytes (a sz of 1 processes a single
 *     bit and a sz of 8 processes one full byte)
 *
 * returns 0 on success and negative values on failure
 */
int wc_AesCfb1Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_AesFeedbackCFB1(aes, out, in, sz, AES_ENCRYPTION);
}
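
/* Illustrative sketch (excluded from the build): CFB1 bit packing. The bits
 * 1,0,1 packed toward the MSB of a byte are 0xA0, and sz counts bits, so
 * this call processes exactly three bits. */
#if 0
static int example_cfb1(Aes* aes, byte* out)
{
    byte in = 0xA0; /* bits 101 packed to the left */
    return wc_AesCfb1Encrypt(aes, out, &in, 3); /* sz is in bits */
}
#endif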
/* CFB 8
 *
 * aes structure holding key to use for encryption
 * out buffer to hold result of encryption (must be at least as large as
 *     input buffer)
 * in  buffer to encrypt
 * sz  size of input buffer
 *
 * returns 0 on success and negative values on failure
 */
int wc_AesCfb8Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_AesFeedbackCFB8(aes, out, in, sz, AES_ENCRYPTION);
}

#ifdef HAVE_AES_DECRYPT
/* CFB 1
 *
 * aes structure holding key to use for decryption
 * out buffer to hold result of decryption (must be at least as large as
 *     input buffer)
 * in  buffer to decrypt (packed to the left, as for encryption)
 * sz  size of input buffer in bits, not bytes (a sz of 1 processes a single
 *     bit and a sz of 8 processes one full byte)
 *
 * returns 0 on success and negative values on failure
 */
int wc_AesCfb1Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_AesFeedbackCFB1(aes, out, in, sz, AES_DECRYPTION);
}

/* CFB 8
 *
 * aes structure holding key to use for decryption
 * out buffer to hold result of decryption (must be at least as large as
 *     input buffer)
 * in  buffer to decrypt
 * sz  size of input buffer
 *
 * returns 0 on success and negative values on failure
 */
int wc_AesCfb8Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_AesFeedbackCFB8(aes, out, in, sz, AES_DECRYPTION);
}
#endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_AES_CFB */

#ifdef WOLFSSL_AES_OFB
/* OFB
 *
 * aes structure holding key to use for encryption
 * out buffer to hold result of encryption (must be at least as large as
 *     input buffer)
 * in  buffer to encrypt
 * sz  size of input buffer
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - OFB Encrypt */
int wc_AesOfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_OFB_MODE);
}

#ifdef HAVE_AES_DECRYPT
/* OFB
 *
 * aes structure holding key to use for decryption
 * out buffer to hold result of decryption (must be at least as large as
 *     input buffer)
 * in  buffer to decrypt
 * sz  size of input buffer
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - OFB Decrypt */
int wc_AesOfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_OFB_MODE);
}
#endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_AES_OFB */
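
/* Illustrative sketch (excluded from the build): OFB is a pure keystream
 * mode, so decryption applies the same operation as encryption and both
 * sides key their Aes objects for AES_ENCRYPTION. sz is hypothetical and
 * assumed to be at most 64 here. */
#if 0
static int example_ofb_round_trip(Aes* enc, Aes* dec, word32 sz)
{
    byte pt[64] = {0}, ct[64], back[64];
    int ret = wc_AesOfbEncrypt(enc, ct, pt, sz);
    if (ret == 0)
        ret = wc_AesOfbDecrypt(dec, back, ct, sz); /* same keystream */
    return ret;
}
#endif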
#ifdef HAVE_AES_KEYWRAP

/* Initialize key wrap counter with value */
static WC_INLINE void InitKeyWrapCounter(byte* inOutCtr, word32 value)
{
    int i;
    word32 bytes;

    bytes = sizeof(word32);
    for (i = 0; i < (int)sizeof(word32); i++) {
        inOutCtr[i+sizeof(word32)] = (value >> ((bytes - 1) * 8)) & 0xFF;
        bytes--;
    }
}

/* Increment key wrap counter */
static WC_INLINE void IncrementKeyWrapCounter(byte* inOutCtr)
{
    int i;

    /* in network byte order so start at end and work back */
    for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
        if (++inOutCtr[i])  /* we're done unless we overflow */
            return;
    }
}

/* Decrement key wrap counter */
static WC_INLINE void DecrementKeyWrapCounter(byte* inOutCtr)
{
    int i;

    for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
        if (--inOutCtr[i] != 0xFF)  /* we're done unless we underflow */
            return;
    }
}

int wc_AesKeyWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
                     word32 outSz, const byte* iv)
{
    word32 i;
    byte* r;
    int j;
    int ret = 0;

    byte t[KEYWRAP_BLOCK_SIZE];
    byte tmp[AES_BLOCK_SIZE];
    /* input must be at least two 64-bit blocks; the wrapped output is
     * (n + 1) 64-bit blocks, i.e. inSz + 8 bytes */
    if (aes == NULL || in == NULL || inSz < 2*KEYWRAP_BLOCK_SIZE ||
            out == NULL || outSz < (inSz + KEYWRAP_BLOCK_SIZE))
        return BAD_FUNC_ARG;

    /* input must be multiple of 64-bits */
    if (inSz % KEYWRAP_BLOCK_SIZE != 0)
        return BAD_FUNC_ARG;

    r = out + 8;
    XMEMCPY(r, in, inSz);
    XMEMSET(t, 0, sizeof(t));

    /* user IV is optional */
    if (iv == NULL) {
        XMEMSET(tmp, 0xA6, KEYWRAP_BLOCK_SIZE);
    } else {
        XMEMCPY(tmp, iv, KEYWRAP_BLOCK_SIZE);
    }

    SAVE_VECTOR_REGISTERS(return _svr_ret;);

    for (j = 0; j <= 5; j++) {
        for (i = 1; i <= inSz / KEYWRAP_BLOCK_SIZE; i++) {
            /* load R[i] */
            XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);

            ret = wc_AesEncryptDirect(aes, tmp, tmp);
            if (ret != 0)
                break;

            /* calculate new A */
            IncrementKeyWrapCounter(t);
            xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);

            /* save R[i] */
            XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
            r += KEYWRAP_BLOCK_SIZE;
        }
        if (ret != 0)
            break;
        r = out + KEYWRAP_BLOCK_SIZE;
    }
    RESTORE_VECTOR_REGISTERS();

    if (ret != 0)
        return ret;

    /* C[0] = A */
    XMEMCPY(out, tmp, KEYWRAP_BLOCK_SIZE);

    return inSz + KEYWRAP_BLOCK_SIZE;
}

/* perform AES key wrap (RFC 3394), return out sz on success, negative on err */
int wc_AesKeyWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
                  byte* out, word32 outSz, const byte* iv)
{
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif
    int ret;

    if (key == NULL)
        return BAD_FUNC_ARG;

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;

    ret = wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
    if (ret != 0) {
        wc_AesFree(aes);
        goto out;
    }

    ret = wc_AesKeyWrap_ex(aes, in, inSz, out, outSz, iv);

    wc_AesFree(aes);

out:
#ifdef WOLFSSL_SMALL_STACK
    if (aes != NULL)
        XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}

int wc_AesKeyUnWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
                       word32 outSz, const byte* iv)
{
    byte* r;
    word32 i, n;
    int j;
    int ret = 0;

    byte t[KEYWRAP_BLOCK_SIZE];
    byte tmp[AES_BLOCK_SIZE];

    const byte* expIv;
    const byte defaultIV[] = {
        0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6
    };

    if (aes == NULL || in == NULL || inSz < 3 * KEYWRAP_BLOCK_SIZE ||
            out == NULL || outSz < (inSz - KEYWRAP_BLOCK_SIZE))
        return BAD_FUNC_ARG;

    /* input must be multiple of 64-bits */
    if (inSz % KEYWRAP_BLOCK_SIZE != 0)
        return BAD_FUNC_ARG;

    /* user IV optional */
    if (iv != NULL)
        expIv = iv;
    else
        expIv = defaultIV;

    /* A = C[0], R[i] = C[i] */
    XMEMCPY(tmp, in, KEYWRAP_BLOCK_SIZE);
    XMEMCPY(out, in + KEYWRAP_BLOCK_SIZE, inSz - KEYWRAP_BLOCK_SIZE);
    XMEMSET(t, 0, sizeof(t));

    SAVE_VECTOR_REGISTERS(return _svr_ret;);

    /* initialize counter to 6n */
    n = (inSz - 1) / KEYWRAP_BLOCK_SIZE;
    InitKeyWrapCounter(t, 6 * n);

    for (j = 5; j >= 0; j--) {
        for (i = n; i >= 1; i--) {
            /* calculate A */
            xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);
            DecrementKeyWrapCounter(t);

            /* load R[i], starting at end of R */
            r = out + ((i - 1) * KEYWRAP_BLOCK_SIZE);
            XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);
            ret = wc_AesDecryptDirect(aes, tmp, tmp);
            if (ret != 0)
                break;

            /* save R[i] */
            XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
        }
        if (ret != 0)
            break;
    }
    RESTORE_VECTOR_REGISTERS();

    if (ret != 0)
        return ret;

    /* verify IV */
    if (XMEMCMP(tmp, expIv, KEYWRAP_BLOCK_SIZE) != 0)
        return BAD_KEYWRAP_IV_E;

    return inSz - KEYWRAP_BLOCK_SIZE;
}

int wc_AesKeyUnWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
                    byte* out, word32 outSz, const byte* iv)
{
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif
    int ret;

    if (key == NULL)
        return BAD_FUNC_ARG;

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;

    ret = wc_AesSetKey(aes, key, keySz, NULL, AES_DECRYPTION);
    if (ret != 0) {
        wc_AesFree(aes);
        goto out;
    }

    ret = wc_AesKeyUnWrap_ex(aes, in, inSz, out, outSz, iv);

    wc_AesFree(aes);

out:
#ifdef WOLFSSL_SMALL_STACK
    if (aes)
        XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}
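
/* Illustrative sketch (excluded from the build): RFC 3394 wrap of a 16-byte
 * key under a 16-byte KEK. The wrapped blob is inSz + 8 bytes, and both
 * calls return the number of output bytes on success. Key material is
 * hypothetical. */
#if 0
static int example_key_wrap(const byte* kek, const byte* cek)
{
    byte wrapped[16 + KEYWRAP_BLOCK_SIZE];
    byte unwrapped[16];
    int ret = wc_AesKeyWrap(kek, 16, cek, 16,
                            wrapped, sizeof(wrapped), NULL);
    if (ret > 0)
        ret = wc_AesKeyUnWrap(kek, 16, wrapped, (word32)ret,
                              unwrapped, sizeof(unwrapped), NULL);
    return ret;
}
#endif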
#endif /* HAVE_AES_KEYWRAP */

#ifdef WOLFSSL_AES_XTS

/* Galois field polynomial constant to use */
#define GF_XTS 0x87

/* This is to help with setting keys to the correct encrypt or decrypt type.
 *
 * tweak AES key for the tweak in XTS
 * aes   AES key for the encrypt/decrypt process
 * key   buffer holding aes key | tweak key
 * len   length of key buffer in bytes; should be twice the key size, i.e.
 *       32 for a 16-byte key
 * dir   direction, either AES_ENCRYPTION or AES_DECRYPTION
 * heap  heap hint to use for memory; can be NULL
 * devId id to use with async crypto; can be 0
 *
 * Note: it is up to the user to call wc_AesFree on the tweak and aes keys
 * when done.
 *
 * return 0 on success
 */
int wc_AesXtsSetKey(XtsAes* aes, const byte* key, word32 len, int dir,
                    void* heap, int devId)
{
    word32 keySz;
    int    ret = 0;

    if (aes == NULL || key == NULL) {
        return BAD_FUNC_ARG;
    }

    if ((ret = wc_AesInit(&aes->tweak, heap, devId)) != 0) {
        return ret;
    }
    if ((ret = wc_AesInit(&aes->aes, heap, devId)) != 0) {
        return ret;
    }

    keySz = len/2;
    if (keySz != 16 && keySz != 32) {
        WOLFSSL_MSG("Unsupported key size");
        return WC_KEY_SIZE_E;
    }

    if ((ret = wc_AesSetKey(&aes->aes, key, keySz, NULL, dir)) == 0) {
        ret = wc_AesSetKey(&aes->tweak, key + keySz, keySz, NULL,
                           AES_ENCRYPTION);
        if (ret != 0) {
            wc_AesFree(&aes->aes);
        }
    }

    return ret;
}
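
/* Illustrative sketch (excluded from the build): XTS keys are double
 * length, so a 32-byte buffer yields AES-128-XTS and a 64-byte buffer
 * yields AES-256-XTS. The key buffer here is hypothetical. */
#if 0
static int example_xts_setkey(XtsAes* xaes, const byte* key64)
{
    /* first 32 bytes: data-unit key; last 32 bytes: tweak key */
    int ret = wc_AesXtsSetKey(xaes, key64, 64, AES_ENCRYPTION, NULL,
                              INVALID_DEVID);
    if (ret != 0)
        return ret;
    /* ... use the keys, then release both internal Aes objects ... */
    return wc_AesXtsFree(xaes);
}
#endif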
/* This is used to free up resources used by Aes structs
 *
 * aes AES keys to free
 *
 * return 0 on success
 */
int wc_AesXtsFree(XtsAes* aes)
{
    if (aes != NULL) {
        wc_AesFree(&aes->aes);
        wc_AesFree(&aes->tweak);
    }

    return 0;
}

/* Same process as wc_AesXtsEncrypt but uses a word64 type as the tweak value
 * instead of a byte array. This just converts the word64 to a byte array and
 * calls wc_AesXtsEncrypt.
 *
 * aes    AES keys to use for block encrypt/decrypt
 * out    output buffer to hold cipher text
 * in     input plain text buffer to encrypt
 * sz     size of both out and in buffers
 * sector value to use for tweak
 *
 * returns 0 on success
 */
int wc_AesXtsEncryptSector(XtsAes* aes, byte* out, const byte* in,
                           word32 sz, word64 sector)
{
    byte* pt;
    byte  i[AES_BLOCK_SIZE];

    XMEMSET(i, 0, AES_BLOCK_SIZE);
#ifdef BIG_ENDIAN_ORDER
    sector = ByteReverseWord64(sector);
#endif
    pt = (byte*)&sector;
    XMEMCPY(i, pt, sizeof(word64));

    return wc_AesXtsEncrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
}

/* Same process as wc_AesXtsDecrypt but uses a word64 type as the tweak value
 * instead of a byte array. This just converts the word64 to a byte array.
 *
 * aes    AES keys to use for block encrypt/decrypt
 * out    output buffer to hold plain text
 * in     input cipher text buffer to decrypt
 * sz     size of both out and in buffers
 * sector value to use for tweak
 *
 * returns 0 on success
 */
int wc_AesXtsDecryptSector(XtsAes* aes, byte* out, const byte* in, word32 sz,
                           word64 sector)
{
    byte* pt;
    byte  i[AES_BLOCK_SIZE];

    XMEMSET(i, 0, AES_BLOCK_SIZE);
#ifdef BIG_ENDIAN_ORDER
    sector = ByteReverseWord64(sector);
#endif
    pt = (byte*)&sector;
    XMEMCPY(i, pt, sizeof(word64));

    return wc_AesXtsDecrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
}
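
/* Illustrative sketch (excluded from the build): sector-addressed XTS round
 * trip. Separate encrypt- and decrypt-direction XtsAes objects are assumed
 * to have been set up with the same key material; the sector number is
 * hypothetical. */
#if 0
static int example_xts_sector(XtsAes* enc, XtsAes* dec)
{
    byte sectorData[512] = {0};
    byte cipher[512];
    byte plain[512];
    word64 sector = 42; /* hypothetical logical sector number */
    int ret = wc_AesXtsEncryptSector(enc, cipher, sectorData,
                                     sizeof(sectorData), sector);
    if (ret == 0)
        ret = wc_AesXtsDecryptSector(dec, plain, cipher,
                                     sizeof(cipher), sector);
    return ret;
}
#endif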
#ifdef HAVE_AES_ECB
/* helper function for encrypting / decrypting full buffer at once */
static WARN_UNUSED_RESULT int _AesXtsHelper(
    Aes* aes, byte* out, const byte* in, word32 sz, int dir)
{
    word32 outSz   = sz;
    word32 totalSz = (sz / AES_BLOCK_SIZE) * AES_BLOCK_SIZE; /* total bytes */
    byte*  pt      = out;

    outSz -= AES_BLOCK_SIZE;

    while (outSz > 0) {
        word32 j;
        byte carry = 0;

        /* multiply by shift left and propagate carry */
        for (j = 0; j < AES_BLOCK_SIZE && outSz > 0; j++, outSz--) {
            byte tmpC;

            tmpC  = (pt[j] >> 7) & 0x01;
            pt[j+AES_BLOCK_SIZE] = ((pt[j] << 1) + carry) & 0xFF;
            carry = tmpC;
        }
        if (carry) {
            pt[AES_BLOCK_SIZE] ^= GF_XTS;
        }

        pt += AES_BLOCK_SIZE;
    }

    xorbuf(out, in, totalSz);
    if (dir == AES_ENCRYPTION) {
        return _AesEcbEncrypt(aes, out, out, totalSz);
    }
    else {
        return _AesEcbDecrypt(aes, out, out, totalSz);
    }
}
#endif /* HAVE_AES_ECB */
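
/* Illustrative sketch (excluded from the build): the tweak update used
 * throughout XTS below is a doubling in GF(2^128) with this byte order:
 * shift the 16 bytes left by one bit and, on carry out of the top bit,
 * fold in the reduction constant GF_XTS. */
#if 0
static void example_xts_gf_double(byte tweak[AES_BLOCK_SIZE])
{
    word32 j;
    byte carry = 0;
    for (j = 0; j < AES_BLOCK_SIZE; j++) {
        byte msb = (tweak[j] >> 7) & 0x01; /* bit shifted out of this byte */
        tweak[j] = (byte)((tweak[j] << 1) + carry);
        carry = msb;
    }
    if (carry)
        tweak[0] ^= GF_XTS; /* reduce modulo the XTS polynomial */
}
#endif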
/* AES with XTS mode: XEX encryption with a tweak and ciphertext stealing.
 *
 * xaes AES keys to use for block encrypt/decrypt
 * out  output buffer to hold cipher text
 * in   input plain text buffer to encrypt
 * sz   size of both out and in buffers
 * i    value to use for tweak
 * iSz  size of i buffer; should always be AES_BLOCK_SIZE, but taking this
 *      input adds a sanity check on how the user calls the function
 *
 * returns 0 on success
 */
/* Software AES - XTS Encrypt */
int wc_AesXtsEncrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
                     const byte* i, word32 iSz)
{
    int ret = 0;
    word32 blocks = (sz / AES_BLOCK_SIZE);
    Aes *aes, *tweak;

    if (xaes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    aes   = &xaes->aes;
    tweak = &xaes->tweak;

    if (iSz < AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

    if (blocks > 0) {
        byte tmp[AES_BLOCK_SIZE];

        XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
                                          * key setup passed to encrypt direct*/

        SAVE_VECTOR_REGISTERS(return _svr_ret;);

        ret = wc_AesEncryptDirect(tweak, tmp, i);
        if (ret != 0) {
            RESTORE_VECTOR_REGISTERS();
            return ret;
        }

#ifdef HAVE_AES_ECB
        /* encrypt all of buffer at once when possible */
        if (in != out) { /* can not handle inline */
            XMEMCPY(out, tmp, AES_BLOCK_SIZE);
            if ((ret = _AesXtsHelper(aes, out, in, sz, AES_ENCRYPTION)) != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
        }
#endif

        while (blocks > 0) {
            word32 j;
            byte carry = 0;

#ifdef HAVE_AES_ECB
            if (in == out)
#endif
            { /* check for if inline */
                byte buf[AES_BLOCK_SIZE];

                XMEMCPY(buf, in, AES_BLOCK_SIZE);
                xorbuf(buf, tmp, AES_BLOCK_SIZE);
                ret = wc_AesEncryptDirect(aes, out, buf);
                if (ret != 0) {
                    RESTORE_VECTOR_REGISTERS();
                    return ret;
                }
            }
            xorbuf(out, tmp, AES_BLOCK_SIZE);

            /* multiply by shift left and propagate carry */
            for (j = 0; j < AES_BLOCK_SIZE; j++) {
                byte tmpC;

                tmpC   = (tmp[j] >> 7) & 0x01;
                tmp[j] = ((tmp[j] << 1) + carry) & 0xFF;
                carry  = tmpC;
            }
            if (carry) {
                tmp[0] ^= GF_XTS;
            }

            in     += AES_BLOCK_SIZE;
            out    += AES_BLOCK_SIZE;
            sz     -= AES_BLOCK_SIZE;
            blocks--;
        }

        /* stealing operation of XTS to handle left overs */
        if (sz > 0) {
            byte buf[AES_BLOCK_SIZE];

            XMEMCPY(buf, out - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
            if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
                RESTORE_VECTOR_REGISTERS();
                return BUFFER_E;
            }
            XMEMCPY(out, buf, sz);
            XMEMCPY(buf, in, sz);

            xorbuf(buf, tmp, AES_BLOCK_SIZE);
            ret = wc_AesEncryptDirect(aes, out - AES_BLOCK_SIZE, buf);
            if (ret == 0)
                xorbuf(out - AES_BLOCK_SIZE, tmp, AES_BLOCK_SIZE);
        }
        RESTORE_VECTOR_REGISTERS();
    }
    else {
        WOLFSSL_MSG("Plain text input too small for encryption");
        return BAD_FUNC_ARG;
    }

    return ret;
}
/* Same process as encryption but Aes key is AES_DECRYPTION type.
 *
 * xaes AES keys to use for block encrypt/decrypt
 * out  output buffer to hold plain text
 * in   input cipher text buffer to decrypt
 * sz   size of both out and in buffers
 * i    value to use for tweak
 * iSz  size of i buffer; should always be AES_BLOCK_SIZE, but taking this
 *      input adds a sanity check on how the user calls the function
 *
 * returns 0 on success
 */
/* Software AES - XTS Decrypt */
  10706. int wc_AesXtsDecrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
  10707. const byte* i, word32 iSz)
  10708. {
  10709. int ret = 0;
  10710. word32 blocks = (sz / AES_BLOCK_SIZE);
  10711. Aes *aes, *tweak;
  10712. if (xaes == NULL || out == NULL || in == NULL) {
  10713. return BAD_FUNC_ARG;
  10714. }
  10715. aes = &xaes->aes;
  10716. tweak = &xaes->tweak;
  10717. if (iSz < AES_BLOCK_SIZE) {
  10718. return BAD_FUNC_ARG;
  10719. }
  10720. if (blocks > 0) {
  10721. word32 j;
  10722. byte carry = 0;
  10723. byte tmp[AES_BLOCK_SIZE];
  10724. byte stl = (sz % AES_BLOCK_SIZE);
  10725. XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
  10726. * key setup passed to decrypt direct*/
  10727. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  10728. ret = wc_AesEncryptDirect(tweak, tmp, i);
  10729. if (ret != 0) {
  10730. RESTORE_VECTOR_REGISTERS();
  10731. return ret;
  10732. }
  10733. /* if Stealing then break out of loop one block early to handle special
  10734. * case */
  10735. if (stl > 0) {
  10736. blocks--;
  10737. }

#ifdef HAVE_AES_ECB
        /* decrypt all of buffer at once when possible */
        if (in != out) { /* bulk ECB path cannot be used in place */
            XMEMCPY(out, tmp, AES_BLOCK_SIZE);
            if ((ret = _AesXtsHelper(aes, out, in, sz, AES_DECRYPTION)) != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
        }
#endif

        while (blocks > 0) {
#ifdef HAVE_AES_ECB
            if (in == out)
#endif
            { /* in-place (in == out), or no bulk ECB support */
                byte buf[AES_BLOCK_SIZE];

                XMEMCPY(buf, in, AES_BLOCK_SIZE);
                xorbuf(buf, tmp, AES_BLOCK_SIZE);
                ret = wc_AesDecryptDirect(aes, out, buf);
                if (ret != 0) {
                    RESTORE_VECTOR_REGISTERS();
                    return ret;
                }
            }
            xorbuf(out, tmp, AES_BLOCK_SIZE);

            /* multiply by shift left and propagate carry */
            for (j = 0; j < AES_BLOCK_SIZE; j++) {
                byte tmpC;

                tmpC   = (tmp[j] >> 7) & 0x01;
                tmp[j] = ((tmp[j] << 1) + carry) & 0xFF;
                carry  = tmpC;
            }
            if (carry) {
                tmp[0] ^= GF_XTS;
            }
            carry = 0;

            in     += AES_BLOCK_SIZE;
            out    += AES_BLOCK_SIZE;
            sz     -= AES_BLOCK_SIZE;
            blocks--;
        }

        /* ciphertext stealing operation of XTS to handle leftovers */
        if (sz >= AES_BLOCK_SIZE) {
            byte buf[AES_BLOCK_SIZE];
            byte tmp2[AES_BLOCK_SIZE];

            /* multiply by shift left and propagate carry */
            for (j = 0; j < AES_BLOCK_SIZE; j++) {
                byte tmpC;

                tmpC    = (tmp[j] >> 7) & 0x01;
                tmp2[j] = ((tmp[j] << 1) + carry) & 0xFF;
                carry   = tmpC;
            }
            if (carry) {
                tmp2[0] ^= GF_XTS;
            }
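            /* (tmp2 now holds the tweak advanced one more step; it decrypts
             * the last full ciphertext block, while tmp is kept for the
             * reconstructed stolen block below.) */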

            XMEMCPY(buf, in, AES_BLOCK_SIZE);
            xorbuf(buf, tmp2, AES_BLOCK_SIZE);
            ret = wc_AesDecryptDirect(aes, out, buf);
            if (ret != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
            xorbuf(out, tmp2, AES_BLOCK_SIZE);

            /* tmp2 holds partial | last */
            XMEMCPY(tmp2, out, AES_BLOCK_SIZE);
            in  += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
            sz  -= AES_BLOCK_SIZE;

            /* Make buffer with end of cipher text | last */
            XMEMCPY(buf, tmp2, AES_BLOCK_SIZE);
            if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
                RESTORE_VECTOR_REGISTERS();
                return BUFFER_E;
            }
            XMEMCPY(buf, in, sz);
            XMEMCPY(out, tmp2, sz);

            xorbuf(buf, tmp, AES_BLOCK_SIZE);
            ret = wc_AesDecryptDirect(aes, tmp2, buf);
            if (ret != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
            xorbuf(tmp2, tmp, AES_BLOCK_SIZE);
            XMEMCPY(out - AES_BLOCK_SIZE, tmp2, AES_BLOCK_SIZE);
        }
        RESTORE_VECTOR_REGISTERS();
    }
    else {
        WOLFSSL_MSG("Cipher text input too small for decryption");
        return BAD_FUNC_ARG;
    }

    return ret;
}
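
/* Example use (illustrative sketch mirroring the encrypt example above; the
 * same tweak must be supplied and the key schedule set up for decryption):
 *
 *     wc_AesXtsSetKey(&xts, key, 32, AES_DECRYPTION, NULL, INVALID_DEVID);
 *     wc_AesXtsDecrypt(&xts, plain, cipher, SECTOR_SZ, tweak, sizeof(tweak));
 *     wc_AesXtsFree(&xts);
 */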

#endif /* WOLFSSL_AES_XTS */

#ifdef WOLFSSL_AES_SIV

/*
 * See RFC 5297 Section 2.4.
 */
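/* (S2V summary: D = CMAC(K, 0^128); for each associated-data string and the
 * nonce, D = dbl(D) XOR CMAC(K, Si). If the plaintext is at least one block
 * long, the result is the CMAC of the plaintext with D XORed into its last
 * 16 bytes ("xorend"); otherwise it is CMAC(K, dbl(D) XOR pad(plaintext)).
 * ShiftAndXorRb performs the dbl() doubling step.) */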
static WARN_UNUSED_RESULT int S2V(
    const byte* key, word32 keySz, const byte* assoc, word32 assocSz,
    const byte* nonce, word32 nonceSz, const byte* data,
    word32 dataSz, byte* out)
{
#ifdef WOLFSSL_SMALL_STACK
    byte* tmp[3] = {NULL, NULL, NULL};
    int i;
    Cmac* cmac;
#else
    byte tmp[3][AES_BLOCK_SIZE];
    Cmac cmac[1];
#endif
    word32 macSz = AES_BLOCK_SIZE;
    int ret = 0;
    word32 zeroBytes;

#ifdef WOLFSSL_SMALL_STACK
    for (i = 0; i < 3; ++i) {
        tmp[i] = (byte*)XMALLOC(AES_BLOCK_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER);
        if (tmp[i] == NULL) {
            ret = MEMORY_E;
            break;
        }
    }
    if (ret == 0)
#endif
    {
        XMEMSET(tmp[1], 0, AES_BLOCK_SIZE);
        XMEMSET(tmp[2], 0, AES_BLOCK_SIZE);

        ret = wc_AesCmacGenerate(tmp[0], &macSz, tmp[1], AES_BLOCK_SIZE,
                                 key, keySz);
        if (ret == 0) {
            ShiftAndXorRb(tmp[1], tmp[0]);
            ret = wc_AesCmacGenerate(tmp[0], &macSz, assoc, assocSz, key,
                                     keySz);
            if (ret == 0) {
                xorbuf(tmp[1], tmp[0], AES_BLOCK_SIZE);
            }
        }
    }

    if (ret == 0) {
        if (nonceSz > 0) {
            ShiftAndXorRb(tmp[0], tmp[1]);
            ret = wc_AesCmacGenerate(tmp[1], &macSz, nonce, nonceSz, key,
                                     keySz);
            if (ret == 0) {
                xorbuf(tmp[0], tmp[1], AES_BLOCK_SIZE);
            }
        }
        else {
            XMEMCPY(tmp[0], tmp[1], AES_BLOCK_SIZE);
        }
    }

    if (ret == 0) {
        if (dataSz >= AES_BLOCK_SIZE) {
#ifdef WOLFSSL_SMALL_STACK
            cmac = (Cmac*)XMALLOC(sizeof(Cmac), NULL, DYNAMIC_TYPE_CMAC);
            if (cmac == NULL) {
                ret = MEMORY_E;
            }
            if (ret == 0)
#endif
            {
#ifdef WOLFSSL_CHECK_MEM_ZERO
                /* Aes part is checked by wc_AesFree. */
                wc_MemZero_Add("wc_AesCmacGenerate cmac",
                               ((unsigned char *)cmac) + sizeof(Aes),
                               sizeof(Cmac) - sizeof(Aes));
#endif
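                /* RFC 5297 "xorend": CMAC the data with D XORed into its
                 * final block, done here without modifying the caller's
                 * buffer. */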
                xorbuf(tmp[0], data + (dataSz - AES_BLOCK_SIZE),
                       AES_BLOCK_SIZE);
                ret = wc_InitCmac(cmac, key, keySz, WC_CMAC_AES, NULL);
                if (ret == 0) {
                    ret = wc_CmacUpdate(cmac, data, dataSz - AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    ret = wc_CmacUpdate(cmac, tmp[0], AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    ret = wc_CmacFinal(cmac, out, &macSz);
                }
            }
#ifdef WOLFSSL_SMALL_STACK
            if (cmac != NULL) {
                XFREE(cmac, NULL, DYNAMIC_TYPE_CMAC);
            }
#elif defined(WOLFSSL_CHECK_MEM_ZERO)
            wc_MemZero_Check(cmac, sizeof(Cmac));
#endif
        }
        else {
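            /* Data is shorter than one block: apply the RFC 5297 10*
             * padding (0x80 then zeros) and XOR with dbl(D) before the
             * final CMAC. */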
            XMEMCPY(tmp[2], data, dataSz);
            tmp[2][dataSz] |= 0x80;
            zeroBytes = AES_BLOCK_SIZE - (dataSz + 1);
            if (zeroBytes != 0) {
                XMEMSET(tmp[2] + dataSz + 1, 0, zeroBytes);
            }
            ShiftAndXorRb(tmp[1], tmp[0]);
            xorbuf(tmp[1], tmp[2], AES_BLOCK_SIZE);
            ret = wc_AesCmacGenerate(out, &macSz, tmp[1], AES_BLOCK_SIZE, key,
                                     keySz);
        }
    }

#ifdef WOLFSSL_SMALL_STACK
    for (i = 0; i < 3; ++i) {
        if (tmp[i] != NULL) {
            XFREE(tmp[i], NULL, DYNAMIC_TYPE_TMP_BUFFER);
        }
    }
#endif

    return ret;
}

static WARN_UNUSED_RESULT int AesSivCipher(
    const byte* key, word32 keySz, const byte* assoc,
    word32 assocSz, const byte* nonce, word32 nonceSz,
    const byte* data, word32 dataSz, byte* siv, byte* out,
    int enc)
{
    int ret = 0;
#ifdef WOLFSSL_SMALL_STACK
    Aes* aes = NULL;
#else
    Aes aes[1];
#endif
    byte sivTmp[AES_BLOCK_SIZE];

    if (key == NULL || siv == NULL || out == NULL) {
        WOLFSSL_MSG("Bad parameter");
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0 && keySz != 32 && keySz != 48 && keySz != 64) {
        WOLFSSL_MSG("Bad key size. Must be 256, 384, or 512 bits.");
        ret = BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if (ret == 0) {
        aes = (Aes*)XMALLOC(sizeof(Aes), NULL, DYNAMIC_TYPE_AES);
        if (aes == NULL) {
            ret = MEMORY_E;
        }
    }
#endif

    if (ret == 0) {
        if (enc == 1) {
            ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, data,
                      dataSz, sivTmp);
            if (ret != 0) {
                WOLFSSL_MSG("S2V failed.");
            }
            else {
                XMEMCPY(siv, sivTmp, AES_BLOCK_SIZE);
            }
        }
        else {
            XMEMCPY(sivTmp, siv, AES_BLOCK_SIZE);
        }
    }

    if (ret == 0) {
        ret = wc_AesInit(aes, NULL, INVALID_DEVID);
        if (ret != 0) {
            WOLFSSL_MSG("Failed to initialize AES object.");
        }
    }

    if (ret == 0 && dataSz > 0) {
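        /* Per RFC 5297, clear the MSB of each of the last two 32-bit words
         * of the SIV before using it as the AES-CTR initial counter. */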
        sivTmp[12] &= 0x7f;
        sivTmp[8]  &= 0x7f;
        ret = wc_AesSetKey(aes, key + keySz / 2, keySz / 2, sivTmp,
                           AES_ENCRYPTION);
        if (ret != 0) {
            WOLFSSL_MSG("Failed to set key for AES-CTR.");
        }
        else {
            ret = wc_AesCtrEncrypt(aes, out, data, dataSz);
            if (ret != 0) {
                WOLFSSL_MSG("AES-CTR encryption failed.");
            }
        }
    }

    if (ret == 0 && enc == 0) {
        ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, out, dataSz,
                  sivTmp);
        if (ret != 0) {
            WOLFSSL_MSG("S2V failed.");
        }

        if (XMEMCMP(siv, sivTmp, AES_BLOCK_SIZE) != 0) {
            WOLFSSL_MSG("Computed SIV doesn't match received SIV.");
            ret = AES_SIV_AUTH_E;
        }
    }

    wc_AesFree(aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}

/*
 * See RFC 5297 Section 2.6.
 */
int wc_AesSivEncrypt(const byte* key, word32 keySz, const byte* assoc,
                     word32 assocSz, const byte* nonce, word32 nonceSz,
                     const byte* in, word32 inSz, byte* siv, byte* out)
{
    return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
                        siv, out, 1);
}

/*
 * See RFC 5297 Section 2.7.
 */
int wc_AesSivDecrypt(const byte* key, word32 keySz, const byte* assoc,
                     word32 assocSz, const byte* nonce, word32 nonceSz,
                     const byte* in, word32 inSz, byte* siv, byte* out)
{
    return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
                        siv, out, 0);
}
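
/* Example use (illustrative sketch only; buffers are placeholders and error
 * checks are omitted; the key is 32, 48, or 64 bytes, split between the S2V
 * and CTR halves):
 *
 *     byte siv[AES_BLOCK_SIZE];    // receives the synthetic IV
 *     byte cipher[sizeof(plain)];
 *
 *     wc_AesSivEncrypt(key, 32, assoc, assocSz, nonce, nonceSz,
 *                      plain, sizeof(plain), siv, cipher);
 *     // transmit siv || cipher; the decrypt call verifies siv
 *     wc_AesSivDecrypt(key, 32, assoc, assocSz, nonce, nonceSz,
 *                      cipher, sizeof(plain), siv, plain);
 */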

#endif /* WOLFSSL_AES_SIV */

#endif /* HAVE_FIPS */
#endif /* !NO_AES */