aes.c 375 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589
  1. /* aes.c
  2. *
  3. * Copyright (C) 2006-2023 wolfSSL Inc.
  4. *
  5. * This file is part of wolfSSL.
  6. *
  7. * wolfSSL is free software; you can redistribute it and/or modify
  8. * it under the terms of the GNU General Public License as published by
  9. * the Free Software Foundation; either version 2 of the License, or
  10. * (at your option) any later version.
  11. *
  12. * wolfSSL is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. * GNU General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU General Public License
  18. * along with this program; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
  20. */
  21. /*
  22. DESCRIPTION
  23. This library provides the interfaces to the Advanced Encryption Standard (AES)
  24. for encrypting and decrypting data. AES is the standard known for a symmetric
  25. block cipher mechanism that uses n-bit binary string parameter key with 128-bits,
  26. 192-bits, and 256-bits of key sizes.
  27. */
  28. #ifdef HAVE_CONFIG_H
  29. #include <config.h>
  30. #endif
  31. #include <wolfssl/wolfcrypt/settings.h>
  32. #include <wolfssl/wolfcrypt/error-crypt.h>
  33. #if !defined(NO_AES)
  34. /* Tip: Locate the software cipher modes by searching for "Software AES" */
  35. #if defined(HAVE_FIPS) && \
  36. defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
  37. /* set NO_WRAPPERS before headers, use direct internal f()s not wrappers */
  38. #define FIPS_NO_WRAPPERS
  39. #ifdef USE_WINDOWS_API
  40. #pragma code_seg(".fipsA$g")
  41. #pragma const_seg(".fipsB$g")
  42. #endif
  43. #endif
  44. #include <wolfssl/wolfcrypt/aes.h>
  45. #ifdef WOLFSSL_AESNI
  46. #include <wmmintrin.h>
  47. #include <emmintrin.h>
  48. #include <smmintrin.h>
  49. #endif /* WOLFSSL_AESNI */
  50. #include <wolfssl/wolfcrypt/cpuid.h>
  51. #ifdef WOLF_CRYPTO_CB
  52. #include <wolfssl/wolfcrypt/cryptocb.h>
  53. #endif
  54. #ifdef WOLFSSL_SECO_CAAM
  55. #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
  56. #endif
  57. #ifdef WOLFSSL_IMXRT_DCP
  58. #include <wolfssl/wolfcrypt/port/nxp/dcp_port.h>
  59. #endif
  60. #if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
  61. #include <wolfssl/wolfcrypt/port/nxp/se050_port.h>
  62. #endif
  63. #ifdef WOLFSSL_AES_SIV
  64. #include <wolfssl/wolfcrypt/cmac.h>
  65. #endif
  66. #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  67. #include <wolfssl/wolfcrypt/port/psa/psa.h>
  68. #endif
  69. /* fips wrapper calls, user can call direct */
  70. #if defined(HAVE_FIPS) && \
  71. (!defined(HAVE_FIPS_VERSION) || (HAVE_FIPS_VERSION < 2))
  72. int wc_AesSetKey(Aes* aes, const byte* key, word32 len, const byte* iv,
  73. int dir)
  74. {
  75. if (aes == NULL || !( (len == 16) || (len == 24) || (len == 32)) ) {
  76. return BAD_FUNC_ARG;
  77. }
  78. return AesSetKey_fips(aes, key, len, iv, dir);
  79. }
  80. int wc_AesSetIV(Aes* aes, const byte* iv)
  81. {
  82. if (aes == NULL) {
  83. return BAD_FUNC_ARG;
  84. }
  85. return AesSetIV_fips(aes, iv);
  86. }
  87. #ifdef HAVE_AES_CBC
  88. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  89. {
  90. if (aes == NULL || out == NULL || in == NULL) {
  91. return BAD_FUNC_ARG;
  92. }
  93. return AesCbcEncrypt_fips(aes, out, in, sz);
  94. }
  95. #ifdef HAVE_AES_DECRYPT
  96. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  97. {
  98. if (aes == NULL || out == NULL || in == NULL
  99. || sz % AES_BLOCK_SIZE != 0) {
  100. return BAD_FUNC_ARG;
  101. }
  102. return AesCbcDecrypt_fips(aes, out, in, sz);
  103. }
  104. #endif /* HAVE_AES_DECRYPT */
  105. #endif /* HAVE_AES_CBC */
  106. /* AES-CTR */
  107. #ifdef WOLFSSL_AES_COUNTER
  108. int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  109. {
  110. if (aes == NULL || out == NULL || in == NULL) {
  111. return BAD_FUNC_ARG;
  112. }
  113. return AesCtrEncrypt(aes, out, in, sz);
  114. }
  115. #endif
  116. /* AES-DIRECT */
  117. #if defined(WOLFSSL_AES_DIRECT)
  118. void wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in)
  119. {
  120. AesEncryptDirect(aes, out, in);
  121. }
  122. #ifdef HAVE_AES_DECRYPT
  123. void wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in)
  124. {
  125. AesDecryptDirect(aes, out, in);
  126. }
  127. #endif /* HAVE_AES_DECRYPT */
  128. int wc_AesSetKeyDirect(Aes* aes, const byte* key, word32 len,
  129. const byte* iv, int dir)
  130. {
  131. return AesSetKeyDirect(aes, key, len, iv, dir);
  132. }
  133. #endif /* WOLFSSL_AES_DIRECT */
  134. /* AES-GCM */
  135. #ifdef HAVE_AESGCM
  136. int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len)
  137. {
  138. if (aes == NULL || !( (len == 16) || (len == 24) || (len == 32)) ) {
  139. return BAD_FUNC_ARG;
  140. }
  141. return AesGcmSetKey_fips(aes, key, len);
  142. }
  143. int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  144. const byte* iv, word32 ivSz,
  145. byte* authTag, word32 authTagSz,
  146. const byte* authIn, word32 authInSz)
  147. {
  148. if (aes == NULL || authTagSz > AES_BLOCK_SIZE ||
  149. authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ ||
  150. ivSz == 0 || ivSz > AES_BLOCK_SIZE) {
  151. return BAD_FUNC_ARG;
  152. }
  153. return AesGcmEncrypt_fips(aes, out, in, sz, iv, ivSz, authTag,
  154. authTagSz, authIn, authInSz);
  155. }
  156. #ifdef HAVE_AES_DECRYPT
  157. int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  158. const byte* iv, word32 ivSz,
  159. const byte* authTag, word32 authTagSz,
  160. const byte* authIn, word32 authInSz)
  161. {
  162. if (aes == NULL || out == NULL || in == NULL || iv == NULL
  163. || authTag == NULL || authTagSz > AES_BLOCK_SIZE ||
  164. ivSz == 0 || ivSz > AES_BLOCK_SIZE) {
  165. return BAD_FUNC_ARG;
  166. }
  167. return AesGcmDecrypt_fips(aes, out, in, sz, iv, ivSz, authTag,
  168. authTagSz, authIn, authInSz);
  169. }
  170. #endif /* HAVE_AES_DECRYPT */
  171. int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len)
  172. {
  173. if (gmac == NULL || key == NULL || !((len == 16) ||
  174. (len == 24) || (len == 32)) ) {
  175. return BAD_FUNC_ARG;
  176. }
  177. return GmacSetKey(gmac, key, len);
  178. }
  179. int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz,
  180. const byte* authIn, word32 authInSz,
  181. byte* authTag, word32 authTagSz)
  182. {
  183. if (gmac == NULL || authTagSz > AES_BLOCK_SIZE ||
  184. authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
  185. return BAD_FUNC_ARG;
  186. }
  187. return GmacUpdate(gmac, iv, ivSz, authIn, authInSz,
  188. authTag, authTagSz);
  189. }
  190. #endif /* HAVE_AESGCM */
  191. /* AES-CCM */
  192. #if defined(HAVE_AESCCM) && \
  193. defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
  194. int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz)
  195. {
  196. return AesCcmSetKey(aes, key, keySz);
  197. }
  198. int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
  199. const byte* nonce, word32 nonceSz,
  200. byte* authTag, word32 authTagSz,
  201. const byte* authIn, word32 authInSz)
  202. {
  203. /* sanity check on arguments */
  204. if (aes == NULL || out == NULL || in == NULL || nonce == NULL
  205. || authTag == NULL || nonceSz < 7 || nonceSz > 13)
  206. return BAD_FUNC_ARG;
  207. AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz, authTag,
  208. authTagSz, authIn, authInSz);
  209. return 0;
  210. }
  211. #ifdef HAVE_AES_DECRYPT
  212. int wc_AesCcmDecrypt(Aes* aes, byte* out,
  213. const byte* in, word32 inSz,
  214. const byte* nonce, word32 nonceSz,
  215. const byte* authTag, word32 authTagSz,
  216. const byte* authIn, word32 authInSz)
  217. {
  218. if (aes == NULL || out == NULL || in == NULL || nonce == NULL
  219. || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
  220. return BAD_FUNC_ARG;
  221. }
  222. return AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz,
  223. authTag, authTagSz, authIn, authInSz);
  224. }
  225. #endif /* HAVE_AES_DECRYPT */
  226. #endif /* HAVE_AESCCM && HAVE_FIPS_VERSION 2 */
  227. int wc_AesInit(Aes* aes, void* h, int i)
  228. {
  229. if (aes == NULL)
  230. return BAD_FUNC_ARG;
  231. (void)h;
  232. (void)i;
  233. /* FIPS doesn't support */
  234. #ifdef WOLFSSL_KCAPI_AES
  235. return AesInit(aes, h, i);
  236. #else
  237. return 0;
  238. #endif
  239. }
  240. void wc_AesFree(Aes* aes)
  241. {
  242. (void)aes;
  243. /* FIPS doesn't support */
  244. #ifdef WOLFSSL_KCAPI_AES
  245. AesFree(aes);
  246. #endif
  247. }
  248. #else /* else build without fips, or for FIPS v2+ */
  249. #if defined(WOLFSSL_TI_CRYPT)
  250. #include <wolfcrypt/src/port/ti/ti-aes.c>
  251. #else
  252. #include <wolfssl/wolfcrypt/logging.h>
  253. #ifdef NO_INLINE
  254. #include <wolfssl/wolfcrypt/misc.h>
  255. #else
  256. #define WOLFSSL_MISC_INCLUDED
  257. #include <wolfcrypt/src/misc.c>
  258. #endif
  259. #ifndef WOLFSSL_ARMASM
  260. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  261. /* case of possibly not using hardware acceleration for AES but using key
  262. blobs */
  263. #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
  264. #endif
  265. #ifdef DEBUG_AESNI
  266. #include <stdio.h>
  267. #endif
  268. #ifdef _MSC_VER
  269. /* 4127 warning constant while(1) */
  270. #pragma warning(disable: 4127)
  271. #endif
  272. /* Define AES implementation includes and functions */
  273. #if defined(STM32_CRYPTO)
  274. /* STM32F2/F4/F7/L4/L5/H7/WB55 hardware AES support for ECB, CBC, CTR and GCM modes */
  275. #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESGCM) || defined(HAVE_AESCCM)
/* Encrypt a single 16-byte block in AES-ECB mode using the STM32 CRYP
 * hardware peripheral. Two back-ends are supported: the CubeMX HAL
 * (WOLFSSL_STM32_CUBEMX) and the Standard Peripheral Library.
 * Returns 0 on success, WC_TIMEOUT_E if the HAL call does not return
 * HAL_OK, or an error from init/mutex acquisition. */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
Aes* aes, const byte* inBlock, byte* outBlock)
{
int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
CRYP_HandleTypeDef hcryp;
#else
CRYP_InitTypeDef cryptInit;
CRYP_KeyInitTypeDef keyInit;
#endif
#ifdef WOLFSSL_STM32_CUBEMX
/* populate the HAL handle (key, key size) from the Aes context */
ret = wc_Stm32_Aes_Init(aes, &hcryp);
if (ret != 0)
return ret;
/* serialize access to the single hardware crypto engine */
ret = wolfSSL_CryptHwMutexLock();
if (ret != 0)
return ret;
#if defined(STM32_HAL_V2)
hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
/* AES-only peripheral: select encrypt direction and ECB explicitly */
hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB;
hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
HAL_CRYP_Init(&hcryp);
/* run one block through the engine; API differs per HAL generation */
#if defined(STM32_HAL_V2)
ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
(uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
outBlock, STM32_HAL_TIMEOUT);
#else
ret = HAL_CRYP_AESECB_Encrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
outBlock, STM32_HAL_TIMEOUT);
#endif
/* any non-HAL_OK status (error/busy/timeout) is reported as timeout */
if (ret != HAL_OK) {
ret = WC_TIMEOUT_E;
}
HAL_CRYP_DeInit(&hcryp);
#else /* Standard Peripheral Library */
ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
if (ret != 0)
return ret;
ret = wolfSSL_CryptHwMutexLock();
if (ret != 0)
return ret;
/* reset registers to their default values */
CRYP_DeInit();
/* setup key */
CRYP_KeyInit(&keyInit);
/* set direction and mode */
cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
CRYP_Init(&cryptInit);
/* enable crypto processor */
CRYP_Cmd(ENABLE);
/* flush IN/OUT FIFOs */
CRYP_FIFOFlush();
/* feed the block into the IN FIFO as four 32-bit words
 * NOTE(review): word-wise access assumes inBlock is 4-byte aligned
 * and relies on peripheral byte ordering -- confirm for new targets */
CRYP_DataIn(*(uint32_t*)&inBlock[0]);
CRYP_DataIn(*(uint32_t*)&inBlock[4]);
CRYP_DataIn(*(uint32_t*)&inBlock[8]);
CRYP_DataIn(*(uint32_t*)&inBlock[12]);
/* wait until the complete message has been processed */
while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
/* drain the OUT FIFO into the caller's buffer */
*(uint32_t*)&outBlock[0] = CRYP_DataOut();
*(uint32_t*)&outBlock[4] = CRYP_DataOut();
*(uint32_t*)&outBlock[8] = CRYP_DataOut();
*(uint32_t*)&outBlock[12] = CRYP_DataOut();
/* disable crypto processor */
CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */
wolfSSL_CryptHwMutexUnLock();
wc_Stm32_Aes_Cleanup();
return ret;
}
  351. #endif /* WOLFSSL_AES_DIRECT || HAVE_AESGCM || HAVE_AESCCM */
  352. #ifdef HAVE_AES_DECRYPT
  353. #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESCCM)
/* Decrypt a single 16-byte block in AES-ECB mode using the STM32 CRYP
 * hardware peripheral. Mirrors wc_AesEncrypt above; the Standard
 * Peripheral Library path additionally runs a key-derivation phase
 * first, because the peripheral needs the expanded decryption key.
 * Returns 0 on success, WC_TIMEOUT_E on a non-HAL_OK HAL status, or an
 * error from init/mutex acquisition. */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
Aes* aes, const byte* inBlock, byte* outBlock)
{
int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
CRYP_HandleTypeDef hcryp;
#else
CRYP_InitTypeDef cryptInit;
CRYP_KeyInitTypeDef keyInit;
#endif
#ifdef WOLFSSL_STM32_CUBEMX
/* populate the HAL handle (key, key size) from the Aes context */
ret = wc_Stm32_Aes_Init(aes, &hcryp);
if (ret != 0)
return ret;
/* serialize access to the single hardware crypto engine */
ret = wolfSSL_CryptHwMutexLock();
if (ret != 0)
return ret;
#if defined(STM32_HAL_V2)
hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
/* derive the decrypt key schedule and decrypt in one operation */
hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB;
hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
HAL_CRYP_Init(&hcryp);
/* run one block through the engine; API differs per HAL generation */
#if defined(STM32_HAL_V2)
ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
(uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
outBlock, STM32_HAL_TIMEOUT);
#else
ret = HAL_CRYP_AESECB_Decrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
outBlock, STM32_HAL_TIMEOUT);
#endif
/* any non-HAL_OK status (error/busy/timeout) is reported as timeout */
if (ret != HAL_OK) {
ret = WC_TIMEOUT_E;
}
HAL_CRYP_DeInit(&hcryp);
#else /* Standard Peripheral Library */
ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
if (ret != 0)
return ret;
ret = wolfSSL_CryptHwMutexLock();
if (ret != 0)
return ret;
/* reset registers to their default values */
CRYP_DeInit();
/* phase 1: load the key and run key derivation (AES_Key mode) */
CRYP_KeyInit(&keyInit);
cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
CRYP_Init(&cryptInit);
/* enable crypto processor */
CRYP_Cmd(ENABLE);
/* wait until decrypt key has been initialized */
while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
/* phase 2: switch to ECB decrypt using the derived key */
cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
CRYP_Init(&cryptInit);
/* enable crypto processor */
CRYP_Cmd(ENABLE);
/* flush IN/OUT FIFOs */
CRYP_FIFOFlush();
/* feed the block into the IN FIFO as four 32-bit words
 * NOTE(review): word-wise access assumes inBlock is 4-byte aligned --
 * confirm for new targets */
CRYP_DataIn(*(uint32_t*)&inBlock[0]);
CRYP_DataIn(*(uint32_t*)&inBlock[4]);
CRYP_DataIn(*(uint32_t*)&inBlock[8]);
CRYP_DataIn(*(uint32_t*)&inBlock[12]);
/* wait until the complete message has been processed */
while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
/* drain the OUT FIFO into the caller's buffer */
*(uint32_t*)&outBlock[0] = CRYP_DataOut();
*(uint32_t*)&outBlock[4] = CRYP_DataOut();
*(uint32_t*)&outBlock[8] = CRYP_DataOut();
*(uint32_t*)&outBlock[12] = CRYP_DataOut();
/* disable crypto processor */
CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */
wolfSSL_CryptHwMutexUnLock();
wc_Stm32_Aes_Cleanup();
return ret;
}
  436. #endif /* WOLFSSL_AES_DIRECT || HAVE_AESCCM */
  437. #endif /* HAVE_AES_DECRYPT */
  438. #elif defined(HAVE_COLDFIRE_SEC)
  439. /* Freescale Coldfire SEC support for CBC mode.
  440. * NOTE: no support for AES-CTR/GCM/CCM/Direct */
  441. #include <wolfssl/wolfcrypt/types.h>
  442. #include "sec.h"
  443. #include "mcf5475_sec.h"
  444. #include "mcf5475_siu.h"
  445. #elif defined(FREESCALE_LTC)
  446. #include "fsl_ltc.h"
  447. #if defined(FREESCALE_LTC_AES_GCM)
  448. #undef NEED_AES_TABLES
  449. #undef GCM_TABLE
  450. #endif
  451. /* if LTC doesn't have GCM, use software with LTC AES ECB mode */
/* Encrypt one AES block (ECB) via the Freescale/NXP LTC peripheral.
 * Returns the error from wc_AesGetKeySize, otherwise 0.
 * NOTE(review): the status of LTC_AES_EncryptEcb and of the mutex lock
 * is not propagated -- if the lock fails the output block is left
 * unwritten yet 0 is still returned; confirm this is intentional. */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
Aes* aes, const byte* inBlock, byte* outBlock)
{
word32 keySize = 0;
byte* key = (byte*)aes->key;
int ret = wc_AesGetKeySize(aes, &keySize);
if (ret != 0)
return ret;
/* serialize access to the LTC hardware engine */
if (wolfSSL_CryptHwMutexLock() == 0) {
LTC_AES_EncryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
key, keySize);
wolfSSL_CryptHwMutexUnLock();
}
return 0;
}
  467. #ifdef HAVE_AES_DECRYPT
/* Decrypt one AES block (ECB) via the Freescale/NXP LTC peripheral.
 * The stored key is the encrypt key (kLTC_EncryptKey); the LTC driver
 * handles the decrypt-key derivation internally.
 * Returns the error from wc_AesGetKeySize, otherwise 0.
 * NOTE(review): as with the encrypt path, the LTC call status and a
 * failed mutex lock are silently ignored -- confirm upstream intent. */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
Aes* aes, const byte* inBlock, byte* outBlock)
{
word32 keySize = 0;
byte* key = (byte*)aes->key;
int ret = wc_AesGetKeySize(aes, &keySize);
if (ret != 0)
return ret;
/* serialize access to the LTC hardware engine */
if (wolfSSL_CryptHwMutexLock() == 0) {
LTC_AES_DecryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
key, keySize, kLTC_EncryptKey);
wolfSSL_CryptHwMutexUnLock();
}
return 0;
}
  483. #endif
  484. #elif defined(FREESCALE_MMCAU)
  485. /* Freescale mmCAU hardware AES support for Direct, CBC, CCM, GCM modes
  486. * through the CAU/mmCAU library. Documentation located in
  487. * ColdFire/ColdFire+ CAU and Kinetis mmCAU Software Library User
  488. * Guide (See note in README). */
  489. #ifdef FREESCALE_MMCAU_CLASSIC
  490. /* MMCAU 1.4 library used with non-KSDK / classic MQX builds */
  491. #include "cau_api.h"
  492. #else
  493. #include "fsl_mmcau.h"
  494. #endif
  495. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  496. Aes* aes, const byte* inBlock, byte* outBlock)
  497. {
  498. if (wolfSSL_CryptHwMutexLock() == 0) {
  499. #ifdef FREESCALE_MMCAU_CLASSIC
  500. if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
  501. WOLFSSL_MSG("Bad cau_aes_encrypt alignment");
  502. return BAD_ALIGN_E;
  503. }
  504. cau_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
  505. #else
  506. MMCAU_AES_EncryptEcb(inBlock, (byte*)aes->key, aes->rounds,
  507. outBlock);
  508. #endif
  509. wolfSSL_CryptHwMutexUnLock();
  510. }
  511. return 0;
  512. }
  513. #ifdef HAVE_AES_DECRYPT
  514. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  515. Aes* aes, const byte* inBlock, byte* outBlock)
  516. {
  517. if (wolfSSL_CryptHwMutexLock() == 0) {
  518. #ifdef FREESCALE_MMCAU_CLASSIC
  519. if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
  520. WOLFSSL_MSG("Bad cau_aes_decrypt alignment");
  521. return BAD_ALIGN_E;
  522. }
  523. cau_aes_decrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
  524. #else
  525. MMCAU_AES_DecryptEcb(inBlock, (byte*)aes->key, aes->rounds,
  526. outBlock);
  527. #endif
  528. wolfSSL_CryptHwMutexUnLock();
  529. }
  530. return 0;
  531. }
  532. #endif /* HAVE_AES_DECRYPT */
  533. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  534. #include <wolfssl/wolfcrypt/port/pic32/pic32mz-crypt.h>
  535. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  536. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  537. Aes* aes, const byte* inBlock, byte* outBlock)
  538. {
  539. /* Thread mutex protection handled in Pic32Crypto */
  540. return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
  541. outBlock, inBlock, AES_BLOCK_SIZE,
  542. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
  543. }
  544. #endif
  545. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  546. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  547. Aes* aes, const byte* inBlock, byte* outBlock)
  548. {
  549. /* Thread mutex protection handled in Pic32Crypto */
  550. return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
  551. outBlock, inBlock, AES_BLOCK_SIZE,
  552. PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
  553. }
  554. #endif
  555. #elif defined(WOLFSSL_NRF51_AES)
  556. /* Use built-in AES hardware - AES 128 ECB Encrypt Only */
  557. #include "wolfssl/wolfcrypt/port/nrf51.h"
  558. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  559. Aes* aes, const byte* inBlock, byte* outBlock)
  560. {
  561. int ret;
  562. ret = wolfSSL_CryptHwMutexLock();
  563. if (ret == 0) {
  564. ret = nrf51_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds,
  565. outBlock);
  566. wolfSSL_CryptHwMutexUnLock();
  567. }
  568. return ret;
  569. }
  570. #ifdef HAVE_AES_DECRYPT
  571. #error nRF51 AES Hardware does not support decrypt
  572. #endif /* HAVE_AES_DECRYPT */
  573. #elif defined(WOLFSSL_ESP32_CRYPT) && \
  574. !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
  575. #include "wolfssl/wolfcrypt/port/Espressif/esp32-crypt.h"
  576. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  577. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  578. Aes* aes, const byte* inBlock, byte* outBlock)
  579. {
  580. /* Thread mutex protection handled in esp_aes_hw_InUse */
  581. return wc_esp32AesEncrypt(aes, inBlock, outBlock);
  582. }
  583. #endif
  584. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  585. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  586. Aes* aes, const byte* inBlock, byte* outBlock)
  587. {
  588. /* Thread mutex protection handled in esp_aes_hw_InUse */
  589. return wc_esp32AesDecrypt(aes, inBlock, outBlock);
  590. }
  591. #endif
  592. #elif defined(WOLFSSL_AESNI)
  593. #define NEED_AES_TABLES
  594. /* Each platform needs to query info type 1 from cpuid to see if aesni is
  595. * supported. Also, let's setup a macro for proper linkage w/o ABI conflicts
  596. */
  597. #ifndef AESNI_ALIGN
  598. #define AESNI_ALIGN 16
  599. #endif
  600. static int checkAESNI = 0;
  601. static int haveAESNI = 0;
  602. static word32 intel_flags = 0;
  603. static WARN_UNUSED_RESULT int Check_CPU_support_AES(void)
  604. {
  605. intel_flags = cpuid_get_flags();
  606. return IS_INTEL_AESNI(intel_flags) != 0;
  607. }
  608. /* tell C compiler these are asm functions in case any mix up of ABI underscore
  609. prefix between clang/gcc/llvm etc */
  610. #ifdef HAVE_AES_CBC
  611. void AES_CBC_encrypt(const unsigned char* in, unsigned char* out,
  612. unsigned char* ivec, unsigned long length,
  613. const unsigned char* KS, int nr)
  614. XASM_LINK("AES_CBC_encrypt");
  615. #ifdef HAVE_AES_DECRYPT
  616. #if defined(WOLFSSL_AESNI_BY4) || defined(WOLFSSL_X86_BUILD)
  617. void AES_CBC_decrypt_by4(const unsigned char* in, unsigned char* out,
  618. unsigned char* ivec, unsigned long length,
  619. const unsigned char* KS, int nr)
  620. XASM_LINK("AES_CBC_decrypt_by4");
  621. #elif defined(WOLFSSL_AESNI_BY6)
  622. void AES_CBC_decrypt_by6(const unsigned char* in, unsigned char* out,
  623. unsigned char* ivec, unsigned long length,
  624. const unsigned char* KS, int nr)
  625. XASM_LINK("AES_CBC_decrypt_by6");
  626. #else /* WOLFSSL_AESNI_BYx */
  627. void AES_CBC_decrypt_by8(const unsigned char* in, unsigned char* out,
  628. unsigned char* ivec, unsigned long length,
  629. const unsigned char* KS, int nr)
  630. XASM_LINK("AES_CBC_decrypt_by8");
  631. #endif /* WOLFSSL_AESNI_BYx */
  632. #endif /* HAVE_AES_DECRYPT */
  633. #endif /* HAVE_AES_CBC */
  634. void AES_ECB_encrypt(const unsigned char* in, unsigned char* out,
  635. unsigned long length, const unsigned char* KS, int nr)
  636. XASM_LINK("AES_ECB_encrypt");
  637. #ifdef HAVE_AES_DECRYPT
  638. void AES_ECB_decrypt(const unsigned char* in, unsigned char* out,
  639. unsigned long length, const unsigned char* KS, int nr)
  640. XASM_LINK("AES_ECB_decrypt");
  641. #endif
  642. void AES_128_Key_Expansion(const unsigned char* userkey,
  643. unsigned char* key_schedule)
  644. XASM_LINK("AES_128_Key_Expansion");
  645. void AES_192_Key_Expansion(const unsigned char* userkey,
  646. unsigned char* key_schedule)
  647. XASM_LINK("AES_192_Key_Expansion");
  648. void AES_256_Key_Expansion(const unsigned char* userkey,
  649. unsigned char* key_schedule)
  650. XASM_LINK("AES_256_Key_Expansion");
  651. static WARN_UNUSED_RESULT int AES_set_encrypt_key(
  652. const unsigned char *userKey, const int bits, Aes* aes)
  653. {
  654. int ret;
  655. if (!userKey || !aes)
  656. return BAD_FUNC_ARG;
  657. switch (bits) {
  658. case 128:
  659. AES_128_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 10;
  660. return 0;
  661. case 192:
  662. AES_192_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 12;
  663. return 0;
  664. case 256:
  665. AES_256_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 14;
  666. return 0;
  667. default:
  668. ret = BAD_FUNC_ARG;
  669. }
  670. return ret;
  671. }
  672. #ifdef HAVE_AES_DECRYPT
/* Build the AES-NI decryption key schedule from the raw user key.
 *
 * AES-NI's AESDEC instructions use the Equivalent Inverse Cipher: the
 * forward (encrypt) schedule is generated into a scratch Aes, then copied
 * back in reverse round order with AESIMC (InvMixColumns) applied to every
 * inner round key.
 *
 * userKey - raw key bytes (length implied by 'bits')
 * bits    - key length in bits: 128, 192 or 256
 * aes     - receives the decrypt schedule in aes->key and the round count
 *
 * Returns 0 on success, BAD_FUNC_ARG on bad arguments or key size,
 * MEMORY_E on allocation failure (small-stack builds only).
 */
static WARN_UNUSED_RESULT int AES_set_decrypt_key(
    const unsigned char* userKey, const int bits, Aes* aes)
{
    word32 nr;
#ifdef WOLFSSL_SMALL_STACK
    Aes *temp_key;
#else
    Aes temp_key[1];
#endif
    __m128i *Key_Schedule;
    __m128i *Temp_Key_Schedule;

    if (!userKey || !aes)
        return BAD_FUNC_ARG;

#ifdef WOLFSSL_SMALL_STACK
    if ((temp_key = (Aes *)XMALLOC(sizeof *aes, aes->heap,
                                   DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    /* Generate the forward schedule into the scratch context first. */
    if (AES_set_encrypt_key(userKey,bits,temp_key) == BAD_FUNC_ARG) {
#ifdef WOLFSSL_SMALL_STACK
        XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
#endif
        return BAD_FUNC_ARG;
    }

    Key_Schedule = (__m128i*)aes->key;
    Temp_Key_Schedule = (__m128i*)temp_key->key;

    nr = temp_key->rounds;
    aes->rounds = nr;

    /* SIMD registers are used below; the macro may early-return on failure
     * (freeing the scratch key first in small-stack builds). */
#ifdef WOLFSSL_SMALL_STACK
    SAVE_VECTOR_REGISTERS(XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES); return _svr_ret;);
#else
    SAVE_VECTOR_REGISTERS(return _svr_ret;);
#endif

    /* First and last round keys are copied as-is; inner round keys get
     * AESIMC and the round order is reversed. */
    Key_Schedule[nr] = Temp_Key_Schedule[0];
    Key_Schedule[nr-1] = _mm_aesimc_si128(Temp_Key_Schedule[1]);
    Key_Schedule[nr-2] = _mm_aesimc_si128(Temp_Key_Schedule[2]);
    Key_Schedule[nr-3] = _mm_aesimc_si128(Temp_Key_Schedule[3]);
    Key_Schedule[nr-4] = _mm_aesimc_si128(Temp_Key_Schedule[4]);
    Key_Schedule[nr-5] = _mm_aesimc_si128(Temp_Key_Schedule[5]);
    Key_Schedule[nr-6] = _mm_aesimc_si128(Temp_Key_Schedule[6]);
    Key_Schedule[nr-7] = _mm_aesimc_si128(Temp_Key_Schedule[7]);
    Key_Schedule[nr-8] = _mm_aesimc_si128(Temp_Key_Schedule[8]);
    Key_Schedule[nr-9] = _mm_aesimc_si128(Temp_Key_Schedule[9]);

    /* rounds > 10: AES-192 (12) and AES-256 (14) have extra inner keys */
    if (nr>10) {
        Key_Schedule[nr-10] = _mm_aesimc_si128(Temp_Key_Schedule[10]);
        Key_Schedule[nr-11] = _mm_aesimc_si128(Temp_Key_Schedule[11]);
    }

    if (nr>12) {
        Key_Schedule[nr-12] = _mm_aesimc_si128(Temp_Key_Schedule[12]);
        Key_Schedule[nr-13] = _mm_aesimc_si128(Temp_Key_Schedule[13]);
    }

    Key_Schedule[0] = Temp_Key_Schedule[nr];

    RESTORE_VECTOR_REGISTERS();

#ifdef WOLFSSL_SMALL_STACK
    XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
#endif
    return 0;
}
  731. #endif /* HAVE_AES_DECRYPT */
  732. #elif (defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
  733. && !defined(WOLFSSL_QNX_CAAM)) || \
  734. ((defined(WOLFSSL_AFALG) || defined(WOLFSSL_DEVCRYPTO_AES)) && \
  735. defined(HAVE_AESCCM))
  736. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  737. Aes* aes, const byte* inBlock, byte* outBlock)
  738. {
  739. return wc_AesEncryptDirect(aes, outBlock, inBlock);
  740. }
  741. #elif defined(WOLFSSL_AFALG)
  742. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  743. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  744. /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */
  745. #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
  746. #include "hal_data.h"
  747. #ifndef WOLFSSL_SCE_AES256_HANDLE
  748. #define WOLFSSL_SCE_AES256_HANDLE g_sce_aes_256
  749. #endif
  750. #ifndef WOLFSSL_SCE_AES192_HANDLE
  751. #define WOLFSSL_SCE_AES192_HANDLE g_sce_aes_192
  752. #endif
  753. #ifndef WOLFSSL_SCE_AES128_HANDLE
  754. #define WOLFSSL_SCE_AES128_HANDLE g_sce_aes_128
  755. #endif
  756. static WARN_UNUSED_RESULT int AES_ECB_encrypt(
  757. Aes* aes, const byte* inBlock, byte* outBlock, int sz)
  758. {
  759. word32 ret;
  760. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  761. CRYPTO_WORD_ENDIAN_BIG) {
  762. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  763. }
  764. switch (aes->keylen) {
  765. #ifdef WOLFSSL_AES_128
  766. case AES_128_KEY_SIZE:
  767. ret = WOLFSSL_SCE_AES128_HANDLE.p_api->encrypt(
  768. WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key,
  769. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  770. (word32*)outBlock);
  771. break;
  772. #endif
  773. #ifdef WOLFSSL_AES_192
  774. case AES_192_KEY_SIZE:
  775. ret = WOLFSSL_SCE_AES192_HANDLE.p_api->encrypt(
  776. WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key,
  777. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  778. (word32*)outBlock);
  779. break;
  780. #endif
  781. #ifdef WOLFSSL_AES_256
  782. case AES_256_KEY_SIZE:
  783. ret = WOLFSSL_SCE_AES256_HANDLE.p_api->encrypt(
  784. WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key,
  785. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  786. (word32*)outBlock);
  787. break;
  788. #endif
  789. default:
  790. WOLFSSL_MSG("Unknown key size");
  791. return BAD_FUNC_ARG;
  792. }
  793. if (ret != SSP_SUCCESS) {
  794. /* revert input */
  795. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  796. return WC_HW_E;
  797. }
  798. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  799. CRYPTO_WORD_ENDIAN_BIG) {
  800. ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
  801. if (inBlock != outBlock) {
  802. /* revert input */
  803. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  804. }
  805. }
  806. return 0;
  807. }
  808. #if defined(HAVE_AES_DECRYPT)
  809. static WARN_UNUSED_RESULT int AES_ECB_decrypt(
  810. Aes* aes, const byte* inBlock, byte* outBlock, int sz)
  811. {
  812. word32 ret;
  813. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  814. CRYPTO_WORD_ENDIAN_BIG) {
  815. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  816. }
  817. switch (aes->keylen) {
  818. #ifdef WOLFSSL_AES_128
  819. case AES_128_KEY_SIZE:
  820. ret = WOLFSSL_SCE_AES128_HANDLE.p_api->decrypt(
  821. WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key, aes->reg,
  822. (sz / sizeof(word32)), (word32*)inBlock,
  823. (word32*)outBlock);
  824. break;
  825. #endif
  826. #ifdef WOLFSSL_AES_192
  827. case AES_192_KEY_SIZE:
  828. ret = WOLFSSL_SCE_AES192_HANDLE.p_api->decrypt(
  829. WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key, aes->reg,
  830. (sz / sizeof(word32)), (word32*)inBlock,
  831. (word32*)outBlock);
  832. break;
  833. #endif
  834. #ifdef WOLFSSL_AES_256
  835. case AES_256_KEY_SIZE:
  836. ret = WOLFSSL_SCE_AES256_HANDLE.p_api->decrypt(
  837. WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key, aes->reg,
  838. (sz / sizeof(word32)), (word32*)inBlock,
  839. (word32*)outBlock);
  840. break;
  841. #endif
  842. default:
  843. WOLFSSL_MSG("Unknown key size");
  844. return BAD_FUNC_ARG;
  845. }
  846. if (ret != SSP_SUCCESS) {
  847. return WC_HW_E;
  848. }
  849. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  850. CRYPTO_WORD_ENDIAN_BIG) {
  851. ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
  852. if (inBlock != outBlock) {
  853. /* revert input */
  854. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  855. }
  856. }
  857. return 0;
  858. }
  859. #endif /* HAVE_AES_DECRYPT */
  860. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  861. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  862. Aes* aes, const byte* inBlock, byte* outBlock)
  863. {
  864. return AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
  865. }
  866. #endif
  867. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  868. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  869. Aes* aes, const byte* inBlock, byte* outBlock)
  870. {
  871. return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
  872. }
  873. #endif
  874. #elif defined(WOLFSSL_KCAPI_AES)
  875. /* Only CBC and GCM that are in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  876. #if defined(WOLFSSL_AES_COUNTER) || defined(HAVE_AESCCM) || \
  877. defined(WOLFSSL_CMAC) || defined(WOLFSSL_AES_OFB) || \
  878. defined(WOLFSSL_AES_CFB) || defined(HAVE_AES_ECB) || \
  879. defined(WOLFSSL_AES_DIRECT) || \
  880. (defined(HAVE_AES_CBC) && defined(WOLFSSL_NO_KCAPI_AES_CBC))
  881. #define NEED_AES_TABLES
  882. #endif
  883. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  884. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  885. #else
  886. /* using wolfCrypt software implementation */
  887. #define NEED_AES_TABLES
  888. #endif
  889. #ifdef NEED_AES_TABLES
/* AES key-schedule round constants (rcon), each stored in the high byte of
 * a big-endian word. */
static const FLASH_QUALIFIER word32 rcon[] = {
    0x01000000, 0x02000000, 0x04000000, 0x08000000,
    0x10000000, 0x20000000, 0x40000000, 0x80000000,
    0x1B000000, 0x36000000,
    /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
  896. #ifndef WOLFSSL_AES_SMALL_TABLES
/* AES encryption T-tables: combined SubBytes + MixColumns lookups.
 * Te[1..3] are Te[0] byte-rotated, so one round can be computed with four
 * table lookups and XORs per output word (standard Rijndael Te0..Te3). */
static const FLASH_QUALIFIER word32 Te[4][256] = {
{
    /* Te0 */
    0xc66363a5U, 0xf87c7c84U, 0xee777799U, 0xf67b7b8dU,
    0xfff2f20dU, 0xd66b6bbdU, 0xde6f6fb1U, 0x91c5c554U,
    0x60303050U, 0x02010103U, 0xce6767a9U, 0x562b2b7dU,
    0xe7fefe19U, 0xb5d7d762U, 0x4dababe6U, 0xec76769aU,
    0x8fcaca45U, 0x1f82829dU, 0x89c9c940U, 0xfa7d7d87U,
    0xeffafa15U, 0xb25959ebU, 0x8e4747c9U, 0xfbf0f00bU,
    0x41adadecU, 0xb3d4d467U, 0x5fa2a2fdU, 0x45afafeaU,
    0x239c9cbfU, 0x53a4a4f7U, 0xe4727296U, 0x9bc0c05bU,
    0x75b7b7c2U, 0xe1fdfd1cU, 0x3d9393aeU, 0x4c26266aU,
    0x6c36365aU, 0x7e3f3f41U, 0xf5f7f702U, 0x83cccc4fU,
    0x6834345cU, 0x51a5a5f4U, 0xd1e5e534U, 0xf9f1f108U,
    0xe2717193U, 0xabd8d873U, 0x62313153U, 0x2a15153fU,
    0x0804040cU, 0x95c7c752U, 0x46232365U, 0x9dc3c35eU,
    0x30181828U, 0x379696a1U, 0x0a05050fU, 0x2f9a9ab5U,
    0x0e070709U, 0x24121236U, 0x1b80809bU, 0xdfe2e23dU,
    0xcdebeb26U, 0x4e272769U, 0x7fb2b2cdU, 0xea75759fU,
    0x1209091bU, 0x1d83839eU, 0x582c2c74U, 0x341a1a2eU,
    0x361b1b2dU, 0xdc6e6eb2U, 0xb45a5aeeU, 0x5ba0a0fbU,
    0xa45252f6U, 0x763b3b4dU, 0xb7d6d661U, 0x7db3b3ceU,
    0x5229297bU, 0xdde3e33eU, 0x5e2f2f71U, 0x13848497U,
    0xa65353f5U, 0xb9d1d168U, 0x00000000U, 0xc1eded2cU,
    0x40202060U, 0xe3fcfc1fU, 0x79b1b1c8U, 0xb65b5bedU,
    0xd46a6abeU, 0x8dcbcb46U, 0x67bebed9U, 0x7239394bU,
    0x944a4adeU, 0x984c4cd4U, 0xb05858e8U, 0x85cfcf4aU,
    0xbbd0d06bU, 0xc5efef2aU, 0x4faaaae5U, 0xedfbfb16U,
    0x864343c5U, 0x9a4d4dd7U, 0x66333355U, 0x11858594U,
    0x8a4545cfU, 0xe9f9f910U, 0x04020206U, 0xfe7f7f81U,
    0xa05050f0U, 0x783c3c44U, 0x259f9fbaU, 0x4ba8a8e3U,
    0xa25151f3U, 0x5da3a3feU, 0x804040c0U, 0x058f8f8aU,
    0x3f9292adU, 0x219d9dbcU, 0x70383848U, 0xf1f5f504U,
    0x63bcbcdfU, 0x77b6b6c1U, 0xafdada75U, 0x42212163U,
    0x20101030U, 0xe5ffff1aU, 0xfdf3f30eU, 0xbfd2d26dU,
    0x81cdcd4cU, 0x180c0c14U, 0x26131335U, 0xc3ecec2fU,
    0xbe5f5fe1U, 0x359797a2U, 0x884444ccU, 0x2e171739U,
    0x93c4c457U, 0x55a7a7f2U, 0xfc7e7e82U, 0x7a3d3d47U,
    0xc86464acU, 0xba5d5de7U, 0x3219192bU, 0xe6737395U,
    0xc06060a0U, 0x19818198U, 0x9e4f4fd1U, 0xa3dcdc7fU,
    0x44222266U, 0x542a2a7eU, 0x3b9090abU, 0x0b888883U,
    0x8c4646caU, 0xc7eeee29U, 0x6bb8b8d3U, 0x2814143cU,
    0xa7dede79U, 0xbc5e5ee2U, 0x160b0b1dU, 0xaddbdb76U,
    0xdbe0e03bU, 0x64323256U, 0x743a3a4eU, 0x140a0a1eU,
    0x924949dbU, 0x0c06060aU, 0x4824246cU, 0xb85c5ce4U,
    0x9fc2c25dU, 0xbdd3d36eU, 0x43acacefU, 0xc46262a6U,
    0x399191a8U, 0x319595a4U, 0xd3e4e437U, 0xf279798bU,
    0xd5e7e732U, 0x8bc8c843U, 0x6e373759U, 0xda6d6db7U,
    0x018d8d8cU, 0xb1d5d564U, 0x9c4e4ed2U, 0x49a9a9e0U,
    0xd86c6cb4U, 0xac5656faU, 0xf3f4f407U, 0xcfeaea25U,
    0xca6565afU, 0xf47a7a8eU, 0x47aeaee9U, 0x10080818U,
    0x6fbabad5U, 0xf0787888U, 0x4a25256fU, 0x5c2e2e72U,
    0x381c1c24U, 0x57a6a6f1U, 0x73b4b4c7U, 0x97c6c651U,
    0xcbe8e823U, 0xa1dddd7cU, 0xe874749cU, 0x3e1f1f21U,
    0x964b4bddU, 0x61bdbddcU, 0x0d8b8b86U, 0x0f8a8a85U,
    0xe0707090U, 0x7c3e3e42U, 0x71b5b5c4U, 0xcc6666aaU,
    0x904848d8U, 0x06030305U, 0xf7f6f601U, 0x1c0e0e12U,
    0xc26161a3U, 0x6a35355fU, 0xae5757f9U, 0x69b9b9d0U,
    0x17868691U, 0x99c1c158U, 0x3a1d1d27U, 0x279e9eb9U,
    0xd9e1e138U, 0xebf8f813U, 0x2b9898b3U, 0x22111133U,
    0xd26969bbU, 0xa9d9d970U, 0x078e8e89U, 0x339494a7U,
    0x2d9b9bb6U, 0x3c1e1e22U, 0x15878792U, 0xc9e9e920U,
    0x87cece49U, 0xaa5555ffU, 0x50282878U, 0xa5dfdf7aU,
    0x038c8c8fU, 0x59a1a1f8U, 0x09898980U, 0x1a0d0d17U,
    0x65bfbfdaU, 0xd7e6e631U, 0x844242c6U, 0xd06868b8U,
    0x824141c3U, 0x299999b0U, 0x5a2d2d77U, 0x1e0f0f11U,
    0x7bb0b0cbU, 0xa85454fcU, 0x6dbbbbd6U, 0x2c16163aU,
},
{
    /* Te1: Te0 rotated right by one byte */
    0xa5c66363U, 0x84f87c7cU, 0x99ee7777U, 0x8df67b7bU,
    0x0dfff2f2U, 0xbdd66b6bU, 0xb1de6f6fU, 0x5491c5c5U,
    0x50603030U, 0x03020101U, 0xa9ce6767U, 0x7d562b2bU,
    0x19e7fefeU, 0x62b5d7d7U, 0xe64dababU, 0x9aec7676U,
    0x458fcacaU, 0x9d1f8282U, 0x4089c9c9U, 0x87fa7d7dU,
    0x15effafaU, 0xebb25959U, 0xc98e4747U, 0x0bfbf0f0U,
    0xec41adadU, 0x67b3d4d4U, 0xfd5fa2a2U, 0xea45afafU,
    0xbf239c9cU, 0xf753a4a4U, 0x96e47272U, 0x5b9bc0c0U,
    0xc275b7b7U, 0x1ce1fdfdU, 0xae3d9393U, 0x6a4c2626U,
    0x5a6c3636U, 0x417e3f3fU, 0x02f5f7f7U, 0x4f83ccccU,
    0x5c683434U, 0xf451a5a5U, 0x34d1e5e5U, 0x08f9f1f1U,
    0x93e27171U, 0x73abd8d8U, 0x53623131U, 0x3f2a1515U,
    0x0c080404U, 0x5295c7c7U, 0x65462323U, 0x5e9dc3c3U,
    0x28301818U, 0xa1379696U, 0x0f0a0505U, 0xb52f9a9aU,
    0x090e0707U, 0x36241212U, 0x9b1b8080U, 0x3ddfe2e2U,
    0x26cdebebU, 0x694e2727U, 0xcd7fb2b2U, 0x9fea7575U,
    0x1b120909U, 0x9e1d8383U, 0x74582c2cU, 0x2e341a1aU,
    0x2d361b1bU, 0xb2dc6e6eU, 0xeeb45a5aU, 0xfb5ba0a0U,
    0xf6a45252U, 0x4d763b3bU, 0x61b7d6d6U, 0xce7db3b3U,
    0x7b522929U, 0x3edde3e3U, 0x715e2f2fU, 0x97138484U,
    0xf5a65353U, 0x68b9d1d1U, 0x00000000U, 0x2cc1ededU,
    0x60402020U, 0x1fe3fcfcU, 0xc879b1b1U, 0xedb65b5bU,
    0xbed46a6aU, 0x468dcbcbU, 0xd967bebeU, 0x4b723939U,
    0xde944a4aU, 0xd4984c4cU, 0xe8b05858U, 0x4a85cfcfU,
    0x6bbbd0d0U, 0x2ac5efefU, 0xe54faaaaU, 0x16edfbfbU,
    0xc5864343U, 0xd79a4d4dU, 0x55663333U, 0x94118585U,
    0xcf8a4545U, 0x10e9f9f9U, 0x06040202U, 0x81fe7f7fU,
    0xf0a05050U, 0x44783c3cU, 0xba259f9fU, 0xe34ba8a8U,
    0xf3a25151U, 0xfe5da3a3U, 0xc0804040U, 0x8a058f8fU,
    0xad3f9292U, 0xbc219d9dU, 0x48703838U, 0x04f1f5f5U,
    0xdf63bcbcU, 0xc177b6b6U, 0x75afdadaU, 0x63422121U,
    0x30201010U, 0x1ae5ffffU, 0x0efdf3f3U, 0x6dbfd2d2U,
    0x4c81cdcdU, 0x14180c0cU, 0x35261313U, 0x2fc3ececU,
    0xe1be5f5fU, 0xa2359797U, 0xcc884444U, 0x392e1717U,
    0x5793c4c4U, 0xf255a7a7U, 0x82fc7e7eU, 0x477a3d3dU,
    0xacc86464U, 0xe7ba5d5dU, 0x2b321919U, 0x95e67373U,
    0xa0c06060U, 0x98198181U, 0xd19e4f4fU, 0x7fa3dcdcU,
    0x66442222U, 0x7e542a2aU, 0xab3b9090U, 0x830b8888U,
    0xca8c4646U, 0x29c7eeeeU, 0xd36bb8b8U, 0x3c281414U,
    0x79a7dedeU, 0xe2bc5e5eU, 0x1d160b0bU, 0x76addbdbU,
    0x3bdbe0e0U, 0x56643232U, 0x4e743a3aU, 0x1e140a0aU,
    0xdb924949U, 0x0a0c0606U, 0x6c482424U, 0xe4b85c5cU,
    0x5d9fc2c2U, 0x6ebdd3d3U, 0xef43acacU, 0xa6c46262U,
    0xa8399191U, 0xa4319595U, 0x37d3e4e4U, 0x8bf27979U,
    0x32d5e7e7U, 0x438bc8c8U, 0x596e3737U, 0xb7da6d6dU,
    0x8c018d8dU, 0x64b1d5d5U, 0xd29c4e4eU, 0xe049a9a9U,
    0xb4d86c6cU, 0xfaac5656U, 0x07f3f4f4U, 0x25cfeaeaU,
    0xafca6565U, 0x8ef47a7aU, 0xe947aeaeU, 0x18100808U,
    0xd56fbabaU, 0x88f07878U, 0x6f4a2525U, 0x725c2e2eU,
    0x24381c1cU, 0xf157a6a6U, 0xc773b4b4U, 0x5197c6c6U,
    0x23cbe8e8U, 0x7ca1ddddU, 0x9ce87474U, 0x213e1f1fU,
    0xdd964b4bU, 0xdc61bdbdU, 0x860d8b8bU, 0x850f8a8aU,
    0x90e07070U, 0x427c3e3eU, 0xc471b5b5U, 0xaacc6666U,
    0xd8904848U, 0x05060303U, 0x01f7f6f6U, 0x121c0e0eU,
    0xa3c26161U, 0x5f6a3535U, 0xf9ae5757U, 0xd069b9b9U,
    0x91178686U, 0x5899c1c1U, 0x273a1d1dU, 0xb9279e9eU,
    0x38d9e1e1U, 0x13ebf8f8U, 0xb32b9898U, 0x33221111U,
    0xbbd26969U, 0x70a9d9d9U, 0x89078e8eU, 0xa7339494U,
    0xb62d9b9bU, 0x223c1e1eU, 0x92158787U, 0x20c9e9e9U,
    0x4987ceceU, 0xffaa5555U, 0x78502828U, 0x7aa5dfdfU,
    0x8f038c8cU, 0xf859a1a1U, 0x80098989U, 0x171a0d0dU,
    0xda65bfbfU, 0x31d7e6e6U, 0xc6844242U, 0xb8d06868U,
    0xc3824141U, 0xb0299999U, 0x775a2d2dU, 0x111e0f0fU,
    0xcb7bb0b0U, 0xfca85454U, 0xd66dbbbbU, 0x3a2c1616U,
},
{
    /* Te2: Te0 rotated right by two bytes */
    0x63a5c663U, 0x7c84f87cU, 0x7799ee77U, 0x7b8df67bU,
    0xf20dfff2U, 0x6bbdd66bU, 0x6fb1de6fU, 0xc55491c5U,
    0x30506030U, 0x01030201U, 0x67a9ce67U, 0x2b7d562bU,
    0xfe19e7feU, 0xd762b5d7U, 0xabe64dabU, 0x769aec76U,
    0xca458fcaU, 0x829d1f82U, 0xc94089c9U, 0x7d87fa7dU,
    0xfa15effaU, 0x59ebb259U, 0x47c98e47U, 0xf00bfbf0U,
    0xadec41adU, 0xd467b3d4U, 0xa2fd5fa2U, 0xafea45afU,
    0x9cbf239cU, 0xa4f753a4U, 0x7296e472U, 0xc05b9bc0U,
    0xb7c275b7U, 0xfd1ce1fdU, 0x93ae3d93U, 0x266a4c26U,
    0x365a6c36U, 0x3f417e3fU, 0xf702f5f7U, 0xcc4f83ccU,
    0x345c6834U, 0xa5f451a5U, 0xe534d1e5U, 0xf108f9f1U,
    0x7193e271U, 0xd873abd8U, 0x31536231U, 0x153f2a15U,
    0x040c0804U, 0xc75295c7U, 0x23654623U, 0xc35e9dc3U,
    0x18283018U, 0x96a13796U, 0x050f0a05U, 0x9ab52f9aU,
    0x07090e07U, 0x12362412U, 0x809b1b80U, 0xe23ddfe2U,
    0xeb26cdebU, 0x27694e27U, 0xb2cd7fb2U, 0x759fea75U,
    0x091b1209U, 0x839e1d83U, 0x2c74582cU, 0x1a2e341aU,
    0x1b2d361bU, 0x6eb2dc6eU, 0x5aeeb45aU, 0xa0fb5ba0U,
    0x52f6a452U, 0x3b4d763bU, 0xd661b7d6U, 0xb3ce7db3U,
    0x297b5229U, 0xe33edde3U, 0x2f715e2fU, 0x84971384U,
    0x53f5a653U, 0xd168b9d1U, 0x00000000U, 0xed2cc1edU,
    0x20604020U, 0xfc1fe3fcU, 0xb1c879b1U, 0x5bedb65bU,
    0x6abed46aU, 0xcb468dcbU, 0xbed967beU, 0x394b7239U,
    0x4ade944aU, 0x4cd4984cU, 0x58e8b058U, 0xcf4a85cfU,
    0xd06bbbd0U, 0xef2ac5efU, 0xaae54faaU, 0xfb16edfbU,
    0x43c58643U, 0x4dd79a4dU, 0x33556633U, 0x85941185U,
    0x45cf8a45U, 0xf910e9f9U, 0x02060402U, 0x7f81fe7fU,
    0x50f0a050U, 0x3c44783cU, 0x9fba259fU, 0xa8e34ba8U,
    0x51f3a251U, 0xa3fe5da3U, 0x40c08040U, 0x8f8a058fU,
    0x92ad3f92U, 0x9dbc219dU, 0x38487038U, 0xf504f1f5U,
    0xbcdf63bcU, 0xb6c177b6U, 0xda75afdaU, 0x21634221U,
    0x10302010U, 0xff1ae5ffU, 0xf30efdf3U, 0xd26dbfd2U,
    0xcd4c81cdU, 0x0c14180cU, 0x13352613U, 0xec2fc3ecU,
    0x5fe1be5fU, 0x97a23597U, 0x44cc8844U, 0x17392e17U,
    0xc45793c4U, 0xa7f255a7U, 0x7e82fc7eU, 0x3d477a3dU,
    0x64acc864U, 0x5de7ba5dU, 0x192b3219U, 0x7395e673U,
    0x60a0c060U, 0x81981981U, 0x4fd19e4fU, 0xdc7fa3dcU,
    0x22664422U, 0x2a7e542aU, 0x90ab3b90U, 0x88830b88U,
    0x46ca8c46U, 0xee29c7eeU, 0xb8d36bb8U, 0x143c2814U,
    0xde79a7deU, 0x5ee2bc5eU, 0x0b1d160bU, 0xdb76addbU,
    0xe03bdbe0U, 0x32566432U, 0x3a4e743aU, 0x0a1e140aU,
    0x49db9249U, 0x060a0c06U, 0x246c4824U, 0x5ce4b85cU,
    0xc25d9fc2U, 0xd36ebdd3U, 0xacef43acU, 0x62a6c462U,
    0x91a83991U, 0x95a43195U, 0xe437d3e4U, 0x798bf279U,
    0xe732d5e7U, 0xc8438bc8U, 0x37596e37U, 0x6db7da6dU,
    0x8d8c018dU, 0xd564b1d5U, 0x4ed29c4eU, 0xa9e049a9U,
    0x6cb4d86cU, 0x56faac56U, 0xf407f3f4U, 0xea25cfeaU,
    0x65afca65U, 0x7a8ef47aU, 0xaee947aeU, 0x08181008U,
    0xbad56fbaU, 0x7888f078U, 0x256f4a25U, 0x2e725c2eU,
    0x1c24381cU, 0xa6f157a6U, 0xb4c773b4U, 0xc65197c6U,
    0xe823cbe8U, 0xdd7ca1ddU, 0x749ce874U, 0x1f213e1fU,
    0x4bdd964bU, 0xbddc61bdU, 0x8b860d8bU, 0x8a850f8aU,
    0x7090e070U, 0x3e427c3eU, 0xb5c471b5U, 0x66aacc66U,
    0x48d89048U, 0x03050603U, 0xf601f7f6U, 0x0e121c0eU,
    0x61a3c261U, 0x355f6a35U, 0x57f9ae57U, 0xb9d069b9U,
    0x86911786U, 0xc15899c1U, 0x1d273a1dU, 0x9eb9279eU,
    0xe138d9e1U, 0xf813ebf8U, 0x98b32b98U, 0x11332211U,
    0x69bbd269U, 0xd970a9d9U, 0x8e89078eU, 0x94a73394U,
    0x9bb62d9bU, 0x1e223c1eU, 0x87921587U, 0xe920c9e9U,
    0xce4987ceU, 0x55ffaa55U, 0x28785028U, 0xdf7aa5dfU,
    0x8c8f038cU, 0xa1f859a1U, 0x89800989U, 0x0d171a0dU,
    0xbfda65bfU, 0xe631d7e6U, 0x42c68442U, 0x68b8d068U,
    0x41c38241U, 0x99b02999U, 0x2d775a2dU, 0x0f111e0fU,
    0xb0cb7bb0U, 0x54fca854U, 0xbbd66dbbU, 0x163a2c16U,
},
{
    /* Te3: Te0 rotated right by three bytes */
    0x6363a5c6U, 0x7c7c84f8U, 0x777799eeU, 0x7b7b8df6U,
    0xf2f20dffU, 0x6b6bbdd6U, 0x6f6fb1deU, 0xc5c55491U,
    0x30305060U, 0x01010302U, 0x6767a9ceU, 0x2b2b7d56U,
    0xfefe19e7U, 0xd7d762b5U, 0xababe64dU, 0x76769aecU,
    0xcaca458fU, 0x82829d1fU, 0xc9c94089U, 0x7d7d87faU,
    0xfafa15efU, 0x5959ebb2U, 0x4747c98eU, 0xf0f00bfbU,
    0xadadec41U, 0xd4d467b3U, 0xa2a2fd5fU, 0xafafea45U,
    0x9c9cbf23U, 0xa4a4f753U, 0x727296e4U, 0xc0c05b9bU,
    0xb7b7c275U, 0xfdfd1ce1U, 0x9393ae3dU, 0x26266a4cU,
    0x36365a6cU, 0x3f3f417eU, 0xf7f702f5U, 0xcccc4f83U,
    0x34345c68U, 0xa5a5f451U, 0xe5e534d1U, 0xf1f108f9U,
    0x717193e2U, 0xd8d873abU, 0x31315362U, 0x15153f2aU,
    0x04040c08U, 0xc7c75295U, 0x23236546U, 0xc3c35e9dU,
    0x18182830U, 0x9696a137U, 0x05050f0aU, 0x9a9ab52fU,
    0x0707090eU, 0x12123624U, 0x80809b1bU, 0xe2e23ddfU,
    0xebeb26cdU, 0x2727694eU, 0xb2b2cd7fU, 0x75759feaU,
    0x09091b12U, 0x83839e1dU, 0x2c2c7458U, 0x1a1a2e34U,
    0x1b1b2d36U, 0x6e6eb2dcU, 0x5a5aeeb4U, 0xa0a0fb5bU,
    0x5252f6a4U, 0x3b3b4d76U, 0xd6d661b7U, 0xb3b3ce7dU,
    0x29297b52U, 0xe3e33eddU, 0x2f2f715eU, 0x84849713U,
    0x5353f5a6U, 0xd1d168b9U, 0x00000000U, 0xeded2cc1U,
    0x20206040U, 0xfcfc1fe3U, 0xb1b1c879U, 0x5b5bedb6U,
    0x6a6abed4U, 0xcbcb468dU, 0xbebed967U, 0x39394b72U,
    0x4a4ade94U, 0x4c4cd498U, 0x5858e8b0U, 0xcfcf4a85U,
    0xd0d06bbbU, 0xefef2ac5U, 0xaaaae54fU, 0xfbfb16edU,
    0x4343c586U, 0x4d4dd79aU, 0x33335566U, 0x85859411U,
    0x4545cf8aU, 0xf9f910e9U, 0x02020604U, 0x7f7f81feU,
    0x5050f0a0U, 0x3c3c4478U, 0x9f9fba25U, 0xa8a8e34bU,
    0x5151f3a2U, 0xa3a3fe5dU, 0x4040c080U, 0x8f8f8a05U,
    0x9292ad3fU, 0x9d9dbc21U, 0x38384870U, 0xf5f504f1U,
    0xbcbcdf63U, 0xb6b6c177U, 0xdada75afU, 0x21216342U,
    0x10103020U, 0xffff1ae5U, 0xf3f30efdU, 0xd2d26dbfU,
    0xcdcd4c81U, 0x0c0c1418U, 0x13133526U, 0xecec2fc3U,
    0x5f5fe1beU, 0x9797a235U, 0x4444cc88U, 0x1717392eU,
    0xc4c45793U, 0xa7a7f255U, 0x7e7e82fcU, 0x3d3d477aU,
    0x6464acc8U, 0x5d5de7baU, 0x19192b32U, 0x737395e6U,
    0x6060a0c0U, 0x81819819U, 0x4f4fd19eU, 0xdcdc7fa3U,
    0x22226644U, 0x2a2a7e54U, 0x9090ab3bU, 0x8888830bU,
    0x4646ca8cU, 0xeeee29c7U, 0xb8b8d36bU, 0x14143c28U,
    0xdede79a7U, 0x5e5ee2bcU, 0x0b0b1d16U, 0xdbdb76adU,
    0xe0e03bdbU, 0x32325664U, 0x3a3a4e74U, 0x0a0a1e14U,
    0x4949db92U, 0x06060a0cU, 0x24246c48U, 0x5c5ce4b8U,
    0xc2c25d9fU, 0xd3d36ebdU, 0xacacef43U, 0x6262a6c4U,
    0x9191a839U, 0x9595a431U, 0xe4e437d3U, 0x79798bf2U,
    0xe7e732d5U, 0xc8c8438bU, 0x3737596eU, 0x6d6db7daU,
    0x8d8d8c01U, 0xd5d564b1U, 0x4e4ed29cU, 0xa9a9e049U,
    0x6c6cb4d8U, 0x5656faacU, 0xf4f407f3U, 0xeaea25cfU,
    0x6565afcaU, 0x7a7a8ef4U, 0xaeaee947U, 0x08081810U,
    0xbabad56fU, 0x787888f0U, 0x25256f4aU, 0x2e2e725cU,
    0x1c1c2438U, 0xa6a6f157U, 0xb4b4c773U, 0xc6c65197U,
    0xe8e823cbU, 0xdddd7ca1U, 0x74749ce8U, 0x1f1f213eU,
    0x4b4bdd96U, 0xbdbddc61U, 0x8b8b860dU, 0x8a8a850fU,
    0x707090e0U, 0x3e3e427cU, 0xb5b5c471U, 0x6666aaccU,
    0x4848d890U, 0x03030506U, 0xf6f601f7U, 0x0e0e121cU,
    0x6161a3c2U, 0x35355f6aU, 0x5757f9aeU, 0xb9b9d069U,
    0x86869117U, 0xc1c15899U, 0x1d1d273aU, 0x9e9eb927U,
    0xe1e138d9U, 0xf8f813ebU, 0x9898b32bU, 0x11113322U,
    0x6969bbd2U, 0xd9d970a9U, 0x8e8e8907U, 0x9494a733U,
    0x9b9bb62dU, 0x1e1e223cU, 0x87879215U, 0xe9e920c9U,
    0xcece4987U, 0x5555ffaaU, 0x28287850U, 0xdfdf7aa5U,
    0x8c8c8f03U, 0xa1a1f859U, 0x89898009U, 0x0d0d171aU,
    0xbfbfda65U, 0xe6e631d7U, 0x4242c684U, 0x6868b8d0U,
    0x4141c382U, 0x9999b029U, 0x2d2d775aU, 0x0f0f111eU,
    0xb0b0cb7bU, 0x5454fca8U, 0xbbbbd66dU, 0x16163a2cU,
}
};
  1163. #ifdef HAVE_AES_DECRYPT
/* Inverse (decryption) round tables. Td[i][x] combines the inverse S-box
 * with the InvMixColumns multiplication for byte x, pre-rotated by i bytes,
 * so one inverse round is four table reads and three XORs per state word. */
static const FLASH_QUALIFIER word32 Td[4][256] = {
{
    0x51f4a750U, 0x7e416553U, 0x1a17a4c3U, 0x3a275e96U,
    0x3bab6bcbU, 0x1f9d45f1U, 0xacfa58abU, 0x4be30393U,
    0x2030fa55U, 0xad766df6U, 0x88cc7691U, 0xf5024c25U,
    0x4fe5d7fcU, 0xc52acbd7U, 0x26354480U, 0xb562a38fU,
    0xdeb15a49U, 0x25ba1b67U, 0x45ea0e98U, 0x5dfec0e1U,
    0xc32f7502U, 0x814cf012U, 0x8d4697a3U, 0x6bd3f9c6U,
    0x038f5fe7U, 0x15929c95U, 0xbf6d7aebU, 0x955259daU,
    0xd4be832dU, 0x587421d3U, 0x49e06929U, 0x8ec9c844U,
    0x75c2896aU, 0xf48e7978U, 0x99583e6bU, 0x27b971ddU,
    0xbee14fb6U, 0xf088ad17U, 0xc920ac66U, 0x7dce3ab4U,
    0x63df4a18U, 0xe51a3182U, 0x97513360U, 0x62537f45U,
    0xb16477e0U, 0xbb6bae84U, 0xfe81a01cU, 0xf9082b94U,
    0x70486858U, 0x8f45fd19U, 0x94de6c87U, 0x527bf8b7U,
    0xab73d323U, 0x724b02e2U, 0xe31f8f57U, 0x6655ab2aU,
    0xb2eb2807U, 0x2fb5c203U, 0x86c57b9aU, 0xd33708a5U,
    0x302887f2U, 0x23bfa5b2U, 0x02036abaU, 0xed16825cU,
    0x8acf1c2bU, 0xa779b492U, 0xf307f2f0U, 0x4e69e2a1U,
    0x65daf4cdU, 0x0605bed5U, 0xd134621fU, 0xc4a6fe8aU,
    0x342e539dU, 0xa2f355a0U, 0x058ae132U, 0xa4f6eb75U,
    0x0b83ec39U, 0x4060efaaU, 0x5e719f06U, 0xbd6e1051U,
    0x3e218af9U, 0x96dd063dU, 0xdd3e05aeU, 0x4de6bd46U,
    0x91548db5U, 0x71c45d05U, 0x0406d46fU, 0x605015ffU,
    0x1998fb24U, 0xd6bde997U, 0x894043ccU, 0x67d99e77U,
    0xb0e842bdU, 0x07898b88U, 0xe7195b38U, 0x79c8eedbU,
    0xa17c0a47U, 0x7c420fe9U, 0xf8841ec9U, 0x00000000U,
    0x09808683U, 0x322bed48U, 0x1e1170acU, 0x6c5a724eU,
    0xfd0efffbU, 0x0f853856U, 0x3daed51eU, 0x362d3927U,
    0x0a0fd964U, 0x685ca621U, 0x9b5b54d1U, 0x24362e3aU,
    0x0c0a67b1U, 0x9357e70fU, 0xb4ee96d2U, 0x1b9b919eU,
    0x80c0c54fU, 0x61dc20a2U, 0x5a774b69U, 0x1c121a16U,
    0xe293ba0aU, 0xc0a02ae5U, 0x3c22e043U, 0x121b171dU,
    0x0e090d0bU, 0xf28bc7adU, 0x2db6a8b9U, 0x141ea9c8U,
    0x57f11985U, 0xaf75074cU, 0xee99ddbbU, 0xa37f60fdU,
    0xf701269fU, 0x5c72f5bcU, 0x44663bc5U, 0x5bfb7e34U,
    0x8b432976U, 0xcb23c6dcU, 0xb6edfc68U, 0xb8e4f163U,
    0xd731dccaU, 0x42638510U, 0x13972240U, 0x84c61120U,
    0x854a247dU, 0xd2bb3df8U, 0xaef93211U, 0xc729a16dU,
    0x1d9e2f4bU, 0xdcb230f3U, 0x0d8652ecU, 0x77c1e3d0U,
    0x2bb3166cU, 0xa970b999U, 0x119448faU, 0x47e96422U,
    0xa8fc8cc4U, 0xa0f03f1aU, 0x567d2cd8U, 0x223390efU,
    0x87494ec7U, 0xd938d1c1U, 0x8ccaa2feU, 0x98d40b36U,
    0xa6f581cfU, 0xa57ade28U, 0xdab78e26U, 0x3fadbfa4U,
    0x2c3a9de4U, 0x5078920dU, 0x6a5fcc9bU, 0x547e4662U,
    0xf68d13c2U, 0x90d8b8e8U, 0x2e39f75eU, 0x82c3aff5U,
    0x9f5d80beU, 0x69d0937cU, 0x6fd52da9U, 0xcf2512b3U,
    0xc8ac993bU, 0x10187da7U, 0xe89c636eU, 0xdb3bbb7bU,
    0xcd267809U, 0x6e5918f4U, 0xec9ab701U, 0x834f9aa8U,
    0xe6956e65U, 0xaaffe67eU, 0x21bccf08U, 0xef15e8e6U,
    0xbae79bd9U, 0x4a6f36ceU, 0xea9f09d4U, 0x29b07cd6U,
    0x31a4b2afU, 0x2a3f2331U, 0xc6a59430U, 0x35a266c0U,
    0x744ebc37U, 0xfc82caa6U, 0xe090d0b0U, 0x33a7d815U,
    0xf104984aU, 0x41ecdaf7U, 0x7fcd500eU, 0x1791f62fU,
    0x764dd68dU, 0x43efb04dU, 0xccaa4d54U, 0xe49604dfU,
    0x9ed1b5e3U, 0x4c6a881bU, 0xc12c1fb8U, 0x4665517fU,
    0x9d5eea04U, 0x018c355dU, 0xfa877473U, 0xfb0b412eU,
    0xb3671d5aU, 0x92dbd252U, 0xe9105633U, 0x6dd64713U,
    0x9ad7618cU, 0x37a10c7aU, 0x59f8148eU, 0xeb133c89U,
    0xcea927eeU, 0xb761c935U, 0xe11ce5edU, 0x7a47b13cU,
    0x9cd2df59U, 0x55f2733fU, 0x1814ce79U, 0x73c737bfU,
    0x53f7cdeaU, 0x5ffdaa5bU, 0xdf3d6f14U, 0x7844db86U,
    0xcaaff381U, 0xb968c43eU, 0x3824342cU, 0xc2a3405fU,
    0x161dc372U, 0xbce2250cU, 0x283c498bU, 0xff0d9541U,
    0x39a80171U, 0x080cb3deU, 0xd8b4e49cU, 0x6456c190U,
    0x7bcb8461U, 0xd532b670U, 0x486c5c74U, 0xd0b85742U,
},
{
    0x5051f4a7U, 0x537e4165U, 0xc31a17a4U, 0x963a275eU,
    0xcb3bab6bU, 0xf11f9d45U, 0xabacfa58U, 0x934be303U,
    0x552030faU, 0xf6ad766dU, 0x9188cc76U, 0x25f5024cU,
    0xfc4fe5d7U, 0xd7c52acbU, 0x80263544U, 0x8fb562a3U,
    0x49deb15aU, 0x6725ba1bU, 0x9845ea0eU, 0xe15dfec0U,
    0x02c32f75U, 0x12814cf0U, 0xa38d4697U, 0xc66bd3f9U,
    0xe7038f5fU, 0x9515929cU, 0xebbf6d7aU, 0xda955259U,
    0x2dd4be83U, 0xd3587421U, 0x2949e069U, 0x448ec9c8U,
    0x6a75c289U, 0x78f48e79U, 0x6b99583eU, 0xdd27b971U,
    0xb6bee14fU, 0x17f088adU, 0x66c920acU, 0xb47dce3aU,
    0x1863df4aU, 0x82e51a31U, 0x60975133U, 0x4562537fU,
    0xe0b16477U, 0x84bb6baeU, 0x1cfe81a0U, 0x94f9082bU,
    0x58704868U, 0x198f45fdU, 0x8794de6cU, 0xb7527bf8U,
    0x23ab73d3U, 0xe2724b02U, 0x57e31f8fU, 0x2a6655abU,
    0x07b2eb28U, 0x032fb5c2U, 0x9a86c57bU, 0xa5d33708U,
    0xf2302887U, 0xb223bfa5U, 0xba02036aU, 0x5ced1682U,
    0x2b8acf1cU, 0x92a779b4U, 0xf0f307f2U, 0xa14e69e2U,
    0xcd65daf4U, 0xd50605beU, 0x1fd13462U, 0x8ac4a6feU,
    0x9d342e53U, 0xa0a2f355U, 0x32058ae1U, 0x75a4f6ebU,
    0x390b83ecU, 0xaa4060efU, 0x065e719fU, 0x51bd6e10U,
    0xf93e218aU, 0x3d96dd06U, 0xaedd3e05U, 0x464de6bdU,
    0xb591548dU, 0x0571c45dU, 0x6f0406d4U, 0xff605015U,
    0x241998fbU, 0x97d6bde9U, 0xcc894043U, 0x7767d99eU,
    0xbdb0e842U, 0x8807898bU, 0x38e7195bU, 0xdb79c8eeU,
    0x47a17c0aU, 0xe97c420fU, 0xc9f8841eU, 0x00000000U,
    0x83098086U, 0x48322bedU, 0xac1e1170U, 0x4e6c5a72U,
    0xfbfd0effU, 0x560f8538U, 0x1e3daed5U, 0x27362d39U,
    0x640a0fd9U, 0x21685ca6U, 0xd19b5b54U, 0x3a24362eU,
    0xb10c0a67U, 0x0f9357e7U, 0xd2b4ee96U, 0x9e1b9b91U,
    0x4f80c0c5U, 0xa261dc20U, 0x695a774bU, 0x161c121aU,
    0x0ae293baU, 0xe5c0a02aU, 0x433c22e0U, 0x1d121b17U,
    0x0b0e090dU, 0xadf28bc7U, 0xb92db6a8U, 0xc8141ea9U,
    0x8557f119U, 0x4caf7507U, 0xbbee99ddU, 0xfda37f60U,
    0x9ff70126U, 0xbc5c72f5U, 0xc544663bU, 0x345bfb7eU,
    0x768b4329U, 0xdccb23c6U, 0x68b6edfcU, 0x63b8e4f1U,
    0xcad731dcU, 0x10426385U, 0x40139722U, 0x2084c611U,
    0x7d854a24U, 0xf8d2bb3dU, 0x11aef932U, 0x6dc729a1U,
    0x4b1d9e2fU, 0xf3dcb230U, 0xec0d8652U, 0xd077c1e3U,
    0x6c2bb316U, 0x99a970b9U, 0xfa119448U, 0x2247e964U,
    0xc4a8fc8cU, 0x1aa0f03fU, 0xd8567d2cU, 0xef223390U,
    0xc787494eU, 0xc1d938d1U, 0xfe8ccaa2U, 0x3698d40bU,
    0xcfa6f581U, 0x28a57adeU, 0x26dab78eU, 0xa43fadbfU,
    0xe42c3a9dU, 0x0d507892U, 0x9b6a5fccU, 0x62547e46U,
    0xc2f68d13U, 0xe890d8b8U, 0x5e2e39f7U, 0xf582c3afU,
    0xbe9f5d80U, 0x7c69d093U, 0xa96fd52dU, 0xb3cf2512U,
    0x3bc8ac99U, 0xa710187dU, 0x6ee89c63U, 0x7bdb3bbbU,
    0x09cd2678U, 0xf46e5918U, 0x01ec9ab7U, 0xa8834f9aU,
    0x65e6956eU, 0x7eaaffe6U, 0x0821bccfU, 0xe6ef15e8U,
    0xd9bae79bU, 0xce4a6f36U, 0xd4ea9f09U, 0xd629b07cU,
    0xaf31a4b2U, 0x312a3f23U, 0x30c6a594U, 0xc035a266U,
    0x37744ebcU, 0xa6fc82caU, 0xb0e090d0U, 0x1533a7d8U,
    0x4af10498U, 0xf741ecdaU, 0x0e7fcd50U, 0x2f1791f6U,
    0x8d764dd6U, 0x4d43efb0U, 0x54ccaa4dU, 0xdfe49604U,
    0xe39ed1b5U, 0x1b4c6a88U, 0xb8c12c1fU, 0x7f466551U,
    0x049d5eeaU, 0x5d018c35U, 0x73fa8774U, 0x2efb0b41U,
    0x5ab3671dU, 0x5292dbd2U, 0x33e91056U, 0x136dd647U,
    0x8c9ad761U, 0x7a37a10cU, 0x8e59f814U, 0x89eb133cU,
    0xeecea927U, 0x35b761c9U, 0xede11ce5U, 0x3c7a47b1U,
    0x599cd2dfU, 0x3f55f273U, 0x791814ceU, 0xbf73c737U,
    0xea53f7cdU, 0x5b5ffdaaU, 0x14df3d6fU, 0x867844dbU,
    0x81caaff3U, 0x3eb968c4U, 0x2c382434U, 0x5fc2a340U,
    0x72161dc3U, 0x0cbce225U, 0x8b283c49U, 0x41ff0d95U,
    0x7139a801U, 0xde080cb3U, 0x9cd8b4e4U, 0x906456c1U,
    0x617bcb84U, 0x70d532b6U, 0x74486c5cU, 0x42d0b857U,
},
{
    0xa75051f4U, 0x65537e41U, 0xa4c31a17U, 0x5e963a27U,
    0x6bcb3babU, 0x45f11f9dU, 0x58abacfaU, 0x03934be3U,
    0xfa552030U, 0x6df6ad76U, 0x769188ccU, 0x4c25f502U,
    0xd7fc4fe5U, 0xcbd7c52aU, 0x44802635U, 0xa38fb562U,
    0x5a49deb1U, 0x1b6725baU, 0x0e9845eaU, 0xc0e15dfeU,
    0x7502c32fU, 0xf012814cU, 0x97a38d46U, 0xf9c66bd3U,
    0x5fe7038fU, 0x9c951592U, 0x7aebbf6dU, 0x59da9552U,
    0x832dd4beU, 0x21d35874U, 0x692949e0U, 0xc8448ec9U,
    0x896a75c2U, 0x7978f48eU, 0x3e6b9958U, 0x71dd27b9U,
    0x4fb6bee1U, 0xad17f088U, 0xac66c920U, 0x3ab47dceU,
    0x4a1863dfU, 0x3182e51aU, 0x33609751U, 0x7f456253U,
    0x77e0b164U, 0xae84bb6bU, 0xa01cfe81U, 0x2b94f908U,
    0x68587048U, 0xfd198f45U, 0x6c8794deU, 0xf8b7527bU,
    0xd323ab73U, 0x02e2724bU, 0x8f57e31fU, 0xab2a6655U,
    0x2807b2ebU, 0xc2032fb5U, 0x7b9a86c5U, 0x08a5d337U,
    0x87f23028U, 0xa5b223bfU, 0x6aba0203U, 0x825ced16U,
    0x1c2b8acfU, 0xb492a779U, 0xf2f0f307U, 0xe2a14e69U,
    0xf4cd65daU, 0xbed50605U, 0x621fd134U, 0xfe8ac4a6U,
    0x539d342eU, 0x55a0a2f3U, 0xe132058aU, 0xeb75a4f6U,
    0xec390b83U, 0xefaa4060U, 0x9f065e71U, 0x1051bd6eU,
    0x8af93e21U, 0x063d96ddU, 0x05aedd3eU, 0xbd464de6U,
    0x8db59154U, 0x5d0571c4U, 0xd46f0406U, 0x15ff6050U,
    0xfb241998U, 0xe997d6bdU, 0x43cc8940U, 0x9e7767d9U,
    0x42bdb0e8U, 0x8b880789U, 0x5b38e719U, 0xeedb79c8U,
    0x0a47a17cU, 0x0fe97c42U, 0x1ec9f884U, 0x00000000U,
    0x86830980U, 0xed48322bU, 0x70ac1e11U, 0x724e6c5aU,
    0xfffbfd0eU, 0x38560f85U, 0xd51e3daeU, 0x3927362dU,
    0xd9640a0fU, 0xa621685cU, 0x54d19b5bU, 0x2e3a2436U,
    0x67b10c0aU, 0xe70f9357U, 0x96d2b4eeU, 0x919e1b9bU,
    0xc54f80c0U, 0x20a261dcU, 0x4b695a77U, 0x1a161c12U,
    0xba0ae293U, 0x2ae5c0a0U, 0xe0433c22U, 0x171d121bU,
    0x0d0b0e09U, 0xc7adf28bU, 0xa8b92db6U, 0xa9c8141eU,
    0x198557f1U, 0x074caf75U, 0xddbbee99U, 0x60fda37fU,
    0x269ff701U, 0xf5bc5c72U, 0x3bc54466U, 0x7e345bfbU,
    0x29768b43U, 0xc6dccb23U, 0xfc68b6edU, 0xf163b8e4U,
    0xdccad731U, 0x85104263U, 0x22401397U, 0x112084c6U,
    0x247d854aU, 0x3df8d2bbU, 0x3211aef9U, 0xa16dc729U,
    0x2f4b1d9eU, 0x30f3dcb2U, 0x52ec0d86U, 0xe3d077c1U,
    0x166c2bb3U, 0xb999a970U, 0x48fa1194U, 0x642247e9U,
    0x8cc4a8fcU, 0x3f1aa0f0U, 0x2cd8567dU, 0x90ef2233U,
    0x4ec78749U, 0xd1c1d938U, 0xa2fe8ccaU, 0x0b3698d4U,
    0x81cfa6f5U, 0xde28a57aU, 0x8e26dab7U, 0xbfa43fadU,
    0x9de42c3aU, 0x920d5078U, 0xcc9b6a5fU, 0x4662547eU,
    0x13c2f68dU, 0xb8e890d8U, 0xf75e2e39U, 0xaff582c3U,
    0x80be9f5dU, 0x937c69d0U, 0x2da96fd5U, 0x12b3cf25U,
    0x993bc8acU, 0x7da71018U, 0x636ee89cU, 0xbb7bdb3bU,
    0x7809cd26U, 0x18f46e59U, 0xb701ec9aU, 0x9aa8834fU,
    0x6e65e695U, 0xe67eaaffU, 0xcf0821bcU, 0xe8e6ef15U,
    0x9bd9bae7U, 0x36ce4a6fU, 0x09d4ea9fU, 0x7cd629b0U,
    0xb2af31a4U, 0x23312a3fU, 0x9430c6a5U, 0x66c035a2U,
    0xbc37744eU, 0xcaa6fc82U, 0xd0b0e090U, 0xd81533a7U,
    0x984af104U, 0xdaf741ecU, 0x500e7fcdU, 0xf62f1791U,
    0xd68d764dU, 0xb04d43efU, 0x4d54ccaaU, 0x04dfe496U,
    0xb5e39ed1U, 0x881b4c6aU, 0x1fb8c12cU, 0x517f4665U,
    0xea049d5eU, 0x355d018cU, 0x7473fa87U, 0x412efb0bU,
    0x1d5ab367U, 0xd25292dbU, 0x5633e910U, 0x47136dd6U,
    0x618c9ad7U, 0x0c7a37a1U, 0x148e59f8U, 0x3c89eb13U,
    0x27eecea9U, 0xc935b761U, 0xe5ede11cU, 0xb13c7a47U,
    0xdf599cd2U, 0x733f55f2U, 0xce791814U, 0x37bf73c7U,
    0xcdea53f7U, 0xaa5b5ffdU, 0x6f14df3dU, 0xdb867844U,
    0xf381caafU, 0xc43eb968U, 0x342c3824U, 0x405fc2a3U,
    0xc372161dU, 0x250cbce2U, 0x498b283cU, 0x9541ff0dU,
    0x017139a8U, 0xb3de080cU, 0xe49cd8b4U, 0xc1906456U,
    0x84617bcbU, 0xb670d532U, 0x5c74486cU, 0x5742d0b8U,
},
{
    0xf4a75051U, 0x4165537eU, 0x17a4c31aU, 0x275e963aU,
    0xab6bcb3bU, 0x9d45f11fU, 0xfa58abacU, 0xe303934bU,
    0x30fa5520U, 0x766df6adU, 0xcc769188U, 0x024c25f5U,
    0xe5d7fc4fU, 0x2acbd7c5U, 0x35448026U, 0x62a38fb5U,
    0xb15a49deU, 0xba1b6725U, 0xea0e9845U, 0xfec0e15dU,
    0x2f7502c3U, 0x4cf01281U, 0x4697a38dU, 0xd3f9c66bU,
    0x8f5fe703U, 0x929c9515U, 0x6d7aebbfU, 0x5259da95U,
    0xbe832dd4U, 0x7421d358U, 0xe0692949U, 0xc9c8448eU,
    0xc2896a75U, 0x8e7978f4U, 0x583e6b99U, 0xb971dd27U,
    0xe14fb6beU, 0x88ad17f0U, 0x20ac66c9U, 0xce3ab47dU,
    0xdf4a1863U, 0x1a3182e5U, 0x51336097U, 0x537f4562U,
    0x6477e0b1U, 0x6bae84bbU, 0x81a01cfeU, 0x082b94f9U,
    0x48685870U, 0x45fd198fU, 0xde6c8794U, 0x7bf8b752U,
    0x73d323abU, 0x4b02e272U, 0x1f8f57e3U, 0x55ab2a66U,
    0xeb2807b2U, 0xb5c2032fU, 0xc57b9a86U, 0x3708a5d3U,
    0x2887f230U, 0xbfa5b223U, 0x036aba02U, 0x16825cedU,
    0xcf1c2b8aU, 0x79b492a7U, 0x07f2f0f3U, 0x69e2a14eU,
    0xdaf4cd65U, 0x05bed506U, 0x34621fd1U, 0xa6fe8ac4U,
    0x2e539d34U, 0xf355a0a2U, 0x8ae13205U, 0xf6eb75a4U,
    0x83ec390bU, 0x60efaa40U, 0x719f065eU, 0x6e1051bdU,
    0x218af93eU, 0xdd063d96U, 0x3e05aeddU, 0xe6bd464dU,
    0x548db591U, 0xc45d0571U, 0x06d46f04U, 0x5015ff60U,
    0x98fb2419U, 0xbde997d6U, 0x4043cc89U, 0xd99e7767U,
    0xe842bdb0U, 0x898b8807U, 0x195b38e7U, 0xc8eedb79U,
    0x7c0a47a1U, 0x420fe97cU, 0x841ec9f8U, 0x00000000U,
    0x80868309U, 0x2bed4832U, 0x1170ac1eU, 0x5a724e6cU,
    0x0efffbfdU, 0x8538560fU, 0xaed51e3dU, 0x2d392736U,
    0x0fd9640aU, 0x5ca62168U, 0x5b54d19bU, 0x362e3a24U,
    0x0a67b10cU, 0x57e70f93U, 0xee96d2b4U, 0x9b919e1bU,
    0xc0c54f80U, 0xdc20a261U, 0x774b695aU, 0x121a161cU,
    0x93ba0ae2U, 0xa02ae5c0U, 0x22e0433cU, 0x1b171d12U,
    0x090d0b0eU, 0x8bc7adf2U, 0xb6a8b92dU, 0x1ea9c814U,
    0xf1198557U, 0x75074cafU, 0x99ddbbeeU, 0x7f60fda3U,
    0x01269ff7U, 0x72f5bc5cU, 0x663bc544U, 0xfb7e345bU,
    0x4329768bU, 0x23c6dccbU, 0xedfc68b6U, 0xe4f163b8U,
    0x31dccad7U, 0x63851042U, 0x97224013U, 0xc6112084U,
    0x4a247d85U, 0xbb3df8d2U, 0xf93211aeU, 0x29a16dc7U,
    0x9e2f4b1dU, 0xb230f3dcU, 0x8652ec0dU, 0xc1e3d077U,
    0xb3166c2bU, 0x70b999a9U, 0x9448fa11U, 0xe9642247U,
    0xfc8cc4a8U, 0xf03f1aa0U, 0x7d2cd856U, 0x3390ef22U,
    0x494ec787U, 0x38d1c1d9U, 0xcaa2fe8cU, 0xd40b3698U,
    0xf581cfa6U, 0x7ade28a5U, 0xb78e26daU, 0xadbfa43fU,
    0x3a9de42cU, 0x78920d50U, 0x5fcc9b6aU, 0x7e466254U,
    0x8d13c2f6U, 0xd8b8e890U, 0x39f75e2eU, 0xc3aff582U,
    0x5d80be9fU, 0xd0937c69U, 0xd52da96fU, 0x2512b3cfU,
    0xac993bc8U, 0x187da710U, 0x9c636ee8U, 0x3bbb7bdbU,
    0x267809cdU, 0x5918f46eU, 0x9ab701ecU, 0x4f9aa883U,
    0x956e65e6U, 0xffe67eaaU, 0xbccf0821U, 0x15e8e6efU,
    0xe79bd9baU, 0x6f36ce4aU, 0x9f09d4eaU, 0xb07cd629U,
    0xa4b2af31U, 0x3f23312aU, 0xa59430c6U, 0xa266c035U,
    0x4ebc3774U, 0x82caa6fcU, 0x90d0b0e0U, 0xa7d81533U,
    0x04984af1U, 0xecdaf741U, 0xcd500e7fU, 0x91f62f17U,
    0x4dd68d76U, 0xefb04d43U, 0xaa4d54ccU, 0x9604dfe4U,
    0xd1b5e39eU, 0x6a881b4cU, 0x2c1fb8c1U, 0x65517f46U,
    0x5eea049dU, 0x8c355d01U, 0x877473faU, 0x0b412efbU,
    0x671d5ab3U, 0xdbd25292U, 0x105633e9U, 0xd647136dU,
    0xd7618c9aU, 0xa10c7a37U, 0xf8148e59U, 0x133c89ebU,
    0xa927eeceU, 0x61c935b7U, 0x1ce5ede1U, 0x47b13c7aU,
    0xd2df599cU, 0xf2733f55U, 0x14ce7918U, 0xc737bf73U,
    0xf7cdea53U, 0xfdaa5b5fU, 0x3d6f14dfU, 0x44db8678U,
    0xaff381caU, 0x68c43eb9U, 0x24342c38U, 0xa3405fc2U,
    0x1dc37216U, 0xe2250cbcU, 0x3c498b28U, 0x0d9541ffU,
    0xa8017139U, 0x0cb3de08U, 0xb4e49cd8U, 0x56c19064U,
    0xcb84617bU, 0x32b670d5U, 0x6c5c7448U, 0xb85742d0U,
}
};
  1430. #endif /* HAVE_AES_DECRYPT */
  1431. #endif /* WOLFSSL_AES_SMALL_TABLES */
  1432. #ifdef HAVE_AES_DECRYPT
  1433. #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC)) \
  1434. || defined(WOLFSSL_AES_DIRECT)
/* Inverse S-box (InvSubBytes) as plain bytes; used for the final
 * decryption round, which applies no InvMixColumns. */
static const FLASH_QUALIFIER byte Td4[256] =
{
    0x52U, 0x09U, 0x6aU, 0xd5U, 0x30U, 0x36U, 0xa5U, 0x38U,
    0xbfU, 0x40U, 0xa3U, 0x9eU, 0x81U, 0xf3U, 0xd7U, 0xfbU,
    0x7cU, 0xe3U, 0x39U, 0x82U, 0x9bU, 0x2fU, 0xffU, 0x87U,
    0x34U, 0x8eU, 0x43U, 0x44U, 0xc4U, 0xdeU, 0xe9U, 0xcbU,
    0x54U, 0x7bU, 0x94U, 0x32U, 0xa6U, 0xc2U, 0x23U, 0x3dU,
    0xeeU, 0x4cU, 0x95U, 0x0bU, 0x42U, 0xfaU, 0xc3U, 0x4eU,
    0x08U, 0x2eU, 0xa1U, 0x66U, 0x28U, 0xd9U, 0x24U, 0xb2U,
    0x76U, 0x5bU, 0xa2U, 0x49U, 0x6dU, 0x8bU, 0xd1U, 0x25U,
    0x72U, 0xf8U, 0xf6U, 0x64U, 0x86U, 0x68U, 0x98U, 0x16U,
    0xd4U, 0xa4U, 0x5cU, 0xccU, 0x5dU, 0x65U, 0xb6U, 0x92U,
    0x6cU, 0x70U, 0x48U, 0x50U, 0xfdU, 0xedU, 0xb9U, 0xdaU,
    0x5eU, 0x15U, 0x46U, 0x57U, 0xa7U, 0x8dU, 0x9dU, 0x84U,
    0x90U, 0xd8U, 0xabU, 0x00U, 0x8cU, 0xbcU, 0xd3U, 0x0aU,
    0xf7U, 0xe4U, 0x58U, 0x05U, 0xb8U, 0xb3U, 0x45U, 0x06U,
    0xd0U, 0x2cU, 0x1eU, 0x8fU, 0xcaU, 0x3fU, 0x0fU, 0x02U,
    0xc1U, 0xafU, 0xbdU, 0x03U, 0x01U, 0x13U, 0x8aU, 0x6bU,
    0x3aU, 0x91U, 0x11U, 0x41U, 0x4fU, 0x67U, 0xdcU, 0xeaU,
    0x97U, 0xf2U, 0xcfU, 0xceU, 0xf0U, 0xb4U, 0xe6U, 0x73U,
    0x96U, 0xacU, 0x74U, 0x22U, 0xe7U, 0xadU, 0x35U, 0x85U,
    0xe2U, 0xf9U, 0x37U, 0xe8U, 0x1cU, 0x75U, 0xdfU, 0x6eU,
    0x47U, 0xf1U, 0x1aU, 0x71U, 0x1dU, 0x29U, 0xc5U, 0x89U,
    0x6fU, 0xb7U, 0x62U, 0x0eU, 0xaaU, 0x18U, 0xbeU, 0x1bU,
    0xfcU, 0x56U, 0x3eU, 0x4bU, 0xc6U, 0xd2U, 0x79U, 0x20U,
    0x9aU, 0xdbU, 0xc0U, 0xfeU, 0x78U, 0xcdU, 0x5aU, 0xf4U,
    0x1fU, 0xddU, 0xa8U, 0x33U, 0x88U, 0x07U, 0xc7U, 0x31U,
    0xb1U, 0x12U, 0x10U, 0x59U, 0x27U, 0x80U, 0xecU, 0x5fU,
    0x60U, 0x51U, 0x7fU, 0xa9U, 0x19U, 0xb5U, 0x4aU, 0x0dU,
    0x2dU, 0xe5U, 0x7aU, 0x9fU, 0x93U, 0xc9U, 0x9cU, 0xefU,
    0xa0U, 0xe0U, 0x3bU, 0x4dU, 0xaeU, 0x2aU, 0xf5U, 0xb0U,
    0xc8U, 0xebU, 0xbbU, 0x3cU, 0x83U, 0x53U, 0x99U, 0x61U,
    0x17U, 0x2bU, 0x04U, 0x7eU, 0xbaU, 0x77U, 0xd6U, 0x26U,
    0xe1U, 0x69U, 0x14U, 0x63U, 0x55U, 0x21U, 0x0cU, 0x7dU,
};
  1470. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
  1471. #endif /* HAVE_AES_DECRYPT */
/* Extract byte y of word x (y == 0 is the least-significant byte),
 * widened back to word32. */
#define GETBYTE(x, y) (word32)((byte)((x) >> (8 * (y))))
  1473. #ifdef WOLFSSL_AES_SMALL_TABLES
/* Forward S-box (SubBytes) bytes for the small-tables build, where the
 * combined Te tables are not compiled in. */
static const byte Tsbox[256] = {
    0x63U, 0x7cU, 0x77U, 0x7bU, 0xf2U, 0x6bU, 0x6fU, 0xc5U,
    0x30U, 0x01U, 0x67U, 0x2bU, 0xfeU, 0xd7U, 0xabU, 0x76U,
    0xcaU, 0x82U, 0xc9U, 0x7dU, 0xfaU, 0x59U, 0x47U, 0xf0U,
    0xadU, 0xd4U, 0xa2U, 0xafU, 0x9cU, 0xa4U, 0x72U, 0xc0U,
    0xb7U, 0xfdU, 0x93U, 0x26U, 0x36U, 0x3fU, 0xf7U, 0xccU,
    0x34U, 0xa5U, 0xe5U, 0xf1U, 0x71U, 0xd8U, 0x31U, 0x15U,
    0x04U, 0xc7U, 0x23U, 0xc3U, 0x18U, 0x96U, 0x05U, 0x9aU,
    0x07U, 0x12U, 0x80U, 0xe2U, 0xebU, 0x27U, 0xb2U, 0x75U,
    0x09U, 0x83U, 0x2cU, 0x1aU, 0x1bU, 0x6eU, 0x5aU, 0xa0U,
    0x52U, 0x3bU, 0xd6U, 0xb3U, 0x29U, 0xe3U, 0x2fU, 0x84U,
    0x53U, 0xd1U, 0x00U, 0xedU, 0x20U, 0xfcU, 0xb1U, 0x5bU,
    0x6aU, 0xcbU, 0xbeU, 0x39U, 0x4aU, 0x4cU, 0x58U, 0xcfU,
    0xd0U, 0xefU, 0xaaU, 0xfbU, 0x43U, 0x4dU, 0x33U, 0x85U,
    0x45U, 0xf9U, 0x02U, 0x7fU, 0x50U, 0x3cU, 0x9fU, 0xa8U,
    0x51U, 0xa3U, 0x40U, 0x8fU, 0x92U, 0x9dU, 0x38U, 0xf5U,
    0xbcU, 0xb6U, 0xdaU, 0x21U, 0x10U, 0xffU, 0xf3U, 0xd2U,
    0xcdU, 0x0cU, 0x13U, 0xecU, 0x5fU, 0x97U, 0x44U, 0x17U,
    0xc4U, 0xa7U, 0x7eU, 0x3dU, 0x64U, 0x5dU, 0x19U, 0x73U,
    0x60U, 0x81U, 0x4fU, 0xdcU, 0x22U, 0x2aU, 0x90U, 0x88U,
    0x46U, 0xeeU, 0xb8U, 0x14U, 0xdeU, 0x5eU, 0x0bU, 0xdbU,
    0xe0U, 0x32U, 0x3aU, 0x0aU, 0x49U, 0x06U, 0x24U, 0x5cU,
    0xc2U, 0xd3U, 0xacU, 0x62U, 0x91U, 0x95U, 0xe4U, 0x79U,
    0xe7U, 0xc8U, 0x37U, 0x6dU, 0x8dU, 0xd5U, 0x4eU, 0xa9U,
    0x6cU, 0x56U, 0xf4U, 0xeaU, 0x65U, 0x7aU, 0xaeU, 0x08U,
    0xbaU, 0x78U, 0x25U, 0x2eU, 0x1cU, 0xa6U, 0xb4U, 0xc6U,
    0xe8U, 0xddU, 0x74U, 0x1fU, 0x4bU, 0xbdU, 0x8bU, 0x8aU,
    0x70U, 0x3eU, 0xb5U, 0x66U, 0x48U, 0x03U, 0xf6U, 0x0eU,
    0x61U, 0x35U, 0x57U, 0xb9U, 0x86U, 0xc1U, 0x1dU, 0x9eU,
    0xe1U, 0xf8U, 0x98U, 0x11U, 0x69U, 0xd9U, 0x8eU, 0x94U,
    0x9bU, 0x1eU, 0x87U, 0xe9U, 0xceU, 0x55U, 0x28U, 0xdfU,
    0x8cU, 0xa1U, 0x89U, 0x0dU, 0xbfU, 0xe6U, 0x42U, 0x68U,
    0x41U, 0x99U, 0x2dU, 0x0fU, 0xb0U, 0x54U, 0xbbU, 0x16U
};
/* GF(2^8) multiply-by-2 ("xtime"): shift left and, branch-free, reduce by
 * the AES polynomial (0x1b) when the top bit of x was set. */
#define AES_XTIME(x) ((byte)((byte)((x) << 1) ^ ((0 - ((x) >> 7)) & 0x1b)))
  1509. static WARN_UNUSED_RESULT word32 col_mul(
  1510. word32 t, int i2, int i3, int ia, int ib)
  1511. {
  1512. byte t3 = GETBYTE(t, i3);
  1513. byte tm = AES_XTIME(GETBYTE(t, i2) ^ t3);
  1514. return GETBYTE(t, ia) ^ GETBYTE(t, ib) ^ t3 ^ tm;
  1515. }
  1516. #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)
  1517. static WARN_UNUSED_RESULT word32 inv_col_mul(
  1518. word32 t, int i9, int ib, int id, int ie)
  1519. {
  1520. byte t9 = GETBYTE(t, i9);
  1521. byte tb = GETBYTE(t, ib);
  1522. byte td = GETBYTE(t, id);
  1523. byte te = GETBYTE(t, ie);
  1524. byte t0 = t9 ^ tb ^ td;
  1525. return t0 ^ AES_XTIME(AES_XTIME(AES_XTIME(t0 ^ te) ^ td ^ te) ^ tb ^ te);
  1526. }
  1527. #endif
  1528. #endif
  1529. #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) || \
  1530. defined(HAVE_AESCCM) || defined(HAVE_AESGCM)
  1531. #ifndef WC_CACHE_LINE_SZ
  1532. #if defined(__x86_64__) || defined(_M_X64) || \
  1533. (defined(__ILP32__) && (__ILP32__ >= 1))
  1534. #define WC_CACHE_LINE_SZ 64
  1535. #else
  1536. /* default cache line size */
  1537. #define WC_CACHE_LINE_SZ 32
  1538. #endif
  1539. #endif
  1540. #ifndef WC_NO_CACHE_RESISTANT
  1541. #if defined(__riscv) && !defined(WOLFSSL_AES_TOUCH_LINES)
  1542. #define WOLFSSL_AES_TOUCH_LINES
  1543. #endif
  1544. #ifndef WOLFSSL_AES_SMALL_TABLES
  1545. /* load 4 Te Tables into cache by cache line stride */
  1546. static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTe(void)
  1547. {
  1548. #ifndef WOLFSSL_AES_TOUCH_LINES
  1549. word32 x = 0;
  1550. int i,j;
  1551. for (i = 0; i < 4; i++) {
  1552. /* 256 elements, each one is 4 bytes */
  1553. for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
  1554. x &= Te[i][j];
  1555. }
  1556. }
  1557. return x;
  1558. #else
  1559. return 0;
  1560. #endif
  1561. }
  1562. #else
  1563. /* load sbox into cache by cache line stride */
  1564. static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchSBox(void)
  1565. {
  1566. #ifndef WOLFSSL_AES_TOUCH_LINES
  1567. word32 x = 0;
  1568. int i;
  1569. for (i = 0; i < 256; i += WC_CACHE_LINE_SZ/4) {
  1570. x &= Tsbox[i];
  1571. }
  1572. return x;
  1573. #else
  1574. return 0;
  1575. #endif
  1576. }
  1577. #endif
  1578. #endif
  1579. #ifdef WOLFSSL_AES_TOUCH_LINES
  1580. #if WC_CACHE_LINE_SZ == 128
  1581. #define WC_CACHE_LINE_BITS 5
  1582. #define WC_CACHE_LINE_MASK_HI 0xe0
  1583. #define WC_CACHE_LINE_MASK_LO 0x1f
  1584. #define WC_CACHE_LINE_ADD 0x20
  1585. #elif WC_CACHE_LINE_SZ == 64
  1586. #define WC_CACHE_LINE_BITS 4
  1587. #define WC_CACHE_LINE_MASK_HI 0xf0
  1588. #define WC_CACHE_LINE_MASK_LO 0x0f
  1589. #define WC_CACHE_LINE_ADD 0x10
  1590. #elif WC_CACHE_LINE_SZ == 32
  1591. #define WC_CACHE_LINE_BITS 3
  1592. #define WC_CACHE_LINE_MASK_HI 0xf8
  1593. #define WC_CACHE_LINE_MASK_LO 0x07
  1594. #define WC_CACHE_LINE_ADD 0x08
  1595. #elif WC_CACHE_LINE_SZ = 16
  1596. #define WC_CACHE_LINE_BITS 2
  1597. #define WC_CACHE_LINE_MASK_HI 0xfc
  1598. #define WC_CACHE_LINE_MASK_LO 0x03
  1599. #define WC_CACHE_LINE_ADD 0x04
  1600. #else
  1601. #error Cache line size not supported
  1602. #endif
  1603. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Cache-attack resistant look-up of t[o] from a 256-word table: one entry
 * is read from every cache line on every call and all but the requested
 * entry are masked off, so the memory access pattern is independent of
 * the (secret) index o. */
static word32 GetTable(const word32* t, byte o)
{
#if WC_CACHE_LINE_SZ == 64
    word32 e;
    byte hi = o & 0xf0;   /* cache-line number of the wanted entry */
    byte lo = o & 0x0f;   /* offset of the wanted entry within its line */
    /* (((word32)hi - 0x01) >> 31) is 1 only while hi == 0; hi is
     * decremented by one line per row, so exactly one row's mask is
     * all-ones and every other row contributes 0. */
    e = t[lo + 0x00] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x10] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x20] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x30] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x40] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x50] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x60] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x70] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x80] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x90] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xa0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xb0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xc0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xd0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xe0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xf0] & ((word32)0 - (((word32)hi - 0x01) >> 31));
    return e;
#else
    /* generic version: same masking scheme, looped over the table with
     * the configured cache-line stride */
    word32 e = 0;
    int i;
    byte hi = o & WC_CACHE_LINE_MASK_HI;
    byte lo = o & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= t[lo + i] & ((word32)0 - (((word32)hi - 0x01) >> 31));
        hi -= WC_CACHE_LINE_ADD;
    }
    return e;
#endif
}
  1639. #endif
  1640. #ifdef WOLFSSL_AES_SMALL_TABLES
/* Cache-attack resistant look-up of t[o] from a 256-byte table (e.g.
 * Tsbox): touches a fixed set of positions regardless of the secret
 * index o and masks in only the requested entry. */
static byte GetTable8(const byte* t, byte o)
{
#if WC_CACHE_LINE_SZ == 64
    byte e;
    byte hi = o & 0xf0;   /* selects which of the 16 rows holds t[o] */
    byte lo = o & 0x0f;   /* offset within that row */
    /* mask is all-ones only on the row where hi has counted down to 0 */
    e = t[lo + 0x00] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x10] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x20] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x30] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x40] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x50] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x60] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x70] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x80] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x90] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xa0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xb0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xc0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xd0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xe0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xf0] & ((word32)0 - (((word32)hi - 0x01) >> 31));
    return e;
#else
    /* generic version using the configured cache-line stride */
    byte e = 0;
    int i;
    byte hi = o & WC_CACHE_LINE_MASK_HI;
    byte lo = o & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= t[lo + i] & ((word32)0 - (((word32)hi - 0x01) >> 31));
        hi -= WC_CACHE_LINE_ADD;
    }
    return e;
#endif
}
  1676. #endif
  1677. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Cache-attack resistant look-up of four entries t[o0..o3] from the same
 * 256-word table, written to *t0..*t3. Every cache line of the table is
 * read once per iteration of the single shared loop, so the access
 * pattern is independent of the four (secret) indices. */
static void GetTable_Multi(const word32* t, word32* t0, byte o0,
    word32* t1, byte o1, word32* t2, byte o2, word32* t3, byte o3)
{
    word32 e0 = 0;
    word32 e1 = 0;
    word32 e2 = 0;
    word32 e3 = 0;
    /* split each index into cache-line number (hi) and in-line offset (lo) */
    byte hi0 = o0 & WC_CACHE_LINE_MASK_HI;
    byte lo0 = o0 & WC_CACHE_LINE_MASK_LO;
    byte hi1 = o1 & WC_CACHE_LINE_MASK_HI;
    byte lo1 = o1 & WC_CACHE_LINE_MASK_LO;
    byte hi2 = o2 & WC_CACHE_LINE_MASK_HI;
    byte lo2 = o2 & WC_CACHE_LINE_MASK_LO;
    byte hi3 = o3 & WC_CACHE_LINE_MASK_HI;
    byte lo3 = o3 & WC_CACHE_LINE_MASK_LO;
    int i;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        /* each mask is all-ones only on the line where hiN reaches 0 */
        e0 |= t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31));
        hi0 -= WC_CACHE_LINE_ADD;
        e1 |= t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31));
        hi1 -= WC_CACHE_LINE_ADD;
        e2 |= t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31));
        hi2 -= WC_CACHE_LINE_ADD;
        e3 |= t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31));
        hi3 -= WC_CACHE_LINE_ADD;
    }
    *t0 = e0;
    *t1 = e1;
    *t2 = e2;
    *t3 = e3;
}
  1709. static void XorTable_Multi(const word32* t, word32* t0, byte o0,
  1710. word32* t1, byte o1, word32* t2, byte o2, word32* t3, byte o3)
  1711. {
  1712. word32 e0 = 0;
  1713. word32 e1 = 0;
  1714. word32 e2 = 0;
  1715. word32 e3 = 0;
  1716. byte hi0 = o0 & 0xf0;
  1717. byte lo0 = o0 & 0x0f;
  1718. byte hi1 = o1 & 0xf0;
  1719. byte lo1 = o1 & 0x0f;
  1720. byte hi2 = o2 & 0xf0;
  1721. byte lo2 = o2 & 0x0f;
  1722. byte hi3 = o3 & 0xf0;
  1723. byte lo3 = o3 & 0x0f;
  1724. int i;
  1725. for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
  1726. e0 |= t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31));
  1727. hi0 -= WC_CACHE_LINE_ADD;
  1728. e1 |= t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31));
  1729. hi1 -= WC_CACHE_LINE_ADD;
  1730. e2 |= t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31));
  1731. hi2 -= WC_CACHE_LINE_ADD;
  1732. e3 |= t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31));
  1733. hi3 -= WC_CACHE_LINE_ADD;
  1734. }
  1735. *t0 ^= e0;
  1736. *t1 ^= e1;
  1737. *t2 ^= e2;
  1738. *t3 ^= e3;
  1739. }
/* Cache-attack resistant look-up of four bytes t[o0..o3] from the same
 * 256-byte table, packed big-endian into one word32
 * (t[o0] in the top byte, t[o3] in the bottom byte). The shared loop
 * reads a fixed set of positions regardless of the secret indices. */
static word32 GetTable8_4(const byte* t, byte o0, byte o1, byte o2, byte o3)
{
    word32 e = 0;
    int i;
    /* split each index into cache-line number (hi) and in-line offset (lo) */
    byte hi0 = o0 & WC_CACHE_LINE_MASK_HI;
    byte lo0 = o0 & WC_CACHE_LINE_MASK_LO;
    byte hi1 = o1 & WC_CACHE_LINE_MASK_HI;
    byte lo1 = o1 & WC_CACHE_LINE_MASK_LO;
    byte hi2 = o2 & WC_CACHE_LINE_MASK_HI;
    byte lo2 = o2 & WC_CACHE_LINE_MASK_LO;
    byte hi3 = o3 & WC_CACHE_LINE_MASK_HI;
    byte lo3 = o3 & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        /* each mask is all-ones only on the line where hiN reaches 0 */
        e |= (word32)(t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31)))
            << 24;
        hi0 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31)))
            << 16;
        hi1 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31)))
            << 8;
        hi2 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31)))
            << 0;
        hi3 -= WC_CACHE_LINE_ADD;
    }
    return e;
}
  1768. #endif
  1769. #else
  1770. #define GetTable(t, o) t[o]
  1771. #define GetTable8(t, o) t[o]
  1772. #define GetTable_Multi(t, t0, o0, t1, o1, t2, o2, t3, o3) \
  1773. *(t0) = (t)[o0]; *(t1) = (t)[o1]; *(t2) = (t)[o2]; *(t3) = (t)[o3]
  1774. #define XorTable_Multi(t, t0, o0, t1, o1, t2, o2, t3, o3) \
  1775. *(t0) ^= (t)[o0]; *(t1) ^= (t)[o1]; *(t2) ^= (t)[o2]; *(t3) ^= (t)[o3]
  1776. #define GetTable8_4(t, o0, o1, o2, o3) \
  1777. (((word32)(t)[o0] << 24) | ((word32)(t)[o1] << 16) | \
  1778. ((word32)(t)[o2] << 8) | ((word32)(t)[o3] << 0))
  1779. #endif
  1780. /* Software AES - ECB Encrypt */
/* Encrypt one AES_BLOCK_SIZE block with the expanded schedule in aes->key.
 *
 * Hardware back ends (AES-NI, Renesas SCE, NXP DCP, NXP SE050) are tried
 * first when compiled in; otherwise the table-driven software rounds run.
 *
 * aes      - AES object holding expanded round keys and round count
 * inBlock  - 16-byte plaintext block
 * outBlock - 16-byte ciphertext output (may alias inBlock)
 * Returns 0 on success, KEYUSAGE_E for an invalid round count, MEMORY_E /
 * BAD_ALIGN_E from the AES-NI unaligned-input handling.
 */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 s0, s1, s2, s3;        /* cipher state: 4 big-endian columns */
    word32 t0, t1, t2, t3;        /* scratch state for alternating rounds */
    word32 r = aes->rounds >> 1;  /* rounds are processed two at a time */
    const word32* rk = aes->key;

    /* Valid AES uses 10/12/14 rounds, so r must be 5..7 (upper bound of 7
     * also protects the unrolled path below). */
    if (r > 7 || r == 0) {
        WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
        return KEYUSAGE_E;
    }

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
#ifdef DEBUG_AESNI
        printf("about to aes encrypt\n");
        printf("in = %p\n", inBlock);
        printf("out = %p\n", outBlock);
        printf("aes->key = %p\n", aes->key);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", AES_BLOCK_SIZE);
#endif

        /* check alignment, decrypt doesn't need alignment */
        if ((wc_ptr_t)inBlock % AESNI_ALIGN) {
#ifndef NO_WOLFSSL_ALLOC_ALIGN
            /* Bounce through an aligned heap buffer for the SSE loads. */
            byte* tmp = (byte*)XMALLOC(AES_BLOCK_SIZE + AESNI_ALIGN, aes->heap,
                                       DYNAMIC_TYPE_TMP_BUFFER);
            byte* tmp_align;
            if (tmp == NULL)
                return MEMORY_E;

            tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));
            XMEMCPY(tmp_align, inBlock, AES_BLOCK_SIZE);
            AES_ECB_encrypt(tmp_align, tmp_align, AES_BLOCK_SIZE,
                            (byte*)aes->key, (int)aes->rounds);
            XMEMCPY(outBlock, tmp_align, AES_BLOCK_SIZE);
            XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            return 0;
#else
            WOLFSSL_MSG("AES-ECB encrypt with bad alignment");
            WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
            return BAD_ALIGN_E;
#endif
        }

        AES_ECB_encrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
                        (int)aes->rounds);
        return 0;
    }
    else {
#ifdef DEBUG_AESNI
        printf("Skipping AES-NI\n");
#endif
    }
#endif

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    /* Renesas SCE hardware path handles the whole block. */
    AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
    return 0;
#endif

#if defined(WOLFSSL_IMXRT_DCP)
    /* NXP DCP engine only supports AES-128. */
    if (aes->keylen == 16) {
        DCPAesEcbEncrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
        return 0;
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, inBlock, outBlock, AES_BLOCK_SIZE,
                               AES_ENCRYPTION, kAlgorithm_SSS_AES_ECB);
    }
#endif

    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    XMEMCPY(&s0, inBlock,                  sizeof(s0));
    XMEMCPY(&s1, inBlock +     sizeof(s0), sizeof(s1));
    XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
    XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));

#ifdef LITTLE_ENDIAN_ORDER
    /* Tables assume big-endian words; swap on LE targets. */
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    /* AddRoundKey */
    s0 ^= rk[0];
    s1 ^= rk[1];
    s2 ^= rk[2];
    s3 ^= rk[3];

#ifndef WOLFSSL_AES_SMALL_TABLES
#ifndef WC_NO_CACHE_RESISTANT
    /* PreFetchTe() returns 0 but touches every Te cache line; OR-ing it
     * into live state keeps the compiler from removing the loads. */
    s0 |= PreFetchTe();
#endif

#ifndef WOLFSSL_AES_TOUCH_LINES
/* One full round s -> t: four T-table lookups plus round key per column. */
#define ENC_ROUND_T_S(o) \
    t0 = GetTable(Te[0], GETBYTE(s0, 3)) ^ GetTable(Te[1], GETBYTE(s1, 2)) ^ \
         GetTable(Te[2], GETBYTE(s2, 1)) ^ GetTable(Te[3], GETBYTE(s3, 0)) ^ \
         rk[(o)+4]; \
    t1 = GetTable(Te[0], GETBYTE(s1, 3)) ^ GetTable(Te[1], GETBYTE(s2, 2)) ^ \
         GetTable(Te[2], GETBYTE(s3, 1)) ^ GetTable(Te[3], GETBYTE(s0, 0)) ^ \
         rk[(o)+5]; \
    t2 = GetTable(Te[0], GETBYTE(s2, 3)) ^ GetTable(Te[1], GETBYTE(s3, 2)) ^ \
         GetTable(Te[2], GETBYTE(s0, 1)) ^ GetTable(Te[3], GETBYTE(s1, 0)) ^ \
         rk[(o)+6]; \
    t3 = GetTable(Te[0], GETBYTE(s3, 3)) ^ GetTable(Te[1], GETBYTE(s0, 2)) ^ \
         GetTable(Te[2], GETBYTE(s1, 1)) ^ GetTable(Te[3], GETBYTE(s2, 0)) ^ \
         rk[(o)+7]
/* One full round t -> s (states alternate to avoid copies). */
#define ENC_ROUND_S_T(o) \
    s0 = GetTable(Te[0], GETBYTE(t0, 3)) ^ GetTable(Te[1], GETBYTE(t1, 2)) ^ \
         GetTable(Te[2], GETBYTE(t2, 1)) ^ GetTable(Te[3], GETBYTE(t3, 0)) ^ \
         rk[(o)+0]; \
    s1 = GetTable(Te[0], GETBYTE(t1, 3)) ^ GetTable(Te[1], GETBYTE(t2, 2)) ^ \
         GetTable(Te[2], GETBYTE(t3, 1)) ^ GetTable(Te[3], GETBYTE(t0, 0)) ^ \
         rk[(o)+1]; \
    s2 = GetTable(Te[0], GETBYTE(t2, 3)) ^ GetTable(Te[1], GETBYTE(t3, 2)) ^ \
         GetTable(Te[2], GETBYTE(t0, 1)) ^ GetTable(Te[3], GETBYTE(t1, 0)) ^ \
         rk[(o)+2]; \
    s3 = GetTable(Te[0], GETBYTE(t3, 3)) ^ GetTable(Te[1], GETBYTE(t0, 2)) ^ \
         GetTable(Te[2], GETBYTE(t1, 1)) ^ GetTable(Te[3], GETBYTE(t2, 0)) ^ \
         rk[(o)+3]
#else
/* WOLFSSL_AES_TOUCH_LINES variant: gathered multi-loads per table so each
 * table is walked once per round (cache-line access pattern hardening). */
#define ENC_ROUND_T_S(o) \
    GetTable_Multi(Te[0], &t0, GETBYTE(s0, 3), &t1, GETBYTE(s1, 3), \
                          &t2, GETBYTE(s2, 3), &t3, GETBYTE(s3, 3)); \
    XorTable_Multi(Te[1], &t0, GETBYTE(s1, 2), &t1, GETBYTE(s2, 2), \
                          &t2, GETBYTE(s3, 2), &t3, GETBYTE(s0, 2)); \
    XorTable_Multi(Te[2], &t0, GETBYTE(s2, 1), &t1, GETBYTE(s3, 1), \
                          &t2, GETBYTE(s0, 1), &t3, GETBYTE(s1, 1)); \
    XorTable_Multi(Te[3], &t0, GETBYTE(s3, 0), &t1, GETBYTE(s0, 0), \
                          &t2, GETBYTE(s1, 0), &t3, GETBYTE(s2, 0)); \
    t0 ^= rk[(o)+4]; t1 ^= rk[(o)+5]; t2 ^= rk[(o)+6]; t3 ^= rk[(o)+7];
#define ENC_ROUND_S_T(o) \
    GetTable_Multi(Te[0], &s0, GETBYTE(t0, 3), &s1, GETBYTE(t1, 3), \
                          &s2, GETBYTE(t2, 3), &s3, GETBYTE(t3, 3)); \
    XorTable_Multi(Te[1], &s0, GETBYTE(t1, 2), &s1, GETBYTE(t2, 2), \
                          &s2, GETBYTE(t3, 2), &s3, GETBYTE(t0, 2)); \
    XorTable_Multi(Te[2], &s0, GETBYTE(t2, 1), &s1, GETBYTE(t3, 1), \
                          &s2, GETBYTE(t0, 1), &s3, GETBYTE(t1, 1)); \
    XorTable_Multi(Te[3], &s0, GETBYTE(t3, 0), &s1, GETBYTE(t0, 0), \
                          &s2, GETBYTE(t1, 0), &s3, GETBYTE(t2, 0)); \
    s0 ^= rk[(o)+0]; s1 ^= rk[(o)+1]; s2 ^= rk[(o)+2]; s3 ^= rk[(o)+3];
#endif

#ifndef WOLFSSL_AES_NO_UNROLL
    /* Unroll the loop: 5 round-pairs always (AES-128), plus one pair each
     * for AES-192 (r == 6) and AES-256 (r == 7). */
    ENC_ROUND_T_S( 0);
    ENC_ROUND_S_T( 8); ENC_ROUND_T_S( 8);
    ENC_ROUND_S_T(16); ENC_ROUND_T_S(16);
    ENC_ROUND_S_T(24); ENC_ROUND_T_S(24);
    ENC_ROUND_S_T(32); ENC_ROUND_T_S(32);
    if (r > 5) {
        ENC_ROUND_S_T(40); ENC_ROUND_T_S(40);
        if (r > 6) {
            ENC_ROUND_S_T(48); ENC_ROUND_T_S(48);
        }
    }
    rk += r * 8;
#else
    /*
     * Nr - 1 full rounds:
     */
    for (;;) {
        ENC_ROUND_T_S(0);
        rk += 8;
        if (--r == 0) {
            break;
        }
        ENC_ROUND_S_T(0);
    }
#endif

    /*
     * apply last round and
     * map cipher state to byte array block:
     */
#ifndef WOLFSSL_AES_TOUCH_LINES
    /* Last round has no MixColumns; mask each T-table entry down to the
     * S-box byte in the needed position. */
    s0 =
        (GetTable(Te[2], GETBYTE(t0, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t1, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t2, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t3, 0)) & 0x000000ff) ^
        rk[0];
    s1 =
        (GetTable(Te[2], GETBYTE(t1, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t2, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t3, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t0, 0)) & 0x000000ff) ^
        rk[1];
    s2 =
        (GetTable(Te[2], GETBYTE(t2, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t3, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t0, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t1, 0)) & 0x000000ff) ^
        rk[2];
    s3 =
        (GetTable(Te[2], GETBYTE(t3, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t0, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t1, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t2, 0)) & 0x000000ff) ^
        rk[3];
#else
    /* Multi-load version of the final round. */
    {
        word32 u0;
        word32 u1;
        word32 u2;
        word32 u3;

        s0 = rk[0]; s1 = rk[1]; s2 = rk[2]; s3 = rk[3];
        GetTable_Multi(Te[2], &u0, GETBYTE(t0, 3), &u1, GETBYTE(t1, 3),
                              &u2, GETBYTE(t2, 3), &u3, GETBYTE(t3, 3));
        s0 ^= u0 & 0xff000000; s1 ^= u1 & 0xff000000;
        s2 ^= u2 & 0xff000000; s3 ^= u3 & 0xff000000;
        GetTable_Multi(Te[3], &u0, GETBYTE(t1, 2), &u1, GETBYTE(t2, 2),
                              &u2, GETBYTE(t3, 2), &u3, GETBYTE(t0, 2));
        s0 ^= u0 & 0x00ff0000; s1 ^= u1 & 0x00ff0000;
        s2 ^= u2 & 0x00ff0000; s3 ^= u3 & 0x00ff0000;
        GetTable_Multi(Te[0], &u0, GETBYTE(t2, 1), &u1, GETBYTE(t3, 1),
                              &u2, GETBYTE(t0, 1), &u3, GETBYTE(t1, 1));
        s0 ^= u0 & 0x0000ff00; s1 ^= u1 & 0x0000ff00;
        s2 ^= u2 & 0x0000ff00; s3 ^= u3 & 0x0000ff00;
        GetTable_Multi(Te[1], &u0, GETBYTE(t3, 0), &u1, GETBYTE(t0, 0),
                              &u2, GETBYTE(t1, 0), &u3, GETBYTE(t2, 0));
        s0 ^= u0 & 0x000000ff; s1 ^= u1 & 0x000000ff;
        s2 ^= u2 & 0x000000ff; s3 ^= u3 & 0x000000ff;
    }
#endif
#else
    /* WOLFSSL_AES_SMALL_TABLES: 256-byte S-box only; MixColumns computed
     * arithmetically via col_mul instead of via the large T tables. */
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchSBox();
#endif

    r *= 2;
    /* Two rounds at a time */
    for (rk += 4; r > 1; r--, rk += 4) {
        /* SubBytes + ShiftRows into t0..t3 */
        t0 =
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 0)));
        t1 =
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 0)));
        t2 =
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 0)));
        t3 =
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 0)));

        /* MixColumns + AddRoundKey back into s0..s3 */
        s0 =
            (col_mul(t0, 3, 2, 0, 1) << 24) ^
            (col_mul(t0, 2, 1, 0, 3) << 16) ^
            (col_mul(t0, 1, 0, 2, 3) <<  8) ^
            (col_mul(t0, 0, 3, 2, 1)      ) ^
            rk[0];
        s1 =
            (col_mul(t1, 3, 2, 0, 1) << 24) ^
            (col_mul(t1, 2, 1, 0, 3) << 16) ^
            (col_mul(t1, 1, 0, 2, 3) <<  8) ^
            (col_mul(t1, 0, 3, 2, 1)      ) ^
            rk[1];
        s2 =
            (col_mul(t2, 3, 2, 0, 1) << 24) ^
            (col_mul(t2, 2, 1, 0, 3) << 16) ^
            (col_mul(t2, 1, 0, 2, 3) <<  8) ^
            (col_mul(t2, 0, 3, 2, 1)      ) ^
            rk[2];
        s3 =
            (col_mul(t3, 3, 2, 0, 1) << 24) ^
            (col_mul(t3, 2, 1, 0, 3) << 16) ^
            (col_mul(t3, 1, 0, 2, 3) <<  8) ^
            (col_mul(t3, 0, 3, 2, 1)      ) ^
            rk[3];
    }

    /* Final round: SubBytes + ShiftRows only (no MixColumns). */
    t0 =
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 0)));
    t1 =
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 0)));
    t2 =
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 0)));
    t3 =
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 0)));
    s0 = t0 ^ rk[0];
    s1 = t1 ^ rk[1];
    s2 = t2 ^ rk[2];
    s3 = t3 ^ rk[3];
#endif

    /* write out */
#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    XMEMCPY(outBlock,                  &s0, sizeof(s0));
    XMEMCPY(outBlock +     sizeof(s0), &s1, sizeof(s1));
    XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
    XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));

    return 0;
}
  2092. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT || HAVE_AESGCM */
  2093. #if defined(HAVE_AES_DECRYPT)
  2094. #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC)) || \
  2095. defined(WOLFSSL_AES_DIRECT)
  2096. #ifndef WC_NO_CACHE_RESISTANT
  2097. #ifndef WOLFSSL_AES_SMALL_TABLES
  2098. /* load 4 Td Tables into cache by cache line stride */
/* load 4 Td Tables into cache by cache line stride */
/* Touch every cache line of the four Td tables so subsequent key-dependent
 * lookups hit uniformly warm cache (cache-timing attack mitigation).
 * x starts at 0 and only AND-masks table words, so the return value is
 * always 0; the caller ORs it into live state purely to stop the compiler
 * from eliding these loads. */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd(void)
{
    word32 x = 0;
    int i,j;

    for (i = 0; i < 4; i++) {
        /* 256 elements, each one is 4 bytes */
        for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
            x &= Td[i][j];
        }
    }

    return x;
}
  2111. #endif
  2112. /* load Td Table4 into cache by cache line stride */
/* load Td Table4 into cache by cache line stride */
/* Warm the 256-byte Td4 inverse S-box; always evaluates to 0 (x starts 0
 * and is only AND-masked) — the caller ORs the result into state so the
 * prefetch loads survive optimization.  Under WOLFSSL_AES_TOUCH_LINES the
 * table reads themselves already touch lines, so this is a no-op. */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd4(void)
{
#ifndef WOLFSSL_AES_TOUCH_LINES
    word32 x = 0;
    int i;

    for (i = 0; i < 256; i += WC_CACHE_LINE_SZ) {
        x &= (word32)Td4[i];
    }

    return x;
#else
    return 0;
#endif
}
  2126. #endif
  2127. /* Software AES - ECB Decrypt */
/* Decrypt one AES_BLOCK_SIZE block with the (decryption) key schedule in
 * aes->key.  Hardware paths (AES-NI, SCE, DCP, SE050) take precedence when
 * compiled in; otherwise the inverse table-driven rounds run.
 *
 * aes      - AES object holding expanded round keys and round count
 * inBlock  - 16-byte ciphertext block
 * outBlock - 16-byte plaintext output (may alias inBlock)
 * Returns 0 on success, KEYUSAGE_E for an invalid round count, or the
 * hardware driver's status where that path returns directly.
 */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 s0, s1, s2, s3;        /* cipher state: 4 big-endian columns */
    word32 t0, t1, t2, t3;        /* scratch state for alternating rounds */
    word32 r = aes->rounds >> 1;  /* rounds processed two at a time */
    const word32* rk = aes->key;

    /* Valid AES uses 10/12/14 rounds, so r must be 5..7. */
    if (r > 7 || r == 0) {
        WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
        return KEYUSAGE_E;
    }

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
#ifdef DEBUG_AESNI
        printf("about to aes decrypt\n");
        printf("in = %p\n", inBlock);
        printf("out = %p\n", outBlock);
        printf("aes->key = %p\n", aes->key);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", AES_BLOCK_SIZE);
#endif

        /* if input and output same will overwrite input iv */
        if ((const byte*)aes->tmp != inBlock)
            XMEMCPY(aes->tmp, inBlock, AES_BLOCK_SIZE);
        AES_ECB_decrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
                        (int)aes->rounds);
        return 0;
    }
    else {
#ifdef DEBUG_AESNI
        printf("Skipping AES-NI\n");
#endif
    }
#endif /* WOLFSSL_AESNI */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
#endif

#if defined(WOLFSSL_IMXRT_DCP)
    /* NXP DCP engine only supports AES-128. */
    if (aes->keylen == 16) {
        DCPAesEcbDecrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
        return 0;
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, inBlock, outBlock, AES_BLOCK_SIZE,
                               AES_DECRYPTION, kAlgorithm_SSS_AES_ECB);
    }
#endif

    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    XMEMCPY(&s0, inBlock,                  sizeof(s0));
    XMEMCPY(&s1, inBlock +     sizeof(s0), sizeof(s1));
    XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
    XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));

#ifdef LITTLE_ENDIAN_ORDER
    /* Tables assume big-endian words; swap on LE targets. */
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    s0 ^= rk[0];
    s1 ^= rk[1];
    s2 ^= rk[2];
    s3 ^= rk[3];

#ifndef WOLFSSL_AES_SMALL_TABLES
#ifndef WC_NO_CACHE_RESISTANT
    /* PreFetchTd() returns 0 but touches every Td cache line; OR-ing it
     * into live state keeps the compiler from removing the loads. */
    s0 |= PreFetchTd();
#endif

#ifndef WOLFSSL_AES_TOUCH_LINES
    /* Unroll the loop. */
/* One inverse round s -> t (note the reversed ShiftRows byte sources
 * compared to encryption). */
#define DEC_ROUND_T_S(o) \
    t0 = GetTable(Td[0], GETBYTE(s0, 3)) ^ GetTable(Td[1], GETBYTE(s3, 2)) ^ \
         GetTable(Td[2], GETBYTE(s2, 1)) ^ GetTable(Td[3], GETBYTE(s1, 0)) ^ rk[(o)+4]; \
    t1 = GetTable(Td[0], GETBYTE(s1, 3)) ^ GetTable(Td[1], GETBYTE(s0, 2)) ^ \
         GetTable(Td[2], GETBYTE(s3, 1)) ^ GetTable(Td[3], GETBYTE(s2, 0)) ^ rk[(o)+5]; \
    t2 = GetTable(Td[0], GETBYTE(s2, 3)) ^ GetTable(Td[1], GETBYTE(s1, 2)) ^ \
         GetTable(Td[2], GETBYTE(s0, 1)) ^ GetTable(Td[3], GETBYTE(s3, 0)) ^ rk[(o)+6]; \
    t3 = GetTable(Td[0], GETBYTE(s3, 3)) ^ GetTable(Td[1], GETBYTE(s2, 2)) ^ \
         GetTable(Td[2], GETBYTE(s1, 1)) ^ GetTable(Td[3], GETBYTE(s0, 0)) ^ rk[(o)+7]
/* One inverse round t -> s. */
#define DEC_ROUND_S_T(o) \
    s0 = GetTable(Td[0], GETBYTE(t0, 3)) ^ GetTable(Td[1], GETBYTE(t3, 2)) ^ \
         GetTable(Td[2], GETBYTE(t2, 1)) ^ GetTable(Td[3], GETBYTE(t1, 0)) ^ rk[(o)+0]; \
    s1 = GetTable(Td[0], GETBYTE(t1, 3)) ^ GetTable(Td[1], GETBYTE(t0, 2)) ^ \
         GetTable(Td[2], GETBYTE(t3, 1)) ^ GetTable(Td[3], GETBYTE(t2, 0)) ^ rk[(o)+1]; \
    s2 = GetTable(Td[0], GETBYTE(t2, 3)) ^ GetTable(Td[1], GETBYTE(t1, 2)) ^ \
         GetTable(Td[2], GETBYTE(t0, 1)) ^ GetTable(Td[3], GETBYTE(t3, 0)) ^ rk[(o)+2]; \
    s3 = GetTable(Td[0], GETBYTE(t3, 3)) ^ GetTable(Td[1], GETBYTE(t2, 2)) ^ \
         GetTable(Td[2], GETBYTE(t1, 1)) ^ GetTable(Td[3], GETBYTE(t0, 0)) ^ rk[(o)+3]
#else
/* WOLFSSL_AES_TOUCH_LINES variant: gathered multi-loads per table. */
#define DEC_ROUND_T_S(o) \
    GetTable_Multi(Td[0], &t0, GETBYTE(s0, 3), &t1, GETBYTE(s1, 3), \
                          &t2, GETBYTE(s2, 3), &t3, GETBYTE(s3, 3)); \
    XorTable_Multi(Td[1], &t0, GETBYTE(s3, 2), &t1, GETBYTE(s0, 2), \
                          &t2, GETBYTE(s1, 2), &t3, GETBYTE(s2, 2)); \
    XorTable_Multi(Td[2], &t0, GETBYTE(s2, 1), &t1, GETBYTE(s3, 1), \
                          &t2, GETBYTE(s0, 1), &t3, GETBYTE(s1, 1)); \
    XorTable_Multi(Td[3], &t0, GETBYTE(s1, 0), &t1, GETBYTE(s2, 0), \
                          &t2, GETBYTE(s3, 0), &t3, GETBYTE(s0, 0)); \
    t0 ^= rk[(o)+4]; t1 ^= rk[(o)+5]; t2 ^= rk[(o)+6]; t3 ^= rk[(o)+7];
#define DEC_ROUND_S_T(o) \
    GetTable_Multi(Td[0], &s0, GETBYTE(t0, 3), &s1, GETBYTE(t1, 3), \
                          &s2, GETBYTE(t2, 3), &s3, GETBYTE(t3, 3)); \
    XorTable_Multi(Td[1], &s0, GETBYTE(t3, 2), &s1, GETBYTE(t0, 2), \
                          &s2, GETBYTE(t1, 2), &s3, GETBYTE(t2, 2)); \
    XorTable_Multi(Td[2], &s0, GETBYTE(t2, 1), &s1, GETBYTE(t3, 1), \
                          &s2, GETBYTE(t0, 1), &s3, GETBYTE(t1, 1)); \
    XorTable_Multi(Td[3], &s0, GETBYTE(t1, 0), &s1, GETBYTE(t2, 0), \
                          &s2, GETBYTE(t3, 0), &s3, GETBYTE(t0, 0)); \
    s0 ^= rk[(o)+0]; s1 ^= rk[(o)+1]; s2 ^= rk[(o)+2]; s3 ^= rk[(o)+3];
#endif

#ifndef WOLFSSL_AES_NO_UNROLL
    /* Unrolled: 5 round-pairs always, plus extras for AES-192/256. */
    DEC_ROUND_T_S( 0);
    DEC_ROUND_S_T( 8); DEC_ROUND_T_S( 8);
    DEC_ROUND_S_T(16); DEC_ROUND_T_S(16);
    DEC_ROUND_S_T(24); DEC_ROUND_T_S(24);
    DEC_ROUND_S_T(32); DEC_ROUND_T_S(32);
    if (r > 5) {
        DEC_ROUND_S_T(40); DEC_ROUND_T_S(40);
        if (r > 6) {
            DEC_ROUND_S_T(48); DEC_ROUND_T_S(48);
        }
    }
    rk += r * 8;
#else
    /*
     * Nr - 1 full rounds:
     */
    for (;;) {
        DEC_ROUND_T_S(0);
        rk += 8;
        if (--r == 0) {
            break;
        }
        DEC_ROUND_S_T(0);
    }
#endif

    /*
     * apply last round and
     * map cipher state to byte array block:
     */
#ifndef WC_NO_CACHE_RESISTANT
    /* Warm the Td4 inverse S-box before the key-dependent final lookups. */
    t0 |= PreFetchTd4();
#endif

    /* Final round uses the plain inverse S-box (Td4), no InvMixColumns. */
    s0 = GetTable8_4(Td4, GETBYTE(t0, 3), GETBYTE(t3, 2),
                          GETBYTE(t2, 1), GETBYTE(t1, 0)) ^ rk[0];
    s1 = GetTable8_4(Td4, GETBYTE(t1, 3), GETBYTE(t0, 2),
                          GETBYTE(t3, 1), GETBYTE(t2, 0)) ^ rk[1];
    s2 = GetTable8_4(Td4, GETBYTE(t2, 3), GETBYTE(t1, 2),
                          GETBYTE(t0, 1), GETBYTE(t3, 0)) ^ rk[2];
    s3 = GetTable8_4(Td4, GETBYTE(t3, 3), GETBYTE(t2, 2),
                          GETBYTE(t1, 1), GETBYTE(t0, 0)) ^ rk[3];
#else
    /* WOLFSSL_AES_SMALL_TABLES: Td4 inverse S-box only; InvMixColumns is
     * computed arithmetically via inv_col_mul. */
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchTd4();
#endif

    r *= 2;
    for (rk += 4; r > 1; r--, rk += 4) {
        /* InvSubBytes + InvShiftRows + AddRoundKey into t0..t3 */
        t0 =
            ((word32)GetTable8(Td4, GETBYTE(s0, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 0))) ^
            rk[0];
        t1 =
            ((word32)GetTable8(Td4, GETBYTE(s1, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 0))) ^
            rk[1];
        t2 =
            ((word32)GetTable8(Td4, GETBYTE(s2, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 0))) ^
            rk[2];
        t3 =
            ((word32)GetTable8(Td4, GETBYTE(s3, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 0))) ^
            rk[3];

        /* InvMixColumns back into s0..s3 */
        s0 =
            (inv_col_mul(t0, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t0, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t0, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t0, 1, 3, 2, 0)      );
        s1 =
            (inv_col_mul(t1, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t1, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t1, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t1, 1, 3, 2, 0)      );
        s2 =
            (inv_col_mul(t2, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t2, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t2, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t2, 1, 3, 2, 0)      );
        s3 =
            (inv_col_mul(t3, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t3, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t3, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t3, 1, 3, 2, 0)      );
    }

    /* Final round: InvSubBytes + InvShiftRows only. */
    t0 =
        ((word32)GetTable8(Td4, GETBYTE(s0, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 0)));
    t1 =
        ((word32)GetTable8(Td4, GETBYTE(s1, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 0)));
    t2 =
        ((word32)GetTable8(Td4, GETBYTE(s2, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 0)));
    t3 =
        ((word32)GetTable8(Td4, GETBYTE(s3, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 0)));
    s0 = t0 ^ rk[0];
    s1 = t1 ^ rk[1];
    s2 = t2 ^ rk[2];
    s3 = t3 ^ rk[3];
#endif

    /* write out */
#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    XMEMCPY(outBlock,                  &s0, sizeof(s0));
    XMEMCPY(outBlock +     sizeof(s0), &s1, sizeof(s1));
    XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
    XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));

    return 0;
}
  2371. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
  2372. #endif /* HAVE_AES_DECRYPT */
  2373. #endif /* NEED_AES_TABLES */
  2374. /* wc_AesSetKey */
  2375. #if defined(STM32_CRYPTO)
  2376. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2377. const byte* iv, int dir)
  2378. {
  2379. word32 *rk;
  2380. (void)dir;
  2381. if (aes == NULL || (keylen != 16 &&
  2382. #ifdef WOLFSSL_AES_192
  2383. keylen != 24 &&
  2384. #endif
  2385. keylen != 32)) {
  2386. return BAD_FUNC_ARG;
  2387. }
  2388. rk = aes->key;
  2389. aes->keylen = keylen;
  2390. aes->rounds = keylen/4 + 6;
  2391. XMEMCPY(rk, userKey, keylen);
  2392. #if !defined(WOLFSSL_STM32_CUBEMX) || defined(STM32_HAL_V2)
  2393. ByteReverseWords(rk, rk, keylen);
  2394. #endif
  2395. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2396. defined(WOLFSSL_AES_OFB)
  2397. aes->left = 0;
  2398. #endif
  2399. return wc_AesSetIV(aes, iv);
  2400. }
  2401. #if defined(WOLFSSL_AES_DIRECT)
  2402. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  2403. const byte* iv, int dir)
  2404. {
  2405. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  2406. }
  2407. #endif
  2408. #elif defined(HAVE_COLDFIRE_SEC)
  2409. #if defined (HAVE_THREADX)
  2410. #include "memory_pools.h"
  2411. extern TX_BYTE_POOL mp_ncached; /* Non Cached memory pool */
  2412. #endif
/* ColdFire SEC: non-cached working storage shared by all AES contexts,
 * allocated lazily on the first wc_AesSetKey call. */
#define AES_BUFFER_SIZE (AES_BLOCK_SIZE * 64)   /* 64 blocks per SEC run */
static unsigned char *AESBuffIn  = NULL;        /* SEC input staging buffer */
static unsigned char *AESBuffOut = NULL;        /* SEC output staging buffer */
static byte *secReg;                            /* IV/register buffer */
static byte *secKey;                            /* key buffer (2 blocks) */
static volatile SECdescriptorType *secDesc;     /* SEC DMA descriptor */

/* serializes access to the single SEC engine */
static wolfSSL_Mutex Mutex_AesSEC;

/* SEC descriptor header words selecting AES-CBC encrypt/decrypt */
#define SEC_DESC_AES_CBC_ENCRYPT 0x60300010
#define SEC_DESC_AES_CBC_DECRYPT 0x60200010

extern volatile unsigned char __MBAR[];         /* module base address */
/* ColdFire SEC key setup: lazily allocates the shared non-cached DMA
 * buffers/descriptor on first use, then stores key and IV in the context.
 * Returns 0 on success, BAD_FUNC_ARG on bad args or allocation failure. */
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    if (AESBuffIn == NULL) {
#if defined (HAVE_THREADX)
        /* Grab all SEC working storage from the non-cached byte pool. */
        int s1, s2, s3, s4, s5;

        s5 = tx_byte_allocate(&mp_ncached,(void *)&secDesc,
                              sizeof(SECdescriptorType), TX_NO_WAIT);
        s1 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffIn,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s2 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffOut,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s3 = tx_byte_allocate(&mp_ncached, (void *)&secKey,
                              AES_BLOCK_SIZE*2, TX_NO_WAIT);
        s4 = tx_byte_allocate(&mp_ncached, (void *)&secReg,
                              AES_BLOCK_SIZE, TX_NO_WAIT);

        /* any allocation failure aborts setup */
        if (s1 || s2 || s3 || s4 || s5)
            return BAD_FUNC_ARG;
#else
#warning "Allocate non-Cache buffers"
#endif

        wc_InitMutex(&Mutex_AesSEC);
    }

    if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
        return BAD_FUNC_ARG;

    if (aes == NULL)
        return BAD_FUNC_ARG;

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;
    XMEMCPY(aes->key, userKey, keylen);

    if (iv)
        XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0; /* no leftover stream bytes yet */
#endif

    return 0;
}
  2461. #elif defined(FREESCALE_LTC)
  2462. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
  2463. int dir)
  2464. {
  2465. if (aes == NULL || !((keylen == 16) || (keylen == 24) || (keylen == 32)))
  2466. return BAD_FUNC_ARG;
  2467. aes->rounds = keylen/4 + 6;
  2468. XMEMCPY(aes->key, userKey, keylen);
  2469. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2470. defined(WOLFSSL_AES_OFB)
  2471. aes->left = 0;
  2472. #endif
  2473. return wc_AesSetIV(aes, iv);
  2474. }
  2475. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  2476. const byte* iv, int dir)
  2477. {
  2478. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  2479. }
  2480. #elif defined(FREESCALE_MMCAU)
  2481. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2482. const byte* iv, int dir)
  2483. {
  2484. int ret;
  2485. byte* rk;
  2486. byte* tmpKey = (byte*)userKey;
  2487. int tmpKeyDynamic = 0;
  2488. word32 alignOffset = 0;
  2489. (void)dir;
  2490. if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
  2491. return BAD_FUNC_ARG;
  2492. if (aes == NULL)
  2493. return BAD_FUNC_ARG;
  2494. rk = (byte*)aes->key;
  2495. if (rk == NULL)
  2496. return BAD_FUNC_ARG;
  2497. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2498. defined(WOLFSSL_AES_OFB)
  2499. aes->left = 0;
  2500. #endif
  2501. aes->rounds = keylen/4 + 6;
  2502. #ifdef FREESCALE_MMCAU_CLASSIC
  2503. if ((wc_ptr_t)userKey % WOLFSSL_MMCAU_ALIGNMENT) {
  2504. #ifndef NO_WOLFSSL_ALLOC_ALIGN
  2505. byte* tmp = (byte*)XMALLOC(keylen + WOLFSSL_MMCAU_ALIGNMENT,
  2506. aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  2507. if (tmp == NULL) {
  2508. return MEMORY_E;
  2509. }
  2510. alignOffset = WOLFSSL_MMCAU_ALIGNMENT -
  2511. ((wc_ptr_t)tmp % WOLFSSL_MMCAU_ALIGNMENT);
  2512. tmpKey = tmp + alignOffset;
  2513. XMEMCPY(tmpKey, userKey, keylen);
  2514. tmpKeyDynamic = 1;
  2515. #else
  2516. WOLFSSL_MSG("Bad cau_aes_set_key alignment");
  2517. return BAD_ALIGN_E;
  2518. #endif
  2519. }
  2520. #endif
  2521. ret = wolfSSL_CryptHwMutexLock();
  2522. if(ret == 0) {
  2523. #ifdef FREESCALE_MMCAU_CLASSIC
  2524. cau_aes_set_key(tmpKey, keylen*8, rk);
  2525. #else
  2526. MMCAU_AES_SetKey(tmpKey, keylen, rk);
  2527. #endif
  2528. wolfSSL_CryptHwMutexUnLock();
  2529. ret = wc_AesSetIV(aes, iv);
  2530. }
  2531. if (tmpKeyDynamic == 1) {
  2532. XFREE(tmpKey - alignOffset, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  2533. }
  2534. return ret;
  2535. }
  2536. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  2537. const byte* iv, int dir)
  2538. {
  2539. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  2540. }
  2541. #elif defined(WOLFSSL_NRF51_AES)
  2542. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2543. const byte* iv, int dir)
  2544. {
  2545. int ret;
  2546. (void)dir;
  2547. (void)iv;
  2548. if (aes == NULL || keylen != 16)
  2549. return BAD_FUNC_ARG;
  2550. aes->keylen = keylen;
  2551. aes->rounds = keylen/4 + 6;
  2552. XMEMCPY(aes->key, userKey, keylen);
  2553. ret = nrf51_aes_set_key(userKey);
  2554. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2555. defined(WOLFSSL_AES_OFB)
  2556. aes->left = 0;
  2557. #endif
  2558. return ret;
  2559. }
  2560. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  2561. const byte* iv, int dir)
  2562. {
  2563. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  2564. }
  2565. #elif defined(WOLFSSL_ESP32_CRYPT) && \
  2566. !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
  2567. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2568. const byte* iv, int dir)
  2569. {
  2570. (void)dir;
  2571. (void)iv;
  2572. if (aes == NULL || (keylen != 16 && keylen != 24 && keylen != 32)) {
  2573. return BAD_FUNC_ARG;
  2574. }
  2575. #if !defined(WOLFSSL_AES_128)
  2576. if (keylen == 16) {
  2577. return BAD_FUNC_ARG;
  2578. }
  2579. #endif
  2580. #if !defined(WOLFSSL_AES_192)
  2581. if (keylen == 24) {
  2582. return BAD_FUNC_ARG;
  2583. }
  2584. #endif
  2585. #if !defined(WOLFSSL_AES_256)
  2586. if (keylen == 32) {
  2587. return BAD_FUNC_ARG;
  2588. }
  2589. #endif
  2590. aes->keylen = keylen;
  2591. aes->rounds = keylen/4 + 6;
  2592. XMEMCPY(aes->key, userKey, keylen);
  2593. #if defined(WOLFSSL_AES_COUNTER)
  2594. aes->left = 0;
  2595. #endif
  2596. return wc_AesSetIV(aes, iv);
  2597. }
  2598. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  2599. const byte* iv, int dir)
  2600. {
  2601. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  2602. }
  2603. #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
  2604. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
  2605. int dir)
  2606. {
  2607. SaSiError_t ret = SASI_OK;
  2608. SaSiAesIv_t iv_aes;
  2609. if (aes == NULL ||
  2610. (keylen != AES_128_KEY_SIZE &&
  2611. keylen != AES_192_KEY_SIZE &&
  2612. keylen != AES_256_KEY_SIZE)) {
  2613. return BAD_FUNC_ARG;
  2614. }
  2615. #if defined(AES_MAX_KEY_SIZE)
  2616. if (keylen > (AES_MAX_KEY_SIZE/8)) {
  2617. return BAD_FUNC_ARG;
  2618. }
  2619. #endif
  2620. if (dir != AES_ENCRYPTION &&
  2621. dir != AES_DECRYPTION) {
  2622. return BAD_FUNC_ARG;
  2623. }
  2624. if (dir == AES_ENCRYPTION) {
  2625. aes->ctx.mode = SASI_AES_ENCRYPT;
  2626. SaSi_AesInit(&aes->ctx.user_ctx,
  2627. SASI_AES_ENCRYPT,
  2628. SASI_AES_MODE_CBC,
  2629. SASI_AES_PADDING_NONE);
  2630. }
  2631. else {
  2632. aes->ctx.mode = SASI_AES_DECRYPT;
  2633. SaSi_AesInit(&aes->ctx.user_ctx,
  2634. SASI_AES_DECRYPT,
  2635. SASI_AES_MODE_CBC,
  2636. SASI_AES_PADDING_NONE);
  2637. }
  2638. aes->keylen = keylen;
  2639. aes->rounds = keylen/4 + 6;
  2640. XMEMCPY(aes->key, userKey, keylen);
  2641. aes->ctx.key.pKey = (byte*)aes->key;
  2642. aes->ctx.key.keySize= keylen;
  2643. ret = SaSi_AesSetKey(&aes->ctx.user_ctx,
  2644. SASI_AES_USER_KEY,
  2645. &aes->ctx.key,
  2646. sizeof(aes->ctx.key));
  2647. if (ret != SASI_OK) {
  2648. return BAD_FUNC_ARG;
  2649. }
  2650. ret = wc_AesSetIV(aes, iv);
  2651. if (iv)
  2652. XMEMCPY(iv_aes, iv, AES_BLOCK_SIZE);
  2653. else
  2654. XMEMSET(iv_aes, 0, AES_BLOCK_SIZE);
  2655. ret = SaSi_AesSetIv(&aes->ctx.user_ctx, iv_aes);
  2656. if (ret != SASI_OK) {
  2657. return ret;
  2658. }
  2659. return ret;
  2660. }
  2661. #if defined(WOLFSSL_AES_DIRECT)
/* Direct key setup for the CryptoCell port: identical to wc_AesSetKey(),
 * since the hardware performs its own key expansion. */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2667. #endif
  2668. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
  2669. && !defined(WOLFSSL_QNX_CAAM)
  2670. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  2671. #elif defined(WOLFSSL_AFALG)
  2672. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  2673. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  2674. /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */
  2675. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  2676. /* implemented in wolfcrypt/src/port/silabs/silabs_hash.c */
  2677. #else
  2678. /* Software AES - SetKey */
/* Software AES key setup (shared worker for wc_AesSetKey and
 * wc_AesSetKeyDirect).
 *
 * aes         AES context to initialize
 * userKey     raw key bytes (may be a CAAM blob on IMX6 builds)
 * keylen      key length in bytes
 * iv          optional IV, forwarded to wc_AesSetIV() (NULL -> zero IV)
 * dir         AES_ENCRYPTION or AES_DECRYPTION
 * checkKeyLen when non-zero, reject lengths other than 16/24/32
 *
 * Expands the key schedule into aes->key (FIPS-197 style, using the Te/Td
 * lookup tables or the small S-box tables), or delegates to a hardware /
 * accelerated path when one is compiled in. Returns 0 or an error code.
 */
static WARN_UNUSED_RESULT int wc_AesSetKeyLocal(
    Aes* aes, const byte* userKey, word32 keylen, const byte* iv, int dir,
    int checkKeyLen)
{
    int ret;
    word32 *rk;
#ifdef NEED_AES_TABLES
    word32 temp;
    unsigned int i = 0;
#endif
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    byte local[32];
    word32 localSz = 32;
#endif

#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    if (wc_MAXQ10XX_AesSetKey(aes, userKey, keylen) != 0) {
        return WC_HW_E;
    }
#endif

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    /* A key wrapped in a CAAM blob is WC_CAAM_BLOB_SZ bytes larger than the
     * plain key; unwrap it into 'local' before continuing. */
    if (keylen == (16 + WC_CAAM_BLOB_SZ) ||
        keylen == (24 + WC_CAAM_BLOB_SZ) ||
        keylen == (32 + WC_CAAM_BLOB_SZ)) {
        if (wc_caamOpenBlob((byte*)userKey, keylen, local, &localSz) != 0) {
            return BAD_FUNC_ARG;
        }
        /* set local values */
        userKey = local;
        keylen = localSz;
    }
#endif

#ifdef WOLFSSL_SECO_CAAM
    /* if set to use hardware than import the key */
    if (aes->devId == WOLFSSL_SECO_DEVID) {
        int keyGroup = 1; /* group one was chosen arbitrarily */
        unsigned int keyIdOut;
        byte importiv[GCM_NONCE_MID_SZ];
        int importivSz = GCM_NONCE_MID_SZ;
        int keyType = 0;
        WC_RNG rng;

        /* Random IV used only for wrapping the key into the SECO HSM. */
        if (wc_InitRng(&rng) != 0) {
            WOLFSSL_MSG("RNG init for IV failed");
            return WC_HW_E;
        }
        if (wc_RNG_GenerateBlock(&rng, importiv, importivSz) != 0) {
            WOLFSSL_MSG("Generate IV failed");
            wc_FreeRng(&rng);
            return WC_HW_E;
        }
        wc_FreeRng(&rng);

        if (iv)
            XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
        else
            XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);

        switch (keylen) {
            case AES_128_KEY_SIZE: keyType = CAAM_KEYTYPE_AES128; break;
            case AES_192_KEY_SIZE: keyType = CAAM_KEYTYPE_AES192; break;
            case AES_256_KEY_SIZE: keyType = CAAM_KEYTYPE_AES256; break;
        }

        keyIdOut = wc_SECO_WrapKey(0, (byte*)userKey, keylen, importiv,
            importivSz, keyType, CAAM_KEY_TRANSIENT, keyGroup);
        if (keyIdOut == 0) {
            return WC_HW_E;
        }
        /* Only the HSM key handle is kept; the raw key never lands in
         * aes->key on this path. */
        aes->blackKey = keyIdOut;
        return 0;
    }
#endif

#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
    (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
    #ifdef WOLF_CRYPTO_CB
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        /* Keep a raw copy for crypto-callback / devcrypto / async backends. */
        if (keylen > sizeof(aes->devKey)) {
            return BAD_FUNC_ARG;
        }
        XMEMCPY(aes->devKey, userKey, keylen);
    }
#endif

    if (checkKeyLen) {
        if (keylen != 16 && keylen != 24 && keylen != 32) {
            return BAD_FUNC_ARG;
        }
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE < 256
        /* Check key length only when AES_MAX_KEY_SIZE doesn't allow
         * all key sizes. Otherwise this condition is never true. */
        if (keylen > (AES_MAX_KEY_SIZE / 8)) {
            return BAD_FUNC_ARG;
        }
#endif
    }

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    /* Discard any leftover stream bytes from a previous key's cipher op. */
    aes->left = 0;
#endif

    aes->keylen = (int)keylen;
    aes->rounds = (keylen/4) + 6;

#ifdef WOLFSSL_AESNI
    aes->use_aesni = 0;
    if (checkAESNI == 0) {
        haveAESNI = Check_CPU_support_AES();
        checkAESNI = 1;
    }
    if (haveAESNI) {
        #ifdef WOLFSSL_LINUXKM
        /* runtime alignment check */
        if ((wc_ptr_t)&aes->key & (wc_ptr_t)0xf) {
            return BAD_ALIGN_E;
        }
        #endif
        aes->use_aesni = 1;
        if (iv)
            XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
        else
            XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);
        /* AES-NI expands the schedule itself; skip the table-based setup. */
        if (dir == AES_ENCRYPTION)
            return AES_set_encrypt_key(userKey, (int)keylen * 8, aes);
    #ifdef HAVE_AES_DECRYPT
        else
            return AES_set_decrypt_key(userKey, (int)keylen * 8, aes);
    #endif
    }
#endif /* WOLFSSL_AESNI */

#ifdef WOLFSSL_KCAPI_AES
    XMEMCPY(aes->devKey, userKey, keylen);
    /* Re-keying invalidates the open kernel-crypto cipher handle. */
    if (aes->init != 0) {
        kcapi_cipher_destroy(aes->handle);
        aes->handle = NULL;
        aes->init = 0;
    }
    (void)dir;
#endif

    if (keylen > sizeof(aes->key)) {
        return BAD_FUNC_ARG;
    }
#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    return wc_psa_aes_set_key(aes, userKey, keylen, (uint8_t*)iv,
        ((psa_algorithm_t)0), dir);
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* wolfSSL HostCrypto in SE05x SDK can request to use SW crypto
     * instead of SE05x crypto by setting useSWCrypt */
    if (aes->useSWCrypt == 0) {
        ret = se050_aes_set_key(aes, userKey, keylen, iv, dir);
        if (ret == 0) {
            ret = wc_AesSetIV(aes, iv);
        }
        return ret;
    }
#endif

    /* Software path: copy the key and expand the round keys in place. */
    rk = aes->key;
    XMEMCPY(rk, userKey, keylen);

#if defined(LITTLE_ENDIAN_ORDER) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
    (!defined(WOLFSSL_ESP32_CRYPT) || \
      defined(NO_WOLFSSL_ESP32_CRYPT_AES))
    /* Key schedule operates on big-endian words. */
    ByteReverseWords(rk, rk, keylen);
#endif

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    temp = 0;
    if (keylen == 16)
        temp = DCPAesSetKey(aes, userKey, keylen, iv, dir);
    if (temp != 0)
        return WC_HW_E;
#endif

#ifdef NEED_AES_TABLES
    /* FIPS-197 key expansion; one case per key size so unused sizes can be
     * compiled out. Each loop derives the next round key from the previous
     * one via the S-box (through Te[]/Tsbox) and the round constant rcon. */
    switch (keylen) {
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 128 && \
        defined(WOLFSSL_AES_128)
    case 16:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        while (1)
        {
            temp  = rk[3];
            rk[4] = rk[0] ^
    #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
    #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) << 8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
    #endif
                rcon[i];
            rk[5] = rk[1] ^ rk[4];
            rk[6] = rk[2] ^ rk[5];
            rk[7] = rk[3] ^ rk[6];
            if (++i == 10)
                break;
            rk += 4;
        }
        break;
#endif /* 128 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 192 && \
        defined(WOLFSSL_AES_192)
    case 24:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        /* for (;;) here triggers a bug in VC60 SP4 w/ Pro Pack */
        while (1)
        {
            temp = rk[ 5];
            rk[ 6] = rk[ 0] ^
    #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
    #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) << 8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
    #endif
                rcon[i];
            rk[ 7] = rk[ 1] ^ rk[ 6];
            rk[ 8] = rk[ 2] ^ rk[ 7];
            rk[ 9] = rk[ 3] ^ rk[ 8];
            if (++i == 8)
                break;
            rk[10] = rk[ 4] ^ rk[ 9];
            rk[11] = rk[ 5] ^ rk[10];
            rk += 6;
        }
        break;
#endif /* 192 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 256 && \
        defined(WOLFSSL_AES_256)
    case 32:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        while (1)
        {
            temp = rk[ 7];
            rk[ 8] = rk[ 0] ^
    #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
    #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) << 8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
    #endif
                rcon[i];
            rk[ 9] = rk[ 1] ^ rk[ 8];
            rk[10] = rk[ 2] ^ rk[ 9];
            rk[11] = rk[ 3] ^ rk[10];
            if (++i == 7)
                break;
            /* AES-256 applies an extra S-box step (no rotate) mid-round. */
            temp = rk[11];
            rk[12] = rk[ 4] ^
    #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 3)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 2)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 1)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 0)) & 0x000000ff);
    #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)));
    #endif
            rk[13] = rk[ 5] ^ rk[12];
            rk[14] = rk[ 6] ^ rk[13];
            rk[15] = rk[ 7] ^ rk[14];
            rk += 8;
        }
        break;
#endif /* 256 */

    default:
        return BAD_FUNC_ARG;
    } /* switch */
    ForceZero(&temp, sizeof(temp));

#if defined(HAVE_AES_DECRYPT)
    if (dir == AES_DECRYPTION) {
        unsigned int j;
        rk = aes->key;

        /* invert the order of the round keys: */
        for (i = 0, j = 4* aes->rounds; i < j; i += 4, j -= 4) {
            temp = rk[i ]; rk[i ] = rk[j ]; rk[j ] = temp;
            temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp;
            temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp;
            temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp;
        }
        ForceZero(&temp, sizeof(temp));
    #if !defined(WOLFSSL_AES_SMALL_TABLES)
        /* apply the inverse MixColumn transform to all round keys but the
           first and the last: */
        for (i = 1; i < aes->rounds; i++) {
            rk += 4;
            rk[0] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[0], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[0], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[0], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[0], 0)) & 0xff);
            rk[1] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[1], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[1], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[1], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[1], 0)) & 0xff);
            rk[2] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[2], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[2], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[2], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[2], 0)) & 0xff);
            rk[3] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[3], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[3], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[3], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[3], 0)) & 0xff);
        }
    #endif
    }
#else
    (void)dir;
#endif /* HAVE_AES_DECRYPT */
    (void)temp;
#endif /* NEED_AES_TABLES */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    XMEMCPY((byte*)aes->key, userKey, keylen);
    if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == CRYPTO_WORD_ENDIAN_BIG) {
        ByteReverseWords(aes->key, aes->key, 32);
    }
#endif

    ret = wc_AesSetIV(aes, iv);

#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    aes->ctx.cfd = -1;
#endif
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    /* Scrub the unwrapped key copy from the stack. */
    ForceZero(local, sizeof(local));
#endif
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(&temp, sizeof(temp));
#endif

    return ret;
}
  3031. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  3032. const byte* iv, int dir)
  3033. {
  3034. if (aes == NULL) {
  3035. return BAD_FUNC_ARG;
  3036. }
  3037. if (keylen > sizeof(aes->key)) {
  3038. return BAD_FUNC_ARG;
  3039. }
  3040. return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 1);
  3041. }
  3042. #if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER)
  3043. /* AES-CTR and AES-DIRECT need to use this for key setup */
  3044. /* This function allows key sizes that are not 128/192/256 bits */
  3045. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  3046. const byte* iv, int dir)
  3047. {
  3048. if (aes == NULL) {
  3049. return BAD_FUNC_ARG;
  3050. }
  3051. if (keylen > sizeof(aes->key)) {
  3052. return BAD_FUNC_ARG;
  3053. }
  3054. return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 0);
  3055. }
  3056. #endif /* WOLFSSL_AES_DIRECT || WOLFSSL_AES_COUNTER */
  3057. #endif /* wc_AesSetKey block */
  3058. /* wc_AesSetIV is shared between software and hardware */
  3059. int wc_AesSetIV(Aes* aes, const byte* iv)
  3060. {
  3061. if (aes == NULL)
  3062. return BAD_FUNC_ARG;
  3063. if (iv)
  3064. XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
  3065. else
  3066. XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);
  3067. #if defined(WOLFSSL_AES_COUNTER) || defined(WOLFSSL_AES_CFB) || \
  3068. defined(WOLFSSL_AES_OFB) || defined(WOLFSSL_AES_XTS)
  3069. /* Clear any unused bytes from last cipher op. */
  3070. aes->left = 0;
  3071. #endif
  3072. return 0;
  3073. }
  3074. /* AES-DIRECT */
  3075. #if defined(WOLFSSL_AES_DIRECT)
  3076. #if defined(HAVE_COLDFIRE_SEC)
  3077. #error "Coldfire SEC doesn't yet support AES direct"
  3078. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  3079. !defined(WOLFSSL_QNX_CAAM)
  3080. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  3081. #elif defined(WOLFSSL_AFALG)
  3082. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  3083. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  3084. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  3085. #elif defined(WOLFSSL_LINUXKM) && defined(WOLFSSL_AESNI)
/* Linux-kernel-module AES-NI wrapper for one-block encrypt: the kernel does
 * not preserve FPU/SIMD state, so the vector registers must be explicitly
 * saved before and restored after the AES-NI code runs. */
WARN_UNUSED_RESULT int wc_AesEncryptDirect(
    Aes* aes, byte* out, const byte* in)
{
    int ret;
    /* SAVE_VECTOR_REGISTERS may fail; on failure it executes the statement
     * given to it (returning _svr_ret). */
    if (haveAESNI && aes->use_aesni)
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = wc_AesEncrypt(aes, in, out);
    if (haveAESNI && aes->use_aesni)
        RESTORE_VECTOR_REGISTERS();
    return ret;
}
  3097. /* vector reg save/restore is explicit in all below calls to
  3098. * wc_Aes{En,De}cryptDirect(), so bypass the public version with a
  3099. * macro.
  3100. */
  3101. #define wc_AesEncryptDirect(aes, out, in) wc_AesEncrypt(aes, in, out)
  3102. #ifdef HAVE_AES_DECRYPT
  3103. /* Allow direct access to one block decrypt */
/* Allow direct access to one block decrypt */
/* Linux-kernel-module AES-NI wrapper for one-block decrypt; mirrors
 * wc_AesEncryptDirect's vector-register save/restore discipline. */
WARN_UNUSED_RESULT int wc_AesDecryptDirect(
    Aes* aes, byte* out, const byte* in)
{
    int ret;
    if (haveAESNI && aes->use_aesni)
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = wc_AesDecrypt(aes, in, out);
    if (haveAESNI && aes->use_aesni)
        RESTORE_VECTOR_REGISTERS();
    return ret;
}
  3115. #define wc_AesDecryptDirect(aes, out, in) wc_AesDecrypt(aes, in, out)
  3116. #endif /* HAVE_AES_DECRYPT */
  3117. #else
  3118. /* Allow direct access to one block encrypt */
/* One-block ECB-style encrypt; note the public API's (out, in) argument
 * order is swapped relative to the internal wc_AesEncrypt(in, out). */
int wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in)
{
    return wc_AesEncrypt(aes, in, out);
}
  3123. #ifdef HAVE_AES_DECRYPT
  3124. /* Allow direct access to one block decrypt */
/* One-block ECB-style decrypt; argument order is swapped relative to the
 * internal wc_AesDecrypt(in, out), matching wc_AesEncryptDirect. */
int wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in)
{
    return wc_AesDecrypt(aes, in, out);
}
  3129. #endif /* HAVE_AES_DECRYPT */
  3130. #endif /* AES direct block */
  3131. #endif /* WOLFSSL_AES_DIRECT */
  3132. /* AES-CBC */
  3133. #ifdef HAVE_AES_CBC
  3134. #if defined(STM32_CRYPTO)
  3135. #ifdef WOLFSSL_STM32_CUBEMX
/* AES-CBC encrypt using the STM32 CubeMX HAL CRYP peripheral.
 * Processes only whole blocks (sz is truncated to a block multiple unless
 * WOLFSSL_AES_CBC_LENGTH_CHECKS rejects partial sizes); the last ciphertext
 * block is saved back into aes->reg to chain the next call. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    CRYP_HandleTypeDef hcryp;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    /* Serialize all access to the single hardware crypto engine. */
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CBC;
    /* HAL v2 expects the IV in word-swapped order. */
    ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCBC_Encrypt(&hcryp, (uint8_t*)in,
                                  blocks * AES_BLOCK_SIZE,
                                  out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }

    /* store iv for next call */
    XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    HAL_CRYP_DeInit(&hcryp);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
  3186. #ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt using the STM32 CubeMX HAL CRYP peripheral.
 * The final ciphertext block is stashed in aes->tmp BEFORE decrypting so
 * chaining still works when in == out (in-place decryption). */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    CRYP_HandleTypeDef hcryp;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* if input and output same will overwrite input iv */
    XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CBC;
    /* HAL v2 expects the IV in word-swapped order. */
    ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCBC_Decrypt(&hcryp, (uint8_t*)in,
                                  blocks * AES_BLOCK_SIZE,
                                  out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }

    /* store iv for next call */
    XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

    HAL_CRYP_DeInit(&hcryp);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
  3239. #endif /* HAVE_AES_DECRYPT */
  3240. #else /* Standard Peripheral Library */
/* AES-CBC encrypt via the STM32 Standard Peripheral Library CRYP engine.
 * Feeds/drains the hardware FIFO one 16-byte block at a time; the chaining
 * IV for the next call is taken from the last ciphertext block. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set key */
    CRYP_KeyInit(&keyInit);

    /* set iv */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    /* Peripheral consumes the IV as big-endian words. */
    ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
    ivInit.CRYP_IV0Left = iv[0];
    ivInit.CRYP_IV0Right = iv[1];
    ivInit.CRYP_IV1Left = iv[2];
    ivInit.CRYP_IV1Right = iv[3];
    CRYP_IVInit(&ivInit);

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    while (blocks--) {
        /* flush IN/OUT FIFOs */
        CRYP_FIFOFlush();

        CRYP_DataIn(*(uint32_t*)&in[0]);
        CRYP_DataIn(*(uint32_t*)&in[4]);
        CRYP_DataIn(*(uint32_t*)&in[8]);
        CRYP_DataIn(*(uint32_t*)&in[12]);

        /* wait until the complete message has been processed */
        while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

        *(uint32_t*)&out[0] = CRYP_DataOut();
        *(uint32_t*)&out[4] = CRYP_DataOut();
        *(uint32_t*)&out[8] = CRYP_DataOut();
        *(uint32_t*)&out[12] = CRYP_DataOut();

        /* store iv for next call */
        XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

        sz -= AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
  3307. #ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt via the STM32 Standard Peripheral Library CRYP engine.
 * First runs the key-derivation phase required for decryption, then
 * processes blocks through the FIFO. The final ciphertext block is saved in
 * aes->tmp up front so in-place operation (in == out) keeps the chain IV. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* if input and output same will overwrite input iv */
    XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set direction and key */
    CRYP_KeyInit(&keyInit);
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    /* Key-preparation mode: derives the decryption round keys in hardware. */
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* wait until key has been prepared */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
    CRYP_Init(&cryptInit);

    /* set iv */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    /* Peripheral consumes the IV as big-endian words. */
    ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
    ivInit.CRYP_IV0Left = iv[0];
    ivInit.CRYP_IV0Right = iv[1];
    ivInit.CRYP_IV1Left = iv[2];
    ivInit.CRYP_IV1Right = iv[3];
    CRYP_IVInit(&ivInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    while (blocks--) {
        /* flush IN/OUT FIFOs */
        CRYP_FIFOFlush();

        CRYP_DataIn(*(uint32_t*)&in[0]);
        CRYP_DataIn(*(uint32_t*)&in[4]);
        CRYP_DataIn(*(uint32_t*)&in[8]);
        CRYP_DataIn(*(uint32_t*)&in[12]);

        /* wait until the complete message has been processed */
        while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

        *(uint32_t*)&out[0] = CRYP_DataOut();
        *(uint32_t*)&out[4] = CRYP_DataOut();
        *(uint32_t*)&out[8] = CRYP_DataOut();
        *(uint32_t*)&out[12] = CRYP_DataOut();

        /* store iv for next call */
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
  3382. #endif /* HAVE_AES_DECRYPT */
  3383. #endif /* WOLFSSL_STM32_CUBEMX */
  3384. #elif defined(HAVE_COLDFIRE_SEC)
  3385. static WARN_UNUSED_RESULT int wc_AesCbcCrypt(
  3386. Aes* aes, byte* po, const byte* pi, word32 sz, word32 descHeader)
  3387. {
  3388. #ifdef DEBUG_WOLFSSL
  3389. int i; int stat1, stat2; int ret;
  3390. #endif
  3391. int size;
  3392. volatile int v;
  3393. if ((pi == NULL) || (po == NULL))
  3394. return BAD_FUNC_ARG; /*wrong pointer*/
  3395. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3396. if (sz % AES_BLOCK_SIZE) {
  3397. return BAD_LENGTH_E;
  3398. }
  3399. #endif
  3400. wc_LockMutex(&Mutex_AesSEC);
  3401. /* Set descriptor for SEC */
  3402. secDesc->length1 = 0x0;
  3403. secDesc->pointer1 = NULL;
  3404. secDesc->length2 = AES_BLOCK_SIZE;
  3405. secDesc->pointer2 = (byte *)secReg; /* Initial Vector */
  3406. switch(aes->rounds) {
  3407. case 10: secDesc->length3 = 16; break;
  3408. case 12: secDesc->length3 = 24; break;
  3409. case 14: secDesc->length3 = 32; break;
  3410. }
  3411. XMEMCPY(secKey, aes->key, secDesc->length3);
  3412. secDesc->pointer3 = (byte *)secKey;
  3413. secDesc->pointer4 = AESBuffIn;
  3414. secDesc->pointer5 = AESBuffOut;
  3415. secDesc->length6 = 0x0;
  3416. secDesc->pointer6 = NULL;
  3417. secDesc->length7 = 0x0;
  3418. secDesc->pointer7 = NULL;
  3419. secDesc->nextDescriptorPtr = NULL;
  3420. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3421. size = AES_BUFFER_SIZE;
  3422. #endif
  3423. while (sz) {
  3424. secDesc->header = descHeader;
  3425. XMEMCPY(secReg, aes->reg, AES_BLOCK_SIZE);
  3426. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3427. sz -= AES_BUFFER_SIZE;
  3428. #else
  3429. if (sz < AES_BUFFER_SIZE) {
  3430. size = sz;
  3431. sz = 0;
  3432. } else {
  3433. size = AES_BUFFER_SIZE;
  3434. sz -= AES_BUFFER_SIZE;
  3435. }
  3436. #endif
  3437. secDesc->length4 = size;
  3438. secDesc->length5 = size;
  3439. XMEMCPY(AESBuffIn, pi, size);
  3440. if(descHeader == SEC_DESC_AES_CBC_DECRYPT) {
  3441. XMEMCPY((void*)aes->tmp, (void*)&(pi[size-AES_BLOCK_SIZE]),
  3442. AES_BLOCK_SIZE);
  3443. }
  3444. /* Point SEC to the location of the descriptor */
  3445. MCF_SEC_FR0 = (uint32)secDesc;
  3446. /* Initialize SEC and wait for encryption to complete */
  3447. MCF_SEC_CCCR0 = 0x0000001a;
  3448. /* poll SISR to determine when channel is complete */
  3449. v=0;
  3450. while ((secDesc->header>> 24) != 0xff) v++;
  3451. #ifdef DEBUG_WOLFSSL
  3452. ret = MCF_SEC_SISRH;
  3453. stat1 = MCF_SEC_AESSR;
  3454. stat2 = MCF_SEC_AESISR;
  3455. if (ret & 0xe0000000) {
  3456. db_printf("Aes_Cbc(i=%d):ISRH=%08x, AESSR=%08x, "
  3457. "AESISR=%08x\n", i, ret, stat1, stat2);
  3458. }
  3459. #endif
  3460. XMEMCPY(po, AESBuffOut, size);
  3461. if (descHeader == SEC_DESC_AES_CBC_ENCRYPT) {
  3462. XMEMCPY((void*)aes->reg, (void*)&(po[size-AES_BLOCK_SIZE]),
  3463. AES_BLOCK_SIZE);
  3464. } else {
  3465. XMEMCPY((void*)aes->reg, (void*)aes->tmp, AES_BLOCK_SIZE);
  3466. }
  3467. pi += size;
  3468. po += size;
  3469. }
  3470. wc_UnLockMutex(&Mutex_AesSEC);
  3471. return 0;
  3472. }
/* CBC encrypt entry point for the Coldfire SEC: selects the encrypt
 * descriptor header and delegates to the shared worker. */
int wc_AesCbcEncrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
{
    return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_ENCRYPT));
}
  3477. #ifdef HAVE_AES_DECRYPT
/* CBC decrypt entry point for the Coldfire SEC: selects the decrypt
 * descriptor header and delegates to the shared worker. */
int wc_AesCbcDecrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
{
    return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_DECRYPT));
}
  3482. #endif /* HAVE_AES_DECRYPT */
  3483. #elif defined(FREESCALE_LTC)
/* AES-CBC encrypt on the Freescale LTC accelerator. Whole blocks only;
 * on success the last ciphertext block becomes the next chaining IV.
 * Returns 0 on success, -1 on hardware failure, or a setup error code. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 keySize;
    status_t status;
    byte *iv, *enc_key;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    iv = (byte*)aes->reg;
    enc_key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_EncryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
        iv, enc_key, keySize);
    wolfSSL_CryptHwMutexUnLock();

    /* store iv for next call */
    if (status == kStatus_Success) {
        XMEMCPY(iv, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
    }

    return (status == kStatus_Success) ? 0 : -1;
}
  3515. #ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt on the Freescale LTC accelerator. The last ciphertext
 * block is captured into temp_block BEFORE decryption so in-place operation
 * (in == out) still yields the correct next chaining IV. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 keySize;
    status_t status;
    byte* iv, *dec_key;
    byte temp_block[AES_BLOCK_SIZE];
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    iv = (byte*)aes->reg;
    dec_key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* get IV for next call */
    XMEMCPY(temp_block, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    /* kLTC_EncryptKey: hardware derives the decrypt schedule itself. */
    status = LTC_AES_DecryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
        iv, dec_key, keySize, kLTC_EncryptKey);
    wolfSSL_CryptHwMutexUnLock();

    /* store IV for next call */
    if (status == kStatus_Success) {
        XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
    }

    return (status == kStatus_Success) ? 0 : -1;
}
  3550. #endif /* HAVE_AES_DECRYPT */
  3551. #elif defined(FREESCALE_MMCAU)
/* AES-CBC encrypt using the Freescale MMCAU coprocessor.  CBC chaining
 * is performed in software, one block at a time, around the hardware
 * block cipher (wc_AesEncrypt). */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int offset = 0;
    byte *iv;
    byte temp_block[AES_BLOCK_SIZE];
    word32 blocks = (sz / AES_BLOCK_SIZE);
    int ret;

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    iv = (byte*)aes->reg;  /* running IV kept in the context */

    while (blocks--) {
        XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);

        /* XOR block with IV for CBC */
        xorbuf(temp_block, iv, AES_BLOCK_SIZE);

        ret = wc_AesEncrypt(aes, temp_block, out + offset);
        if (ret != 0)
            return ret;

        offset += AES_BLOCK_SIZE;

        /* store IV for next block: the ciphertext just produced */
        XMEMCPY(iv, out + offset - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
    }

    return 0;
}
  3580. #ifdef HAVE_AES_DECRYPT
  3581. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3582. {
  3583. int offset = 0;
  3584. byte* iv;
  3585. byte temp_block[AES_BLOCK_SIZE];
  3586. word32 blocks = (sz / AES_BLOCK_SIZE);
  3587. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3588. if (sz % AES_BLOCK_SIZE) {
  3589. return BAD_LENGTH_E;
  3590. }
  3591. #endif
  3592. if (blocks == 0)
  3593. return 0;
  3594. iv = (byte*)aes->reg;
  3595. while (blocks--) {
  3596. XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);
  3597. wc_AesDecrypt(aes, in + offset, out + offset);
  3598. /* XOR block with IV for CBC */
  3599. xorbuf(out + offset, iv, AES_BLOCK_SIZE);
  3600. /* store IV for next block */
  3601. XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
  3602. offset += AES_BLOCK_SIZE;
  3603. }
  3604. return 0;
  3605. }
  3606. #endif /* HAVE_AES_DECRYPT */
  3607. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
/* AES-CBC encrypt via the PIC32MZ crypto engine (single HW call). */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;

    if (sz == 0)
        return 0;

    /* hardware fails on input that is not a multiple of AES block size */
    if (sz % AES_BLOCK_SIZE != 0) {
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
        return BAD_LENGTH_E;
#else
        return BAD_FUNC_ARG;
#endif
    }

    ret = wc_Pic32AesCrypt(
        aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
        out, in, sz, PIC32_ENCRYPTION,
        PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);

    /* store iv for next call: last ciphertext block chains forward */
    if (ret == 0) {
        XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
    }

    return ret;
}
  3631. #ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt via the PIC32MZ crypto engine (single HW call). */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;
    byte scratch[AES_BLOCK_SIZE];

    if (sz == 0)
        return 0;

    /* hardware fails on input that is not a multiple of AES block size */
    if (sz % AES_BLOCK_SIZE != 0) {
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
        return BAD_LENGTH_E;
#else
        return BAD_FUNC_ARG;
#endif
    }

    /* save the last ciphertext block now: when in == out decryption
     * would overwrite it, and it is the IV for the next call */
    XMEMCPY(scratch, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    ret = wc_Pic32AesCrypt(
        aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
        out, in, sz, PIC32_DECRYPTION,
        PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);

    /* store iv for next call */
    if (ret == 0) {
        XMEMCPY((byte*)aes->reg, scratch, AES_BLOCK_SIZE);
    }

    return ret;
}
  3657. #endif /* HAVE_AES_DECRYPT */
  3658. #elif defined(WOLFSSL_ESP32_CRYPT) && \
  3659. !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
/* AES-CBC encrypt: delegated to the ESP32 hardware AES port
 * (implemented in wolfcrypt/src/port/Espressif). */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_esp32AesCbcEncrypt(aes, out, in, sz);
}
/* AES-CBC decrypt: delegated to the ESP32 hardware AES port
 * (implemented in wolfcrypt/src/port/Espressif). */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_esp32AesCbcDecrypt(aes, out, in, sz);
}
  3668. #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
/* AES-CBC encrypt via Arm CryptoCell.  Mode and direction come from the
 * SaSi user context in aes->ctx — presumably configured during key
 * setup; confirm against the CryptoCell key-set path. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
}
/* AES-CBC decrypt via Arm CryptoCell.  Same SaSi call as encrypt; the
 * direction is carried by the user context in aes->ctx — presumably set
 * during key setup; confirm against the CryptoCell key-set path. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
}
  3677. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  3678. !defined(WOLFSSL_QNX_CAAM)
  3679. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  3680. #elif defined(WOLFSSL_AFALG)
  3681. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  3682. #elif defined(WOLFSSL_KCAPI_AES) && !defined(WOLFSSL_NO_KCAPI_AES_CBC)
  3683. /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  3684. #elif defined(WOLFSSL_DEVCRYPTO_CBC)
  3685. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  3686. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  3687. /* implemented in wolfcrypt/src/port/silabs/silabs_hash.c */
  3688. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  3689. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  3690. #else
/* Software AES - CBC Encrypt */
/* Encrypt sz bytes in CBC mode.  The running IV is kept in aes->reg and
 * is updated to the last ciphertext block so consecutive calls chain.
 * Dispatches to DCP / crypto-callback / async / SE050 / AES-NI backends
 * when the build and runtime support them, otherwise falls back to the
 * software block cipher.
 *
 * Returns 0 on success, BAD_FUNC_ARG on NULL arguments, BAD_LENGTH_E
 * for a ragged length (when length checks are enabled), MEMORY_E /
 * BAD_ALIGN_E on AES-NI alignment handling, or a cipher error code. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }
    if (sz == 0) {
        return 0;
    }

    blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    /* CBC requires full blocks */
    if (sz % AES_BLOCK_SIZE) {
        WOLFSSL_ERROR_VERBOSE(BAD_LENGTH_E);
        return BAD_LENGTH_E;
    }
#endif

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    /* DCP hardware path only handles 128-bit keys */
    if (aes->keylen == 16)
        return DCPAesCbcEncrypt(aes, out, in, sz);
#endif

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCbcEncrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
                                sz >= WC_ASYNC_THRESH_AES_CBC) {
    #if defined(HAVE_CAVIUM)
        return NitroxAesCbcEncrypt(aes, out, in, sz);
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesCbcEncrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen,
            (byte*)aes->reg, AES_BLOCK_SIZE);
    #else /* WOLFSSL_ASYNC_CRYPT_SW */
        /* queue the operation for the software async worker */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_CBC_ENCRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* Implemented in wolfcrypt/src/port/nxp/se050_port.c */
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, in, out, sz, AES_ENCRYPTION,
                               kAlgorithm_SSS_AES_CBC);
    }
#endif

#ifdef WOLFSSL_AESNI
    if (haveAESNI) {
    #ifdef DEBUG_AESNI
        printf("about to aes cbc encrypt\n");
        printf("in = %p\n", in);
        printf("out = %p\n", out);
        printf("aes->key = %p\n", aes->key);
        printf("aes->reg = %p\n", aes->reg);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", sz);
    #endif

        /* check alignment, decrypt doesn't need alignment */
        if ((wc_ptr_t)in % AESNI_ALIGN) {
        #ifndef NO_WOLFSSL_ALLOC_ALIGN
            /* input is misaligned: copy it into an aligned scratch
             * buffer, encrypt in place there, then copy back out */
            byte* tmp = (byte*)XMALLOC(sz + AES_BLOCK_SIZE + AESNI_ALIGN,
                                       aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            byte* tmp_align;
            if (tmp == NULL) return MEMORY_E;

            tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));
            XMEMCPY(tmp_align, in, sz);
            SAVE_VECTOR_REGISTERS(XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); return _svr_ret;);
            AES_CBC_encrypt(tmp_align, tmp_align, (byte*)aes->reg, sz,
                            (byte*)aes->key, (int)aes->rounds);
            RESTORE_VECTOR_REGISTERS();
            /* store iv for next call */
            XMEMCPY(aes->reg, tmp_align + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

            XMEMCPY(out, tmp_align, sz);
            XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            return 0;
        #else
            WOLFSSL_MSG("AES-CBC encrypt with bad alignment");
            WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
            return BAD_ALIGN_E;
        #endif
        }

        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_CBC_encrypt(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
                        (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        /* store iv for next call */
        XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
        return 0;
    }
#endif

    /* software fallback: chain through aes->reg block by block */
    while (blocks--) {
        int ret;
        xorbuf((byte*)aes->reg, in, AES_BLOCK_SIZE);
        ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->reg);
        if (ret != 0)
            return ret;
        XMEMCPY(out, aes->reg, AES_BLOCK_SIZE);

        out += AES_BLOCK_SIZE;
        in  += AES_BLOCK_SIZE;
    }

    return 0;
}
  3809. #ifdef HAVE_AES_DECRYPT
/* Software AES - CBC Decrypt */
/* Decrypt sz bytes in CBC mode.  aes->reg holds the running IV and is
 * updated with the last ciphertext block so consecutive calls chain;
 * aes->tmp is used to preserve ciphertext when in == out.  Dispatches
 * to DCP / crypto-callback / async / SE050 / AES-NI when available.
 *
 * Unlike encrypt, a ragged length is always rejected here (decrypting
 * a partial block is never meaningful); the error code depends on
 * WOLFSSL_AES_CBC_LENGTH_CHECKS. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }
    if (sz == 0) {
        return 0;
    }

    blocks = sz / AES_BLOCK_SIZE;
    if (sz % AES_BLOCK_SIZE) {
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
        return BAD_LENGTH_E;
#else
        return BAD_FUNC_ARG;
#endif
    }

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    /* DCP hardware path only handles 128-bit keys */
    if (aes->keylen == 16)
        return DCPAesCbcDecrypt(aes, out, in, sz);
#endif

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCbcDecrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
                                sz >= WC_ASYNC_THRESH_AES_CBC) {
    #if defined(HAVE_CAVIUM)
        return NitroxAesCbcDecrypt(aes, out, in, sz);
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesCbcDecrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen,
            (byte*)aes->reg, AES_BLOCK_SIZE);
    #else /* WOLFSSL_ASYNC_CRYPT_SW */
        /* queue the operation for the software async worker */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_CBC_DECRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* Implemented in wolfcrypt/src/port/nxp/se050_port.c */
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, in, out, sz, AES_DECRYPTION,
                               kAlgorithm_SSS_AES_CBC);
    }
#endif

#ifdef WOLFSSL_AESNI
    if (haveAESNI) {
    #ifdef DEBUG_AESNI
        printf("about to aes cbc decrypt\n");
        printf("in = %p\n", in);
        printf("out = %p\n", out);
        printf("aes->key = %p\n", aes->key);
        printf("aes->reg = %p\n", aes->reg);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", sz);
    #endif

        /* if input and output same will overwrite input iv */
        XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    #if defined(WOLFSSL_AESNI_BY4) || defined(WOLFSSL_X86_BUILD)
        AES_CBC_decrypt_by4(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
                            aes->rounds);
    #elif defined(WOLFSSL_AESNI_BY6)
        AES_CBC_decrypt_by6(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
                            aes->rounds);
    #else /* WOLFSSL_AESNI_BYx */
        AES_CBC_decrypt_by8(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
                            (int)aes->rounds);
    #endif /* WOLFSSL_AESNI_BYx */
        /* store iv for next call */
        RESTORE_VECTOR_REGISTERS();
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        return 0;
    }
#endif

    /* software fallback: save each ciphertext block in aes->tmp before
     * decrypting so in == out works, then XOR with the previous IV */
    while (blocks--) {
        int ret;
        XMEMCPY(aes->tmp, in, AES_BLOCK_SIZE);
        ret = wc_AesDecrypt(aes, (byte*)aes->tmp, out);
        if (ret != 0)
            return ret;
        xorbuf(out, (byte*)aes->reg, AES_BLOCK_SIZE);
        /* store iv for next call */
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

        out += AES_BLOCK_SIZE;
        in  += AES_BLOCK_SIZE;
    }

    return 0;
}
  3917. #endif /* HAVE_AES_DECRYPT */
  3918. #endif /* AES-CBC block */
  3919. #endif /* HAVE_AES_CBC */
  3920. /* AES-CTR */
  3921. #if defined(WOLFSSL_AES_COUNTER)
  3922. #ifdef STM32_CRYPTO
  3923. #define NEED_AES_CTR_SOFT
  3924. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
/* Encrypt exactly one AES block in CTR mode on STM32 crypto hardware.
 * Used as XTRANSFORM_AESCTRBLOCK by the software CTR wrapper, which
 * handles counter increment and partial blocks itself.
 * Returns 0 on success, a mutex/init error code, or WC_TIMEOUT_E. */
int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
    #ifdef STM32_HAL_V2
    word32 iv[AES_BLOCK_SIZE/sizeof(word32)];
    #endif
#else
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
#endif

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0) {
        return ret;
    }

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    #if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CTR;
    /* HAL v2 takes the IV as byte-reversed words */
    ByteReverseWords(iv, aes->reg, AES_BLOCK_SIZE);
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)iv;
    #elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CTR;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    #else
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    #endif
    HAL_CRYP_Init(&hcryp);

    #if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
    #elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (byte*)in, AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
    #else
    ret = HAL_CRYP_AESCTR_Encrypt(&hcryp, (byte*)in, AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
    #endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0) {
        return ret;
    }

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set key */
    CRYP_KeyInit(&keyInit);

    /* set iv (engine expects byte-reversed words) */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    ivInit.CRYP_IV0Left = ByteReverseWord32(iv[0]);
    ivInit.CRYP_IV0Right = ByteReverseWord32(iv[1]);
    ivInit.CRYP_IV1Left = ByteReverseWord32(iv[2]);
    ivInit.CRYP_IV1Right = ByteReverseWord32(iv[3]);
    CRYP_IVInit(&ivInit);

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CTR;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();

    /* feed one 16-byte block, word at a time */
    CRYP_DataIn(*(uint32_t*)&in[0]);
    CRYP_DataIn(*(uint32_t*)&in[4]);
    CRYP_DataIn(*(uint32_t*)&in[8]);
    CRYP_DataIn(*(uint32_t*)&in[12]);

    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    *(uint32_t*)&out[0] = CRYP_DataOut();
    *(uint32_t*)&out[4] = CRYP_DataOut();
    *(uint32_t*)&out[8] = CRYP_DataOut();
    *(uint32_t*)&out[12] = CRYP_DataOut();

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */

    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();
    return ret;
}
  4021. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  4022. #define NEED_AES_CTR_SOFT
  4023. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
/* Encrypt one counter block with the PIC32MZ crypto engine.  The
 * counter is copied into a temp buffer so the hardware call leaves
 * aes->reg untouched; the soft-CTR wrapper increments aes->reg itself
 * after each block. */
int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
{
    word32 tmpIv[AES_BLOCK_SIZE / sizeof(word32)];
    XMEMCPY(tmpIv, aes->reg, AES_BLOCK_SIZE);
    return wc_Pic32AesCrypt(
        aes->key, aes->keylen, tmpIv, AES_BLOCK_SIZE,
        out, in, AES_BLOCK_SIZE,
        PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCTR);
}
  4033. #elif defined(HAVE_COLDFIRE_SEC)
  4034. #error "Coldfire SEC doesn't currently support AES-CTR mode"
  4035. #elif defined(FREESCALE_LTC)
/* AES-CTR encrypt/decrypt using the Freescale LTC accelerator.
 * Keystream bytes left over from a previous partial-block call are
 * consumed from aes->tmp first; the hardware call then processes the
 * rest and maintains aes->tmp / aes->left for the next call. */
int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    word32 keySize;
    byte *iv, *enc_key;
    byte* tmp;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    /* consume any unused bytes left in aes->tmp */
    tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left;
    while (aes->left && sz) {
        *(out++) = *(in++) ^ *(tmp++);
        aes->left--;
        sz--;
    }

    if (sz) {
        iv      = (byte*)aes->reg;   /* running counter */
        enc_key = (byte*)aes->key;

        ret = wc_AesGetKeySize(aes, &keySize);
        if (ret != 0)
            return ret;

        ret = wolfSSL_CryptHwMutexLock();
        if (ret != 0)
            return ret;
        LTC_AES_CryptCtr(LTC_BASE, in, out, sz,
            iv, enc_key, keySize, (byte*)aes->tmp,
            (uint32_t*)&aes->left);
        wolfSSL_CryptHwMutexUnLock();
    }

    return ret;
}
  4068. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  4069. !defined(WOLFSSL_QNX_CAAM)
  4070. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  4071. #elif defined(WOLFSSL_AFALG)
  4072. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  4073. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  4074. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  4075. #elif defined(WOLFSSL_ESP32_CRYPT) && \
  4076. !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
  4077. /* esp32 doesn't support CRT mode by hw. */
  4078. /* use aes ecnryption plus sw implementation */
  4079. #define NEED_AES_CTR_SOFT
  4080. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  4081. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  4082. #else
  4083. /* Use software based AES counter */
  4084. #define NEED_AES_CTR_SOFT
  4085. #endif
  4086. #ifdef NEED_AES_CTR_SOFT
  4087. /* Increment AES counter */
  4088. static WC_INLINE void IncrementAesCounter(byte* inOutCtr)
  4089. {
  4090. /* in network byte order so start at end and work back */
  4091. int i;
  4092. for (i = AES_BLOCK_SIZE - 1; i >= 0; i--) {
  4093. if (++inOutCtr[i]) /* we're done unless we overflow */
  4094. return;
  4095. }
  4096. }
/* Software AES - CTR Encrypt */
/* CTR mode: out = in XOR E_K(counter), counter in aes->reg incremented
 * per block.  Leftover keystream from a previous partial-block call
 * (aes->tmp, count in aes->left) is consumed first; a trailing partial
 * block leaves its unused keystream in aes->tmp for the next call.
 * Encrypt and decrypt are the same operation in CTR mode.
 *
 * Returns 0 on success, BAD_FUNC_ARG on NULL args, or a cipher error. */
int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    byte scratch[AES_BLOCK_SIZE];
    int ret;
    word32 processed;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCtrEncrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    /* consume any unused bytes left in aes->tmp */
    processed = min(aes->left, sz);
    xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left,
              processed);
    out += processed;
    in += processed;
    aes->left -= processed;
    sz -= processed;

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
    !defined(XTRANSFORM_AESCTRBLOCK)
    /* fast path (non-aliasing buffers only): expand all counter values
     * into 'out', encrypt them in one ECB pass, then XOR with input */
    if (in != out && sz >= AES_BLOCK_SIZE) {
        word32 blocks = sz / AES_BLOCK_SIZE;
        byte* counter = (byte*)aes->reg;
        byte* c = out;
        while (blocks--) {
            XMEMCPY(c, counter, AES_BLOCK_SIZE);
            c += AES_BLOCK_SIZE;
            IncrementAesCounter(counter);
        }

        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, in, AES_BLOCK_SIZE * blocks);
        in += AES_BLOCK_SIZE * blocks;
        out += AES_BLOCK_SIZE * blocks;
        sz -= blocks * AES_BLOCK_SIZE;
    }
    else
#endif
    {
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Add("wc_AesCtrEncrypt scratch", scratch,
            AES_BLOCK_SIZE);
#endif
        /* do as many block size ops as possible */
        while (sz >= AES_BLOCK_SIZE) {
        #ifdef XTRANSFORM_AESCTRBLOCK
            /* hardware-assisted single-block CTR transform */
            XTRANSFORM_AESCTRBLOCK(aes, out, in);
        #else
            ret = wc_AesEncrypt(aes, (byte*)aes->reg, scratch);
            if (ret != 0) {
                ForceZero(scratch, AES_BLOCK_SIZE);
            #ifdef WOLFSSL_CHECK_MEM_ZERO
                wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
            #endif
                return ret;
            }
            xorbuf(scratch, in, AES_BLOCK_SIZE);
            XMEMCPY(out, scratch, AES_BLOCK_SIZE);
        #endif
            IncrementAesCounter((byte*)aes->reg);

            out += AES_BLOCK_SIZE;
            in += AES_BLOCK_SIZE;
            sz -= AES_BLOCK_SIZE;
            aes->left = 0;
        }
        ForceZero(scratch, AES_BLOCK_SIZE);
    }

    /* handle non block size remaining and store unused byte count in left */
    if (sz) {
        /* one more keystream block into aes->tmp; unused tail stays
         * there for the next call */
        ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->tmp);
        if (ret != 0) {
            ForceZero(scratch, AES_BLOCK_SIZE);
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
        #endif
            return ret;
        }
        IncrementAesCounter((byte*)aes->reg);

        aes->left = AES_BLOCK_SIZE - sz;
        xorbufout(out, in, aes->tmp, sz);
    }

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
#endif
    return 0;
}
  4194. #endif /* NEED_AES_CTR_SOFT */
  4195. #endif /* WOLFSSL_AES_COUNTER */
  4196. #endif /* !WOLFSSL_ARMASM */
  4197. /*
  4198. * The IV for AES GCM and CCM, stored in struct Aes's member reg, is comprised
  4199. * of two parts in order:
  4200. * 1. The fixed field which may be 0 or 4 bytes long. In TLS, this is set
  4201. * to the implicit IV.
  4202. * 2. The explicit IV is generated by wolfCrypt. It needs to be managed
  4203. * by wolfCrypt to ensure the IV is unique for each call to encrypt.
  4204. * The IV may be a 96-bit random value, or the 32-bit fixed value and a
  4205. * 64-bit set of 0 or random data. The final 32-bits of reg is used as a
  4206. * block counter during the encryption.
  4207. */
  4208. #if (defined(HAVE_AESGCM) && !defined(WC_NO_RNG)) || defined(HAVE_AESCCM)
  4209. static WC_INLINE void IncCtr(byte* ctr, word32 ctrSz)
  4210. {
  4211. int i;
  4212. for (i = (int)ctrSz - 1; i >= 0; i--) {
  4213. if (++ctr[i])
  4214. break;
  4215. }
  4216. }
  4217. #endif /* HAVE_AESGCM || HAVE_AESCCM */
  4218. #ifdef HAVE_AESGCM
  4219. #ifdef WOLFSSL_AESGCM_STREAM
  4220. /* Access initialization counter data. */
  4221. #define AES_INITCTR(aes) ((aes)->streamData + 0 * AES_BLOCK_SIZE)
  4222. /* Access counter data. */
  4223. #define AES_COUNTER(aes) ((aes)->streamData + 1 * AES_BLOCK_SIZE)
  4224. /* Access tag data. */
  4225. #define AES_TAG(aes) ((aes)->streamData + 2 * AES_BLOCK_SIZE)
  4226. /* Access last GHASH block. */
  4227. #define AES_LASTGBLOCK(aes) ((aes)->streamData + 3 * AES_BLOCK_SIZE)
  4228. /* Access last encrypted block. */
  4229. #define AES_LASTBLOCK(aes) ((aes)->streamData + 4 * AES_BLOCK_SIZE)
  4230. #endif
  4231. #if defined(HAVE_COLDFIRE_SEC)
  4232. #error "Coldfire SEC doesn't currently support AES-GCM mode"
  4233. #endif
  4234. #ifdef WOLFSSL_ARMASM
  4235. /* implementation is located in wolfcrypt/src/port/arm/armv8-aes.c */
  4236. #elif defined(WOLFSSL_AFALG)
  4237. /* implemented in wolfcrypt/src/port/afalg/afalg_aes.c */
  4238. #elif defined(WOLFSSL_KCAPI_AES)
  4239. /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  4240. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  4241. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  4242. #else /* software + AESNI implementation */
  4243. #if !defined(FREESCALE_LTC_AES_GCM)
  4244. static WC_INLINE void IncrementGcmCounter(byte* inOutCtr)
  4245. {
  4246. int i;
  4247. /* in network byte order so start at end and work back */
  4248. for (i = AES_BLOCK_SIZE - 1; i >= AES_BLOCK_SIZE - CTR_SZ; i--) {
  4249. if (++inOutCtr[i]) /* we're done unless we overflow */
  4250. return;
  4251. }
  4252. }
  4253. #endif /* !FREESCALE_LTC_AES_GCM */
  4254. #if defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
  4255. static WC_INLINE void FlattenSzInBits(byte* buf, word32 sz)
  4256. {
  4257. /* Multiply the sz by 8 */
  4258. word32 szHi = (sz >> (8*sizeof(sz) - 3));
  4259. sz <<= 3;
  4260. /* copy over the words of the sz into the destination buffer */
  4261. buf[0] = (byte)(szHi >> 24);
  4262. buf[1] = (byte)(szHi >> 16);
  4263. buf[2] = (byte)(szHi >> 8);
  4264. buf[3] = (byte)szHi;
  4265. buf[4] = (byte)(sz >> 24);
  4266. buf[5] = (byte)(sz >> 16);
  4267. buf[6] = (byte)(sz >> 8);
  4268. buf[7] = (byte)sz;
  4269. }
/* Right-shift the 128-bit value x by one bit in GCM's GF(2^128) bit
 * order and reduce: if the bit shifted out (low bit of x[15]) was set,
 * the reduction constant 0xE1 is folded into the top byte.  The
 * condition is applied via a mask, not a branch, so the operation is
 * constant time with respect to the data. */
static WC_INLINE void RIGHTSHIFTX(byte* x)
{
    int i;
    int carryIn = 0;
    /* 0xE1 if the outgoing bit is set, 0x00 otherwise (mask trick) */
    byte borrow = (0x00 - (x[15] & 0x01)) & 0xE1;

    for (i = 0; i < AES_BLOCK_SIZE; i++) {
        /* bit that falls from this byte into the next one */
        int carryOut = (x[i] & 0x01) << 7;
        x[i] = (byte) ((x[i] >> 1) | carryIn);
        carryIn = carryOut;
    }
    /* conditional reduction by the GHASH polynomial */
    x[0] ^= borrow;
}
  4282. #endif /* defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT) */
  4283. #ifdef GCM_TABLE
/* Build the 256-entry GHASH multiplication table M0 from the hash key
 * H (gcm->H), so a GHASH multiply can proceed one byte at a time by
 * table lookup.  Single-bit indices are derived by repeated GF right
 * shifts of H; every other index is the XOR of its set-bit entries. */
void GenerateM0(Gcm* gcm)
{
    int i, j;
    byte (*m)[AES_BLOCK_SIZE] = gcm->M0;

    /* index 0x80 is H itself */
    XMEMCPY(m[128], gcm->H, AES_BLOCK_SIZE);

    /* each lower single-bit entry is the previous one shifted right */
    for (i = 64; i > 0; i /= 2) {
        XMEMCPY(m[i], m[i*2], AES_BLOCK_SIZE);
        RIGHTSHIFTX(m[i]);
    }

    /* composite indices: m[i+j] = m[i] ^ m[j], i a power of two, j < i */
    for (i = 2; i < 256; i *= 2) {
        for (j = 1; j < i; j++) {
            XMEMCPY(m[i+j], m[i], AES_BLOCK_SIZE);
            xorbuf(m[i+j], m[j], AES_BLOCK_SIZE);
        }
    }

    /* zero times H is zero */
    XMEMSET(m[0], 0, AES_BLOCK_SIZE);
}
  4301. #elif defined(GCM_TABLE_4BIT)
  4302. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  4303. static WC_INLINE void Shift4_M0(byte *r8, byte *z8)
  4304. {
  4305. int i;
  4306. for (i = 15; i > 0; i--)
  4307. r8[i] = (byte)(z8[i-1] << 4) | (byte)(z8[i] >> 4);
  4308. r8[0] = (byte)(z8[0] >> 4);
  4309. }
  4310. #endif
/* Build the 4-bit GHASH table from the hash key H (gcm->H): m[b] holds
 * b*H for each 4-bit value b interpreted with bit 3 as the top bit
 * (so 0x8 = H, 0x4 = H>>1, 0x2 = H>>2, 0x1 = H>>3), and composite
 * indices are XOR combinations.  On little-endian builds a second set
 * of 16 entries (m[16..31]) holds the same products pre-shifted right
 * by 4 bits for low-nibble lookups. */
void GenerateM0(Gcm* gcm)
{
#if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
    int i;
#endif
    byte (*m)[AES_BLOCK_SIZE] = gcm->M0;

    /* 0 times -> 0x0 */
    XMEMSET(m[0x0], 0, AES_BLOCK_SIZE);
    /* 1 times -> 0x8 */
    XMEMCPY(m[0x8], gcm->H, AES_BLOCK_SIZE);
    /* 2 times -> 0x4 */
    XMEMCPY(m[0x4], m[0x8], AES_BLOCK_SIZE);
    RIGHTSHIFTX(m[0x4]);
    /* 4 times -> 0x2 */
    XMEMCPY(m[0x2], m[0x4], AES_BLOCK_SIZE);
    RIGHTSHIFTX(m[0x2]);
    /* 8 times -> 0x1 */
    XMEMCPY(m[0x1], m[0x2], AES_BLOCK_SIZE);
    RIGHTSHIFTX(m[0x1]);

    /* remaining indices: XOR of the single-bit entries */
    /* 0x3 */
    XMEMCPY(m[0x3], m[0x2], AES_BLOCK_SIZE);
    xorbuf (m[0x3], m[0x1], AES_BLOCK_SIZE);
    /* 0x5 -> 0x7 */
    XMEMCPY(m[0x5], m[0x4], AES_BLOCK_SIZE);
    xorbuf (m[0x5], m[0x1], AES_BLOCK_SIZE);
    XMEMCPY(m[0x6], m[0x4], AES_BLOCK_SIZE);
    xorbuf (m[0x6], m[0x2], AES_BLOCK_SIZE);
    XMEMCPY(m[0x7], m[0x4], AES_BLOCK_SIZE);
    xorbuf (m[0x7], m[0x3], AES_BLOCK_SIZE);
    /* 0x9 -> 0xf */
    XMEMCPY(m[0x9], m[0x8], AES_BLOCK_SIZE);
    xorbuf (m[0x9], m[0x1], AES_BLOCK_SIZE);
    XMEMCPY(m[0xa], m[0x8], AES_BLOCK_SIZE);
    xorbuf (m[0xa], m[0x2], AES_BLOCK_SIZE);
    XMEMCPY(m[0xb], m[0x8], AES_BLOCK_SIZE);
    xorbuf (m[0xb], m[0x3], AES_BLOCK_SIZE);
    XMEMCPY(m[0xc], m[0x8], AES_BLOCK_SIZE);
    xorbuf (m[0xc], m[0x4], AES_BLOCK_SIZE);
    XMEMCPY(m[0xd], m[0x8], AES_BLOCK_SIZE);
    xorbuf (m[0xd], m[0x5], AES_BLOCK_SIZE);
    XMEMCPY(m[0xe], m[0x8], AES_BLOCK_SIZE);
    xorbuf (m[0xe], m[0x6], AES_BLOCK_SIZE);
    XMEMCPY(m[0xf], m[0x8], AES_BLOCK_SIZE);
    xorbuf (m[0xf], m[0x7], AES_BLOCK_SIZE);

#if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
    /* pre-shifted copies for the low-nibble lookups */
    for (i = 0; i < 16; i++) {
        Shift4_M0(m[16+i], m[i]);
    }
#endif
}
  4361. #endif /* GCM_TABLE */
/* Software AES - GCM SetKey */
/* Set the AES-GCM key: load the AES key schedule (encrypt direction,
 * zero IV) and derive the GHASH key H = E_K(0^128), plus whatever
 * multiplication tables the configured GHASH implementation needs.
 *
 * aes  AES context to configure
 * key  raw key, 16/24/32 bytes (or a CAAM blob when blob support is on)
 * len  key length in bytes
 *
 * Returns 0 on success, BAD_FUNC_ARG for a bad key length or NULL aes,
 * or an error from wc_AesSetKey / wc_AesEncrypt. */
int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len)
{
    int ret;
    byte iv[AES_BLOCK_SIZE];

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    byte local[32];
    word32 localSz = 32;

    /* a blob-wrapped key is unwrapped into 'local' first */
    if (len == (16 + WC_CAAM_BLOB_SZ) ||
        len == (24 + WC_CAAM_BLOB_SZ) ||
        len == (32 + WC_CAAM_BLOB_SZ)) {
        if (wc_caamOpenBlob((byte*)key, len, local, &localSz) != 0) {
            return BAD_FUNC_ARG;
        }
        /* set local values */
        key = local;
        len = localSz;
    }
#endif

    if (!((len == 16) || (len == 24) || (len == 32)))
        return BAD_FUNC_ARG;
    if (aes == NULL) {
#ifdef WOLFSSL_IMX6_CAAM_BLOB
        ForceZero(local, sizeof(local));
#endif
        return BAD_FUNC_ARG;
    }

#ifdef OPENSSL_EXTRA
    /* reset AAD state tracked for the OpenSSL-compat layer */
    XMEMSET(aes->gcm.aadH, 0, sizeof(aes->gcm.aadH));
    aes->gcm.aadLen = 0;
#endif

    /* GCM always keys the cipher in the encrypt direction, zero IV */
    XMEMSET(iv, 0, AES_BLOCK_SIZE);
    ret = wc_AesSetKey(aes, key, len, iv, AES_ENCRYPTION);

#ifdef WOLFSSL_AESGCM_STREAM
    aes->gcmKeySet = 1;
#endif

#ifdef WOLFSSL_AESNI
    /* AES-NI code generates its own H value. */
    if (haveAESNI)
        return ret;
#endif /* WOLFSSL_AESNI */
#if defined(WOLFSSL_SECO_CAAM)
    /* SECO device keeps the key material; no software H needed */
    if (aes->devId == WOLFSSL_SECO_DEVID) {
        return ret;
    }
#endif /* WOLFSSL_SECO_CAAM */

#if !defined(FREESCALE_LTC_AES_GCM)
    /* H = E_K(0^128): the GHASH hash subkey */
    if (ret == 0)
        ret = wc_AesEncrypt(aes, iv, aes->gcm.H);
    if (ret == 0) {
    #if defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
        GenerateM0(&aes->gcm);
    #endif /* GCM_TABLE */
    }
#endif /* FREESCALE_LTC_AES_GCM */

#if defined(WOLFSSL_XILINX_CRYPT) || defined(WOLFSSL_AFALG_XILINX_AES)
    wc_AesGcmSetKey_ex(aes, key, len, WOLFSSL_XILINX_AES_KEY_SRC);
#endif

#ifdef WOLF_CRYPTO_CB
    /* keep a raw key copy for crypto-callback devices */
    if (aes->devId != INVALID_DEVID) {
        XMEMCPY(aes->devKey, key, len);
    }
#endif

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    ForceZero(local, sizeof(local));
#endif

    return ret;
}
#ifdef WOLFSSL_AESNI
#if defined(USE_INTEL_SPEEDUP)
    #define HAVE_INTEL_AVX1
    #define HAVE_INTEL_AVX2
#endif /* USE_INTEL_SPEEDUP */

/* Prototypes for the one-shot assembly AES-GCM implementations.
 *
 * in/out:  nbytes of plaintext/ciphertext.
 * addt:    abytes of additional authenticated data.
 * ivec:    ibytes of IV/nonce.
 * tag:     tbytes of authentication tag (written on encrypt, checked on
 *          decrypt).
 * key/nr:  expanded key schedule and number of AES rounds.
 * res:     (decrypt only) receives the tag-comparison result.
 */
void AES_GCM_encrypt(const unsigned char *in, unsigned char *out,
                     const unsigned char* addt, const unsigned char* ivec,
                     unsigned char *tag, word32 nbytes,
                     word32 abytes, word32 ibytes,
                     word32 tbytes, const unsigned char* key, int nr)
                     XASM_LINK("AES_GCM_encrypt");
#ifdef HAVE_INTEL_AVX1
/* AVX1 variant of AES_GCM_encrypt. */
void AES_GCM_encrypt_avx1(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes,
                          word32 tbytes, const unsigned char* key,
                          int nr)
                          XASM_LINK("AES_GCM_encrypt_avx1");
#ifdef HAVE_INTEL_AVX2
/* AVX2 variant of AES_GCM_encrypt. */
void AES_GCM_encrypt_avx2(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes,
                          word32 tbytes, const unsigned char* key,
                          int nr)
                          XASM_LINK("AES_GCM_encrypt_avx2");
#endif /* HAVE_INTEL_AVX2 */
#endif /* HAVE_INTEL_AVX1 */

#ifdef HAVE_AES_DECRYPT
void AES_GCM_decrypt(const unsigned char *in, unsigned char *out,
                     const unsigned char* addt, const unsigned char* ivec,
                     const unsigned char *tag, word32 nbytes, word32 abytes,
                     word32 ibytes, word32 tbytes, const unsigned char* key,
                     int nr, int* res)
                     XASM_LINK("AES_GCM_decrypt");
#ifdef HAVE_INTEL_AVX1
/* AVX1 variant of AES_GCM_decrypt. */
void AES_GCM_decrypt_avx1(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          const unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes, word32 tbytes,
                          const unsigned char* key, int nr, int* res)
                          XASM_LINK("AES_GCM_decrypt_avx1");
#ifdef HAVE_INTEL_AVX2
/* AVX2 variant of AES_GCM_decrypt. */
void AES_GCM_decrypt_avx2(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          const unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes, word32 tbytes,
                          const unsigned char* key, int nr, int* res)
                          XASM_LINK("AES_GCM_decrypt_avx2");
#endif /* HAVE_INTEL_AVX2 */
#endif /* HAVE_INTEL_AVX1 */
#endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_AESNI */
  4484. #if defined(GCM_SMALL)
  4485. static void GMULT(byte* X, byte* Y)
  4486. {
  4487. byte Z[AES_BLOCK_SIZE];
  4488. byte V[AES_BLOCK_SIZE];
  4489. int i, j;
  4490. XMEMSET(Z, 0, AES_BLOCK_SIZE);
  4491. XMEMCPY(V, X, AES_BLOCK_SIZE);
  4492. for (i = 0; i < AES_BLOCK_SIZE; i++)
  4493. {
  4494. byte y = Y[i];
  4495. for (j = 0; j < 8; j++)
  4496. {
  4497. if (y & 0x80) {
  4498. xorbuf(Z, V, AES_BLOCK_SIZE);
  4499. }
  4500. RIGHTSHIFTX(V);
  4501. y = y << 1;
  4502. }
  4503. }
  4504. XMEMCPY(X, Z, AES_BLOCK_SIZE);
  4505. }
  4506. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  4507. word32 cSz, byte* s, word32 sSz)
  4508. {
  4509. byte x[AES_BLOCK_SIZE];
  4510. byte scratch[AES_BLOCK_SIZE];
  4511. word32 blocks, partial;
  4512. byte* h;
  4513. if (gcm == NULL) {
  4514. return;
  4515. }
  4516. h = gcm->H;
  4517. XMEMSET(x, 0, AES_BLOCK_SIZE);
  4518. /* Hash in A, the Additional Authentication Data */
  4519. if (aSz != 0 && a != NULL) {
  4520. blocks = aSz / AES_BLOCK_SIZE;
  4521. partial = aSz % AES_BLOCK_SIZE;
  4522. while (blocks--) {
  4523. xorbuf(x, a, AES_BLOCK_SIZE);
  4524. GMULT(x, h);
  4525. a += AES_BLOCK_SIZE;
  4526. }
  4527. if (partial != 0) {
  4528. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4529. XMEMCPY(scratch, a, partial);
  4530. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4531. GMULT(x, h);
  4532. }
  4533. }
  4534. /* Hash in C, the Ciphertext */
  4535. if (cSz != 0 && c != NULL) {
  4536. blocks = cSz / AES_BLOCK_SIZE;
  4537. partial = cSz % AES_BLOCK_SIZE;
  4538. while (blocks--) {
  4539. xorbuf(x, c, AES_BLOCK_SIZE);
  4540. GMULT(x, h);
  4541. c += AES_BLOCK_SIZE;
  4542. }
  4543. if (partial != 0) {
  4544. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4545. XMEMCPY(scratch, c, partial);
  4546. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4547. GMULT(x, h);
  4548. }
  4549. }
  4550. /* Hash in the lengths of A and C in bits */
  4551. FlattenSzInBits(&scratch[0], aSz);
  4552. FlattenSzInBits(&scratch[8], cSz);
  4553. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4554. GMULT(x, h);
  4555. /* Copy the result into s. */
  4556. XMEMCPY(s, x, sSz);
  4557. }
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for small implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes   AES GCM object.
 * @param [in]      block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                     \
    do {                                                \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE);    \
        GMULT(AES_TAG(aes), aes->gcm.H);                \
    }                                                   \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
  4578. /* end GCM_SMALL */
  4579. #elif defined(GCM_TABLE)
/* Pre-computed remainder table for byte-wise GHASH reduction (GCM_TABLE).
 * R[b] holds the two bytes folded back into the accumulator when byte b
 * is shifted out during reduction by x^128 + x^7 + x^2 + x + 1 — see the
 * use in GMULT below (Z[0] = R[a][0]; Z[1] ^= R[a][1]). */
static const byte R[256][2] = {
    {0x00, 0x00}, {0x01, 0xc2}, {0x03, 0x84}, {0x02, 0x46},
    {0x07, 0x08}, {0x06, 0xca}, {0x04, 0x8c}, {0x05, 0x4e},
    {0x0e, 0x10}, {0x0f, 0xd2}, {0x0d, 0x94}, {0x0c, 0x56},
    {0x09, 0x18}, {0x08, 0xda}, {0x0a, 0x9c}, {0x0b, 0x5e},
    {0x1c, 0x20}, {0x1d, 0xe2}, {0x1f, 0xa4}, {0x1e, 0x66},
    {0x1b, 0x28}, {0x1a, 0xea}, {0x18, 0xac}, {0x19, 0x6e},
    {0x12, 0x30}, {0x13, 0xf2}, {0x11, 0xb4}, {0x10, 0x76},
    {0x15, 0x38}, {0x14, 0xfa}, {0x16, 0xbc}, {0x17, 0x7e},
    {0x38, 0x40}, {0x39, 0x82}, {0x3b, 0xc4}, {0x3a, 0x06},
    {0x3f, 0x48}, {0x3e, 0x8a}, {0x3c, 0xcc}, {0x3d, 0x0e},
    {0x36, 0x50}, {0x37, 0x92}, {0x35, 0xd4}, {0x34, 0x16},
    {0x31, 0x58}, {0x30, 0x9a}, {0x32, 0xdc}, {0x33, 0x1e},
    {0x24, 0x60}, {0x25, 0xa2}, {0x27, 0xe4}, {0x26, 0x26},
    {0x23, 0x68}, {0x22, 0xaa}, {0x20, 0xec}, {0x21, 0x2e},
    {0x2a, 0x70}, {0x2b, 0xb2}, {0x29, 0xf4}, {0x28, 0x36},
    {0x2d, 0x78}, {0x2c, 0xba}, {0x2e, 0xfc}, {0x2f, 0x3e},
    {0x70, 0x80}, {0x71, 0x42}, {0x73, 0x04}, {0x72, 0xc6},
    {0x77, 0x88}, {0x76, 0x4a}, {0x74, 0x0c}, {0x75, 0xce},
    {0x7e, 0x90}, {0x7f, 0x52}, {0x7d, 0x14}, {0x7c, 0xd6},
    {0x79, 0x98}, {0x78, 0x5a}, {0x7a, 0x1c}, {0x7b, 0xde},
    {0x6c, 0xa0}, {0x6d, 0x62}, {0x6f, 0x24}, {0x6e, 0xe6},
    {0x6b, 0xa8}, {0x6a, 0x6a}, {0x68, 0x2c}, {0x69, 0xee},
    {0x62, 0xb0}, {0x63, 0x72}, {0x61, 0x34}, {0x60, 0xf6},
    {0x65, 0xb8}, {0x64, 0x7a}, {0x66, 0x3c}, {0x67, 0xfe},
    {0x48, 0xc0}, {0x49, 0x02}, {0x4b, 0x44}, {0x4a, 0x86},
    {0x4f, 0xc8}, {0x4e, 0x0a}, {0x4c, 0x4c}, {0x4d, 0x8e},
    {0x46, 0xd0}, {0x47, 0x12}, {0x45, 0x54}, {0x44, 0x96},
    {0x41, 0xd8}, {0x40, 0x1a}, {0x42, 0x5c}, {0x43, 0x9e},
    {0x54, 0xe0}, {0x55, 0x22}, {0x57, 0x64}, {0x56, 0xa6},
    {0x53, 0xe8}, {0x52, 0x2a}, {0x50, 0x6c}, {0x51, 0xae},
    {0x5a, 0xf0}, {0x5b, 0x32}, {0x59, 0x74}, {0x58, 0xb6},
    {0x5d, 0xf8}, {0x5c, 0x3a}, {0x5e, 0x7c}, {0x5f, 0xbe},
    {0xe1, 0x00}, {0xe0, 0xc2}, {0xe2, 0x84}, {0xe3, 0x46},
    {0xe6, 0x08}, {0xe7, 0xca}, {0xe5, 0x8c}, {0xe4, 0x4e},
    {0xef, 0x10}, {0xee, 0xd2}, {0xec, 0x94}, {0xed, 0x56},
    {0xe8, 0x18}, {0xe9, 0xda}, {0xeb, 0x9c}, {0xea, 0x5e},
    {0xfd, 0x20}, {0xfc, 0xe2}, {0xfe, 0xa4}, {0xff, 0x66},
    {0xfa, 0x28}, {0xfb, 0xea}, {0xf9, 0xac}, {0xf8, 0x6e},
    {0xf3, 0x30}, {0xf2, 0xf2}, {0xf0, 0xb4}, {0xf1, 0x76},
    {0xf4, 0x38}, {0xf5, 0xfa}, {0xf7, 0xbc}, {0xf6, 0x7e},
    {0xd9, 0x40}, {0xd8, 0x82}, {0xda, 0xc4}, {0xdb, 0x06},
    {0xde, 0x48}, {0xdf, 0x8a}, {0xdd, 0xcc}, {0xdc, 0x0e},
    {0xd7, 0x50}, {0xd6, 0x92}, {0xd4, 0xd4}, {0xd5, 0x16},
    {0xd0, 0x58}, {0xd1, 0x9a}, {0xd3, 0xdc}, {0xd2, 0x1e},
    {0xc5, 0x60}, {0xc4, 0xa2}, {0xc6, 0xe4}, {0xc7, 0x26},
    {0xc2, 0x68}, {0xc3, 0xaa}, {0xc1, 0xec}, {0xc0, 0x2e},
    {0xcb, 0x70}, {0xca, 0xb2}, {0xc8, 0xf4}, {0xc9, 0x36},
    {0xcc, 0x78}, {0xcd, 0xba}, {0xcf, 0xfc}, {0xce, 0x3e},
    {0x91, 0x80}, {0x90, 0x42}, {0x92, 0x04}, {0x93, 0xc6},
    {0x96, 0x88}, {0x97, 0x4a}, {0x95, 0x0c}, {0x94, 0xce},
    {0x9f, 0x90}, {0x9e, 0x52}, {0x9c, 0x14}, {0x9d, 0xd6},
    {0x98, 0x98}, {0x99, 0x5a}, {0x9b, 0x1c}, {0x9a, 0xde},
    {0x8d, 0xa0}, {0x8c, 0x62}, {0x8e, 0x24}, {0x8f, 0xe6},
    {0x8a, 0xa8}, {0x8b, 0x6a}, {0x89, 0x2c}, {0x88, 0xee},
    {0x83, 0xb0}, {0x82, 0x72}, {0x80, 0x34}, {0x81, 0xf6},
    {0x84, 0xb8}, {0x85, 0x7a}, {0x87, 0x3c}, {0x86, 0xfe},
    {0xa9, 0xc0}, {0xa8, 0x02}, {0xaa, 0x44}, {0xab, 0x86},
    {0xae, 0xc8}, {0xaf, 0x0a}, {0xad, 0x4c}, {0xac, 0x8e},
    {0xa7, 0xd0}, {0xa6, 0x12}, {0xa4, 0x54}, {0xa5, 0x96},
    {0xa0, 0xd8}, {0xa1, 0x1a}, {0xa3, 0x5c}, {0xa2, 0x9e},
    {0xb5, 0xe0}, {0xb4, 0x22}, {0xb6, 0x64}, {0xb7, 0xa6},
    {0xb2, 0xe8}, {0xb3, 0x2a}, {0xb1, 0x6c}, {0xb0, 0xae},
    {0xbb, 0xf0}, {0xba, 0x32}, {0xb8, 0x74}, {0xb9, 0xb6},
    {0xbc, 0xf8}, {0xbd, 0x3a}, {0xbf, 0x7c}, {0xbe, 0xbe} };
/* Multiply x by H in GF(2^128) using an 8-bit lookup table (GCM_TABLE).
 *
 * m[b] = b * H was pre-computed by GenerateM0. Bytes of x are processed
 * from least significant index upward; after each table XOR the 128-bit
 * accumulator is shifted one byte and the shifted-out byte is reduced
 * via the R remainder table.
 *
 * @param [in, out] x  128-bit value; replaced by x * H.
 * @param [in]      m  256-entry table of b * H blocks.
 */
static void GMULT(byte *x, byte m[256][AES_BLOCK_SIZE])
{
#if !defined(WORD64_AVAILABLE) || defined(BIG_ENDIAN_ORDER)
    int i, j;
    byte Z[AES_BLOCK_SIZE];
    byte a;

    XMEMSET(Z, 0, sizeof(Z));

    for (i = 15; i > 0; i--) {
        xorbuf(Z, m[x[i]], AES_BLOCK_SIZE);
        /* Byte about to be shifted out the bottom; reduce it via R. */
        a = Z[15];

        for (j = 15; j > 0; j--) {
            Z[j] = Z[j-1];
        }

        Z[0] = R[a][0];
        Z[1] ^= R[a][1];
    }
    xorbuf(Z, m[x[0]], AES_BLOCK_SIZE);

    XMEMCPY(x, Z, AES_BLOCK_SIZE);
#else
    /* 64-bit variant: a double-width buffer lets the byte shift be done
     * implicitly by moving the word64 window one byte per iteration.
     * NOTE(review): the word64 accesses through (word64*)(Z + i + 1) are
     * unaligned and type-punned — presumably relies on compiler/platform
     * tolerance; confirm for strict-alignment targets. */
    byte Z[AES_BLOCK_SIZE + AES_BLOCK_SIZE];
    byte a;
    word64* pZ;
    word64* pm;
    word64* px = (word64*)(x);
    int i;

    /* Seed the window with m[x[15]] and reduce its low byte. */
    pZ = (word64*)(Z + 15 + 1);
    pm = (word64*)(m[x[15]]);
    pZ[0] = pm[0];
    pZ[1] = pm[1];
    a = Z[16 + 15];
    Z[15] = R[a][0];
    Z[16] ^= R[a][1];

    for (i = 14; i > 0; i--) {
        /* Slide the window one byte down, XOR in m[x[i]], reduce. */
        pZ = (word64*)(Z + i + 1);
        pm = (word64*)(m[x[i]]);
        pZ[0] ^= pm[0];
        pZ[1] ^= pm[1];
        a = Z[16 + i];
        Z[i] = R[a][0];
        Z[i+1] ^= R[a][1];
    }
    /* Final byte: XOR m[x[0]] and write the result back to x. */
    pZ = (word64*)(Z + 1);
    pm = (word64*)(m[x[0]]);
    px[0] = pZ[0] ^ pm[0]; px[1] = pZ[1] ^ pm[1];
#endif
}
  4691. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  4692. word32 cSz, byte* s, word32 sSz)
  4693. {
  4694. byte x[AES_BLOCK_SIZE];
  4695. byte scratch[AES_BLOCK_SIZE];
  4696. word32 blocks, partial;
  4697. if (gcm == NULL) {
  4698. return;
  4699. }
  4700. XMEMSET(x, 0, AES_BLOCK_SIZE);
  4701. /* Hash in A, the Additional Authentication Data */
  4702. if (aSz != 0 && a != NULL) {
  4703. blocks = aSz / AES_BLOCK_SIZE;
  4704. partial = aSz % AES_BLOCK_SIZE;
  4705. while (blocks--) {
  4706. xorbuf(x, a, AES_BLOCK_SIZE);
  4707. GMULT(x, gcm->M0);
  4708. a += AES_BLOCK_SIZE;
  4709. }
  4710. if (partial != 0) {
  4711. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4712. XMEMCPY(scratch, a, partial);
  4713. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4714. GMULT(x, gcm->M0);
  4715. }
  4716. }
  4717. /* Hash in C, the Ciphertext */
  4718. if (cSz != 0 && c != NULL) {
  4719. blocks = cSz / AES_BLOCK_SIZE;
  4720. partial = cSz % AES_BLOCK_SIZE;
  4721. while (blocks--) {
  4722. xorbuf(x, c, AES_BLOCK_SIZE);
  4723. GMULT(x, gcm->M0);
  4724. c += AES_BLOCK_SIZE;
  4725. }
  4726. if (partial != 0) {
  4727. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4728. XMEMCPY(scratch, c, partial);
  4729. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4730. GMULT(x, gcm->M0);
  4731. }
  4732. }
  4733. /* Hash in the lengths of A and C in bits */
  4734. FlattenSzInBits(&scratch[0], aSz);
  4735. FlattenSzInBits(&scratch[8], cSz);
  4736. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4737. GMULT(x, gcm->M0);
  4738. /* Copy the result into s. */
  4739. XMEMCPY(s, x, sSz);
  4740. }
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for table implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H using the pre-computed table M0.
 *
 * @param [in, out] aes   AES GCM object.
 * @param [in]      block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                     \
    do {                                                \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE);    \
        GMULT(AES_TAG(aes), aes->gcm.M0);               \
    }                                                   \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
  4761. /* end GCM_TABLE */
  4762. #elif defined(GCM_TABLE_4BIT)
  4763. /* remainder = x^7 + x^2 + x^1 + 1 => 0xe1
  4764. * R shifts right a reverse bit pair of bytes such that:
  4765. * R(b0, b1) => b1 = (b1 >> 1) | (b0 << 7); b0 >>= 1
  4766. * 0 => 0, 0, 0, 0 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ 00,00 = 00,00
  4767. * 8 => 0, 0, 0, 1 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ e1,00 = e1,00
  4768. * 4 => 0, 0, 1, 0 => R(R(R(00,00) ^ 00,00) ^ e1,00) ^ 00,00 = 70,80
  4769. * 2 => 0, 1, 0, 0 => R(R(R(00,00) ^ e1,00) ^ 00,00) ^ 00,00 = 38,40
  4770. * 1 => 1, 0, 0, 0 => R(R(R(e1,00) ^ 00,00) ^ 00,00) ^ 00,00 = 1c,20
* To calculate the rest, XOR the results for each set bit.
  4772. * e.g. 6 = 4 ^ 2 => 48,c0
  4773. *
  4774. * Second half is same values rotated by 4-bits.
  4775. */
#if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
/* Nibble remainder table: R[n] is the two-byte value folded back in when
 * nibble n is shifted out during 4-bit GHASH reduction. */
static const byte R[16][2] = {
    {0x00, 0x00}, {0x1c, 0x20}, {0x38, 0x40}, {0x24, 0x60},
    {0x70, 0x80}, {0x6c, 0xa0}, {0x48, 0xc0}, {0x54, 0xe0},
    {0xe1, 0x00}, {0xfd, 0x20}, {0xd9, 0x40}, {0xc5, 0x60},
    {0x91, 0x80}, {0x8d, 0xa0}, {0xa9, 0xc0}, {0xb5, 0xe0},
};
#else
/* Same remainders packed little-endian into 16-bit words; entries
 * [16..31] hold the values rotated by 4 bits for the byte-at-a-time
 * loop in GMULT. */
static const word16 R[32] = {
    0x0000, 0x201c, 0x4038, 0x6024,
    0x8070, 0xa06c, 0xc048, 0xe054,
    0x00e1, 0x20fd, 0x40d9, 0x60c5,
    0x8091, 0xa08d, 0xc0a9, 0xe0b5,
    0x0000, 0xc201, 0x8403, 0x4602,
    0x0807, 0xca06, 0x8c04, 0x4e05,
    0x100e, 0xd20f, 0x940d, 0x560c,
    0x1809, 0xda08, 0x9c0a, 0x5e0b,
};
#endif
  4795. /* Multiply in GF(2^128) defined by polynomial:
  4796. * x^128 + x^7 + x^2 + x^1 + 1.
  4797. *
  4798. * H: hash key = encrypt(key, 0)
  4799. * x = x * H in field
  4800. *
* x: cumulative result
  4802. * m: 4-bit table
  4803. * [0..15] * H
  4804. */
  4805. #if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
/* Multiply x by H using the 4-bit table (big-endian / 16-bit CPUs).
 *
 * Each byte of x contributes two nibbles; after each table XOR the
 * accumulator is shifted right by 4 bits and the shifted-out nibble is
 * reduced via R.
 *
 * @param [in, out] x  128-bit value; replaced by x * H.
 * @param [in]      m  16-entry table of n * H blocks.
 */
static void GMULT(byte *x, byte m[16][AES_BLOCK_SIZE])
{
    int i, j, n;
    byte Z[AES_BLOCK_SIZE];
    byte a;

    XMEMSET(Z, 0, sizeof(Z));

    for (i = 15; i >= 0; i--) {
        /* n == 0: low nibble of x[i]; n == 1: high nibble. */
        for (n = 0; n < 2; n++) {
            if (n == 0)
                xorbuf(Z, m[x[i] & 0xf], AES_BLOCK_SIZE);
            else {
                xorbuf(Z, m[x[i] >> 4], AES_BLOCK_SIZE);
                /* Last nibble: no shift/reduction follows. */
                if (i == 0)
                    break;
            }
            /* Nibble about to be shifted out the bottom. */
            a = Z[15] & 0xf;

            /* Shift the 128-bit accumulator right by 4 bits. */
            for (j = 15; j > 0; j--)
                Z[j] = (Z[j-1] << 4) | (Z[j] >> 4);
            Z[0] >>= 4;

            /* Fold the reduced nibble back in. */
            Z[0] ^= R[a][0];
            Z[1] ^= R[a][1];
        }
    }
    XMEMCPY(x, Z, AES_BLOCK_SIZE);
}
  4831. #elif defined(WC_32BIT_CPU)
/* Multiply x by H using the 4-bit tables, 32-bit word implementation.
 *
 * Processes a byte of x per iteration: the accumulator is rotated by a
 * whole byte, using m[n] (n * H) for the low nibble and m[16 + n]
 * (n * H pre-rotated by 4 bits) for the high nibble, with remainders
 * folded in from the packed R table.
 *
 * NOTE(review): word32 accesses through (word32*)m[...] and (word32*)x
 * assume suitable alignment of the table and input — confirm for
 * strict-alignment targets.
 *
 * @param [in, out] x  128-bit value; replaced by x * H.
 * @param [in]      m  32-entry table: [0..15] n * H, [16..31] rotated.
 */
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word32 z8[4] = {0, 0, 0, 0};
    byte a;
    word32* x8 = (word32*)x;
    word32* m8;
    byte xi;
    word32 n7, n6, n5, n4, n3, n2, n1, n0;

    for (i = 15; i > 0; i--) {
        xi = x[i];

        /* XOR in (msn * H) */
        m8 = (word32*)m[xi & 0xf];
        z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

        /* Cache top byte for remainder calculations - lost in rotate. */
        a = (byte)(z8[3] >> 24);

        /* Rotate Z by 8-bits */
        z8[3] = (z8[2] >> 24) | (z8[3] << 8);
        z8[2] = (z8[1] >> 24) | (z8[2] << 8);
        z8[1] = (z8[0] >> 24) | (z8[1] << 8);
        z8[0] <<= 8;

        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word32)R[16 + (a & 0xf)];

        xi >>= 4;
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word32*)m[xi];
        a ^= (byte)(m8[3] >> 20);
        z8[0] ^= (word32)R[a >> 4];

        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word32*)m[16 + xi];
        z8[0] ^= m8[0]; z8[1] ^= m8[1];
        z8[2] ^= m8[2]; z8[3] ^= m8[3];
    }

    xi = x[0];

    /* XOR in most significant nibble * H */
    m8 = (word32*)m[xi & 0xf];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[3] >> 24) & 0xf;

    /* Rotate z by 4-bits */
    n7 = z8[3] & 0xf0f0f0f0ULL;
    n6 = z8[3] & 0x0f0f0f0fULL;
    n5 = z8[2] & 0xf0f0f0f0ULL;
    n4 = z8[2] & 0x0f0f0f0fULL;
    n3 = z8[1] & 0xf0f0f0f0ULL;
    n2 = z8[1] & 0x0f0f0f0fULL;
    n1 = z8[0] & 0xf0f0f0f0ULL;
    n0 = z8[0] & 0x0f0f0f0fULL;
    z8[3] = (n7 >> 4) | (n6 << 12) | (n4 >> 20);
    z8[2] = (n5 >> 4) | (n4 << 12) | (n2 >> 20);
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 20);
    z8[0] = (n1 >> 4) | (n0 << 12);

    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word32)R[a];
    /* XOR in next significant nibble * H */
    m8 = (word32*)m[xi >> 4];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

    /* Write back result. */
    x8[0] = z8[0]; x8[1] = z8[1]; x8[2] = z8[2]; x8[3] = z8[3];
}
  4892. #else
/* Multiply x by H using the 4-bit tables, 64-bit word implementation.
 *
 * Same scheme as the 32-bit variant: one byte of x per iteration, with
 * the pre-rotated table half (m[16..31]) covering the high nibble and
 * remainders folded in from the packed R table.
 *
 * NOTE(review): word64 accesses through (word64*)m[...] and (word64*)x
 * assume suitable alignment — confirm for strict-alignment targets.
 *
 * @param [in, out] x  128-bit value; replaced by x * H.
 * @param [in]      m  32-entry table: [0..15] n * H, [16..31] rotated.
 */
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word64 z8[2] = {0, 0};
    byte a;
    word64* x8 = (word64*)x;
    word64* m8;
    word64 n0, n1, n2, n3;
    byte xi;

    for (i = 15; i > 0; i--) {
        xi = x[i];

        /* XOR in (msn * H) */
        m8 = (word64*)m[xi & 0xf];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];

        /* Cache top byte for remainder calculations - lost in rotate. */
        a = (byte)(z8[1] >> 56);

        /* Rotate Z by 8-bits */
        z8[1] = (z8[0] >> 56) | (z8[1] << 8);
        z8[0] <<= 8;

        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word64*)m[16 + (xi >> 4)];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];

        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word64)R[16 + (a & 0xf)];
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word64*)m[xi >> 4];
        a ^= (byte)(m8[1] >> 52);
        z8[0] ^= (word64)R[a >> 4];
    }

    xi = x[0];

    /* XOR in most significant nibble * H */
    m8 = (word64*)m[xi & 0xf];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];

    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[1] >> 56) & 0xf;

    /* Rotate z by 4-bits */
    n3 = z8[1] & W64LIT(0xf0f0f0f0f0f0f0f0);
    n2 = z8[1] & W64LIT(0x0f0f0f0f0f0f0f0f);
    n1 = z8[0] & W64LIT(0xf0f0f0f0f0f0f0f0);
    n0 = z8[0] & W64LIT(0x0f0f0f0f0f0f0f0f);
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 52);
    z8[0] = (n1 >> 4) | (n0 << 12);

    /* XOR in next significant nibble * H */
    m8 = (word64*)m[xi >> 4];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];
    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word64)R[a];

    /* Write back result. */
    x8[0] = z8[0];
    x8[1] = z8[1];
}
  4948. #endif
  4949. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  4950. word32 cSz, byte* s, word32 sSz)
  4951. {
  4952. byte x[AES_BLOCK_SIZE];
  4953. byte scratch[AES_BLOCK_SIZE];
  4954. word32 blocks, partial;
  4955. if (gcm == NULL) {
  4956. return;
  4957. }
  4958. XMEMSET(x, 0, AES_BLOCK_SIZE);
  4959. /* Hash in A, the Additional Authentication Data */
  4960. if (aSz != 0 && a != NULL) {
  4961. blocks = aSz / AES_BLOCK_SIZE;
  4962. partial = aSz % AES_BLOCK_SIZE;
  4963. while (blocks--) {
  4964. xorbuf(x, a, AES_BLOCK_SIZE);
  4965. GMULT(x, gcm->M0);
  4966. a += AES_BLOCK_SIZE;
  4967. }
  4968. if (partial != 0) {
  4969. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4970. XMEMCPY(scratch, a, partial);
  4971. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4972. GMULT(x, gcm->M0);
  4973. }
  4974. }
  4975. /* Hash in C, the Ciphertext */
  4976. if (cSz != 0 && c != NULL) {
  4977. blocks = cSz / AES_BLOCK_SIZE;
  4978. partial = cSz % AES_BLOCK_SIZE;
  4979. while (blocks--) {
  4980. xorbuf(x, c, AES_BLOCK_SIZE);
  4981. GMULT(x, gcm->M0);
  4982. c += AES_BLOCK_SIZE;
  4983. }
  4984. if (partial != 0) {
  4985. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4986. XMEMCPY(scratch, c, partial);
  4987. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4988. GMULT(x, gcm->M0);
  4989. }
  4990. }
  4991. /* Hash in the lengths of A and C in bits */
  4992. FlattenSzInBits(&scratch[0], aSz);
  4993. FlattenSzInBits(&scratch[8], cSz);
  4994. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4995. GMULT(x, gcm->M0);
  4996. /* Copy the result into s. */
  4997. XMEMCPY(s, x, sSz);
  4998. }
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for 4-bit table implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H using the pre-computed 4-bit table.
 *
 * @param [in, out] aes   AES GCM object.
 * @param [in]      block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                     \
    do {                                                \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE);    \
        GMULT(AES_TAG(aes), (aes)->gcm.M0);             \
    }                                                   \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
  5019. #elif defined(WORD64_AVAILABLE) && !defined(GCM_WORD32)
  5020. #if !defined(FREESCALE_LTC_AES_GCM)
/* Multiply X by Y in GF(2^128) using 64-bit words (bit-serial).
 *
 * Values are held as two big-endian word64s. By default the conditional
 * accumulate is done branch-free with an arithmetic mask so the timing
 * does not depend on secret data; AES_GCM_GMULT_NCT selects the faster
 * branching (non-constant-time) form.
 *
 * @param [in, out] X  128-bit value; replaced by X * Y.
 * @param [in]      Y  128-bit multiplier.
 */
static void GMULT(word64* X, word64* Y)
{
    word64 Z[2] = {0,0};
    word64 V[2];
    int i, j;
    word64 v1;
    V[0] = X[0]; V[1] = X[1];

    for (i = 0; i < 2; i++)
    {
        word64 y = Y[i];
        for (j = 0; j < 64; j++)
        {
#ifndef AES_GCM_GMULT_NCT
            /* Constant-time: all-ones mask when the top bit of y is set,
             * all-zeros otherwise — no branch on secret data. */
            word64 mask = 0 - (y >> 63);
            Z[0] ^= V[0] & mask;
            Z[1] ^= V[1] & mask;
#else
            if (y & 0x8000000000000000ULL) {
                Z[0] ^= V[0];
                Z[1] ^= V[1];
            }
#endif
            /* V >>= 1; fold in the reduction polynomial (0xE1 << 56)
             * when the dropped low bit was set (also branch-free). */
            v1 = (0 - (V[1] & 1)) & 0xE100000000000000ULL;
            V[1] >>= 1;
            V[1] |= V[0] << 63;
            V[0] >>= 1;
            V[0] ^= v1;
            y <<= 1;
        }
    }
    X[0] = Z[0];
    X[1] = Z[1];
}
/* GHASH over the AAD and ciphertext (word64 implementation).
 *
 * Blocks are loaded into big-endian word64 pairs before folding. Under
 * OPENSSL_EXTRA the partial tag after the AAD is cached in gcm->aadH so
 * a later call can resume with ciphertext only (compat-layer streaming).
 *
 * @param [in]  gcm  GCM state holding hash key H. No-op when NULL.
 * @param [in]  a    Additional authenticated data (may be NULL).
 * @param [in]  aSz  Length of AAD in bytes.
 * @param [in]  c    Ciphertext (may be NULL).
 * @param [in]  cSz  Length of ciphertext in bytes.
 * @param [out] s    Output buffer for the tag.
 * @param [in]  sSz  Number of result bytes to write out.
 */
void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    word64 x[2] = {0,0};
    word32 blocks, partial;
    word64 bigH[2];

    if (gcm == NULL) {
        return;
    }

    /* Load H as two big-endian 64-bit words for GMULT. */
    XMEMCPY(bigH, gcm->H, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(bigH, bigH, AES_BLOCK_SIZE);
#endif

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        word64 bigA[2];
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigA, a, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Zero-pad the trailing partial block before folding. */
            XMEMSET(bigA, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigA, a, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
        }
#ifdef OPENSSL_EXTRA
        /* store AAD partial tag for next call */
        gcm->aadH[0] = (word32)((x[0] & 0xFFFFFFFF00000000ULL) >> 32);
        gcm->aadH[1] = (word32)(x[0] & 0xFFFFFFFF);
        gcm->aadH[2] = (word32)((x[1] & 0xFFFFFFFF00000000ULL) >> 32);
        gcm->aadH[3] = (word32)(x[1] & 0xFFFFFFFF);
#endif
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        word64 bigC[2];
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
#ifdef OPENSSL_EXTRA
        /* Start from last AAD partial tag */
        if(gcm->aadLen) {
            x[0] = ((word64)gcm->aadH[0]) << 32 | gcm->aadH[1];
            x[1] = ((word64)gcm->aadH[2]) << 32 | gcm->aadH[3];
        }
#endif
        while (blocks--) {
            XMEMCPY(bigC, c, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(bigC, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigC, c, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
        }
    }

    /* Hash in the lengths in bits of A and C */
    {
        word64 len[2];
        len[0] = aSz; len[1] = cSz;
#ifdef OPENSSL_EXTRA
        /* Use the cached AAD length when resuming from a partial tag. */
        if (gcm->aadLen)
            len[0] = (word64)gcm->aadLen;
#endif
        /* Lengths are in bytes. Convert to bits. */
        len[0] *= 8;
        len[1] *= 8;

        x[0] ^= len[0];
        x[1] ^= len[1];
        GMULT(x, bigH);
    }
#ifdef LITTLE_ENDIAN_ORDER
    /* Convert the tag back to byte order before output. */
    ByteReverseWords64(x, x, AES_BLOCK_SIZE);
#endif
    XMEMCPY(s, x, sSz);
}
  5153. #endif /* !FREESCALE_LTC_AES_GCM */
  5154. #ifdef WOLFSSL_AESGCM_STREAM
  5155. #ifdef LITTLE_ENDIAN_ORDER
/* Streaming GHASH initialization: byte-reverse H into big-endian word64
 * form once, so GMULT can operate on it directly for every block.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords64((word64*)aes->gcm.H, (word64*)aes->gcm.H, AES_BLOCK_SIZE)
  5162. /* GHASH one block of data..
  5163. *
  5164. * XOR block into tag and GMULT with H.
  5165. *
  5166. * @param [in, out] aes AES GCM object.
  5167. * @param [in] block Block of AAD or cipher text.
  5168. */
  5169. #define GHASH_ONE_BLOCK(aes, block) \
  5170. do { \
  5171. word64* x = (word64*)AES_TAG(aes); \
  5172. word64* h = (word64*)aes->gcm.H; \
  5173. word64 block64[2]; \
  5174. XMEMCPY(block64, block, AES_BLOCK_SIZE); \
  5175. ByteReverseWords64(block64, block64, AES_BLOCK_SIZE); \
  5176. x[0] ^= block64[0]; \
  5177. x[1] ^= block64[1]; \
  5178. GMULT(x, h); \
  5179. } \
  5180. while (0)
  5181. #ifdef OPENSSL_EXTRA
  5182. /* GHASH in AAD and cipher text lengths in bits.
  5183. *
  5184. * Convert tag back to little-endian.
  5185. *
  5186. * @param [in, out] aes AES GCM object.
  5187. */
  5188. #define GHASH_LEN_BLOCK(aes) \
  5189. do { \
  5190. word64* x = (word64*)AES_TAG(aes); \
  5191. word64* h = (word64*)aes->gcm.H; \
  5192. word64 len[2]; \
  5193. len[0] = aes->aSz; len[1] = aes->cSz; \
  5194. if (aes->gcm.aadLen) \
  5195. len[0] = (word64)aes->gcm.aadLen; \
  5196. /* Lengths are in bytes. Convert to bits. */ \
  5197. len[0] *= 8; \
  5198. len[1] *= 8; \
  5199. \
  5200. x[0] ^= len[0]; \
  5201. x[1] ^= len[1]; \
  5202. GMULT(x, h); \
  5203. ByteReverseWords64(x, x, AES_BLOCK_SIZE); \
  5204. } \
  5205. while (0)
  5206. #else
  5207. /* GHASH in AAD and cipher text lengths in bits.
  5208. *
  5209. * Convert tag back to little-endian.
  5210. *
  5211. * @param [in, out] aes AES GCM object.
  5212. */
  5213. #define GHASH_LEN_BLOCK(aes) \
  5214. do { \
  5215. word64* x = (word64*)AES_TAG(aes); \
  5216. word64* h = (word64*)aes->gcm.H; \
  5217. word64 len[2]; \
  5218. len[0] = aes->aSz; len[1] = aes->cSz; \
  5219. /* Lengths are in bytes. Convert to bits. */ \
  5220. len[0] *= 8; \
  5221. len[1] *= 8; \
  5222. \
  5223. x[0] ^= len[0]; \
  5224. x[1] ^= len[1]; \
  5225. GMULT(x, h); \
  5226. ByteReverseWords64(x, x, AES_BLOCK_SIZE); \
  5227. } \
  5228. while (0)
  5229. #endif
  5230. #else
  5231. /* No extra initialization for small implementation.
  5232. *
  5233. * @param [in] aes AES GCM object.
  5234. */
  5235. #define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING
  5236. /* GHASH one block of data..
  5237. *
  5238. * XOR block into tag and GMULT with H.
  5239. *
  5240. * @param [in, out] aes AES GCM object.
  5241. * @param [in] block Block of AAD or cipher text.
  5242. */
  5243. #define GHASH_ONE_BLOCK(aes, block) \
  5244. do { \
  5245. word64* x = (word64*)AES_TAG(aes); \
  5246. word64* h = (word64*)aes->gcm.H; \
  5247. word64 block64[2]; \
  5248. XMEMCPY(block64, block, AES_BLOCK_SIZE); \
  5249. x[0] ^= block64[0]; \
  5250. x[1] ^= block64[1]; \
  5251. GMULT(x, h); \
  5252. } \
  5253. while (0)
#ifdef OPENSSL_EXTRA
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Big-endian build: the tag is already in the byte order GMULT uses, so
 * no byte reversal is needed here. Uses the cached compat-layer AAD
 * length when set.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                    \
    do {                                        \
        word64* x = (word64*)AES_TAG(aes);      \
        word64* h = (word64*)aes->gcm.H;        \
        word64 len[2];                          \
        len[0] = aes->aSz; len[1] = aes->cSz;   \
        if (aes->gcm.aadLen)                    \
            len[0] = (word64)aes->gcm.aadLen;   \
        /* Lengths are in bytes. Convert to bits. */ \
        len[0] *= 8;                            \
        len[1] *= 8;                            \
                                                \
        x[0] ^= len[0];                         \
        x[1] ^= len[1];                         \
        GMULT(x, h);                            \
    }                                           \
    while (0)
#else
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Big-endian build: the tag is already in the byte order GMULT uses, so
 * no byte reversal is needed here.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                    \
    do {                                        \
        word64* x = (word64*)AES_TAG(aes);      \
        word64* h = (word64*)aes->gcm.H;        \
        word64 len[2];                          \
        len[0] = aes->aSz; len[1] = aes->cSz;   \
        /* Lengths are in bytes. Convert to bits. */ \
        len[0] *= 8;                            \
        len[1] *= 8;                            \
                                                \
        x[0] ^= len[0];                         \
        x[1] ^= len[1];                         \
        GMULT(x, h);                            \
    }                                           \
    while (0)
#endif
  5301. #endif /* !LITTLE_ENDIAN_ORDER */
  5302. #endif /* WOLFSSL_AESGCM_STREAM */
  5303. /* end defined(WORD64_AVAILABLE) && !defined(GCM_WORD32) */
  5304. #else /* GCM_WORD32 */
  5305. static void GMULT(word32* X, word32* Y)
  5306. {
  5307. word32 Z[4] = {0,0,0,0};
  5308. word32 V[4];
  5309. int i, j;
  5310. V[0] = X[0]; V[1] = X[1]; V[2] = X[2]; V[3] = X[3];
  5311. for (i = 0; i < 4; i++)
  5312. {
  5313. word32 y = Y[i];
  5314. for (j = 0; j < 32; j++)
  5315. {
  5316. if (y & 0x80000000) {
  5317. Z[0] ^= V[0];
  5318. Z[1] ^= V[1];
  5319. Z[2] ^= V[2];
  5320. Z[3] ^= V[3];
  5321. }
  5322. if (V[3] & 0x00000001) {
  5323. V[3] >>= 1;
  5324. V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
  5325. V[2] >>= 1;
  5326. V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
  5327. V[1] >>= 1;
  5328. V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
  5329. V[0] >>= 1;
  5330. V[0] ^= 0xE1000000;
  5331. } else {
  5332. V[3] >>= 1;
  5333. V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
  5334. V[2] >>= 1;
  5335. V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
  5336. V[1] >>= 1;
  5337. V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
  5338. V[0] >>= 1;
  5339. }
  5340. y <<= 1;
  5341. }
  5342. }
  5343. X[0] = Z[0];
  5344. X[1] = Z[1];
  5345. X[2] = Z[2];
  5346. X[3] = Z[3];
  5347. }
  5348. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  5349. word32 cSz, byte* s, word32 sSz)
  5350. {
  5351. word32 x[4] = {0,0,0,0};
  5352. word32 blocks, partial;
  5353. word32 bigH[4];
  5354. if (gcm == NULL) {
  5355. return;
  5356. }
  5357. XMEMCPY(bigH, gcm->H, AES_BLOCK_SIZE);
  5358. #ifdef LITTLE_ENDIAN_ORDER
  5359. ByteReverseWords(bigH, bigH, AES_BLOCK_SIZE);
  5360. #endif
  5361. /* Hash in A, the Additional Authentication Data */
  5362. if (aSz != 0 && a != NULL) {
  5363. word32 bigA[4];
  5364. blocks = aSz / AES_BLOCK_SIZE;
  5365. partial = aSz % AES_BLOCK_SIZE;
  5366. while (blocks--) {
  5367. XMEMCPY(bigA, a, AES_BLOCK_SIZE);
  5368. #ifdef LITTLE_ENDIAN_ORDER
  5369. ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
  5370. #endif
  5371. x[0] ^= bigA[0];
  5372. x[1] ^= bigA[1];
  5373. x[2] ^= bigA[2];
  5374. x[3] ^= bigA[3];
  5375. GMULT(x, bigH);
  5376. a += AES_BLOCK_SIZE;
  5377. }
  5378. if (partial != 0) {
  5379. XMEMSET(bigA, 0, AES_BLOCK_SIZE);
  5380. XMEMCPY(bigA, a, partial);
  5381. #ifdef LITTLE_ENDIAN_ORDER
  5382. ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
  5383. #endif
  5384. x[0] ^= bigA[0];
  5385. x[1] ^= bigA[1];
  5386. x[2] ^= bigA[2];
  5387. x[3] ^= bigA[3];
  5388. GMULT(x, bigH);
  5389. }
  5390. }
  5391. /* Hash in C, the Ciphertext */
  5392. if (cSz != 0 && c != NULL) {
  5393. word32 bigC[4];
  5394. blocks = cSz / AES_BLOCK_SIZE;
  5395. partial = cSz % AES_BLOCK_SIZE;
  5396. while (blocks--) {
  5397. XMEMCPY(bigC, c, AES_BLOCK_SIZE);
  5398. #ifdef LITTLE_ENDIAN_ORDER
  5399. ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
  5400. #endif
  5401. x[0] ^= bigC[0];
  5402. x[1] ^= bigC[1];
  5403. x[2] ^= bigC[2];
  5404. x[3] ^= bigC[3];
  5405. GMULT(x, bigH);
  5406. c += AES_BLOCK_SIZE;
  5407. }
  5408. if (partial != 0) {
  5409. XMEMSET(bigC, 0, AES_BLOCK_SIZE);
  5410. XMEMCPY(bigC, c, partial);
  5411. #ifdef LITTLE_ENDIAN_ORDER
  5412. ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
  5413. #endif
  5414. x[0] ^= bigC[0];
  5415. x[1] ^= bigC[1];
  5416. x[2] ^= bigC[2];
  5417. x[3] ^= bigC[3];
  5418. GMULT(x, bigH);
  5419. }
  5420. }
  5421. /* Hash in the lengths in bits of A and C */
  5422. {
  5423. word32 len[4];
  5424. /* Lengths are in bytes. Convert to bits. */
  5425. len[0] = (aSz >> (8*sizeof(aSz) - 3));
  5426. len[1] = aSz << 3;
  5427. len[2] = (cSz >> (8*sizeof(cSz) - 3));
  5428. len[3] = cSz << 3;
  5429. x[0] ^= len[0];
  5430. x[1] ^= len[1];
  5431. x[2] ^= len[2];
  5432. x[3] ^= len[3];
  5433. GMULT(x, bigH);
  5434. }
  5435. #ifdef LITTLE_ENDIAN_ORDER
  5436. ByteReverseWords(x, x, AES_BLOCK_SIZE);
  5437. #endif
  5438. XMEMCPY(s, x, sSz);
  5439. }
#ifdef WOLFSSL_AESGCM_STREAM
#ifdef LITTLE_ENDIAN_ORDER
/* Little-endian 32-bit word implementation requires byte reversal of H.
 *
 * H is the all-zeros block encrypted with the key.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords((word32*)aes->gcm.H, (word32*)aes->gcm.H, AES_BLOCK_SIZE)
/* GHASH one block of data.
 *
 * XOR block, in big-endian form, into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block) \
    do { \
        word32* x = (word32*)AES_TAG(aes); \
        word32* h = (word32*)aes->gcm.H; \
        word32 bigEnd[4]; \
        XMEMCPY(bigEnd, block, AES_BLOCK_SIZE); \
        ByteReverseWords(bigEnd, bigEnd, AES_BLOCK_SIZE); \
        x[0] ^= bigEnd[0]; \
        x[1] ^= bigEnd[1]; \
        x[2] ^= bigEnd[2]; \
        x[3] ^= bigEnd[3]; \
        GMULT(x, h); \
    } \
    while (0)
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Converts the tag back to little-endian byte order afterwards.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes) \
    do { \
        word32 len[4]; \
        word32* x = (word32*)AES_TAG(aes); \
        word32* h = (word32*)aes->gcm.H; \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3)); \
        len[1] = aes->aSz << 3; \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3)); \
        len[3] = aes->cSz << 3; \
        x[0] ^= len[0]; \
        x[1] ^= len[1]; \
        x[2] ^= len[2]; \
        x[3] ^= len[3]; \
        GMULT(x, h); \
        ByteReverseWords(x, x, AES_BLOCK_SIZE); \
    } \
    while (0)
#else
/* No extra initialization for 32-bit word implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING
/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H. Big-endian build: no byte
 * reversal is needed.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block) \
    do { \
        word32* x = (word32*)AES_TAG(aes); \
        word32* h = (word32*)aes->gcm.H; \
        word32 block32[4]; \
        XMEMCPY(block32, block, AES_BLOCK_SIZE); \
        x[0] ^= block32[0]; \
        x[1] ^= block32[1]; \
        x[2] ^= block32[2]; \
        x[3] ^= block32[3]; \
        GMULT(x, h); \
    } \
    while (0)
/* GHASH in AAD and cipher text lengths in bits.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes) \
    do { \
        word32 len[4]; \
        word32* x = (word32*)AES_TAG(aes); \
        word32* h = (word32*)aes->gcm.H; \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3)); \
        len[1] = aes->aSz << 3; \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3)); \
        len[3] = aes->cSz << 3; \
        x[0] ^= len[0]; \
        x[1] ^= len[1]; \
        x[2] ^= len[2]; \
        x[3] ^= len[3]; \
        GMULT(x, h); \
    } \
    while (0)
#endif /* LITTLE_ENDIAN_ORDER */
#endif /* WOLFSSL_AESGCM_STREAM */
  5542. #endif /* end GCM_WORD32 */
  5543. #if !defined(WOLFSSL_XILINX_CRYPT) && !defined(WOLFSSL_AFALG_XILINX_AES)
  5544. #ifdef WOLFSSL_AESGCM_STREAM
#ifndef GHASH_LEN_BLOCK
/* Hash in the lengths of the AAD and cipher text in bits.
 *
 * Default implementation: flatten both byte counts into a big-endian
 * 128-bit block of bit lengths and GHASH it.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes) \
    do { \
        byte scratch[AES_BLOCK_SIZE]; \
        FlattenSzInBits(&scratch[0], (aes)->aSz); \
        FlattenSzInBits(&scratch[8], (aes)->cSz); \
        GHASH_ONE_BLOCK(aes, scratch); \
    } \
    while (0)
#endif
/* Initialize a GHASH for streaming operations.
 *
 * Zeroes the running tag and the cached partial-block byte counts.
 *
 * @param [in, out] aes AES GCM object.
 */
static void GHASH_INIT(Aes* aes) {
    /* Set tag to all zeros as initial value. */
    XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
    /* Reset counts of AAD and cipher text. */
    aes->aOver = 0;
    aes->cOver = 0;
    /* Extra initialization based on implementation. */
    GHASH_INIT_EXTRA(aes);
}
/* Update the GHASH with AAD and/or cipher text.
 *
 * Data may arrive in arbitrary-sized chunks: bytes that do not fill a
 * whole 16-byte block are cached in AES_LASTGBLOCK and counted in
 * aes->aOver / aes->cOver until completed. All AAD must be supplied
 * before any cipher text; the first cipher-text call pads and flushes
 * any outstanding AAD partial block.
 *
 * @param [in, out] aes AES GCM object.
 * @param [in] a   Additional authentication data buffer. May be NULL.
 * @param [in] aSz Size of data in AAD buffer.
 * @param [in] c   Cipher text buffer. May be NULL.
 * @param [in] cSz Size of data in cipher text buffer.
 */
static void GHASH_UPDATE(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz)
{
    word32 blocks;
    word32 partial;

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        /* Update count of AAD we have hashed. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = (byte)aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }
        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, a);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, partial);
            aes->aOver = (byte)partial;
        }
    }
    if (aes->aOver > 0 && cSz > 0 && c != NULL) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        byte sz = AES_BLOCK_SIZE - aes->aOver;
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0, sz);
        /* GHASH last AAD block. */
        GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }
    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        /* Update count of cipher text we have hashed. */
        aes->cSz += cSz;
        if (aes->cOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->cOver;
            if (sz > cSz) {
                sz = (byte)cSz;
            }
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
            /* Update count of unused cached cipher text bytes. */
            aes->cOver += sz;
            if (aes->cOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->cOver = 0;
            }
            /* Used up some data. */
            cSz -= sz;
            c += sz;
        }
        /* Calculate number of blocks of cipher text and the leftover. */
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, c);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), c, partial);
            aes->cOver = (byte)partial;
        }
    }
}
  5673. /* Finalize the GHASH calculation.
  5674. *
  5675. * Complete hashing cipher text and hash the AAD and cipher text lengths.
  5676. *
  5677. * @param [in, out] aes AES GCM object.
  5678. * @param [out] s Authentication tag.
  5679. * @param [in] sSz Size of authentication tag required.
  5680. */
  5681. static void GHASH_FINAL(Aes* aes, byte* s, word32 sSz)
  5682. {
  5683. /* AAD block incomplete when > 0 */
  5684. byte over = aes->aOver;
  5685. if (aes->cOver > 0) {
  5686. /* Cipher text block incomplete. */
  5687. over = aes->cOver;
  5688. }
  5689. if (over > 0) {
  5690. /* Zeroize the unused part of the block. */
  5691. XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
  5692. /* Hash the last block of cipher text. */
  5693. GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
  5694. }
  5695. /* Hash in the lengths of AAD and cipher text in bits */
  5696. GHASH_LEN_BLOCK(aes);
  5697. /* Copy the result into s. */
  5698. XMEMCPY(s, AES_TAG(aes), sSz);
  5699. }
  5700. #endif /* WOLFSSL_AESGCM_STREAM */
  5701. #ifdef FREESCALE_LTC_AES_GCM
  5702. int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  5703. const byte* iv, word32 ivSz,
  5704. byte* authTag, word32 authTagSz,
  5705. const byte* authIn, word32 authInSz)
  5706. {
  5707. status_t status;
  5708. word32 keySize;
  5709. /* argument checks */
  5710. if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
  5711. return BAD_FUNC_ARG;
  5712. }
  5713. if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
  5714. WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
  5715. return BAD_FUNC_ARG;
  5716. }
  5717. status = wc_AesGetKeySize(aes, &keySize);
  5718. if (status)
  5719. return status;
  5720. status = wolfSSL_CryptHwMutexLock();
  5721. if (status != 0)
  5722. return status;
  5723. status = LTC_AES_EncryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
  5724. authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
  5725. wolfSSL_CryptHwMutexUnLock();
  5726. return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
  5727. }
  5728. #else
  5729. #ifdef STM32_CRYPTO_AES_GCM
/* this function supports inline encrypt (in == out is allowed) */
/* define STM32_AESGCM_PARTIAL for STM HW that does not support authentication
 * on byte multiples (see CRYP_HEADERWIDTHUNIT_BYTE) */
static WARN_UNUSED_RESULT int wc_AesGcmEncrypt_STM32(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#else
    word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
#endif
    word32 keySize;
#ifdef WOLFSSL_STM32_CUBEMX
    int status = HAL_OK;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
#else
    int status = SUCCESS;
#endif
    word32 partial = sz % AES_BLOCK_SIZE;
    word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
    word32 ctrInit[AES_BLOCK_SIZE/sizeof(word32)];
    word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
    word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
    byte* authInPadded = NULL;
    int authPadSz, wasAlloc = 0, useSwGhash = 0;

    ret = wc_AesGetKeySize(aes, &keySize);
    if (ret != 0)
        return ret;

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;
#endif

    /* Build the initial counter block J0. */
    XMEMSET(ctr, 0, AES_BLOCK_SIZE);
    if (ivSz == GCM_NONCE_MID_SZ) {
        /* 12-byte nonce: J0 = IV || 0x00000001 */
        byte* pCtr = (byte*)ctr;
        XMEMCPY(ctr, iv, ivSz);
        pCtr[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Other nonce lengths: J0 = GHASH(IV). */
        GHASH(&aes->gcm, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
    }
    XMEMCPY(ctrInit, ctr, sizeof(ctr)); /* save off initial counter for GMAC */

    /* Authentication buffer - must be 4-byte multiple zero padded */
    authPadSz = authInSz % sizeof(word32);
    if (authPadSz != 0) {
        authPadSz = authInSz + sizeof(word32) - authPadSz;
        if (authPadSz <= sizeof(authhdr)) {
            /* Small AAD fits in the stack buffer. */
            authInPadded = (byte*)authhdr;
        }
        else {
            authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
                DYNAMIC_TYPE_TMP_BUFFER);
            if (authInPadded == NULL) {
                /* NOTE(review): the HW mutex has not been locked yet at this
                 * point - this unlock looks unbalanced; confirm against the
                 * mutex implementation. */
                wolfSSL_CryptHwMutexUnLock();
                return MEMORY_E;
            }
            wasAlloc = 1;
        }
        XMEMSET(authInPadded, 0, authPadSz);
        XMEMCPY(authInPadded, authIn, authInSz);
    } else {
        authPadSz = authInSz;
        authInPadded = (byte*)authIn;
    }

    /* for cases where hardware cannot be used for authTag calculate it */
    /* if IV is not 12 calculate GHASH using software */
    if (ivSz != GCM_NONCE_MID_SZ
    #ifndef CRYP_HEADERWIDTHUNIT_BYTE
        /* or hardware that does not support partial block */
        || sz == 0 || partial != 0
    #endif
    #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
        /* or authIn is not a multiple of 4 */
        || authPadSz != authInSz
    #endif
    ) {
        useSwGhash = 1;
    }

    /* Hardware requires counter + 1 */
    IncrementGcmCounter((byte*)ctr);

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }
#ifdef WOLFSSL_STM32_CUBEMX
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
    hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_GCM;
    #ifdef CRYP_HEADERWIDTHUNIT_BYTE
    /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
    hcryp.Init.HeaderSize = authInSz;
    #else
    hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
    #endif
    #ifdef CRYP_KEYIVCONFIG_ONCE
    /* allows repeated calls to HAL_CRYP_Encrypt */
    hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
    #endif
    /* HAL V2 expects the IV as big-endian words. */
    ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
    HAL_CRYP_Init(&hcryp);

    #ifndef CRYP_KEYIVCONFIG_ONCE
    /* GCM payload phase - can handle partial blocks */
    status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
        (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
    #else
    /* GCM payload phase - blocks */
    if (blocks) {
        status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
            (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
    }
    /* GCM payload phase - partial remainder */
    if (status == HAL_OK && (partial != 0 || blocks == 0)) {
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)partialBlock, partial,
            (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    #endif
    if (status == HAL_OK && !useSwGhash) {
        /* Compute the authTag */
        status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
            STM32_HAL_TIMEOUT);
    }
#elif defined(STM32_CRYPTO_AES_ONLY)
    /* Set the CRYP parameters */
    hcryp.Init.HeaderSize = authPadSz;
    if (authPadSz == 0)
        hcryp.Init.Header = NULL; /* cannot pass pointer here when authIn == 0 */
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
    HAL_CRYP_Init(&hcryp);

    /* GCM init phase */
    status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
    if (status == HAL_OK) {
        /* GCM header phase */
        hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
        status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
    }
    if (status == HAL_OK) {
        /* GCM payload phase - blocks */
        hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
        if (blocks) {
            status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
                (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
        }
    }
    if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
        /* GCM payload phase - partial remainder */
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYPEx_AES_Auth(&hcryp, (uint8_t*)partialBlock, partial,
            (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    if (status == HAL_OK && !useSwGhash) {
        /* GCM final phase */
        hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
        status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
    }
#else
    hcryp.Init.HeaderSize = authPadSz;
    HAL_CRYP_Init(&hcryp);
    if (blocks) {
        /* GCM payload phase - blocks */
        status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (byte*)in,
            (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
    }
    if (status == HAL_OK && (partial != 0 || blocks == 0)) {
        /* GCM payload phase - partial remainder */
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (uint8_t*)partialBlock, partial,
            (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    if (status == HAL_OK && !useSwGhash) {
        /* Compute the authTag */
        status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
    }
#endif

    if (status != HAL_OK)
        ret = AES_GCM_AUTH_E;
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ByteReverseWords(keyCopy, (word32*)aes->key, keySize);
    status = CRYP_AES_GCM(MODE_ENCRYPT, (uint8_t*)ctr,
        (uint8_t*)keyCopy, keySize * 8,
        (uint8_t*)in, sz,
        (uint8_t*)authInPadded, authInSz,
        (uint8_t*)out, (uint8_t*)tag);
    if (status != SUCCESS)
        ret = AES_GCM_AUTH_E;
#endif /* WOLFSSL_STM32_CUBEMX */
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    if (ret == 0) {
        /* return authTag */
        if (authTag) {
            if (useSwGhash) {
                /* Software tag: GHASH over AAD and cipher text, then XOR
                 * with the encrypted initial counter block. */
                GHASH(&aes->gcm, authIn, authInSz, out, sz, authTag, authTagSz);
                ret = wc_AesEncrypt(aes, (byte*)ctrInit, (byte*)tag);
                if (ret == 0) {
                    xorbuf(authTag, tag, authTagSz);
                }
            }
            else {
                /* use hardware calculated tag */
                XMEMCPY(authTag, tag, authTagSz);
            }
        }
    }

    /* Free memory */
    if (wasAlloc) {
        XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
    }

    return ret;
}
  5957. #endif /* STM32_CRYPTO_AES_GCM */
#ifdef WOLFSSL_AESNI
/* For performance reasons, this code needs to be not inlined. */
WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz);
#else
static
#endif
/* Software AES-GCM encrypt: CTR-mode encrypt the plain text and compute
 * the authentication tag with GHASH.
 *
 * @param [in]  aes       AES object with key set.
 * @param [out] out       Cipher text output. May equal in (inline).
 * @param [in]  in        Plain text input.
 * @param [in]  sz        Length of plain text in bytes.
 * @param [in]  iv        Nonce.
 * @param [in]  ivSz      Nonce length in bytes.
 * @param [out] authTag   Authentication tag output. May be NULL to skip.
 * @param [in]  authTagSz Tag length in bytes.
 * @param [in]  authIn    Additional authentication data.
 * @param [in]  authInSz  AAD length in bytes.
 * @return 0 on success, or error from wc_AesEncrypt/wc_Pic32AesCrypt.
 */
WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partial = sz % AES_BLOCK_SIZE;
    const byte* p = in;
    byte* c = out;
    ALIGN16 byte counter[AES_BLOCK_SIZE];
    ALIGN16 byte initialCounter[AES_BLOCK_SIZE];
    ALIGN16 byte scratch[AES_BLOCK_SIZE];

    if (ivSz == GCM_NONCE_MID_SZ) {
        /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
        XMEMCPY(counter, iv, ivSz);
        XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
            AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
        counter[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Counter is GHASH of IV. */
#ifdef OPENSSL_EXTRA
        /* Don't let a stored AAD length leak into the IV hash. */
        word32 aadTemp = aes->gcm.aadLen;
        aes->gcm.aadLen = 0;
#endif
        GHASH(&aes->gcm, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
#ifdef OPENSSL_EXTRA
        aes->gcm.aadLen = aadTemp;
#endif
    }
    /* Keep J0 for the final tag computation. */
    XMEMCPY(initialCounter, counter, AES_BLOCK_SIZE);

#ifdef WOLFSSL_PIC32MZ_CRYPT
    if (blocks) {
        /* use initial IV for HW, but don't use it below */
        XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
        ret = wc_Pic32AesCrypt(
            aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
            out, in, (blocks * AES_BLOCK_SIZE),
            PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
        if (ret != 0)
            return ret;
    }
    /* process remainder using partial handling */
#endif

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
    /* some hardware acceleration can gain performance from doing AES encryption
     * of the whole buffer at once */
    if (c != p && blocks > 0) { /* can not handle inline encryption */
        /* Lay the counter stream into out, ECB-encrypt it in one call,
         * then XOR with the plain text. */
        while (blocks--) {
            IncrementGcmCounter(counter);
            XMEMCPY(c, counter, AES_BLOCK_SIZE);
            c += AES_BLOCK_SIZE;
        }
        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, p, AES_BLOCK_SIZE * blocks);
        p += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB && !WOLFSSL_PIC32MZ_CRYPT */
    {
        /* Block-at-a-time CTR encryption (also the inline path). */
        while (blocks--) {
            IncrementGcmCounter(counter);
#if !defined(WOLFSSL_PIC32MZ_CRYPT)
            ret = wc_AesEncrypt(aes, counter, scratch);
            if (ret != 0)
                return ret;
            xorbufout(c, scratch, p, AES_BLOCK_SIZE);
#endif
            p += AES_BLOCK_SIZE;
            c += AES_BLOCK_SIZE;
        }
    }

    if (partial != 0) {
        /* Final short block: use only 'partial' bytes of key stream. */
        IncrementGcmCounter(counter);
        ret = wc_AesEncrypt(aes, counter, scratch);
        if (ret != 0)
            return ret;
        xorbufout(c, scratch, p, partial);
    }
    if (authTag) {
        /* Tag = GHASH(AAD, cipher text) XOR E(K, J0). */
        GHASH(&aes->gcm, authIn, authInSz, out, sz, authTag, authTagSz);
        ret = wc_AesEncrypt(aes, initialCounter, scratch);
        if (ret != 0)
            return ret;
        xorbuf(authTag, scratch, authTagSz);
#ifdef OPENSSL_EXTRA
        if (!in && !sz)
            /* store AAD size for next call */
            aes->gcm.aadLen = authInSz;
#endif
    }

    return ret;
}
/* Software AES - GCM Encrypt.
 *
 * Validates arguments, then dispatches to crypto callbacks, async/HW
 * back-ends (Cavium, Intel QA, Silabs, STM32), AES-NI assembly, or the
 * portable C implementation, in that order of preference.
 */
int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    /* argument checks */
    if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
        return BAD_FUNC_ARG;
    }
    if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
        WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmEncrypt(aes, out, in, sz, iv, ivSz, authTag,
                authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
            sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
    #if defined(HAVE_CAVIUM)
        #ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */
            return NitroxAesGcmEncrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
        #endif
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmEncrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
    #else /* WOLFSSL_ASYNC_CRYPT_SW */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_GCM_ENCRYPT)) {
            /* Queue the operation for the software async worker. */
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            sw->aes.iv = iv;
            sw->aes.ivSz = ivSz;
            sw->aes.authTag = authTag;
            sw->aes.authTagSz = authTagSz;
            sw->aes.authIn = authIn;
            sw->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_SILABS_SE_ACCEL
    return wc_AesGcmEncrypt_silabs(
        aes, out, in, sz,
        iv, ivSz,
        authTag, authTagSz,
        authIn, authInSz);
#endif

#ifdef STM32_CRYPTO_AES_GCM
    return wc_AesGcmEncrypt_STM32(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif /* STM32_CRYPTO_AES_GCM */

#ifdef WOLFSSL_AESNI
    #ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_encrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (const byte*)aes->key, (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
    #endif
    #if defined(HAVE_INTEL_AVX1)
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_encrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (const byte*)aes->key, (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
    #endif
    if (haveAESNI) {
        AES_GCM_encrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (const byte*)aes->key, (int)aes->rounds);
        return 0;
    }
    else
#endif
    {
        /* Portable C fallback. */
        return AES_GCM_encrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz,
            authIn, authInSz);
    }
}
  6172. #endif
  6173. /* AES GCM Decrypt */
  6174. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  6175. #ifdef FREESCALE_LTC_AES_GCM
/* AES-GCM decrypt using the FREESCALE LTC hardware.
 *
 * The hardware verifies the authentication tag; a mismatch is reported
 * as AES_GCM_AUTH_E.
 */
int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
    word32 keySize;
    status_t status;

    /* argument checks */
    /* If the sz is non-zero, both in and out must be set. If sz is 0,
     * in and out are don't cares, as this is is the GMAC case. */
    if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
            authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
            ivSz == 0) {
        return BAD_FUNC_ARG;
    }

    ret = wc_AesGetKeySize(aes, &keySize);
    if (ret != 0) {
        return ret;
    }

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_DecryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
        authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
}
  6204. #else
  6205. #ifdef STM32_CRYPTO_AES_GCM
  6206. /* this function supports inline decrypt */
  6207. static WARN_UNUSED_RESULT int wc_AesGcmDecrypt_STM32(
  6208. Aes* aes, byte* out,
  6209. const byte* in, word32 sz,
  6210. const byte* iv, word32 ivSz,
  6211. const byte* authTag, word32 authTagSz,
  6212. const byte* authIn, word32 authInSz)
  6213. {
  6214. int ret;
  6215. #ifdef WOLFSSL_STM32_CUBEMX
  6216. int status = HAL_OK;
  6217. CRYP_HandleTypeDef hcryp;
  6218. word32 blocks = sz / AES_BLOCK_SIZE;
  6219. #else
  6220. int status = SUCCESS;
  6221. word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
  6222. #endif
  6223. word32 keySize;
  6224. word32 partial = sz % AES_BLOCK_SIZE;
  6225. word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
  6226. word32 tagExpected[AES_BLOCK_SIZE/sizeof(word32)];
  6227. word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
  6228. word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
  6229. word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
  6230. byte* authInPadded = NULL;
  6231. int authPadSz, wasAlloc = 0, tagComputed = 0;
  6232. ret = wc_AesGetKeySize(aes, &keySize);
  6233. if (ret != 0)
  6234. return ret;
  6235. #ifdef WOLFSSL_STM32_CUBEMX
  6236. ret = wc_Stm32_Aes_Init(aes, &hcryp);
  6237. if (ret != 0)
  6238. return ret;
  6239. #endif
  6240. XMEMSET(ctr, 0, AES_BLOCK_SIZE);
  6241. if (ivSz == GCM_NONCE_MID_SZ) {
  6242. byte* pCtr = (byte*)ctr;
  6243. XMEMCPY(ctr, iv, ivSz);
  6244. pCtr[AES_BLOCK_SIZE - 1] = 1;
  6245. }
  6246. else {
  6247. GHASH(&aes->gcm, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
  6248. }
  6249. /* Make copy of expected authTag, which could get corrupted in some
  6250. * Cube HAL versions without proper partial block support.
  6251. * For TLS blocks the authTag is after the output buffer, so save it */
  6252. XMEMCPY(tagExpected, authTag, authTagSz);
  6253. /* Authentication buffer - must be 4-byte multiple zero padded */
  6254. authPadSz = authInSz % sizeof(word32);
  6255. if (authPadSz != 0) {
  6256. authPadSz = authInSz + sizeof(word32) - authPadSz;
  6257. }
  6258. else {
  6259. authPadSz = authInSz;
  6260. }
  6261. /* for cases where hardware cannot be used for authTag calculate it */
  6262. /* if IV is not 12 calculate GHASH using software */
  6263. if (ivSz != GCM_NONCE_MID_SZ
  6264. #ifndef CRYP_HEADERWIDTHUNIT_BYTE
  6265. /* or harware that does not support partial block */
  6266. || sz == 0 || partial != 0
  6267. #endif
  6268. #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
  6269. /* or authIn is not a multiple of 4 */
  6270. || authPadSz != authInSz
  6271. #endif
  6272. ) {
  6273. GHASH(&aes->gcm, authIn, authInSz, in, sz, (byte*)tag, sizeof(tag));
  6274. ret = wc_AesEncrypt(aes, (byte*)ctr, (byte*)partialBlock);
  6275. if (ret != 0)
  6276. return ret;
  6277. xorbuf(tag, partialBlock, sizeof(tag));
  6278. tagComputed = 1;
  6279. }
  6280. /* if using hardware for authentication tag make sure its aligned and zero padded */
  6281. if (authPadSz != authInSz && !tagComputed) {
  6282. if (authPadSz <= sizeof(authhdr)) {
  6283. authInPadded = (byte*)authhdr;
  6284. }
  6285. else {
  6286. authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
  6287. DYNAMIC_TYPE_TMP_BUFFER);
  6288. if (authInPadded == NULL) {
  6289. wolfSSL_CryptHwMutexUnLock();
  6290. return MEMORY_E;
  6291. }
  6292. wasAlloc = 1;
  6293. }
  6294. XMEMSET(authInPadded, 0, authPadSz);
  6295. XMEMCPY(authInPadded, authIn, authInSz);
  6296. } else {
  6297. authInPadded = (byte*)authIn;
  6298. }
  6299. /* Hardware requires counter + 1 */
  6300. IncrementGcmCounter((byte*)ctr);
  6301. ret = wolfSSL_CryptHwMutexLock();
  6302. if (ret != 0) {
  6303. return ret;
  6304. }
  6305. #ifdef WOLFSSL_STM32_CUBEMX
  6306. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  6307. hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;
  6308. #if defined(STM32_HAL_V2)
  6309. hcryp.Init.Algorithm = CRYP_AES_GCM;
  6310. #ifdef CRYP_HEADERWIDTHUNIT_BYTE
  6311. /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
  6312. hcryp.Init.HeaderSize = authInSz;
  6313. #else
  6314. hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
  6315. #endif
  6316. #ifdef CRYP_KEYIVCONFIG_ONCE
  6317. /* allows repeated calls to HAL_CRYP_Decrypt */
  6318. hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
  6319. #endif
  6320. ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
  6321. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  6322. HAL_CRYP_Init(&hcryp);
  6323. #ifndef CRYP_KEYIVCONFIG_ONCE
  6324. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
  6325. (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
  6326. #else
  6327. /* GCM payload phase - blocks */
  6328. if (blocks) {
  6329. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
  6330. (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
  6331. }
  6332. /* GCM payload phase - partial remainder */
  6333. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  6334. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  6335. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  6336. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)partialBlock, partial,
  6337. (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
  6338. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  6339. }
  6340. #endif
  6341. if (status == HAL_OK && !tagComputed) {
  6342. /* Compute the authTag */
  6343. status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
  6344. STM32_HAL_TIMEOUT);
  6345. }
  6346. #elif defined(STM32_CRYPTO_AES_ONLY)
  6347. /* Set the CRYP parameters */
  6348. hcryp.Init.HeaderSize = authPadSz;
  6349. if (authPadSz == 0)
  6350. hcryp.Init.Header = NULL; /* cannot pass pointer when authIn == 0 */
  6351. hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
  6352. hcryp.Init.OperatingMode = CRYP_ALGOMODE_DECRYPT;
  6353. hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
  6354. HAL_CRYP_Init(&hcryp);
  6355. /* GCM init phase */
  6356. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  6357. if (status == HAL_OK) {
  6358. /* GCM header phase */
  6359. hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
  6360. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  6361. }
  6362. if (status == HAL_OK) {
  6363. /* GCM payload phase - blocks */
  6364. hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
  6365. if (blocks) {
  6366. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
  6367. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  6368. }
  6369. }
  6370. if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
  6371. /* GCM payload phase - partial remainder */
  6372. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  6373. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  6374. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)partialBlock, partial,
  6375. (byte*)partialBlock, STM32_HAL_TIMEOUT);
  6376. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  6377. }
  6378. if (status == HAL_OK && tagComputed == 0) {
  6379. /* GCM final phase */
  6380. hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
  6381. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (byte*)tag, STM32_HAL_TIMEOUT);
  6382. }
  6383. #else
  6384. hcryp.Init.HeaderSize = authPadSz;
  6385. HAL_CRYP_Init(&hcryp);
  6386. if (blocks) {
  6387. /* GCM payload phase - blocks */
  6388. status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)in,
  6389. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  6390. }
  6391. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  6392. /* GCM payload phase - partial remainder */
  6393. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  6394. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  6395. status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)partialBlock, partial,
  6396. (byte*)partialBlock, STM32_HAL_TIMEOUT);
  6397. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  6398. }
  6399. if (status == HAL_OK && tagComputed == 0) {
  6400. /* Compute the authTag */
  6401. status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (byte*)tag, STM32_HAL_TIMEOUT);
  6402. }
  6403. #endif
  6404. if (status != HAL_OK)
  6405. ret = AES_GCM_AUTH_E;
  6406. HAL_CRYP_DeInit(&hcryp);
  6407. #else /* Standard Peripheral Library */
  6408. ByteReverseWords(keyCopy, (word32*)aes->key, aes->keylen);
  6409. /* Input size and auth size need to be the actual sizes, even though
  6410. * they are not block aligned, because this length (in bits) is used
  6411. * in the final GHASH. */
  6412. XMEMSET(partialBlock, 0, sizeof(partialBlock)); /* use this to get tag */
  6413. status = CRYP_AES_GCM(MODE_DECRYPT, (uint8_t*)ctr,
  6414. (uint8_t*)keyCopy, keySize * 8,
  6415. (uint8_t*)in, sz,
  6416. (uint8_t*)authInPadded, authInSz,
  6417. (uint8_t*)out, (uint8_t*)partialBlock);
  6418. if (status != SUCCESS)
  6419. ret = AES_GCM_AUTH_E;
  6420. if (tagComputed == 0)
  6421. XMEMCPY(tag, partialBlock, authTagSz);
  6422. #endif /* WOLFSSL_STM32_CUBEMX */
  6423. wolfSSL_CryptHwMutexUnLock();
  6424. wc_Stm32_Aes_Cleanup();
  6425. /* Check authentication tag */
  6426. if (ConstantCompare((const byte*)tagExpected, (byte*)tag, authTagSz) != 0) {
  6427. ret = AES_GCM_AUTH_E;
  6428. }
  6429. /* Free memory */
  6430. if (wasAlloc) {
  6431. XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  6432. }
  6433. return ret;
  6434. }
  6435. #endif /* STM32_CRYPTO_AES_GCM */
#ifdef WOLFSSL_AESNI
/* For performance reasons, this code needs to be not inlined. */
int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz);
#else
static
#endif
/* AES-GCM decrypt, pure C implementation.
 *
 * Recomputes the authentication tag with GHASH over the AAD and cipher
 * text, decrypts the payload in CTR mode, and compares tags in constant
 * time. With WC_AES_GCM_DEC_AUTH_EARLY the tag is checked before any
 * plaintext is produced.
 *
 * @param [in, out] aes       AES object holding the key and GCM state.
 * @param [out]     out       Plaintext output (don't-care when sz == 0).
 * @param [in]      in        Cipher text input.
 * @param [in]      sz        Length of cipher text in bytes.
 * @param [in]      iv        IV/nonce buffer.
 * @param [in]      ivSz      Length of IV/nonce in bytes.
 * @param [in]      authTag   Expected authentication tag.
 * @param [in]      authTagSz Tag length in bytes.
 * @param [in]      authIn    Additional authenticated data (AAD).
 * @param [in]      authInSz  Length of AAD in bytes.
 * @return 0 on success, AES_GCM_AUTH_E on tag mismatch, or an error from
 *         wc_AesEncrypt.
 */
int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partial = sz % AES_BLOCK_SIZE;
    const byte* c = in;   /* read cursor over cipher text */
    byte* p = out;        /* write cursor over plaintext */
    ALIGN16 byte counter[AES_BLOCK_SIZE];
    ALIGN16 byte scratch[AES_BLOCK_SIZE];
    ALIGN16 byte Tprime[AES_BLOCK_SIZE];  /* recomputed tag */
    ALIGN16 byte EKY0[AES_BLOCK_SIZE];    /* E(K, Y0): tag mask */
    sword32 res;

    if (ivSz == GCM_NONCE_MID_SZ) {
        /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
        XMEMCPY(counter, iv, ivSz);
        XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
                                         AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
        counter[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Counter is GHASH of IV. */
#ifdef OPENSSL_EXTRA
        /* aadLen must be zero while GHASHing the IV; saved and restored. */
        word32 aadTemp = aes->gcm.aadLen;
        aes->gcm.aadLen = 0;
#endif
        GHASH(&aes->gcm, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
#ifdef OPENSSL_EXTRA
        aes->gcm.aadLen = aadTemp;
#endif
    }

    /* Calc the authTag again using received auth data and the cipher text */
    GHASH(&aes->gcm, authIn, authInSz, in, sz, Tprime, sizeof(Tprime));
    ret = wc_AesEncrypt(aes, counter, EKY0);
    if (ret != 0)
        return ret;
    xorbuf(Tprime, EKY0, sizeof(Tprime));

#ifdef WC_AES_GCM_DEC_AUTH_EARLY
    /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of
     * the pairwise bytes in the strings.
     */
    res = ConstantCompare(authTag, Tprime, authTagSz);
    /* convert positive retval from ConstantCompare() to all-1s word, in
     * constant time.
     */
    res = 0 - (sword32)(((word32)(0 - res)) >> 31U);
    ret = res & AES_GCM_AUTH_E;
    if (ret != 0)
        return ret;
#endif

#ifdef OPENSSL_EXTRA
    if (!out) {
        /* authenticated, non-confidential data */
        /* store AAD size for next call */
        aes->gcm.aadLen = authInSz;
    }
#endif

#if defined(WOLFSSL_PIC32MZ_CRYPT)
    if (blocks) {
        /* use initial IV for HW, but don't use it below */
        XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
        ret = wc_Pic32AesCrypt(
            aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
            out, in, (blocks * AES_BLOCK_SIZE),
            PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
        if (ret != 0)
            return ret;
    }
    /* process remainder using partial handling */
#endif

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
    /* some hardware acceleration can gain performance from doing AES encryption
     * of the whole buffer at once */
    if (c != p && blocks > 0) { /* can not handle inline decryption */
        /* Stage counter blocks into the output, ECB-encrypt them in one
         * pass, then XOR the cipher text in. */
        while (blocks--) {
            IncrementGcmCounter(counter);
            XMEMCPY(p, counter, AES_BLOCK_SIZE);
            p += AES_BLOCK_SIZE;
        }

        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;

        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, c, AES_BLOCK_SIZE * blocks);
        c += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB && !PIC32MZ */
    {
        /* Block-at-a-time CTR decrypt (inline-safe). */
        while (blocks--) {
            IncrementGcmCounter(counter);
#if !defined(WOLFSSL_PIC32MZ_CRYPT)
            ret = wc_AesEncrypt(aes, counter, scratch);
            if (ret != 0)
                return ret;
            xorbufout(p, scratch, c, AES_BLOCK_SIZE);
#endif
            p += AES_BLOCK_SIZE;
            c += AES_BLOCK_SIZE;
        }
    }

    if (partial != 0) {
        /* Final short block: use only 'partial' bytes of key stream. */
        IncrementGcmCounter(counter);
        ret = wc_AesEncrypt(aes, counter, scratch);
        if (ret != 0)
            return ret;
        xorbuf(scratch, c, partial);
        XMEMCPY(p, scratch, partial);
    }

#ifndef WC_AES_GCM_DEC_AUTH_EARLY
    /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of
     * the pairwise bytes in the strings.
     */
    res = ConstantCompare(authTag, Tprime, (int)authTagSz);
    /* convert positive retval from ConstantCompare() to all-1s word, in
     * constant time.
     */
    res = 0 - (sword32)(((word32)(0 - res)) >> 31U);
    /* now use res as a mask for constant time return of ret, unless tag
     * mismatch, whereupon AES_GCM_AUTH_E is returned.
     */
    ret = (ret & ~res) | (res & AES_GCM_AUTH_E);
#endif
    return ret;
}
/* Software AES - GCM Decrypt */
/* Decrypt and authenticate with AES-GCM.
 *
 * Dispatches, in order: crypto callbacks, async/hardware offload
 * (Cavium/Intel QA/software async), Silicon Labs or STM32 hardware when
 * compiled in, AES-NI assembly (AVX2/AVX1/AES-NI), and finally the C
 * implementation AES_GCM_decrypt_C.
 *
 * @return 0 on success, BAD_FUNC_ARG on bad parameters, AES_GCM_AUTH_E on
 *         authentication failure, or a backend-specific error code.
 */
int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
                     const byte* iv, word32 ivSz,
                     const byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
#ifdef WOLFSSL_AESNI
    /* Tag-compare result from the assembly routines; 0 means mismatch. */
    int res = AES_GCM_AUTH_E;
#endif

    /* argument checks */
    /* If the sz is non-zero, both in and out must be set. If sz is 0,
     * in and out are don't cares, as this is the GMAC case. */
    if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
        authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
        ivSz == 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmDecrypt(aes, out, in, sz, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
                    sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
    #if defined(HAVE_CAVIUM)
        #ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */
            return NitroxAesGcmDecrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
        #endif
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmDecrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
    #else /* WOLFSSL_ASYNC_CRYPT_SW */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_GCM_DECRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            sw->aes.iv = iv;
            sw->aes.ivSz = ivSz;
            sw->aes.authTag = (byte*)authTag;
            sw->aes.authTagSz = authTagSz;
            sw->aes.authIn = authIn;
            sw->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_SILABS_SE_ACCEL
    return wc_AesGcmDecrypt_silabs(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif

#ifdef STM32_CRYPTO_AES_GCM
    /* The STM standard peripheral library API's doesn't support partial blocks */
    return wc_AesGcmDecrypt_STM32(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif /* STM32_CRYPTO_AES_GCM */

#ifdef WOLFSSL_AESNI
    #ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_decrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                             authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
        RESTORE_VECTOR_REGISTERS();
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
    #endif
    #if defined(HAVE_INTEL_AVX1)
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_decrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                             authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
        RESTORE_VECTOR_REGISTERS();
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
    #endif
    if (haveAESNI) {
        AES_GCM_decrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                        authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
#endif
    {
        /* Portable C fallback. */
        return AES_GCM_decrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz,
                                 authIn, authInSz);
    }
}
  6688. #endif
  6689. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  6690. #ifdef WOLFSSL_AESGCM_STREAM
  6691. /* Initialize the AES GCM cipher with an IV. C implementation.
  6692. *
  6693. * @param [in, out] aes AES object.
  6694. * @param [in] iv IV/nonce buffer.
  6695. * @param [in] ivSz Length of IV/nonce data.
  6696. */
  6697. static WARN_UNUSED_RESULT int AesGcmInit_C(Aes* aes, const byte* iv, word32 ivSz)
  6698. {
  6699. ALIGN32 byte counter[AES_BLOCK_SIZE];
  6700. int ret;
  6701. if (ivSz == GCM_NONCE_MID_SZ) {
  6702. /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
  6703. XMEMCPY(counter, iv, ivSz);
  6704. XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
  6705. AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
  6706. counter[AES_BLOCK_SIZE - 1] = 1;
  6707. }
  6708. else {
  6709. /* Counter is GHASH of IV. */
  6710. #ifdef OPENSSL_EXTRA
  6711. word32 aadTemp = aes->gcm.aadLen;
  6712. aes->gcm.aadLen = 0;
  6713. #endif
  6714. GHASH(&aes->gcm, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
  6715. #ifdef OPENSSL_EXTRA
  6716. aes->gcm.aadLen = aadTemp;
  6717. #endif
  6718. }
  6719. /* Copy in the counter for use with cipher. */
  6720. XMEMCPY(AES_COUNTER(aes), counter, AES_BLOCK_SIZE);
  6721. /* Encrypt initial counter into a buffer for GCM. */
  6722. ret = wc_AesEncrypt(aes, counter, AES_INITCTR(aes));
  6723. if (ret != 0)
  6724. return ret;
  6725. /* Reset state fields. */
  6726. aes->over = 0;
  6727. aes->aSz = 0;
  6728. aes->cSz = 0;
  6729. /* Initialization for GHASH. */
  6730. GHASH_INIT(aes);
  6731. return 0;
  6732. }
/* Update the AES GCM cipher with data. C implementation.
 *
 * Only enciphers data (CTR key stream XOR; callers use the same routine
 * for decrypt since CTR is symmetric).
 *
 * Any unused bytes of the previous key-stream block (aes->over) are
 * consumed first; a trailing partial block leaves its key stream cached
 * in AES_LASTBLOCK for the next call.
 *
 * @param [in, out] aes  AES object.
 * @param [in]      out  Cipher text or plaintext buffer.
 * @param [in]      in   Plaintext or cipher text buffer.
 * @param [in]      sz   Length of data.
 * @return 0 on success, or an error from wc_AesEncrypt.
 */
static WARN_UNUSED_RESULT int AesGcmCryptUpdate_C(
    Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;
    word32 partial;
    int ret;

    /* Check if previous encrypted block was not used up. */
    if (aes->over > 0) {
        /* Bytes of cached key stream still available. */
        byte pSz = AES_BLOCK_SIZE - aes->over;
        if (pSz > sz) pSz = (byte)sz;

        /* Use some/all of last encrypted block. */
        xorbufout(out, AES_LASTBLOCK(aes) + aes->over, in, pSz);
        /* Wraps to 0 when the cached block is fully consumed. */
        aes->over = (aes->over + pSz) & (AES_BLOCK_SIZE - 1);

        /* Some data used. */
        sz  -= pSz;
        in  += pSz;
        out += pSz;
    }

    /* Calculate the number of blocks needing to be encrypted and any leftover.
     */
    blocks  = sz / AES_BLOCK_SIZE;
    partial = sz & (AES_BLOCK_SIZE - 1);

#if defined(HAVE_AES_ECB)
    /* Some hardware acceleration can gain performance from doing AES encryption
     * of the whole buffer at once.
     * Overwrites the cipher text before using plaintext - no inline encryption.
     */
    if ((out != in) && blocks > 0) {
        word32 b;
        /* Place incrementing counter blocks into cipher text. */
        for (b = 0; b < blocks; b++) {
            IncrementGcmCounter(AES_COUNTER(aes));
            XMEMCPY(out + b * AES_BLOCK_SIZE, AES_COUNTER(aes), AES_BLOCK_SIZE);
        }

        /* Encrypt counter blocks. */
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        /* XOR in plaintext. */
        xorbuf(out, in, AES_BLOCK_SIZE * blocks);
        /* Skip over processed data. */
        in += AES_BLOCK_SIZE * blocks;
        out += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB */
    {
        /* Encrypt block by block. */
        while (blocks--) {
            ALIGN32 byte scratch[AES_BLOCK_SIZE];
            IncrementGcmCounter(AES_COUNTER(aes));
            /* Encrypt counter into a buffer. */
            ret = wc_AesEncrypt(aes, AES_COUNTER(aes), scratch);
            if (ret != 0)
                return ret;
            /* XOR plain text into encrypted counter into cipher text buffer. */
            xorbufout(out, scratch, in, AES_BLOCK_SIZE);
            /* Data complete. */
            in += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
        }
    }

    if (partial != 0) {
        /* Generate an extra block and use up as much as needed. */
        IncrementGcmCounter(AES_COUNTER(aes));
        /* Encrypt counter into cache. */
        ret = wc_AesEncrypt(aes, AES_COUNTER(aes), AES_LASTBLOCK(aes));
        if (ret != 0)
            return ret;
        /* XOR plain text into encrypted counter into cipher text buffer. */
        xorbufout(out, AES_LASTBLOCK(aes), in, partial);
        /* Keep amount of encrypted block used. */
        aes->over = (byte)partial;
    }

    return 0;
}
/* Calculates authentication tag for AES GCM. C implementation.
 *
 * Finalizes GHASH with the accumulated lengths, then masks the digest
 * with E(K, Y0) to produce the tag. The cached key-stream block is
 * zeroized afterwards since it contains sensitive material.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     authTag   Buffer to store authentication tag in.
 * @param [in]      authTagSz Length of tag to create.
 * @return 0 (always succeeds).
 */
static WARN_UNUSED_RESULT int AesGcmFinal_C(
    Aes* aes, byte* authTag, word32 authTagSz)
{
    /* Calculate authentication tag. */
    GHASH_FINAL(aes, authTag, authTagSz);
    /* XOR in as much of encrypted counter as is required. */
    xorbuf(authTag, AES_INITCTR(aes), authTagSz);
#ifdef OPENSSL_EXTRA
    /* store AAD size for next call */
    aes->gcm.aadLen = aes->aSz;
#endif
    /* Zeroize last block to protect sensitive data. */
    ForceZero(AES_LASTBLOCK(aes), AES_BLOCK_SIZE);

    return 0;
}
  6837. #ifdef WOLFSSL_AESNI
  6838. #ifdef __cplusplus
  6839. extern "C" {
  6840. #endif
  6841. /* Assembly code implementations in: aes_gcm_asm.S */
  6842. #ifdef HAVE_INTEL_AVX2
  6843. extern void AES_GCM_init_avx2(const unsigned char* key, int nr,
  6844. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  6845. unsigned char* counter, unsigned char* initCtr);
  6846. extern void AES_GCM_aad_update_avx2(const unsigned char* addt,
  6847. unsigned int abytes, unsigned char* tag, unsigned char* h);
  6848. extern void AES_GCM_encrypt_block_avx2(const unsigned char* key, int nr,
  6849. unsigned char* out, const unsigned char* in, unsigned char* counter);
  6850. extern void AES_GCM_ghash_block_avx2(const unsigned char* data,
  6851. unsigned char* tag, unsigned char* h);
  6852. extern void AES_GCM_encrypt_update_avx2(const unsigned char* key, int nr,
  6853. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  6854. unsigned char* tag, unsigned char* h, unsigned char* counter);
  6855. extern void AES_GCM_encrypt_final_avx2(unsigned char* tag,
  6856. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  6857. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  6858. #endif
  6859. #ifdef HAVE_INTEL_AVX1
  6860. extern void AES_GCM_init_avx1(const unsigned char* key, int nr,
  6861. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  6862. unsigned char* counter, unsigned char* initCtr);
  6863. extern void AES_GCM_aad_update_avx1(const unsigned char* addt,
  6864. unsigned int abytes, unsigned char* tag, unsigned char* h);
  6865. extern void AES_GCM_encrypt_block_avx1(const unsigned char* key, int nr,
  6866. unsigned char* out, const unsigned char* in, unsigned char* counter);
  6867. extern void AES_GCM_ghash_block_avx1(const unsigned char* data,
  6868. unsigned char* tag, unsigned char* h);
  6869. extern void AES_GCM_encrypt_update_avx1(const unsigned char* key, int nr,
  6870. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  6871. unsigned char* tag, unsigned char* h, unsigned char* counter);
  6872. extern void AES_GCM_encrypt_final_avx1(unsigned char* tag,
  6873. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  6874. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  6875. #endif
  6876. extern void AES_GCM_init_aesni(const unsigned char* key, int nr,
  6877. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  6878. unsigned char* counter, unsigned char* initCtr);
  6879. extern void AES_GCM_aad_update_aesni(const unsigned char* addt,
  6880. unsigned int abytes, unsigned char* tag, unsigned char* h);
  6881. extern void AES_GCM_encrypt_block_aesni(const unsigned char* key, int nr,
  6882. unsigned char* out, const unsigned char* in, unsigned char* counter);
  6883. extern void AES_GCM_ghash_block_aesni(const unsigned char* data,
  6884. unsigned char* tag, unsigned char* h);
  6885. extern void AES_GCM_encrypt_update_aesni(const unsigned char* key, int nr,
  6886. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  6887. unsigned char* tag, unsigned char* h, unsigned char* counter);
  6888. extern void AES_GCM_encrypt_final_aesni(unsigned char* tag,
  6889. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  6890. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  6891. #ifdef __cplusplus
  6892. } /* extern "C" */
  6893. #endif
/* Initialize the AES GCM cipher with an IV. AES-NI implementations.
 *
 * Resets the streaming state (tag, AAD/cipher-text counts and partial
 * counts), then calls the AVX2, AVX1 or plain AES-NI assembly init
 * routine to derive the hash key H, the counter block and E(K, Y0).
 *
 * @param [in, out] aes   AES object.
 * @param [in]      iv    IV/nonce buffer.
 * @param [in]      ivSz  Length of IV/nonce data.
 * @return 0 (vector-register save failures return via SAVE_VECTOR_REGISTERS).
 */
static WARN_UNUSED_RESULT int AesGcmInit_aesni(
    Aes* aes, const byte* iv, word32 ivSz)
{
    /* Reset state fields. */
    aes->aSz = 0;
    aes->cSz = 0;
    /* Set tag to all zeros as initial value. */
    XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
    /* Reset counts of AAD and cipher text. */
    aes->aOver = 0;
    aes->cOver = 0;

#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_avx2((byte*)aes->key, (int)aes->rounds, iv, ivSz,
                          aes->gcm.H, AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_avx1((byte*)aes->key, (int)aes->rounds, iv, ivSz,
                          aes->gcm.H, AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    else
#endif
    {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_aesni((byte*)aes->key, (int)aes->rounds, iv, ivSz,
                           aes->gcm.H, AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    return 0;
}
/* Update the AES GCM for encryption with authentication data.
 *
 * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * Partial AAD bytes are accumulated in AES_LASTGBLOCK until a full block
 * is available; when endA is set, any remainder is zero-padded and
 * GHASHed so no AAD is left pending.
 *
 * Caller must have saved vector registers (asserted below).
 *
 * @param [in, out] aes   AES object.
 * @param [in]      a     Buffer holding authentication data.
 * @param [in]      aSz   Length of authentication data in bytes.
 * @param [in]      endA  Whether no more authentication data is expected.
 * @return 0 (always succeeds).
 */
static WARN_UNUSED_RESULT int AesGcmAadUpdate_aesni(
    Aes* aes, const byte* a, word32 aSz, int endA)
{
    word32 blocks;
    int partial;

    ASSERT_SAVED_VECTOR_REGISTERS();

    if (aSz != 0 && a != NULL) {
        /* Total count of AAD updated. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = (byte)aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
            #ifdef HAVE_INTEL_AVX2
                if (IS_INTEL_AVX2(intel_flags)) {
                    AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->gcm.H);
                }
                else
            #endif
            #ifdef HAVE_INTEL_AVX1
                if (IS_INTEL_AVX1(intel_flags)) {
                    AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->gcm.H);
                }
                else
            #endif
                {
                    AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                              aes->gcm.H);
                }
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }

        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        if (blocks > 0) {
            /* GHASH full blocks now. */
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_aad_update_avx2(a, blocks * AES_BLOCK_SIZE,
                                        AES_TAG(aes), aes->gcm.H);
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_aad_update_avx1(a, blocks * AES_BLOCK_SIZE,
                                        AES_TAG(aes), aes->gcm.H);
            }
            else
        #endif
            {
                AES_GCM_aad_update_aesni(a, blocks * AES_BLOCK_SIZE,
                                         AES_TAG(aes), aes->gcm.H);
            }
            /* Skip over to end of AAD blocks. */
            a += blocks * AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, (size_t)partial);
            aes->aOver = (byte)partial;
        }
    }
    if (endA && (aes->aOver > 0)) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0,
                AES_BLOCK_SIZE - aes->aOver);
        /* GHASH last AAD block. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                     aes->gcm.H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                     aes->gcm.H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                      aes->gcm.H);
        }
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }
    return 0;
}
  7053. /* Update the AES GCM for encryption with data and/or authentication data.
  7054. *
  7055. * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
  7056. *
  7057. * @param [in, out] aes AES object.
  7058. * @param [out] c Buffer to hold cipher text.
  7059. * @param [in] p Buffer holding plaintext.
  7060. * @param [in] cSz Length of cipher text/plaintext in bytes.
  7061. * @param [in] a Buffer holding authentication data.
  7062. * @param [in] aSz Length of authentication data in bytes.
  7063. */
  7064. static WARN_UNUSED_RESULT int AesGcmEncryptUpdate_aesni(
  7065. Aes* aes, byte* c, const byte* p, word32 cSz, const byte* a, word32 aSz)
  7066. {
  7067. word32 blocks;
  7068. int partial;
  7069. int ret;
  7070. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  7071. /* Hash in A, the Authentication Data */
  7072. ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
  7073. if (ret != 0)
  7074. return ret;
  7075. /* Encrypt plaintext and Hash in C, the Cipher text */
  7076. if (cSz != 0 && c != NULL) {
  7077. /* Update count of cipher text we have hashed. */
  7078. aes->cSz += cSz;
  7079. if (aes->cOver > 0) {
  7080. /* Calculate amount we can use - fill up the block. */
  7081. byte sz = AES_BLOCK_SIZE - aes->cOver;
  7082. if (sz > cSz) {
  7083. sz = (byte)cSz;
  7084. }
  7085. /* Encrypt some of the plaintext. */
  7086. xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, p, sz);
  7087. XMEMCPY(c, AES_LASTGBLOCK(aes) + aes->cOver, sz);
  7088. /* Update count of unsed encrypted counter. */
  7089. aes->cOver += sz;
  7090. if (aes->cOver == AES_BLOCK_SIZE) {
  7091. /* We have filled up the block and can process. */
  7092. #ifdef HAVE_INTEL_AVX2
  7093. if (IS_INTEL_AVX2(intel_flags)) {
  7094. AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
  7095. aes->gcm.H);
  7096. }
  7097. else
  7098. #endif
  7099. #ifdef HAVE_INTEL_AVX1
  7100. if (IS_INTEL_AVX1(intel_flags)) {
  7101. AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
  7102. aes->gcm.H);
  7103. }
  7104. else
  7105. #endif
  7106. {
  7107. AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
  7108. aes->gcm.H);
  7109. }
  7110. /* Reset count. */
  7111. aes->cOver = 0;
  7112. }
  7113. /* Used up some data. */
  7114. cSz -= sz;
  7115. p += sz;
  7116. c += sz;
  7117. }
  7118. /* Calculate number of blocks of plaintext and the leftover. */
  7119. blocks = cSz / AES_BLOCK_SIZE;
  7120. partial = cSz % AES_BLOCK_SIZE;
  7121. if (blocks > 0) {
  7122. /* Encrypt and GHASH full blocks now. */
  7123. #ifdef HAVE_INTEL_AVX2
  7124. if (IS_INTEL_AVX2(intel_flags)) {
  7125. AES_GCM_encrypt_update_avx2((byte*)aes->key, (int)aes->rounds,
  7126. c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7127. AES_COUNTER(aes));
  7128. }
  7129. else
  7130. #endif
  7131. #ifdef HAVE_INTEL_AVX1
  7132. if (IS_INTEL_AVX1(intel_flags)) {
  7133. AES_GCM_encrypt_update_avx1((byte*)aes->key, (int)aes->rounds,
  7134. c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7135. AES_COUNTER(aes));
  7136. }
  7137. else
  7138. #endif
  7139. {
  7140. AES_GCM_encrypt_update_aesni((byte*)aes->key, (int)aes->rounds,
  7141. c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7142. AES_COUNTER(aes));
  7143. }
  7144. /* Skip over to end of blocks. */
  7145. p += blocks * AES_BLOCK_SIZE;
  7146. c += blocks * AES_BLOCK_SIZE;
  7147. }
  7148. if (partial != 0) {
  7149. /* Encrypt the counter - XOR in zeros as proxy for plaintext. */
  7150. XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
  7151. #ifdef HAVE_INTEL_AVX2
  7152. if (IS_INTEL_AVX2(intel_flags)) {
  7153. AES_GCM_encrypt_block_avx2((byte*)aes->key, (int)aes->rounds,
  7154. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7155. }
  7156. else
  7157. #endif
  7158. #ifdef HAVE_INTEL_AVX1
  7159. if (IS_INTEL_AVX1(intel_flags)) {
  7160. AES_GCM_encrypt_block_avx1((byte*)aes->key, (int)aes->rounds,
  7161. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7162. }
  7163. else
  7164. #endif
  7165. {
  7166. AES_GCM_encrypt_block_aesni((byte*)aes->key, (int)aes->rounds,
  7167. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7168. }
  7169. /* XOR the remaining plaintext to calculate cipher text.
  7170. * Keep cipher text for GHASH of last partial block.
  7171. */
  7172. xorbuf(AES_LASTGBLOCK(aes), p, (word32)partial);
  7173. XMEMCPY(c, AES_LASTGBLOCK(aes), (size_t)partial);
  7174. /* Update count of the block used. */
  7175. aes->cOver = (byte)partial;
  7176. }
  7177. }
  7178. RESTORE_VECTOR_REGISTERS();
  7179. return 0;
  7180. }
/* Finalize the AES GCM for encryption and calculate the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * @param [in, out] aes        AES object.
 * @param [out]     authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 */
static WARN_UNUSED_RESULT int AesGcmEncryptFinal_aesni(
    Aes* aes, byte* authTag, word32 authTagSz)
{
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    if (aes->cOver > 0) {
        /* Cipher text block incomplete. */
        over = aes->cOver;
    }
    if (over > 0) {
        /* Fill the rest of the block with zeros. */
        XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
        /* GHASH last cipher block. */
#ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->gcm.H);
        }
        else
#endif
#ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->gcm.H);
        }
        else
#endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->gcm.H);
        }
    }
    /* Calculate the authentication tag from the lengths, running tag and
     * initial counter block. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_encrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes));
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_encrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes));
    }
    else
#endif
    {
        AES_GCM_encrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes));
    }
    RESTORE_VECTOR_REGISTERS();
    return 0;
}
  7245. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  7246. #ifdef __cplusplus
  7247. extern "C" {
  7248. #endif
  7249. /* Assembly code implementations in: aes_gcm_asm.S and aes_gcm_x86_asm.S */
  7250. #ifdef HAVE_INTEL_AVX2
  7251. extern void AES_GCM_decrypt_update_avx2(const unsigned char* key, int nr,
  7252. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  7253. unsigned char* tag, unsigned char* h, unsigned char* counter);
  7254. extern void AES_GCM_decrypt_final_avx2(unsigned char* tag,
  7255. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  7256. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  7257. #endif
  7258. #ifdef HAVE_INTEL_AVX1
  7259. extern void AES_GCM_decrypt_update_avx1(const unsigned char* key, int nr,
  7260. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  7261. unsigned char* tag, unsigned char* h, unsigned char* counter);
  7262. extern void AES_GCM_decrypt_final_avx1(unsigned char* tag,
  7263. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  7264. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  7265. #endif
  7266. extern void AES_GCM_decrypt_update_aesni(const unsigned char* key, int nr,
  7267. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  7268. unsigned char* tag, unsigned char* h, unsigned char* counter);
  7269. extern void AES_GCM_decrypt_final_aesni(unsigned char* tag,
  7270. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  7271. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  7272. #ifdef __cplusplus
  7273. } /* extern "C" */
  7274. #endif
  7275. /* Update the AES GCM for decryption with data and/or authentication data.
  7276. *
  7277. * @param [in, out] aes AES object.
  7278. * @param [out] p Buffer to hold plaintext.
  7279. * @param [in] c Buffer holding ciper text.
  7280. * @param [in] cSz Length of cipher text/plaintext in bytes.
  7281. * @param [in] a Buffer holding authentication data.
  7282. * @param [in] aSz Length of authentication data in bytes.
  7283. */
  7284. static WARN_UNUSED_RESULT int AesGcmDecryptUpdate_aesni(
  7285. Aes* aes, byte* p, const byte* c, word32 cSz, const byte* a, word32 aSz)
  7286. {
  7287. word32 blocks;
  7288. int partial;
  7289. int ret;
  7290. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  7291. /* Hash in A, the Authentication Data */
  7292. ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
  7293. if (ret != 0)
  7294. return ret;
  7295. /* Hash in C, the Cipher text, and decrypt. */
  7296. if (cSz != 0 && p != NULL) {
  7297. /* Update count of cipher text we have hashed. */
  7298. aes->cSz += cSz;
  7299. if (aes->cOver > 0) {
  7300. /* Calculate amount we can use - fill up the block. */
  7301. byte sz = AES_BLOCK_SIZE - aes->cOver;
  7302. if (sz > cSz) {
  7303. sz = (byte)cSz;
  7304. }
  7305. /* Keep a copy of the cipher text for GHASH. */
  7306. XMEMCPY(AES_LASTBLOCK(aes) + aes->cOver, c, sz);
  7307. /* Decrypt some of the cipher text. */
  7308. xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
  7309. XMEMCPY(p, AES_LASTGBLOCK(aes) + aes->cOver, sz);
  7310. /* Update count of unsed encrypted counter. */
  7311. aes->cOver += sz;
  7312. if (aes->cOver == AES_BLOCK_SIZE) {
  7313. /* We have filled up the block and can process. */
  7314. #ifdef HAVE_INTEL_AVX2
  7315. if (IS_INTEL_AVX2(intel_flags)) {
  7316. AES_GCM_ghash_block_avx2(AES_LASTBLOCK(aes), AES_TAG(aes),
  7317. aes->gcm.H);
  7318. }
  7319. else
  7320. #endif
  7321. #ifdef HAVE_INTEL_AVX1
  7322. if (IS_INTEL_AVX1(intel_flags)) {
  7323. AES_GCM_ghash_block_avx1(AES_LASTBLOCK(aes), AES_TAG(aes),
  7324. aes->gcm.H);
  7325. }
  7326. else
  7327. #endif
  7328. {
  7329. AES_GCM_ghash_block_aesni(AES_LASTBLOCK(aes), AES_TAG(aes),
  7330. aes->gcm.H);
  7331. }
  7332. /* Reset count. */
  7333. aes->cOver = 0;
  7334. }
  7335. /* Used up some data. */
  7336. cSz -= sz;
  7337. c += sz;
  7338. p += sz;
  7339. }
  7340. /* Calculate number of blocks of plaintext and the leftover. */
  7341. blocks = cSz / AES_BLOCK_SIZE;
  7342. partial = cSz % AES_BLOCK_SIZE;
  7343. if (blocks > 0) {
  7344. /* Decrypt and GHASH full blocks now. */
  7345. #ifdef HAVE_INTEL_AVX2
  7346. if (IS_INTEL_AVX2(intel_flags)) {
  7347. AES_GCM_decrypt_update_avx2((byte*)aes->key, (int)aes->rounds,
  7348. p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7349. AES_COUNTER(aes));
  7350. }
  7351. else
  7352. #endif
  7353. #ifdef HAVE_INTEL_AVX1
  7354. if (IS_INTEL_AVX1(intel_flags)) {
  7355. AES_GCM_decrypt_update_avx1((byte*)aes->key, (int)aes->rounds,
  7356. p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7357. AES_COUNTER(aes));
  7358. }
  7359. else
  7360. #endif
  7361. {
  7362. AES_GCM_decrypt_update_aesni((byte*)aes->key, (int)aes->rounds,
  7363. p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7364. AES_COUNTER(aes));
  7365. }
  7366. /* Skip over to end of blocks. */
  7367. c += blocks * AES_BLOCK_SIZE;
  7368. p += blocks * AES_BLOCK_SIZE;
  7369. }
  7370. if (partial != 0) {
  7371. /* Encrypt the counter - XOR in zeros as proxy for cipher text. */
  7372. XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
  7373. #ifdef HAVE_INTEL_AVX2
  7374. if (IS_INTEL_AVX2(intel_flags)) {
  7375. AES_GCM_encrypt_block_avx2((byte*)aes->key, (int)aes->rounds,
  7376. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7377. }
  7378. else
  7379. #endif
  7380. #ifdef HAVE_INTEL_AVX1
  7381. if (IS_INTEL_AVX1(intel_flags)) {
  7382. AES_GCM_encrypt_block_avx1((byte*)aes->key, (int)aes->rounds,
  7383. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7384. }
  7385. else
  7386. #endif
  7387. {
  7388. AES_GCM_encrypt_block_aesni((byte*)aes->key, (int)aes->rounds,
  7389. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7390. }
  7391. /* Keep cipher text for GHASH of last partial block. */
  7392. XMEMCPY(AES_LASTBLOCK(aes), c, (size_t)partial);
  7393. /* XOR the remaining cipher text to calculate plaintext. */
  7394. xorbuf(AES_LASTGBLOCK(aes), c, (word32)partial);
  7395. XMEMCPY(p, AES_LASTGBLOCK(aes), (size_t)partial);
  7396. /* Update count of the block used. */
  7397. aes->cOver = (byte)partial;
  7398. }
  7399. }
  7400. RESTORE_VECTOR_REGISTERS();
  7401. return 0;
  7402. }
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 * @return  AES_GCM_AUTH_E when authentication tag doesn't match calculated
 *          value.
 */
static WARN_UNUSED_RESULT int AesGcmDecryptFinal_aesni(
    Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;
    int res;
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;
    byte *lastBlock = AES_LASTGBLOCK(aes);

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    if (aes->cOver > 0) {
        /* Cipher text block incomplete. */
        over = aes->cOver;
        /* For decryption GHASH is over the cached cipher text block. */
        lastBlock = AES_LASTBLOCK(aes);
    }
    if (over > 0) {
        /* Zeroize the unused part of the block. */
        XMEMSET(lastBlock + over, 0, AES_BLOCK_SIZE - over);
        /* Hash the last block of cipher text. */
#ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(lastBlock, AES_TAG(aes), aes->gcm.H);
        }
        else
#endif
#ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(lastBlock, AES_TAG(aes), aes->gcm.H);
        }
        else
#endif
        {
            AES_GCM_ghash_block_aesni(lastBlock, AES_TAG(aes), aes->gcm.H);
        }
    }
    /* Calculate and compare the authentication tag. Assembly sets res to
     * zero when the calculated tag does not match the one passed in. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_decrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes), &res);
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_decrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes), &res);
    }
    else
#endif
    {
        AES_GCM_decrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes), &res);
    }
    RESTORE_VECTOR_REGISTERS();
    /* Return error code when calculated doesn't match input. */
    if (res == 0) {
        ret = AES_GCM_AUTH_E;
    }
    return ret;
}
  7474. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  7475. #endif /* WOLFSSL_AESNI */
/* Initialize an AES GCM cipher for encryption or decryption.
 *
 * Must call wc_AesInit() before calling this function.
 * Call wc_AesGcmSetIV() before calling this function to generate part of IV.
 * Call wc_AesGcmSetExtIV() before calling this function to cache IV.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      key   Buffer holding key.
 * @param [in]      len   Length of key in bytes.
 * @param [in]      iv    Buffer holding IV/nonce.
 * @param [in]      ivSz  Length of IV/nonce in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 * @return  MEMORY_E when dynamic memory allocation fails. (WOLFSSL_SMALL_STACK)
 */
int wc_AesGcmInit(Aes* aes, const byte* key, word32 len, const byte* iv,
    word32 ivSz)
{
    int ret = 0;

    /* Check validity of parameters: iv and ivSz must both be set or both
     * be zero/NULL. */
    if ((aes == NULL) || ((len > 0) && (key == NULL)) ||
            ((ivSz == 0) && (iv != NULL)) ||
            ((ivSz > 0) && (iv == NULL))) {
        ret = BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    if ((ret == 0) && (aes->streamData == NULL)) {
        /* Allocate buffers for streaming. */
        aes->streamData = (byte*)XMALLOC(5 * AES_BLOCK_SIZE, aes->heap,
            DYNAMIC_TYPE_AES);
        if (aes->streamData == NULL) {
            ret = MEMORY_E;
        }
    }
#endif

    /* Set the key if passed in. */
    if ((ret == 0) && (key != NULL)) {
        ret = wc_AesGcmSetKey(aes, key, len);
    }

    if (ret == 0) {
        /* Cache the IV passed in if it is no bigger than a block. */
        if ((iv != NULL) && (ivSz <= AES_BLOCK_SIZE)) {
            /* XMEMMOVE: overlap-safe in case iv points into aes->reg. */
            XMEMMOVE((byte*)aes->reg, iv, ivSz);
            aes->nonceSz = ivSz;
        }
        /* No IV passed in, check for cached IV. */
        if ((iv == NULL) && (aes->nonceSz != 0)) {
            /* Use the cached copy. */
            iv = (byte*)aes->reg;
            ivSz = aes->nonceSz;
        }
        if (iv != NULL) {
            /* Initialize with the IV. */
#ifdef WOLFSSL_AESNI
            if (haveAESNI
#ifdef HAVE_INTEL_AVX2
                || IS_INTEL_AVX2(intel_flags)
#endif
#ifdef HAVE_INTEL_AVX1
                || IS_INTEL_AVX1(intel_flags)
#endif
                ) {
                ret = AesGcmInit_aesni(aes, iv, ivSz);
            }
            else
#endif
            {
                ret = AesGcmInit_C(aes, iv, ivSz);
            }
            /* Mark the nonce as set; checked by the update/final calls. */
            aes->nonceSet = 1;
        }
    }

    return ret;
}
  7551. /* Initialize an AES GCM cipher for encryption.
  7552. *
  7553. * Must call wc_AesInit() before calling this function.
  7554. *
  7555. * @param [in, out] aes AES object.
  7556. * @param [in] key Buffer holding key.
  7557. * @param [in] len Length of key in bytes.
  7558. * @param [in] iv Buffer holding IV/nonce.
  7559. * @param [in] ivSz Length of IV/nonce in bytes.
  7560. * @return 0 on success.
  7561. * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
  7562. * is NULL, or the IV is NULL and no previous IV has been set.
  7563. */
  7564. int wc_AesGcmEncryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
  7565. word32 ivSz)
  7566. {
  7567. return wc_AesGcmInit(aes, key, len, iv, ivSz);
  7568. }
  7569. /* Initialize an AES GCM cipher for encryption. Get IV.
  7570. *
  7571. * Must call wc_AesGcmSetIV() to generate part of IV before calling this
  7572. * function.
  7573. * Must call wc_AesInit() before calling this function.
  7574. *
  7575. * See wc_AesGcmEncrypt_ex() for non-streaming version of getting IV out.
  7576. *
  7577. * @param [in, out] aes AES object.
  7578. * @param [in] key Buffer holding key.
  7579. * @param [in] len Length of key in bytes.
  7580. * @param [in] iv Buffer holding IV/nonce.
  7581. * @param [in] ivSz Length of IV/nonce in bytes.
  7582. * @return 0 on success.
  7583. * @return BAD_FUNC_ARG when aes is NULL, key length is non-zero but key
  7584. * is NULL, or the IV is NULL or ivOutSz is not the same as cached
  7585. * nonce size.
  7586. */
  7587. int wc_AesGcmEncryptInit_ex(Aes* aes, const byte* key, word32 len, byte* ivOut,
  7588. word32 ivOutSz)
  7589. {
  7590. int ret;
  7591. /* Check validity of parameters. */
  7592. if ((aes == NULL) || (ivOut == NULL) || (ivOutSz != aes->nonceSz)) {
  7593. ret = BAD_FUNC_ARG;
  7594. }
  7595. else {
  7596. /* Copy out the IV including generated part for decryption. */
  7597. XMEMCPY(ivOut, aes->reg, ivOutSz);
  7598. /* Initialize AES GCM cipher with key and cached Iv. */
  7599. ret = wc_AesGcmInit(aes, key, len, NULL, 0);
  7600. }
  7601. return ret;
  7602. }
/* Update the AES GCM for encryption with data and/or authentication data.
 *
 * All the AAD must be passed to update before the plaintext.
 * Last part of AAD can be passed with first part of plaintext.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     out       Buffer to hold cipher text.
 * @param [in]      in        Buffer holding plaintext.
 * @param [in]      sz        Length of plaintext in bytes.
 * @param [in]      authIn    Buffer holding authentication data.
 * @param [in]      authInSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL.
 */
int wc_AesGcmEncryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
            ((out == NULL) || (in == NULL)))) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    /* First update for this nonce (no AAD or cipher text counted yet):
     * bump the 64-bit invocation counter and fail on wrap-around. */
    if ((ret == 0) && aes->ctrSet && (aes->aSz == 0) && (aes->cSz == 0)) {
        aes->invokeCtr[0]++;
        if (aes->invokeCtr[0] == 0) {
            aes->invokeCtr[1]++;
            if (aes->invokeCtr[1] == 0)
                ret = AES_GCM_OVERFLOW_E;
        }
    }
    if (ret == 0) {
        /* Encrypt with AAD and/or plaintext. */
#if defined(WOLFSSL_AESNI)
        if (haveAESNI
#ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
#endif
#ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
#endif
            ) {
            ret = AesGcmEncryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
        }
        else
#endif
        {
            /* Encrypt the plaintext. */
            ret = AesGcmCryptUpdate_C(aes, out, in, sz);
            if (ret != 0)
                return ret;
            /* Update the authentication tag with any authentication data and
             * the new cipher text. */
            GHASH_UPDATE(aes, authIn, authInSz, out, sz);
        }
    }
    return ret;
}
/* Finalize the AES GCM for encryption and return the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [out]     authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 */
int wc_AesGcmEncryptFinal(Aes* aes, byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters: tag must be 1..AES_BLOCK_SIZE bytes. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    if (ret == 0) {
        /* Calculate authentication tag. */
#ifdef WOLFSSL_AESNI
        if (haveAESNI
#ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
#endif
#ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
#endif
            ) {
            ret = AesGcmEncryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
#endif
        {
            ret = AesGcmFinal_C(aes, authTag, authTagSz);
        }
    }
    if ((ret == 0) && aes->ctrSet) {
        /* Advance the cached nonce ready for the next invocation. */
        IncCtr((byte*)aes->reg, aes->nonceSz);
    }
    return ret;
}
  7723. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  7724. /* Initialize an AES GCM cipher for decryption.
  7725. *
  7726. * Must call wc_AesInit() before calling this function.
  7727. *
  7728. * Call wc_AesGcmSetExtIV() before calling this function to use FIPS external IV
  7729. * instead.
  7730. *
  7731. * @param [in, out] aes AES object.
  7732. * @param [in] key Buffer holding key.
  7733. * @param [in] len Length of key in bytes.
  7734. * @param [in] iv Buffer holding IV/nonce.
  7735. * @param [in] ivSz Length of IV/nonce in bytes.
  7736. * @return 0 on success.
  7737. * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
  7738. * is NULL, or the IV is NULL and no previous IV has been set.
  7739. */
  7740. int wc_AesGcmDecryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
  7741. word32 ivSz)
  7742. {
  7743. return wc_AesGcmInit(aes, key, len, iv, ivSz);
  7744. }
/* Update the AES GCM for decryption with data and/or authentication data.
 *
 * All the AAD must be passed to update before the cipher text.
 * Last part of AAD can be passed with first part of cipher text.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     out       Buffer to hold plaintext.
 * @param [in]      in        Buffer holding cipher text.
 * @param [in]      sz        Length of cipher text in bytes.
 * @param [in]      authIn    Buffer holding authentication data.
 * @param [in]      authInSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL.
 */
int wc_AesGcmDecryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
            ((out == NULL) || (in == NULL)))) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    if (ret == 0) {
        /* Decrypt with AAD and/or cipher text. */
#if defined(WOLFSSL_AESNI)
        if (haveAESNI
#ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
#endif
#ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
#endif
            ) {
            ret = AesGcmDecryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
        }
        else
#endif
        {
            /* Update the authentication tag with any authentication data and
             * cipher text - GHASH is over cipher text, so before decrypt. */
            GHASH_UPDATE(aes, authIn, authInSz, in, sz);
            /* Decrypt the cipher text. */
            ret = AesGcmCryptUpdate_C(aes, out, in, sz);
        }
    }
    return ret;
}
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 */
int wc_AesGcmDecryptFinal(Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters: tag must be 1..AES_BLOCK_SIZE bytes. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    if (ret == 0) {
        /* Calculate authentication tag and compare with one passed in. */
#ifdef WOLFSSL_AESNI
        if (haveAESNI
#ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
#endif
#ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
#endif
            ) {
            ret = AesGcmDecryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
#endif
        {
            ALIGN32 byte calcTag[AES_BLOCK_SIZE];
            /* Calculate authentication tag. */
            ret = AesGcmFinal_C(aes, calcTag, authTagSz);
            if (ret == 0) {
                /* Check calculated tag matches the one passed in, using a
                 * timing-independent comparison. */
                if (ConstantCompare(authTag, calcTag, (int)authTagSz) != 0) {
                    ret = AES_GCM_AUTH_E;
                }
            }
        }
    }
    /* reset the state */
    if (ret == 0)
        wc_AesFree(aes);
    return ret;
}
  7863. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  7864. #endif /* WOLFSSL_AESGCM_STREAM */
  7865. #endif /* WOLFSSL_XILINX_CRYPT */
  7866. #endif /* end of block for AESGCM implementation selection */
  7867. /* Common to all, abstract functions that build off of lower level AESGCM
  7868. * functions */
  7869. #ifndef WC_NO_RNG
  7870. static WARN_UNUSED_RESULT WC_INLINE int CheckAesGcmIvSize(int ivSz) {
  7871. return (ivSz == GCM_NONCE_MIN_SZ ||
  7872. ivSz == GCM_NONCE_MID_SZ ||
  7873. ivSz == GCM_NONCE_MAX_SZ);
  7874. }
/* Set an externally-generated IV (nonce) on the AES-GCM object.
 *
 * @param [in,out] aes  AES object.
 * @param [in]     iv   IV/nonce to copy in.
 * @param [in]     ivSz Length of IV in bytes; must be one of the sizes
 *                      accepted by CheckAesGcmIvSize().
 * @return 0 on success, BAD_FUNC_ARG on invalid parameters.
 */
int wc_AesGcmSetExtIV(Aes* aes, const byte* iv, word32 ivSz)
{
    int ret = 0;

    if (aes == NULL || iv == NULL || !CheckAesGcmIvSize((int)ivSz)) {
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        XMEMCPY((byte*)aes->reg, iv, ivSz);

        /* If the IV is 96 bits, allow for a 2^64 invocation counter.
         * For any other size for the nonce, limit the invocation
         * counter to 32-bits. (SP 800-38D 8.3) */
        aes->invokeCtr[0] = 0;
        aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
    #ifdef WOLFSSL_AESGCM_STREAM
        aes->ctrSet = 1;
    #endif
        aes->nonceSz = ivSz;
    }

    return ret;
}
/* Generate and set a random IV for AES-GCM, with an optional fixed prefix.
 *
 * The fixed portion (if any) occupies the front of the IV; the remainder is
 * filled from the RNG.
 *
 * @param [in,out] aes       AES object.
 * @param [in]     ivSz      Total IV length; must pass CheckAesGcmIvSize().
 * @param [in]     ivFixed   Optional fixed prefix; may be NULL.
 * @param [in]     ivFixedSz Must be 0 when ivFixed is NULL, and exactly
 *                           AES_IV_FIXED_SZ when ivFixed is given.
 * @param [in]     rng       Random number generator for the variable part.
 * @return 0 on success, BAD_FUNC_ARG on bad parameters, or an RNG error.
 */
int wc_AesGcmSetIV(Aes* aes, word32 ivSz,
                   const byte* ivFixed, word32 ivFixedSz,
                   WC_RNG* rng)
{
    int ret = 0;

    if (aes == NULL || rng == NULL || !CheckAesGcmIvSize((int)ivSz) ||
        (ivFixed == NULL && ivFixedSz != 0) ||
        (ivFixed != NULL && ivFixedSz != AES_IV_FIXED_SZ)) {
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        byte* iv = (byte*)aes->reg;

        if (ivFixedSz)
            XMEMCPY(iv, ivFixed, ivFixedSz);

        /* Randomize everything after the fixed prefix. */
        ret = wc_RNG_GenerateBlock(rng, iv + ivFixedSz, ivSz - ivFixedSz);
    }

    if (ret == 0) {
        /* If the IV is 96 bits, allow for a 2^64 invocation counter.
         * For any other size for the nonce, limit the invocation
         * counter to 32-bits. (SP 800-38D 8.3) */
        aes->invokeCtr[0] = 0;
        aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
    #ifdef WOLFSSL_AESGCM_STREAM
        aes->ctrSet = 1;
    #endif
        aes->nonceSz = ivSz;
    }

    return ret;
}
/* AES-GCM encrypt using the internally stored IV; the IV actually used is
 * returned through ivOut and the stored IV is advanced for the next call.
 *
 * The invocation counter (primed by wc_AesGcmSetExtIV/wc_AesGcmSetIV)
 * enforces the per-key invocation limits of SP 800-38D section 8.3.
 *
 * @return 0 on success, BAD_FUNC_ARG, AES_GCM_OVERFLOW_E when the
 *         invocation limit is reached, or an underlying encrypt error.
 */
int wc_AesGcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
                        byte* ivOut, word32 ivOutSz,
                        byte* authTag, word32 authTagSz,
                        const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* ivOutSz must match the nonce size previously set on the object. */
    if (aes == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
        ivOut == NULL || ivOutSz != aes->nonceSz ||
        (authIn == NULL && authInSz != 0)) {
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        /* 64-bit increment of the invocation counter; fail on wrap. */
        aes->invokeCtr[0]++;
        if (aes->invokeCtr[0] == 0) {
            aes->invokeCtr[1]++;
            if (aes->invokeCtr[1] == 0)
                ret = AES_GCM_OVERFLOW_E;
        }
    }

    if (ret == 0) {
        /* Report the IV being used, then encrypt with it. */
        XMEMCPY(ivOut, aes->reg, ivOutSz);
        ret = wc_AesGcmEncrypt(aes, out, in, sz,
                               (byte*)aes->reg, ivOutSz,
                               authTag, authTagSz,
                               authIn, authInSz);
        if (ret == 0)
            /* Advance the stored IV for the next message. */
            IncCtr((byte*)aes->reg, ivOutSz);
    }

    return ret;
}
/* One-shot GMAC: authenticate authIn under key with a freshly generated IV.
 *
 * Implemented as AES-GCM encryption of an empty plaintext. The generated IV
 * is written to iv and the tag to authTag.
 *
 * @param [in]  key       AES key.
 * @param [in]  keySz     Key length in bytes.
 * @param [out] iv        Receives the generated IV (ivSz bytes).
 * @param [in]  ivSz      IV length to generate.
 * @param [in]  authIn    Data to authenticate; may be NULL if authInSz is 0.
 * @param [in]  authInSz  Length of data to authenticate.
 * @param [out] authTag   Receives the authentication tag.
 * @param [in]  authTagSz Tag length; must be non-zero.
 * @param [in]  rng       RNG used to generate the IV.
 * @return 0 on success, BAD_FUNC_ARG/MEMORY_E or an underlying AES error.
 */
int wc_Gmac(const byte* key, word32 keySz, byte* iv, word32 ivSz,
            const byte* authIn, word32 authInSz,
            byte* authTag, word32 authTagSz, WC_RNG* rng)
{
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif
    int ret;

    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
        authTag == NULL || authTagSz == 0 || rng == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            ret = wc_AesGcmSetIV(aes, ivSz, NULL, 0, rng);
        if (ret == 0)
            /* GCM with zero-length plaintext == GMAC over the AAD only. */
            ret = wc_AesGcmEncrypt_ex(aes, NULL, NULL, 0, iv, ivSz,
                                      authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    /* Scrub key material regardless of outcome. */
    ForceZero(aes, sizeof *aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}
/* One-shot GMAC verification: recompute the tag over authIn with the given
 * IV and compare against authTag (comparison performed inside
 * wc_AesGcmDecrypt).
 *
 * @return 0 when the tag verifies, BAD_FUNC_ARG/MEMORY_E, an AES-GCM
 *         authentication error, or NOT_COMPILED_IN without HAVE_AES_DECRYPT.
 */
int wc_GmacVerify(const byte* key, word32 keySz,
                  const byte* iv, word32 ivSz,
                  const byte* authIn, word32 authInSz,
                  const byte* authTag, word32 authTagSz)
{
    int ret;
#ifdef HAVE_AES_DECRYPT
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif

    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
        authTag == NULL || authTagSz == 0 || authTagSz > AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            /* Zero-length ciphertext: only the AAD and tag are checked. */
            ret = wc_AesGcmDecrypt(aes, NULL, NULL, 0, iv, ivSz,
                                   authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    /* Scrub key material regardless of outcome. */
    ForceZero(aes, sizeof *aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif
#else
    (void)key;
    (void)keySz;
    (void)iv;
    (void)ivSz;
    (void)authIn;
    (void)authInSz;
    (void)authTag;
    (void)authTagSz;
    ret = NOT_COMPILED_IN;
#endif
    return ret;
}
  8035. #endif /* WC_NO_RNG */
  8036. WOLFSSL_API int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len)
  8037. {
  8038. if (gmac == NULL || key == NULL) {
  8039. return BAD_FUNC_ARG;
  8040. }
  8041. return wc_AesGcmSetKey(&gmac->aes, key, len);
  8042. }
  8043. WOLFSSL_API int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz,
  8044. const byte* authIn, word32 authInSz,
  8045. byte* authTag, word32 authTagSz)
  8046. {
  8047. if (gmac == NULL) {
  8048. return BAD_FUNC_ARG;
  8049. }
  8050. return wc_AesGcmEncrypt(&gmac->aes, NULL, NULL, 0, iv, ivSz,
  8051. authTag, authTagSz, authIn, authInSz);
  8052. }
  8053. #endif /* HAVE_AESGCM */
  8054. #ifdef HAVE_AESCCM
  8055. int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz)
  8056. {
  8057. if (!((keySz == 16) || (keySz == 24) || (keySz == 32)))
  8058. return BAD_FUNC_ARG;
  8059. return wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
  8060. }
  8061. /* Checks if the tag size is an accepted value based on RFC 3610 section 2
  8062. * returns 0 if tag size is ok
  8063. */
  8064. int wc_AesCcmCheckTagSize(int sz)
  8065. {
  8066. /* values here are from RFC 3610 section 2 */
  8067. if (sz != 4 && sz != 6 && sz != 8 && sz != 10 && sz != 12 && sz != 14
  8068. && sz != 16) {
  8069. WOLFSSL_MSG("Bad auth tag size AES-CCM");
  8070. return BAD_FUNC_ARG;
  8071. }
  8072. return 0;
  8073. }
  8074. #ifdef WOLFSSL_ARMASM
  8075. /* implementation located in wolfcrypt/src/port/arm/armv8-aes.c */
  8076. #elif defined(HAVE_COLDFIRE_SEC)
  8077. #error "Coldfire SEC doesn't currently support AES-CCM mode"
  8078. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  8079. !defined(WOLFSSL_QNX_CAAM)
  8080. /* implemented in wolfcrypt/src/port/caam_aes.c */
  8081. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  8082. /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
/* AES-CCM encrypt via the Silicon Labs SE accelerator.
 * All parameter validation is delegated to the port implementation. */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                   const byte* nonce, word32 nonceSz,
                   byte* authTag, word32 authTagSz,
                   const byte* authIn, word32 authInSz)
{
    return wc_AesCcmEncrypt_silabs(
        aes, out, in, inSz,
        nonce, nonceSz,
        authTag, authTagSz,
        authIn, authInSz);
}
#ifdef HAVE_AES_DECRYPT
/* AES-CCM decrypt via the Silicon Labs SE accelerator.
 * All parameter validation is delegated to the port implementation. */
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                   const byte* nonce, word32 nonceSz,
                   const byte* authTag, word32 authTagSz,
                   const byte* authIn, word32 authInSz)
{
    return wc_AesCcmDecrypt_silabs(
        aes, out, in, inSz,
        nonce, nonceSz,
        authTag, authTagSz,
        authIn, authInSz);
}
#endif
  8107. #elif defined(FREESCALE_LTC)
/* AES-CCM encrypt via the Freescale/NXP LTC hardware.
 * return 0 on success */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                   const byte* nonce, word32 nonceSz,
                   byte* authTag, word32 authTagSz,
                   const byte* authIn, word32 authInSz)
{
    byte *key;
    word32 keySize;
    status_t status;

    /* sanity check on arguments */
    /* note, LTC_AES_EncryptTagCcm() doesn't allow null src or dst
     * ptrs even if inSz is zero (ltc_aes_ccm_check_input_args()), so
     * don't allow it here either.
     */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }

    if (wc_AesCcmCheckTagSize(authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

    key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* Serialize access to the LTC hardware. */
    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_EncryptTagCcm(LTC_BASE, in, out, inSz,
        nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (kStatus_Success == status) ? 0 : BAD_FUNC_ARG;
}
  8142. #ifdef HAVE_AES_DECRYPT
/* AES-CCM decrypt via the Freescale/NXP LTC hardware.
 * On authentication failure the output buffer is zeroed and
 * AES_CCM_AUTH_E is returned. */
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                   const byte* nonce, word32 nonceSz,
                   const byte* authTag, word32 authTagSz,
                   const byte* authIn, word32 authInSz)
{
    byte *key;
    word32 keySize;
    status_t status;

    /* sanity check on arguments */
    /* NOTE(review): unlike the encrypt path above, authTagSz is not run
     * through wc_AesCcmCheckTagSize() here — confirm whether the hardware
     * validates it. */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }

    key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* Serialize access to the LTC hardware. */
    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_DecryptTagCcm(LTC_BASE, in, out, inSz,
        nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    if (status != kStatus_Success) {
        /* Do not release unauthenticated plaintext. */
        XMEMSET(out, 0, inSz);
        return AES_CCM_AUTH_E;
    }
    return 0;
}
  8173. #endif /* HAVE_AES_DECRYPT */
  8174. #else
  8175. /* Software CCM */
  8176. static WARN_UNUSED_RESULT int roll_x(
  8177. Aes* aes, const byte* in, word32 inSz, byte* out)
  8178. {
  8179. int ret;
  8180. /* process the bulk of the data */
  8181. while (inSz >= AES_BLOCK_SIZE) {
  8182. xorbuf(out, in, AES_BLOCK_SIZE);
  8183. in += AES_BLOCK_SIZE;
  8184. inSz -= AES_BLOCK_SIZE;
  8185. ret = wc_AesEncrypt(aes, out, out);
  8186. if (ret != 0)
  8187. return ret;
  8188. }
  8189. /* process remainder of the data */
  8190. if (inSz > 0) {
  8191. xorbuf(out, in, inSz);
  8192. ret = wc_AesEncrypt(aes, out, out);
  8193. if (ret != 0)
  8194. return ret;
  8195. }
  8196. return 0;
  8197. }
/* Fold the CCM additional-authenticated-data (AAD) into the running
 * CBC-MAC value in out, prefixing the RFC 3610 length encoding:
 * a 2-byte big-endian length for sizes up to 0xFEFF, otherwise the
 * 0xFF 0xFE marker followed by a 4-byte big-endian length. */
static WARN_UNUSED_RESULT int roll_auth(
    Aes* aes, const byte* in, word32 inSz, byte* out)
{
    word32 authLenSz;
    word32 remainder;
    int ret;

    /* encode the length in */
    if (inSz <= 0xFEFF) {
        authLenSz = 2;
        out[0] ^= (byte)(inSz >> 8);
        out[1] ^= (byte)inSz;
    }
    else {
        authLenSz = 6;
        out[0] ^= 0xFF;
        out[1] ^= 0xFE;
        out[2] ^= (byte)(inSz >> 24);
        out[3] ^= (byte)(inSz >> 16);
        out[4] ^= (byte)(inSz >> 8);
        out[5] ^= (byte)inSz;
    }
    /* Note, the protocol handles auth data up to 2^64, but we are
     * using 32-bit sizes right now, so the bigger data isn't handled
     * else {}
     */

    /* start fill out the rest of the first block */
    remainder = AES_BLOCK_SIZE - authLenSz;
    if (inSz >= remainder) {
        /* plenty of bulk data to fill the remainder of this block */
        xorbuf(out + authLenSz, in, remainder);
        inSz -= remainder;
        in += remainder;
    }
    else {
        /* not enough bulk data, copy what is available, and pad zero */
        xorbuf(out + authLenSz, in, inSz);
        inSz = 0;
    }
    /* Absorb the first (length-prefixed) block, then the rest via roll_x. */
    ret = wc_AesEncrypt(aes, out, out);

    if ((ret == 0) && (inSz > 0)) {
        ret = roll_x(aes, in, inSz, out);
    }

    return ret;
}
  8242. static WC_INLINE void AesCcmCtrInc(byte* B, word32 lenSz)
  8243. {
  8244. word32 i;
  8245. for (i = 0; i < lenSz; i++) {
  8246. if (++B[AES_BLOCK_SIZE - 1 - i] != 0) return;
  8247. }
  8248. }
  8249. #ifdef WOLFSSL_AESNI
/* Expand counter block B into four consecutive counter blocks
 * (B, B+1, B+2, B+3), laid out AES_BLOCK_SIZE apart, for the 4-wide
 * AES-NI ECB path. lenSz is the number of trailing counter bytes. */
static WC_INLINE void AesCcmCtrIncSet4(byte* B, word32 lenSz)
{
    word32 i;

    /* B+1 = B */
    XMEMCPY(B + AES_BLOCK_SIZE * 1, B, AES_BLOCK_SIZE);
    /* B+2,B+3 = B,B+1 */
    XMEMCPY(B + AES_BLOCK_SIZE * 2, B, AES_BLOCK_SIZE * 2);

    /* Second block: add 1 with byte-wise carry. */
    for (i = 0; i < lenSz; i++) {
        if (++B[AES_BLOCK_SIZE * 2 - 1 - i] != 0) break;
    }
    /* Third block: add 2; propagate carry when the low byte wrapped. */
    B[AES_BLOCK_SIZE * 3 - 1] += 2;
    if (B[AES_BLOCK_SIZE * 3 - 1] < 2) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 3 - 1 - i] != 0) break;
        }
    }
    /* Fourth block: add 3; propagate carry when the low byte wrapped. */
    B[AES_BLOCK_SIZE * 4 - 1] += 3;
    if (B[AES_BLOCK_SIZE * 4 - 1] < 3) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 4 - 1 - i] != 0) break;
        }
    }
}
  8273. static WC_INLINE void AesCcmCtrInc4(byte* B, word32 lenSz)
  8274. {
  8275. word32 i;
  8276. B[AES_BLOCK_SIZE - 1] += 4;
  8277. if (B[AES_BLOCK_SIZE - 1] < 4) {
  8278. for (i = 1; i < lenSz; i++) {
  8279. if (++B[AES_BLOCK_SIZE - 1 - i] != 0) break;
  8280. }
  8281. }
  8282. }
  8283. #endif
/* Software AES - CCM Encrypt (RFC 3610 / SP 800-38C).
 * B holds the block being fed to AES (B0, AAD/data blocks, counter blocks);
 * A holds AES output (running CBC-MAC, then the CTR keystream).
 * return 0 on success */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                   const byte* nonce, word32 nonceSz,
                   byte* authTag, word32 authTagSz,
                   const byte* authIn, word32 authInSz)
{
#ifndef WOLFSSL_AESNI
    byte A[AES_BLOCK_SIZE];
    byte B[AES_BLOCK_SIZE];
#else
    /* AES-NI path processes four blocks per pass. */
    ALIGN128 byte A[AES_BLOCK_SIZE * 4];
    ALIGN128 byte B[AES_BLOCK_SIZE * 4];
#endif
    byte lenSz;
    word32 i;
    byte mask = 0xFF;
    const word32 wordSz = (word32)sizeof(word32);
    int ret;

    /* sanity check on arguments */
    if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
            nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
            authTagSz > AES_BLOCK_SIZE)
        return BAD_FUNC_ARG;

    /* sanity check on tag size */
    if (wc_AesCcmCheckTagSize((int)authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz,
                                      authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    /* Build B0: flags byte, nonce, then the message length in the
     * trailing lenSz bytes (big-endian). */
    XMEMSET(A, 0, sizeof(A));
    XMEMCPY(B+1, nonce, nonceSz);
    lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;
    /* Flags: Adata bit (64), encoded tag length, encoded length-field size. */
    B[0] = (byte)((authInSz > 0 ? 64 : 0)
                  + (8 * (((byte)authTagSz - 2) / 2))
                  + (lenSz - 1));
    for (i = 0; i < lenSz; i++) {
        if (mask && i >= wordSz)
            mask = 0x00;
        B[AES_BLOCK_SIZE - 1 - i] = (byte)((inSz >> ((8 * i) & mask)) & mask);
    }

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B));
#endif

    /* Start the CBC-MAC: A = E(B0). */
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A));
#endif

    /* Fold the AAD, then the plaintext, into the CBC-MAC. */
    if (authInSz > 0) {
        ret = roll_auth(aes, authIn, authInSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
    }
    if (inSz > 0) {
        ret = roll_x(aes, in, inSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
    }
    /* Unmasked MAC; it is encrypted with S0 below. */
    XMEMCPY(authTag, A, authTagSz);

    /* Counter block A0 (counter value 0); S0 = E(A0) masks the tag. */
    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }
    xorbuf(authTag, A, authTagSz);

    /* Counter value 1 starts the payload keystream. */
    B[15] = 1;
#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        /* Encrypt four counter blocks at a time. */
        while (inSz >= AES_BLOCK_SIZE * 4) {
            AesCcmCtrIncSet4(B, lenSz);

            AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
                            (int)aes->rounds);

            xorbuf(A, in, AES_BLOCK_SIZE * 4);
            XMEMCPY(out, A, AES_BLOCK_SIZE * 4);

            inSz -= AES_BLOCK_SIZE * 4;
            in += AES_BLOCK_SIZE * 4;
            out += AES_BLOCK_SIZE * 4;

            AesCcmCtrInc4(B, lenSz);
        }
        RESTORE_VECTOR_REGISTERS();
    }
#endif
    /* Remaining whole blocks, one at a time. */
    while (inSz >= AES_BLOCK_SIZE) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
        xorbuf(A, in, AES_BLOCK_SIZE);
        XMEMCPY(out, A, AES_BLOCK_SIZE);

        AesCcmCtrInc(B, lenSz);
        inSz -= AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }
    /* Final partial block. */
    if (inSz > 0) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
        xorbuf(A, in, inSz);
        XMEMCPY(out, A, inSz);
    }

    /* Scrub intermediate state. */
    ForceZero(A, sizeof(A));
    ForceZero(B, sizeof(B));

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(A, sizeof(A));
    wc_MemZero_Check(B, sizeof(B));
#endif

    return 0;
}
  8447. #ifdef HAVE_AES_DECRYPT
/* Software AES - CCM Decrypt (RFC 3610 / SP 800-38C).
 * First decrypts the payload in CTR mode (counter starting at 1), then
 * recomputes the CBC-MAC over the decrypted plaintext and compares it,
 * in constant time, against the supplied tag. On mismatch the output is
 * zeroed (unless building ACVP test vectors) and AES_CCM_AUTH_E returned. */
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                   const byte* nonce, word32 nonceSz,
                   const byte* authTag, word32 authTagSz,
                   const byte* authIn, word32 authInSz)
{
#ifndef WOLFSSL_AESNI
    byte A[AES_BLOCK_SIZE];
    byte B[AES_BLOCK_SIZE];
#else
    /* AES-NI path processes four blocks per pass. */
    ALIGN128 byte B[AES_BLOCK_SIZE * 4];
    ALIGN128 byte A[AES_BLOCK_SIZE * 4];
#endif
    byte* o;
    byte lenSz;
    word32 i, oSz;
    byte mask = 0xFF;
    const word32 wordSz = (word32)sizeof(word32);
    int ret;

    /* sanity check on arguments */
    if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
            nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
            authTagSz > AES_BLOCK_SIZE)
        return BAD_FUNC_ARG;

    /* sanity check on tag size */
    if (wc_AesCcmCheckTagSize((int)authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz,
                                      authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    o = out;
    oSz = inSz;
    /* Build the counter block: flags, nonce, counter = 1 for the payload. */
    XMEMSET(A, 0, sizeof A);
    XMEMCPY(B+1, nonce, nonceSz);
    lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;

    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    B[15] = 1;

#ifdef WOLFSSL_CHECK_MEM_ZERO
    /* NOTE(review): labels say "Encrypt" in the decrypt path — looks like a
     * copy-paste of the encrypt labels; confirm and rename upstream. */
    wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A));
    wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B));
#endif

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        /* Decrypt four counter blocks at a time. */
        while (oSz >= AES_BLOCK_SIZE * 4) {
            AesCcmCtrIncSet4(B, lenSz);

            AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
                            (int)aes->rounds);

            xorbuf(A, in, AES_BLOCK_SIZE * 4);
            XMEMCPY(o, A, AES_BLOCK_SIZE * 4);

            oSz -= AES_BLOCK_SIZE * 4;
            in += AES_BLOCK_SIZE * 4;
            o += AES_BLOCK_SIZE * 4;

            AesCcmCtrInc4(B, lenSz);
        }
        RESTORE_VECTOR_REGISTERS();
    }
#endif
    /* Remaining whole blocks, one at a time. */
    while (oSz >= AES_BLOCK_SIZE) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
        xorbuf(A, in, AES_BLOCK_SIZE);
        XMEMCPY(o, A, AES_BLOCK_SIZE);

        AesCcmCtrInc(B, lenSz);
        oSz -= AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        o += AES_BLOCK_SIZE;
    }
    /* Final partial block. */
    if (inSz > 0) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
        xorbuf(A, in, oSz);
        XMEMCPY(o, A, oSz);
    }

    /* Reset the counter field to zero (counter block A0). */
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    /* NOTE(review): the result of this encryption is overwritten below
     * (A is rewritten before being read) — appears redundant; confirm
     * before removing. */
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }

    o = out;
    oSz = inSz;

    /* Rebuild B0 (flags + nonce + message length) for the CBC-MAC. */
    B[0] = (byte)((authInSz > 0 ? 64 : 0)
                  + (8 * (((byte)authTagSz - 2) / 2))
                  + (lenSz - 1));
    for (i = 0; i < lenSz; i++) {
        if (mask && i >= wordSz)
            mask = 0x00;
        B[AES_BLOCK_SIZE - 1 - i] = (byte)((inSz >> ((8 * i) & mask)) & mask);
    }

    /* Start the CBC-MAC: A = E(B0). */
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }

    /* Fold the AAD, then the decrypted plaintext, into the CBC-MAC. */
    if (authInSz > 0) {
        ret = roll_auth(aes, authIn, authInSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
    }
    if (inSz > 0) {
        ret = roll_x(aes, o, oSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
    }

    /* S0 = E(A0); tag = MAC XOR S0. */
    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    ret = wc_AesEncrypt(aes, B, B);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }
    xorbuf(A, B, authTagSz);

    if (ConstantCompare(A, authTag, (int)authTagSz) != 0) {
        /* If the authTag check fails, don't keep the decrypted data.
         * Unfortunately, you need the decrypted data to calculate the
         * check value. */
        #if defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2) && \
            defined(ACVP_VECTOR_TESTING)
            WOLFSSL_MSG("Preserve output for vector responses");
        #else
            if (inSz > 0)
                XMEMSET(out, 0, inSz);
        #endif
        ret = AES_CCM_AUTH_E;
    }

    /* Scrub intermediate state. */
    ForceZero(A, sizeof(A));
    ForceZero(B, sizeof(B));
    o = NULL;

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(A, sizeof(A));
    wc_MemZero_Check(B, sizeof(B));
#endif

    return ret;
}
  8643. #endif /* HAVE_AES_DECRYPT */
  8644. #endif /* software CCM */
  8645. /* abstract functions that call lower level AESCCM functions */
  8646. #ifndef WC_NO_RNG
  8647. int wc_AesCcmSetNonce(Aes* aes, const byte* nonce, word32 nonceSz)
  8648. {
  8649. int ret = 0;
  8650. if (aes == NULL || nonce == NULL ||
  8651. nonceSz < CCM_NONCE_MIN_SZ || nonceSz > CCM_NONCE_MAX_SZ) {
  8652. ret = BAD_FUNC_ARG;
  8653. }
  8654. if (ret == 0) {
  8655. XMEMCPY(aes->reg, nonce, nonceSz);
  8656. aes->nonceSz = nonceSz;
  8657. /* Invocation counter should be 2^61 */
  8658. aes->invokeCtr[0] = 0;
  8659. aes->invokeCtr[1] = 0xE0000000;
  8660. }
  8661. return ret;
  8662. }
/* AES-CCM encrypt using the internally stored nonce; the nonce used is
 * returned through ivOut and the stored nonce is advanced for the next
 * call. Requires wc_AesCcmSetNonce() to have been called first.
 *
 * @return 0 on success, BAD_FUNC_ARG, AES_CCM_OVERFLOW_E when the
 *         invocation limit is reached, or an underlying encrypt error.
 */
int wc_AesCcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
                        byte* ivOut, word32 ivOutSz,
                        byte* authTag, word32 authTagSz,
                        const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* ivOutSz must match the nonce size previously set on the object. */
    if (aes == NULL || out == NULL ||
        (in == NULL && sz != 0) ||
        ivOut == NULL ||
        (authIn == NULL && authInSz != 0) ||
        (ivOutSz != aes->nonceSz)) {
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        /* 64-bit increment of the invocation counter; fail on wrap. */
        aes->invokeCtr[0]++;
        if (aes->invokeCtr[0] == 0) {
            aes->invokeCtr[1]++;
            if (aes->invokeCtr[1] == 0)
                ret = AES_CCM_OVERFLOW_E;
        }
    }

    if (ret == 0) {
        ret = wc_AesCcmEncrypt(aes, out, in, sz,
                               (byte*)aes->reg, aes->nonceSz,
                               authTag, authTagSz,
                               authIn, authInSz);
        if (ret == 0) {
            /* Report the nonce used and advance it for the next message. */
            XMEMCPY(ivOut, aes->reg, aes->nonceSz);
            IncCtr((byte*)aes->reg, aes->nonceSz);
        }
    }

    return ret;
}
  8696. #endif /* WC_NO_RNG */
  8697. #endif /* HAVE_AESCCM */
/* Initialize an Aes object before first use.
 *
 * Sets the heap hint and device id, then performs any backend-specific
 * initialization compiled in (async, AF_ALG, KCAPI, /dev/crypto,
 * CryptoCell, DCP, MAXQ, PSA) and clears GCM/stream bookkeeping.
 *
 * @param [in,out] aes   AES object to initialize; must not be NULL.
 * @param [in]     heap  Heap hint for dynamic allocation; may be NULL.
 * @param [in]     devId Device id for crypto callbacks/async; ignored when
 *                       neither is compiled in.
 * @return 0 on success, BAD_FUNC_ARG, or a backend initialization error.
 */
int wc_AesInit(Aes* aes, void* heap, int devId)
{
    int ret = 0;

    if (aes == NULL)
        return BAD_FUNC_ARG;

    aes->heap = heap;

#ifdef WOLF_CRYPTO_CB
    aes->devId = devId;
    aes->devCtx = NULL;
#else
    (void)devId;
#endif
#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    ret = wolfAsync_DevCtxInit(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES,
                               aes->heap, devId);
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_AFALG
    /* AF_ALG sockets start unset; opened lazily by the port code. */
    aes->alFd = WC_SOCK_NOTSET;
    aes->rdFd = WC_SOCK_NOTSET;
#endif
#ifdef WOLFSSL_KCAPI_AES
    aes->handle = NULL;
    aes->init = 0;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    aes->ctx.cfd = -1;
#endif
#if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
    XMEMSET(&aes->ctx, 0, sizeof(aes->ctx));
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    DCPAesInit(aes);
#endif

#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    XMEMSET(&aes->maxq_ctx, 0, sizeof(aes->maxq_ctx));
#endif

#ifdef HAVE_AESGCM
#ifdef OPENSSL_EXTRA
    XMEMSET(aes->gcm.aadH, 0, sizeof(aes->gcm.aadH));
    aes->gcm.aadLen = 0;
#endif
#endif

#ifdef WOLFSSL_AESGCM_STREAM
#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    aes->streamData = NULL;
#endif
    /* Streaming GCM state: nothing set until key/IV are provided. */
    aes->keylen = 0;
    aes->nonceSz = 0;
    aes->gcmKeySet = 0;
    aes->nonceSet = 0;
    aes->ctrSet = 0;
#endif

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    ret = wc_psa_aes_init(aes);
#endif

    return ret;
}
  8757. #ifdef WOLF_PRIVATE_KEY_ID
  8758. int wc_AesInit_Id(Aes* aes, unsigned char* id, int len, void* heap, int devId)
  8759. {
  8760. int ret = 0;
  8761. if (aes == NULL)
  8762. ret = BAD_FUNC_ARG;
  8763. if (ret == 0 && (len < 0 || len > AES_MAX_ID_LEN))
  8764. ret = BUFFER_E;
  8765. if (ret == 0)
  8766. ret = wc_AesInit(aes, heap, devId);
  8767. if (ret == 0) {
  8768. XMEMCPY(aes->id, id, (size_t)len);
  8769. aes->idLen = len;
  8770. aes->labelLen = 0;
  8771. }
  8772. return ret;
  8773. }
  8774. int wc_AesInit_Label(Aes* aes, const char* label, void* heap, int devId)
  8775. {
  8776. int ret = 0;
  8777. size_t labelLen = 0;
  8778. if (aes == NULL || label == NULL)
  8779. ret = BAD_FUNC_ARG;
  8780. if (ret == 0) {
  8781. labelLen = XSTRLEN(label);
  8782. if (labelLen == 0 || labelLen > AES_MAX_LABEL_LEN)
  8783. ret = BUFFER_E;
  8784. }
  8785. if (ret == 0)
  8786. ret = wc_AesInit(aes, heap, devId);
  8787. if (ret == 0) {
  8788. XMEMCPY(aes->label, label, labelLen);
  8789. aes->labelLen = (int)labelLen;
  8790. aes->idLen = 0;
  8791. }
  8792. return ret;
  8793. }
  8794. #endif
/* Release all resources held by an Aes object and scrub key material.
 *
 * Safe to call with NULL. Backend-specific teardown (async, AF_ALG,
 * KCAPI, /dev/crypto, DCP, SE050, PSA, MAXQ) is compiled in as needed.
 *
 * @param [in,out] aes AES object to free; may be NULL (no-op).
 */
void wc_AesFree(Aes* aes)
{
    if (aes == NULL)
        return;

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    wolfAsync_DevCtxFree(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES);
#endif /* WOLFSSL_ASYNC_CRYPT */
#if defined(WOLFSSL_AFALG) || defined(WOLFSSL_AFALG_XILINX_AES)
    /* Close AF_ALG sockets if they were opened. */
    if (aes->rdFd > 0) { /* negative is error case */
        close(aes->rdFd);
        aes->rdFd = WC_SOCK_NOTSET;
    }
    if (aes->alFd > 0) {
        close(aes->alFd);
        aes->alFd = WC_SOCK_NOTSET;
    }
#endif /* WOLFSSL_AFALG */
#ifdef WOLFSSL_KCAPI_AES
    /* Scrub the device key copy before destroying the handle. */
    ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
    if (aes->init == 1) {
        kcapi_cipher_destroy(aes->handle);
    }
    aes->init = 0;
    aes->handle = NULL;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    wc_DevCryptoFree(&aes->ctx);
#endif
#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
    (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
    /* Scrub the device key copy held for callbacks/async/devcrypto. */
    ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    DCPAesFree(aes);
#endif
#if defined(WOLFSSL_AESGCM_STREAM) && defined(WOLFSSL_SMALL_STACK) && \
    !defined(WOLFSSL_AESNI)
    /* Free lazily allocated GCM streaming state. */
    if (aes->streamData != NULL) {
        XFREE(aes->streamData, aes->heap, DYNAMIC_TYPE_AES);
        aes->streamData = NULL;
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    if (aes->useSWCrypt == 0) {
        se050_aes_free(aes);
    }
#endif

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    wc_psa_aes_free(aes);
#endif

#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    wc_MAXQ10XX_AesFree(aes);
#endif

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(aes, sizeof(Aes));
#endif
}
/* Report the AES key size, in bytes, configured on the Aes object.
 *
 * aes      initialized Aes structure with a key set
 * keySize  [out] 16, 24 or 32 on success; 0 on failure
 *
 * returns 0 on success, BAD_FUNC_ARG on NULL arguments or when the
 * round count does not map to a supported key size */
int wc_AesGetKeySize(Aes* aes, word32* keySize)
{
int ret = 0;
if (aes == NULL || keySize == NULL) {
return BAD_FUNC_ARG;
}
#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
/* PSA owns the key material; query it directly */
return wc_psa_aes_get_key_size(aes, keySize);
#endif
#if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
*keySize = aes->ctx.key.keySize;
return ret;
#endif
/* derive key length from the round count fixed at key-setup time */
switch (aes->rounds) {
#ifdef WOLFSSL_AES_128
case 10:
*keySize = 16;
break;
#endif
#ifdef WOLFSSL_AES_192
case 12:
*keySize = 24;
break;
#endif
#ifdef WOLFSSL_AES_256
case 14:
*keySize = 32;
break;
#endif
default:
*keySize = 0;
ret = BAD_FUNC_ARG;
}
return ret;
}
  8890. #endif /* !WOLFSSL_TI_CRYPT */
  8891. #ifdef HAVE_AES_ECB
  8892. #if defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  8893. !defined(WOLFSSL_QNX_CAAM)
  8894. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  8895. #elif defined(WOLFSSL_AFALG)
  8896. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  8897. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  8898. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  8899. #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
  8900. /* Software AES - ECB */
  8901. int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  8902. {
  8903. if ((in == NULL) || (out == NULL) || (aes == NULL))
  8904. return BAD_FUNC_ARG;
  8905. return AES_ECB_encrypt(aes, in, out, sz);
  8906. }
  8907. int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  8908. {
  8909. if ((in == NULL) || (out == NULL) || (aes == NULL))
  8910. return BAD_FUNC_ARG;
  8911. return AES_ECB_decrypt(aes, in, out, sz);
  8912. }
  8913. #else
/* Software AES - ECB */
/* Encrypt whole AES blocks of 'in' into 'out'; sz is rounded down to a
 * multiple of AES_BLOCK_SIZE (trailing partial block is ignored).
 * Argument checking and vector-register save/restore are handled by the
 * public wc_AesEcbEncrypt() wrapper. */
static WARN_UNUSED_RESULT int _AesEcbEncrypt(
Aes* aes, byte* out, const byte* in, word32 sz)
{
word32 blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLF_CRYPTO_CB
#ifndef WOLF_CRYPTO_CB_FIND
if (aes->devId != INVALID_DEVID)
#endif
{
/* try the registered crypto callback first */
int ret = wc_CryptoCb_AesEcbEncrypt(aes, out, in, sz);
if (ret != CRYPTOCB_UNAVAILABLE)
return ret;
/* fall-through when unavailable */
}
#endif
#ifdef WOLFSSL_IMXRT_DCP
/* DCP hardware path only handles AES-128 keys */
if (aes->keylen == 16)
return DCPAesEcbEncrypt(aes, out, in, sz);
#endif
#ifdef WOLFSSL_AESNI
if (haveAESNI && aes->use_aesni) {
SAVE_VECTOR_REGISTERS(return _svr_ret;);
AES_ECB_encrypt(in, out, sz, (byte*)aes->key, (int)aes->rounds);
RESTORE_VECTOR_REGISTERS();
/* all data handled by AES-NI; skip the software loop below */
blocks = 0;
}
#endif
while (blocks > 0) {
int ret = wc_AesEncryptDirect(aes, out, in);
if (ret != 0)
return ret;
out += AES_BLOCK_SIZE;
in += AES_BLOCK_SIZE;
blocks--;
}
return 0;
}
/* Decrypt whole AES blocks of 'in' into 'out'; sz is rounded down to a
 * multiple of AES_BLOCK_SIZE (trailing partial block is ignored).
 * Argument checking and vector-register save/restore are handled by the
 * public wc_AesEcbDecrypt() wrapper. */
static WARN_UNUSED_RESULT int _AesEcbDecrypt(
Aes* aes, byte* out, const byte* in, word32 sz)
{
word32 blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLF_CRYPTO_CB
#ifndef WOLF_CRYPTO_CB_FIND
if (aes->devId != INVALID_DEVID)
#endif
{
/* try the registered crypto callback first */
int ret = wc_CryptoCb_AesEcbDecrypt(aes, out, in, sz);
if (ret != CRYPTOCB_UNAVAILABLE)
return ret;
/* fall-through when unavailable */
}
#endif
#ifdef WOLFSSL_IMXRT_DCP
/* DCP hardware path only handles AES-128 keys */
if (aes->keylen == 16)
return DCPAesEcbDecrypt(aes, out, in, sz);
#endif
#ifdef WOLFSSL_AESNI
if (haveAESNI && aes->use_aesni) {
SAVE_VECTOR_REGISTERS(return _svr_ret;);
AES_ECB_decrypt(in, out, sz, (byte*)aes->key, (int)aes->rounds);
RESTORE_VECTOR_REGISTERS();
/* all data handled by AES-NI; skip the software loop below */
blocks = 0;
}
#endif
while (blocks > 0) {
int ret = wc_AesDecryptDirect(aes, out, in);
if (ret != 0)
return ret;
out += AES_BLOCK_SIZE;
in += AES_BLOCK_SIZE;
blocks--;
}
return 0;
}
  8989. int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  8990. {
  8991. int ret;
  8992. if ((in == NULL) || (out == NULL) || (aes == NULL))
  8993. return BAD_FUNC_ARG;
  8994. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  8995. ret = _AesEcbEncrypt(aes, out, in, sz);
  8996. RESTORE_VECTOR_REGISTERS();
  8997. return ret;
  8998. }
  8999. int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9000. {
  9001. int ret;
  9002. if ((in == NULL) || (out == NULL) || (aes == NULL))
  9003. return BAD_FUNC_ARG;
  9004. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  9005. ret = _AesEcbDecrypt(aes, out, in, sz);
  9006. RESTORE_VECTOR_REGISTERS();
  9007. return ret;
  9008. }
  9009. #endif
  9010. #endif /* HAVE_AES_ECB */
  9011. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_OFB)
  9012. /* Feedback AES mode
  9013. *
  9014. * aes structure holding key to use for encryption
  9015. * out buffer to hold result of encryption (must be at least as large as input
  9016. * buffer)
  9017. * in buffer to encrypt
  9018. * sz size of input buffer
  9019. * mode flag to specify AES mode
  9020. *
  9021. * returns 0 on success and negative error values on failure
  9022. */
/* Software AES - CFB Encrypt */
/* Shared CFB/OFB encrypt core. aes->reg is the feedback register,
 * aes->tmp holds the current keystream block, and aes->left counts
 * unused keystream bytes carried over between calls (stream-like API). */
static WARN_UNUSED_RESULT int wc_AesFeedbackEncrypt(
Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
byte* tmp = NULL;
int ret = 0;
word32 processed;
if (aes == NULL || out == NULL || in == NULL) {
return BAD_FUNC_ARG;
}
/* consume any unused bytes left in aes->tmp */
processed = min(aes->left, sz);
xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left, processed);
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
/* CFB feeds the produced cipher text back into the register */
XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, out, processed);
}
#endif
aes->left -= processed;
out += processed;
in += processed;
sz -= processed;
SAVE_VECTOR_REGISTERS(return _svr_ret;);
while (sz >= AES_BLOCK_SIZE) {
/* Using aes->tmp here for inline case i.e. in=out */
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
if (ret != 0)
break;
#ifdef WOLFSSL_AES_OFB
if (mode == AES_OFB_MODE) {
/* OFB chains the raw keystream block */
XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
}
#endif
xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
/* CFB chains the cipher text (keystream XOR plain text) */
XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
}
#endif
XMEMCPY(out, aes->tmp, AES_BLOCK_SIZE);
out += AES_BLOCK_SIZE;
in += AES_BLOCK_SIZE;
sz -= AES_BLOCK_SIZE;
aes->left = 0;
}
/* encrypt left over data */
if ((ret == 0) && sz) {
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
}
if ((ret == 0) && sz) {
/* keep the unused keystream tail in aes->tmp for the next call */
aes->left = AES_BLOCK_SIZE;
tmp = (byte*)aes->tmp;
#ifdef WOLFSSL_AES_OFB
if (mode == AES_OFB_MODE) {
XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
}
#endif
xorbufout(out, in, tmp, sz);
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
/* partial cipher text becomes the start of the next register fill */
XMEMCPY(aes->reg, out, sz);
}
#endif
aes->left -= sz;
}
RESTORE_VECTOR_REGISTERS();
return ret;
}
  9091. #ifdef HAVE_AES_DECRYPT
  9092. /* CFB 128
  9093. *
  9094. * aes structure holding key to use for decryption
  9095. * out buffer to hold result of decryption (must be at least as large as input
  9096. * buffer)
  9097. * in buffer to decrypt
  9098. * sz size of input buffer
  9099. *
  9100. * returns 0 on success and negative error values on failure
  9101. */
/* Software AES - CFB Decrypt */
/* Shared CFB/OFB decrypt core. Decryption still uses the AES-encrypt
 * primitive to generate keystream; only the feedback value differs:
 * CFB feeds back the CIPHER text (the input here), OFB the keystream. */
static WARN_UNUSED_RESULT int wc_AesFeedbackDecrypt(
Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
int ret = 0;
word32 processed;
if (aes == NULL || out == NULL || in == NULL) {
return BAD_FUNC_ARG;
}
#ifdef WOLFSSL_AES_CFB
/* check if more input needs copied over to aes->reg */
if (aes->left && sz && mode == AES_CFB_MODE) {
word32 size = min(aes->left, sz);
/* capture the incoming cipher text before it is consumed below */
XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, in, size);
}
#endif
/* consume any unused bytes left in aes->tmp */
processed = min(aes->left, sz);
xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left, processed);
aes->left -= processed;
out += processed;
in += processed;
sz -= processed;
SAVE_VECTOR_REGISTERS(return _svr_ret;);
/* NOTE: strict '>' leaves the final block (even a full one) to the
 * tail path below, which also updates the leftover/register state */
while (sz > AES_BLOCK_SIZE) {
/* Using aes->tmp here for inline case i.e. in=out */
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
if (ret != 0)
break;
#ifdef WOLFSSL_AES_OFB
if (mode == AES_OFB_MODE) {
/* OFB chains the raw keystream block */
XMEMCPY((byte*)aes->reg, (byte*)aes->tmp, AES_BLOCK_SIZE);
}
#endif
xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
/* CFB chains the cipher text, i.e. the input when decrypting */
XMEMCPY(aes->reg, in, AES_BLOCK_SIZE);
}
#endif
XMEMCPY(out, (byte*)aes->tmp, AES_BLOCK_SIZE);
out += AES_BLOCK_SIZE;
in += AES_BLOCK_SIZE;
sz -= AES_BLOCK_SIZE;
aes->left = 0;
}
/* decrypt left over data */
if ((ret == 0) && sz) {
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
}
if ((ret == 0) && sz) {
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
XMEMCPY(aes->reg, in, sz);
}
#endif
#ifdef WOLFSSL_AES_OFB
if (mode == AES_OFB_MODE) {
XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
}
#endif
/* record how much keystream in aes->tmp remains for the next call */
aes->left = AES_BLOCK_SIZE - sz;
xorbufout(out, in, aes->tmp, sz);
}
RESTORE_VECTOR_REGISTERS();
return ret;
}
  9169. #endif /* HAVE_AES_DECRYPT */
  9170. #endif /* WOLFSSL_AES_CFB */
  9171. #ifdef WOLFSSL_AES_CFB
  9172. /* CFB 128
  9173. *
  9174. * aes structure holding key to use for encryption
  9175. * out buffer to hold result of encryption (must be at least as large as input
  9176. * buffer)
  9177. * in buffer to encrypt
  9178. * sz size of input buffer
  9179. *
  9180. * returns 0 on success and negative error values on failure
  9181. */
  9182. /* Software AES - CFB Encrypt */
  9183. int wc_AesCfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9184. {
  9185. return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_CFB_MODE);
  9186. }
  9187. #ifdef HAVE_AES_DECRYPT
  9188. /* CFB 128
  9189. *
  9190. * aes structure holding key to use for decryption
  9191. * out buffer to hold result of decryption (must be at least as large as input
  9192. * buffer)
  9193. * in buffer to decrypt
  9194. * sz size of input buffer
  9195. *
  9196. * returns 0 on success and negative error values on failure
  9197. */
  9198. /* Software AES - CFB Decrypt */
  9199. int wc_AesCfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9200. {
  9201. return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_CFB_MODE);
  9202. }
  9203. #endif /* HAVE_AES_DECRYPT */
  9204. /* shift the whole AES_BLOCK_SIZE array left by 8 or 1 bits */
  9205. static void shiftLeftArray(byte* ary, byte shift)
  9206. {
  9207. int i;
  9208. if (shift == WOLFSSL_BIT_SIZE) {
  9209. /* shifting over by 8 bits */
  9210. for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
  9211. ary[i] = ary[i+1];
  9212. }
  9213. ary[i] = 0;
  9214. }
  9215. else {
  9216. /* shifting over by 7 or less bits */
  9217. for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
  9218. byte carry = ary[i+1] & (0XFF << (WOLFSSL_BIT_SIZE - shift));
  9219. carry >>= (WOLFSSL_BIT_SIZE - shift);
  9220. ary[i] = (byte)((ary[i] << shift) + carry);
  9221. }
  9222. ary[i] = ary[i] << shift;
  9223. }
  9224. }
/* returns 0 on success and negative values on failure */
/* CFB 8-bit mode: one byte of plain/cipher text per AES block operation.
 * aes->reg is the 16-byte shift register; dir selects whether the input
 * byte (decrypt) or the output byte (encrypt) is fed back into it. */
static WARN_UNUSED_RESULT int wc_AesFeedbackCFB8(
Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
{
byte *pt;
int ret = 0;
if (aes == NULL || out == NULL || in == NULL) {
return BAD_FUNC_ARG;
}
if (sz == 0) {
return 0;
}
SAVE_VECTOR_REGISTERS(return _svr_ret;);
while (sz > 0) {
/* generate a keystream block from the current register contents */
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
if (ret != 0)
break;
if (dir == AES_DECRYPTION) {
pt = (byte*)aes->reg;
/* LSB + CAT */
shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
pt[AES_BLOCK_SIZE - 1] = in[0];
}
/* MSB + XOR */
#ifdef BIG_ENDIAN_ORDER
ByteReverseWords(aes->tmp, aes->tmp, AES_BLOCK_SIZE);
#endif
out[0] = (byte)(aes->tmp[0] ^ in[0]);
if (dir == AES_ENCRYPTION) {
pt = (byte*)aes->reg;
/* LSB + CAT */
shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
pt[AES_BLOCK_SIZE - 1] = out[0];
}
out += 1;
in += 1;
sz -= 1;
}
RESTORE_VECTOR_REGISTERS();
return ret;
}
  9266. /* returns 0 on success and negative values on failure */
  9267. static WARN_UNUSED_RESULT int wc_AesFeedbackCFB1(
  9268. Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
  9269. {
  9270. byte tmp;
  9271. byte cur = 0; /* hold current work in order to handle inline in=out */
  9272. byte* pt;
  9273. int bit = 7;
  9274. int ret = 0;
  9275. if (aes == NULL || out == NULL || in == NULL) {
  9276. return BAD_FUNC_ARG;
  9277. }
  9278. if (sz == 0) {
  9279. return 0;
  9280. }
  9281. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  9282. while (sz > 0) {
  9283. ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
  9284. if (ret != 0)
  9285. break;
  9286. if (dir == AES_DECRYPTION) {
  9287. pt = (byte*)aes->reg;
  9288. /* LSB + CAT */
  9289. tmp = (0X01 << bit) & in[0];
  9290. tmp = tmp >> bit;
  9291. tmp &= 0x01;
  9292. shiftLeftArray((byte*)aes->reg, 1);
  9293. pt[AES_BLOCK_SIZE - 1] |= tmp;
  9294. }
  9295. /* MSB + XOR */
  9296. tmp = (0X01 << bit) & in[0];
  9297. pt = (byte*)aes->tmp;
  9298. tmp = (pt[0] >> 7) ^ (tmp >> bit);
  9299. tmp &= 0x01;
  9300. cur |= (tmp << bit);
  9301. if (dir == AES_ENCRYPTION) {
  9302. pt = (byte*)aes->reg;
  9303. /* LSB + CAT */
  9304. shiftLeftArray((byte*)aes->reg, 1);
  9305. pt[AES_BLOCK_SIZE - 1] |= tmp;
  9306. }
  9307. bit--;
  9308. if (bit < 0) {
  9309. out[0] = cur;
  9310. out += 1;
  9311. in += 1;
  9312. sz -= 1;
  9313. bit = 7;
  9314. cur = 0;
  9315. }
  9316. else {
  9317. sz -= 1;
  9318. }
  9319. }
  9320. if (ret == 0) {
  9321. if (bit > 0 && bit < 7) {
  9322. out[0] = cur;
  9323. }
  9324. }
  9325. RESTORE_VECTOR_REGISTERS();
  9326. return ret;
  9327. }
  9328. /* CFB 1
  9329. *
  9330. * aes structure holding key to use for encryption
  9331. * out buffer to hold result of encryption (must be at least as large as input
  9332. * buffer)
  9333. * in buffer to encrypt (packed to left, i.e. 101 is 0x90)
  9334. * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8)
  9335. *
  9336. * returns 0 on success and negative values on failure
  9337. */
  9338. int wc_AesCfb1Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9339. {
  9340. return wc_AesFeedbackCFB1(aes, out, in, sz, AES_ENCRYPTION);
  9341. }
  9342. /* CFB 8
  9343. *
  9344. * aes structure holding key to use for encryption
  9345. * out buffer to hold result of encryption (must be at least as large as input
  9346. * buffer)
  9347. * in buffer to encrypt
  9348. * sz size of input buffer
  9349. *
  9350. * returns 0 on success and negative values on failure
  9351. */
  9352. int wc_AesCfb8Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9353. {
  9354. return wc_AesFeedbackCFB8(aes, out, in, sz, AES_ENCRYPTION);
  9355. }
  9356. #ifdef HAVE_AES_DECRYPT
  9357. /* CFB 1
  9358. *
  9359. * aes structure holding key to use for encryption
  9360. * out buffer to hold result of encryption (must be at least as large as input
  9361. * buffer)
  9362. * in buffer to encrypt
  9363. * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8)
  9364. *
  9365. * returns 0 on success and negative values on failure
  9366. */
  9367. int wc_AesCfb1Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9368. {
  9369. return wc_AesFeedbackCFB1(aes, out, in, sz, AES_DECRYPTION);
  9370. }
  9371. /* CFB 8
  9372. *
  9373. * aes structure holding key to use for encryption
  9374. * out buffer to hold result of encryption (must be at least as large as input
  9375. * buffer)
  9376. * in buffer to encrypt
  9377. * sz size of input buffer
  9378. *
  9379. * returns 0 on success and negative values on failure
  9380. */
  9381. int wc_AesCfb8Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9382. {
  9383. return wc_AesFeedbackCFB8(aes, out, in, sz, AES_DECRYPTION);
  9384. }
  9385. #endif /* HAVE_AES_DECRYPT */
  9386. #endif /* WOLFSSL_AES_CFB */
  9387. #ifdef WOLFSSL_AES_OFB
  9388. /* OFB
  9389. *
  9390. * aes structure holding key to use for encryption
  9391. * out buffer to hold result of encryption (must be at least as large as input
  9392. * buffer)
  9393. * in buffer to encrypt
  9394. * sz size of input buffer
  9395. *
  9396. * returns 0 on success and negative error values on failure
  9397. */
  9398. /* Software AES - CFB Encrypt */
  9399. int wc_AesOfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9400. {
  9401. return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_OFB_MODE);
  9402. }
  9403. #ifdef HAVE_AES_DECRYPT
  9404. /* OFB
  9405. *
  9406. * aes structure holding key to use for decryption
  9407. * out buffer to hold result of decryption (must be at least as large as input
  9408. * buffer)
  9409. * in buffer to decrypt
  9410. * sz size of input buffer
  9411. *
  9412. * returns 0 on success and negative error values on failure
  9413. */
  9414. /* Software AES - OFB Decrypt */
  9415. int wc_AesOfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9416. {
  9417. return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_OFB_MODE);
  9418. }
  9419. #endif /* HAVE_AES_DECRYPT */
  9420. #endif /* WOLFSSL_AES_OFB */
  9421. #ifdef HAVE_AES_KEYWRAP
  9422. /* Initialize key wrap counter with value */
  9423. static WC_INLINE void InitKeyWrapCounter(byte* inOutCtr, word32 value)
  9424. {
  9425. word32 i;
  9426. word32 bytes;
  9427. bytes = sizeof(word32);
  9428. for (i = 0; i < sizeof(word32); i++) {
  9429. inOutCtr[i+sizeof(word32)] = (byte)(value >> ((bytes - 1) * 8));
  9430. bytes--;
  9431. }
  9432. }
  9433. /* Increment key wrap counter */
  9434. static WC_INLINE void IncrementKeyWrapCounter(byte* inOutCtr)
  9435. {
  9436. int i;
  9437. /* in network byte order so start at end and work back */
  9438. for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
  9439. if (++inOutCtr[i]) /* we're done unless we overflow */
  9440. return;
  9441. }
  9442. }
  9443. /* Decrement key wrap counter */
  9444. static WC_INLINE void DecrementKeyWrapCounter(byte* inOutCtr)
  9445. {
  9446. int i;
  9447. for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
  9448. if (--inOutCtr[i] != 0xFF) /* we're done unless we underflow */
  9449. return;
  9450. }
  9451. }
  9452. int wc_AesKeyWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
  9453. word32 outSz, const byte* iv)
  9454. {
  9455. word32 i;
  9456. byte* r;
  9457. int j;
  9458. int ret = 0;
  9459. byte t[KEYWRAP_BLOCK_SIZE];
  9460. byte tmp[AES_BLOCK_SIZE];
  9461. /* n must be at least 2 64-bit blocks, output size is (n + 1) 8 bytes (64-bit) */
  9462. if (aes == NULL || in == NULL || inSz < 2*KEYWRAP_BLOCK_SIZE ||
  9463. out == NULL || outSz < (inSz + KEYWRAP_BLOCK_SIZE))
  9464. return BAD_FUNC_ARG;
  9465. /* input must be multiple of 64-bits */
  9466. if (inSz % KEYWRAP_BLOCK_SIZE != 0)
  9467. return BAD_FUNC_ARG;
  9468. r = out + 8;
  9469. XMEMCPY(r, in, inSz);
  9470. XMEMSET(t, 0, sizeof(t));
  9471. /* user IV is optional */
  9472. if (iv == NULL) {
  9473. XMEMSET(tmp, 0xA6, KEYWRAP_BLOCK_SIZE);
  9474. } else {
  9475. XMEMCPY(tmp, iv, KEYWRAP_BLOCK_SIZE);
  9476. }
  9477. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  9478. for (j = 0; j <= 5; j++) {
  9479. for (i = 1; i <= inSz / KEYWRAP_BLOCK_SIZE; i++) {
  9480. /* load R[i] */
  9481. XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);
  9482. ret = wc_AesEncryptDirect(aes, tmp, tmp);
  9483. if (ret != 0)
  9484. break;
  9485. /* calculate new A */
  9486. IncrementKeyWrapCounter(t);
  9487. xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);
  9488. /* save R[i] */
  9489. XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
  9490. r += KEYWRAP_BLOCK_SIZE;
  9491. }
  9492. if (ret != 0)
  9493. break;
  9494. r = out + KEYWRAP_BLOCK_SIZE;
  9495. }
  9496. RESTORE_VECTOR_REGISTERS();
  9497. if (ret != 0)
  9498. return ret;
  9499. /* C[0] = A */
  9500. XMEMCPY(out, tmp, KEYWRAP_BLOCK_SIZE);
  9501. return (int)(inSz + KEYWRAP_BLOCK_SIZE);
  9502. }
/* perform AES key wrap (RFC3394), return out sz on success, negative on err */
/* One-shot convenience wrapper: builds a temporary Aes context from
 * key/keySz, wraps via wc_AesKeyWrap_ex(), then frees the context. */
int wc_AesKeyWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
byte* out, word32 outSz, const byte* iv)
{
#ifdef WOLFSSL_SMALL_STACK
Aes *aes = NULL;
#else
Aes aes[1];
#endif
int ret;
if (key == NULL)
return BAD_FUNC_ARG;
#ifdef WOLFSSL_SMALL_STACK
if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
DYNAMIC_TYPE_AES)) == NULL)
return MEMORY_E;
#endif
ret = wc_AesInit(aes, NULL, INVALID_DEVID);
if (ret != 0)
goto out;
/* key wrap only uses the AES-encrypt primitive */
ret = wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
if (ret != 0) {
wc_AesFree(aes);
goto out;
}
ret = wc_AesKeyWrap_ex(aes, in, inSz, out, outSz, iv);
wc_AesFree(aes);
out:
#ifdef WOLFSSL_SMALL_STACK
if (aes != NULL)
XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif
return ret;
}
/* AES key unwrap core (RFC 3394 unwrap operation).
 *
 * aes    AES context with the KEK set for decryption
 * in     wrapped data A || R[1..n]; at least 3 semiblocks (8 bytes each)
 * inSz   size of in, must be a multiple of KEYWRAP_BLOCK_SIZE
 * out    receives inSz - KEYWRAP_BLOCK_SIZE bytes of unwrapped data
 * outSz  capacity of out
 * iv     optional expected 8-byte IV; NULL selects the 0xA6 default
 *
 * returns the unwrapped size on success, BAD_KEYWRAP_IV_E when the
 * recovered IV does not match, else a negative error code */
int wc_AesKeyUnWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
word32 outSz, const byte* iv)
{
byte* r;
word32 i, n;
int j;
int ret = 0;
byte t[KEYWRAP_BLOCK_SIZE];
byte tmp[AES_BLOCK_SIZE];
const byte* expIv;
const byte defaultIV[] = {
0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6
};
if (aes == NULL || in == NULL || inSz < 3 * KEYWRAP_BLOCK_SIZE ||
out == NULL || outSz < (inSz - KEYWRAP_BLOCK_SIZE))
return BAD_FUNC_ARG;
/* input must be multiple of 64-bits */
if (inSz % KEYWRAP_BLOCK_SIZE != 0)
return BAD_FUNC_ARG;
/* user IV optional */
if (iv != NULL)
expIv = iv;
else
expIv = defaultIV;
/* A = C[0], R[i] = C[i] */
XMEMCPY(tmp, in, KEYWRAP_BLOCK_SIZE);
XMEMCPY(out, in + KEYWRAP_BLOCK_SIZE, inSz - KEYWRAP_BLOCK_SIZE);
XMEMSET(t, 0, sizeof(t));
SAVE_VECTOR_REGISTERS(return _svr_ret;);
/* initialize counter to 6n */
/* n = number of R semiblocks; equals inSz/8 - 1 since inSz is a
 * multiple of KEYWRAP_BLOCK_SIZE */
n = (inSz - 1) / KEYWRAP_BLOCK_SIZE;
InitKeyWrapCounter(t, 6 * n);
/* run the wrap steps in reverse order */
for (j = 5; j >= 0; j--) {
for (i = n; i >= 1; i--) {
/* calculate A */
xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);
DecrementKeyWrapCounter(t);
/* load R[i], starting at end of R */
r = out + ((i - 1) * KEYWRAP_BLOCK_SIZE);
XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);
ret = wc_AesDecryptDirect(aes, tmp, tmp);
if (ret != 0)
break;
/* save R[i] */
XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
}
if (ret != 0)
break;
}
RESTORE_VECTOR_REGISTERS();
if (ret != 0)
return ret;
/* verify IV */
if (XMEMCMP(tmp, expIv, KEYWRAP_BLOCK_SIZE) != 0)
return BAD_KEYWRAP_IV_E;
return (int)(inSz - KEYWRAP_BLOCK_SIZE);
}
/* AES key unwrap (RFC 3394) one-shot wrapper: builds a temporary Aes
 * context from key/keySz, unwraps via wc_AesKeyUnWrap_ex(), then frees
 * the context. Returns the unwrapped size on success, negative on error. */
int wc_AesKeyUnWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
byte* out, word32 outSz, const byte* iv)
{
#ifdef WOLFSSL_SMALL_STACK
Aes *aes = NULL;
#else
Aes aes[1];
#endif
int ret;
/* NOTE(review): leftover cast - iv IS forwarded to wc_AesKeyUnWrap_ex
 * below, so this suppression is no longer needed */
(void)iv;
if (key == NULL)
return BAD_FUNC_ARG;
#ifdef WOLFSSL_SMALL_STACK
if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
DYNAMIC_TYPE_AES)) == NULL)
return MEMORY_E;
#endif
ret = wc_AesInit(aes, NULL, INVALID_DEVID);
if (ret != 0)
goto out;
/* unwrap uses the AES-decrypt primitive */
ret = wc_AesSetKey(aes, key, keySz, NULL, AES_DECRYPTION);
if (ret != 0) {
wc_AesFree(aes);
goto out;
}
ret = wc_AesKeyUnWrap_ex(aes, in, inSz, out, outSz, iv);
wc_AesFree(aes);
out:
#ifdef WOLFSSL_SMALL_STACK
if (aes)
XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif
return ret;
}
  9628. #endif /* HAVE_AES_KEYWRAP */
  9629. #ifdef WOLFSSL_AES_XTS
  9630. /* Galios Field to use */
  9631. #define GF_XTS 0x87
  9632. /* This is to help with setting keys to correct encrypt or decrypt type.
  9633. *
  9634. * tweak AES key for tweak in XTS
  9635. * aes AES key for encrypt/decrypt process
  9636. * key buffer holding aes key | tweak key
  9637. * len length of key buffer in bytes. Should be twice that of key size. i.e.
  9638. * 32 for a 16 byte key.
  9639. * dir direction, either AES_ENCRYPTION or AES_DECRYPTION
  9640. * heap heap hint to use for memory. Can be NULL
  9641. * devId id to use with async crypto. Can be 0
  9642. *
  9643. * Note: is up to user to call wc_AesFree on tweak and aes key when done.
  9644. *
  9645. * return 0 on success
  9646. */
  9647. int wc_AesXtsSetKey(XtsAes* aes, const byte* key, word32 len, int dir,
  9648. void* heap, int devId)
  9649. {
  9650. word32 keySz;
  9651. int ret = 0;
  9652. if (aes == NULL || key == NULL) {
  9653. return BAD_FUNC_ARG;
  9654. }
  9655. if ((ret = wc_AesInit(&aes->tweak, heap, devId)) != 0) {
  9656. return ret;
  9657. }
  9658. if ((ret = wc_AesInit(&aes->aes, heap, devId)) != 0) {
  9659. return ret;
  9660. }
  9661. keySz = len/2;
  9662. if (keySz != 16 && keySz != 32) {
  9663. WOLFSSL_MSG("Unsupported key size");
  9664. return WC_KEY_SIZE_E;
  9665. }
  9666. if ((ret = wc_AesSetKey(&aes->aes, key, keySz, NULL, dir)) == 0) {
  9667. ret = wc_AesSetKey(&aes->tweak, key + keySz, keySz, NULL,
  9668. AES_ENCRYPTION);
  9669. if (ret != 0) {
  9670. wc_AesFree(&aes->aes);
  9671. }
  9672. }
  9673. return ret;
  9674. }
  9675. /* This is used to free up resources used by Aes structs
  9676. *
  9677. * aes AES keys to free
  9678. *
  9679. * return 0 on success
  9680. */
  9681. int wc_AesXtsFree(XtsAes* aes)
  9682. {
  9683. if (aes != NULL) {
  9684. wc_AesFree(&aes->aes);
  9685. wc_AesFree(&aes->tweak);
  9686. }
  9687. return 0;
  9688. }
  9689. /* Same process as wc_AesXtsEncrypt but uses a word64 type as the tweak value
  9690. * instead of a byte array. This just converts the word64 to a byte array and
  9691. * calls wc_AesXtsEncrypt.
  9692. *
  9693. * aes AES keys to use for block encrypt/decrypt
  9694. * out output buffer to hold cipher text
  9695. * in input plain text buffer to encrypt
  9696. * sz size of both out and in buffers
  9697. * sector value to use for tweak
  9698. *
  9699. * returns 0 on success
  9700. */
  9701. int wc_AesXtsEncryptSector(XtsAes* aes, byte* out, const byte* in,
  9702. word32 sz, word64 sector)
  9703. {
  9704. byte* pt;
  9705. byte i[AES_BLOCK_SIZE];
  9706. XMEMSET(i, 0, AES_BLOCK_SIZE);
  9707. #ifdef BIG_ENDIAN_ORDER
  9708. sector = ByteReverseWord64(sector);
  9709. #endif
  9710. pt = (byte*)&sector;
  9711. XMEMCPY(i, pt, sizeof(word64));
  9712. return wc_AesXtsEncrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
  9713. }
  9714. /* Same process as wc_AesXtsDecrypt but uses a word64 type as the tweak value
  9715. * instead of a byte array. This just converts the word64 to a byte array.
  9716. *
  9717. * aes AES keys to use for block encrypt/decrypt
  9718. * out output buffer to hold plain text
  9719. * in input cipher text buffer to encrypt
  9720. * sz size of both out and in buffers
  9721. * sector value to use for tweak
  9722. *
  9723. * returns 0 on success
  9724. */
  9725. int wc_AesXtsDecryptSector(XtsAes* aes, byte* out, const byte* in, word32 sz,
  9726. word64 sector)
  9727. {
  9728. byte* pt;
  9729. byte i[AES_BLOCK_SIZE];
  9730. XMEMSET(i, 0, AES_BLOCK_SIZE);
  9731. #ifdef BIG_ENDIAN_ORDER
  9732. sector = ByteReverseWord64(sector);
  9733. #endif
  9734. pt = (byte*)&sector;
  9735. XMEMCPY(i, pt, sizeof(word64));
  9736. return wc_AesXtsDecrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
  9737. }
  9738. #ifdef HAVE_AES_ECB
/* helper function for encrypting / decrypting full buffer at once */
/* Requires in != out, and out[0..AES_BLOCK_SIZE-1] to already hold the
 * encrypted tweak T for the first block (caller copies it in). Expands
 * the successive per-block tweaks (T * 2^i in GF(2^128)) into 'out' in
 * place, XORs in the input, then runs one bulk ECB pass; the caller
 * applies the closing tweak XOR per block afterwards. */
static WARN_UNUSED_RESULT int _AesXtsHelper(
Aes* aes, byte* out, const byte* in, word32 sz, int dir)
{
word32 outSz = sz;
word32 totalSz = (sz / AES_BLOCK_SIZE) * AES_BLOCK_SIZE; /* total bytes */
byte* pt = out;
/* first block's tweak is already present; generate the rest */
outSz -= AES_BLOCK_SIZE;
while (outSz > 0) {
word32 j;
byte carry = 0;
/* multiply by shift left and propagate carry */
for (j = 0; j < AES_BLOCK_SIZE && outSz > 0; j++, outSz--) {
byte tmpC;
tmpC = (pt[j] >> 7) & 0x01;
pt[j+AES_BLOCK_SIZE] = (byte)((pt[j] << 1) + carry);
carry = tmpC;
}
if (carry) {
/* reduce with the GF(2^128) feedback polynomial */
pt[AES_BLOCK_SIZE] ^= GF_XTS;
}
pt += AES_BLOCK_SIZE;
}
xorbuf(out, in, totalSz);
if (dir == AES_ENCRYPTION) {
return _AesEcbEncrypt(aes, out, out, totalSz);
}
else {
return _AesEcbDecrypt(aes, out, out, totalSz);
}
}
  9770. #endif /* HAVE_AES_ECB */
/* AES with XTS mode. (XTS) XEX encryption with Tweak and cipher text Stealing.
 *
 * xaes  AES keys to use for block encrypt/decrypt
 * out   output buffer to hold cipher text
 * in    input plain text buffer to encrypt
 * sz    size of both out and in buffers
 * i     value to use for tweak
 * iSz   size of i buffer, should always be AES_BLOCK_SIZE but having this
 *       input adds a sanity check on how the user calls the function.
 *
 * returns 0 on success
 */
/* Software AES - XTS Encrypt */
int wc_AesXtsEncrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i, word32 iSz)
{
    int ret = 0;
    word32 blocks = (sz / AES_BLOCK_SIZE);
    Aes *aes, *tweak;

    if (xaes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    aes   = &xaes->aes;
    tweak = &xaes->tweak;

    if (iSz < AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

    if (blocks > 0) {
        byte tmp[AES_BLOCK_SIZE];

        XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
                                          * key setup passed to encrypt direct*/

        SAVE_VECTOR_REGISTERS(return _svr_ret;);

        /* T = E_K2(i): encrypt the caller's tweak value with the tweak key */
        ret = wc_AesEncryptDirect(tweak, tmp, i);
        if (ret != 0) {
            RESTORE_VECTOR_REGISTERS();
            return ret;
        }

#ifdef HAVE_AES_ECB
        /* encrypt all of buffer at once when possible (fast path writes
         * E_K1(P ^ T) for every full block; the per-block loop below then
         * only applies the final XOR with each tweak) */
        if (in != out) { /* can not handle inline */
            XMEMCPY(out, tmp, AES_BLOCK_SIZE);
            if ((ret = _AesXtsHelper(aes, out, in, sz, AES_ENCRYPTION)) != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
        }
#endif

        while (blocks > 0) {
            word32 j;
            byte carry = 0;

#ifdef HAVE_AES_ECB
            if (in == out)
#endif
            { /* check for if inline: per-block path, stage through 'buf' so
               * the input block is consumed before 'out' is written */
                byte buf[AES_BLOCK_SIZE];

                XMEMCPY(buf, in, AES_BLOCK_SIZE);
                xorbuf(buf, tmp, AES_BLOCK_SIZE);
                ret = wc_AesEncryptDirect(aes, out, buf);
                if (ret != 0) {
                    RESTORE_VECTOR_REGISTERS();
                    return ret;
                }
            }
            /* C = E_K1(P ^ T) ^ T: final XOR with the current tweak */
            xorbuf(out, tmp, AES_BLOCK_SIZE);

            /* multiply by shift left and propagate carry (next tweak) */
            for (j = 0; j < AES_BLOCK_SIZE; j++) {
                byte tmpC;

                tmpC = (tmp[j] >> 7) & 0x01;
                tmp[j] = (byte)((tmp[j] << 1) + carry);
                carry = tmpC;
            }
            if (carry) {
                tmp[0] ^= GF_XTS;
            }

            in  += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
            sz  -= AES_BLOCK_SIZE;
            blocks--;
        }

        /* stealing operation of XTS to handle left overs: the final partial
         * plain text is padded with the tail of the previous cipher block,
         * encrypted into the previous block's slot, and the displaced cipher
         * bytes become the (short) final output block */
        if (sz > 0) {
            byte buf[AES_BLOCK_SIZE];

            XMEMCPY(buf, out - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
            if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
                RESTORE_VECTOR_REGISTERS();
                return BUFFER_E;
            }
            if (in != out) {
                XMEMCPY(out, buf, sz);
                XMEMCPY(buf, in, sz);
            }
            else {
                /* inline case: stage through 'buf2' so the partial plain
                 * text is read before its bytes are overwritten */
                byte buf2[AES_BLOCK_SIZE];

                XMEMCPY(buf2, buf, sz);
                XMEMCPY(buf, in, sz);
                XMEMCPY(out, buf2, sz);
            }

            xorbuf(buf, tmp, AES_BLOCK_SIZE);
            ret = wc_AesEncryptDirect(aes, out - AES_BLOCK_SIZE, buf);
            if (ret == 0)
                xorbuf(out - AES_BLOCK_SIZE, tmp, AES_BLOCK_SIZE);
        }
        RESTORE_VECTOR_REGISTERS();
    }
    else {
        WOLFSSL_MSG("Plain text input too small for encryption");
        return BAD_FUNC_ARG;
    }

    return ret;
}
/* Same process as encryption but Aes key is AES_DECRYPTION type.
 *
 * xaes  AES keys to use for block encrypt/decrypt
 * out   output buffer to hold plain text
 * in    input cipher text buffer to decrypt
 * sz    size of both out and in buffers
 * i     value to use for tweak
 * iSz   size of i buffer, should always be AES_BLOCK_SIZE but having this
 *       input adds a sanity check on how the user calls the function.
 *
 * returns 0 on success
 */
/* Software AES - XTS Decrypt */
int wc_AesXtsDecrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i, word32 iSz)
{
    int ret = 0;
    word32 blocks = (sz / AES_BLOCK_SIZE);
    Aes *aes, *tweak;

    if (xaes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    aes   = &xaes->aes;
    tweak = &xaes->tweak;

    if (iSz < AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

    if (blocks > 0) {
        word32 j;
        byte carry = 0;
        byte tmp[AES_BLOCK_SIZE];
        byte stl = (sz % AES_BLOCK_SIZE); /* size of stolen partial block */

        XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
                                          * key setup passed to decrypt direct*/

        SAVE_VECTOR_REGISTERS(return _svr_ret;);

        /* T = E_K2(i): the tweak is always ENCRYPTED, even when decrypting */
        ret = wc_AesEncryptDirect(tweak, tmp, i);
        if (ret != 0) {
            RESTORE_VECTOR_REGISTERS();
            return ret;
        }

        /* if Stealing then break out of loop one block early to handle special
         * case */
        if (stl > 0) {
            blocks--;
        }

#ifdef HAVE_AES_ECB
        /* decrypt all of buffer at once when possible (fast path leaves
         * D_K1(C ^ T) in 'out'; the loop below applies the final tweak XOR) */
        if (in != out) { /* can not handle inline */
            XMEMCPY(out, tmp, AES_BLOCK_SIZE);
            if ((ret = _AesXtsHelper(aes, out, in, sz, AES_DECRYPTION)) != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
        }
#endif

        while (blocks > 0) {
#ifdef HAVE_AES_ECB
            if (in == out)
#endif
            { /* check for if inline: per-block path */
                byte buf[AES_BLOCK_SIZE];

                XMEMCPY(buf, in, AES_BLOCK_SIZE);
                xorbuf(buf, tmp, AES_BLOCK_SIZE);
                ret = wc_AesDecryptDirect(aes, out, buf);
                if (ret != 0) {
                    RESTORE_VECTOR_REGISTERS();
                    return ret;
                }
            }
            /* P = D_K1(C ^ T) ^ T: final XOR with the current tweak */
            xorbuf(out, tmp, AES_BLOCK_SIZE);

            /* multiply by shift left and propagate carry (next tweak) */
            for (j = 0; j < AES_BLOCK_SIZE; j++) {
                byte tmpC;

                tmpC = (tmp[j] >> 7) & 0x01;
                tmp[j] = (byte)((tmp[j] << 1) + carry);
                carry = tmpC;
            }
            if (carry) {
                tmp[0] ^= GF_XTS;
            }
            carry = 0; /* reset for the stealing section's multiply below */

            in  += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
            sz  -= AES_BLOCK_SIZE;
            blocks--;
        }

        /* stealing operation of XTS to handle left overs: when a partial tail
         * exists, the LAST FULL cipher block must be decrypted with the
         * tweak that comes AFTER the tail's tweak (tmp2 = tmp * x), while the
         * reassembled final block uses tmp itself */
        if (sz >= AES_BLOCK_SIZE) {
            byte buf[AES_BLOCK_SIZE];
            byte tmp2[AES_BLOCK_SIZE];

            /* multiply by shift left and propagate carry: tmp2 = next tweak */
            for (j = 0; j < AES_BLOCK_SIZE; j++) {
                byte tmpC;

                tmpC = (tmp[j] >> 7) & 0x01;
                tmp2[j] = (byte)((tmp[j] << 1) + carry);
                carry = tmpC;
            }
            if (carry) {
                tmp2[0] ^= GF_XTS;
            }

            /* decrypt the last full cipher block with the advanced tweak */
            XMEMCPY(buf, in, AES_BLOCK_SIZE);
            xorbuf(buf, tmp2, AES_BLOCK_SIZE);
            ret = wc_AesDecryptDirect(aes, out, buf);
            if (ret != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
            xorbuf(out, tmp2, AES_BLOCK_SIZE);

            /* tmp2 holds partial | last */
            XMEMCPY(tmp2, out, AES_BLOCK_SIZE);
            in  += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
            sz  -= AES_BLOCK_SIZE;

            /* Make buffer with end of cipher text | last */
            XMEMCPY(buf, tmp2, AES_BLOCK_SIZE);
            if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
                RESTORE_VECTOR_REGISTERS();
                return BUFFER_E;
            }
            XMEMCPY(buf, in, sz);     /* stolen cipher bytes | recovered tail */
            XMEMCPY(out, tmp2, sz);   /* emit the partial plain text block */

            /* decrypt the reassembled block with the tail's own tweak */
            xorbuf(buf, tmp, AES_BLOCK_SIZE);
            ret = wc_AesDecryptDirect(aes, tmp2, buf);
            if (ret != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
            xorbuf(tmp2, tmp, AES_BLOCK_SIZE);
            XMEMCPY(out - AES_BLOCK_SIZE, tmp2, AES_BLOCK_SIZE);
        }
        RESTORE_VECTOR_REGISTERS();
    }
    else {
        WOLFSSL_MSG("Plain text input too small for encryption");
        return BAD_FUNC_ARG;
    }

    return ret;
}
  10019. #endif /* WOLFSSL_AES_XTS */
  10020. #ifdef WOLFSSL_AES_SIV
/*
 * S2V ("string to vector") pseudo-random function for AES-SIV.
 * See RFC 5297 Section 2.4.
 *
 * Chains CMACs over the associated data, nonce, and data: starting from
 * CMAC(zero block), each subsequent input is folded in with the RFC 5297
 * "doubling" step (ShiftAndXorRb) followed by an XOR of its CMAC.  The final
 * data string is handled one of two ways: if it is at least one AES block
 * long, the running value is XORed into its LAST block and the whole string
 * is CMAC'd ("end XOR"); otherwise the data is 10*-padded into a single
 * block which is XORed with the doubled running value and CMAC'd.
 *
 * tmp[0] and tmp[1] are ping-pong buffers for the running CMAC chain;
 * tmp[2] stages the padded short-data block.
 *
 * key      CMAC key (S2V half of the SIV key)
 * keySz    size of key in bytes
 * assoc    associated data
 * assocSz  size of assoc in bytes
 * nonce    optional nonce (skipped when nonceSz == 0)
 * nonceSz  size of nonce in bytes
 * data     plain/cipher text to bind
 * dataSz   size of data in bytes
 * out      receives the AES_BLOCK_SIZE-byte V value
 *
 * NOTE(review): only a single associated-data string is supported here
 * (RFC 5297 allows up to 126) — presumably sufficient for the callers in
 * this file; verify if exposing more widely.
 *
 * returns 0 on success
 */
static WARN_UNUSED_RESULT int S2V(
    const byte* key, word32 keySz, const byte* assoc, word32 assocSz,
    const byte* nonce, word32 nonceSz, const byte* data,
    word32 dataSz, byte* out)
{
#ifdef WOLFSSL_SMALL_STACK
    byte* tmp[3] = {NULL, NULL, NULL};
    int i;
    Cmac* cmac;
#else
    byte tmp[3][AES_BLOCK_SIZE];
    Cmac cmac[1];
#endif
    word32 macSz = AES_BLOCK_SIZE;
    int ret = 0;
    word32 zeroBytes;

#ifdef WOLFSSL_SMALL_STACK
    /* heap-allocate the three scratch blocks when stack is constrained */
    for (i = 0; i < 3; ++i) {
        tmp[i] = (byte*)XMALLOC(AES_BLOCK_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER);
        if (tmp[i] == NULL) {
            ret = MEMORY_E;
            break;
        }
    }
    if (ret == 0)
#endif
    {
        /* D = dbl(CMAC(zero)) XOR CMAC(assoc), per RFC 5297 */
        XMEMSET(tmp[1], 0, AES_BLOCK_SIZE);
        XMEMSET(tmp[2], 0, AES_BLOCK_SIZE);

        ret = wc_AesCmacGenerate(tmp[0], &macSz, tmp[1], AES_BLOCK_SIZE,
                                 key, keySz);
        if (ret == 0) {
            ShiftAndXorRb(tmp[1], tmp[0]);
            ret = wc_AesCmacGenerate(tmp[0], &macSz, assoc, assocSz, key,
                                     keySz);
            if (ret == 0) {
                xorbuf(tmp[1], tmp[0], AES_BLOCK_SIZE);
            }
        }
    }

    if (ret == 0) {
        /* fold in the nonce the same way, when one was supplied */
        if (nonceSz > 0) {
            ShiftAndXorRb(tmp[0], tmp[1]);
            ret = wc_AesCmacGenerate(tmp[1], &macSz, nonce, nonceSz, key,
                                     keySz);
            if (ret == 0) {
                xorbuf(tmp[0], tmp[1], AES_BLOCK_SIZE);
            }
        }
        else {
            XMEMCPY(tmp[0], tmp[1], AES_BLOCK_SIZE);
        }
    }

    if (ret == 0) {
        if (dataSz >= AES_BLOCK_SIZE) {
            /* "end XOR" case: XOR the running value into the last block of
             * data and CMAC the whole string incrementally */
#ifdef WOLFSSL_SMALL_STACK
            cmac = (Cmac*)XMALLOC(sizeof(Cmac), NULL, DYNAMIC_TYPE_CMAC);
            if (cmac == NULL) {
                ret = MEMORY_E;
            }
            if (ret == 0)
#endif
            {
#ifdef WOLFSSL_CHECK_MEM_ZERO
                /* Aes part is checked by wc_AesFree. */
                wc_MemZero_Add("wc_AesCmacGenerate cmac",
                    ((unsigned char *)cmac) + sizeof(Aes),
                    sizeof(Cmac) - sizeof(Aes));
#endif
                xorbuf(tmp[0], data + (dataSz - AES_BLOCK_SIZE),
                       AES_BLOCK_SIZE);
                ret = wc_InitCmac(cmac, key, keySz, WC_CMAC_AES, NULL);
                if (ret == 0) {
                    ret = wc_CmacUpdate(cmac, data, dataSz - AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    ret = wc_CmacUpdate(cmac, tmp[0], AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    ret = wc_CmacFinal(cmac, out, &macSz);
                }
            }
#ifdef WOLFSSL_SMALL_STACK
            if (cmac != NULL) {
                XFREE(cmac, NULL, DYNAMIC_TYPE_CMAC);
            }
#elif defined(WOLFSSL_CHECK_MEM_ZERO)
            wc_MemZero_Check(cmac, sizeof(Cmac));
#endif
        }
        else {
            /* short-data case: 10*-pad data into one block, XOR with the
             * doubled running value, and CMAC the result */
            XMEMCPY(tmp[2], data, dataSz);
            tmp[2][dataSz] |= 0x80;
            zeroBytes = AES_BLOCK_SIZE - (dataSz + 1);
            if (zeroBytes != 0) {
                XMEMSET(tmp[2] + dataSz + 1, 0, zeroBytes);
            }
            ShiftAndXorRb(tmp[1], tmp[0]);
            xorbuf(tmp[1], tmp[2], AES_BLOCK_SIZE);
            ret = wc_AesCmacGenerate(out, &macSz, tmp[1], AES_BLOCK_SIZE, key,
                                     keySz);
        }
    }

#ifdef WOLFSSL_SMALL_STACK
    for (i = 0; i < 3; ++i) {
        if (tmp[i] != NULL) {
            XFREE(tmp[i], NULL, DYNAMIC_TYPE_TMP_BUFFER);
        }
    }
#endif

    return ret;
}
/* Shared worker for AES-SIV encrypt and decrypt (RFC 5297).
 *
 * enc == 1: computes V = S2V(K1, assoc, nonce, data), stores it in 'siv',
 * then CTR-encrypts 'data' into 'out' under the second key half using V
 * (with two bits cleared, see below) as the initial counter.
 * enc == 0: uses the caller-supplied 'siv' as the counter to CTR-decrypt,
 * then recomputes S2V over the plain text and compares it against 'siv'.
 *
 * key     double-length key: first half for S2V/CMAC, second half for CTR
 * keySz   32, 48, or 64 bytes (AES-128/192/256 per half)
 * assoc   associated data authenticated but not encrypted
 * nonce   optional nonce
 * data    input text (plain text when encrypting, cipher text otherwise)
 * dataSz  size of data; a zero size skips the CTR step entirely
 * siv     synthetic IV: output when encrypting, input when decrypting
 * out     output buffer, same length as data
 * enc     1 to encrypt, 0 to decrypt
 *
 * returns 0 on success
 */
static WARN_UNUSED_RESULT int AesSivCipher(
    const byte* key, word32 keySz, const byte* assoc,
    word32 assocSz, const byte* nonce, word32 nonceSz,
    const byte* data, word32 dataSz, byte* siv, byte* out,
    int enc)
{
    int ret = 0;
#ifdef WOLFSSL_SMALL_STACK
    Aes* aes = NULL;
#else
    Aes aes[1];
#endif
    byte sivTmp[AES_BLOCK_SIZE];

    if (key == NULL || siv == NULL || out == NULL) {
        WOLFSSL_MSG("Bad parameter");
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0 && keySz != 32 && keySz != 48 && keySz != 64) {
        WOLFSSL_MSG("Bad key size. Must be 256, 384, or 512 bits.");
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        if (enc == 1) {
            /* derive the SIV from the inputs and hand it back to the caller */
            ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, data,
                      dataSz, sivTmp);
            if (ret != 0) {
                WOLFSSL_MSG("S2V failed.");
            }
            else {
                XMEMCPY(siv, sivTmp, AES_BLOCK_SIZE);
            }
        }
        else {
            XMEMCPY(sivTmp, siv, AES_BLOCK_SIZE);
        }
    }

#ifdef WOLFSSL_SMALL_STACK
    if (ret == 0) {
        aes = (Aes*)XMALLOC(sizeof(Aes), NULL, DYNAMIC_TYPE_AES);
        if (aes == NULL) {
            ret = MEMORY_E;
        }
    }
#endif

    if (ret == 0) {
        ret = wc_AesInit(aes, NULL, INVALID_DEVID);
        if (ret != 0) {
            WOLFSSL_MSG("Failed to initialized AES object.");
        }
    }

    if (ret == 0 && dataSz > 0) {
        /* Q = V with the top bit of bytes 8 and 12 cleared (RFC 5297
         * Section 2.6) so 32-bit counter additions cannot carry across
         * word boundaries */
        sivTmp[12] &= 0x7f;
        sivTmp[8] &= 0x7f;
        ret = wc_AesSetKey(aes, key + keySz / 2, keySz / 2, sivTmp,
                           AES_ENCRYPTION);
        if (ret != 0) {
            WOLFSSL_MSG("Failed to set key for AES-CTR.");
        }
        else {
            ret = wc_AesCtrEncrypt(aes, out, data, dataSz);
            if (ret != 0) {
                WOLFSSL_MSG("AES-CTR encryption failed.");
            }
        }
    }

    if (ret == 0 && enc == 0) {
        /* decrypt path: recompute S2V over the recovered plain text and
         * authenticate it against the received SIV.
         * NOTE(review): XMEMCMP is not constant-time — consider a
         * constant-time compare for the tag check; verify against project
         * policy. */
        ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, out, dataSz,
                  sivTmp);
        if (ret != 0) {
            WOLFSSL_MSG("S2V failed.");
        }

        if (XMEMCMP(siv, sivTmp, AES_BLOCK_SIZE) != 0) {
            WOLFSSL_MSG("Computed SIV doesn't match received SIV.");
            ret = AES_SIV_AUTH_E;
        }
    }

    wc_AesFree(aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}
  10218. /*
  10219. * See RFC 5297 Section 2.6.
  10220. */
  10221. int wc_AesSivEncrypt(const byte* key, word32 keySz, const byte* assoc,
  10222. word32 assocSz, const byte* nonce, word32 nonceSz,
  10223. const byte* in, word32 inSz, byte* siv, byte* out)
  10224. {
  10225. return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
  10226. siv, out, 1);
  10227. }
  10228. /*
  10229. * See RFC 5297 Section 2.7.
  10230. */
  10231. int wc_AesSivDecrypt(const byte* key, word32 keySz, const byte* assoc,
  10232. word32 assocSz, const byte* nonce, word32 nonceSz,
  10233. const byte* in, word32 inSz, byte* siv, byte* out)
  10234. {
  10235. return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
  10236. siv, out, 0);
  10237. }
  10238. #endif /* WOLFSSL_AES_SIV */
  10239. #endif /* HAVE_FIPS */
  10240. #endif /* !NO_AES */