  1. /* lkcapi_glue.c -- glue logic to register wolfCrypt implementations with
  2. * the Linux Kernel Cryptosystem
  3. *
  4. * Copyright (C) 2006-2024 wolfSSL Inc.
  5. *
  6. * This file is part of wolfSSL.
  7. *
  8. * wolfSSL is free software; you can redistribute it and/or modify
  9. * it under the terms of the GNU General Public License as published by
  10. * the Free Software Foundation; either version 2 of the License, or
  11. * (at your option) any later version.
  12. *
  13. * wolfSSL is distributed in the hope that it will be useful,
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16. * GNU General Public License for more details.
  17. *
  18. * You should have received a copy of the GNU General Public License
  19. * along with this program; if not, write to the Free Software
  20. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
  21. */
  22. /* included by linuxkm/module_hooks.c */
  23. #ifndef LINUXKM_LKCAPI_REGISTER
  24. #error lkcapi_glue.c included in non-LINUXKM_LKCAPI_REGISTER project.
  25. #endif
  26. #ifndef WOLFSSL_LINUXKM_LKCAPI_PRIORITY
  27. /* Larger number means higher priority. The highest in-tree priority is 4001,
  28. * in the Cavium driver.
  29. */
  30. #define WOLFSSL_LINUXKM_LKCAPI_PRIORITY 10000
  31. #endif
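/* Illustrative note, not from the original source: because of the #ifndef
 * guard above, the priority can be overridden at build time (for example via a
 * -DWOLFSSL_LINUXKM_LKCAPI_PRIORITY=<n> compiler flag) without editing this
 * file. A guard like the hypothetical one below could catch an override that
 * accidentally drops the priority to or below the kernel's generic C
 * implementations, which register with cra_priority 100.
 */
#if 0
#if WOLFSSL_LINUXKM_LKCAPI_PRIORITY <= 100
#error WOLFSSL_LINUXKM_LKCAPI_PRIORITY does not outrank the generic implementations.
#endif
#endif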
  32. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  33. static int disable_setkey_warnings = 0;
  34. #else
  35. #define disable_setkey_warnings 0
  36. #endif
  37. #ifndef NO_AES
  38. /* note the FIPS code will be returned on failure even in non-FIPS builds. */
  39. #define LINUXKM_LKCAPI_AES_KAT_MISMATCH_E AES_KAT_FIPS_E
  40. #define LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E AESGCM_KAT_FIPS_E
  41. #define WOLFKM_AESCBC_NAME "cbc(aes)"
  42. #define WOLFKM_AESCFB_NAME "cfb(aes)"
  43. #define WOLFKM_AESGCM_NAME "gcm(aes)"
  44. #define WOLFKM_AESXTS_NAME "xts(aes)"
  45. #ifdef WOLFSSL_AESNI
  46. #define WOLFKM_DRIVER_ISA_EXT "-aesni"
  47. #else
  48. #define WOLFKM_DRIVER_ISA_EXT ""
  49. #endif
  50. #ifdef HAVE_FIPS
  51. #ifndef HAVE_FIPS_VERSION
  52. #define WOLFKM_DRIVER_FIPS "-fips-140"
  53. #elif HAVE_FIPS_VERSION >= 5
  54. #define WOLFKM_DRIVER_FIPS "-fips-140-3"
  55. #elif HAVE_FIPS_VERSION == 2
  56. #define WOLFKM_DRIVER_FIPS "-fips-140-2"
  57. #else
  58. #define WOLFKM_DRIVER_FIPS "-fips-140"
  59. #endif
  60. #else
  61. #define WOLFKM_DRIVER_FIPS ""
  62. #endif
  63. #define WOLFKM_DRIVER_SUFFIX \
  64. WOLFKM_DRIVER_ISA_EXT WOLFKM_DRIVER_FIPS "-wolfcrypt"
  65. #define WOLFKM_AESCBC_DRIVER ("cbc-aes" WOLFKM_DRIVER_SUFFIX)
  66. #define WOLFKM_AESCFB_DRIVER ("cfb-aes" WOLFKM_DRIVER_SUFFIX)
  67. #define WOLFKM_AESGCM_DRIVER ("gcm-aes" WOLFKM_DRIVER_SUFFIX)
  68. #define WOLFKM_AESXTS_DRIVER ("xts-aes" WOLFKM_DRIVER_SUFFIX)
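/* Worked expansion of the driver-name macros above, shown for illustration
 * under two representative configurations:
 *
 *   AESNI build against FIPS 140-3 (HAVE_FIPS_VERSION >= 5):
 *     WOLFKM_AESCBC_DRIVER -> "cbc-aes-aesni-fips-140-3-wolfcrypt"
 *     WOLFKM_AESXTS_DRIVER -> "xts-aes-aesni-fips-140-3-wolfcrypt"
 *
 *   plain build (no AESNI, no FIPS):
 *     WOLFKM_AESCBC_DRIVER -> "cbc-aes-wolfcrypt"
 *     WOLFKM_AESGCM_DRIVER -> "gcm-aes-wolfcrypt"
 */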
  69. #if defined(HAVE_AES_CBC) && \
  70. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  71. defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
  72. #ifndef WOLFSSL_EXPERIMENTAL_SETTINGS
  73. #error Experimental settings without WOLFSSL_EXPERIMENTAL_SETTINGS
  74. #endif
  75. static int linuxkm_test_aescbc(void);
  76. #endif
  77. #if defined(WOLFSSL_AES_CFB) && \
  78. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  79. defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
  80. #ifndef WOLFSSL_EXPERIMENTAL_SETTINGS
  81. #error Experimental settings without WOLFSSL_EXPERIMENTAL_SETTINGS
  82. #endif
  83. static int linuxkm_test_aescfb(void);
  84. #endif
  85. #if defined(HAVE_AESGCM) && \
  86. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  87. defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
  88. #ifndef WOLFSSL_EXPERIMENTAL_SETTINGS
  89. #error Experimental settings without WOLFSSL_EXPERIMENTAL_SETTINGS
  90. #endif
  91. static int linuxkm_test_aesgcm(void);
  92. #endif
  93. #if defined(WOLFSSL_AES_XTS) && \
  94. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  95. defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
  96. static int linuxkm_test_aesxts(void);
  97. #endif
  98. /* km_AesX(): wrappers to wolfcrypt wc_AesX functions and
  99. * structures. */
  100. #include <wolfssl/wolfcrypt/aes.h>
  101. struct km_AesCtx {
  102. Aes *aes_encrypt; /* allocated in km_AesInitCommon() to assure
  103. * alignment, needed for AESNI.
  104. */
  105. Aes *aes_decrypt; /* same. */
  106. };
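/* Usage note, derived from the mode-specific init functions below: CBC
 * allocates both aes_encrypt and aes_decrypt (need_decryption == 1), while CFB
 * and GCM pass need_decryption == 0 and leave aes_decrypt NULL, because both
 * directions of those modes use only the encryption key schedule.
 */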
  107. #if defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  108. defined(LINUXKM_LKCAPI_REGISTER_AESCBC) || \
  109. defined(LINUXKM_LKCAPI_REGISTER_AESCFB) || \
  110. defined(LINUXKM_LKCAPI_REGISTER_AESGCM)
  111. static void km_AesExitCommon(struct km_AesCtx * ctx);
  112. static int km_AesInitCommon(
  113. struct km_AesCtx * ctx,
  114. const char * name,
  115. int need_decryption)
  116. {
  117. int err;
  118. ctx->aes_encrypt = (Aes *)malloc(sizeof(*ctx->aes_encrypt));
  119. if (! ctx->aes_encrypt) {
  120. pr_err("%s: allocation of %zu bytes for encryption key failed.\n",
  121. name, sizeof(*ctx->aes_encrypt));
  122. return MEMORY_E;
  123. }
  124. err = wc_AesInit(ctx->aes_encrypt, NULL, INVALID_DEVID);
  125. if (unlikely(err)) {
  126. pr_err("%s: wc_AesInit failed: %d\n", name, err);
  127. free(ctx->aes_encrypt);
  128. ctx->aes_encrypt = NULL;
  129. return -EINVAL;
  130. }
  131. if (! need_decryption) {
  132. ctx->aes_decrypt = NULL;
  133. return 0;
  134. }
  135. ctx->aes_decrypt = (Aes *)malloc(sizeof(*ctx->aes_decrypt));
  136. if (! ctx->aes_decrypt) {
  137. pr_err("%s: allocation of %zu bytes for decryption key failed.\n",
  138. name, sizeof(*ctx->aes_decrypt));
  139. km_AesExitCommon(ctx);
  140. return MEMORY_E;
  141. }
  142. err = wc_AesInit(ctx->aes_decrypt, NULL, INVALID_DEVID);
  143. if (unlikely(err)) {
  144. pr_err("%s: wc_AesInit failed: %d\n", name, err);
  145. free(ctx->aes_decrypt);
  146. ctx->aes_decrypt = NULL;
  147. km_AesExitCommon(ctx);
  148. return -EINVAL;
  149. }
  150. return 0;
  151. }
  152. static void km_AesExitCommon(struct km_AesCtx * ctx)
  153. {
  154. if (ctx->aes_encrypt) {
  155. wc_AesFree(ctx->aes_encrypt);
  156. free(ctx->aes_encrypt);
  157. ctx->aes_encrypt = NULL;
  158. }
  159. if (ctx->aes_decrypt) {
  160. wc_AesFree(ctx->aes_decrypt);
  161. free(ctx->aes_decrypt);
  162. ctx->aes_decrypt = NULL;
  163. }
  164. }
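/* Lifecycle note: km_AesInitCommon() and km_AesExitCommon() are paired through
 * the .init/.exit hooks of the algorithms registered below, so the setkey and
 * encrypt/decrypt paths can assume the Aes objects exist; the NULL checks above
 * also make km_AesExitCommon() safe on a partially initialized context.
 */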
  165. #if defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  166. defined(LINUXKM_LKCAPI_REGISTER_AESCBC) || \
  167. defined(LINUXKM_LKCAPI_REGISTER_AESCFB)
  168. static int km_AesSetKeyCommon(struct km_AesCtx * ctx, const u8 *in_key,
  169. unsigned int key_len, const char * name)
  170. {
  171. int err;
  172. err = wc_AesSetKey(ctx->aes_encrypt, in_key, key_len, NULL, AES_ENCRYPTION);
  173. if (unlikely(err)) {
  174. if (! disable_setkey_warnings)
  175. pr_err("%s: wc_AesSetKey for encryption key failed: %d\n", name, err);
  176. return -ENOKEY;
  177. }
  178. if (ctx->aes_decrypt) {
  179. err = wc_AesSetKey(ctx->aes_decrypt, in_key, key_len, NULL,
  180. AES_DECRYPTION);
  181. if (unlikely(err)) {
  182. if (! disable_setkey_warnings)
  183. pr_err("%s: wc_AesSetKey for decryption key failed: %d\n",
  184. name, err);
  185. return -ENOKEY;
  186. }
  187. }
  188. return 0;
  189. }
  190. static void km_AesExit(struct crypto_skcipher *tfm)
  191. {
  192. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  193. km_AesExitCommon(ctx);
  194. }
  195. #endif /* LINUXKM_LKCAPI_REGISTER_ALL ||
  196. * LINUXKM_LKCAPI_REGISTER_AESCBC ||
  197. * LINUXKM_LKCAPI_REGISTER_AESCFB
  198. */
  199. #endif /* LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC ||
  200. * LINUXKM_LKCAPI_REGISTER_AESCFB || LINUXKM_LKCAPI_REGISTER_AESGCM
  201. */
  202. #if defined(HAVE_AES_CBC) && \
  203. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  204. defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
  205. static int km_AesCbcInit(struct crypto_skcipher *tfm)
  206. {
  207. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  208. return km_AesInitCommon(ctx, WOLFKM_AESCBC_DRIVER, 1);
  209. }
  210. static int km_AesCbcSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
  211. unsigned int key_len)
  212. {
  213. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  214. return km_AesSetKeyCommon(ctx, in_key, key_len, WOLFKM_AESCBC_DRIVER);
  215. }
  216. static int km_AesCbcEncrypt(struct skcipher_request *req)
  217. {
  218. struct crypto_skcipher * tfm = NULL;
  219. struct km_AesCtx * ctx = NULL;
  220. struct skcipher_walk walk;
  221. unsigned int nbytes = 0;
  222. int err = 0;
  223. tfm = crypto_skcipher_reqtfm(req);
  224. ctx = crypto_skcipher_ctx(tfm);
  225. err = skcipher_walk_virt(&walk, req, false);
  226. if (unlikely(err)) {
  227. pr_err("%s: skcipher_walk_virt failed: %d\n",
  228. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  229. return err;
  230. }
  231. while ((nbytes = walk.nbytes) != 0) {
  232. err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
  233. if (unlikely(err)) {
  234. pr_err("%s: wc_AesSetIV failed: %d\n",
  235. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  236. return -EINVAL;
  237. }
  238. err = wc_AesCbcEncrypt(ctx->aes_encrypt, walk.dst.virt.addr,
  239. walk.src.virt.addr, nbytes);
  240. if (unlikely(err)) {
  241. pr_err("%s: wc_AesCbcEncrypt failed: %d\n",
  242. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  243. return -EINVAL;
  244. }
  245. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  246. if (unlikely(err)) {
  247. pr_err("%s: skcipher_walk_done failed: %d\n",
  248. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  249. return err;
  250. }
  251. }
  252. return err;
  253. }
  254. static int km_AesCbcDecrypt(struct skcipher_request *req)
  255. {
  256. struct crypto_skcipher * tfm = NULL;
  257. struct km_AesCtx * ctx = NULL;
  258. struct skcipher_walk walk;
  259. unsigned int nbytes = 0;
  260. int err = 0;
  261. tfm = crypto_skcipher_reqtfm(req);
  262. ctx = crypto_skcipher_ctx(tfm);
  263. err = skcipher_walk_virt(&walk, req, false);
  264. if (unlikely(err)) {
  265. pr_err("%s: skcipher_walk_virt failed: %d\n",
  266. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  267. return err;
  268. }
  269. while ((nbytes = walk.nbytes) != 0) {
  270. err = wc_AesSetIV(ctx->aes_decrypt, walk.iv);
  271. if (unlikely(err)) {
  272. if (! disable_setkey_warnings)
  273. pr_err("%s: wc_AesSetIV failed: %d\n",
  274. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  275. return -EINVAL;
  276. }
  277. err = wc_AesCbcDecrypt(ctx->aes_decrypt, walk.dst.virt.addr,
  278. walk.src.virt.addr, nbytes);
  279. if (unlikely(err)) {
  280. pr_err("%s: wc_AesCbcDecrypt failed: %d\n",
  281. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  282. return -EINVAL;
  283. }
  284. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  285. if (unlikely(err)) {
  286. pr_err("%s: skcipher_walk_done failed: %d\n",
  287. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  288. return err;
  289. }
  290. }
  291. return err;
  292. }
  293. static struct skcipher_alg cbcAesAlg = {
  294. .base.cra_name = WOLFKM_AESCBC_NAME,
  295. .base.cra_driver_name = WOLFKM_AESCBC_DRIVER,
  296. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  297. .base.cra_blocksize = AES_BLOCK_SIZE,
  298. .base.cra_ctxsize = sizeof(struct km_AesCtx),
  299. .base.cra_module = THIS_MODULE,
  300. .init = km_AesCbcInit,
  301. .exit = km_AesExit,
  302. .min_keysize = AES_128_KEY_SIZE,
  303. .max_keysize = AES_256_KEY_SIZE,
  304. .ivsize = AES_BLOCK_SIZE,
  305. .setkey = km_AesCbcSetKey,
  306. .encrypt = km_AesCbcEncrypt,
  307. .decrypt = km_AesCbcDecrypt,
  308. };
  309. static int cbcAesAlg_loaded = 0;
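/* Minimal registration sketch, for illustration only: the real registration
 * and teardown logic lives elsewhere in this file (outside this excerpt), and
 * the helper names below are hypothetical. crypto_register_skcipher() and
 * crypto_unregister_skcipher() are the standard kernel entry points, and
 * cbcAesAlg_loaded records whether teardown is needed at module exit.
 */
#if 0
static int sketch_register_aescbc(void)
{
    int ret = crypto_register_skcipher(&cbcAesAlg);
    if (ret) {
        pr_err("%s: crypto_register_skcipher failed: %d\n",
               WOLFKM_AESCBC_DRIVER, ret);
        return ret;
    }
    cbcAesAlg_loaded = 1;
    /* run the known-answer/round-trip self-test defined later in this file. */
    return linuxkm_test_aescbc();
}

static void sketch_unregister_aescbc(void)
{
    if (cbcAesAlg_loaded) {
        crypto_unregister_skcipher(&cbcAesAlg);
        cbcAesAlg_loaded = 0;
    }
}
#endif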
  310. #endif /* HAVE_AES_CBC &&
  311. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC)
  312. */
  313. #if defined(WOLFSSL_AES_CFB) && \
  314. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  315. defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
  316. static int km_AesCfbInit(struct crypto_skcipher *tfm)
  317. {
  318. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  319. return km_AesInitCommon(ctx, WOLFKM_AESCFB_DRIVER, 0);
  320. }
  321. static int km_AesCfbSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
  322. unsigned int key_len)
  323. {
  324. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  325. return km_AesSetKeyCommon(ctx, in_key, key_len, WOLFKM_AESCFB_DRIVER);
  326. }
  327. static int km_AesCfbEncrypt(struct skcipher_request *req)
  328. {
  329. struct crypto_skcipher * tfm = NULL;
  330. struct km_AesCtx * ctx = NULL;
  331. struct skcipher_walk walk;
  332. unsigned int nbytes = 0;
  333. int err = 0;
  334. tfm = crypto_skcipher_reqtfm(req);
  335. ctx = crypto_skcipher_ctx(tfm);
  336. err = skcipher_walk_virt(&walk, req, false);
  337. if (unlikely(err)) {
  338. pr_err("%s: skcipher_walk_virt failed: %d\n",
  339. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  340. return err;
  341. }
  342. while ((nbytes = walk.nbytes) != 0) {
  343. err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
  344. if (unlikely(err)) {
  345. if (! disable_setkey_warnings)
  346. pr_err("%s: wc_AesSetIV failed: %d\n",
  347. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  348. return -EINVAL;
  349. }
  350. err = wc_AesCfbEncrypt(ctx->aes_encrypt, walk.dst.virt.addr,
  351. walk.src.virt.addr, nbytes);
  352. if (unlikely(err)) {
  353. pr_err("%s: wc_AesCfbEncrypt failed: %d\n",
  354. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  355. return -EINVAL;
  356. }
  357. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  358. if (unlikely(err)) {
  359. pr_err("%s: skcipher_walk_done failed: %d\n",
  360. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  361. return err;
  362. }
  363. }
  364. return err;
  365. }
  366. static int km_AesCfbDecrypt(struct skcipher_request *req)
  367. {
  368. struct crypto_skcipher * tfm = NULL;
  369. struct km_AesCtx * ctx = NULL;
  370. struct skcipher_walk walk;
  371. unsigned int nbytes = 0;
  372. int err = 0;
  373. tfm = crypto_skcipher_reqtfm(req);
  374. ctx = crypto_skcipher_ctx(tfm);
  375. err = skcipher_walk_virt(&walk, req, false);
  376. if (unlikely(err)) {
  377. pr_err("%s: skcipher_walk_virt failed: %d\n",
  378. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  379. return err;
  380. }
  381. while ((nbytes = walk.nbytes) != 0) {
  382. err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
  383. if (unlikely(err)) {
  384. if (! disable_setkey_warnings)
  385. pr_err("%s: wc_AesSetIV failed: %d\n",
  386. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  387. return -EINVAL;
  388. }
  389. err = wc_AesCfbDecrypt(ctx->aes_encrypt, walk.dst.virt.addr,
  390. walk.src.virt.addr, nbytes);
  391. if (unlikely(err)) {
  392. pr_err("%s: wc_AesCfbDecrypt failed: %d\n",
  393. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  394. return -EINVAL;
  395. }
  396. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  397. if (unlikely(err)) {
  398. pr_err("%s: skcipher_walk_done failed: %d\n",
  399. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  400. return err;
  401. }
  402. }
  403. return err;
  404. }
  405. static struct skcipher_alg cfbAesAlg = {
  406. .base.cra_name = WOLFKM_AESCFB_NAME,
  407. .base.cra_driver_name = WOLFKM_AESCFB_DRIVER,
  408. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  409. .base.cra_blocksize = AES_BLOCK_SIZE,
  410. .base.cra_ctxsize = sizeof(struct km_AesCtx),
  411. .base.cra_module = THIS_MODULE,
  412. .init = km_AesCfbInit,
  413. .exit = km_AesExit,
  414. .min_keysize = AES_128_KEY_SIZE,
  415. .max_keysize = AES_256_KEY_SIZE,
  416. .ivsize = AES_BLOCK_SIZE,
  417. .setkey = km_AesCfbSetKey,
  418. .encrypt = km_AesCfbEncrypt,
  419. .decrypt = km_AesCfbDecrypt,
  420. };
  421. static int cfbAesAlg_loaded = 0;
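/* For illustration (an approximation; exact fields and their order vary by
 * kernel version): once cfbAesAlg is registered, /proc/crypto should show an
 * entry roughly like the following for a plain non-FIPS, non-AESNI build:
 *
 *   name         : cfb(aes)
 *   driver       : cfb-aes-wolfcrypt
 *   priority     : 10000
 *   type         : skcipher
 *   blocksize    : 16
 *   min keysize  : 16
 *   max keysize  : 32
 *   ivsize       : 16
 */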
  422. #endif /* WOLFSSL_AES_CFB &&
  423. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCFB)
  424. */
  425. #if defined(HAVE_AESGCM) && \
  426. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  427. defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
  428. #ifndef WOLFSSL_AESGCM_STREAM
  429. #error LKCAPI registration of AES-GCM requires WOLFSSL_AESGCM_STREAM (--enable-aesgcm-stream).
  430. #endif
  431. static int km_AesGcmInit(struct crypto_aead * tfm)
  432. {
  433. struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
  434. return km_AesInitCommon(ctx, WOLFKM_AESGCM_DRIVER, 0);
  435. }
  436. static void km_AesGcmExit(struct crypto_aead * tfm)
  437. {
  438. struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
  439. km_AesExitCommon(ctx);
  440. }
  441. static int km_AesGcmSetKey(struct crypto_aead *tfm, const u8 *in_key,
  442. unsigned int key_len)
  443. {
  444. int err;
  445. struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
  446. err = wc_AesGcmSetKey(ctx->aes_encrypt, in_key, key_len);
  447. if (unlikely(err)) {
  448. if (! disable_setkey_warnings)
  449. pr_err("%s: wc_AesGcmSetKey failed: %d\n",
  450. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  451. return -ENOKEY;
  452. }
  453. return 0;
  454. }
  455. static int km_AesGcmSetAuthsize(struct crypto_aead *tfm, unsigned int authsize)
  456. {
  457. (void)tfm;
  458. if (authsize > AES_BLOCK_SIZE ||
  459. authsize < WOLFSSL_MIN_AUTH_TAG_SZ) {
  460. pr_err("%s: invalid authsize: %d\n",
  461. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), authsize);
  462. return -EINVAL;
  463. }
  464. return 0;
  465. }
  466. /*
  467. * aead ciphers receive data in scatterlists in the following order:
  468. * encrypt
  469. * req->src: aad||plaintext
  470. * req->dst: aad||ciphertext||tag
  471. * decrypt
  472. * req->src: aad||ciphertext||tag
  473. * req->dst: aad||plaintext, return 0 or -EBADMSG
  474. */
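/* Illustrative in-kernel caller sketch for the layout described above. This is
 * an assumption for illustration, not part of this file's build; the helper
 * name and buffer parameters are hypothetical. src == dst here: assoclen bytes
 * of AAD followed by the payload, with 16 spare bytes after the payload for
 * the tag on encrypt. The IV must be crypto_aead_ivsize(tfm) bytes, which this
 * driver registers as AES_BLOCK_SIZE.
 */
#if 0
static int sketch_gcm_encrypt_once(const u8 *key, unsigned int keylen,
                                   u8 *iv, u8 *aad, unsigned int aadlen,
                                   u8 *payload, unsigned int ptlen)
{
    struct crypto_aead *tfm;
    struct aead_request *req = NULL;
    struct scatterlist sg[2];
    DECLARE_CRYPTO_WAIT(wait);
    int ret;

    tfm = crypto_alloc_aead(WOLFKM_AESGCM_NAME, 0, 0);
    if (IS_ERR(tfm))
        return PTR_ERR(tfm);
    ret = crypto_aead_setkey(tfm, key, keylen);
    if (ret)
        goto out;
    ret = crypto_aead_setauthsize(tfm, 16);
    if (ret)
        goto out;
    req = aead_request_alloc(tfm, GFP_KERNEL);
    if (! req) {
        ret = -ENOMEM;
        goto out;
    }
    sg_init_table(sg, 2);
    sg_set_buf(&sg[0], aad, aadlen);
    sg_set_buf(&sg[1], payload, ptlen + 16); /* room for the 16-byte tag */
    aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                              crypto_req_done, &wait);
    aead_request_set_ad(req, aadlen);
    aead_request_set_crypt(req, sg, sg, ptlen, iv);
    ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);
out:
    aead_request_free(req);
    crypto_free_aead(tfm);
    return ret;
}
#endif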
  475. static int km_AesGcmEncrypt(struct aead_request *req)
  476. {
  477. struct crypto_aead * tfm = NULL;
  478. struct km_AesCtx * ctx = NULL;
  479. struct skcipher_walk walk;
  480. struct scatter_walk assocSgWalk;
  481. unsigned int nbytes = 0;
  482. u8 authTag[AES_BLOCK_SIZE];
  483. int err = 0;
  484. unsigned int assocLeft = 0;
  485. unsigned int cryptLeft = 0;
  486. u8 * assoc = NULL;
  487. tfm = crypto_aead_reqtfm(req);
  488. ctx = crypto_aead_ctx(tfm);
  489. assocLeft = req->assoclen;
  490. cryptLeft = req->cryptlen;
  491. scatterwalk_start(&assocSgWalk, req->src);
  492. err = skcipher_walk_aead_encrypt(&walk, req, false);
  493. if (unlikely(err)) {
  494. pr_err("%s: skcipher_walk_aead_encrypt failed: %d\n",
  495. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  496. return err;
  497. }
  498. err = wc_AesGcmInit(ctx->aes_encrypt, NULL /*key*/, 0 /*keylen*/, walk.iv,
  499. AES_BLOCK_SIZE);
  500. if (unlikely(err)) {
  501. pr_err("%s: wc_AesGcmInit failed: %d\n",
  502. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  503. return -EINVAL;
  504. }
  505. assoc = scatterwalk_map(&assocSgWalk);
  506. if (unlikely(IS_ERR(assoc))) {
  507. pr_err("%s: scatterwalk_map failed: %ld\n",
  508. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)),
  509. PTR_ERR(assoc));
  510. return -EINVAL;
  511. }
  512. err = wc_AesGcmEncryptUpdate(ctx->aes_encrypt, NULL, NULL, 0,
  513. assoc, assocLeft);
  514. assocLeft -= assocLeft;
  515. scatterwalk_unmap(assoc);
  516. assoc = NULL;
  517. if (unlikely(err)) {
  518. pr_err("%s: wc_AesGcmEncryptUpdate failed: %d\n",
  519. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  520. return -EINVAL;
  521. }
  522. while ((nbytes = walk.nbytes) != 0) {
  523. int n = nbytes;
  524. if (likely(cryptLeft && nbytes)) {
  525. n = cryptLeft < nbytes ? cryptLeft : nbytes;
  526. err = wc_AesGcmEncryptUpdate(
  527. ctx->aes_encrypt,
  528. walk.dst.virt.addr,
  529. walk.src.virt.addr,
  530. n,
  531. NULL, 0);
  532. nbytes -= n;
  533. cryptLeft -= n;
  534. }
  535. if (unlikely(err)) {
  536. pr_err("%s: wc_AesGcmEncryptUpdate failed: %d\n",
  537. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  538. return -EINVAL;
  539. }
  540. err = skcipher_walk_done(&walk, nbytes);
  541. if (unlikely(err)) {
  542. pr_err("%s: skcipher_walk_done failed: %d\n",
  543. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  544. return err;
  545. }
  546. }
  547. err = wc_AesGcmEncryptFinal(ctx->aes_encrypt, authTag, tfm->authsize);
  548. if (unlikely(err)) {
  549. pr_err("%s: wc_AesGcmEncryptFinal failed with return code %d\n",
  550. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  551. return -EINVAL;
  552. }
  553. /* Now copy the auth tag into request scatterlist. */
  554. scatterwalk_map_and_copy(authTag, req->dst,
  555. req->assoclen + req->cryptlen,
  556. tfm->authsize, 1);
  557. return err;
  558. }
  559. static int km_AesGcmDecrypt(struct aead_request *req)
  560. {
  561. struct crypto_aead * tfm = NULL;
  562. struct km_AesCtx * ctx = NULL;
  563. struct skcipher_walk walk;
  564. struct scatter_walk assocSgWalk;
  565. unsigned int nbytes = 0;
  566. u8 origAuthTag[AES_BLOCK_SIZE];
  567. int err = 0;
  568. unsigned int assocLeft = 0;
  569. unsigned int cryptLeft = 0;
  570. u8 * assoc = NULL;
  571. tfm = crypto_aead_reqtfm(req);
  572. ctx = crypto_aead_ctx(tfm);
  573. assocLeft = req->assoclen;
  574. cryptLeft = req->cryptlen - tfm->authsize;
  575. /* Copy out original auth tag from req->src. */
  576. scatterwalk_map_and_copy(origAuthTag, req->src,
  577. req->assoclen + req->cryptlen - tfm->authsize,
  578. tfm->authsize, 0);
  579. scatterwalk_start(&assocSgWalk, req->src);
  580. err = skcipher_walk_aead_decrypt(&walk, req, false);
  581. if (unlikely(err)) {
  582. pr_err("%s: skcipher_walk_aead_decrypt failed: %d\n",
  583. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  584. return err;
  585. }
  586. err = wc_AesGcmInit(ctx->aes_encrypt, NULL /*key*/, 0 /*keylen*/, walk.iv,
  587. AES_BLOCK_SIZE);
  588. if (unlikely(err)) {
  589. pr_err("%s: wc_AesGcmInit failed: %d\n",
  590. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  591. return -EINVAL;
  592. }
  593. assoc = scatterwalk_map(&assocSgWalk);
  594. if (unlikely(IS_ERR(assoc))) {
  595. pr_err("%s: scatterwalk_map failed: %ld\n",
  596. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)),
  597. PTR_ERR(assoc));
  599. return -EINVAL;
  599. }
  600. err = wc_AesGcmDecryptUpdate(ctx->aes_encrypt, NULL, NULL, 0,
  601. assoc, assocLeft);
  602. assocLeft -= assocLeft;
  603. scatterwalk_unmap(assoc);
  604. assoc = NULL;
  605. if (unlikely(err)) {
  606. pr_err("%s: wc_AesGcmDecryptUpdate failed: %d\n",
  607. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  608. return -EINVAL;
  609. }
  610. while ((nbytes = walk.nbytes) != 0) {
  611. int n = nbytes;
  612. if (likely(cryptLeft && nbytes)) {
  613. n = cryptLeft < nbytes ? cryptLeft : nbytes;
  614. err = wc_AesGcmDecryptUpdate(
  615. ctx->aes_encrypt,
  616. walk.dst.virt.addr,
  617. walk.src.virt.addr,
  618. n,
  619. NULL, 0);
  620. nbytes -= n;
  621. cryptLeft -= n;
  622. }
  623. if (unlikely(err)) {
  624. pr_err("%s: wc_AesGcmDecryptUpdate failed: %d\n",
  625. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  626. return -EINVAL;
  627. }
  628. err = skcipher_walk_done(&walk, nbytes);
  629. if (unlikely(err)) {
  630. pr_err("%s: skcipher_walk_done failed: %d\n",
  631. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  632. return err;
  633. }
  634. }
  635. err = wc_AesGcmDecryptFinal(ctx->aes_encrypt, origAuthTag, tfm->authsize);
  636. if (unlikely(err)) {
  637. pr_err("%s: wc_AesGcmDecryptFinal failed with return code %d\n",
  638. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  639. if (err == AES_GCM_AUTH_E) {
  640. return -EBADMSG;
  641. }
  642. else {
  643. return -EINVAL;
  644. }
  645. }
  646. return err;
  647. }
  648. static struct aead_alg gcmAesAead = {
  649. .base.cra_name = WOLFKM_AESGCM_NAME,
  650. .base.cra_driver_name = WOLFKM_AESGCM_DRIVER,
  651. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  652. .base.cra_blocksize = 1,
  653. .base.cra_ctxsize = sizeof(struct km_AesCtx),
  654. .base.cra_module = THIS_MODULE,
  655. .init = km_AesGcmInit,
  656. .exit = km_AesGcmExit,
  657. .setkey = km_AesGcmSetKey,
  658. .setauthsize = km_AesGcmSetAuthsize,
  659. .encrypt = km_AesGcmEncrypt,
  660. .decrypt = km_AesGcmDecrypt,
  661. .ivsize = AES_BLOCK_SIZE,
  662. .maxauthsize = AES_BLOCK_SIZE,
  663. .chunksize = AES_BLOCK_SIZE,
  664. };
  665. static int gcmAesAead_loaded = 0;
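/* As with the skcipher algorithms, registration of gcmAesAead presumably
 * happens elsewhere in this file via crypto_register_aead() and
 * crypto_unregister_aead(); the sketch below is illustrative only and the
 * helper name is hypothetical.
 */
#if 0
static int sketch_register_aesgcm(void)
{
    int ret = crypto_register_aead(&gcmAesAead);
    if (ret == 0) {
        gcmAesAead_loaded = 1;
        ret = linuxkm_test_aesgcm();
    }
    return ret;
}
#endif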
  666. #endif /* HAVE_AESGCM &&
  667. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESGCM) &&
  668. */
  669. #if defined(WOLFSSL_AES_XTS) && \
  670. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  671. defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
  672. #ifndef WOLFSSL_AESXTS_STREAM
  673. #error LKCAPI registration of AES-XTS requires WOLFSSL_AESXTS_STREAM (--enable-aesxts-stream).
  674. #endif
  675. struct km_AesXtsCtx {
  676. XtsAes *aesXts; /* allocated in km_AesXtsInitCommon() to assure alignment
  677. * for AESNI.
  678. */
  679. };
  680. static int km_AesXtsInitCommon(struct km_AesXtsCtx * ctx, const char * name)
  681. {
  682. int err;
  683. ctx->aesXts = (XtsAes *)malloc(sizeof(*ctx->aesXts));
  684. if (! ctx->aesXts)
  685. return -MEMORY_E;
  686. err = wc_AesXtsInit(ctx->aesXts, NULL, INVALID_DEVID);
  687. if (unlikely(err)) {
  688. pr_err("%s: wc_AesXtsInit failed: %d\n", name, err);
  689. free(ctx->aesXts); ctx->aesXts = NULL; return -EINVAL;
  690. }
  691. return 0;
  692. }
  693. static int km_AesXtsInit(struct crypto_skcipher *tfm)
  694. {
  695. struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
  696. return km_AesXtsInitCommon(ctx, WOLFKM_AESXTS_DRIVER);
  697. }
  698. static void km_AesXtsExit(struct crypto_skcipher *tfm)
  699. {
  700. struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
  701. wc_AesXtsFree(ctx->aesXts);
  702. free(ctx->aesXts);
  703. ctx->aesXts = NULL;
  704. }
  705. static int km_AesXtsSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
  706. unsigned int key_len)
  707. {
  708. int err;
  709. struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
  710. err = wc_AesXtsSetKeyNoInit(ctx->aesXts, in_key, key_len,
  711. AES_ENCRYPTION_AND_DECRYPTION);
  712. if (unlikely(err)) {
  713. if (! disable_setkey_warnings)
  714. pr_err("%s: wc_AesXtsSetKeyNoInit failed: %d\n",
  715. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  716. return -EINVAL;
  717. }
  718. return 0;
  719. }
  720. /* see /usr/src/linux/drivers/md/dm-crypt.c */
  721. static int km_AesXtsEncrypt(struct skcipher_request *req)
  722. {
  723. int err = 0;
  724. struct crypto_skcipher * tfm = NULL;
  725. struct km_AesXtsCtx * ctx = NULL;
  726. struct skcipher_walk walk;
  727. unsigned int nbytes = 0;
  728. tfm = crypto_skcipher_reqtfm(req);
  729. ctx = crypto_skcipher_ctx(tfm);
  730. if (req->cryptlen < AES_BLOCK_SIZE)
  731. return -EINVAL;
  732. err = skcipher_walk_virt(&walk, req, false);
  733. if (unlikely(err)) {
  734. pr_err("%s: skcipher_walk_virt failed: %d\n",
  735. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  736. return err;
  737. }
  738. if (walk.nbytes == walk.total) {
  739. err = wc_AesXtsEncrypt(ctx->aesXts, walk.dst.virt.addr,
  740. walk.src.virt.addr, walk.nbytes, walk.iv, walk.ivsize);
  741. if (unlikely(err)) {
  742. pr_err("%s: wc_AesXtsEncrypt failed: %d\n",
  743. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  744. return -EINVAL;
  745. }
  746. err = skcipher_walk_done(&walk, 0);
  747. } else {
  748. int tail = req->cryptlen % AES_BLOCK_SIZE;
  749. struct skcipher_request subreq;
  750. struct XtsAesStreamData stream;
  751. if (tail > 0) {
  752. int blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
  753. skcipher_walk_abort(&walk);
  754. skcipher_request_set_tfm(&subreq, tfm);
  755. skcipher_request_set_callback(&subreq,
  756. skcipher_request_flags(req),
  757. NULL, NULL);
  758. skcipher_request_set_crypt(&subreq, req->src, req->dst,
  759. blocks * AES_BLOCK_SIZE, req->iv);
  760. req = &subreq;
  761. err = skcipher_walk_virt(&walk, req, false);
  762. if (!walk.nbytes)
  763. return err ? : -EINVAL;
  764. } else {
  765. tail = 0;
  766. }
  767. err = wc_AesXtsEncryptInit(ctx->aesXts, walk.iv, walk.ivsize, &stream);
  768. if (unlikely(err)) {
  769. pr_err("%s: wc_AesXtsEncryptInit failed: %d\n",
  770. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  771. return -EINVAL;
  772. }
  773. while ((nbytes = walk.nbytes) != 0) {
  774. /* if this isn't the final call, pass block-aligned data to prevent
  775. * end-of-message ciphertext stealing.
  776. */
  777. if (nbytes < walk.total)
  778. nbytes &= ~(AES_BLOCK_SIZE - 1);
  779. if (nbytes & ((unsigned int)AES_BLOCK_SIZE - 1U))
  780. err = wc_AesXtsEncryptFinal(ctx->aesXts, walk.dst.virt.addr,
  781. walk.src.virt.addr, nbytes,
  782. &stream);
  783. else
  784. err = wc_AesXtsEncryptUpdate(ctx->aesXts, walk.dst.virt.addr,
  785. walk.src.virt.addr, nbytes,
  786. &stream);
  787. if (unlikely(err)) {
  788. pr_err("%s: wc_AesXtsEncryptUpdate failed: %d\n",
  789. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  790. return -EINVAL;
  791. }
  792. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  793. if (unlikely(err)) {
  794. pr_err("%s: skcipher_walk_done failed: %d\n",
  795. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  796. return err;
  797. }
  798. }
  799. if (unlikely(tail > 0)) {
  800. struct scatterlist sg_src[2], sg_dst[2];
  801. struct scatterlist *src, *dst;
  802. dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
  803. if (req->dst != req->src)
  804. dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
  805. skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
  806. req->iv);
  807. err = skcipher_walk_virt(&walk, &subreq, false);
  808. if (err)
  809. return err;
  810. err = wc_AesXtsEncryptFinal(ctx->aesXts, walk.dst.virt.addr,
  811. walk.src.virt.addr, walk.nbytes,
  812. &stream);
  813. if (unlikely(err)) {
  814. pr_err("%s: wc_AesXtsEncryptFinal failed: %d\n",
  815. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  816. return -EINVAL;
  817. }
  818. err = skcipher_walk_done(&walk, 0);
  819. } else if (! (stream.bytes_crypted_with_this_tweak & ((word32)AES_BLOCK_SIZE - 1U))) {
  820. err = wc_AesXtsEncryptFinal(ctx->aesXts, NULL, NULL, 0, &stream);
  821. }
  822. }
  823. return err;
  824. }
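/* Worked example of the tail handling above (mirrored in the decrypt path
 * below), for a hypothetical req->cryptlen of 37 bytes with AES_BLOCK_SIZE 16:
 * tail = 37 % 16 = 5 and blocks = DIV_ROUND_UP(37, 16) - 2 = 1, so the
 * subrequest walks the first 1 * 16 = 16 bytes through wc_AesXtsEncryptUpdate(),
 * and the final scatterwalk_ffwd()/wc_AesXtsEncryptFinal() pass processes the
 * remaining AES_BLOCK_SIZE + tail = 21 bytes, which is where the end-of-message
 * ciphertext stealing happens; 16 + 21 = 37 covers the whole request.
 */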
  825. static int km_AesXtsDecrypt(struct skcipher_request *req)
  826. {
  827. int err = 0;
  828. struct crypto_skcipher * tfm = NULL;
  829. struct km_AesXtsCtx * ctx = NULL;
  830. struct skcipher_walk walk;
  831. unsigned int nbytes = 0;
  832. tfm = crypto_skcipher_reqtfm(req);
  833. ctx = crypto_skcipher_ctx(tfm);
  834. if (req->cryptlen < AES_BLOCK_SIZE)
  835. return -EINVAL;
  836. err = skcipher_walk_virt(&walk, req, false);
  837. if (unlikely(err)) {
  838. pr_err("%s: skcipher_walk_virt failed: %d\n",
  839. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  840. return err;
  841. }
  842. if (walk.nbytes == walk.total) {
  843. err = wc_AesXtsDecrypt(ctx->aesXts,
  844. walk.dst.virt.addr, walk.src.virt.addr,
  845. walk.nbytes, walk.iv, walk.ivsize);
  846. if (unlikely(err)) {
  847. pr_err("%s: wc_AesXtsDecrypt failed: %d\n",
  848. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  849. return -EINVAL;
  850. }
  851. err = skcipher_walk_done(&walk, 0);
  852. } else {
  853. int tail = req->cryptlen % AES_BLOCK_SIZE;
  854. struct skcipher_request subreq;
  855. struct XtsAesStreamData stream;
  856. if (unlikely(tail > 0)) {
  857. int blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
  858. skcipher_walk_abort(&walk);
  859. skcipher_request_set_tfm(&subreq, tfm);
  860. skcipher_request_set_callback(&subreq,
  861. skcipher_request_flags(req),
  862. NULL, NULL);
  863. skcipher_request_set_crypt(&subreq, req->src, req->dst,
  864. blocks * AES_BLOCK_SIZE, req->iv);
  865. req = &subreq;
  866. err = skcipher_walk_virt(&walk, req, false);
  867. if (!walk.nbytes)
  868. return err ? : -EINVAL;
  869. } else {
  870. tail = 0;
  871. }
  872. err = wc_AesXtsDecryptInit(ctx->aesXts, walk.iv, walk.ivsize, &stream);
  873. if (unlikely(err)) {
  874. pr_err("%s: wc_AesXtsDecryptInit failed: %d\n",
  875. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  876. return -EINVAL;
  877. }
  878. while ((nbytes = walk.nbytes) != 0) {
  879. /* if this isn't the final call, pass block-aligned data to prevent
  880. * end-of-message ciphertext stealing.
  881. */
  882. if (nbytes < walk.total)
  883. nbytes &= ~(AES_BLOCK_SIZE - 1);
  884. if (nbytes & ((unsigned int)AES_BLOCK_SIZE - 1U))
  885. err = wc_AesXtsDecryptFinal(ctx->aesXts, walk.dst.virt.addr,
  886. walk.src.virt.addr, nbytes,
  887. &stream);
  888. else
  889. err = wc_AesXtsDecryptUpdate(ctx->aesXts, walk.dst.virt.addr,
  890. walk.src.virt.addr, nbytes,
  891. &stream);
  892. if (unlikely(err)) {
  893. pr_err("%s: wc_AesXtsDecryptUpdate failed: %d\n",
  894. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  895. return -EINVAL;
  896. }
  897. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  898. if (unlikely(err)) {
  899. pr_err("%s: skcipher_walk_done failed: %d\n",
  900. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  901. return err;
  902. }
  903. }
  904. if (unlikely(tail > 0)) {
  905. struct scatterlist sg_src[2], sg_dst[2];
  906. struct scatterlist *src, *dst;
  907. dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
  908. if (req->dst != req->src)
  909. dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
  910. skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
  911. req->iv);
  912. err = skcipher_walk_virt(&walk, &subreq, false);
  913. if (err)
  914. return err;
  915. err = wc_AesXtsDecryptFinal(ctx->aesXts, walk.dst.virt.addr,
  916. walk.src.virt.addr, walk.nbytes,
  917. &stream);
  918. if (unlikely(err)) {
  919. pr_err("%s: wc_AesXtsDecryptFinal failed: %d\n",
  920. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  921. return -EINVAL;
  922. }
  923. err = skcipher_walk_done(&walk, 0);
  924. } else if (! (stream.bytes_crypted_with_this_tweak & ((word32)AES_BLOCK_SIZE - 1U))) {
  925. err = wc_AesXtsDecryptFinal(ctx->aesXts, NULL, NULL, 0, &stream);
  926. }
  927. }
  928. return err;
  929. }
  930. static struct skcipher_alg xtsAesAlg = {
  931. .base.cra_name = WOLFKM_AESXTS_NAME,
  932. .base.cra_driver_name = WOLFKM_AESXTS_DRIVER,
  933. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  934. .base.cra_blocksize = AES_BLOCK_SIZE,
  935. .base.cra_ctxsize = sizeof(struct km_AesXtsCtx),
  936. .base.cra_module = THIS_MODULE,
  937. .min_keysize = 2 * AES_128_KEY_SIZE,
  938. .max_keysize = 2 * AES_256_KEY_SIZE,
  939. .ivsize = AES_BLOCK_SIZE,
  940. .walksize = 2 * AES_BLOCK_SIZE,
  941. .init = km_AesXtsInit,
  942. .exit = km_AesXtsExit,
  943. .setkey = km_AesXtsSetKey,
  944. .encrypt = km_AesXtsEncrypt,
  945. .decrypt = km_AesXtsDecrypt
  946. };
  947. static int xtsAesAlg_loaded = 0;
  948. #endif /* WOLFSSL_AES_XTS &&
  949. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESXTS)
  950. */
  951. /* cipher tests, cribbed from test.c, with supplementary LKCAPI tests: */
  952. #if defined(HAVE_AES_CBC) && \
  953. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  954. defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
  955. static int linuxkm_test_aescbc(void)
  956. {
  957. int ret = 0;
  958. struct crypto_skcipher * tfm = NULL;
  959. struct skcipher_request * req = NULL;
  960. struct scatterlist src, dst;
  961. Aes *aes;
  962. int aes_inited = 0;
  963. static const byte key32[] =
  964. {
  965. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  966. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
  967. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  968. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
  969. };
  970. static const byte p_vector[] =
  971. /* Now is the time for all good men w/o trailing 0 */
  972. {
  973. 0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
  974. 0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
  975. 0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20,
  976. 0x67,0x6f,0x6f,0x64,0x20,0x6d,0x65,0x6e
  977. };
  978. static const byte iv[] = "1234567890abcdef";
  979. static const byte c_vector[] =
  980. {
  981. 0xd7,0xd6,0x04,0x5b,0x4d,0xc4,0x90,0xdf,
  982. 0x4a,0x82,0xed,0x61,0x26,0x4e,0x23,0xb3,
  983. 0xe4,0xb5,0x85,0x30,0x29,0x4c,0x9d,0xcf,
  984. 0x73,0xc9,0x46,0xd1,0xaa,0xc8,0xcb,0x62
  985. };
  986. byte iv_copy[sizeof(iv)];
  987. byte enc[sizeof(p_vector)];
  988. byte dec[sizeof(p_vector)];
  989. u8 * enc2 = NULL;
  990. u8 * dec2 = NULL;
  991. aes = (Aes *)malloc(sizeof(*aes));
  992. if (aes == NULL)
  993. return -ENOMEM;
  994. XMEMSET(enc, 0, sizeof(enc));
  995. XMEMSET(dec, 0, sizeof(dec));
  996. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  997. if (ret) {
  998. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  999. goto test_cbc_end;
  1000. }
  1001. aes_inited = 1;
  1002. ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
  1003. if (ret) {
  1004. pr_err("wolfcrypt wc_AesSetKey failed with return code %d\n", ret);
  1005. goto test_cbc_end;
  1006. }
  1007. ret = wc_AesCbcEncrypt(aes, enc, p_vector, sizeof(p_vector));
  1008. if (ret) {
  1009. pr_err("wolfcrypt wc_AesCbcEncrypt failed with return code %d\n", ret);
  1010. goto test_cbc_end;
  1011. }
  1012. if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
  1013. pr_err("wolfcrypt wc_AesCbcEncrypt KAT mismatch\n");
  1014. ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E; goto test_cbc_end;
  1015. }
  1016. /* Re init for decrypt and set flag. */
  1017. wc_AesFree(aes);
  1018. aes_inited = 0;
  1019. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  1020. if (ret) {
  1021. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  1022. goto test_cbc_end;
  1023. }
  1024. aes_inited = 1;
  1025. ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_DECRYPTION);
  1026. if (ret) {
  1027. pr_err("wolfcrypt wc_AesSetKey failed with return code %d.\n", ret);
  1028. goto test_cbc_end;
  1029. }
  1030. ret = wc_AesCbcDecrypt(aes, dec, enc, sizeof(p_vector));
  1031. if (ret) {
  1032. pr_err("wolfcrypt wc_AesCbcDecrypt failed with return code %d\n", ret);
  1033. goto test_cbc_end;
  1034. }
  1035. ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
  1036. if (ret) {
  1037. pr_err("error: p_vector and dec do not match: %d\n", ret);
  1038. goto test_cbc_end;
  1039. }
  1040. /* now the kernel crypto part */
  1041. enc2 = malloc(sizeof(p_vector));
  1042. if (!enc2) {
  1043. pr_err("error: malloc failed\n");
  1044. goto test_cbc_end;
  1045. }
  1046. dec2 = malloc(sizeof(p_vector));
  1047. if (!dec2) {
  1048. pr_err("error: malloc failed\n");
  1049. goto test_cbc_end;
  1050. }
  1051. memcpy(dec2, p_vector, sizeof(p_vector));
  1052. tfm = crypto_alloc_skcipher(WOLFKM_AESCBC_NAME, 0, 0);
  1053. if (IS_ERR(tfm)) {
  1054. pr_err("error: allocating AES skcipher algorithm %s failed: %ld\n",
  1055. WOLFKM_AESCBC_DRIVER, PTR_ERR(tfm));
  1056. goto test_cbc_end;
  1057. }
  1058. #ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
  1059. {
  1060. const char *driver_name =
  1061. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
  1062. if (strcmp(driver_name, WOLFKM_AESCBC_DRIVER)) {
  1063. pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
  1064. WOLFKM_AESCBC_NAME, driver_name, WOLFKM_AESCBC_DRIVER);
  1065. ret = -ENOENT;
  1066. goto test_cbc_end;
  1067. }
  1068. }
  1069. #endif
  1070. ret = crypto_skcipher_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
  1071. if (ret) {
  1072. pr_err("error: crypto_skcipher_setkey returned: %d\n", ret);
  1073. goto test_cbc_end;
  1074. }
  1075. req = skcipher_request_alloc(tfm, GFP_KERNEL);
  1076. if (IS_ERR(req)) {
  1077. pr_err("error: allocating AES skcipher request %s failed\n",
  1078. WOLFKM_AESCBC_DRIVER);
  1079. goto test_cbc_end;
  1080. }
  1081. sg_init_one(&src, dec2, sizeof(p_vector));
  1082. sg_init_one(&dst, enc2, sizeof(p_vector));
  1083. XMEMCPY(iv_copy, iv, sizeof(iv));
  1084. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  1085. ret = crypto_skcipher_encrypt(req);
  1086. if (ret) {
  1087. pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
  1088. goto test_cbc_end;
  1089. }
  1090. ret = XMEMCMP(enc, enc2, sizeof(p_vector));
  1091. if (ret) {
  1092. pr_err("error: enc and enc2 do not match: %d\n", ret);
  1093. goto test_cbc_end;
  1094. }
  1095. memset(dec2, 0, sizeof(p_vector));
  1096. sg_init_one(&src, enc2, sizeof(p_vector));
  1097. sg_init_one(&dst, dec2, sizeof(p_vector));
  1098. XMEMCPY(iv_copy, iv, sizeof(iv));
  1099. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  1100. ret = crypto_skcipher_decrypt(req);
  1101. if (ret) {
  1102. pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
  1103. goto test_cbc_end;
  1104. }
  1105. ret = XMEMCMP(dec, dec2, sizeof(p_vector));
  1106. if (ret) {
  1107. pr_err("error: dec and dec2 do not match: %d\n", ret);
  1108. goto test_cbc_end;
  1109. }
  1110. test_cbc_end:
  1111. if (enc2) { free(enc2); }
  1112. if (dec2) { free(dec2); }
  1113. if (req) { skcipher_request_free(req); }
  1114. if (tfm) { crypto_free_skcipher(tfm); }
  1115. if (aes_inited)
  1116. wc_AesFree(aes);
  1117. free(aes);
  1118. return ret;
  1119. }
  1120. #endif /* HAVE_AES_CBC &&
  1121. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC)
  1122. */
  1123. #if defined(WOLFSSL_AES_CFB) && \
  1124. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  1125. defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
  1126. static int linuxkm_test_aescfb(void)
  1127. {
  1128. int ret = 0;
  1129. struct crypto_skcipher * tfm = NULL;
  1130. struct skcipher_request * req = NULL;
  1131. struct scatterlist src, dst;
  1132. Aes *aes;
  1133. int aes_inited = 0;
  1134. static const byte key32[] =
  1135. {
  1136. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  1137. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
  1138. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  1139. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
  1140. };
  1141. static const byte p_vector[] =
  1142. /* Now is the time for all good men w/o trailing 0 */
  1143. {
  1144. 0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
  1145. 0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
  1146. 0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20,
  1147. 0x67,0x6f,0x6f,0x64,0x20,0x6d,0x65,0x6e
  1148. };
  1149. static const byte iv[] = "1234567890abcdef";
  1150. static const byte c_vector[] =
  1151. {
  1152. 0x56,0x35,0x3f,0xdd,0xde,0xa6,0x15,0x87,
  1153. 0x57,0xdc,0x34,0x62,0x9a,0x68,0x96,0x51,
  1154. 0xc7,0x09,0xb9,0x4e,0x47,0x6b,0x24,0x72,
  1155. 0x19,0x5a,0xdf,0x7e,0xba,0xa8,0x01,0xb6
  1156. };
  1157. byte iv_copy[sizeof(iv)];
  1158. byte enc[sizeof(p_vector)];
  1159. byte dec[sizeof(p_vector)];
  1160. u8 * enc2 = NULL;
  1161. u8 * dec2 = NULL;
  1162. aes = (Aes *)malloc(sizeof(*aes));
  1163. if (aes == NULL)
  1164. return -ENOMEM;
  1165. XMEMSET(enc, 0, sizeof(enc));
  1166. XMEMSET(dec, 0, sizeof(dec));
  1167. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  1168. if (ret) {
  1169. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  1170. goto test_cfb_end;
  1171. }
  1172. aes_inited = 1;
  1173. ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
  1174. if (ret) {
  1175. pr_err("wolfcrypt wc_AesSetKey failed with return code %d\n", ret);
  1176. goto test_cfb_end;
  1177. }
  1178. ret = wc_AesCfbEncrypt(aes, enc, p_vector, sizeof(p_vector));
  1179. if (ret) {
  1180. pr_err("wolfcrypt wc_AesCfbEncrypt failed with return code %d\n", ret);
  1181. goto test_cfb_end;
  1182. }
  1183. if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
  1184. pr_err("wolfcrypt wc_AesCfbEncrypt KAT mismatch\n");
  1185. ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E; goto test_cfb_end;
  1186. }
  1187. /* Re init for decrypt and set flag. */
  1188. wc_AesFree(aes);
  1189. aes_inited = 0;
  1190. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  1191. if (ret) {
  1192. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  1193. goto test_cfb_end;
  1194. }
  1195. aes_inited = 1;
  1196. ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
  1197. if (ret) {
  1198. pr_err("wolfcrypt wc_AesSetKey failed with return code %d.\n", ret);
  1199. goto test_cfb_end;
  1200. }
  1201. ret = wc_AesCfbDecrypt(aes, dec, enc, sizeof(p_vector));
  1202. if (ret) {
  1203. pr_err("wolfcrypt wc_AesCfbDecrypt failed with return code %d\n", ret);
  1204. goto test_cfb_end;
  1205. }
  1206. ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
  1207. if (ret) {
  1208. pr_err("error: p_vector and dec do not match: %d\n", ret);
  1209. goto test_cfb_end;
  1210. }
  1211. /* now the kernel crypto part */
    enc2 = malloc(sizeof(p_vector));
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_cfb_end;
    }

    dec2 = malloc(sizeof(p_vector));
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_cfb_end;
    }

    memcpy(dec2, p_vector, sizeof(p_vector));

    tfm = crypto_alloc_skcipher(WOLFKM_AESCFB_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        pr_err("error: allocating AES skcipher algorithm %s failed: %ld\n",
               WOLFKM_AESCFB_DRIVER, PTR_ERR(tfm));
        ret = PTR_ERR(tfm);
        goto test_cfb_end;
    }
#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESCFB_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESCFB_NAME, driver_name, WOLFKM_AESCFB_DRIVER);
            ret = -ENOENT;
            goto test_cfb_end;
        }
    }
#endif

    ret = crypto_skcipher_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
    if (ret) {
        pr_err("error: crypto_skcipher_setkey returned: %d\n", ret);
        goto test_cfb_end;
    }
    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (!req) {
        /* skcipher_request_alloc() returns NULL, not an ERR_PTR, on failure. */
        pr_err("error: allocating AES skcipher request %s failed\n",
               WOLFKM_AESCFB_DRIVER);
        ret = -ENOMEM;
        goto test_cfb_end;
    }
    sg_init_one(&src, dec2, sizeof(p_vector));
    sg_init_one(&dst, enc2, sizeof(p_vector));

    XMEMCPY(iv_copy, iv, sizeof(iv));
    skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_cfb_end;
    }

    ret = XMEMCMP(enc, enc2, sizeof(p_vector));
    if (ret) {
        pr_err("error: enc and enc2 do not match: %d\n", ret);
        goto test_cfb_end;
    }

    memset(dec2, 0, sizeof(p_vector));
    sg_init_one(&src, enc2, sizeof(p_vector));
    sg_init_one(&dst, dec2, sizeof(p_vector));

    XMEMCPY(iv_copy, iv, sizeof(iv));
    skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_decrypt returned: %d\n", ret);
        goto test_cfb_end;
    }

    ret = XMEMCMP(dec, dec2, sizeof(p_vector));
    if (ret) {
        pr_err("error: dec and dec2 do not match: %d\n", ret);
        goto test_cfb_end;
    }

test_cfb_end:

    if (enc2) { free(enc2); }
    if (dec2) { free(dec2); }
    if (req) { skcipher_request_free(req); }
    if (tfm) { crypto_free_skcipher(tfm); }

    if (aes_inited)
        wc_AesFree(aes);
    free(aes);

    return ret;
}
#endif /* WOLFSSL_AES_CFB &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCFB)
        */

#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
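
/* Self-test for the registered AES-GCM AEAD: a streaming known-answer test
 * against wolfCrypt's wc_AesGcmEncryptUpdate()/Final() and
 * wc_AesGcmDecryptUpdate()/Final(), followed by a one-shot encrypt/decrypt
 * through the kernel crypto AEAD API with the AAD and payload carried in a
 * two-entry scatterlist and the tag appended to the ciphertext.
 */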
static int linuxkm_test_aesgcm(void)
{
    int ret = 0;
    struct crypto_aead * tfm = NULL;
    struct aead_request * req = NULL;
    struct scatterlist * src = NULL;
    struct scatterlist * dst = NULL;
    Aes *aes;
    int aes_inited = 0;
    static const byte key32[] =
    {
        0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
        0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
        0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
        0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
    };
    static const byte p_vector[] =
    /* Now is the time for all w/o trailing 0 */
    {
        0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
        0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
        0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20
    };
    static const byte assoc[] =
    {
        0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
        0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
        0xab, 0xad, 0xda, 0xd2
    };
    static const byte ivstr[] = "1234567890abcdef";
    static const byte c_vector[] =
    {
        0x0c,0x97,0x05,0x3c,0xef,0x5c,0x63,0x6b,
        0x15,0xe4,0x00,0x63,0xf8,0x8c,0xd0,0x95,
        0x27,0x81,0x90,0x9c,0x9f,0xe6,0x98,0xe9
    };
    static const byte KAT_authTag[] =
    {
        0xc9,0xd5,0x7a,0x77,0xac,0x28,0xc2,0xe7,
        0xe4,0x28,0x90,0xaa,0x09,0xab,0xf9,0x7c
    };
    byte enc[sizeof(p_vector)];
    byte authTag[AES_BLOCK_SIZE];
    byte dec[sizeof(p_vector)];
    u8 * assoc2 = NULL;
    u8 * enc2 = NULL;
    u8 * dec2 = NULL;
    u8 * iv = NULL;
    size_t encryptLen = sizeof(p_vector);
    size_t decryptLen = sizeof(p_vector) + sizeof(authTag);

    /* Init stack variables. */
    XMEMSET(enc, 0, sizeof(p_vector));
    XMEMSET(dec, 0, sizeof(p_vector));
    XMEMSET(authTag, 0, AES_BLOCK_SIZE);

    aes = (Aes *)malloc(sizeof(*aes));
    if (aes == NULL)
        return -ENOMEM;

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret) {
        pr_err("error: wc_AesInit failed with return code %d.\n", ret);
        goto test_gcm_end;
    }
    aes_inited = 1;

    ret = wc_AesGcmInit(aes, key32, sizeof(key32)/sizeof(byte), ivstr,
                        AES_BLOCK_SIZE);
    if (ret) {
        pr_err("error: wc_AesGcmInit failed with return code %d.\n", ret);
        goto test_gcm_end;
    }

    ret = wc_AesGcmEncryptUpdate(aes, NULL, NULL, 0, assoc, sizeof(assoc));
    if (ret) {
        pr_err("error: wc_AesGcmEncryptUpdate failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }

    ret = wc_AesGcmEncryptUpdate(aes, enc, p_vector, sizeof(p_vector), NULL, 0);
    if (ret) {
        pr_err("error: wc_AesGcmEncryptUpdate failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }

    if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
        pr_err("wolfcrypt AES-GCM KAT mismatch on ciphertext\n");
        ret = LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E;
        goto test_gcm_end;
    }

    ret = wc_AesGcmEncryptFinal(aes, authTag, AES_BLOCK_SIZE);
    if (ret) {
        pr_err("error: wc_AesGcmEncryptFinal failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }

    if (XMEMCMP(authTag, KAT_authTag, sizeof(KAT_authTag)) != 0) {
        pr_err("wolfcrypt AES-GCM KAT mismatch on authTag\n");
        ret = LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E;
        goto test_gcm_end;
    }

    ret = wc_AesGcmInit(aes, key32, sizeof(key32)/sizeof(byte), ivstr,
                        AES_BLOCK_SIZE);
    if (ret) {
        pr_err("error: wc_AesGcmInit failed with return code %d.\n", ret);
        goto test_gcm_end;
    }

    ret = wc_AesGcmDecryptUpdate(aes, dec, enc, sizeof(p_vector),
                                 assoc, sizeof(assoc));
    if (ret) {
        pr_err("error: wc_AesGcmDecryptUpdate failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }
    ret = wc_AesGcmDecryptFinal(aes, authTag, AES_BLOCK_SIZE);
    if (ret) {
        pr_err("error: wc_AesGcmDecryptFinal failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }
    ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
    if (ret) {
        pr_err("error: gcm: p_vector and dec do not match: %d\n", ret);
        goto test_gcm_end;
    }

    /* now the kernel crypto part */
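    /* Re-run the same vectors through the registered AEAD and require the
     * ciphertext, tag, and recovered plaintext to match the wolfCrypt
     * results computed above.
     */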
    /* malloc() failure yields a null pointer, not an ERR_PTR, so check for
     * NULL and propagate -ENOMEM.
     */
    assoc2 = malloc(sizeof(assoc));
    if (!assoc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }
    memset(assoc2, 0, sizeof(assoc));
    memcpy(assoc2, assoc, sizeof(assoc));

    iv = malloc(AES_BLOCK_SIZE);
    if (!iv) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }
    memset(iv, 0, AES_BLOCK_SIZE);
    memcpy(iv, ivstr, AES_BLOCK_SIZE);

    enc2 = malloc(decryptLen);
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }

    dec2 = malloc(decryptLen);
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }

    memset(enc2, 0, decryptLen);
    memset(dec2, 0, decryptLen);
    memcpy(dec2, p_vector, sizeof(p_vector));

    tfm = crypto_alloc_aead(WOLFKM_AESGCM_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        pr_err("error: allocating AES AEAD algorithm %s failed: %ld\n",
               WOLFKM_AESGCM_DRIVER, PTR_ERR(tfm));
        ret = PTR_ERR(tfm);
        goto test_gcm_end;
    }
#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESGCM_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESGCM_NAME, driver_name, WOLFKM_AESGCM_DRIVER);
            ret = -ENOENT;
            goto test_gcm_end;
        }
    }
#endif

    ret = crypto_aead_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
    if (ret) {
        pr_err("error: crypto_aead_setkey returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = crypto_aead_setauthsize(tfm, sizeof(authTag));
    if (ret) {
        pr_err("error: crypto_aead_setauthsize returned: %d\n", ret);
        goto test_gcm_end;
    }
    req = aead_request_alloc(tfm, GFP_KERNEL);
    if (!req) {
        /* aead_request_alloc() returns NULL, not an ERR_PTR, on failure. */
        pr_err("error: allocating AES aead request %s failed\n",
               WOLFKM_AESGCM_DRIVER);
        ret = -ENOMEM;
        goto test_gcm_end;
    }
    src = malloc(sizeof(struct scatterlist) * 2);
    dst = malloc(sizeof(struct scatterlist) * 2);
    if ((src == NULL) || (dst == NULL)) {
        pr_err("error: malloc src or dst failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }
    sg_init_table(src, 2);
    sg_set_buf(src, assoc2, sizeof(assoc));
    sg_set_buf(&src[1], dec2, sizeof(p_vector));

    sg_init_table(dst, 2);
    sg_set_buf(dst, assoc2, sizeof(assoc));
    sg_set_buf(&dst[1], enc2, decryptLen);

    aead_request_set_callback(req, 0, NULL, NULL);
    aead_request_set_ad(req, sizeof(assoc));
    aead_request_set_crypt(req, src, dst, sizeof(p_vector), iv);

    ret = crypto_aead_encrypt(req);
    if (ret) {
        pr_err("error: crypto_aead_encrypt returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(enc, enc2, sizeof(p_vector));
    if (ret) {
        pr_err("error: enc and enc2 do not match: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(authTag, enc2 + encryptLen, sizeof(authTag));
    if (ret) {
        pr_err("error: authTags do not match: %d\n", ret);
        goto test_gcm_end;
    }

    /* Now decrypt crypto request. Reverse src and dst. */
    memset(dec2, 0, decryptLen);
    aead_request_set_ad(req, sizeof(assoc));
    aead_request_set_crypt(req, dst, src, decryptLen, iv);

    ret = crypto_aead_decrypt(req);
    if (ret) {
        pr_err("error: crypto_aead_decrypt returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(dec, dec2, sizeof(p_vector));
    if (ret) {
        pr_err("error: dec and dec2 do not match: %d\n", ret);
        goto test_gcm_end;
    }

test_gcm_end:

    if (req) { aead_request_free(req); req = NULL; }
    if (tfm) { crypto_free_aead(tfm); tfm = NULL; }

    if (src) { free(src); src = NULL; }
    if (dst) { free(dst); dst = NULL; }

    if (dec2) { free(dec2); dec2 = NULL; }
    if (enc2) { free(enc2); enc2 = NULL; }

    if (assoc2) { free(assoc2); assoc2 = NULL; }
    if (iv) { free(iv); iv = NULL; }

    if (aes_inited)
        wc_AesFree(aes);
    free(aes);

    return ret;
}
#endif /* HAVE_AESGCM &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESGCM)
        */

#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))

/* test vectors from
 * http://csrc.nist.gov/groups/STM/cavp/block-cipher-modes.html
 */

#ifdef WOLFSSL_AES_128
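
/* AES-128-XTS self-test: known-answer tests (including partial-block and,
 * outside FIPS builds, in-place ciphertext-stealing cases) against
 * wolfCrypt's one-shot and streaming XTS APIs, followed by a cross-check of
 * the skcipher registered under WOLFKM_AESXTS_NAME.
 */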
static int aes_xts_128_test(void)
{
    XtsAes *aes = NULL;
    int aes_inited = 0;
    int ret = 0;
#define AES_XTS_128_TEST_BUF_SIZ (AES_BLOCK_SIZE * 2 + 8)
    unsigned char *buf = NULL;
    unsigned char *cipher = NULL;
    u8 * enc2 = NULL;
    u8 * dec2 = NULL;
    struct scatterlist * src = NULL;
    struct scatterlist * dst = NULL;
    struct crypto_skcipher *tfm = NULL;
    struct skcipher_request *req = NULL;
    struct XtsAesStreamData stream;
    byte* large_input = NULL;

    /* 128 key tests */
    static const unsigned char k1[] = {
        0xa1, 0xb9, 0x0c, 0xba, 0x3f, 0x06, 0xac, 0x35,
        0x3b, 0x2c, 0x34, 0x38, 0x76, 0x08, 0x17, 0x62,
        0x09, 0x09, 0x23, 0x02, 0x6e, 0x91, 0x77, 0x18,
        0x15, 0xf2, 0x9d, 0xab, 0x01, 0x93, 0x2f, 0x2f
    };

    static const unsigned char i1[] = {
        0x4f, 0xae, 0xf7, 0x11, 0x7c, 0xda, 0x59, 0xc6,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };

    static const unsigned char p1[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c
    };

    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char pp[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };

    static const unsigned char c1[] = {
        0x77, 0x8a, 0xe8, 0xb4, 0x3c, 0xb9, 0x8d, 0x5a,
        0x82, 0x50, 0x81, 0xd5, 0xbe, 0x47, 0x1c, 0x63
    };

    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char cp[] = {
        0x2b, 0xf7, 0x2c, 0xf3, 0xeb, 0x85, 0xef, 0x7b,
        0x0b, 0x76, 0xa0, 0xaa, 0xf3, 0x3f, 0x25, 0x8b,
        0x77, 0x8a, 0xe8, 0xb4, 0x3c, 0xb9, 0x8d, 0x5a
    };

    static const unsigned char k2[] = {
        0x39, 0x25, 0x79, 0x05, 0xdf, 0xcc, 0x77, 0x76,
        0x6c, 0x87, 0x0a, 0x80, 0x6a, 0x60, 0xe3, 0xc0,
        0x93, 0xd1, 0x2a, 0xcf, 0xcb, 0x51, 0x42, 0xfa,
        0x09, 0x69, 0x89, 0x62, 0x5b, 0x60, 0xdb, 0x16
    };

    static const unsigned char i2[] = {
        0x5c, 0xf7, 0x9d, 0xb6, 0xc5, 0xcd, 0x99, 0x1a,
        0x1c, 0x78, 0x81, 0x42, 0x24, 0x95, 0x1e, 0x84
    };

    static const unsigned char p2[] = {
        0xbd, 0xc5, 0x46, 0x8f, 0xbc, 0x8d, 0x50, 0xa1,
        0x0d, 0x1c, 0x85, 0x7f, 0x79, 0x1c, 0x5c, 0xba,
        0xb3, 0x81, 0x0d, 0x0d, 0x73, 0xcf, 0x8f, 0x20,
        0x46, 0xb1, 0xd1, 0x9e, 0x7d, 0x5d, 0x8a, 0x56
    };

    static const unsigned char c2[] = {
        0xd6, 0xbe, 0x04, 0x6d, 0x41, 0xf2, 0x3b, 0x5e,
        0xd7, 0x0b, 0x6b, 0x3d, 0x5c, 0x8e, 0x66, 0x23,
        0x2b, 0xe6, 0xb8, 0x07, 0xd4, 0xdc, 0xc6, 0x0e,
        0xff, 0x8d, 0xbc, 0x1d, 0x9f, 0x7f, 0xc8, 0x22
    };

#ifndef HAVE_FIPS /* FIPS requires different keys for main and tweak. */
    static const unsigned char k3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
    };
    static const unsigned char i3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
    };
    static const unsigned char p3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0xff, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20
    };
    static const unsigned char c3[] = {
        0xA2, 0x07, 0x47, 0x76, 0x3F, 0xEC, 0x0C, 0x23,
        0x1B, 0xD0, 0xBD, 0x46, 0x9A, 0x27, 0x38, 0x12,
        0x95, 0x02, 0x3D, 0x5D, 0xC6, 0x94, 0x51, 0x36,
        0xA0, 0x85, 0xD2, 0x69, 0x6E, 0x87, 0x0A, 0xBF,
        0xB5, 0x5A, 0xDD, 0xCB, 0x80, 0xE0, 0xFC, 0xCD
    };
#endif /* HAVE_FIPS */
    if ((aes = (XtsAes *)XMALLOC(sizeof(*aes), NULL, DYNAMIC_TYPE_AES))
        == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((buf = (unsigned char *)XMALLOC(AES_XTS_128_TEST_BUF_SIZ, NULL,
                                        DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }
    if ((cipher = (unsigned char *)XMALLOC(AES_XTS_128_TEST_BUF_SIZ, NULL,
                                           DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);

    ret = wc_AesXtsInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;
    else
        aes_inited = 1;

    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    if (ret != 0)
        goto out;

    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);

    ret = wc_AesXtsEncryptInit(aes, i2, sizeof(i2), &stream);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncryptUpdate(aes, buf, p2, AES_BLOCK_SIZE, &stream);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncryptFinal(aes, buf + AES_BLOCK_SIZE,
                                p2 + AES_BLOCK_SIZE,
                                sizeof(p2) - AES_BLOCK_SIZE, &stream);
    if (ret != 0)
        goto out;

    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);

    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    if (ret != 0)
        goto out;

    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* partial block encryption test */
    XMEMSET(cipher, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;

    if (XMEMCMP(cp, cipher, sizeof(cp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    XMEMSET(cipher, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;

    if (XMEMCMP(cp, cipher, sizeof(cp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* partial block decrypt test */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;

    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;

    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* NIST decrypt test vector */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    if (ret != 0)
        goto out;

    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;

    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* fail case with decrypting using wrong key */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;

    if (XMEMCMP(p2, buf, sizeof(p2)) == 0) { /* fail case with wrong key */
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* set correct key and retest */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;

    if (XMEMCMP(p2, buf, sizeof(p2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#ifndef HAVE_FIPS
    /* Test ciphertext stealing in-place. */
    XMEMCPY(buf, p3, sizeof(p3));
    ret = wc_AesXtsSetKeyNoInit(aes, k3, sizeof(k3), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, buf, sizeof(p3), i3, sizeof(i3));
    if (ret != 0)
        goto out;

    if (XMEMCMP(c3, buf, sizeof(c3))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    ret = wc_AesXtsSetKeyNoInit(aes, k3, sizeof(k3), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, buf, sizeof(c3), i3, sizeof(i3));
    if (ret != 0)
        goto out;

    if (XMEMCMP(p3, buf, sizeof(p3))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif /* HAVE_FIPS */
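
    /* Exercise the streaming API against the one-shot API over a range of
     * lengths, encrypting and decrypting a large pattern buffer in place and
     * verifying it round-trips back to the original byte pattern.
     */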
    {
#define LARGE_XTS_SZ 1024
        int i;
        int j;
        int k;

        large_input = (byte *)XMALLOC(LARGE_XTS_SZ, NULL,
                                      DYNAMIC_TYPE_TMP_BUFFER);
        if (large_input == NULL) {
            ret = MEMORY_E;
            goto out;
        }

        for (i = 0; i < (int)LARGE_XTS_SZ; i++)
            large_input[i] = (byte)i;

        /* first, encrypt block by block then decrypt with a one-shot call. */
        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncryptInit(aes, i1, sizeof(i1), &stream);
            if (ret != 0)
                goto out;
            for (k = 0; k < j; k += AES_BLOCK_SIZE) {
                if ((j - k) < AES_BLOCK_SIZE*2)
                    ret = wc_AesXtsEncryptFinal(aes, large_input + k,
                                                large_input + k, j - k,
                                                &stream);
                else
                    ret = wc_AesXtsEncryptUpdate(aes, large_input + k,
                                                 large_input + k,
                                                 AES_BLOCK_SIZE, &stream);
                if (ret != 0)
                    goto out;
                if ((j - k) < AES_BLOCK_SIZE*2)
                    break;
            }
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;
            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }

        /* second, encrypt with a one-shot call then decrypt block by block. */
        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecryptInit(aes, i1, sizeof(i1), &stream);
            if (ret != 0)
                goto out;
            for (k = 0; k < j; k += AES_BLOCK_SIZE) {
                if ((j - k) < AES_BLOCK_SIZE*2)
                    ret = wc_AesXtsDecryptFinal(aes, large_input + k,
                                                large_input + k, j - k,
                                                &stream);
                else
                    ret = wc_AesXtsDecryptUpdate(aes, large_input + k,
                                                 large_input + k,
                                                 AES_BLOCK_SIZE, &stream);
                if (ret != 0)
                    goto out;
                if ((j - k) < AES_BLOCK_SIZE*2)
                    break;
            }
            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }
    }

    /* now the kernel crypto part */
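    /* Repeat representative single-block and partial-block (ciphertext
     * stealing) cases through the registered skcipher, using
     * stream.tweak_block as the request IV, and compare against the
     * wolfCrypt known-answer vectors above.
     */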
    enc2 = XMALLOC(sizeof(pp), NULL, DYNAMIC_TYPE_AES);
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dec2 = XMALLOC(sizeof(pp), NULL, DYNAMIC_TYPE_AES);
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    src = XMALLOC(sizeof(*src) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! src) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dst = XMALLOC(sizeof(*dst) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! dst) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    tfm = crypto_alloc_skcipher(WOLFKM_AESXTS_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        ret = PTR_ERR(tfm);
        pr_err("error: allocating AES skcipher algorithm %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESXTS_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESXTS_NAME, driver_name, WOLFKM_AESXTS_DRIVER);
            ret = -ENOENT;
            goto test_xts_end;
        }
    }
#endif

    ret = crypto_skcipher_ivsize(tfm);
    if (ret != sizeof(stream.tweak_block)) {
        pr_err("error: AES skcipher algorithm %s crypto_skcipher_ivsize()"
               " returned %d but expected %d\n",
               WOLFKM_AESXTS_DRIVER, ret, (int)sizeof(stream.tweak_block));
        ret = -EINVAL;
        goto test_xts_end;
    }

    ret = crypto_skcipher_setkey(tfm, k1, sizeof(k1));
    if (ret) {
        pr_err("error: crypto_skcipher_setkey for %s returned: %d\n",
               WOLFKM_AESXTS_NAME, ret);
        goto test_xts_end;
    }
    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (!req) {
        /* skcipher_request_alloc() returns NULL, not an ERR_PTR, on failure. */
        ret = -ENOMEM;
        pr_err("error: allocating AES skcipher request %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }
    memcpy(dec2, p1, sizeof(p1));
    memset(enc2, 0, sizeof(p1));

    sg_init_one(src, dec2, sizeof(p1));
    sg_init_one(dst, enc2, sizeof(p1));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), stream.tweak_block);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(c1, enc2, sizeof(c1));
    if (ret) {
        pr_err("error: c1 and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(p1));

    sg_init_one(src, enc2, sizeof(p1));
    sg_init_one(dst, dec2, sizeof(p1));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), stream.tweak_block);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(p1, dec2, sizeof(p1));
    if (ret) {
        pr_err("error: p1 and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memcpy(dec2, pp, sizeof(pp));
    memset(enc2, 0, sizeof(pp));

    sg_init_one(src, dec2, sizeof(pp));
    sg_init_one(dst, enc2, sizeof(pp));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), stream.tweak_block);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(cp, enc2, sizeof(cp));
    if (ret) {
        pr_err("error: cp and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(pp));

    sg_init_one(src, enc2, sizeof(pp));
    sg_init_one(dst, dec2, sizeof(pp));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), stream.tweak_block);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(pp, dec2, sizeof(pp));
    if (ret) {
        pr_err("error: pp and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

test_xts_end:

    if (enc2)
        XFREE(enc2, NULL, DYNAMIC_TYPE_AES);
    if (dec2)
        XFREE(dec2, NULL, DYNAMIC_TYPE_AES);
    if (src)
        XFREE(src, NULL, DYNAMIC_TYPE_AES);
    if (dst)
        XFREE(dst, NULL, DYNAMIC_TYPE_AES);
    if (req)
        skcipher_request_free(req);
    if (tfm)
        crypto_free_skcipher(tfm);

out:

    if (large_input)
        XFREE(large_input, NULL, DYNAMIC_TYPE_TMP_BUFFER);

    if (aes_inited)
        wc_AesXtsFree(aes);

    if (buf)
        XFREE(buf, NULL, DYNAMIC_TYPE_AES);
    if (cipher)
        XFREE(cipher, NULL, DYNAMIC_TYPE_AES);

    if (aes)
        XFREE(aes, NULL, DYNAMIC_TYPE_AES);

#undef AES_XTS_128_TEST_BUF_SIZ

    return ret;
}

#endif /* WOLFSSL_AES_128 */

#ifdef WOLFSSL_AES_256
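
/* AES-256-XTS self-test: same structure as the 128-bit test above --
 * known-answer and partial-block vectors against wolfCrypt's one-shot and
 * streaming XTS APIs, then a cross-check of the registered XTS skcipher.
 */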
static int aes_xts_256_test(void)
{
    XtsAes *aes = NULL;
    int aes_inited = 0;
    int ret = 0;
#define AES_XTS_256_TEST_BUF_SIZ (AES_BLOCK_SIZE * 3)
    unsigned char *buf = NULL;
    unsigned char *cipher = NULL;
    u8 * enc2 = NULL;
    u8 * dec2 = NULL;
    struct scatterlist * src = NULL;
    struct scatterlist * dst = NULL;
    struct crypto_skcipher *tfm = NULL;
    struct skcipher_request *req = NULL;
    struct XtsAesStreamData stream;
    byte* large_input = NULL;

    /* 256 key tests */
    static const unsigned char k1[] = {
        0x1e, 0xa6, 0x61, 0xc5, 0x8d, 0x94, 0x3a, 0x0e,
        0x48, 0x01, 0xe4, 0x2f, 0x4b, 0x09, 0x47, 0x14,
        0x9e, 0x7f, 0x9f, 0x8e, 0x3e, 0x68, 0xd0, 0xc7,
        0x50, 0x52, 0x10, 0xbd, 0x31, 0x1a, 0x0e, 0x7c,
        0xd6, 0xe1, 0x3f, 0xfd, 0xf2, 0x41, 0x8d, 0x8d,
        0x19, 0x11, 0xc0, 0x04, 0xcd, 0xa5, 0x8d, 0xa3,
        0xd6, 0x19, 0xb7, 0xe2, 0xb9, 0x14, 0x1e, 0x58,
        0x31, 0x8e, 0xea, 0x39, 0x2c, 0xf4, 0x1b, 0x08
    };

    static const unsigned char i1[] = {
        0xad, 0xf8, 0xd9, 0x26, 0x27, 0x46, 0x4a, 0xd2,
        0xf0, 0x42, 0x8e, 0x84, 0xa9, 0xf8, 0x75, 0x64
    };

    static const unsigned char p1[] = {
        0x2e, 0xed, 0xea, 0x52, 0xcd, 0x82, 0x15, 0xe1,
        0xac, 0xc6, 0x47, 0xe8, 0x10, 0xbb, 0xc3, 0x64,
        0x2e, 0x87, 0x28, 0x7f, 0x8d, 0x2e, 0x57, 0xe3,
        0x6c, 0x0a, 0x24, 0xfb, 0xc1, 0x2a, 0x20, 0x2e
    };

    static const unsigned char c1[] = {
        0xcb, 0xaa, 0xd0, 0xe2, 0xf6, 0xce, 0xa3, 0xf5,
        0x0b, 0x37, 0xf9, 0x34, 0xd4, 0x6a, 0x9b, 0x13,
        0x0b, 0x9d, 0x54, 0xf0, 0x7e, 0x34, 0xf3, 0x6a,
        0xf7, 0x93, 0xe8, 0x6f, 0x73, 0xc6, 0xd7, 0xdb
    };

    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char pp[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };

    static const unsigned char cp[] = {
        0x65, 0x5e, 0x1d, 0x37, 0x4a, 0x91, 0xe7, 0x6c,
        0x4f, 0x83, 0x92, 0xbc, 0x5a, 0x10, 0x55, 0x27,
        0x61, 0x0e, 0x5a, 0xde, 0xca, 0xc5, 0x12, 0xd8
    };

    static const unsigned char k2[] = {
        0xad, 0x50, 0x4b, 0x85, 0xd7, 0x51, 0xbf, 0xba,
        0x69, 0x13, 0xb4, 0xcc, 0x79, 0xb6, 0x5a, 0x62,
        0xf7, 0xf3, 0x9d, 0x36, 0x0f, 0x35, 0xb5, 0xec,
        0x4a, 0x7e, 0x95, 0xbd, 0x9b, 0xa5, 0xf2, 0xec,
        0xc1, 0xd7, 0x7e, 0xa3, 0xc3, 0x74, 0xbd, 0x4b,
        0x13, 0x1b, 0x07, 0x83, 0x87, 0xdd, 0x55, 0x5a,
        0xb5, 0xb0, 0xc7, 0xe5, 0x2d, 0xb5, 0x06, 0x12,
        0xd2, 0xb5, 0x3a, 0xcb, 0x47, 0x8a, 0x53, 0xb4
    };

    static const unsigned char i2[] = {
        0xe6, 0x42, 0x19, 0xed, 0xe0, 0xe1, 0xc2, 0xa0,
        0x0e, 0xf5, 0x58, 0x6a, 0xc4, 0x9b, 0xeb, 0x6f
    };

    static const unsigned char p2[] = {
        0x24, 0xcb, 0x76, 0x22, 0x55, 0xb5, 0xa8, 0x00,
        0xf4, 0x6e, 0x80, 0x60, 0x56, 0x9e, 0x05, 0x53,
        0xbc, 0xfe, 0x86, 0x55, 0x3b, 0xca, 0xd5, 0x89,
        0xc7, 0x54, 0x1a, 0x73, 0xac, 0xc3, 0x9a, 0xbd,
        0x53, 0xc4, 0x07, 0x76, 0xd8, 0xe8, 0x22, 0x61,
        0x9e, 0xa9, 0xad, 0x77, 0xa0, 0x13, 0x4c, 0xfc
    };

    static const unsigned char c2[] = {
        0xa3, 0xc6, 0xf3, 0xf3, 0x82, 0x79, 0x5b, 0x10,
        0x87, 0xd7, 0x02, 0x50, 0xdb, 0x2c, 0xd3, 0xb1,
        0xa1, 0x62, 0xa8, 0xb6, 0xdc, 0x12, 0x60, 0x61,
        0xc1, 0x0a, 0x84, 0xa5, 0x85, 0x3f, 0x3a, 0x89,
        0xe6, 0x6c, 0xdb, 0xb7, 0x9a, 0xb4, 0x28, 0x9b,
        0xc3, 0xea, 0xd8, 0x10, 0xe9, 0xc0, 0xaf, 0x92
    };
    if ((aes = (XtsAes *)XMALLOC(sizeof(*aes), NULL, DYNAMIC_TYPE_AES))
        == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((buf = (unsigned char *)XMALLOC(AES_XTS_256_TEST_BUF_SIZ, NULL,
                                        DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }
    if ((cipher = (unsigned char *)XMALLOC(AES_XTS_256_TEST_BUF_SIZ, NULL,
                                           DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    ret = wc_AesXtsInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;
    else
        aes_inited = 1;

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);

    ret = wc_AesXtsEncryptInit(aes, i2, sizeof(i2), &stream);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncryptUpdate(aes, buf, p2, AES_BLOCK_SIZE, &stream);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncryptFinal(aes, buf + AES_BLOCK_SIZE,
                                p2 + AES_BLOCK_SIZE,
                                sizeof(p2) - AES_BLOCK_SIZE, &stream);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* partial block encryption test */
    XMEMSET(cipher, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;

    /* partial block decrypt test */
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* NIST decrypt test vector */
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
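
    /* Exercise the streaming API against the one-shot API over a range of
     * lengths, encrypting and decrypting a large pattern buffer in place and
     * verifying it round-trips back to the original byte pattern.
     */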
    {
#define LARGE_XTS_SZ 1024
        int i;
        int j;
        int k;

        large_input = (byte *)XMALLOC(LARGE_XTS_SZ, NULL,
                                      DYNAMIC_TYPE_TMP_BUFFER);
        if (large_input == NULL) {
            ret = MEMORY_E;
            goto out;
        }

        for (i = 0; i < (int)LARGE_XTS_SZ; i++)
            large_input[i] = (byte)i;

        /* first, encrypt block by block then decrypt with a one-shot call. */
        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncryptInit(aes, i1, sizeof(i1), &stream);
            if (ret != 0)
                goto out;
            for (k = 0; k < j; k += AES_BLOCK_SIZE) {
                if ((j - k) < AES_BLOCK_SIZE*2)
                    ret = wc_AesXtsEncryptFinal(aes, large_input + k,
                                                large_input + k, j - k,
                                                &stream);
                else
                    ret = wc_AesXtsEncryptUpdate(aes, large_input + k,
                                                 large_input + k,
                                                 AES_BLOCK_SIZE, &stream);
                if (ret != 0)
                    goto out;
                if ((j - k) < AES_BLOCK_SIZE*2)
                    break;
            }
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;
            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }

        /* second, encrypt with a one-shot call then decrypt block by block. */
        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecryptInit(aes, i1, sizeof(i1), &stream);
            if (ret != 0)
                goto out;
            for (k = 0; k < j; k += AES_BLOCK_SIZE) {
                if ((j - k) < AES_BLOCK_SIZE*2)
                    ret = wc_AesXtsDecryptFinal(aes, large_input + k,
                                                large_input + k, j - k,
                                                &stream);
                else
                    ret = wc_AesXtsDecryptUpdate(aes, large_input + k,
                                                 large_input + k,
                                                 AES_BLOCK_SIZE, &stream);
                if (ret != 0)
                    goto out;
                if ((j - k) < AES_BLOCK_SIZE*2)
                    break;
            }
            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }
    }

    /* now the kernel crypto part */
    enc2 = XMALLOC(sizeof(p1), NULL, DYNAMIC_TYPE_AES);
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dec2 = XMALLOC(sizeof(p1), NULL, DYNAMIC_TYPE_AES);
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    src = XMALLOC(sizeof(*src) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! src) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dst = XMALLOC(sizeof(*dst) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! dst) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    tfm = crypto_alloc_skcipher(WOLFKM_AESXTS_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        ret = PTR_ERR(tfm);
        pr_err("error: allocating AES skcipher algorithm %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESXTS_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESXTS_NAME, driver_name, WOLFKM_AESXTS_DRIVER);
            ret = -ENOENT;
            goto test_xts_end;
        }
    }
#endif

    ret = crypto_skcipher_ivsize(tfm);
    if (ret != sizeof(stream.tweak_block)) {
        pr_err("error: AES skcipher algorithm %s crypto_skcipher_ivsize()"
               " returned %d but expected %d\n",
               WOLFKM_AESXTS_DRIVER, ret, (int)sizeof(stream.tweak_block));
        ret = -EINVAL;
        goto test_xts_end;
    }

    ret = crypto_skcipher_setkey(tfm, k1, sizeof(k1));
    if (ret) {
        pr_err("error: crypto_skcipher_setkey for %s returned: %d\n",
               WOLFKM_AESXTS_NAME, ret);
        goto test_xts_end;
    }
    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (!req) {
        /* skcipher_request_alloc() returns NULL, not an ERR_PTR, on failure. */
        ret = -ENOMEM;
        pr_err("error: allocating AES skcipher request %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }
    memcpy(dec2, p1, sizeof(p1));
    memset(enc2, 0, sizeof(p1));

    sg_init_one(src, dec2, sizeof(p1));
    sg_init_one(dst, enc2, sizeof(p1));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), stream.tweak_block);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(c1, enc2, sizeof(c1));
    if (ret) {
        pr_err("error: c1 and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(p1));

    sg_init_one(src, enc2, sizeof(p1));
    sg_init_one(dst, dec2, sizeof(p1));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), stream.tweak_block);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(p1, dec2, sizeof(p1));
    if (ret) {
        pr_err("error: p1 and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memcpy(dec2, pp, sizeof(pp));
    memset(enc2, 0, sizeof(pp));

    sg_init_one(src, dec2, sizeof(pp));
    sg_init_one(dst, enc2, sizeof(pp));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), stream.tweak_block);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(cp, enc2, sizeof(cp));
    if (ret) {
        pr_err("error: cp and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(pp));

    sg_init_one(src, enc2, sizeof(pp));
    sg_init_one(dst, dec2, sizeof(pp));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), stream.tweak_block);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(pp, dec2, sizeof(pp));
    if (ret) {
        pr_err("error: pp and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

test_xts_end:

    if (enc2)
        XFREE(enc2, NULL, DYNAMIC_TYPE_AES);
    if (dec2)
        XFREE(dec2, NULL, DYNAMIC_TYPE_AES);
    if (src)
        XFREE(src, NULL, DYNAMIC_TYPE_AES);
    if (dst)
        XFREE(dst, NULL, DYNAMIC_TYPE_AES);
    if (req)
        skcipher_request_free(req);
    if (tfm)
        crypto_free_skcipher(tfm);

out:

    if (large_input)
        XFREE(large_input, NULL, DYNAMIC_TYPE_TMP_BUFFER);

    if (aes_inited)
        wc_AesXtsFree(aes);

    if (buf)
        XFREE(buf, NULL, DYNAMIC_TYPE_AES);
    if (cipher)
        XFREE(cipher, NULL, DYNAMIC_TYPE_AES);

    if (aes)
        XFREE(aes, NULL, DYNAMIC_TYPE_AES);

#undef AES_XTS_256_TEST_BUF_SIZ

    return ret;
}

#endif /* WOLFSSL_AES_256 */
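
/* Umbrella self-test for AES-XTS: runs whichever of the 128- and 256-bit
 * tests are enabled by the build.
 */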
static int linuxkm_test_aesxts(void) {
    int ret = 0;

#ifdef WOLFSSL_AES_128
    ret = aes_xts_128_test();
    if (ret != 0) {
        pr_err("aes_xts_128_test() failed with retval %d.\n", ret);
        goto out;
    }
#endif
#ifdef WOLFSSL_AES_256
    ret = aes_xts_256_test();
    if (ret != 0) {
        pr_err("aes_xts_256_test() failed with retval %d.\n", ret);
        goto out;
    }
#endif

out:
    return ret;
}

#endif /* WOLFSSL_AES_XTS &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESXTS)
        */

#endif /* !NO_AES */

#if defined(HAVE_FIPS) && defined(CONFIG_CRYPTO_MANAGER) && \
    !defined(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
    #ifdef CONFIG_CRYPTO_FIPS
        #include <linux/fips.h>
    #else
        #error wolfCrypt FIPS with LINUXKM_LKCAPI_REGISTER and CONFIG_CRYPTO_MANAGER requires CONFIG_CRYPTO_FIPS
    #endif
#endif
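
/* Register each enabled wolfSSL algorithm with the kernel crypto API and run
 * its self-test; any duplicate registration, registration failure, or
 * self-test failure stops the sequence and is returned to the caller.
 */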
static int linuxkm_lkcapi_register(void)
{
    int ret = 0;
#if defined(HAVE_FIPS) && defined(CONFIG_CRYPTO_MANAGER) && \
    !defined(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
    int enabled_fips = 0;
#endif

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
    /* temporarily disable warnings around setkey failures, which are expected
     * from the crypto fuzzer in FIPS configs, and potentially in others.
     * unexpected setkey failures are fatal errors returned by the fuzzer.
     */
    disable_setkey_warnings = 1;
#endif
#if defined(HAVE_FIPS) && defined(CONFIG_CRYPTO_MANAGER) && \
    !defined(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
    if (! fips_enabled) {
        /* temporarily assert system-wide FIPS status, to disable FIPS-forbidden
         * test vectors and fuzzing from the CRYPTO_MANAGER.
         */
        enabled_fips = fips_enabled = 1;
    }
#endif

#define REGISTER_ALG(alg, installer, tester) do {               \
        if (alg ## _loaded) {                                   \
            pr_err("ERROR: %s is already registered.\n",        \
                   (alg).base.cra_driver_name);                 \
            ret = -EEXIST;                                      \
            goto out;                                           \
        }                                                       \
                                                                \
        ret = (installer)(&(alg));                              \
                                                                \
        if (ret) {                                              \
            pr_err("ERROR: " #installer " for %s failed "       \
                   "with return code %d.\n",                    \
                   (alg).base.cra_driver_name, ret);            \
            goto out;                                           \
        }                                                       \
                                                                \
        alg ## _loaded = 1;                                     \
                                                                \
        ret = (tester());                                       \
                                                                \
        if (ret) {                                              \
            pr_err("ERROR: self-test for %s failed "            \
                   "with return code %d.\n",                    \
                   (alg).base.cra_driver_name, ret);            \
            goto out;                                           \
        }                                                       \
        pr_info("%s self-test OK -- "                           \
                "registered for %s with priority %d.\n",        \
                (alg).base.cra_driver_name,                     \
                (alg).base.cra_name,                            \
                (alg).base.cra_priority);                       \
    } while (0)

#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
    REGISTER_ALG(cbcAesAlg, crypto_register_skcipher, linuxkm_test_aescbc);
#endif
#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
    REGISTER_ALG(cfbAesAlg, crypto_register_skcipher, linuxkm_test_aescfb);
#endif
#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
    REGISTER_ALG(gcmAesAead, crypto_register_aead, linuxkm_test_aesgcm);
#endif
#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
    REGISTER_ALG(xtsAesAlg, crypto_register_skcipher, linuxkm_test_aesxts);
#endif

#undef REGISTER_ALG

out:

#if defined(HAVE_FIPS) && defined(CONFIG_CRYPTO_MANAGER) && \
    !defined(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
    if (enabled_fips)
        fips_enabled = 0;
#endif
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
    disable_setkey_warnings = 0;
#endif

    return ret;
}
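
/* Unregister every algorithm that was successfully registered above.  Safe to
 * call even if registration only partially completed, since each _loaded flag
 * is checked before unregistering.
 */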
static void linuxkm_lkcapi_unregister(void)
{
#define UNREGISTER_ALG(alg, uninstaller) do {                   \
        if (alg ## _loaded) {                                   \
            (uninstaller)(&(alg));                              \
            alg ## _loaded = 0;                                 \
        }                                                       \
    } while (0)

#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
    UNREGISTER_ALG(cbcAesAlg, crypto_unregister_skcipher);
#endif
#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
    UNREGISTER_ALG(cfbAesAlg, crypto_unregister_skcipher);
#endif
#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
    UNREGISTER_ALG(gcmAesAead, crypto_unregister_aead);
#endif
#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
    UNREGISTER_ALG(xtsAesAlg, crypto_unregister_skcipher);
#endif

#undef UNREGISTER_ALG
}