  1. /* lkcapi_glue.c -- glue logic to register wolfCrypt implementations with
  2. * the Linux Kernel Cryptosystem
  3. *
  4. * Copyright (C) 2006-2024 wolfSSL Inc.
  5. *
  6. * This file is part of wolfSSL.
  7. *
  8. * wolfSSL is free software; you can redistribute it and/or modify
  9. * it under the terms of the GNU General Public License as published by
  10. * the Free Software Foundation; either version 2 of the License, or
  11. * (at your option) any later version.
  12. *
  13. * wolfSSL is distributed in the hope that it will be useful,
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16. * GNU General Public License for more details.
  17. *
  18. * You should have received a copy of the GNU General Public License
  19. * along with this program; if not, write to the Free Software
  20. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
  21. */
  22. #ifndef LINUXKM_LKCAPI_REGISTER
  23. #error lkcapi_glue.c included in non-LINUXKM_LKCAPI_REGISTER project.
  24. #endif
  25. #ifndef WOLFSSL_LINUXKM_LKCAPI_PRIORITY
  26. /* Larger number means higher priority. The highest in-tree priority is 4001,
  27. * in the Cavium driver.
  28. */
  29. #define WOLFSSL_LINUXKM_LKCAPI_PRIORITY 10000
  30. #endif
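/* When several implementations register the same .cra_name (e.g. "cbc(aes)"),
 * the kernel resolves lookups such as crypto_alloc_skcipher("cbc(aes)", 0, 0)
 * to the registration with the highest .cra_priority, so the default of 10000
 * here is meant to outrank the in-tree implementations unless overridden.
 */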
  31. #ifndef NO_AES
  32. /* note the FIPS code will be returned on failure even in non-FIPS builds. */
  33. #define LINUXKM_LKCAPI_AES_KAT_MISMATCH_E AES_KAT_FIPS_E
  34. #define LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E AESGCM_KAT_FIPS_E
  35. #define WOLFKM_AESCBC_NAME "cbc(aes)"
  36. #define WOLFKM_AESCFB_NAME "cfb(aes)"
  37. #define WOLFKM_AESGCM_NAME "gcm(aes)"
  38. #define WOLFKM_AESXTS_NAME "xts(aes)"
  39. #ifdef WOLFSSL_AESNI
  40. #define WOLFKM_DRIVER_ISA_EXT "-aesni"
  41. #else
  42. #define WOLFKM_DRIVER_ISA_EXT ""
  43. #endif
  44. #ifdef HAVE_FIPS
  45. #ifndef HAVE_FIPS_VERSION
  46. #define WOLFKM_DRIVER_FIPS "-fips-140"
  47. #elif HAVE_FIPS_VERSION >= 5
  48. #define WOLFKM_DRIVER_FIPS "-fips-140-3"
  49. #elif HAVE_FIPS_VERSION == 2
  50. #define WOLFKM_DRIVER_FIPS "-fips-140-2"
  51. #else
  52. #define WOLFKM_DRIVER_FIPS "-fips-140"
  53. #endif
  54. #else
  55. #define WOLFKM_DRIVER_FIPS ""
  56. #endif
  57. #define WOLFKM_DRIVER_SUFFIX \
  58. WOLFKM_DRIVER_ISA_EXT WOLFKM_DRIVER_FIPS "-wolfcrypt"
  59. #define WOLFKM_AESCBC_DRIVER ("cbc-aes" WOLFKM_DRIVER_SUFFIX)
  60. #define WOLFKM_AESCFB_DRIVER ("cfb-aes" WOLFKM_DRIVER_SUFFIX)
  61. #define WOLFKM_AESGCM_DRIVER ("gcm-aes" WOLFKM_DRIVER_SUFFIX)
  62. #define WOLFKM_AESXTS_DRIVER ("xts-aes" WOLFKM_DRIVER_SUFFIX)
  63. #if defined(HAVE_AES_CBC) && \
  64. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  65. defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
  66. static int linuxkm_test_aescbc(void);
  67. #endif
  68. #if defined(WOLFSSL_AES_CFB) && \
  69. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  70. defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
  71. static int linuxkm_test_aescfb(void);
  72. #endif
  73. #if defined(HAVE_AESGCM) && \
  74. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  75. defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
  76. static int linuxkm_test_aesgcm(void);
  77. #endif
  78. #if defined(WOLFSSL_AES_XTS) && \
  79. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  80. defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
  81. static int linuxkm_test_aesxts(void);
  82. #endif
  83. /* km_AesX(): wrappers to wolfcrypt wc_AesX functions and
  84. * structures. */
  85. #include <wolfssl/wolfcrypt/aes.h>
  86. struct km_AesCtx {
  87. Aes *aes_encrypt; /* allocated in km_AesInitCommon() to assure
  88. * alignment, needed for AESNI.
  89. */
  90. Aes *aes_decrypt; /* same. */
  91. };
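/* The decrypt-side Aes object is only populated when the wrapped mode needs a
 * separate decryption key schedule (the CBC init below passes
 * need_decryption = 1); CFB and GCM run both directions with the encryption
 * schedule, so for them aes_decrypt stays NULL.
 */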
  92. #if defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  93. defined(LINUXKM_LKCAPI_REGISTER_AESCBC) || \
  94. defined(LINUXKM_LKCAPI_REGISTER_AESCFB) || \
  95. defined(LINUXKM_LKCAPI_REGISTER_AESGCM)
  96. static void km_AesExitCommon(struct km_AesCtx * ctx);
  97. static int km_AesInitCommon(
  98. struct km_AesCtx * ctx,
  99. const char * name,
  100. int need_decryption)
  101. {
  102. int err;
  103. ctx->aes_encrypt = (Aes *)malloc(sizeof(*ctx->aes_encrypt));
  104. if (! ctx->aes_encrypt) {
  105. pr_err("%s: allocation of %zu bytes for encryption key failed.\n",
  106. name, sizeof(*ctx->aes_encrypt));
  107. return MEMORY_E;
  108. }
  109. err = wc_AesInit(ctx->aes_encrypt, NULL, INVALID_DEVID);
  110. if (unlikely(err)) {
  111. pr_err("%s: wc_AesInit failed: %d\n", name, err);
  112. free(ctx->aes_encrypt);
  113. ctx->aes_encrypt = NULL;
  114. return -EINVAL;
  115. }
  116. if (! need_decryption) {
  117. ctx->aes_decrypt = NULL;
  118. return 0;
  119. }
  120. ctx->aes_decrypt = (Aes *)malloc(sizeof(*ctx->aes_decrypt));
  121. if (! ctx->aes_decrypt) {
  122. pr_err("%s: allocation of %zu bytes for decryption key failed.\n",
  123. name, sizeof(*ctx->aes_decrypt));
  124. km_AesExitCommon(ctx);
  125. return MEMORY_E;
  126. }
  127. err = wc_AesInit(ctx->aes_decrypt, NULL, INVALID_DEVID);
  128. if (unlikely(err)) {
  129. pr_err("%s: wc_AesInit failed: %d\n", name, err);
  130. free(ctx->aes_decrypt);
  131. ctx->aes_decrypt = NULL;
  132. km_AesExitCommon(ctx);
  133. return -EINVAL;
  134. }
  135. return 0;
  136. }
  137. static void km_AesExitCommon(struct km_AesCtx * ctx)
  138. {
  139. if (ctx->aes_encrypt) {
  140. wc_AesFree(ctx->aes_encrypt);
  141. free(ctx->aes_encrypt);
  142. ctx->aes_encrypt = NULL;
  143. }
  144. if (ctx->aes_decrypt) {
  145. wc_AesFree(ctx->aes_decrypt);
  146. free(ctx->aes_decrypt);
  147. ctx->aes_decrypt = NULL;
  148. }
  149. }
  150. #if defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  151. defined(LINUXKM_LKCAPI_REGISTER_AESCBC) || \
  152. defined(LINUXKM_LKCAPI_REGISTER_AESCFB)
  153. static int km_AesSetKeyCommon(struct km_AesCtx * ctx, const u8 *in_key,
  154. unsigned int key_len, const char * name)
  155. {
  156. int err;
  157. err = wc_AesSetKey(ctx->aes_encrypt, in_key, key_len, NULL, AES_ENCRYPTION);
  158. if (unlikely(err)) {
  159. pr_err("%s: wc_AesSetKey for encryption key failed: %d\n", name, err);
  160. return -ENOKEY;
  161. }
  162. if (ctx->aes_decrypt) {
  163. err = wc_AesSetKey(ctx->aes_decrypt, in_key, key_len, NULL,
  164. AES_DECRYPTION);
  165. if (unlikely(err)) {
  166. pr_err("%s: wc_AesSetKey for decryption key failed: %d\n",
  167. name, err);
  168. return -ENOKEY;
  169. }
  170. }
  171. return 0;
  172. }
  173. static void km_AesExit(struct crypto_skcipher *tfm)
  174. {
  175. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  176. km_AesExitCommon(ctx);
  177. }
  178. #endif /* LINUXKM_LKCAPI_REGISTER_ALL ||
  179. * LINUXKM_LKCAPI_REGISTER_AESCBC ||
  180. * LINUXKM_LKCAPI_REGISTER_AESCFB
  181. */
  182. #endif /* LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC ||
  183. * LINUXKM_LKCAPI_REGISTER_AESCFB || LINUXKM_LKCAPI_REGISTER_AESGCM
  184. */
  185. #if defined(HAVE_AES_CBC) && \
  186. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  187. defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
  188. static int km_AesCbcInit(struct crypto_skcipher *tfm)
  189. {
  190. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  191. return km_AesInitCommon(ctx, WOLFKM_AESCBC_DRIVER, 1);
  192. }
  193. static int km_AesCbcSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
  194. unsigned int key_len)
  195. {
  196. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  197. return km_AesSetKeyCommon(ctx, in_key, key_len, WOLFKM_AESCBC_DRIVER);
  198. }
  199. static int km_AesCbcEncrypt(struct skcipher_request *req)
  200. {
  201. struct crypto_skcipher * tfm = NULL;
  202. struct km_AesCtx * ctx = NULL;
  203. struct skcipher_walk walk;
  204. unsigned int nbytes = 0;
  205. int err = 0;
  206. tfm = crypto_skcipher_reqtfm(req);
  207. ctx = crypto_skcipher_ctx(tfm);
  208. err = skcipher_walk_virt(&walk, req, false);
  209. if (unlikely(err)) {
  210. pr_err("%s: skcipher_walk_virt failed: %d\n",
  211. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  212. return err;
  213. }
  214. while ((nbytes = walk.nbytes) != 0) {
  215. err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
  216. if (unlikely(err)) {
  217. pr_err("%s: wc_AesSetIV failed: %d\n",
  218. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  219. return -EINVAL;
  220. }
  221. err = wc_AesCbcEncrypt(ctx->aes_encrypt, walk.dst.virt.addr,
  222. walk.src.virt.addr, nbytes);
  223. if (unlikely(err)) {
  224. pr_err("%s: wc_AesCbcEncrypt failed: %d\n",
  225. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  226. return -EINVAL;
  227. }
  228. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  229. if (unlikely(err)) {
  230. pr_err("%s: skcipher_walk_done failed: %d\n",
  231. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  232. return err;
  233. }
  234. }
  235. return err;
  236. }
  237. static int km_AesCbcDecrypt(struct skcipher_request *req)
  238. {
  239. struct crypto_skcipher * tfm = NULL;
  240. struct km_AesCtx * ctx = NULL;
  241. struct skcipher_walk walk;
  242. unsigned int nbytes = 0;
  243. int err = 0;
  244. tfm = crypto_skcipher_reqtfm(req);
  245. ctx = crypto_skcipher_ctx(tfm);
  246. err = skcipher_walk_virt(&walk, req, false);
  247. if (unlikely(err)) {
  248. pr_err("%s: skcipher_walk_virt failed: %d\n",
  249. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  250. return err;
  251. }
  252. while ((nbytes = walk.nbytes) != 0) {
  253. err = wc_AesSetIV(ctx->aes_decrypt, walk.iv);
  254. if (unlikely(err)) {
  255. pr_err("%s: wc_AesSetIV failed: %d\n",
  256. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  257. return -EINVAL;
  258. }
  259. err = wc_AesCbcDecrypt(ctx->aes_decrypt, walk.dst.virt.addr,
  260. walk.src.virt.addr, nbytes);
  261. if (unlikely(err)) {
  262. pr_err("%s: wc_AesCbcDecrypt failed: %d\n",
  263. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  264. return -EINVAL;
  265. }
  266. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  267. if (unlikely(err)) {
  268. pr_err("%s: skcipher_walk_done failed: %d\n",
  269. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  270. return err;
  271. }
  272. }
  273. return err;
  274. }
  275. static struct skcipher_alg cbcAesAlg = {
  276. .base.cra_name = WOLFKM_AESCBC_NAME,
  277. .base.cra_driver_name = WOLFKM_AESCBC_DRIVER,
  278. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  279. .base.cra_blocksize = AES_BLOCK_SIZE,
  280. .base.cra_ctxsize = sizeof(struct km_AesCtx),
  281. .base.cra_module = THIS_MODULE,
  282. .init = km_AesCbcInit,
  283. .exit = km_AesExit,
  284. .min_keysize = AES_128_KEY_SIZE,
  285. .max_keysize = AES_256_KEY_SIZE,
  286. .ivsize = AES_BLOCK_SIZE,
  287. .setkey = km_AesCbcSetKey,
  288. .encrypt = km_AesCbcEncrypt,
  289. .decrypt = km_AesCbcDecrypt,
  290. };
  291. static int cbcAesAlg_loaded = 0;
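/* cbcAesAlg is registered with the kernel elsewhere in the module (typically
 * via crypto_register_skcipher(&cbcAesAlg)), after which it is visible in
 * /proc/crypto and selectable by the "cbc(aes)" name; cbcAesAlg_loaded tracks
 * whether the matching unregister call is needed at module unload.
 */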
  292. #endif /* HAVE_AES_CBC &&
  293. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC)
  294. */
  295. #if defined(WOLFSSL_AES_CFB) && \
  296. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  297. defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
  298. static int km_AesCfbInit(struct crypto_skcipher *tfm)
  299. {
  300. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  301. return km_AesInitCommon(ctx, WOLFKM_AESCFB_DRIVER, 0);
  302. }
  303. static int km_AesCfbSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
  304. unsigned int key_len)
  305. {
  306. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  307. return km_AesSetKeyCommon(ctx, in_key, key_len, WOLFKM_AESCFB_DRIVER);
  308. }
  309. static int km_AesCfbEncrypt(struct skcipher_request *req)
  310. {
  311. struct crypto_skcipher * tfm = NULL;
  312. struct km_AesCtx * ctx = NULL;
  313. struct skcipher_walk walk;
  314. unsigned int nbytes = 0;
  315. int err = 0;
  316. tfm = crypto_skcipher_reqtfm(req);
  317. ctx = crypto_skcipher_ctx(tfm);
  318. err = skcipher_walk_virt(&walk, req, false);
  319. if (unlikely(err)) {
  320. pr_err("%s: skcipher_walk_virt failed: %d\n",
  321. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  322. return err;
  323. }
  324. while ((nbytes = walk.nbytes) != 0) {
  325. err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
  326. if (unlikely(err)) {
  327. pr_err("%s: wc_AesSetIV failed: %d\n",
  328. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  329. return -EINVAL;
  330. }
  331. err = wc_AesCfbEncrypt(ctx->aes_encrypt, walk.dst.virt.addr,
  332. walk.src.virt.addr, nbytes);
  333. if (unlikely(err)) {
  334. pr_err("%s: wc_AesCfbEncrypt failed: %d\n",
  335. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  336. return -EINVAL;
  337. }
  338. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  339. if (unlikely(err)) {
  340. pr_err("%s: skcipher_walk_done failed: %d\n",
  341. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  342. return err;
  343. }
  344. }
  345. return err;
  346. }
  347. static int km_AesCfbDecrypt(struct skcipher_request *req)
  348. {
  349. struct crypto_skcipher * tfm = NULL;
  350. struct km_AesCtx * ctx = NULL;
  351. struct skcipher_walk walk;
  352. unsigned int nbytes = 0;
  353. int err = 0;
  354. tfm = crypto_skcipher_reqtfm(req);
  355. ctx = crypto_skcipher_ctx(tfm);
  356. err = skcipher_walk_virt(&walk, req, false);
  357. if (unlikely(err)) {
  358. pr_err("%s: skcipher_walk_virt failed: %d\n",
  359. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  360. return err;
  361. }
  362. while ((nbytes = walk.nbytes) != 0) {
  363. err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
  364. if (unlikely(err)) {
  365. pr_err("%s: wc_AesSetIV failed: %d\n",
  366. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  367. return -EINVAL;
  368. }
  369. err = wc_AesCfbDecrypt(ctx->aes_encrypt, walk.dst.virt.addr,
  370. walk.src.virt.addr, nbytes);
  371. if (unlikely(err)) {
  372. pr_err("%s: wc_AesCfbDecrypt failed: %d\n",
  373. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  374. return -EINVAL;
  375. }
  376. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  377. if (unlikely(err)) {
  378. pr_err("%s: skcipher_walk_done failed: %d\n",
  379. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  380. return err;
  381. }
  382. }
  383. return err;
  384. }
  385. static struct skcipher_alg cfbAesAlg = {
  386. .base.cra_name = WOLFKM_AESCFB_NAME,
  387. .base.cra_driver_name = WOLFKM_AESCFB_DRIVER,
  388. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  389. .base.cra_blocksize = AES_BLOCK_SIZE,
  390. .base.cra_ctxsize = sizeof(struct km_AesCtx),
  391. .base.cra_module = THIS_MODULE,
  392. .init = km_AesCfbInit,
  393. .exit = km_AesExit,
  394. .min_keysize = AES_128_KEY_SIZE,
  395. .max_keysize = AES_256_KEY_SIZE,
  396. .ivsize = AES_BLOCK_SIZE,
  397. .setkey = km_AesCfbSetKey,
  398. .encrypt = km_AesCfbEncrypt,
  399. .decrypt = km_AesCfbDecrypt,
  400. };
  401. static int cfbAesAlg_loaded = 0;
  402. #endif /* WOLFSSL_AES_CFB &&
  403. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCFB)
  404. */
  405. #if defined(HAVE_AESGCM) && \
  406. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  407. defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
  408. #ifndef WOLFSSL_AESGCM_STREAM
  409. #error LKCAPI registration of AES-GCM requires WOLFSSL_AESGCM_STREAM (--enable-aesgcm-stream).
  410. #endif
  411. static int km_AesGcmInit(struct crypto_aead * tfm)
  412. {
  413. struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
  414. return km_AesInitCommon(ctx, WOLFKM_AESGCM_DRIVER, 0);
  415. }
  416. static void km_AesGcmExit(struct crypto_aead * tfm)
  417. {
  418. struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
  419. km_AesExitCommon(ctx);
  420. }
  421. static int km_AesGcmSetKey(struct crypto_aead *tfm, const u8 *in_key,
  422. unsigned int key_len)
  423. {
  424. int err;
  425. struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
  426. err = wc_AesGcmSetKey(ctx->aes_encrypt, in_key, key_len);
  427. if (unlikely(err)) {
  428. pr_err("%s: wc_AesGcmSetKey failed: %d\n",
  429. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  430. return -ENOKEY;
  431. }
  432. return 0;
  433. }
  434. static int km_AesGcmSetAuthsize(struct crypto_aead *tfm, unsigned int authsize)
  435. {
  436. (void)tfm;
  437. if (authsize > AES_BLOCK_SIZE ||
  438. authsize < WOLFSSL_MIN_AUTH_TAG_SZ) {
  439. pr_err("%s: invalid authsize: %d\n",
  440. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), authsize);
  441. return -EINVAL;
  442. }
  443. return 0;
  444. }
  445. /*
  446. * aead ciphers receive data in scatterlists in the following order:
  447. * encrypt
  448. * req->src: aad||plaintext
  449. * req->dst: aad||ciphertext||tag
  450. * decrypt
  451. * req->src: aad||ciphertext||tag
  452. * req->dst: aad||plaintext, return 0 or -EBADMSG
  453. */
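/* For example, with a 20-byte AAD, 32-byte plaintext, and 16-byte tag:
 * encrypt sees req->assoclen = 20 and req->cryptlen = 32 and writes
 * 20 + 32 + 16 bytes to req->dst; decrypt sees req->cryptlen = 48
 * (ciphertext plus tag), so the payload to decrypt is cryptlen - authsize.
 */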
  454. static int km_AesGcmEncrypt(struct aead_request *req)
  455. {
  456. struct crypto_aead * tfm = NULL;
  457. struct km_AesCtx * ctx = NULL;
  458. struct skcipher_walk walk;
  459. struct scatter_walk assocSgWalk;
  460. unsigned int nbytes = 0;
  461. u8 authTag[AES_BLOCK_SIZE];
  462. int err = 0;
  463. unsigned int assocLeft = 0;
  464. unsigned int cryptLeft = 0;
  465. u8 * assoc = NULL;
  466. tfm = crypto_aead_reqtfm(req);
  467. ctx = crypto_aead_ctx(tfm);
  468. assocLeft = req->assoclen;
  469. cryptLeft = req->cryptlen;
  470. scatterwalk_start(&assocSgWalk, req->src);
  471. err = skcipher_walk_aead_encrypt(&walk, req, false);
  472. if (unlikely(err)) {
  473. pr_err("%s: skcipher_walk_aead_encrypt failed: %d\n",
  474. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  475. return err;
  476. }
  477. err = wc_AesGcmInit(ctx->aes_encrypt, NULL /*key*/, 0 /*keylen*/, walk.iv,
  478. AES_BLOCK_SIZE);
  479. if (unlikely(err)) {
  480. pr_err("%s: wc_AesGcmInit failed: %d\n",
  481. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  482. return -EINVAL;
  483. }
  484. assoc = scatterwalk_map(&assocSgWalk);
  485. if (unlikely(IS_ERR(assoc))) {
  486. pr_err("%s: scatterwalk_map failed: %ld\n",
  487. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)),
  488. PTR_ERR(assoc));
  489. return -EINVAL;
  490. }
  491. err = wc_AesGcmEncryptUpdate(ctx->aes_encrypt, NULL, NULL, 0,
  492. assoc, assocLeft);
  493. assocLeft -= assocLeft;
  494. scatterwalk_unmap(assoc);
  495. assoc = NULL;
  496. if (unlikely(err)) {
  497. pr_err("%s: wc_AesGcmEncryptUpdate failed: %d\n",
  498. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  499. return -EINVAL;
  500. }
  501. while ((nbytes = walk.nbytes) != 0) {
  502. int n = nbytes;
  503. if (likely(cryptLeft && nbytes)) {
  504. n = cryptLeft < nbytes ? cryptLeft : nbytes;
  505. err = wc_AesGcmEncryptUpdate(
  506. ctx->aes_encrypt,
  507. walk.dst.virt.addr,
  508. walk.src.virt.addr,
  509. n,
  510. NULL, 0);
  511. nbytes -= n;
  512. cryptLeft -= n;
  513. }
  514. if (unlikely(err)) {
  515. pr_err("%s: wc_AesGcmEncryptUpdate failed: %d\n",
  516. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  517. return -EINVAL;
  518. }
  519. err = skcipher_walk_done(&walk, nbytes);
  520. if (unlikely(err)) {
  521. pr_err("%s: skcipher_walk_done failed: %d\n",
  522. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  523. return err;
  524. }
  525. }
  526. err = wc_AesGcmEncryptFinal(ctx->aes_encrypt, authTag, tfm->authsize);
  527. if (unlikely(err)) {
  528. pr_err("%s: wc_AesGcmEncryptFinal failed with return code %d\n",
  529. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  530. return -EINVAL;
  531. }
  532. /* Now copy the auth tag into request scatterlist. */
  533. scatterwalk_map_and_copy(authTag, req->dst,
  534. req->assoclen + req->cryptlen,
  535. tfm->authsize, 1);
  536. return err;
  537. }
  538. static int km_AesGcmDecrypt(struct aead_request *req)
  539. {
  540. struct crypto_aead * tfm = NULL;
  541. struct km_AesCtx * ctx = NULL;
  542. struct skcipher_walk walk;
  543. struct scatter_walk assocSgWalk;
  544. unsigned int nbytes = 0;
  545. u8 origAuthTag[AES_BLOCK_SIZE];
  546. int err = 0;
  547. unsigned int assocLeft = 0;
  548. unsigned int cryptLeft = 0;
  549. u8 * assoc = NULL;
  550. tfm = crypto_aead_reqtfm(req);
  551. ctx = crypto_aead_ctx(tfm);
  552. assocLeft = req->assoclen;
  553. cryptLeft = req->cryptlen - tfm->authsize;
  554. /* Copy out original auth tag from req->src. */
  555. scatterwalk_map_and_copy(origAuthTag, req->src,
  556. req->assoclen + req->cryptlen - tfm->authsize,
  557. tfm->authsize, 0);
  558. scatterwalk_start(&assocSgWalk, req->src);
  559. err = skcipher_walk_aead_decrypt(&walk, req, false);
  560. if (unlikely(err)) {
  561. pr_err("%s: skcipher_walk_aead_decrypt failed: %d\n",
  562. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  563. return err;
  564. }
  565. err = wc_AesGcmInit(ctx->aes_encrypt, NULL /*key*/, 0 /*keylen*/, walk.iv,
  566. AES_BLOCK_SIZE);
  567. if (unlikely(err)) {
  568. pr_err("%s: wc_AesGcmInit failed: %d\n",
  569. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  570. return -EINVAL;
  571. }
  572. assoc = scatterwalk_map(&assocSgWalk);
  573. if (unlikely(IS_ERR(assoc))) {
  574. pr_err("%s: scatterwalk_map failed: %ld\n",
  575. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)),
  576. PTR_ERR(assoc));
  577. return -EINVAL;
  578. }
  579. err = wc_AesGcmDecryptUpdate(ctx->aes_encrypt, NULL, NULL, 0,
  580. assoc, assocLeft);
  581. assocLeft -= assocLeft;
  582. scatterwalk_unmap(assoc);
  583. assoc = NULL;
  584. if (unlikely(err)) {
  585. pr_err("%s: wc_AesGcmDecryptUpdate failed: %d\n",
  586. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  587. return -EINVAL;
  588. }
  589. while ((nbytes = walk.nbytes) != 0) {
  590. int n = nbytes;
  591. if (likely(cryptLeft && nbytes)) {
  592. n = cryptLeft < nbytes ? cryptLeft : nbytes;
  593. err = wc_AesGcmDecryptUpdate(
  594. ctx->aes_encrypt,
  595. walk.dst.virt.addr,
  596. walk.src.virt.addr,
  597. n,
  598. NULL, 0);
  599. nbytes -= n;
  600. cryptLeft -= n;
  601. }
  602. if (unlikely(err)) {
  603. pr_err("%s: wc_AesGcmDecryptUpdate failed: %d\n",
  604. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  605. return -EINVAL;
  606. }
  607. err = skcipher_walk_done(&walk, nbytes);
  608. if (unlikely(err)) {
  609. pr_err("%s: skcipher_walk_done failed: %d\n",
  610. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  611. return err;
  612. }
  613. }
  614. err = wc_AesGcmDecryptFinal(ctx->aes_encrypt, origAuthTag, tfm->authsize);
  615. if (unlikely(err)) {
  616. pr_err("%s: wc_AesGcmDecryptFinal failed with return code %d\n",
  617. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  618. if (err == AES_GCM_AUTH_E) {
  619. return -EBADMSG;
  620. }
  621. else {
  622. return -EINVAL;
  623. }
  624. }
  625. return err;
  626. }
  627. static struct aead_alg gcmAesAead = {
  628. .base.cra_name = WOLFKM_AESGCM_NAME,
  629. .base.cra_driver_name = WOLFKM_AESGCM_DRIVER,
  630. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  631. .base.cra_blocksize = 1,
  632. .base.cra_ctxsize = sizeof(struct km_AesCtx),
  633. .base.cra_module = THIS_MODULE,
  634. .init = km_AesGcmInit,
  635. .exit = km_AesGcmExit,
  636. .setkey = km_AesGcmSetKey,
  637. .setauthsize = km_AesGcmSetAuthsize,
  638. .encrypt = km_AesGcmEncrypt,
  639. .decrypt = km_AesGcmDecrypt,
  640. .ivsize = AES_BLOCK_SIZE,
  641. .maxauthsize = AES_BLOCK_SIZE,
  642. .chunksize = AES_BLOCK_SIZE,
  643. };
  644. static int gcmAesAead_loaded = 0;
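/* Note that gcmAesAead is registered with .base.cra_blocksize = 1 because GCM
 * acts as a stream cipher over the payload (no block-sized padding), while
 * .maxauthsize = AES_BLOCK_SIZE reflects the 16-byte maximum GCM tag length
 * enforced above in km_AesGcmSetAuthsize().
 */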
  645. #endif /* HAVE_AESGCM &&
  646. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESGCM)
  647. */
  648. #if defined(WOLFSSL_AES_XTS) && \
  649. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  650. defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
  651. struct km_AesXtsCtx {
  652. XtsAes *aesXts; /* allocated in km_AesXtsInitCommon() to assure alignment
  653. * for AESNI.
  654. */
  655. };
  656. static int km_AesXtsInitCommon(struct km_AesXtsCtx * ctx, const char * name)
  657. {
  658. int err;
  659. ctx->aesXts = (XtsAes *)malloc(sizeof(*ctx->aesXts));
  660. if (! ctx->aesXts)
  661. return MEMORY_E;
  662. err = wc_AesXtsInit(ctx->aesXts, NULL, INVALID_DEVID);
  663. if (unlikely(err)) {
  664. pr_err("%s: wc_AesXtsInit failed: %d\n", name, err);
  665. return -EINVAL;
  666. }
  667. return 0;
  668. }
  669. static int km_AesXtsInit(struct crypto_skcipher *tfm)
  670. {
  671. struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
  672. return km_AesXtsInitCommon(ctx, WOLFKM_AESXTS_DRIVER);
  673. }
  674. static void km_AesXtsExit(struct crypto_skcipher *tfm)
  675. {
  676. struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
  677. wc_AesXtsFree(ctx->aesXts);
  678. free(ctx->aesXts);
  679. ctx->aesXts = NULL;
  680. }
  681. static int km_AesXtsSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
  682. unsigned int key_len)
  683. {
  684. int err;
  685. struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
  686. err = wc_AesXtsSetKeyNoInit(ctx->aesXts, in_key, key_len,
  687. AES_ENCRYPTION_AND_DECRYPTION);
  688. if (unlikely(err)) {
  689. pr_err("%s: wc_AesXtsSetKeyNoInit failed: %d\n",
  690. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  691. return -EINVAL;
  692. }
  693. return 0;
  694. }
  695. /* see /usr/src/linux/drivers/md/dm-crypt.c */
  696. static int km_AesXtsEncrypt(struct skcipher_request *req)
  697. {
  698. int err = 0;
  699. struct crypto_skcipher * tfm = NULL;
  700. struct km_AesXtsCtx * ctx = NULL;
  701. struct skcipher_walk walk;
  702. unsigned int nbytes = 0;
  703. tfm = crypto_skcipher_reqtfm(req);
  704. ctx = crypto_skcipher_ctx(tfm);
  705. err = skcipher_walk_virt(&walk, req, false);
  706. if (unlikely(err)) {
  707. pr_err("%s: skcipher_walk_virt failed: %d\n",
  708. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  709. return err;
  710. }
  711. while ((nbytes = walk.nbytes) != 0) {
  712. err = wc_AesXtsEncrypt(ctx->aesXts, walk.dst.virt.addr,
  713. walk.src.virt.addr, nbytes,
  714. walk.iv, walk.ivsize);
  715. if (unlikely(err)) {
  716. pr_err("%s: wc_AesXtsEncrypt failed: %d\n",
  717. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  718. return -EINVAL;
  719. }
  720. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  721. if (unlikely(err)) {
  722. pr_err("%s: skcipher_walk_done failed: %d\n",
  723. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  724. return err;
  725. }
  726. }
  727. return err;
  728. }
  729. static int km_AesXtsDecrypt(struct skcipher_request *req)
  730. {
  731. int err = 0;
  732. struct crypto_skcipher * tfm = NULL;
  733. struct km_AesXtsCtx * ctx = NULL;
  734. struct skcipher_walk walk;
  735. unsigned int nbytes = 0;
  736. tfm = crypto_skcipher_reqtfm(req);
  737. ctx = crypto_skcipher_ctx(tfm);
  738. err = skcipher_walk_virt(&walk, req, false);
  739. if (unlikely(err)) {
  740. pr_err("%s: skcipher_walk_virt failed: %d\n",
  741. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  742. return err;
  743. }
  744. while ((nbytes = walk.nbytes) != 0) {
  745. err = wc_AesXtsDecrypt(ctx->aesXts, walk.dst.virt.addr,
  746. walk.src.virt.addr, nbytes,
  747. walk.iv, walk.ivsize);
  748. if (unlikely(err)) {
  749. pr_err("%s: wc_AesXtsDecrypt failed: %d\n",
  750. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  751. return -EINVAL;
  752. }
  753. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  754. if (unlikely(err)) {
  755. pr_err("%s: skcipher_walk_done failed: %d\n",
  756. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  757. return err;
  758. }
  759. }
  760. return err;
  761. }
  762. static struct skcipher_alg xtsAesAlg = {
  763. .base.cra_name = WOLFKM_AESXTS_NAME,
  764. .base.cra_driver_name = WOLFKM_AESXTS_DRIVER,
  765. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  766. .base.cra_blocksize = AES_BLOCK_SIZE,
  767. .base.cra_ctxsize = sizeof(struct km_AesXtsCtx),
  768. .base.cra_module = THIS_MODULE,
  769. .min_keysize = 2 * AES_128_KEY_SIZE,
  770. .max_keysize = 2 * AES_256_KEY_SIZE,
  771. .ivsize = AES_BLOCK_SIZE,
  772. .walksize = 2 * AES_BLOCK_SIZE,
  773. .init = km_AesXtsInit,
  774. .exit = km_AesXtsExit,
  775. .setkey = km_AesXtsSetKey,
  776. .encrypt = km_AesXtsEncrypt,
  777. .decrypt = km_AesXtsDecrypt
  778. };
  779. static int xtsAesAlg_loaded = 0;
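/* The XTS key material is the concatenation of two AES keys (the data key and
 * the tweak key), which is why .min_keysize and .max_keysize above are twice
 * the AES-128 and AES-256 key sizes; wc_AesXtsSetKeyNoInit() splits the
 * supplied buffer into those two halves.
 */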
  780. #endif /* WOLFSSL_AES_XTS &&
  781. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESXTS)
  782. */
  783. /* cipher tests, cribbed from test.c, with supplementary LKCAPI tests: */
  784. #if defined(HAVE_AES_CBC) && \
  785. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  786. defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
  787. static int linuxkm_test_aescbc(void)
  788. {
  789. int ret = 0;
  790. struct crypto_skcipher * tfm = NULL;
  791. struct skcipher_request * req = NULL;
  792. struct scatterlist src, dst;
  793. Aes *aes;
  794. int aes_inited = 0;
  795. static const byte key32[] =
  796. {
  797. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  798. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
  799. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  800. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
  801. };
  802. static const byte p_vector[] =
  803. /* Now is the time for all good men w/o trailing 0 */
  804. {
  805. 0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
  806. 0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
  807. 0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20,
  808. 0x67,0x6f,0x6f,0x64,0x20,0x6d,0x65,0x6e
  809. };
  810. static const byte iv[] = "1234567890abcdef";
  811. static const byte c_vector[] =
  812. {
  813. 0xd7,0xd6,0x04,0x5b,0x4d,0xc4,0x90,0xdf,
  814. 0x4a,0x82,0xed,0x61,0x26,0x4e,0x23,0xb3,
  815. 0xe4,0xb5,0x85,0x30,0x29,0x4c,0x9d,0xcf,
  816. 0x73,0xc9,0x46,0xd1,0xaa,0xc8,0xcb,0x62
  817. };
  818. byte iv_copy[sizeof(iv)];
  819. byte enc[sizeof(p_vector)];
  820. byte dec[sizeof(p_vector)];
  821. u8 * enc2 = NULL;
  822. u8 * dec2 = NULL;
  823. aes = (Aes *)malloc(sizeof(*aes));
  824. if (aes == NULL)
  825. return -ENOMEM;
  826. XMEMSET(enc, 0, sizeof(enc));
  827. XMEMSET(dec, 0, sizeof(dec));
  828. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  829. if (ret) {
  830. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  831. goto test_cbc_end;
  832. }
  833. aes_inited = 1;
  834. ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
  835. if (ret) {
  836. pr_err("wolfcrypt wc_AesSetKey failed with return code %d\n", ret);
  837. goto test_cbc_end;
  838. }
  839. ret = wc_AesCbcEncrypt(aes, enc, p_vector, sizeof(p_vector));
  840. if (ret) {
  841. pr_err("wolfcrypt wc_AesCbcEncrypt failed with return code %d\n", ret);
  842. goto test_cbc_end;
  843. }
  844. if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
  845. pr_err("wolfcrypt wc_AesCbcEncrypt KAT mismatch\n");
  846. return LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
  847. }
  848. /* Re init for decrypt and set flag. */
  849. wc_AesFree(aes);
  850. aes_inited = 0;
  851. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  852. if (ret) {
  853. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  854. goto test_cbc_end;
  855. }
  856. aes_inited = 1;
  857. ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_DECRYPTION);
  858. if (ret) {
  859. pr_err("wolfcrypt wc_AesSetKey failed with return code %d.\n", ret);
  860. goto test_cbc_end;
  861. }
  862. ret = wc_AesCbcDecrypt(aes, dec, enc, sizeof(p_vector));
  863. if (ret) {
  864. pr_err("wolfcrypt wc_AesCbcDecrypt failed with return code %d\n", ret);
  865. goto test_cbc_end;
  866. }
  867. ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
  868. if (ret) {
  869. pr_err("error: p_vector and dec do not match: %d\n", ret);
  870. goto test_cbc_end;
  871. }
  872. /* now the kernel crypto part */
  873. enc2 = malloc(sizeof(p_vector));
  874. if (!enc2) {
  875. pr_err("error: malloc failed\n");
  876. goto test_cbc_end;
  877. }
  878. dec2 = malloc(sizeof(p_vector));
  879. if (!dec2) {
  880. pr_err("error: malloc failed\n");
  881. goto test_cbc_end;
  882. }
  883. memcpy(dec2, p_vector, sizeof(p_vector));
  884. tfm = crypto_alloc_skcipher(WOLFKM_AESCBC_NAME, 0, 0);
  885. if (IS_ERR(tfm)) {
  886. pr_err("error: allocating AES skcipher algorithm %s failed: %ld\n",
  887. WOLFKM_AESCBC_DRIVER, PTR_ERR(tfm));
  888. goto test_cbc_end;
  889. }
  890. #ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
  891. {
  892. const char *driver_name =
  893. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
  894. if (strcmp(driver_name, WOLFKM_AESCBC_DRIVER)) {
  895. pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
  896. WOLFKM_AESCBC_NAME, driver_name, WOLFKM_AESCBC_DRIVER);
  897. ret = -ENOENT;
  898. goto test_cbc_end;
  899. }
  900. }
  901. #endif
  902. ret = crypto_skcipher_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
  903. if (ret) {
  904. pr_err("error: crypto_skcipher_setkey returned: %d\n", ret);
  905. goto test_cbc_end;
  906. }
  907. req = skcipher_request_alloc(tfm, GFP_KERNEL);
  908. if (! req) {
  909. pr_err("error: allocating AES skcipher request %s failed\n",
  910. WOLFKM_AESCBC_DRIVER);
  911. goto test_cbc_end;
  912. }
  913. sg_init_one(&src, dec2, sizeof(p_vector));
  914. sg_init_one(&dst, enc2, sizeof(p_vector));
  915. XMEMCPY(iv_copy, iv, sizeof(iv));
  916. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  917. ret = crypto_skcipher_encrypt(req);
  918. if (ret) {
  919. pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
  920. goto test_cbc_end;
  921. }
  922. ret = XMEMCMP(enc, enc2, sizeof(p_vector));
  923. if (ret) {
  924. pr_err("error: enc and enc2 do not match: %d\n", ret);
  925. goto test_cbc_end;
  926. }
  927. memset(dec2, 0, sizeof(p_vector));
  928. sg_init_one(&src, enc2, sizeof(p_vector));
  929. sg_init_one(&dst, dec2, sizeof(p_vector));
  930. XMEMCPY(iv_copy, iv, sizeof(iv));
  931. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  932. ret = crypto_skcipher_decrypt(req);
  933. if (ret) {
  934. pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
  935. goto test_cbc_end;
  936. }
  937. ret = XMEMCMP(dec, dec2, sizeof(p_vector));
  938. if (ret) {
  939. pr_err("error: dec and dec2 do not match: %d\n", ret);
  940. goto test_cbc_end;
  941. }
  942. test_cbc_end:
  943. if (enc2) { free(enc2); }
  944. if (dec2) { free(dec2); }
  945. if (req) { skcipher_request_free(req); }
  946. if (tfm) { crypto_free_skcipher(tfm); }
  947. if (aes_inited)
  948. wc_AesFree(aes);
  949. free(aes);
  950. return ret;
  951. }
  952. #endif /* HAVE_AES_CBC &&
  953. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC)
  954. */
  955. #if defined(WOLFSSL_AES_CFB) && \
  956. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  957. defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
  958. static int linuxkm_test_aescfb(void)
  959. {
  960. int ret = 0;
  961. struct crypto_skcipher * tfm = NULL;
  962. struct skcipher_request * req = NULL;
  963. struct scatterlist src, dst;
  964. Aes *aes;
  965. int aes_inited = 0;
  966. static const byte key32[] =
  967. {
  968. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  969. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
  970. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  971. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
  972. };
  973. static const byte p_vector[] =
  974. /* Now is the time for all good men w/o trailing 0 */
  975. {
  976. 0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
  977. 0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
  978. 0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20,
  979. 0x67,0x6f,0x6f,0x64,0x20,0x6d,0x65,0x6e
  980. };
  981. static const byte iv[] = "1234567890abcdef";
  982. static const byte c_vector[] =
  983. {
  984. 0x56,0x35,0x3f,0xdd,0xde,0xa6,0x15,0x87,
  985. 0x57,0xdc,0x34,0x62,0x9a,0x68,0x96,0x51,
  986. 0xc7,0x09,0xb9,0x4e,0x47,0x6b,0x24,0x72,
  987. 0x19,0x5a,0xdf,0x7e,0xba,0xa8,0x01,0xb6
  988. };
  989. byte iv_copy[sizeof(iv)];
  990. byte enc[sizeof(p_vector)];
  991. byte dec[sizeof(p_vector)];
  992. u8 * enc2 = NULL;
  993. u8 * dec2 = NULL;
  994. aes = (Aes *)malloc(sizeof(*aes));
  995. if (aes == NULL)
  996. return -ENOMEM;
  997. XMEMSET(enc, 0, sizeof(enc));
  998. XMEMSET(dec, 0, sizeof(dec));
  999. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  1000. if (ret) {
  1001. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  1002. goto test_cfb_end;
  1003. }
  1004. aes_inited = 1;
  1005. ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
  1006. if (ret) {
  1007. pr_err("wolfcrypt wc_AesSetKey failed with return code %d\n", ret);
  1008. goto test_cfb_end;
  1009. }
  1010. ret = wc_AesCfbEncrypt(aes, enc, p_vector, sizeof(p_vector));
  1011. if (ret) {
  1012. pr_err("wolfcrypt wc_AesCfbEncrypt failed with return code %d\n", ret);
  1013. goto test_cfb_end;
  1014. }
  1015. if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
  1016. pr_err("wolfcrypt wc_AesCfbEncrypt KAT mismatch\n");
  1017. return LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
  1018. }
  1019. /* Re init for decrypt and set flag. */
  1020. wc_AesFree(aes);
  1021. aes_inited = 0;
  1022. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  1023. if (ret) {
  1024. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  1025. goto test_cfb_end;
  1026. }
  1027. aes_inited = 1;
  1028. ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
  1029. if (ret) {
  1030. pr_err("wolfcrypt wc_AesSetKey failed with return code %d.\n", ret);
  1031. goto test_cfb_end;
  1032. }
  1033. ret = wc_AesCfbDecrypt(aes, dec, enc, sizeof(p_vector));
  1034. if (ret) {
  1035. pr_err("wolfcrypt wc_AesCfbDecrypt failed with return code %d\n", ret);
  1036. goto test_cfb_end;
  1037. }
  1038. ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
  1039. if (ret) {
  1040. pr_err("error: p_vector and dec do not match: %d\n", ret);
  1041. goto test_cfb_end;
  1042. }
  1043. /* now the kernel crypto part */
  1044. enc2 = malloc(sizeof(p_vector));
  1045. if (!enc2) {
  1046. pr_err("error: malloc failed\n");
  1047. goto test_cfb_end;
  1048. }
  1049. dec2 = malloc(sizeof(p_vector));
  1050. if (!dec2) {
  1051. pr_err("error: malloc failed\n");
  1052. goto test_cfb_end;
  1053. }
  1054. memcpy(dec2, p_vector, sizeof(p_vector));
  1055. tfm = crypto_alloc_skcipher(WOLFKM_AESCFB_NAME, 0, 0);
  1056. if (IS_ERR(tfm)) {
  1057. pr_err("error: allocating AES skcipher algorithm %s failed: %ld\n",
  1058. WOLFKM_AESCFB_DRIVER, PTR_ERR(tfm));
  1059. goto test_cfb_end;
  1060. }
  1061. #ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
  1062. {
  1063. const char *driver_name =
  1064. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
  1065. if (strcmp(driver_name, WOLFKM_AESCFB_DRIVER)) {
  1066. pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
  1067. WOLFKM_AESCFB_NAME, driver_name, WOLFKM_AESCFB_DRIVER);
  1068. ret = -ENOENT;
  1069. goto test_cfb_end;
  1070. }
  1071. }
  1072. #endif
  1073. ret = crypto_skcipher_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
  1074. if (ret) {
  1075. pr_err("error: crypto_skcipher_setkey returned: %d\n", ret);
  1076. goto test_cfb_end;
  1077. }
  1078. req = skcipher_request_alloc(tfm, GFP_KERNEL);
  1079. if (! req) {
  1080. pr_err("error: allocating AES skcipher request %s failed\n",
  1081. WOLFKM_AESCFB_DRIVER);
  1082. goto test_cfb_end;
  1083. }
  1084. sg_init_one(&src, dec2, sizeof(p_vector));
  1085. sg_init_one(&dst, enc2, sizeof(p_vector));
  1086. XMEMCPY(iv_copy, iv, sizeof(iv));
  1087. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  1088. ret = crypto_skcipher_encrypt(req);
  1089. if (ret) {
  1090. pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
  1091. goto test_cfb_end;
  1092. }
  1093. ret = XMEMCMP(enc, enc2, sizeof(p_vector));
  1094. if (ret) {
  1095. pr_err("error: enc and enc2 do not match: %d\n", ret);
  1096. goto test_cfb_end;
  1097. }
  1098. memset(dec2, 0, sizeof(p_vector));
  1099. sg_init_one(&src, enc2, sizeof(p_vector));
  1100. sg_init_one(&dst, dec2, sizeof(p_vector));
  1101. XMEMCPY(iv_copy, iv, sizeof(iv));
  1102. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  1103. ret = crypto_skcipher_decrypt(req);
  1104. if (ret) {
  1105. pr_err("error: crypto_skcipher_decrypt returned: %d\n", ret);
  1106. goto test_cfb_end;
  1107. }
  1108. ret = XMEMCMP(dec, dec2, sizeof(p_vector));
  1109. if (ret) {
  1110. pr_err("error: dec and dec2 do not match: %d\n", ret);
  1111. goto test_cfb_end;
  1112. }
  1113. test_cfb_end:
  1114. if (enc2) { free(enc2); }
  1115. if (dec2) { free(dec2); }
  1116. if (req) { skcipher_request_free(req); }
  1117. if (tfm) { crypto_free_skcipher(tfm); }
  1118. if (aes_inited)
  1119. wc_AesFree(aes);
  1120. free(aes);
  1121. return ret;
  1122. }
  1123. #endif /* WOLFSSL_AES_CFB &&
  1124. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCFB)
  1125. */
  1126. #if defined(HAVE_AESGCM) && \
  1127. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  1128. defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
  1129. static int linuxkm_test_aesgcm(void)
  1130. {
  1131. int ret = 0;
  1132. struct crypto_aead * tfm = NULL;
  1133. struct aead_request * req = NULL;
  1134. struct scatterlist * src = NULL;
  1135. struct scatterlist * dst = NULL;
  1136. Aes *aes;
  1137. int aes_inited = 0;
  1138. static const byte key32[] =
  1139. {
  1140. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  1141. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
  1142. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  1143. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
  1144. };
  1145. static const byte p_vector[] =
  1146. /* Now is the time for all w/o trailing 0 */
  1147. {
  1148. 0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
  1149. 0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
  1150. 0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20
  1151. };
  1152. static const byte assoc[] =
  1153. {
  1154. 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
  1155. 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
  1156. 0xab, 0xad, 0xda, 0xd2
  1157. };
  1158. static const byte ivstr[] = "1234567890abcdef";
  1159. static const byte c_vector[] =
  1160. {
  1161. 0x0c,0x97,0x05,0x3c,0xef,0x5c,0x63,0x6b,
  1162. 0x15,0xe4,0x00,0x63,0xf8,0x8c,0xd0,0x95,
  1163. 0x27,0x81,0x90,0x9c,0x9f,0xe6,0x98,0xe9
  1164. };
  1165. static const byte KAT_authTag[] =
  1166. {
  1167. 0xc9,0xd5,0x7a,0x77,0xac,0x28,0xc2,0xe7,
  1168. 0xe4,0x28,0x90,0xaa,0x09,0xab,0xf9,0x7c
  1169. };
  1170. byte enc[sizeof(p_vector)];
  1171. byte authTag[AES_BLOCK_SIZE];
  1172. byte dec[sizeof(p_vector)];
  1173. u8 * assoc2 = NULL;
  1174. u8 * enc2 = NULL;
  1175. u8 * dec2 = NULL;
  1176. u8 * iv = NULL;
  1177. size_t encryptLen = sizeof(p_vector);
  1178. size_t decryptLen = sizeof(p_vector) + sizeof(authTag);
  1179. /* Init stack variables. */
  1180. XMEMSET(enc, 0, sizeof(p_vector));
  1181. XMEMSET(dec, 0, sizeof(p_vector));
  1182. XMEMSET(authTag, 0, AES_BLOCK_SIZE);
  1183. aes = (Aes *)malloc(sizeof(*aes));
  1184. if (aes == NULL)
  1185. return -ENOMEM;
  1186. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  1187. if (ret) {
  1188. pr_err("error: wc_AesInit failed with return code %d.\n", ret);
  1189. goto test_gcm_end;
  1190. }
  1191. aes_inited = 1;
  1192. ret = wc_AesGcmInit(aes, key32, sizeof(key32)/sizeof(byte), ivstr,
  1193. AES_BLOCK_SIZE);
  1194. if (ret) {
  1195. pr_err("error: wc_AesGcmInit failed with return code %d.\n", ret);
  1196. goto test_gcm_end;
  1197. }
  1198. ret = wc_AesGcmEncryptUpdate(aes, NULL, NULL, 0, assoc, sizeof(assoc));
  1199. if (ret) {
  1200. pr_err("error: wc_AesGcmEncryptUpdate failed with return code %d\n",
  1201. ret);
  1202. goto test_gcm_end;
  1203. }
  1204. ret = wc_AesGcmEncryptUpdate(aes, enc, p_vector, sizeof(p_vector), NULL, 0);
  1205. if (ret) {
  1206. pr_err("error: wc_AesGcmEncryptUpdate failed with return code %d\n",
  1207. ret);
  1208. goto test_gcm_end;
  1209. }
  1210. if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
  1211. pr_err("wolfcrypt AES-GCM KAT mismatch on ciphertext\n");
  1212. ret = LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E;
  1213. goto test_gcm_end;
  1214. }
  1215. ret = wc_AesGcmEncryptFinal(aes, authTag, AES_BLOCK_SIZE);
  1216. if (ret) {
  1217. pr_err("error: wc_AesGcmEncryptFinal failed with return code %d\n",
  1218. ret);
  1219. goto test_gcm_end;
  1220. }
  1221. if (XMEMCMP(authTag, KAT_authTag, sizeof(KAT_authTag)) != 0) {
  1222. pr_err("wolfcrypt AES-GCM KAT mismatch on authTag\n");
  1223. ret = LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E;
  1224. goto test_gcm_end;
  1225. }
  1226. ret = wc_AesGcmInit(aes, key32, sizeof(key32)/sizeof(byte), ivstr,
  1227. AES_BLOCK_SIZE);
  1228. if (ret) {
  1229. pr_err("error: wc_AesGcmInit failed with return code %d.\n", ret);
  1230. goto test_gcm_end;
  1231. }
  1232. ret = wc_AesGcmDecryptUpdate(aes, dec, enc, sizeof(p_vector),
  1233. assoc, sizeof(assoc));
  1234. if (ret) {
  1235. pr_err("error: wc_AesGcmDecryptUpdate failed with return code %d\n",
  1236. ret);
  1237. goto test_gcm_end;
  1238. }
  1239. ret = wc_AesGcmDecryptFinal(aes, authTag, AES_BLOCK_SIZE);
  1240. if (ret) {
  1241. pr_err("error: wc_AesGcmDecryptFinal failed with return code %d\n",
  1242. ret);
  1243. goto test_gcm_end;
  1244. }
  1245. ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
  1246. if (ret) {
  1247. pr_err("error: gcm: p_vector and dec do not match: %d\n", ret);
  1248. goto test_gcm_end;
  1249. }
  1250. /* now the kernel crypto part */
  1251. assoc2 = malloc(sizeof(assoc));
  1252. if (! assoc2) {
  1253. pr_err("error: malloc failed\n");
  1254. goto test_gcm_end;
  1255. }
  1256. memset(assoc2, 0, sizeof(assoc));
  1257. memcpy(assoc2, assoc, sizeof(assoc));
  1258. iv = malloc(AES_BLOCK_SIZE);
  1259. if (! iv) {
  1260. pr_err("error: malloc failed\n");
  1261. goto test_gcm_end;
  1262. }
  1263. memset(iv, 0, AES_BLOCK_SIZE);
  1264. memcpy(iv, ivstr, AES_BLOCK_SIZE);
  1265. enc2 = malloc(decryptLen);
  1266. if (! enc2) {
  1267. pr_err("error: malloc failed\n");
  1268. goto test_gcm_end;
  1269. }
  1270. dec2 = malloc(decryptLen);
  1271. if (! dec2) {
  1272. pr_err("error: malloc failed\n");
  1273. goto test_gcm_end;
  1274. }
  1275. memset(enc2, 0, decryptLen);
  1276. memset(dec2, 0, decryptLen);
  1277. memcpy(dec2, p_vector, sizeof(p_vector));
    tfm = crypto_alloc_aead(WOLFKM_AESGCM_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        pr_err("error: allocating AES AEAD algorithm %s failed: %ld\n",
               WOLFKM_AESGCM_DRIVER, PTR_ERR(tfm));
        goto test_gcm_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESGCM_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESGCM_NAME, driver_name, WOLFKM_AESGCM_DRIVER);
            ret = -ENOENT;
            goto test_gcm_end;
        }
    }
#endif

    ret = crypto_aead_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
    if (ret) {
        pr_err("error: crypto_aead_setkey returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = crypto_aead_setauthsize(tfm, sizeof(authTag));
    if (ret) {
        pr_err("error: crypto_aead_setauthsize returned: %d\n", ret);
        goto test_gcm_end;
    }

    req = aead_request_alloc(tfm, GFP_KERNEL);
    if (IS_ERR(req)) {
        pr_err("error: allocating AES AEAD request %s failed: %ld\n",
               WOLFKM_AESGCM_DRIVER, PTR_ERR(req));
        goto test_gcm_end;
    }
    src = malloc(sizeof(struct scatterlist) * 2);
    dst = malloc(sizeof(struct scatterlist) * 2);
    if (IS_ERR(src) || IS_ERR(dst)) {
        pr_err("error: malloc src or dst failed: %ld, %ld\n",
               PTR_ERR(src), PTR_ERR(dst));
        goto test_gcm_end;
    }

    sg_init_table(src, 2);
    sg_set_buf(src, assoc2, sizeof(assoc));
    sg_set_buf(&src[1], dec2, sizeof(p_vector));
    sg_init_table(dst, 2);
    sg_set_buf(dst, assoc2, sizeof(assoc));
    sg_set_buf(&dst[1], enc2, decryptLen);

    aead_request_set_callback(req, 0, NULL, NULL);
    aead_request_set_ad(req, sizeof(assoc));
    aead_request_set_crypt(req, src, dst, sizeof(p_vector), iv);

    ret = crypto_aead_encrypt(req);
    if (ret) {
        pr_err("error: crypto_aead_encrypt returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(enc, enc2, sizeof(p_vector));
    if (ret) {
        pr_err("error: enc and enc2 do not match: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(authTag, enc2 + encryptLen, sizeof(authTag));
    if (ret) {
        pr_err("error: authTags do not match: %d\n", ret);
        goto test_gcm_end;
    }
    /* Now decrypt crypto request. Reverse src and dst. */
    memset(dec2, 0, decryptLen);
    aead_request_set_ad(req, sizeof(assoc));
    aead_request_set_crypt(req, dst, src, decryptLen, iv);

    ret = crypto_aead_decrypt(req);
    if (ret) {
        pr_err("error: crypto_aead_decrypt returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(dec, dec2, sizeof(p_vector));
    if (ret) {
        pr_err("error: dec and dec2 do not match: %d\n", ret);
        goto test_gcm_end;
    }
test_gcm_end:
    if (req) { aead_request_free(req); req = NULL; }
    if (tfm) { crypto_free_aead(tfm); tfm = NULL; }

    if (src) { free(src); src = NULL; }
    if (dst) { free(dst); dst = NULL; }

    if (dec2) { free(dec2); dec2 = NULL; }
    if (enc2) { free(enc2); enc2 = NULL; }
    if (assoc2) { free(assoc2); assoc2 = NULL; }
    if (iv) { free(iv); iv = NULL; }

    if (aes_inited)
        wc_AesFree(aes);
    free(aes);

    return ret;
}

#endif /* HAVE_AESGCM &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESGCM) &&
        */
#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))

/* test vectors from
 * http://csrc.nist.gov/groups/STM/cavp/block-cipher-modes.html
 */

#ifdef WOLFSSL_AES_128
static int aes_xts_128_test(void)
{
    XtsAes *aes = NULL;
    int aes_inited = 0;
    int ret = 0;
#define AES_XTS_128_TEST_BUF_SIZ (AES_BLOCK_SIZE * 2 + 8)
    unsigned char *buf = NULL;
    unsigned char *cipher = NULL;
    u8 *enc2 = NULL;
    u8 *dec2 = NULL;
    struct scatterlist *src = NULL;
    struct scatterlist *dst = NULL;
    struct crypto_skcipher *tfm = NULL;
    struct skcipher_request *req = NULL;
    u8 iv[AES_BLOCK_SIZE];
    byte *large_input = NULL;

    /* 128 key tests */
    static const unsigned char k1[] = {
        0xa1, 0xb9, 0x0c, 0xba, 0x3f, 0x06, 0xac, 0x35,
        0x3b, 0x2c, 0x34, 0x38, 0x76, 0x08, 0x17, 0x62,
        0x09, 0x09, 0x23, 0x02, 0x6e, 0x91, 0x77, 0x18,
        0x15, 0xf2, 0x9d, 0xab, 0x01, 0x93, 0x2f, 0x2f
    };

    static const unsigned char i1[] = {
        0x4f, 0xae, 0xf7, 0x11, 0x7c, 0xda, 0x59, 0xc6,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };

    static const unsigned char p1[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c
    };

    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char pp[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };

    static const unsigned char c1[] = {
        0x77, 0x8a, 0xe8, 0xb4, 0x3c, 0xb9, 0x8d, 0x5a,
        0x82, 0x50, 0x81, 0xd5, 0xbe, 0x47, 0x1c, 0x63
    };

    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char cp[] = {
        0x2b, 0xf7, 0x2c, 0xf3, 0xeb, 0x85, 0xef, 0x7b,
        0x0b, 0x76, 0xa0, 0xaa, 0xf3, 0x3f, 0x25, 0x8b,
        0x77, 0x8a, 0xe8, 0xb4, 0x3c, 0xb9, 0x8d, 0x5a
    };

    static const unsigned char k2[] = {
        0x39, 0x25, 0x79, 0x05, 0xdf, 0xcc, 0x77, 0x76,
        0x6c, 0x87, 0x0a, 0x80, 0x6a, 0x60, 0xe3, 0xc0,
        0x93, 0xd1, 0x2a, 0xcf, 0xcb, 0x51, 0x42, 0xfa,
        0x09, 0x69, 0x89, 0x62, 0x5b, 0x60, 0xdb, 0x16
    };

    static const unsigned char i2[] = {
        0x5c, 0xf7, 0x9d, 0xb6, 0xc5, 0xcd, 0x99, 0x1a,
        0x1c, 0x78, 0x81, 0x42, 0x24, 0x95, 0x1e, 0x84
    };

    static const unsigned char p2[] = {
        0xbd, 0xc5, 0x46, 0x8f, 0xbc, 0x8d, 0x50, 0xa1,
        0x0d, 0x1c, 0x85, 0x7f, 0x79, 0x1c, 0x5c, 0xba,
        0xb3, 0x81, 0x0d, 0x0d, 0x73, 0xcf, 0x8f, 0x20,
        0x46, 0xb1, 0xd1, 0x9e, 0x7d, 0x5d, 0x8a, 0x56
    };

    static const unsigned char c2[] = {
        0xd6, 0xbe, 0x04, 0x6d, 0x41, 0xf2, 0x3b, 0x5e,
        0xd7, 0x0b, 0x6b, 0x3d, 0x5c, 0x8e, 0x66, 0x23,
        0x2b, 0xe6, 0xb8, 0x07, 0xd4, 0xdc, 0xc6, 0x0e,
        0xff, 0x8d, 0xbc, 0x1d, 0x9f, 0x7f, 0xc8, 0x22
    };

#ifndef HAVE_FIPS /* FIPS requires different keys for main and tweak. */
    static const unsigned char k3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
    };
    static const unsigned char i3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
    };
    static const unsigned char p3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0xff, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20
    };
    static const unsigned char c3[] = {
        0xA2, 0x07, 0x47, 0x76, 0x3F, 0xEC, 0x0C, 0x23,
        0x1B, 0xD0, 0xBD, 0x46, 0x9A, 0x27, 0x38, 0x12,
        0x95, 0x02, 0x3D, 0x5D, 0xC6, 0x94, 0x51, 0x36,
        0xA0, 0x85, 0xD2, 0x69, 0x6E, 0x87, 0x0A, 0xBF,
        0xB5, 0x5A, 0xDD, 0xCB, 0x80, 0xE0, 0xFC, 0xCD
    };
#endif /* HAVE_FIPS */
    if ((aes = (XtsAes *)XMALLOC(sizeof(*aes), NULL, DYNAMIC_TYPE_AES))
        == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((buf = (unsigned char *)XMALLOC(AES_XTS_128_TEST_BUF_SIZ, NULL,
                                        DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }
    if ((cipher = (unsigned char *)XMALLOC(AES_XTS_128_TEST_BUF_SIZ, NULL,
                                           DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);

    ret = wc_AesXtsInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;
    else
        aes_inited = 1;

    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
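    /* Where DEBUG_VECTOR_REGISTER_ACCESS and WC_AES_C_DYNAMIC_FALLBACK are
     * enabled, each operation below is repeated with vector register access
     * forced to fail (via WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL), so the
     * plain-C fallback path is checked against the same vectors.
     */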
#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* partial block encryption test */
    XMEMSET(cipher, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(cp, cipher, sizeof(cp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    XMEMSET(cipher, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(cp, cipher, sizeof(cp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* partial block decrypt test */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* NIST decrypt test vector */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* fail case with decrypting using wrong key */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2)) == 0) { /* fail case with wrong key */
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* set correct key and retest */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#ifndef HAVE_FIPS
    /* Test ciphertext stealing in-place. */
    XMEMCPY(buf, p3, sizeof(p3));
    ret = wc_AesXtsSetKeyNoInit(aes, k3, sizeof(k3), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, buf, sizeof(p3), i3, sizeof(i3));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c3, buf, sizeof(c3))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    ret = wc_AesXtsSetKeyNoInit(aes, k3, sizeof(k3), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, buf, sizeof(c3), i3, sizeof(i3));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p3, buf, sizeof(p3))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif /* HAVE_FIPS */
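    /* In-place encrypt/decrypt round trip of every length from one block (16
     * bytes) up to LARGE_XTS_SZ - 1 bytes, covering all ciphertext-stealing
     * tail sizes.
     */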
    {
#define LARGE_XTS_SZ 1024
        int i;
        int j;

        large_input = (byte *)XMALLOC(LARGE_XTS_SZ, NULL,
                                      DYNAMIC_TYPE_TMP_BUFFER);
        if (large_input == NULL) {
            ret = MEMORY_E;
            goto out;
        }

        for (i = 0; i < (int)LARGE_XTS_SZ; i++)
            large_input[i] = (byte)i;

        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;

            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;

            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }
    }
    /* now the kernel crypto part */
    enc2 = XMALLOC(sizeof(pp), NULL, DYNAMIC_TYPE_AES);
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dec2 = XMALLOC(sizeof(pp), NULL, DYNAMIC_TYPE_AES);
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    src = XMALLOC(sizeof(*src) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! src) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dst = XMALLOC(sizeof(*dst) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! dst) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }
    tfm = crypto_alloc_skcipher(WOLFKM_AESXTS_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        ret = PTR_ERR(tfm);
        pr_err("error: allocating AES skcipher algorithm %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESXTS_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESXTS_NAME, driver_name, WOLFKM_AESXTS_DRIVER);
            ret = -ENOENT;
            goto test_xts_end;
        }
    }
#endif

    ret = crypto_skcipher_ivsize(tfm);
    if (ret != sizeof(iv)) {
        pr_err("error: AES skcipher algorithm %s crypto_skcipher_ivsize()"
               " returned %d but expected %d\n",
               WOLFKM_AESXTS_DRIVER, ret, (int)sizeof(iv));
        ret = -EINVAL;
        goto test_xts_end;
    }

    ret = crypto_skcipher_setkey(tfm, k1, sizeof(k1));
    if (ret) {
        pr_err("error: crypto_skcipher_setkey for %s returned: %d\n",
               WOLFKM_AESXTS_NAME, ret);
        goto test_xts_end;
    }

    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (IS_ERR(req)) {
        ret = PTR_ERR(req);
        pr_err("error: allocating AES skcipher request %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }
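    /* Round-trip one full AES block through the registered skcipher, then a
     * partial (sizeof(pp)) length to cover the ciphertext-stealing path, and
     * check each result against the wolfCrypt-verified vectors.
     */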
    memcpy(dec2, p1, sizeof(p1));
    memset(enc2, 0, sizeof(p1));
    sg_init_one(src, dec2, sizeof(p1));
    sg_init_one(dst, enc2, sizeof(p1));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), iv);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(c1, enc2, sizeof(c1));
    if (ret) {
        pr_err("error: c1 and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(p1));
    sg_init_one(src, enc2, sizeof(p1));
    sg_init_one(dst, dec2, sizeof(p1));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), iv);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(p1, dec2, sizeof(p1));
    if (ret) {
        pr_err("error: p1 and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memcpy(dec2, pp, sizeof(pp));
    memset(enc2, 0, sizeof(pp));
    sg_init_one(src, dec2, sizeof(pp));
    sg_init_one(dst, enc2, sizeof(pp));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), iv);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(cp, enc2, sizeof(cp));
    if (ret) {
        pr_err("error: cp and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(pp));
    sg_init_one(src, enc2, sizeof(pp));
    sg_init_one(dst, dec2, sizeof(pp));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), iv);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(pp, dec2, sizeof(pp));
    if (ret) {
        pr_err("error: pp and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }
test_xts_end:
    if (enc2)
        XFREE(enc2, NULL, DYNAMIC_TYPE_AES);
    if (dec2)
        XFREE(dec2, NULL, DYNAMIC_TYPE_AES);
    if (src)
        XFREE(src, NULL, DYNAMIC_TYPE_AES);
    if (dst)
        XFREE(dst, NULL, DYNAMIC_TYPE_AES);
    if (req)
        skcipher_request_free(req);
    if (tfm)
        crypto_free_skcipher(tfm);

out:
    if (large_input)
        XFREE(large_input, NULL, DYNAMIC_TYPE_TMP_BUFFER);
    if (aes_inited)
        wc_AesXtsFree(aes);
    if (buf)
        XFREE(buf, NULL, DYNAMIC_TYPE_AES);
    if (cipher)
        XFREE(cipher, NULL, DYNAMIC_TYPE_AES);
    if (aes)
        XFREE(aes, NULL, DYNAMIC_TYPE_AES);

#undef AES_XTS_128_TEST_BUF_SIZ

    return ret;
}
#endif /* WOLFSSL_AES_128 */
#ifdef WOLFSSL_AES_256
static int aes_xts_256_test(void)
{
    XtsAes *aes = NULL;
    int aes_inited = 0;
    int ret = 0;
#define AES_XTS_256_TEST_BUF_SIZ (AES_BLOCK_SIZE * 3)
    unsigned char *buf = NULL;
    unsigned char *cipher = NULL;
    u8 *enc2 = NULL;
    u8 *dec2 = NULL;
    struct scatterlist *src = NULL;
    struct scatterlist *dst = NULL;
    struct crypto_skcipher *tfm = NULL;
    struct skcipher_request *req = NULL;
    u8 iv[AES_BLOCK_SIZE];

    /* 256 key tests */
    static const unsigned char k1[] = {
        0x1e, 0xa6, 0x61, 0xc5, 0x8d, 0x94, 0x3a, 0x0e,
        0x48, 0x01, 0xe4, 0x2f, 0x4b, 0x09, 0x47, 0x14,
        0x9e, 0x7f, 0x9f, 0x8e, 0x3e, 0x68, 0xd0, 0xc7,
        0x50, 0x52, 0x10, 0xbd, 0x31, 0x1a, 0x0e, 0x7c,
        0xd6, 0xe1, 0x3f, 0xfd, 0xf2, 0x41, 0x8d, 0x8d,
        0x19, 0x11, 0xc0, 0x04, 0xcd, 0xa5, 0x8d, 0xa3,
        0xd6, 0x19, 0xb7, 0xe2, 0xb9, 0x14, 0x1e, 0x58,
        0x31, 0x8e, 0xea, 0x39, 0x2c, 0xf4, 0x1b, 0x08
    };

    static const unsigned char i1[] = {
        0xad, 0xf8, 0xd9, 0x26, 0x27, 0x46, 0x4a, 0xd2,
        0xf0, 0x42, 0x8e, 0x84, 0xa9, 0xf8, 0x75, 0x64
    };

    static const unsigned char p1[] = {
        0x2e, 0xed, 0xea, 0x52, 0xcd, 0x82, 0x15, 0xe1,
        0xac, 0xc6, 0x47, 0xe8, 0x10, 0xbb, 0xc3, 0x64,
        0x2e, 0x87, 0x28, 0x7f, 0x8d, 0x2e, 0x57, 0xe3,
        0x6c, 0x0a, 0x24, 0xfb, 0xc1, 0x2a, 0x20, 0x2e
    };

    static const unsigned char c1[] = {
        0xcb, 0xaa, 0xd0, 0xe2, 0xf6, 0xce, 0xa3, 0xf5,
        0x0b, 0x37, 0xf9, 0x34, 0xd4, 0x6a, 0x9b, 0x13,
        0x0b, 0x9d, 0x54, 0xf0, 0x7e, 0x34, 0xf3, 0x6a,
        0xf7, 0x93, 0xe8, 0x6f, 0x73, 0xc6, 0xd7, 0xdb
    };

    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char pp[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };

    static const unsigned char cp[] = {
        0x65, 0x5e, 0x1d, 0x37, 0x4a, 0x91, 0xe7, 0x6c,
        0x4f, 0x83, 0x92, 0xbc, 0x5a, 0x10, 0x55, 0x27,
        0x61, 0x0e, 0x5a, 0xde, 0xca, 0xc5, 0x12, 0xd8
    };

    static const unsigned char k2[] = {
        0xad, 0x50, 0x4b, 0x85, 0xd7, 0x51, 0xbf, 0xba,
        0x69, 0x13, 0xb4, 0xcc, 0x79, 0xb6, 0x5a, 0x62,
        0xf7, 0xf3, 0x9d, 0x36, 0x0f, 0x35, 0xb5, 0xec,
        0x4a, 0x7e, 0x95, 0xbd, 0x9b, 0xa5, 0xf2, 0xec,
        0xc1, 0xd7, 0x7e, 0xa3, 0xc3, 0x74, 0xbd, 0x4b,
        0x13, 0x1b, 0x07, 0x83, 0x87, 0xdd, 0x55, 0x5a,
        0xb5, 0xb0, 0xc7, 0xe5, 0x2d, 0xb5, 0x06, 0x12,
        0xd2, 0xb5, 0x3a, 0xcb, 0x47, 0x8a, 0x53, 0xb4
    };

    static const unsigned char i2[] = {
        0xe6, 0x42, 0x19, 0xed, 0xe0, 0xe1, 0xc2, 0xa0,
        0x0e, 0xf5, 0x58, 0x6a, 0xc4, 0x9b, 0xeb, 0x6f
    };

    static const unsigned char p2[] = {
        0x24, 0xcb, 0x76, 0x22, 0x55, 0xb5, 0xa8, 0x00,
        0xf4, 0x6e, 0x80, 0x60, 0x56, 0x9e, 0x05, 0x53,
        0xbc, 0xfe, 0x86, 0x55, 0x3b, 0xca, 0xd5, 0x89,
        0xc7, 0x54, 0x1a, 0x73, 0xac, 0xc3, 0x9a, 0xbd,
        0x53, 0xc4, 0x07, 0x76, 0xd8, 0xe8, 0x22, 0x61,
        0x9e, 0xa9, 0xad, 0x77, 0xa0, 0x13, 0x4c, 0xfc
    };

    static const unsigned char c2[] = {
        0xa3, 0xc6, 0xf3, 0xf3, 0x82, 0x79, 0x5b, 0x10,
        0x87, 0xd7, 0x02, 0x50, 0xdb, 0x2c, 0xd3, 0xb1,
        0xa1, 0x62, 0xa8, 0xb6, 0xdc, 0x12, 0x60, 0x61,
        0xc1, 0x0a, 0x84, 0xa5, 0x85, 0x3f, 0x3a, 0x89,
        0xe6, 0x6c, 0xdb, 0xb7, 0x9a, 0xb4, 0x28, 0x9b,
        0xc3, 0xea, 0xd8, 0x10, 0xe9, 0xc0, 0xaf, 0x92
    };
    if ((aes = (XtsAes *)XMALLOC(sizeof(*aes), NULL, DYNAMIC_TYPE_AES))
        == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((buf = (unsigned char *)XMALLOC(AES_XTS_256_TEST_BUF_SIZ, NULL,
                                        DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }
    if ((cipher = (unsigned char *)XMALLOC(AES_XTS_256_TEST_BUF_SIZ, NULL,
                                           DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    ret = wc_AesXtsInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;
    else
        aes_inited = 1;
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* partial block encryption test */
    XMEMSET(cipher, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;

    /* partial block decrypt test */
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* NIST decrypt test vector */
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
    /* now the kernel crypto part */
    enc2 = XMALLOC(sizeof(p1), NULL, DYNAMIC_TYPE_AES);
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dec2 = XMALLOC(sizeof(p1), NULL, DYNAMIC_TYPE_AES);
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    src = XMALLOC(sizeof(*src) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! src) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dst = XMALLOC(sizeof(*dst) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! dst) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }
    tfm = crypto_alloc_skcipher(WOLFKM_AESXTS_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        ret = PTR_ERR(tfm);
        pr_err("error: allocating AES skcipher algorithm %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESXTS_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESXTS_NAME, driver_name, WOLFKM_AESXTS_DRIVER);
            ret = -ENOENT;
            goto test_xts_end;
        }
    }
#endif

    ret = crypto_skcipher_ivsize(tfm);
    if (ret != sizeof(iv)) {
        pr_err("error: AES skcipher algorithm %s crypto_skcipher_ivsize()"
               " returned %d but expected %d\n",
               WOLFKM_AESXTS_DRIVER, ret, (int)sizeof(iv));
        ret = -EINVAL;
        goto test_xts_end;
    }

    ret = crypto_skcipher_setkey(tfm, k1, sizeof(k1));
    if (ret) {
        pr_err("error: crypto_skcipher_setkey for %s returned: %d\n",
               WOLFKM_AESXTS_NAME, ret);
        goto test_xts_end;
    }

    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (IS_ERR(req)) {
        ret = PTR_ERR(req);
        pr_err("error: allocating AES skcipher request %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }
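    /* As in the 128-bit test: round-trip one full block through the
     * registered skcipher, then a partial (sizeof(pp)) length to cover the
     * ciphertext-stealing path.
     */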
    memcpy(dec2, p1, sizeof(p1));
    memset(enc2, 0, sizeof(p1));
    sg_init_one(src, dec2, sizeof(p1));
    sg_init_one(dst, enc2, sizeof(p1));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), iv);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(c1, enc2, sizeof(c1));
    if (ret) {
        pr_err("error: c1 and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(p1));
    sg_init_one(src, enc2, sizeof(p1));
    sg_init_one(dst, dec2, sizeof(p1));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), iv);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(p1, dec2, sizeof(p1));
    if (ret) {
        pr_err("error: p1 and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memcpy(dec2, pp, sizeof(pp));
    memset(enc2, 0, sizeof(pp));
    sg_init_one(src, dec2, sizeof(pp));
    sg_init_one(dst, enc2, sizeof(pp));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), iv);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(cp, enc2, sizeof(cp));
    if (ret) {
        pr_err("error: cp and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(pp));
    sg_init_one(src, enc2, sizeof(pp));
    sg_init_one(dst, dec2, sizeof(pp));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), iv);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(pp, dec2, sizeof(pp));
    if (ret) {
        pr_err("error: pp and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }
test_xts_end:
    if (enc2)
        XFREE(enc2, NULL, DYNAMIC_TYPE_AES);
    if (dec2)
        XFREE(dec2, NULL, DYNAMIC_TYPE_AES);
    if (src)
        XFREE(src, NULL, DYNAMIC_TYPE_AES);
    if (dst)
        XFREE(dst, NULL, DYNAMIC_TYPE_AES);
    if (req)
        skcipher_request_free(req);
    if (tfm)
        crypto_free_skcipher(tfm);

out:
    if (aes_inited)
        wc_AesXtsFree(aes);
    if (buf)
        XFREE(buf, NULL, DYNAMIC_TYPE_AES);
    if (cipher)
        XFREE(cipher, NULL, DYNAMIC_TYPE_AES);
    if (aes)
        XFREE(aes, NULL, DYNAMIC_TYPE_AES);

#undef AES_XTS_256_TEST_BUF_SIZ

    return ret;
}
#endif /* WOLFSSL_AES_256 */
static int linuxkm_test_aesxts(void) {
    int ret;

#ifdef WOLFSSL_AES_128
    ret = aes_xts_128_test();
    if (ret != 0) {
        pr_err("aes_xts_128_test() failed with retval %d.\n", ret);
        goto out;
    }
#endif
#ifdef WOLFSSL_AES_256
    ret = aes_xts_256_test();
    if (ret != 0) {
        pr_err("aes_xts_256_test() failed with retval %d.\n", ret);
        goto out;
    }
#endif

out:
    return ret;
}

#endif /* WOLFSSL_AES_XTS &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESXTS)
        */

#endif /* !NO_AES */
static int linuxkm_lkcapi_register(void)
{
    int ret = 0;
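    /* Register an algorithm with the kernel crypto API, then immediately run
     * its known-answer self-test; bail out on the first failure.
     */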
#define REGISTER_ALG(alg, installer, tester) do {                      \
        if (alg ## _loaded) {                                          \
            pr_err("ERROR: %s is already registered.\n",               \
                   (alg).base.cra_driver_name);                        \
            return -EEXIST;                                            \
        }                                                              \
                                                                       \
        ret = (installer)(&(alg));                                     \
                                                                       \
        if (ret) {                                                     \
            pr_err("ERROR: " #installer " for %s failed "              \
                   "with return code %d.\n",                           \
                   (alg).base.cra_driver_name, ret);                   \
            return ret;                                                \
        }                                                              \
                                                                       \
        alg ## _loaded = 1;                                            \
                                                                       \
        ret = (tester());                                              \
                                                                       \
        if (ret) {                                                     \
            pr_err("ERROR: self-test for %s failed "                   \
                   "with return code %d.\n",                           \
                   (alg).base.cra_driver_name, ret);                   \
            return ret;                                                \
        }                                                              \
                                                                       \
        pr_info("%s self-test OK -- "                                  \
                "registered for %s with priority %d.\n",               \
                (alg).base.cra_driver_name,                            \
                (alg).base.cra_name,                                   \
                (alg).base.cra_priority);                              \
    } while (0)
#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
    REGISTER_ALG(cbcAesAlg, crypto_register_skcipher, linuxkm_test_aescbc);
#endif

#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
    REGISTER_ALG(cfbAesAlg, crypto_register_skcipher, linuxkm_test_aescfb);
#endif

#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
    REGISTER_ALG(gcmAesAead, crypto_register_aead, linuxkm_test_aesgcm);
#endif

#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
    REGISTER_ALG(xtsAesAlg, crypto_register_skcipher, linuxkm_test_aesxts);
#endif

#undef REGISTER_ALG

    return 0;
}
static void linuxkm_lkcapi_unregister(void)
{
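    /* Unregister an algorithm only if its _loaded flag shows it was
     * successfully registered, then clear the flag.
     */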
#define UNREGISTER_ALG(alg, uninstaller) do {                          \
        if (alg ## _loaded) {                                          \
            (uninstaller)(&(alg));                                     \
            alg ## _loaded = 0;                                        \
        }                                                              \
    } while (0)

#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
    UNREGISTER_ALG(cbcAesAlg, crypto_unregister_skcipher);
#endif
#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
    UNREGISTER_ALG(cfbAesAlg, crypto_unregister_skcipher);
#endif
#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
    UNREGISTER_ALG(gcmAesAead, crypto_unregister_aead);
#endif
#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
    UNREGISTER_ALG(xtsAesAlg, crypto_unregister_skcipher);
#endif

#undef UNREGISTER_ALG
}