  1. /* lkcapi_glue.c -- glue logic to register wolfCrypt implementations with
  2. * the Linux Kernel Cryptosystem
  3. *
  4. * Copyright (C) 2006-2024 wolfSSL Inc.
  5. *
  6. * This file is part of wolfSSL.
  7. *
  8. * wolfSSL is free software; you can redistribute it and/or modify
  9. * it under the terms of the GNU General Public License as published by
  10. * the Free Software Foundation; either version 2 of the License, or
  11. * (at your option) any later version.
  12. *
  13. * wolfSSL is distributed in the hope that it will be useful,
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16. * GNU General Public License for more details.
  17. *
  18. * You should have received a copy of the GNU General Public License
  19. * along with this program; if not, write to the Free Software
  20. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
  21. */
  22. #ifndef LINUXKM_LKCAPI_REGISTER
  23. #error lkcapi_glue.c included in non-LINUXKM_LKCAPI_REGISTER project.
  24. #endif
  25. #ifndef WOLFSSL_LINUXKM_LKCAPI_PRIORITY
  26. /* Larger number means higher priority. The highest in-tree priority is 4001,
  27. * in the Cavium driver.
  28. */
  29. #define WOLFSSL_LINUXKM_LKCAPI_PRIORITY 10000
  30. #endif
  31. #ifndef NO_AES
  32. /* note the FIPS code will be returned on failure even in non-FIPS builds. */
  33. #define LINUXKM_LKCAPI_AES_KAT_MISMATCH_E AES_KAT_FIPS_E
  34. #define LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E AESGCM_KAT_FIPS_E
  35. #define WOLFKM_AESCBC_NAME "cbc(aes)"
  36. #define WOLFKM_AESCFB_NAME "cfb(aes)"
  37. #define WOLFKM_AESGCM_NAME "gcm(aes)"
  38. #define WOLFKM_AESXTS_NAME "xts(aes)"
  39. #ifdef WOLFSSL_AESNI
  40. #define WOLFKM_DRIVER_ISA_EXT "-aesni"
  41. #else
  42. #define WOLFKM_DRIVER_ISA_EXT ""
  43. #endif
  44. #ifdef HAVE_FIPS
  45. #ifndef HAVE_FIPS_VERSION
  46. #define WOLFKM_DRIVER_FIPS "-fips-140"
  47. #elif HAVE_FIPS_VERSION >= 5
  48. #define WOLFKM_DRIVER_FIPS "-fips-140-3"
  49. #elif HAVE_FIPS_VERSION == 2
  50. #define WOLFKM_DRIVER_FIPS "-fips-140-2"
  51. #else
  52. #define WOLFKM_DRIVER_FIPS "-fips-140"
  53. #endif
  54. #else
  55. #define WOLFKM_DRIVER_FIPS ""
  56. #endif
  57. #define WOLFKM_DRIVER_SUFFIX \
  58. WOLFKM_DRIVER_ISA_EXT WOLFKM_DRIVER_FIPS "-wolfcrypt"
  59. #define WOLFKM_AESCBC_DRIVER ("cbc-aes" WOLFKM_DRIVER_SUFFIX)
  60. #define WOLFKM_AESCFB_DRIVER ("cfb-aes" WOLFKM_DRIVER_SUFFIX)
  61. #define WOLFKM_AESGCM_DRIVER ("gcm-aes" WOLFKM_DRIVER_SUFFIX)
  62. #define WOLFKM_AESXTS_DRIVER ("xts-aes" WOLFKM_DRIVER_SUFFIX)
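/* Note (illustrative expansion, assuming WOLFSSL_AESNI is set and FIPS is
 * disabled): WOLFKM_AESCBC_DRIVER expands to "cbc-aes-aesni-wolfcrypt", while
 * the cra_name stays the generic "cbc(aes)" so lookups resolve to this driver
 * by priority.
 */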
  63. #if defined(HAVE_AES_CBC) && \
  64. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  65. defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
  66. static int linuxkm_test_aescbc(void);
  67. #endif
  68. #if defined(WOLFSSL_AES_CFB) && \
  69. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  70. defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
  71. static int linuxkm_test_aescfb(void);
  72. #endif
  73. #if defined(HAVE_AESGCM) && \
  74. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  75. defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
  76. static int linuxkm_test_aesgcm(void);
  77. #endif
  78. #if defined(WOLFSSL_AES_XTS) && \
  79. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  80. defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
  81. static int linuxkm_test_aesxts(void);
  82. #endif
  83. /* km_AesX(): wrappers to wolfcrypt wc_AesX functions and
  84. * structures. */
  85. #include <wolfssl/wolfcrypt/aes.h>
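/* Per-tfm context. Separate encrypt and decrypt Aes objects are kept because
 * wc_AesSetKey() expands the key schedule for one direction at a time; CBC
 * needs both, while CFB and GCM run entirely off the encryption schedule (see
 * the need_decryption argument to km_AesInitCommon()).
 */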
  86. struct km_AesCtx {
  87. Aes *aes_encrypt; /* allocated in km_AesInitCommon() to assure
  88. * alignment, needed for AESNI.
  89. */
  90. Aes *aes_decrypt; /* same. */
  91. };
  92. #if defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  93. defined(LINUXKM_LKCAPI_REGISTER_AESCBC) || \
  94. defined(LINUXKM_LKCAPI_REGISTER_AESCFB) || \
  95. defined(LINUXKM_LKCAPI_REGISTER_AESGCM)
  96. static void km_AesExitCommon(struct km_AesCtx * ctx);
  97. static int km_AesInitCommon(
  98. struct km_AesCtx * ctx,
  99. const char * name,
  100. int need_decryption)
  101. {
  102. int err;
  103. ctx->aes_encrypt = (Aes *)malloc(sizeof(*ctx->aes_encrypt));
  104. if (! ctx->aes_encrypt) {
  105. pr_err("%s: allocation of %zu bytes for encryption key failed.\n",
  106. name, sizeof(*ctx->aes_encrypt));
  107. return MEMORY_E;
  108. }
  109. err = wc_AesInit(ctx->aes_encrypt, NULL, INVALID_DEVID);
  110. if (unlikely(err)) {
  111. pr_err("%s: wc_AesInit failed: %d\n", name, err);
  112. free(ctx->aes_encrypt);
  113. ctx->aes_encrypt = NULL;
  114. return -EINVAL;
  115. }
  116. if (! need_decryption) {
  117. ctx->aes_decrypt = NULL;
  118. return 0;
  119. }
  120. ctx->aes_decrypt = (Aes *)malloc(sizeof(*ctx->aes_decrypt));
  121. if (! ctx->aes_decrypt) {
  122. pr_err("%s: allocation of %zu bytes for decryption key failed.\n",
  123. name, sizeof(*ctx->aes_decrypt));
  124. km_AesExitCommon(ctx);
  125. return MEMORY_E;
  126. }
  127. err = wc_AesInit(ctx->aes_decrypt, NULL, INVALID_DEVID);
  128. if (unlikely(err)) {
  129. pr_err("%s: wc_AesInit failed: %d\n", name, err);
  130. free(ctx->aes_decrypt);
  131. ctx->aes_decrypt = NULL;
  132. km_AesExitCommon(ctx);
  133. return -EINVAL;
  134. }
  135. return 0;
  136. }
  137. static void km_AesExitCommon(struct km_AesCtx * ctx)
  138. {
  139. if (ctx->aes_encrypt) {
  140. wc_AesFree(ctx->aes_encrypt);
  141. free(ctx->aes_encrypt);
  142. ctx->aes_encrypt = NULL;
  143. }
  144. if (ctx->aes_decrypt) {
  145. wc_AesFree(ctx->aes_decrypt);
  146. free(ctx->aes_decrypt);
  147. ctx->aes_decrypt = NULL;
  148. }
  149. }
  150. static int km_AesSetKeyCommon(struct km_AesCtx * ctx, const u8 *in_key,
  151. unsigned int key_len, const char * name)
  152. {
  153. int err;
  154. err = wc_AesSetKey(ctx->aes_encrypt, in_key, key_len, NULL, AES_ENCRYPTION);
  155. if (unlikely(err)) {
  156. pr_err("%s: wc_AesSetKey for encryption key failed: %d\n", name, err);
  157. return -ENOKEY;
  158. }
  159. if (ctx->aes_decrypt) {
  160. err = wc_AesSetKey(ctx->aes_decrypt, in_key, key_len, NULL,
  161. AES_DECRYPTION);
  162. if (unlikely(err)) {
  163. pr_err("%s: wc_AesSetKey for decryption key failed: %d\n",
  164. name, err);
  165. return -ENOKEY;
  166. }
  167. }
  168. return 0;
  169. }
  170. #if defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  171. defined(LINUXKM_LKCAPI_REGISTER_AESCBC) || \
  172. defined(LINUXKM_LKCAPI_REGISTER_AESCFB)
  173. static void km_AesExit(struct crypto_skcipher *tfm)
  174. {
  175. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  176. km_AesExitCommon(ctx);
  177. }
  178. #endif /* LINUXKM_LKCAPI_REGISTER_ALL ||
  179. * LINUXKM_LKCAPI_REGISTER_AESCBC ||
  180. * LINUXKM_LKCAPI_REGISTER_AESCFB
  181. */
  182. #endif /* LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC ||
  183. * LINUXKM_LKCAPI_REGISTER_AESCFB || LINUXKM_LKCAPI_REGISTER_AESGCM
  184. */
  185. #if defined(HAVE_AES_CBC) && \
  186. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  187. defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
  188. static int km_AesCbcInit(struct crypto_skcipher *tfm)
  189. {
  190. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  191. return km_AesInitCommon(ctx, WOLFKM_AESCBC_DRIVER, 1);
  192. }
  193. static int km_AesCbcSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
  194. unsigned int key_len)
  195. {
  196. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  197. return km_AesSetKeyCommon(ctx, in_key, key_len, WOLFKM_AESCBC_DRIVER);
  198. }
  199. static int km_AesCbcEncrypt(struct skcipher_request *req)
  200. {
  201. struct crypto_skcipher * tfm = NULL;
  202. struct km_AesCtx * ctx = NULL;
  203. struct skcipher_walk walk;
  204. unsigned int nbytes = 0;
  205. int err = 0;
  206. tfm = crypto_skcipher_reqtfm(req);
  207. ctx = crypto_skcipher_ctx(tfm);
  208. err = skcipher_walk_virt(&walk, req, false);
  209. if (unlikely(err)) {
  210. pr_err("%s: skcipher_walk_virt failed: %d\n",
  211. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  212. return err;
  213. }
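/* Process the request one contiguous span at a time: reload the chaining IV
 * that the kernel tracks in walk.iv, run wolfCrypt over the mapped source and
 * destination addresses, then report the span fully consumed
 * (walk.nbytes - nbytes == 0).
 */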
  214. while ((nbytes = walk.nbytes) != 0) {
  215. err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
  216. if (unlikely(err)) {
  217. pr_err("%s: wc_AesSetIV failed: %d\n",
  218. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  219. return -EINVAL;
  220. }
  221. err = wc_AesCbcEncrypt(ctx->aes_encrypt, walk.dst.virt.addr,
  222. walk.src.virt.addr, nbytes);
  223. if (unlikely(err)) {
  224. pr_err("%s: wc_AesCbcEncrypt failed: %d\n",
  225. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  226. return -EINVAL;
  227. }
  228. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  229. if (unlikely(err)) {
  230. pr_err("%s: skcipher_walk_done failed: %d\n",
  231. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  232. return err;
  233. }
  234. }
  235. return err;
  236. }
  237. static int km_AesCbcDecrypt(struct skcipher_request *req)
  238. {
  239. struct crypto_skcipher * tfm = NULL;
  240. struct km_AesCtx * ctx = NULL;
  241. struct skcipher_walk walk;
  242. unsigned int nbytes = 0;
  243. int err = 0;
  244. tfm = crypto_skcipher_reqtfm(req);
  245. ctx = crypto_skcipher_ctx(tfm);
  246. err = skcipher_walk_virt(&walk, req, false);
  247. if (unlikely(err)) {
  248. pr_err("%s: skcipher_walk_virt failed: %d\n",
  249. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  250. return err;
  251. }
  252. while ((nbytes = walk.nbytes) != 0) {
  253. err = wc_AesSetIV(ctx->aes_decrypt, walk.iv);
  254. if (unlikely(err)) {
  255. pr_err("%s: wc_AesSetIV failed: %d\n",
  256. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  257. return -EINVAL;
  258. }
  259. err = wc_AesCbcDecrypt(ctx->aes_decrypt, walk.dst.virt.addr,
  260. walk.src.virt.addr, nbytes);
  261. if (unlikely(err)) {
  262. pr_err("%s: wc_AesCbcDecrypt failed: %d\n",
  263. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  264. return -EINVAL;
  265. }
  266. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  267. if (unlikely(err)) {
  268. pr_err("%s: skcipher_walk_done failed: %d\n",
  269. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  270. return err;
  271. }
  272. }
  273. return err;
  274. }
  275. static struct skcipher_alg cbcAesAlg = {
  276. .base.cra_name = WOLFKM_AESCBC_NAME,
  277. .base.cra_driver_name = WOLFKM_AESCBC_DRIVER,
  278. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  279. .base.cra_blocksize = AES_BLOCK_SIZE,
  280. .base.cra_ctxsize = sizeof(struct km_AesCtx),
  281. .base.cra_module = THIS_MODULE,
  282. .init = km_AesCbcInit,
  283. .exit = km_AesExit,
  284. .min_keysize = AES_128_KEY_SIZE,
  285. .max_keysize = AES_256_KEY_SIZE,
  286. .ivsize = AES_BLOCK_SIZE,
  287. .setkey = km_AesCbcSetKey,
  288. .encrypt = km_AesCbcEncrypt,
  289. .decrypt = km_AesCbcDecrypt,
  290. };
  291. static int cbcAesAlg_loaded = 0;
  292. #endif /* HAVE_AES_CBC &&
  293. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC)
  294. */
  295. #if defined(WOLFSSL_AES_CFB) && \
  296. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  297. defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
  298. static int km_AesCfbInit(struct crypto_skcipher *tfm)
  299. {
  300. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  301. return km_AesInitCommon(ctx, WOLFKM_AESCFB_DRIVER, 0);
  302. }
  303. static int km_AesCfbSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
  304. unsigned int key_len)
  305. {
  306. struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
  307. return km_AesSetKeyCommon(ctx, in_key, key_len, WOLFKM_AESCFB_DRIVER);
  308. }
  309. static int km_AesCfbEncrypt(struct skcipher_request *req)
  310. {
  311. struct crypto_skcipher * tfm = NULL;
  312. struct km_AesCtx * ctx = NULL;
  313. struct skcipher_walk walk;
  314. unsigned int nbytes = 0;
  315. int err = 0;
  316. tfm = crypto_skcipher_reqtfm(req);
  317. ctx = crypto_skcipher_ctx(tfm);
  318. err = skcipher_walk_virt(&walk, req, false);
  319. if (unlikely(err)) {
  320. pr_err("%s: skcipher_walk_virt failed: %d\n",
  321. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  322. return err;
  323. }
  324. while ((nbytes = walk.nbytes) != 0) {
  325. err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
  326. if (unlikely(err)) {
  327. pr_err("%s: wc_AesSetIV failed: %d\n",
  328. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  329. return -EINVAL;
  330. }
  331. err = wc_AesCfbEncrypt(ctx->aes_encrypt, walk.dst.virt.addr,
  332. walk.src.virt.addr, nbytes);
  333. if (unlikely(err)) {
  334. pr_err("%s: wc_AesCfbEncrypt failed: %d\n",
  335. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  336. return -EINVAL;
  337. }
  338. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  339. if (unlikely(err)) {
  340. pr_err("%s: skcipher_walk_done failed: %d\n",
  341. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  342. return err;
  343. }
  344. }
  345. return err;
  346. }
  347. static int km_AesCfbDecrypt(struct skcipher_request *req)
  348. {
  349. struct crypto_skcipher * tfm = NULL;
  350. struct km_AesCtx * ctx = NULL;
  351. struct skcipher_walk walk;
  352. unsigned int nbytes = 0;
  353. int err = 0;
  354. tfm = crypto_skcipher_reqtfm(req);
  355. ctx = crypto_skcipher_ctx(tfm);
  356. err = skcipher_walk_virt(&walk, req, false);
  357. if (unlikely(err)) {
  358. pr_err("%s: skcipher_walk_virt failed: %d\n",
  359. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  360. return err;
  361. }
  362. while ((nbytes = walk.nbytes) != 0) {
  363. err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
  364. if (unlikely(err)) {
  365. pr_err("%s: wc_AesSetIV failed: %d\n",
  366. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  367. return -EINVAL;
  368. }
  369. err = wc_AesCfbDecrypt(ctx->aes_encrypt, walk.dst.virt.addr,
  370. walk.src.virt.addr, nbytes);
  371. if (unlikely(err)) {
  372. pr_err("%s: wc_AesCfbDecrypt failed: %d\n",
  373. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  374. return -EINVAL;
  375. }
  376. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  377. if (unlikely(err)) {
  378. pr_err("%s: skcipher_walk_done failed: %d\n",
  379. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  380. return err;
  381. }
  382. }
  383. return err;
  384. }
  385. static struct skcipher_alg cfbAesAlg = {
  386. .base.cra_name = WOLFKM_AESCFB_NAME,
  387. .base.cra_driver_name = WOLFKM_AESCFB_DRIVER,
  388. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  389. .base.cra_blocksize = AES_BLOCK_SIZE,
  390. .base.cra_ctxsize = sizeof(struct km_AesCtx),
  391. .base.cra_module = THIS_MODULE,
  392. .init = km_AesCfbInit,
  393. .exit = km_AesExit,
  394. .min_keysize = AES_128_KEY_SIZE,
  395. .max_keysize = AES_256_KEY_SIZE,
  396. .ivsize = AES_BLOCK_SIZE,
  397. .setkey = km_AesCfbSetKey,
  398. .encrypt = km_AesCfbEncrypt,
  399. .decrypt = km_AesCfbDecrypt,
  400. };
  401. static int cfbAesAlg_loaded = 0;
  402. #endif /* WOLFSSL_AES_CFB &&
  403. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCFB)
  404. */
  405. #if defined(HAVE_AESGCM) && \
  406. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  407. defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
  408. #ifndef WOLFSSL_AESGCM_STREAM
  409. #error LKCAPI registration of AES-GCM requires WOLFSSL_AESGCM_STREAM (--enable-aesgcm-stream).
  410. #endif
  411. static int km_AesGcmInit(struct crypto_aead * tfm)
  412. {
  413. struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
  414. return km_AesInitCommon(ctx, WOLFKM_AESGCM_DRIVER, 0);
  415. }
  416. static void km_AesGcmExit(struct crypto_aead * tfm)
  417. {
  418. struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
  419. km_AesExitCommon(ctx);
  420. }
  421. static int km_AesGcmSetKey(struct crypto_aead *tfm, const u8 *in_key,
  422. unsigned int key_len)
  423. {
  424. int err;
  425. struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
  426. err = wc_AesGcmSetKey(ctx->aes_encrypt, in_key, key_len);
  427. if (unlikely(err)) {
  428. pr_err("%s: wc_AesGcmSetKey failed: %d\n",
  429. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  430. return -ENOKEY;
  431. }
  432. return 0;
  433. }
  434. static int km_AesGcmSetAuthsize(struct crypto_aead *tfm, unsigned int authsize)
  435. {
  436. (void)tfm;
  437. if (authsize > AES_BLOCK_SIZE ||
  438. authsize < WOLFSSL_MIN_AUTH_TAG_SZ) {
  439. pr_err("%s: invalid authsize: %d\n",
  440. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), authsize);
  441. return -EINVAL;
  442. }
  443. return 0;
  444. }
  445. /*
  446. * aead ciphers receive data in scatterlists in the following order:
  447. * encrypt
  448. * req->src: aad||plaintext
  449. * req->dst: aad||ciphertext||tag
  450. * decrypt
  451. * req->src: aad||ciphertext||tag
  452. * req->dst: aad||plaintext, return 0 or -EBADMSG
  453. */
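/* For example (hypothetical sizes): with assoclen == 20, cryptlen == 24, and a
 * 16-byte tag, an encrypt request presents 20 + 24 = 44 bytes in req->src and
 * expects the aad||ciphertext||tag layout of 20 + 24 + 16 = 60 bytes in
 * req->dst; the tag is appended at offset assoclen + cryptlen by
 * scatterwalk_map_and_copy() below.
 */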
  454. static int km_AesGcmEncrypt(struct aead_request *req)
  455. {
  456. struct crypto_aead * tfm = NULL;
  457. struct km_AesCtx * ctx = NULL;
  458. struct skcipher_walk walk;
  459. struct scatter_walk assocSgWalk;
  460. unsigned int nbytes = 0;
  461. u8 authTag[AES_BLOCK_SIZE];
  462. int err = 0;
  463. unsigned int assocLeft = 0;
  464. unsigned int cryptLeft = 0;
  465. u8 * assoc = NULL;
  466. tfm = crypto_aead_reqtfm(req);
  467. ctx = crypto_aead_ctx(tfm);
  468. assocLeft = req->assoclen;
  469. cryptLeft = req->cryptlen;
  470. scatterwalk_start(&assocSgWalk, req->src);
  471. err = skcipher_walk_aead_encrypt(&walk, req, false);
  472. if (unlikely(err)) {
  473. pr_err("%s: skcipher_walk_aead_encrypt failed: %d\n",
  474. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  475. return err;
  476. }
  477. err = wc_AesGcmInit(ctx->aes_encrypt, NULL /*key*/, 0 /*keylen*/, walk.iv,
  478. AES_BLOCK_SIZE);
  479. if (unlikely(err)) {
  480. pr_err("%s: wc_AesGcmInit failed: %d\n",
  481. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  482. return -EINVAL;
  483. }
  484. assoc = scatterwalk_map(&assocSgWalk);
  485. if (unlikely(IS_ERR(assoc))) {
  486. pr_err("%s: scatterwalk_map failed: %ld\n",
  487. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)),
  488. PTR_ERR(assoc));
  489. return -EINVAL;
  490. }
  491. err = wc_AesGcmEncryptUpdate(ctx->aes_encrypt, NULL, NULL, 0,
  492. assoc, assocLeft);
  493. assocLeft -= assocLeft;
  494. scatterwalk_unmap(assoc);
  495. assoc = NULL;
  496. if (unlikely(err)) {
  497. pr_err("%s: wc_AesGcmEncryptUpdate failed: %d\n",
  498. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  499. return -EINVAL;
  500. }
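/* The AAD has already been absorbed above; now stream the payload through
 * wc_AesGcmEncryptUpdate() span by span, with cryptLeft tracking how much
 * payload remains and any unprocessed bytes handed back to
 * skcipher_walk_done().
 */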
  501. while ((nbytes = walk.nbytes) != 0) {
  502. int n = nbytes;
  503. if (likely(cryptLeft && nbytes)) {
  504. n = cryptLeft < nbytes ? cryptLeft : nbytes;
  505. err = wc_AesGcmEncryptUpdate(
  506. ctx->aes_encrypt,
  507. walk.dst.virt.addr,
  508. walk.src.virt.addr,
  509. cryptLeft,
  510. NULL, 0);
  511. nbytes -= n;
  512. cryptLeft -= n;
  513. }
  514. if (unlikely(err)) {
  515. pr_err("%s: wc_AesGcmEncryptUpdate failed: %d\n",
  516. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  517. return -EINVAL;
  518. }
  519. err = skcipher_walk_done(&walk, nbytes);
  520. if (unlikely(err)) {
  521. pr_err("%s: skcipher_walk_done failed: %d\n",
  522. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  523. return err;
  524. }
  525. }
  526. err = wc_AesGcmEncryptFinal(ctx->aes_encrypt, authTag, tfm->authsize);
  527. if (unlikely(err)) {
  528. pr_err("%s: wc_AesGcmEncryptFinal failed with return code %d\n",
  529. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  530. return -EINVAL;
  531. }
  532. /* Now copy the auth tag into request scatterlist. */
  533. scatterwalk_map_and_copy(authTag, req->dst,
  534. req->assoclen + req->cryptlen,
  535. tfm->authsize, 1);
  536. return err;
  537. }
  538. static int km_AesGcmDecrypt(struct aead_request *req)
  539. {
  540. struct crypto_aead * tfm = NULL;
  541. struct km_AesCtx * ctx = NULL;
  542. struct skcipher_walk walk;
  543. struct scatter_walk assocSgWalk;
  544. unsigned int nbytes = 0;
  545. u8 origAuthTag[AES_BLOCK_SIZE];
  546. int err = 0;
  547. unsigned int assocLeft = 0;
  548. unsigned int cryptLeft = 0;
  549. u8 * assoc = NULL;
  550. tfm = crypto_aead_reqtfm(req);
  551. ctx = crypto_aead_ctx(tfm);
  552. assocLeft = req->assoclen;
  553. cryptLeft = req->cryptlen - tfm->authsize;
  554. /* Copy out original auth tag from req->src. */
  555. scatterwalk_map_and_copy(origAuthTag, req->src,
  556. req->assoclen + req->cryptlen - tfm->authsize,
  557. tfm->authsize, 0);
  558. scatterwalk_start(&assocSgWalk, req->src);
  559. err = skcipher_walk_aead_decrypt(&walk, req, false);
  560. if (unlikely(err)) {
  561. pr_err("%s: skcipher_walk_aead_decrypt failed: %d\n",
  562. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  563. return err;
  564. }
  565. err = wc_AesGcmInit(ctx->aes_encrypt, NULL /*key*/, 0 /*keylen*/, walk.iv,
  566. AES_BLOCK_SIZE);
  567. if (unlikely(err)) {
  568. pr_err("%s: wc_AesGcmInit failed: %d\n",
  569. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  570. return -EINVAL;
  571. }
  572. assoc = scatterwalk_map(&assocSgWalk);
  573. if (unlikely(IS_ERR(assoc))) {
  574. pr_err("%s: scatterwalk_map failed: %ld\n",
  575. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)),
  576. PTR_ERR(assoc));
  577. return -EINVAL;
  578. }
  579. err = wc_AesGcmDecryptUpdate(ctx->aes_encrypt, NULL, NULL, 0,
  580. assoc, assocLeft);
  581. assocLeft -= assocLeft;
  582. scatterwalk_unmap(assoc);
  583. assoc = NULL;
  584. if (unlikely(err)) {
  585. pr_err("%s: wc_AesGcmDecryptUpdate failed: %d\n",
  586. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  587. return -EINVAL;
  588. }
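/* Stream the ciphertext payload through wc_AesGcmDecryptUpdate(); the tag
 * copied out of req->src above is verified by wc_AesGcmDecryptFinal() after
 * the walk completes.
 */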
  589. while ((nbytes = walk.nbytes) != 0) {
  590. int n = nbytes;
  591. if (likely(cryptLeft && nbytes)) {
  592. n = cryptLeft < nbytes ? cryptLeft : nbytes;
  593. err = wc_AesGcmDecryptUpdate(
  594. ctx->aes_encrypt,
  595. walk.dst.virt.addr,
  596. walk.src.virt.addr,
  597. cryptLeft,
  598. NULL, 0);
  599. nbytes -= n;
  600. cryptLeft -= n;
  601. }
  602. if (unlikely(err)) {
  603. pr_err("%s: wc_AesGcmDecryptUpdate failed: %d\n",
  604. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  605. return -EINVAL;
  606. }
  607. err = skcipher_walk_done(&walk, nbytes);
  608. if (unlikely(err)) {
  609. pr_err("%s: skcipher_walk_done failed: %d\n",
  610. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  611. return err;
  612. }
  613. }
  614. err = wc_AesGcmDecryptFinal(ctx->aes_encrypt, origAuthTag, tfm->authsize);
  615. if (unlikely(err)) {
  616. pr_err("%s: wc_AesGcmDecryptFinal failed with return code %d\n",
  617. crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
  618. if (err == AES_GCM_AUTH_E) {
  619. return -EBADMSG;
  620. }
  621. else {
  622. return -EINVAL;
  623. }
  624. }
  625. return err;
  626. }
  627. static struct aead_alg gcmAesAead = {
  628. .base.cra_name = WOLFKM_AESGCM_NAME,
  629. .base.cra_driver_name = WOLFKM_AESGCM_DRIVER,
  630. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  631. .base.cra_blocksize = 1,
  632. .base.cra_ctxsize = sizeof(struct km_AesCtx),
  633. .base.cra_module = THIS_MODULE,
  634. .init = km_AesGcmInit,
  635. .exit = km_AesGcmExit,
  636. .setkey = km_AesGcmSetKey,
  637. .setauthsize = km_AesGcmSetAuthsize,
  638. .encrypt = km_AesGcmEncrypt,
  639. .decrypt = km_AesGcmDecrypt,
  640. .ivsize = AES_BLOCK_SIZE,
  641. .maxauthsize = AES_BLOCK_SIZE,
  642. .chunksize = AES_BLOCK_SIZE,
  643. };
  644. static int gcmAesAead_loaded = 0;
  645. #endif /* HAVE_AESGCM &&
  646. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESGCM)
  647. */
  648. #if defined(WOLFSSL_AES_XTS) && \
  649. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  650. defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
  651. struct km_AesXtsCtx {
  652. XtsAes *aesXts; /* allocated in km_AesXtsInitCommon() to assure alignment
  653. * for AESNI.
  654. */
  655. };
  656. static int km_AesXtsInitCommon(struct km_AesXtsCtx * ctx, const char * name)
  657. {
  658. int err;
  659. ctx->aesXts = (XtsAes *)malloc(sizeof(*ctx->aesXts));
  660. if (! ctx->aesXts)
  661. return -MEMORY_E;
  662. err = wc_AesXtsInit(ctx->aesXts, NULL, INVALID_DEVID);
  663. if (unlikely(err)) {
  664. pr_err("%s: wc_AesXtsInit failed: %d\n", name, err);
  665. return -EINVAL;
  666. }
  667. return 0;
  668. }
  669. static int km_AesXtsInit(struct crypto_skcipher *tfm)
  670. {
  671. struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
  672. return km_AesXtsInitCommon(ctx, WOLFKM_AESXTS_DRIVER);
  673. }
  674. static void km_AesXtsExit(struct crypto_skcipher *tfm)
  675. {
  676. struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
  677. wc_AesXtsFree(ctx->aesXts);
  678. free(ctx->aesXts);
  679. ctx->aesXts = NULL;
  680. }
  681. static int km_AesXtsSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
  682. unsigned int key_len)
  683. {
  684. int err;
  685. struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
  686. err = wc_AesXtsSetKeyNoInit(ctx->aesXts, in_key, key_len,
  687. AES_ENCRYPTION_AND_DECRYPTION);
  688. if (unlikely(err)) {
  689. pr_err("%s: wc_AesXtsSetKeyNoInit failed: %d\n",
  690. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  691. return -EINVAL;
  692. }
  693. return 0;
  694. }
  695. /* see /usr/src/linux/drivers/md/dm-crypt.c */
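/* Typical XTS callers such as dm-crypt submit sector-sized requests and carry
 * the tweak (e.g. the sector number in plain/plain64 IV modes) in the 16-byte
 * IV, which is passed straight through to wolfCrypt as walk.iv below.
 */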
  696. static int km_AesXtsEncrypt(struct skcipher_request *req)
  697. {
  698. int err = 0;
  699. struct crypto_skcipher * tfm = NULL;
  700. struct km_AesXtsCtx * ctx = NULL;
  701. struct skcipher_walk walk;
  702. unsigned int nbytes = 0;
  703. tfm = crypto_skcipher_reqtfm(req);
  704. ctx = crypto_skcipher_ctx(tfm);
  705. err = skcipher_walk_virt(&walk, req, false);
  706. if (unlikely(err)) {
  707. pr_err("%s: skcipher_walk_virt failed: %d\n",
  708. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  709. return err;
  710. }
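/* Each walk span is handed to wolfCrypt together with the full 16-byte tweak
 * in walk.iv; wc_AesXtsEncrypt() derives the per-block tweaks internally.
 */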
  711. while ((nbytes = walk.nbytes) != 0) {
  712. err = wc_AesXtsEncrypt(ctx->aesXts, walk.dst.virt.addr,
  713. walk.src.virt.addr, nbytes,
  714. walk.iv, walk.ivsize);
  715. if (unlikely(err)) {
  716. pr_err("%s: wc_AesXtsEncrypt failed: %d\n",
  717. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  718. return -EINVAL;
  719. }
  720. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  721. if (unlikely(err)) {
  722. pr_err("%s: skcipher_walk_done failed: %d\n",
  723. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  724. return err;
  725. }
  726. }
  727. return err;
  728. }
  729. static int km_AesXtsDecrypt(struct skcipher_request *req)
  730. {
  731. int err = 0;
  732. struct crypto_skcipher * tfm = NULL;
  733. struct km_AesXtsCtx * ctx = NULL;
  734. struct skcipher_walk walk;
  735. unsigned int nbytes = 0;
  736. tfm = crypto_skcipher_reqtfm(req);
  737. ctx = crypto_skcipher_ctx(tfm);
  738. err = skcipher_walk_virt(&walk, req, false);
  739. if (unlikely(err)) {
  740. pr_err("%s: skcipher_walk_virt failed: %d\n",
  741. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  742. return err;
  743. }
  744. while ((nbytes = walk.nbytes) != 0) {
  745. err = wc_AesXtsDecrypt(ctx->aesXts, walk.dst.virt.addr,
  746. walk.src.virt.addr, nbytes,
  747. walk.iv, walk.ivsize);
  748. if (unlikely(err)) {
  749. pr_err("%s: wc_AesXtsDecrypt failed: %d\n",
  750. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  751. return -EINVAL;
  752. }
  753. err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
  754. if (unlikely(err)) {
  755. pr_err("%s: skcipher_walk_done failed: %d\n",
  756. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
  757. return err;
  758. }
  759. }
  760. return err;
  761. }
  762. static struct skcipher_alg xtsAesAlg = {
  763. .base.cra_name = WOLFKM_AESXTS_NAME,
  764. .base.cra_driver_name = WOLFKM_AESXTS_DRIVER,
  765. .base.cra_priority = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
  766. .base.cra_blocksize = AES_BLOCK_SIZE,
  767. .base.cra_ctxsize = sizeof(struct km_AesXtsCtx),
  768. .base.cra_module = THIS_MODULE,
  769. .min_keysize = 2 * AES_128_KEY_SIZE,
  770. .max_keysize = 2 * AES_256_KEY_SIZE,
  771. .ivsize = AES_BLOCK_SIZE,
  772. .walksize = 2 * AES_BLOCK_SIZE,
  773. .init = km_AesXtsInit,
  774. .exit = km_AesXtsExit,
  775. .setkey = km_AesXtsSetKey,
  776. .encrypt = km_AesXtsEncrypt,
  777. .decrypt = km_AesXtsDecrypt
  778. };
  779. static int xtsAesAlg_loaded = 0;
  780. #endif /* WOLFSSL_AES_XTS &&
  781. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESXTS)
  782. */
  783. /* cipher tests, cribbed from test.c, with supplementary LKCAPI tests: */
  784. #if defined(HAVE_AES_CBC) && \
  785. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  786. defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
  787. static int linuxkm_test_aescbc(void)
  788. {
  789. int ret = 0;
  790. struct crypto_skcipher * tfm = NULL;
  791. struct skcipher_request * req = NULL;
  792. struct scatterlist src, dst;
  793. Aes aes;
  794. static const byte key32[] =
  795. {
  796. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  797. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
  798. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  799. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
  800. };
  801. static const byte p_vector[] =
  802. /* Now is the time for all good men w/o trailing 0 */
  803. {
  804. 0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
  805. 0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
  806. 0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20,
  807. 0x67,0x6f,0x6f,0x64,0x20,0x6d,0x65,0x6e
  808. };
  809. static const byte iv[] = "1234567890abcdef";
  810. static const byte c_vector[] =
  811. {
  812. 0xd7,0xd6,0x04,0x5b,0x4d,0xc4,0x90,0xdf,
  813. 0x4a,0x82,0xed,0x61,0x26,0x4e,0x23,0xb3,
  814. 0xe4,0xb5,0x85,0x30,0x29,0x4c,0x9d,0xcf,
  815. 0x73,0xc9,0x46,0xd1,0xaa,0xc8,0xcb,0x62
  816. };
  817. byte iv_copy[sizeof(iv)];
  818. byte enc[sizeof(p_vector)];
  819. byte dec[sizeof(p_vector)];
  820. u8 * enc2 = NULL;
  821. u8 * dec2 = NULL;
  822. XMEMSET(enc, 0, sizeof(enc));
  823. XMEMSET(dec, 0, sizeof(enc));
  824. ret = wc_AesInit(&aes, NULL, INVALID_DEVID);
  825. if (ret) {
  826. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  827. return ret;
  828. }
  829. ret = wc_AesSetKey(&aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
  830. if (ret) {
  831. pr_err("wolfcrypt wc_AesSetKey failed with return code %d\n", ret);
  832. return ret;
  833. }
  834. ret = wc_AesCbcEncrypt(&aes, enc, p_vector, sizeof(p_vector));
  835. if (ret) {
  836. pr_err("wolfcrypt wc_AesCbcEncrypt failed with return code %d\n", ret);
  837. return ret;
  838. }
  839. if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
  840. pr_err("wolfcrypt wc_AesCbcEncrypt KAT mismatch\n");
  841. return LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
  842. }
  843. /* Re init for decrypt and set flag. */
  844. wc_AesFree(&aes);
  845. ret = wc_AesInit(&aes, NULL, INVALID_DEVID);
  846. if (ret) {
  847. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  848. return ret;
  849. }
  850. ret = wc_AesSetKey(&aes, key32, AES_BLOCK_SIZE * 2, iv, AES_DECRYPTION);
  851. if (ret) {
  852. pr_err("wolfcrypt wc_AesSetKey failed with return code %d.\n", ret);
  853. return ret;
  854. }
  855. ret = wc_AesCbcDecrypt(&aes, dec, enc, sizeof(p_vector));
  856. if (ret) {
  857. pr_err("wolfcrypt wc_AesCbcDecrypt failed with return code %d\n", ret);
  858. return ret;
  859. }
  860. ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
  861. if (ret) {
  862. pr_err("error: p_vector and dec do not match: %d\n", ret);
  863. return ret;
  864. }
  865. /* now the kernel crypto part */
  866. enc2 = kmalloc(sizeof(p_vector), GFP_KERNEL);
  867. if (!enc2) {
  868. pr_err("error: kmalloc failed\n");
  869. goto test_cbc_end;
  870. }
  871. dec2 = kmalloc(sizeof(p_vector), GFP_KERNEL);
  872. if (!dec2) {
  873. pr_err("error: kmalloc failed\n");
  874. goto test_cbc_end;
  875. }
  876. memcpy(dec2, p_vector, sizeof(p_vector));
  877. tfm = crypto_alloc_skcipher(WOLFKM_AESCBC_NAME, 0, 0);
  878. if (IS_ERR(tfm)) {
  879. pr_err("error: allocating AES skcipher algorithm %s failed: %ld\n",
  880. WOLFKM_AESCBC_DRIVER, PTR_ERR(tfm));
  881. goto test_cbc_end;
  882. }
  883. #ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
  884. {
  885. const char *driver_name =
  886. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
  887. if (strcmp(driver_name, WOLFKM_AESCBC_DRIVER)) {
  888. pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
  889. WOLFKM_AESCBC_NAME, driver_name, WOLFKM_AESCBC_DRIVER);
  890. ret = -ENOENT;
  891. goto test_cbc_end;
  892. }
  893. }
  894. #endif
  895. ret = crypto_skcipher_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
  896. if (ret) {
  897. pr_err("error: crypto_skcipher_setkey returned: %d\n", ret);
  898. goto test_cbc_end;
  899. }
  900. req = skcipher_request_alloc(tfm, GFP_KERNEL);
  901. if (! req) {
  902. pr_err("error: allocating AES skcipher request %s failed\n",
  903. WOLFKM_AESCBC_DRIVER);
  904. goto test_cbc_end;
  905. }
  906. sg_init_one(&src, dec2, sizeof(p_vector));
  907. sg_init_one(&dst, enc2, sizeof(p_vector));
  908. XMEMCPY(iv_copy, iv, sizeof(iv));
  909. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  910. ret = crypto_skcipher_encrypt(req);
  911. if (ret) {
  912. pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
  913. goto test_cbc_end;
  914. }
  915. ret = XMEMCMP(enc, enc2, sizeof(p_vector));
  916. if (ret) {
  917. pr_err("error: enc and enc2 do not match: %d\n", ret);
  918. goto test_cbc_end;
  919. }
  920. memset(dec2, 0, sizeof(p_vector));
  921. sg_init_one(&src, enc2, sizeof(p_vector));
  922. sg_init_one(&dst, dec2, sizeof(p_vector));
  923. XMEMCPY(iv_copy, iv, sizeof(iv));
  924. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  925. ret = crypto_skcipher_decrypt(req);
  926. if (ret) {
  927. pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
  928. goto test_cbc_end;
  929. }
  930. ret = XMEMCMP(dec, dec2, sizeof(p_vector));
  931. if (ret) {
  932. pr_err("error: dec and dec2 do not match: %d\n", ret);
  933. goto test_cbc_end;
  934. }
  935. test_cbc_end:
  936. if (enc2) { kfree(enc2); enc2 = NULL; }
  937. if (dec2) { kfree(dec2); dec2 = NULL; }
  938. if (req) { skcipher_request_free(req); req = NULL; }
  939. if (tfm) { crypto_free_skcipher(tfm); tfm = NULL; }
  940. return ret;
  941. }
  942. #endif /* HAVE_AES_CBC &&
  943. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC)
  944. */
  945. #if defined(WOLFSSL_AES_CFB) && \
  946. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  947. defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
  948. static int linuxkm_test_aescfb(void)
  949. {
  950. int ret = 0;
  951. struct crypto_skcipher * tfm = NULL;
  952. struct skcipher_request * req = NULL;
  953. struct scatterlist src, dst;
  954. Aes aes;
  955. static const byte key32[] =
  956. {
  957. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  958. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
  959. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  960. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
  961. };
  962. static const byte p_vector[] =
  963. /* Now is the time for all good men w/o trailing 0 */
  964. {
  965. 0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
  966. 0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
  967. 0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20,
  968. 0x67,0x6f,0x6f,0x64,0x20,0x6d,0x65,0x6e
  969. };
  970. static const byte iv[] = "1234567890abcdef";
  971. static const byte c_vector[] =
  972. {
  973. 0x56,0x35,0x3f,0xdd,0xde,0xa6,0x15,0x87,
  974. 0x57,0xdc,0x34,0x62,0x9a,0x68,0x96,0x51,
  975. 0xc7,0x09,0xb9,0x4e,0x47,0x6b,0x24,0x72,
  976. 0x19,0x5a,0xdf,0x7e,0xba,0xa8,0x01,0xb6
  977. };
  978. byte iv_copy[sizeof(iv)];
  979. byte enc[sizeof(p_vector)];
  980. byte dec[sizeof(p_vector)];
  981. u8 * enc2 = NULL;
  982. u8 * dec2 = NULL;
  983. XMEMSET(enc, 0, sizeof(enc));
  984. XMEMSET(dec, 0, sizeof(enc));
  985. ret = wc_AesInit(&aes, NULL, INVALID_DEVID);
  986. if (ret) {
  987. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  988. return ret;
  989. }
  990. ret = wc_AesSetKey(&aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
  991. if (ret) {
  992. pr_err("wolfcrypt wc_AesSetKey failed with return code %d\n", ret);
  993. return ret;
  994. }
  995. ret = wc_AesCfbEncrypt(&aes, enc, p_vector, sizeof(p_vector));
  996. if (ret) {
  997. pr_err("wolfcrypt wc_AesCfbEncrypt failed with return code %d\n", ret);
  998. return ret;
  999. }
  1000. if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
  1001. pr_err("wolfcrypt wc_AesCfbEncrypt KAT mismatch\n");
  1002. return LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
  1003. }
  1004. /* Re init for decrypt and set flag. */
  1005. wc_AesFree(&aes);
  1006. ret = wc_AesInit(&aes, NULL, INVALID_DEVID);
  1007. if (ret) {
  1008. pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
  1009. return ret;
  1010. }
  1011. ret = wc_AesSetKey(&aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
  1012. if (ret) {
  1013. pr_err("wolfcrypt wc_AesSetKey failed with return code %d.\n", ret);
  1014. return ret;
  1015. }
  1016. ret = wc_AesCfbDecrypt(&aes, dec, enc, sizeof(p_vector));
  1017. if (ret) {
  1018. pr_err("wolfcrypt wc_AesCfbDecrypt failed with return code %d\n", ret);
  1019. return ret;
  1020. }
  1021. ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
  1022. if (ret) {
  1023. pr_err("error: p_vector and dec do not match: %d\n", ret);
  1024. return ret;
  1025. }
  1026. /* now the kernel crypto part */
  1027. enc2 = kmalloc(sizeof(p_vector), GFP_KERNEL);
  1028. if (!enc2) {
  1029. pr_err("error: kmalloc failed\n");
  1030. goto test_cfb_end;
  1031. }
  1032. dec2 = kmalloc(sizeof(p_vector), GFP_KERNEL);
  1033. if (!dec2) {
  1034. pr_err("error: kmalloc failed\n");
  1035. goto test_cfb_end;
  1036. }
  1037. memcpy(dec2, p_vector, sizeof(p_vector));
  1038. tfm = crypto_alloc_skcipher(WOLFKM_AESCFB_NAME, 0, 0);
  1039. if (IS_ERR(tfm)) {
  1040. pr_err("error: allocating AES skcipher algorithm %s failed: %ld\n",
  1041. WOLFKM_AESCFB_DRIVER, PTR_ERR(tfm));
  1042. goto test_cfb_end;
  1043. }
  1044. #ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
  1045. {
  1046. const char *driver_name =
  1047. crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
  1048. if (strcmp(driver_name, WOLFKM_AESCFB_DRIVER)) {
  1049. pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
  1050. WOLFKM_AESCFB_NAME, driver_name, WOLFKM_AESCFB_DRIVER);
  1051. ret = -ENOENT;
  1052. goto test_cfb_end;
  1053. }
  1054. }
  1055. #endif
  1056. ret = crypto_skcipher_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
  1057. if (ret) {
  1058. pr_err("error: crypto_skcipher_setkey returned: %d\n", ret);
  1059. goto test_cfb_end;
  1060. }
  1061. req = skcipher_request_alloc(tfm, GFP_KERNEL);
  1062. if (! req) {
  1063. pr_err("error: allocating AES skcipher request %s failed\n",
  1064. WOLFKM_AESCFB_DRIVER);
  1065. goto test_cfb_end;
  1066. }
  1067. sg_init_one(&src, dec2, sizeof(p_vector));
  1068. sg_init_one(&dst, enc2, sizeof(p_vector));
  1069. XMEMCPY(iv_copy, iv, sizeof(iv));
  1070. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  1071. ret = crypto_skcipher_encrypt(req);
  1072. if (ret) {
  1073. pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
  1074. goto test_cfb_end;
  1075. }
  1076. ret = XMEMCMP(enc, enc2, sizeof(p_vector));
  1077. if (ret) {
  1078. pr_err("error: enc and enc2 do not match: %d\n", ret);
  1079. goto test_cfb_end;
  1080. }
  1081. memset(dec2, 0, sizeof(p_vector));
  1082. sg_init_one(&src, enc2, sizeof(p_vector));
  1083. sg_init_one(&dst, dec2, sizeof(p_vector));
  1084. XMEMCPY(iv_copy, iv, sizeof(iv));
  1085. skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);
  1086. ret = crypto_skcipher_decrypt(req);
  1087. if (ret) {
  1088. pr_err("error: crypto_skcipher_decrypt returned: %d\n", ret);
  1089. goto test_cfb_end;
  1090. }
  1091. ret = XMEMCMP(dec, dec2, sizeof(p_vector));
  1092. if (ret) {
  1093. pr_err("error: dec and dec2 do not match: %d\n", ret);
  1094. goto test_cfb_end;
  1095. }
  1096. test_cfb_end:
  1097. if (enc2) { kfree(enc2); enc2 = NULL; }
  1098. if (dec2) { kfree(dec2); dec2 = NULL; }
  1099. if (req) { skcipher_request_free(req); req = NULL; }
  1100. if (tfm) { crypto_free_skcipher(tfm); tfm = NULL; }
  1101. return ret;
  1102. }
  1103. #endif /* WOLFSSL_AES_CFB &&
  1104. * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCFB)
  1105. */
  1106. #if defined(HAVE_AESGCM) && \
  1107. (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
  1108. defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
  1109. static int linuxkm_test_aesgcm(void)
  1110. {
  1111. int ret = 0;
  1112. struct crypto_aead * tfm = NULL;
  1113. struct aead_request * req = NULL;
  1114. struct scatterlist * src = NULL;
  1115. struct scatterlist * dst = NULL;
  1116. Aes aes;
  1117. static const byte key32[] =
  1118. {
  1119. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  1120. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
  1121. 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
  1122. 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
  1123. };
  1124. static const byte p_vector[] =
  1125. /* Now is the time for all w/o trailing 0 */
  1126. {
  1127. 0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
  1128. 0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
  1129. 0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20
  1130. };
  1131. static const byte assoc[] =
  1132. {
  1133. 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
  1134. 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
  1135. 0xab, 0xad, 0xda, 0xd2
  1136. };
  1137. static const byte ivstr[] = "1234567890abcdef";
  1138. static const byte c_vector[] =
  1139. {
  1140. 0x0c,0x97,0x05,0x3c,0xef,0x5c,0x63,0x6b,
  1141. 0x15,0xe4,0x00,0x63,0xf8,0x8c,0xd0,0x95,
  1142. 0x27,0x81,0x90,0x9c,0x9f,0xe6,0x98,0xe9
  1143. };
  1144. static const byte KAT_authTag[] =
  1145. {
  1146. 0xc9,0xd5,0x7a,0x77,0xac,0x28,0xc2,0xe7,
  1147. 0xe4,0x28,0x90,0xaa,0x09,0xab,0xf9,0x7c
  1148. };
  1149. byte enc[sizeof(p_vector)];
  1150. byte authTag[AES_BLOCK_SIZE];
  1151. byte dec[sizeof(p_vector)];
  1152. u8 * assoc2 = NULL;
  1153. u8 * enc2 = NULL;
  1154. u8 * dec2 = NULL;
  1155. u8 * iv = NULL;
  1156. size_t encryptLen = sizeof(p_vector);
  1157. size_t decryptLen = sizeof(p_vector) + sizeof(authTag);
  1158. /* Init stack variables. */
  1159. XMEMSET(enc, 0, sizeof(p_vector));
  1160. XMEMSET(dec, 0, sizeof(p_vector));
  1161. XMEMSET(authTag, 0, AES_BLOCK_SIZE);
  1162. ret = wc_AesInit(&aes, NULL, INVALID_DEVID);
  1163. if (ret) {
  1164. pr_err("error: wc_AesInit failed with return code %d.\n", ret);
  1165. goto test_gcm_end;
  1166. }
  1167. ret = wc_AesGcmInit(&aes, key32, sizeof(key32)/sizeof(byte), ivstr,
  1168. AES_BLOCK_SIZE);
  1169. if (ret) {
  1170. pr_err("error: wc_AesGcmInit failed with return code %d.\n", ret);
  1171. goto test_gcm_end;
  1172. }
  1173. ret = wc_AesGcmEncryptUpdate(&aes, NULL, NULL, 0, assoc, sizeof(assoc));
  1174. if (ret) {
  1175. pr_err("error: wc_AesGcmEncryptUpdate failed with return code %d\n",
  1176. ret);
  1177. goto test_gcm_end;
  1178. }
  1179. ret = wc_AesGcmEncryptUpdate(&aes, enc, p_vector, sizeof(p_vector), NULL, 0);
  1180. if (ret) {
  1181. pr_err("error: wc_AesGcmEncryptUpdate failed with return code %d\n",
  1182. ret);
  1183. goto test_gcm_end;
  1184. }
  1185. if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
  1186. pr_err("wolfcrypt AES-GCM KAT mismatch on ciphertext\n");
  1187. ret = LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E;
  1188. goto test_gcm_end;
  1189. }
  1190. ret = wc_AesGcmEncryptFinal(&aes, authTag, AES_BLOCK_SIZE);
  1191. if (ret) {
  1192. pr_err("error: wc_AesGcmEncryptFinal failed with return code %d\n",
  1193. ret);
  1194. goto test_gcm_end;
  1195. }
  1196. if (XMEMCMP(authTag, KAT_authTag, sizeof(KAT_authTag)) != 0) {
  1197. pr_err("wolfcrypt AES-GCM KAT mismatch on authTag\n");
  1198. ret = LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E;
  1199. goto test_gcm_end;
  1200. }
  1201. ret = wc_AesGcmInit(&aes, key32, sizeof(key32)/sizeof(byte), ivstr,
  1202. AES_BLOCK_SIZE);
  1203. if (ret) {
  1204. pr_err("error: wc_AesGcmInit failed with return code %d.\n", ret);
  1205. goto test_gcm_end;
  1206. }
  1207. ret = wc_AesGcmDecryptUpdate(&aes, dec, enc, sizeof(p_vector),
  1208. assoc, sizeof(assoc));
  1209. if (ret) {
  1210. pr_err("error: wc_AesGcmDecryptUpdate failed with return code %d\n",
  1211. ret);
  1212. goto test_gcm_end;
  1213. }
  1214. ret = wc_AesGcmDecryptFinal(&aes, authTag, AES_BLOCK_SIZE);
  1215. if (ret) {
  1216. pr_err("error: wc_AesGcmDecryptFinal failed with return code %d\n",
  1217. ret);
  1218. goto test_gcm_end;
  1219. }
  1220. ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
  1221. if (ret) {
  1222. pr_err("error: gcm: p_vector and dec do not match: %d\n", ret);
  1223. goto test_gcm_end;
  1224. }
  1225. /* now the kernel crypto part */
  1226. assoc2 = kmalloc(sizeof(assoc), GFP_KERNEL);
  1227. if (! assoc2) {
  1228. pr_err("error: kmalloc failed\n");
  1229. goto test_gcm_end;
  1230. }
  1231. memset(assoc2, 0, sizeof(assoc));
  1232. memcpy(assoc2, assoc, sizeof(assoc));
  1233. iv = kmalloc(AES_BLOCK_SIZE, GFP_KERNEL);
  1234. if (! iv) {
  1235. pr_err("error: kmalloc failed\n");
  1236. goto test_gcm_end;
  1237. }
  1238. memset(iv, 0, AES_BLOCK_SIZE);
  1239. memcpy(iv, ivstr, AES_BLOCK_SIZE);
  1240. enc2 = kmalloc(decryptLen, GFP_KERNEL);
  1241. if (! enc2) {
  1242. pr_err("error: kmalloc failed\n");
  1243. goto test_gcm_end;
  1244. }
  1245. dec2 = kmalloc(decryptLen, GFP_KERNEL);
  1246. if (! dec2) {
  1247. pr_err("error: kmalloc failed\n");
  1248. goto test_gcm_end;
  1249. }
  1250. memset(enc2, 0, decryptLen);
  1251. memset(dec2, 0, decryptLen);
  1252. memcpy(dec2, p_vector, sizeof(p_vector));
  1253. tfm = crypto_alloc_aead(WOLFKM_AESGCM_NAME, 0, 0);
  1254. if (IS_ERR(tfm)) {
  1255. pr_err("error: allocating AES aead algorithm %s failed: %ld\n",
  1256. WOLFKM_AESGCM_DRIVER, PTR_ERR(tfm));
  1257. goto test_gcm_end;
  1258. }
  1259. #ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
  1260. {
  1261. const char *driver_name = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
  1262. if (strcmp(driver_name, WOLFKM_AESGCM_DRIVER)) {
  1263. pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
  1264. WOLFKM_AESGCM_NAME, driver_name, WOLFKM_AESGCM_DRIVER);
  1265. ret = -ENOENT;
  1266. goto test_gcm_end;
  1267. }
  1268. }
  1269. #endif
    ret = crypto_aead_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
    if (ret) {
        pr_err("error: crypto_aead_setkey returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = crypto_aead_setauthsize(tfm, sizeof(authTag));
    if (ret) {
        pr_err("error: crypto_aead_setauthsize returned: %d\n", ret);
        goto test_gcm_end;
    }
    req = aead_request_alloc(tfm, GFP_KERNEL);
    if (! req) {
        pr_err("error: allocating AES AEAD request %s failed\n",
               WOLFKM_AESGCM_DRIVER);
        ret = -ENOMEM;
        goto test_gcm_end;
    }
    src = kmalloc(sizeof(struct scatterlist) * 2, GFP_KERNEL);
    dst = kmalloc(sizeof(struct scatterlist) * 2, GFP_KERNEL);
    if ((! src) || (! dst)) {
        pr_err("error: kmalloc src or dst failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }
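    /* Scatterlist layout: entry 0 carries the associated data, entry 1 the
     * payload; dst's payload entry is decryptLen bytes so the appended auth
     * tag fits after the ciphertext. */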
    sg_init_table(src, 2);
    sg_set_buf(src, assoc2, sizeof(assoc));
    sg_set_buf(&src[1], dec2, sizeof(p_vector));
    sg_init_table(dst, 2);
    sg_set_buf(dst, assoc2, sizeof(assoc));
    sg_set_buf(&dst[1], enc2, decryptLen);

    aead_request_set_callback(req, 0, NULL, NULL);
    aead_request_set_ad(req, sizeof(assoc));
    aead_request_set_crypt(req, src, dst, sizeof(p_vector), iv);

    ret = crypto_aead_encrypt(req);
    if (ret) {
        pr_err("error: crypto_aead_encrypt returned: %d\n", ret);
        goto test_gcm_end;
    }
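    /* On the AEAD encrypt path the tag is appended to the ciphertext, so it
     * starts at offset encryptLen in enc2. */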
    ret = XMEMCMP(enc, enc2, sizeof(p_vector));
    if (ret) {
        pr_err("error: enc and enc2 do not match: %d\n", ret);
        goto test_gcm_end;
    }
    ret = XMEMCMP(authTag, enc2 + encryptLen, sizeof(authTag));
    if (ret) {
        pr_err("error: authTags do not match: %d\n", ret);
        goto test_gcm_end;
    }

    /* Now decrypt crypto request. Reverse src and dst. */
    memset(dec2, 0, decryptLen);
    aead_request_set_ad(req, sizeof(assoc));
    aead_request_set_crypt(req, dst, src, decryptLen, iv);

    ret = crypto_aead_decrypt(req);
    if (ret) {
        pr_err("error: crypto_aead_decrypt returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(dec, dec2, sizeof(p_vector));
    if (ret) {
        pr_err("error: dec and dec2 do not match: %d\n", ret);
        goto test_gcm_end;
    }
test_gcm_end:
    if (req) { aead_request_free(req); req = NULL; }
    if (tfm) { crypto_free_aead(tfm); tfm = NULL; }
    if (src) { kfree(src); src = NULL; }
    if (dst) { kfree(dst); dst = NULL; }
    if (dec2) { kfree(dec2); dec2 = NULL; }
    if (enc2) { kfree(enc2); enc2 = NULL; }
    if (assoc2) { kfree(assoc2); assoc2 = NULL; }
    if (iv) { kfree(iv); iv = NULL; }

    return ret;
}

#endif /* HAVE_AESGCM &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESGCM)
        */
#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))

/* test vectors from
 * http://csrc.nist.gov/groups/STM/cavp/block-cipher-modes.html
 */

#ifdef WOLFSSL_AES_128
static int aes_xts_128_test(void)
{
    XtsAes *aes = NULL;
    int aes_inited = 0;
    int ret = 0;
#define AES_XTS_128_TEST_BUF_SIZ (AES_BLOCK_SIZE * 2 + 8)
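    /* 40 bytes: big enough for the partial-block vectors (pp/cp) and, when
     * built without HAVE_FIPS, the 40-byte ciphertext-stealing vectors
     * (p3/c3). */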
    unsigned char *buf = NULL;
    unsigned char *cipher = NULL;
    u8 *enc2 = NULL;
    u8 *dec2 = NULL;
    struct scatterlist *src = NULL;
    struct scatterlist *dst = NULL;
    struct crypto_skcipher *tfm = NULL;
    struct skcipher_request *req = NULL;
    u8 iv[AES_BLOCK_SIZE];
    /* 128 key tests */
    static const unsigned char k1[] = {
        0xa1, 0xb9, 0x0c, 0xba, 0x3f, 0x06, 0xac, 0x35,
        0x3b, 0x2c, 0x34, 0x38, 0x76, 0x08, 0x17, 0x62,
        0x09, 0x09, 0x23, 0x02, 0x6e, 0x91, 0x77, 0x18,
        0x15, 0xf2, 0x9d, 0xab, 0x01, 0x93, 0x2f, 0x2f
    };
    static const unsigned char i1[] = {
        0x4f, 0xae, 0xf7, 0x11, 0x7c, 0xda, 0x59, 0xc6,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };
    static const unsigned char p1[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c
    };
    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char pp[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };
    static const unsigned char c1[] = {
        0x77, 0x8a, 0xe8, 0xb4, 0x3c, 0xb9, 0x8d, 0x5a,
        0x82, 0x50, 0x81, 0xd5, 0xbe, 0x47, 0x1c, 0x63
    };
    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char cp[] = {
        0x2b, 0xf7, 0x2c, 0xf3, 0xeb, 0x85, 0xef, 0x7b,
        0x0b, 0x76, 0xa0, 0xaa, 0xf3, 0x3f, 0x25, 0x8b,
        0x77, 0x8a, 0xe8, 0xb4, 0x3c, 0xb9, 0x8d, 0x5a
    };
    static const unsigned char k2[] = {
        0x39, 0x25, 0x79, 0x05, 0xdf, 0xcc, 0x77, 0x76,
        0x6c, 0x87, 0x0a, 0x80, 0x6a, 0x60, 0xe3, 0xc0,
        0x93, 0xd1, 0x2a, 0xcf, 0xcb, 0x51, 0x42, 0xfa,
        0x09, 0x69, 0x89, 0x62, 0x5b, 0x60, 0xdb, 0x16
    };
    static const unsigned char i2[] = {
        0x5c, 0xf7, 0x9d, 0xb6, 0xc5, 0xcd, 0x99, 0x1a,
        0x1c, 0x78, 0x81, 0x42, 0x24, 0x95, 0x1e, 0x84
    };
    static const unsigned char p2[] = {
        0xbd, 0xc5, 0x46, 0x8f, 0xbc, 0x8d, 0x50, 0xa1,
        0x0d, 0x1c, 0x85, 0x7f, 0x79, 0x1c, 0x5c, 0xba,
        0xb3, 0x81, 0x0d, 0x0d, 0x73, 0xcf, 0x8f, 0x20,
        0x46, 0xb1, 0xd1, 0x9e, 0x7d, 0x5d, 0x8a, 0x56
    };
    static const unsigned char c2[] = {
        0xd6, 0xbe, 0x04, 0x6d, 0x41, 0xf2, 0x3b, 0x5e,
        0xd7, 0x0b, 0x6b, 0x3d, 0x5c, 0x8e, 0x66, 0x23,
        0x2b, 0xe6, 0xb8, 0x07, 0xd4, 0xdc, 0xc6, 0x0e,
        0xff, 0x8d, 0xbc, 0x1d, 0x9f, 0x7f, 0xc8, 0x22
    };
#ifndef HAVE_FIPS /* FIPS requires different keys for main and tweak. */
    static const unsigned char k3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
    };
    static const unsigned char i3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
    };
    static const unsigned char p3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0xff, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20
    };
    static const unsigned char c3[] = {
        0xA2, 0x07, 0x47, 0x76, 0x3F, 0xEC, 0x0C, 0x23,
        0x1B, 0xD0, 0xBD, 0x46, 0x9A, 0x27, 0x38, 0x12,
        0x95, 0x02, 0x3D, 0x5D, 0xC6, 0x94, 0x51, 0x36,
        0xA0, 0x85, 0xD2, 0x69, 0x6E, 0x87, 0x0A, 0xBF,
        0xB5, 0x5A, 0xDD, 0xCB, 0x80, 0xE0, 0xFC, 0xCD
    };
#endif /* HAVE_FIPS */
    if ((aes = (XtsAes *)XMALLOC(sizeof(*aes), NULL, DYNAMIC_TYPE_AES))
        == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((buf = (unsigned char *)XMALLOC(AES_XTS_128_TEST_BUF_SIZ, NULL,
                                        DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((cipher = (unsigned char *)XMALLOC(AES_XTS_128_TEST_BUF_SIZ, NULL,
                                           DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;
    else
        aes_inited = 1;

    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
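    /* Blocks guarded by DEBUG_VECTOR_REGISTER_ACCESS repeat the preceding KAT
     * with vector registers forced unavailable, exercising the C fallback
     * path enabled by WC_AES_C_DYNAMIC_FALLBACK. */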
#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* partial block encryption test */
    XMEMSET(cipher, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(cp, cipher, sizeof(cp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    XMEMSET(cipher, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(cp, cipher, sizeof(cp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif
    /* partial block decrypt test */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* NIST decrypt test vector */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_AES_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(SYSLIB_FAILED_E);
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif
    /* fail case with decrypting using wrong key */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2)) == 0) { /* fail case with wrong key */
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* set correct key and retest */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#ifndef HAVE_FIPS
    /* Test ciphertext stealing in-place. */
    XMEMCPY(buf, p3, sizeof(p3));
    ret = wc_AesXtsSetKeyNoInit(aes, k3, sizeof(k3), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, buf, sizeof(p3), i3, sizeof(i3));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c3, buf, sizeof(c3))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    ret = wc_AesXtsSetKeyNoInit(aes, k3, sizeof(k3), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, buf, sizeof(c3), i3, sizeof(i3));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p3, buf, sizeof(p3))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif /* HAVE_FIPS */
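    /* Exercise in-place encrypt/decrypt round trips at every length from 16
     * to LARGE_XTS_SZ - 1 to cover the ciphertext-stealing tail handling. */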
    {
#define LARGE_XTS_SZ 1024
        byte* large_input = (byte *)XMALLOC(LARGE_XTS_SZ, NULL,
                                            DYNAMIC_TYPE_TMP_BUFFER);
        int i;
        int j;

        if (large_input == NULL) {
            ret = MEMORY_E;
            goto out;
        }

        for (i = 0; i < (int)LARGE_XTS_SZ; i++)
            large_input[i] = (byte)i;
        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;

            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;

            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }

        XFREE(large_input, NULL, DYNAMIC_TYPE_TMP_BUFFER);
    }

    /* now the kernel crypto part */
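    /* Repeat the single-block and partial-block cases through the skcipher
     * registered under WOLFKM_AESXTS_NAME, via the kernel crypto API, and
     * compare against the same vectors used above. */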
    enc2 = XMALLOC(sizeof(pp), NULL, DYNAMIC_TYPE_AES);
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dec2 = XMALLOC(sizeof(pp), NULL, DYNAMIC_TYPE_AES);
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    src = XMALLOC(sizeof(*src) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! src) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dst = XMALLOC(sizeof(*dst) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! dst) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }
    tfm = crypto_alloc_skcipher(WOLFKM_AESXTS_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        ret = PTR_ERR(tfm);
        pr_err("error: allocating AES skcipher algorithm %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESXTS_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESXTS_NAME, driver_name, WOLFKM_AESXTS_DRIVER);
            ret = -ENOENT;
            goto test_xts_end;
        }
    }
#endif

    ret = crypto_skcipher_ivsize(tfm);
    if (ret != sizeof(iv)) {
        pr_err("error: AES skcipher algorithm %s crypto_skcipher_ivsize()"
               " returned %d but expected %d\n",
               WOLFKM_AESXTS_DRIVER, ret, (int)sizeof(iv));
        ret = -EINVAL;
        goto test_xts_end;
    }

    ret = crypto_skcipher_setkey(tfm, k1, sizeof(k1));
    if (ret) {
        pr_err("error: crypto_skcipher_setkey for %s returned: %d\n",
               WOLFKM_AESXTS_NAME, ret);
        goto test_xts_end;
    }
    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (! req) {
        ret = -ENOMEM;
        pr_err("error: allocating AES skcipher request %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }
    memcpy(dec2, p1, sizeof(p1));
    memset(enc2, 0, sizeof(p1));

    sg_init_one(src, dec2, sizeof(p1));
    sg_init_one(dst, enc2, sizeof(p1));

    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), iv);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(c1, enc2, sizeof(c1));
    if (ret) {
        pr_err("error: c1 and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(p1));
    sg_init_one(src, enc2, sizeof(p1));
    sg_init_one(dst, dec2, sizeof(p1));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), iv);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(p1, dec2, sizeof(p1));
    if (ret) {
        pr_err("error: p1 and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }
    memcpy(dec2, pp, sizeof(pp));
    memset(enc2, 0, sizeof(pp));

    sg_init_one(src, dec2, sizeof(pp));
    sg_init_one(dst, enc2, sizeof(pp));

    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), iv);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(cp, enc2, sizeof(cp));
    if (ret) {
        pr_err("error: cp and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(pp));
    sg_init_one(src, enc2, sizeof(pp));
    sg_init_one(dst, dec2, sizeof(pp));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), iv);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(pp, dec2, sizeof(pp));
    if (ret) {
        pr_err("error: pp and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }
test_xts_end:
    if (enc2)
        XFREE(enc2, NULL, DYNAMIC_TYPE_AES);
    if (dec2)
        XFREE(dec2, NULL, DYNAMIC_TYPE_AES);
    if (src)
        XFREE(src, NULL, DYNAMIC_TYPE_AES);
    if (dst)
        XFREE(dst, NULL, DYNAMIC_TYPE_AES);
    if (req)
        skcipher_request_free(req);
    if (tfm)
        crypto_free_skcipher(tfm);

out:
    if (aes_inited)
        wc_AesXtsFree(aes);
    if (buf)
        XFREE(buf, NULL, DYNAMIC_TYPE_AES);
    if (cipher)
        XFREE(cipher, NULL, DYNAMIC_TYPE_AES);
    if (aes)
        XFREE(aes, NULL, DYNAMIC_TYPE_AES);

#undef AES_XTS_128_TEST_BUF_SIZ

    return ret;
}
#endif /* WOLFSSL_AES_128 */

#ifdef WOLFSSL_AES_256
static int aes_xts_256_test(void)
{
    XtsAes *aes = NULL;
    int aes_inited = 0;
    int ret = 0;
#define AES_XTS_256_TEST_BUF_SIZ (AES_BLOCK_SIZE * 3)
    unsigned char *buf = NULL;
    unsigned char *cipher = NULL;
    u8 *enc2 = NULL;
    u8 *dec2 = NULL;
    struct scatterlist *src = NULL;
    struct scatterlist *dst = NULL;
    struct crypto_skcipher *tfm = NULL;
    struct skcipher_request *req = NULL;
    u8 iv[AES_BLOCK_SIZE];
    /* 256 key tests */
    static const unsigned char k1[] = {
        0x1e, 0xa6, 0x61, 0xc5, 0x8d, 0x94, 0x3a, 0x0e,
        0x48, 0x01, 0xe4, 0x2f, 0x4b, 0x09, 0x47, 0x14,
        0x9e, 0x7f, 0x9f, 0x8e, 0x3e, 0x68, 0xd0, 0xc7,
        0x50, 0x52, 0x10, 0xbd, 0x31, 0x1a, 0x0e, 0x7c,
        0xd6, 0xe1, 0x3f, 0xfd, 0xf2, 0x41, 0x8d, 0x8d,
        0x19, 0x11, 0xc0, 0x04, 0xcd, 0xa5, 0x8d, 0xa3,
        0xd6, 0x19, 0xb7, 0xe2, 0xb9, 0x14, 0x1e, 0x58,
        0x31, 0x8e, 0xea, 0x39, 0x2c, 0xf4, 0x1b, 0x08
    };
    static const unsigned char i1[] = {
        0xad, 0xf8, 0xd9, 0x26, 0x27, 0x46, 0x4a, 0xd2,
        0xf0, 0x42, 0x8e, 0x84, 0xa9, 0xf8, 0x75, 0x64
    };
    static const unsigned char p1[] = {
        0x2e, 0xed, 0xea, 0x52, 0xcd, 0x82, 0x15, 0xe1,
        0xac, 0xc6, 0x47, 0xe8, 0x10, 0xbb, 0xc3, 0x64,
        0x2e, 0x87, 0x28, 0x7f, 0x8d, 0x2e, 0x57, 0xe3,
        0x6c, 0x0a, 0x24, 0xfb, 0xc1, 0x2a, 0x20, 0x2e
    };
    static const unsigned char c1[] = {
        0xcb, 0xaa, 0xd0, 0xe2, 0xf6, 0xce, 0xa3, 0xf5,
        0x0b, 0x37, 0xf9, 0x34, 0xd4, 0x6a, 0x9b, 0x13,
        0x0b, 0x9d, 0x54, 0xf0, 0x7e, 0x34, 0xf3, 0x6a,
        0xf7, 0x93, 0xe8, 0x6f, 0x73, 0xc6, 0xd7, 0xdb
    };
    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char pp[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };
    static const unsigned char cp[] = {
        0x65, 0x5e, 0x1d, 0x37, 0x4a, 0x91, 0xe7, 0x6c,
        0x4f, 0x83, 0x92, 0xbc, 0x5a, 0x10, 0x55, 0x27,
        0x61, 0x0e, 0x5a, 0xde, 0xca, 0xc5, 0x12, 0xd8
    };
    static const unsigned char k2[] = {
        0xad, 0x50, 0x4b, 0x85, 0xd7, 0x51, 0xbf, 0xba,
        0x69, 0x13, 0xb4, 0xcc, 0x79, 0xb6, 0x5a, 0x62,
        0xf7, 0xf3, 0x9d, 0x36, 0x0f, 0x35, 0xb5, 0xec,
        0x4a, 0x7e, 0x95, 0xbd, 0x9b, 0xa5, 0xf2, 0xec,
        0xc1, 0xd7, 0x7e, 0xa3, 0xc3, 0x74, 0xbd, 0x4b,
        0x13, 0x1b, 0x07, 0x83, 0x87, 0xdd, 0x55, 0x5a,
        0xb5, 0xb0, 0xc7, 0xe5, 0x2d, 0xb5, 0x06, 0x12,
        0xd2, 0xb5, 0x3a, 0xcb, 0x47, 0x8a, 0x53, 0xb4
    };
    static const unsigned char i2[] = {
        0xe6, 0x42, 0x19, 0xed, 0xe0, 0xe1, 0xc2, 0xa0,
        0x0e, 0xf5, 0x58, 0x6a, 0xc4, 0x9b, 0xeb, 0x6f
    };
    static const unsigned char p2[] = {
        0x24, 0xcb, 0x76, 0x22, 0x55, 0xb5, 0xa8, 0x00,
        0xf4, 0x6e, 0x80, 0x60, 0x56, 0x9e, 0x05, 0x53,
        0xbc, 0xfe, 0x86, 0x55, 0x3b, 0xca, 0xd5, 0x89,
        0xc7, 0x54, 0x1a, 0x73, 0xac, 0xc3, 0x9a, 0xbd,
        0x53, 0xc4, 0x07, 0x76, 0xd8, 0xe8, 0x22, 0x61,
        0x9e, 0xa9, 0xad, 0x77, 0xa0, 0x13, 0x4c, 0xfc
    };
    static const unsigned char c2[] = {
        0xa3, 0xc6, 0xf3, 0xf3, 0x82, 0x79, 0x5b, 0x10,
        0x87, 0xd7, 0x02, 0x50, 0xdb, 0x2c, 0xd3, 0xb1,
        0xa1, 0x62, 0xa8, 0xb6, 0xdc, 0x12, 0x60, 0x61,
        0xc1, 0x0a, 0x84, 0xa5, 0x85, 0x3f, 0x3a, 0x89,
        0xe6, 0x6c, 0xdb, 0xb7, 0x9a, 0xb4, 0x28, 0x9b,
        0xc3, 0xea, 0xd8, 0x10, 0xe9, 0xc0, 0xaf, 0x92
    };
    if ((aes = (XtsAes *)XMALLOC(sizeof(*aes), NULL, DYNAMIC_TYPE_AES))
        == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((buf = (unsigned char *)XMALLOC(AES_XTS_256_TEST_BUF_SIZ, NULL,
                                        DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((cipher = (unsigned char *)XMALLOC(AES_XTS_256_TEST_BUF_SIZ, NULL,
                                           DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    ret = wc_AesXtsInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;
    else
        aes_inited = 1;
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* partial block encryption test */
    XMEMSET(cipher, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;

    /* partial block decrypt test */
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* NIST decrypt test vector */
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
    /* now the kernel crypto part */
    enc2 = XMALLOC(sizeof(p1), NULL, DYNAMIC_TYPE_AES);
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dec2 = XMALLOC(sizeof(p1), NULL, DYNAMIC_TYPE_AES);
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    src = XMALLOC(sizeof(*src) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! src) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dst = XMALLOC(sizeof(*dst) * 2, NULL, DYNAMIC_TYPE_AES);
    if (! dst) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }
    tfm = crypto_alloc_skcipher(WOLFKM_AESXTS_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        ret = PTR_ERR(tfm);
        pr_err("error: allocating AES skcipher algorithm %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESXTS_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESXTS_NAME, driver_name, WOLFKM_AESXTS_DRIVER);
            ret = -ENOENT;
            goto test_xts_end;
        }
    }
#endif

    ret = crypto_skcipher_ivsize(tfm);
    if (ret != sizeof(iv)) {
        pr_err("error: AES skcipher algorithm %s crypto_skcipher_ivsize()"
               " returned %d but expected %d\n",
               WOLFKM_AESXTS_DRIVER, ret, (int)sizeof(iv));
        ret = -EINVAL;
        goto test_xts_end;
    }

    ret = crypto_skcipher_setkey(tfm, k1, sizeof(k1));
    if (ret) {
        pr_err("error: crypto_skcipher_setkey for %s returned: %d\n",
               WOLFKM_AESXTS_NAME, ret);
        goto test_xts_end;
    }
    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (! req) {
        ret = -ENOMEM;
        pr_err("error: allocating AES skcipher request %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        goto test_xts_end;
    }
    memcpy(dec2, p1, sizeof(p1));
    memset(enc2, 0, sizeof(p1));

    sg_init_one(src, dec2, sizeof(p1));
    sg_init_one(dst, enc2, sizeof(p1));

    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), iv);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(c1, enc2, sizeof(c1));
    if (ret) {
        pr_err("error: c1 and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(p1));
    sg_init_one(src, enc2, sizeof(p1));
    sg_init_one(dst, dec2, sizeof(p1));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), iv);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(p1, dec2, sizeof(p1));
    if (ret) {
        pr_err("error: p1 and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }
    memcpy(dec2, pp, sizeof(pp));
    memset(enc2, 0, sizeof(pp));

    sg_init_one(src, dec2, sizeof(pp));
    sg_init_one(dst, enc2, sizeof(pp));

    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), iv);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(cp, enc2, sizeof(cp));
    if (ret) {
        pr_err("error: cp and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(pp));
    sg_init_one(src, enc2, sizeof(pp));
    sg_init_one(dst, dec2, sizeof(pp));
    memcpy(iv, i1, sizeof(iv));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), iv);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("ERROR: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(pp, dec2, sizeof(pp));
    if (ret) {
        pr_err("error: pp and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }
test_xts_end:
    if (enc2)
        XFREE(enc2, NULL, DYNAMIC_TYPE_AES);
    if (dec2)
        XFREE(dec2, NULL, DYNAMIC_TYPE_AES);
    if (src)
        XFREE(src, NULL, DYNAMIC_TYPE_AES);
    if (dst)
        XFREE(dst, NULL, DYNAMIC_TYPE_AES);
    if (req)
        skcipher_request_free(req);
    if (tfm)
        crypto_free_skcipher(tfm);

out:
    if (aes_inited)
        wc_AesXtsFree(aes);
    if (buf)
        XFREE(buf, NULL, DYNAMIC_TYPE_AES);
    if (cipher)
        XFREE(cipher, NULL, DYNAMIC_TYPE_AES);
    if (aes)
        XFREE(aes, NULL, DYNAMIC_TYPE_AES);

#undef AES_XTS_256_TEST_BUF_SIZ

    return ret;
}
#endif /* WOLFSSL_AES_256 */

static int linuxkm_test_aesxts(void) {
    int ret = 0;

#ifdef WOLFSSL_AES_128
    ret = aes_xts_128_test();
    if (ret != 0) {
        pr_err("aes_xts_128_test() failed with retval %d.\n", ret);
        goto out;
    }
#endif
#ifdef WOLFSSL_AES_256
    ret = aes_xts_256_test();
    if (ret != 0) {
        pr_err("aes_xts_256_test() failed with retval %d.\n", ret);
        goto out;
    }
#endif

out:
    return ret;
}
#endif /* WOLFSSL_AES_XTS &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESXTS)
        */

#endif /* !NO_AES */

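/* Register each enabled algorithm with the kernel crypto API and immediately
 * run its self-test; registration stops at the first failure. */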
static int linuxkm_lkcapi_register(void)
{
    int ret = 0;

#define REGISTER_ALG(alg, installer, tester) do { \
        if (alg ## _loaded) { \
            pr_err("ERROR: %s is already registered.\n", \
                   (alg).base.cra_driver_name); \
            return -EEXIST; \
        } \
        \
        ret = (installer)(&(alg)); \
        \
        if (ret) { \
            pr_err("ERROR: " #installer " for %s failed " \
                   "with return code %d.\n", \
                   (alg).base.cra_driver_name, ret); \
            return ret; \
        } \
        \
        alg ## _loaded = 1; \
        \
        ret = (tester()); \
        \
        if (ret) { \
            pr_err("ERROR: self-test for %s failed " \
                   "with return code %d.\n", \
                   (alg).base.cra_driver_name, ret); \
            return ret; \
        } \
        pr_info("%s self-test OK -- " \
                "registered for %s with priority %d.\n", \
                (alg).base.cra_driver_name, \
                (alg).base.cra_name, \
                (alg).base.cra_priority); \
    } while (0)
#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
    REGISTER_ALG(cbcAesAlg, crypto_register_skcipher, linuxkm_test_aescbc);
#endif

#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
    REGISTER_ALG(cfbAesAlg, crypto_register_skcipher, linuxkm_test_aescfb);
#endif

#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
    REGISTER_ALG(gcmAesAead, crypto_register_aead, linuxkm_test_aesgcm);
#endif

#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
    REGISTER_ALG(xtsAesAlg, crypto_register_skcipher, linuxkm_test_aesxts);
#endif

#undef REGISTER_ALG

    return 0;
}

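/* Tear-down mirror of linuxkm_lkcapi_register(): only algorithms whose
 * *_loaded flag is set are unregistered, so a partial registration can be
 * unwound safely. */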
static void linuxkm_lkcapi_unregister(void)
{
#define UNREGISTER_ALG(alg, uninstaller) do { \
        if (alg ## _loaded) { \
            (uninstaller)(&(alg)); \
            alg ## _loaded = 0; \
        } \
    } while (0)

#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
    UNREGISTER_ALG(cbcAesAlg, crypto_unregister_skcipher);
#endif

#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
    UNREGISTER_ALG(cfbAesAlg, crypto_unregister_skcipher);
#endif

#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
    UNREGISTER_ALG(gcmAesAead, crypto_unregister_aead);
#endif

#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
    UNREGISTER_ALG(xtsAesAlg, crypto_unregister_skcipher);
#endif

#undef UNREGISTER_ALG
}