/* lkcapi_glue.c -- glue logic to register wolfCrypt implementations with
 * the Linux Kernel Cryptosystem
 *
 * Copyright (C) 2006-2024 wolfSSL Inc.
 *
 * This file is part of wolfSSL.
 *
 * wolfSSL is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * wolfSSL is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
 */

/* included by linuxkm/module_hooks.c */

#ifndef LINUXKM_LKCAPI_REGISTER
#error lkcapi_glue.c included in non-LINUXKM_LKCAPI_REGISTER project.
#endif

#ifndef WOLFSSL_LINUXKM_LKCAPI_PRIORITY
/* Larger number means higher priority. The highest in-tree priority is 4001,
 * in the Cavium driver.
 */
#define WOLFSSL_LINUXKM_LKCAPI_PRIORITY 10000
#endif

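/* For reference, the kernel's generic C implementations register at priority
 * 100 and accelerated in-tree drivers typically use a few hundred, so the
 * default above comfortably outranks them.
 */
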
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
static int disable_setkey_warnings = 0;
#else
#define disable_setkey_warnings 0
#endif

#ifndef NO_AES

/* note the FIPS code will be returned on failure even in non-FIPS builds. */
#define LINUXKM_LKCAPI_AES_KAT_MISMATCH_E AES_KAT_FIPS_E
#define LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E AESGCM_KAT_FIPS_E

#define WOLFKM_AESCBC_NAME "cbc(aes)"
#define WOLFKM_AESCFB_NAME "cfb(aes)"
#define WOLFKM_AESGCM_NAME "gcm(aes)"
#define WOLFKM_AESXTS_NAME "xts(aes)"

#ifdef WOLFSSL_AESNI
#define WOLFKM_DRIVER_ISA_EXT "-aesni"
#else
#define WOLFKM_DRIVER_ISA_EXT ""
#endif

#ifdef HAVE_FIPS
#ifndef HAVE_FIPS_VERSION
#define WOLFKM_DRIVER_FIPS "-fips-140"
#elif HAVE_FIPS_VERSION >= 5
#define WOLFKM_DRIVER_FIPS "-fips-140-3"
#elif HAVE_FIPS_VERSION == 2
#define WOLFKM_DRIVER_FIPS "-fips-140-2"
#else
#define WOLFKM_DRIVER_FIPS "-fips-140"
#endif
#else
#define WOLFKM_DRIVER_FIPS ""
#endif

#define WOLFKM_DRIVER_SUFFIX \
    WOLFKM_DRIVER_ISA_EXT WOLFKM_DRIVER_FIPS "-wolfcrypt"

#define WOLFKM_AESCBC_DRIVER ("cbc-aes" WOLFKM_DRIVER_SUFFIX)
#define WOLFKM_AESCFB_DRIVER ("cfb-aes" WOLFKM_DRIVER_SUFFIX)
#define WOLFKM_AESGCM_DRIVER ("gcm-aes" WOLFKM_DRIVER_SUFFIX)
#define WOLFKM_AESXTS_DRIVER ("xts-aes" WOLFKM_DRIVER_SUFFIX)

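/* The driver names built above expand to, e.g., "cbc-aes-aesni-wolfcrypt" in
 * a non-FIPS AESNI build, or "gcm-aes-aesni-fips-140-3-wolfcrypt" in a FIPS
 * 140-3 build.
 */
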
#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))
#ifndef WOLFSSL_EXPERIMENTAL_SETTINGS
#error Experimental settings without WOLFSSL_EXPERIMENTAL_SETTINGS
#endif
static int linuxkm_test_aescbc(void);
#endif

#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))
#ifndef WOLFSSL_EXPERIMENTAL_SETTINGS
#error Experimental settings without WOLFSSL_EXPERIMENTAL_SETTINGS
#endif
static int linuxkm_test_aescfb(void);
#endif

#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
#ifndef WOLFSSL_EXPERIMENTAL_SETTINGS
#error Experimental settings without WOLFSSL_EXPERIMENTAL_SETTINGS
#endif
static int linuxkm_test_aesgcm(void);
#endif

#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))
static int linuxkm_test_aesxts(void);
#endif

/* km_AesX(): wrappers to wolfcrypt wc_AesX functions and
 * structures. */

#include <wolfssl/wolfcrypt/aes.h>

struct km_AesCtx {
    Aes *aes_encrypt; /* allocated in km_AesInitCommon() to assure
                       * alignment, needed for AESNI.
                       */
    Aes *aes_decrypt; /* same. */
};

#if defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
    defined(LINUXKM_LKCAPI_REGISTER_AESCBC) || \
    defined(LINUXKM_LKCAPI_REGISTER_AESCFB) || \
    defined(LINUXKM_LKCAPI_REGISTER_AESGCM)

static void km_AesExitCommon(struct km_AesCtx * ctx);

static int km_AesInitCommon(
    struct km_AesCtx * ctx,
    const char * name,
    int need_decryption)
{
    int err;

    ctx->aes_encrypt = (Aes *)malloc(sizeof(*ctx->aes_encrypt));
    if (! ctx->aes_encrypt) {
        pr_err("%s: allocation of %zu bytes for encryption key failed.\n",
               name, sizeof(*ctx->aes_encrypt));
        return MEMORY_E;
    }

    err = wc_AesInit(ctx->aes_encrypt, NULL, INVALID_DEVID);
    if (unlikely(err)) {
        pr_err("%s: wc_AesInit failed: %d\n", name, err);
        free(ctx->aes_encrypt);
        ctx->aes_encrypt = NULL;
        return -EINVAL;
    }

    if (! need_decryption) {
        ctx->aes_decrypt = NULL;
        return 0;
    }

    ctx->aes_decrypt = (Aes *)malloc(sizeof(*ctx->aes_decrypt));
    if (! ctx->aes_decrypt) {
        pr_err("%s: allocation of %zu bytes for decryption key failed.\n",
               name, sizeof(*ctx->aes_decrypt));
        km_AesExitCommon(ctx);
        return MEMORY_E;
    }

    err = wc_AesInit(ctx->aes_decrypt, NULL, INVALID_DEVID);
    if (unlikely(err)) {
        pr_err("%s: wc_AesInit failed: %d\n", name, err);
        free(ctx->aes_decrypt);
        ctx->aes_decrypt = NULL;
        km_AesExitCommon(ctx);
        return -EINVAL;
    }

    return 0;
}

static void km_AesExitCommon(struct km_AesCtx * ctx)
{
    if (ctx->aes_encrypt) {
        wc_AesFree(ctx->aes_encrypt);
        free(ctx->aes_encrypt);
        ctx->aes_encrypt = NULL;
    }
    if (ctx->aes_decrypt) {
        wc_AesFree(ctx->aes_decrypt);
        free(ctx->aes_decrypt);
        ctx->aes_decrypt = NULL;
    }
}

#if defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
    defined(LINUXKM_LKCAPI_REGISTER_AESCBC) || \
    defined(LINUXKM_LKCAPI_REGISTER_AESCFB)

static int km_AesSetKeyCommon(struct km_AesCtx * ctx, const u8 *in_key,
                              unsigned int key_len, const char * name)
{
    int err;

    err = wc_AesSetKey(ctx->aes_encrypt, in_key, key_len, NULL, AES_ENCRYPTION);
    if (unlikely(err)) {
        if (! disable_setkey_warnings)
            pr_err("%s: wc_AesSetKey for encryption key failed: %d\n", name, err);
        return -ENOKEY;
    }

    if (ctx->aes_decrypt) {
        err = wc_AesSetKey(ctx->aes_decrypt, in_key, key_len, NULL,
                           AES_DECRYPTION);
        if (unlikely(err)) {
            if (! disable_setkey_warnings)
                pr_err("%s: wc_AesSetKey for decryption key failed: %d\n",
                       name, err);
            return -ENOKEY;
        }
    }

    return 0;
}

static void km_AesExit(struct crypto_skcipher *tfm)
{
    struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
    km_AesExitCommon(ctx);
}

#endif /* LINUXKM_LKCAPI_REGISTER_ALL ||
        * LINUXKM_LKCAPI_REGISTER_AESCBC ||
        * LINUXKM_LKCAPI_REGISTER_AESCFB
        */

#endif /* LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC ||
        * LINUXKM_LKCAPI_REGISTER_AESCFB || LINUXKM_LKCAPI_REGISTER_AESGCM
        */

#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))

static int km_AesCbcInit(struct crypto_skcipher *tfm)
{
    struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
    return km_AesInitCommon(ctx, WOLFKM_AESCBC_DRIVER, 1);
}

static int km_AesCbcSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
                           unsigned int key_len)
{
    struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
    return km_AesSetKeyCommon(ctx, in_key, key_len, WOLFKM_AESCBC_DRIVER);
}

static int km_AesCbcEncrypt(struct skcipher_request *req)
{
    struct crypto_skcipher * tfm = NULL;
    struct km_AesCtx * ctx = NULL;
    struct skcipher_walk walk;
    unsigned int nbytes = 0;
    int err = 0;

    tfm = crypto_skcipher_reqtfm(req);
    ctx = crypto_skcipher_ctx(tfm);

    err = skcipher_walk_virt(&walk, req, false);
    if (unlikely(err)) {
        pr_err("%s: skcipher_walk_virt failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
        return err;
    }

    while ((nbytes = walk.nbytes) != 0) {
        err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
        if (unlikely(err)) {
            pr_err("%s: wc_AesSetIV failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = wc_AesCbcEncrypt(ctx->aes_encrypt, walk.dst.virt.addr,
                               walk.src.virt.addr, nbytes);
        if (unlikely(err)) {
            pr_err("%s: wc_AesCbcEncrypt failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        if (unlikely(err)) {
            pr_err("%s: skcipher_walk_done failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return err;
        }
    }

    return err;
}

static int km_AesCbcDecrypt(struct skcipher_request *req)
{
    struct crypto_skcipher * tfm = NULL;
    struct km_AesCtx * ctx = NULL;
    struct skcipher_walk walk;
    unsigned int nbytes = 0;
    int err = 0;

    tfm = crypto_skcipher_reqtfm(req);
    ctx = crypto_skcipher_ctx(tfm);

    err = skcipher_walk_virt(&walk, req, false);
    if (unlikely(err)) {
        pr_err("%s: skcipher_walk_virt failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
        return err;
    }

    while ((nbytes = walk.nbytes) != 0) {
        err = wc_AesSetIV(ctx->aes_decrypt, walk.iv);
        if (unlikely(err)) {
            if (! disable_setkey_warnings)
                pr_err("%s: wc_AesSetIV failed: %d\n",
                       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = wc_AesCbcDecrypt(ctx->aes_decrypt, walk.dst.virt.addr,
                               walk.src.virt.addr, nbytes);
        if (unlikely(err)) {
            pr_err("%s: wc_AesCbcDecrypt failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        if (unlikely(err)) {
            pr_err("%s: skcipher_walk_done failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return err;
        }
    }

    return err;
}

static struct skcipher_alg cbcAesAlg = {
    .base.cra_name        = WOLFKM_AESCBC_NAME,
    .base.cra_driver_name = WOLFKM_AESCBC_DRIVER,
    .base.cra_priority    = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
    .base.cra_blocksize   = AES_BLOCK_SIZE,
    .base.cra_ctxsize     = sizeof(struct km_AesCtx),
    .base.cra_module      = THIS_MODULE,
    .init                 = km_AesCbcInit,
    .exit                 = km_AesExit,
    .min_keysize          = AES_128_KEY_SIZE,
    .max_keysize          = AES_256_KEY_SIZE,
    .ivsize               = AES_BLOCK_SIZE,
    .setkey               = km_AesCbcSetKey,
    .encrypt              = km_AesCbcEncrypt,
    .decrypt              = km_AesCbcDecrypt,
};
static int cbcAesAlg_loaded = 0;

#endif /* HAVE_AES_CBC &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC)
        */

#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))

static int km_AesCfbInit(struct crypto_skcipher *tfm)
{
    struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
    return km_AesInitCommon(ctx, WOLFKM_AESCFB_DRIVER, 0);
}

static int km_AesCfbSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
                           unsigned int key_len)
{
    struct km_AesCtx * ctx = crypto_skcipher_ctx(tfm);
    return km_AesSetKeyCommon(ctx, in_key, key_len, WOLFKM_AESCFB_DRIVER);
}

static int km_AesCfbEncrypt(struct skcipher_request *req)
{
    struct crypto_skcipher * tfm = NULL;
    struct km_AesCtx * ctx = NULL;
    struct skcipher_walk walk;
    unsigned int nbytes = 0;
    int err = 0;

    tfm = crypto_skcipher_reqtfm(req);
    ctx = crypto_skcipher_ctx(tfm);

    err = skcipher_walk_virt(&walk, req, false);
    if (unlikely(err)) {
        pr_err("%s: skcipher_walk_virt failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
        return err;
    }

    while ((nbytes = walk.nbytes) != 0) {
        err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
        if (unlikely(err)) {
            if (! disable_setkey_warnings)
                pr_err("%s: wc_AesSetIV failed: %d\n",
                       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = wc_AesCfbEncrypt(ctx->aes_encrypt, walk.dst.virt.addr,
                               walk.src.virt.addr, nbytes);
        if (unlikely(err)) {
            pr_err("%s: wc_AesCfbEncrypt failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        if (unlikely(err)) {
            pr_err("%s: skcipher_walk_done failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return err;
        }
    }

    return err;
}

static int km_AesCfbDecrypt(struct skcipher_request *req)
{
    struct crypto_skcipher * tfm = NULL;
    struct km_AesCtx * ctx = NULL;
    struct skcipher_walk walk;
    unsigned int nbytes = 0;
    int err = 0;

    tfm = crypto_skcipher_reqtfm(req);
    ctx = crypto_skcipher_ctx(tfm);

    err = skcipher_walk_virt(&walk, req, false);
    if (unlikely(err)) {
        pr_err("%s: skcipher_walk_virt failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
        return err;
    }

    while ((nbytes = walk.nbytes) != 0) {
        err = wc_AesSetIV(ctx->aes_encrypt, walk.iv);
        if (unlikely(err)) {
            if (! disable_setkey_warnings)
                pr_err("%s: wc_AesSetIV failed: %d\n",
                       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = wc_AesCfbDecrypt(ctx->aes_encrypt, walk.dst.virt.addr,
                               walk.src.virt.addr, nbytes);
        if (unlikely(err)) {
            pr_err("%s: wc_AesCfbDecrypt failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        if (unlikely(err)) {
            pr_err("%s: skcipher_walk_done failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return err;
        }
    }

    return err;
}

static struct skcipher_alg cfbAesAlg = {
    .base.cra_name        = WOLFKM_AESCFB_NAME,
    .base.cra_driver_name = WOLFKM_AESCFB_DRIVER,
    .base.cra_priority    = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
    .base.cra_blocksize   = AES_BLOCK_SIZE,
    .base.cra_ctxsize     = sizeof(struct km_AesCtx),
    .base.cra_module      = THIS_MODULE,
    .init                 = km_AesCfbInit,
    .exit                 = km_AesExit,
    .min_keysize          = AES_128_KEY_SIZE,
    .max_keysize          = AES_256_KEY_SIZE,
    .ivsize               = AES_BLOCK_SIZE,
    .setkey               = km_AesCfbSetKey,
    .encrypt              = km_AesCfbEncrypt,
    .decrypt              = km_AesCfbDecrypt,
};
static int cfbAesAlg_loaded = 0;

#endif /* WOLFSSL_AES_CFB &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCFB)
        */

#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))

#ifndef WOLFSSL_AESGCM_STREAM
#error LKCAPI registration of AES-GCM requires WOLFSSL_AESGCM_STREAM (--enable-aesgcm-stream).
#endif

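/* The streaming (init/update/final) AES-GCM API is needed here because the
 * skcipher walk below delivers the AAD and the ciphertext in chunks, so a
 * one-shot wc_AesGcmEncrypt()/wc_AesGcmDecrypt() call cannot be used.
 */
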
static int km_AesGcmInit(struct crypto_aead * tfm)
{
    struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
    return km_AesInitCommon(ctx, WOLFKM_AESGCM_DRIVER, 0);
}

static void km_AesGcmExit(struct crypto_aead * tfm)
{
    struct km_AesCtx * ctx = crypto_aead_ctx(tfm);
    km_AesExitCommon(ctx);
}

static int km_AesGcmSetKey(struct crypto_aead *tfm, const u8 *in_key,
                           unsigned int key_len)
{
    int err;
    struct km_AesCtx * ctx = crypto_aead_ctx(tfm);

    err = wc_AesGcmSetKey(ctx->aes_encrypt, in_key, key_len);
    if (unlikely(err)) {
        if (! disable_setkey_warnings)
            pr_err("%s: wc_AesGcmSetKey failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
        return -ENOKEY;
    }
    return 0;
}

static int km_AesGcmSetAuthsize(struct crypto_aead *tfm, unsigned int authsize)
{
    (void)tfm;
    if (authsize > AES_BLOCK_SIZE ||
        authsize < WOLFSSL_MIN_AUTH_TAG_SZ) {
        pr_err("%s: invalid authsize: %d\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), authsize);
        return -EINVAL;
    }
    return 0;
}

/*
 * aead ciphers receive data in scatterlists in the following order:
 *   encrypt:
 *     req->src: aad||plaintext
 *     req->dst: aad||ciphertext||tag
 *   decrypt:
 *     req->src: aad||ciphertext||tag
 *     req->dst: aad||plaintext, return 0 or -EBADMSG
 */

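/* Note: on decrypt, req->cryptlen includes the authsize-byte tag, so the
 * number of ciphertext bytes actually decrypted is cryptlen - authsize.
 */
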
static int km_AesGcmEncrypt(struct aead_request *req)
{
    struct crypto_aead * tfm = NULL;
    struct km_AesCtx * ctx = NULL;
    struct skcipher_walk walk;
    struct scatter_walk assocSgWalk;
    unsigned int nbytes = 0;
    u8 authTag[AES_BLOCK_SIZE];
    int err = 0;
    unsigned int assocLeft = 0;
    unsigned int cryptLeft = 0;
    u8 * assoc = NULL;

    tfm = crypto_aead_reqtfm(req);
    ctx = crypto_aead_ctx(tfm);
    assocLeft = req->assoclen;
    cryptLeft = req->cryptlen;

    scatterwalk_start(&assocSgWalk, req->src);

    err = skcipher_walk_aead_encrypt(&walk, req, false);
    if (unlikely(err)) {
        pr_err("%s: skcipher_walk_aead_encrypt failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
        return err;
    }

    err = wc_AesGcmInit(ctx->aes_encrypt, NULL /*key*/, 0 /*keylen*/, walk.iv,
                        AES_BLOCK_SIZE);
    if (unlikely(err)) {
        pr_err("%s: wc_AesGcmInit failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
        return -EINVAL;
    }

    assoc = scatterwalk_map(&assocSgWalk);
    if (unlikely(IS_ERR(assoc))) {
        pr_err("%s: scatterwalk_map failed: %ld\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)),
               PTR_ERR(assoc));
        return -EINVAL;
    }

    err = wc_AesGcmEncryptUpdate(ctx->aes_encrypt, NULL, NULL, 0,
                                 assoc, assocLeft);
    assocLeft -= assocLeft;
    scatterwalk_unmap(assoc);
    assoc = NULL;
    if (unlikely(err)) {
        pr_err("%s: wc_AesGcmEncryptUpdate failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
        return -EINVAL;
    }

    while ((nbytes = walk.nbytes) != 0) {
        int n = nbytes;

        if (likely(cryptLeft && nbytes)) {
            n = cryptLeft < nbytes ? cryptLeft : nbytes;
            err = wc_AesGcmEncryptUpdate(
                ctx->aes_encrypt,
                walk.dst.virt.addr,
                walk.src.virt.addr,
                cryptLeft,
                NULL, 0);
            nbytes -= n;
            cryptLeft -= n;
        }

        if (unlikely(err)) {
            pr_err("%s: wc_AesGcmEncryptUpdate failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
            return -EINVAL;
        }

        err = skcipher_walk_done(&walk, nbytes);
        if (unlikely(err)) {
            pr_err("%s: skcipher_walk_done failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
            return err;
        }
    }

    err = wc_AesGcmEncryptFinal(ctx->aes_encrypt, authTag, tfm->authsize);
    if (unlikely(err)) {
        pr_err("%s: wc_AesGcmEncryptFinal failed with return code %d\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
        return -EINVAL;
    }

    /* Now copy the auth tag into request scatterlist. */
    scatterwalk_map_and_copy(authTag, req->dst,
                             req->assoclen + req->cryptlen,
                             tfm->authsize, 1);

    return err;
}

static int km_AesGcmDecrypt(struct aead_request *req)
{
    struct crypto_aead * tfm = NULL;
    struct km_AesCtx * ctx = NULL;
    struct skcipher_walk walk;
    struct scatter_walk assocSgWalk;
    unsigned int nbytes = 0;
    u8 origAuthTag[AES_BLOCK_SIZE];
    int err = 0;
    unsigned int assocLeft = 0;
    unsigned int cryptLeft = 0;
    u8 * assoc = NULL;

    tfm = crypto_aead_reqtfm(req);
    ctx = crypto_aead_ctx(tfm);
    assocLeft = req->assoclen;
    cryptLeft = req->cryptlen - tfm->authsize;

    /* Copy out original auth tag from req->src. */
    scatterwalk_map_and_copy(origAuthTag, req->src,
                             req->assoclen + req->cryptlen - tfm->authsize,
                             tfm->authsize, 0);

    scatterwalk_start(&assocSgWalk, req->src);

    err = skcipher_walk_aead_decrypt(&walk, req, false);
    if (unlikely(err)) {
        pr_err("%s: skcipher_walk_aead_decrypt failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
        return err;
    }

    err = wc_AesGcmInit(ctx->aes_encrypt, NULL /*key*/, 0 /*keylen*/, walk.iv,
                        AES_BLOCK_SIZE);
    if (unlikely(err)) {
        pr_err("%s: wc_AesGcmInit failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
        return -EINVAL;
    }

    assoc = scatterwalk_map(&assocSgWalk);
    if (unlikely(IS_ERR(assoc))) {
        pr_err("%s: scatterwalk_map failed: %ld\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)),
               PTR_ERR(assoc));
        return -EINVAL;
    }

    err = wc_AesGcmDecryptUpdate(ctx->aes_encrypt, NULL, NULL, 0,
                                 assoc, assocLeft);
    assocLeft -= assocLeft;
    scatterwalk_unmap(assoc);
    assoc = NULL;
    if (unlikely(err)) {
        pr_err("%s: wc_AesGcmDecryptUpdate failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
        return -EINVAL;
    }

    while ((nbytes = walk.nbytes) != 0) {
        int n = nbytes;

        if (likely(cryptLeft && nbytes)) {
            n = cryptLeft < nbytes ? cryptLeft : nbytes;
            err = wc_AesGcmDecryptUpdate(
                ctx->aes_encrypt,
                walk.dst.virt.addr,
                walk.src.virt.addr,
                cryptLeft,
                NULL, 0);
            nbytes -= n;
            cryptLeft -= n;
        }

        if (unlikely(err)) {
            pr_err("%s: wc_AesGcmDecryptUpdate failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
            return -EINVAL;
        }

        err = skcipher_walk_done(&walk, nbytes);
        if (unlikely(err)) {
            pr_err("%s: skcipher_walk_done failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
            return err;
        }
    }

    err = wc_AesGcmDecryptFinal(ctx->aes_encrypt, origAuthTag, tfm->authsize);
    if (unlikely(err)) {
        pr_err("%s: wc_AesGcmDecryptFinal failed with return code %d\n",
               crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)), err);
        if (err == WC_NO_ERR_TRACE(AES_GCM_AUTH_E)) {
            return -EBADMSG;
        }
        else {
            return -EINVAL;
        }
    }

    return err;
}

static struct aead_alg gcmAesAead = {
    .base.cra_name        = WOLFKM_AESGCM_NAME,
    .base.cra_driver_name = WOLFKM_AESGCM_DRIVER,
    .base.cra_priority    = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
    .base.cra_blocksize   = 1,
    .base.cra_ctxsize     = sizeof(struct km_AesCtx),
    .base.cra_module      = THIS_MODULE,
    .init                 = km_AesGcmInit,
    .exit                 = km_AesGcmExit,
    .setkey               = km_AesGcmSetKey,
    .setauthsize          = km_AesGcmSetAuthsize,
    .encrypt              = km_AesGcmEncrypt,
    .decrypt              = km_AesGcmDecrypt,
    .ivsize               = AES_BLOCK_SIZE,
    .maxauthsize          = AES_BLOCK_SIZE,
    .chunksize            = AES_BLOCK_SIZE,
};
static int gcmAesAead_loaded = 0;

#endif /* HAVE_AESGCM &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESGCM)
        */

#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))

#ifndef WOLFSSL_AESXTS_STREAM
#error LKCAPI registration of AES-XTS requires WOLFSSL_AESXTS_STREAM (--enable-aesxts-stream).
#endif

struct km_AesXtsCtx {
    XtsAes *aesXts; /* allocated in km_AesXtsInitCommon() to assure alignment
                     * for AESNI.
                     */
};

static int km_AesXtsInitCommon(struct km_AesXtsCtx * ctx, const char * name)
{
    int err;

    ctx->aesXts = (XtsAes *)malloc(sizeof(*ctx->aesXts));
    if (! ctx->aesXts)
        return -MEMORY_E;

    err = wc_AesXtsInit(ctx->aesXts, NULL, INVALID_DEVID);
    if (unlikely(err)) {
        pr_err("%s: wc_AesXtsInit failed: %d\n", name, err);
        return -EINVAL;
    }

    return 0;
}

static int km_AesXtsInit(struct crypto_skcipher *tfm)
{
    struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
    return km_AesXtsInitCommon(ctx, WOLFKM_AESXTS_DRIVER);
}

static void km_AesXtsExit(struct crypto_skcipher *tfm)
{
    struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);
    wc_AesXtsFree(ctx->aesXts);
    free(ctx->aesXts);
    ctx->aesXts = NULL;
}

static int km_AesXtsSetKey(struct crypto_skcipher *tfm, const u8 *in_key,
                           unsigned int key_len)
{
    int err;
    struct km_AesXtsCtx * ctx = crypto_skcipher_ctx(tfm);

    err = wc_AesXtsSetKeyNoInit(ctx->aesXts, in_key, key_len,
                                AES_ENCRYPTION_AND_DECRYPTION);
    if (unlikely(err)) {
        if (! disable_setkey_warnings)
            pr_err("%s: wc_AesXtsSetKeyNoInit failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
        return -EINVAL;
    }
    return 0;
}

/* see /usr/src/linux/drivers/md/dm-crypt.c */

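/* When the request length is not a multiple of AES_BLOCK_SIZE, the tail is
 * handled in two passes: all but the last two blocks go through a
 * sub-request, and the remaining full block plus partial block are finished
 * with wc_AesXtsEncryptFinal()/wc_AesXtsDecryptFinal(), which performs the
 * ciphertext stealing.
 */
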
static int km_AesXtsEncrypt(struct skcipher_request *req)
{
    int err = 0;
    struct crypto_skcipher * tfm = NULL;
    struct km_AesXtsCtx * ctx = NULL;
    struct skcipher_walk walk;
    unsigned int nbytes = 0;

    tfm = crypto_skcipher_reqtfm(req);
    ctx = crypto_skcipher_ctx(tfm);

    if (req->cryptlen < AES_BLOCK_SIZE)
        return -EINVAL;

    err = skcipher_walk_virt(&walk, req, false);
    if (unlikely(err)) {
        pr_err("%s: skcipher_walk_virt failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
        return err;
    }

    if (walk.nbytes == walk.total) {
        err = wc_AesXtsEncrypt(ctx->aesXts, walk.dst.virt.addr,
                               walk.src.virt.addr, walk.nbytes, walk.iv, walk.ivsize);
        if (unlikely(err)) {
            pr_err("%s: wc_AesXtsEncrypt failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = skcipher_walk_done(&walk, 0);
    } else {
        int tail = req->cryptlen % AES_BLOCK_SIZE;
        struct skcipher_request subreq;
        struct XtsAesStreamData stream;

        if (tail > 0) {
            int blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;

            skcipher_walk_abort(&walk);

            skcipher_request_set_tfm(&subreq, tfm);
            skcipher_request_set_callback(&subreq,
                                          skcipher_request_flags(req),
                                          NULL, NULL);
            skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                       blocks * AES_BLOCK_SIZE, req->iv);
            req = &subreq;

            err = skcipher_walk_virt(&walk, req, false);
            if (!walk.nbytes)
                return err ? : -EINVAL;
        } else {
            tail = 0;
        }

        err = wc_AesXtsEncryptInit(ctx->aesXts, walk.iv, walk.ivsize, &stream);
        if (unlikely(err)) {
            pr_err("%s: wc_AesXtsEncryptInit failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }

        while ((nbytes = walk.nbytes) != 0) {
            /* if this isn't the final call, pass block-aligned data to
             * prevent end-of-message ciphertext stealing.
             */
            if (nbytes < walk.total)
                nbytes &= ~(AES_BLOCK_SIZE - 1);

            if (nbytes & ((unsigned int)AES_BLOCK_SIZE - 1U))
                err = wc_AesXtsEncryptFinal(ctx->aesXts, walk.dst.virt.addr,
                                            walk.src.virt.addr, nbytes,
                                            &stream);
            else
                err = wc_AesXtsEncryptUpdate(ctx->aesXts, walk.dst.virt.addr,
                                             walk.src.virt.addr, nbytes,
                                             &stream);

            if (unlikely(err)) {
                pr_err("%s: wc_AesXtsEncryptUpdate failed: %d\n",
                       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
                return -EINVAL;
            }

            err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
            if (unlikely(err)) {
                pr_err("%s: skcipher_walk_done failed: %d\n",
                       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
                return err;
            }
        }

        if (unlikely(tail > 0)) {
            struct scatterlist sg_src[2], sg_dst[2];
            struct scatterlist *src, *dst;

            dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
            if (req->dst != req->src)
                dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

            skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
                                       req->iv);

            err = skcipher_walk_virt(&walk, &subreq, false);
            if (err)
                return err;

            err = wc_AesXtsEncryptFinal(ctx->aesXts, walk.dst.virt.addr,
                                        walk.src.virt.addr, walk.nbytes,
                                        &stream);
            if (unlikely(err)) {
                pr_err("%s: wc_AesXtsEncryptFinal failed: %d\n",
                       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
                return -EINVAL;
            }

            err = skcipher_walk_done(&walk, 0);
        } else if (! (stream.bytes_crypted_with_this_tweak & ((word32)AES_BLOCK_SIZE - 1U))) {
            err = wc_AesXtsEncryptFinal(ctx->aesXts, NULL, NULL, 0, &stream);
        }
    }

    return err;
}

static int km_AesXtsDecrypt(struct skcipher_request *req)
{
    int err = 0;
    struct crypto_skcipher * tfm = NULL;
    struct km_AesXtsCtx * ctx = NULL;
    struct skcipher_walk walk;
    unsigned int nbytes = 0;

    tfm = crypto_skcipher_reqtfm(req);
    ctx = crypto_skcipher_ctx(tfm);

    if (req->cryptlen < AES_BLOCK_SIZE)
        return -EINVAL;

    err = skcipher_walk_virt(&walk, req, false);
    if (unlikely(err)) {
        pr_err("%s: skcipher_walk_virt failed: %d\n",
               crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
        return err;
    }

    if (walk.nbytes == walk.total) {
        err = wc_AesXtsDecrypt(ctx->aesXts,
                               walk.dst.virt.addr, walk.src.virt.addr,
                               walk.nbytes, walk.iv, walk.ivsize);
        if (unlikely(err)) {
            pr_err("%s: wc_AesXtsDecrypt failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }
        err = skcipher_walk_done(&walk, 0);
    } else {
        int tail = req->cryptlen % AES_BLOCK_SIZE;
        struct skcipher_request subreq;
        struct XtsAesStreamData stream;

        if (unlikely(tail > 0)) {
            int blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;

            skcipher_walk_abort(&walk);

            skcipher_request_set_tfm(&subreq, tfm);
            skcipher_request_set_callback(&subreq,
                                          skcipher_request_flags(req),
                                          NULL, NULL);
            skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                       blocks * AES_BLOCK_SIZE, req->iv);
            req = &subreq;

            err = skcipher_walk_virt(&walk, req, false);
            if (!walk.nbytes)
                return err ? : -EINVAL;
        } else {
            tail = 0;
        }

        err = wc_AesXtsDecryptInit(ctx->aesXts, walk.iv, walk.ivsize, &stream);
        if (unlikely(err)) {
            pr_err("%s: wc_AesXtsDecryptInit failed: %d\n",
                   crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
            return -EINVAL;
        }

        while ((nbytes = walk.nbytes) != 0) {
            /* if this isn't the final call, pass block-aligned data to
             * prevent end-of-message ciphertext stealing.
             */
            if (nbytes < walk.total)
                nbytes &= ~(AES_BLOCK_SIZE - 1);

            if (nbytes & ((unsigned int)AES_BLOCK_SIZE - 1U))
                err = wc_AesXtsDecryptFinal(ctx->aesXts, walk.dst.virt.addr,
                                            walk.src.virt.addr, nbytes,
                                            &stream);
            else
                err = wc_AesXtsDecryptUpdate(ctx->aesXts, walk.dst.virt.addr,
                                             walk.src.virt.addr, nbytes,
                                             &stream);

            if (unlikely(err)) {
                pr_err("%s: wc_AesXtsDecryptUpdate failed: %d\n",
                       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
                return -EINVAL;
            }

            err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
            if (unlikely(err)) {
                pr_err("%s: skcipher_walk_done failed: %d\n",
                       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
                return err;
            }
        }

        if (unlikely(tail > 0)) {
            struct scatterlist sg_src[2], sg_dst[2];
            struct scatterlist *src, *dst;

            dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
            if (req->dst != req->src)
                dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

            skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
                                       req->iv);

            err = skcipher_walk_virt(&walk, &subreq, false);
            if (err)
                return err;

            err = wc_AesXtsDecryptFinal(ctx->aesXts, walk.dst.virt.addr,
                                        walk.src.virt.addr, walk.nbytes,
                                        &stream);
            if (unlikely(err)) {
                pr_err("%s: wc_AesXtsDecryptFinal failed: %d\n",
                       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)), err);
                return -EINVAL;
            }

            err = skcipher_walk_done(&walk, 0);
        } else if (! (stream.bytes_crypted_with_this_tweak & ((word32)AES_BLOCK_SIZE - 1U))) {
            err = wc_AesXtsDecryptFinal(ctx->aesXts, NULL, NULL, 0, &stream);
        }
    }

    return err;
}

static struct skcipher_alg xtsAesAlg = {
    .base.cra_name        = WOLFKM_AESXTS_NAME,
    .base.cra_driver_name = WOLFKM_AESXTS_DRIVER,
    .base.cra_priority    = WOLFSSL_LINUXKM_LKCAPI_PRIORITY,
    .base.cra_blocksize   = AES_BLOCK_SIZE,
    .base.cra_ctxsize     = sizeof(struct km_AesXtsCtx),
    .base.cra_module      = THIS_MODULE,
    .min_keysize          = 2 * AES_128_KEY_SIZE,
    .max_keysize          = 2 * AES_256_KEY_SIZE,
    .ivsize               = AES_BLOCK_SIZE,
    .walksize             = 2 * AES_BLOCK_SIZE,
    .init                 = km_AesXtsInit,
    .exit                 = km_AesXtsExit,
    .setkey               = km_AesXtsSetKey,
    .encrypt              = km_AesXtsEncrypt,
    .decrypt              = km_AesXtsDecrypt
};
static int xtsAesAlg_loaded = 0;

#endif /* WOLFSSL_AES_XTS &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESXTS)
        */

/* cipher tests, cribbed from test.c, with supplementary LKCAPI tests: */

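/* Each test first computes a known-answer result directly with wolfCrypt,
 * then runs the same vectors through the registered LKCAPI transform and
 * checks that the two implementations agree.
 */
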
#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))

static int linuxkm_test_aescbc(void)
{
    int ret = 0;
    struct crypto_skcipher * tfm = NULL;
    struct skcipher_request * req = NULL;
    struct scatterlist src, dst;
    Aes *aes;
    int aes_inited = 0;
    static const byte key32[] =
    {
        0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
        0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
        0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
        0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
    };
    static const byte p_vector[] =
    /* Now is the time for all good men w/o trailing 0 */
    {
        0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
        0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
        0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20,
        0x67,0x6f,0x6f,0x64,0x20,0x6d,0x65,0x6e
    };
    static const byte iv[] = "1234567890abcdef";
    static const byte c_vector[] =
    {
        0xd7,0xd6,0x04,0x5b,0x4d,0xc4,0x90,0xdf,
        0x4a,0x82,0xed,0x61,0x26,0x4e,0x23,0xb3,
        0xe4,0xb5,0x85,0x30,0x29,0x4c,0x9d,0xcf,
        0x73,0xc9,0x46,0xd1,0xaa,0xc8,0xcb,0x62
    };
    byte iv_copy[sizeof(iv)];
    byte enc[sizeof(p_vector)];
    byte dec[sizeof(p_vector)];
    u8 * enc2 = NULL;
    u8 * dec2 = NULL;

    aes = (Aes *)malloc(sizeof(*aes));
    if (aes == NULL)
        return -ENOMEM;

    XMEMSET(enc, 0, sizeof(enc));
    XMEMSET(dec, 0, sizeof(dec));

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret) {
        pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
        goto test_cbc_end;
    }
    aes_inited = 1;

    ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
    if (ret) {
        pr_err("wolfcrypt wc_AesSetKey failed with return code %d\n", ret);
        goto test_cbc_end;
    }

    ret = wc_AesCbcEncrypt(aes, enc, p_vector, sizeof(p_vector));
    if (ret) {
        pr_err("wolfcrypt wc_AesCbcEncrypt failed with return code %d\n", ret);
        goto test_cbc_end;
    }

    if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
        pr_err("wolfcrypt wc_AesCbcEncrypt KAT mismatch\n");
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto test_cbc_end;
    }

    /* Re init for decrypt and set flag. */
    wc_AesFree(aes);
    aes_inited = 0;

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret) {
        pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
        goto test_cbc_end;
    }
    aes_inited = 1;

    ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_DECRYPTION);
    if (ret) {
        pr_err("wolfcrypt wc_AesSetKey failed with return code %d.\n", ret);
        goto test_cbc_end;
    }

    ret = wc_AesCbcDecrypt(aes, dec, enc, sizeof(p_vector));
    if (ret) {
        pr_err("wolfcrypt wc_AesCbcDecrypt failed with return code %d\n", ret);
        goto test_cbc_end;
    }

    ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
    if (ret) {
        pr_err("error: p_vector and dec do not match: %d\n", ret);
        goto test_cbc_end;
    }

    /* now the kernel crypto part */

    enc2 = malloc(sizeof(p_vector));
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_cbc_end;
    }

    dec2 = malloc(sizeof(p_vector));
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_cbc_end;
    }
    memcpy(dec2, p_vector, sizeof(p_vector));

    tfm = crypto_alloc_skcipher(WOLFKM_AESCBC_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        pr_err("error: allocating AES skcipher algorithm %s failed: %ld\n",
               WOLFKM_AESCBC_DRIVER, PTR_ERR(tfm));
        ret = -ENOMEM;
        tfm = NULL;
        goto test_cbc_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESCBC_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESCBC_NAME, driver_name, WOLFKM_AESCBC_DRIVER);
            ret = -ENOENT;
            goto test_cbc_end;
        }
    }
#endif

    ret = crypto_skcipher_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
    if (ret) {
        pr_err("error: crypto_skcipher_setkey returned: %d\n", ret);
        goto test_cbc_end;
    }

    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (!req) {
        pr_err("error: allocating AES skcipher request %s failed\n",
               WOLFKM_AESCBC_DRIVER);
        ret = -ENOMEM;
        goto test_cbc_end;
    }

    sg_init_one(&src, dec2, sizeof(p_vector));
    sg_init_one(&dst, enc2, sizeof(p_vector));

    XMEMCPY(iv_copy, iv, sizeof(iv));
    skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_cbc_end;
    }

    ret = XMEMCMP(enc, enc2, sizeof(p_vector));
    if (ret) {
        pr_err("error: enc and enc2 do not match: %d\n", ret);
        goto test_cbc_end;
    }

    memset(dec2, 0, sizeof(p_vector));
    sg_init_one(&src, enc2, sizeof(p_vector));
    sg_init_one(&dst, dec2, sizeof(p_vector));

    XMEMCPY(iv_copy, iv, sizeof(iv));
    skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_decrypt returned: %d\n", ret);
        goto test_cbc_end;
    }

    ret = XMEMCMP(dec, dec2, sizeof(p_vector));
    if (ret) {
        pr_err("error: dec and dec2 do not match: %d\n", ret);
        goto test_cbc_end;
    }

test_cbc_end:

    if (enc2) { free(enc2); }
    if (dec2) { free(dec2); }
    if (req) { skcipher_request_free(req); }
    if (tfm) { crypto_free_skcipher(tfm); }

    if (aes_inited)
        wc_AesFree(aes);
    free(aes);

    return ret;
}

#endif /* HAVE_AES_CBC &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCBC)
        */

#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))

static int linuxkm_test_aescfb(void)
{
    int ret = 0;
    struct crypto_skcipher * tfm = NULL;
    struct skcipher_request * req = NULL;
    struct scatterlist src, dst;
    Aes *aes;
    int aes_inited = 0;
    static const byte key32[] =
    {
        0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
        0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
        0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
        0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
    };
    static const byte p_vector[] =
    /* Now is the time for all good men w/o trailing 0 */
    {
        0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
        0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
        0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20,
        0x67,0x6f,0x6f,0x64,0x20,0x6d,0x65,0x6e
    };
    static const byte iv[] = "1234567890abcdef";
    static const byte c_vector[] =
    {
        0x56,0x35,0x3f,0xdd,0xde,0xa6,0x15,0x87,
        0x57,0xdc,0x34,0x62,0x9a,0x68,0x96,0x51,
        0xc7,0x09,0xb9,0x4e,0x47,0x6b,0x24,0x72,
        0x19,0x5a,0xdf,0x7e,0xba,0xa8,0x01,0xb6
    };
    byte iv_copy[sizeof(iv)];
    byte enc[sizeof(p_vector)];
    byte dec[sizeof(p_vector)];
    u8 * enc2 = NULL;
    u8 * dec2 = NULL;

    aes = (Aes *)malloc(sizeof(*aes));
    if (aes == NULL)
        return -ENOMEM;

    XMEMSET(enc, 0, sizeof(enc));
    XMEMSET(dec, 0, sizeof(dec));

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret) {
        pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
        goto test_cfb_end;
    }
    aes_inited = 1;

    ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
    if (ret) {
        pr_err("wolfcrypt wc_AesSetKey failed with return code %d\n", ret);
        goto test_cfb_end;
    }

    ret = wc_AesCfbEncrypt(aes, enc, p_vector, sizeof(p_vector));
    if (ret) {
        pr_err("wolfcrypt wc_AesCfbEncrypt failed with return code %d\n", ret);
        goto test_cfb_end;
    }

    if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
        pr_err("wolfcrypt wc_AesCfbEncrypt KAT mismatch\n");
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto test_cfb_end;
    }

    /* Re init for decrypt and set flag. */
    wc_AesFree(aes);
    aes_inited = 0;

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret) {
        pr_err("wolfcrypt wc_AesInit failed with return code %d.\n", ret);
        goto test_cfb_end;
    }
    aes_inited = 1;

    ret = wc_AesSetKey(aes, key32, AES_BLOCK_SIZE * 2, iv, AES_ENCRYPTION);
    if (ret) {
        pr_err("wolfcrypt wc_AesSetKey failed with return code %d.\n", ret);
        goto test_cfb_end;
    }

    ret = wc_AesCfbDecrypt(aes, dec, enc, sizeof(p_vector));
    if (ret) {
        pr_err("wolfcrypt wc_AesCfbDecrypt failed with return code %d\n", ret);
        goto test_cfb_end;
    }

    ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
    if (ret) {
        pr_err("error: p_vector and dec do not match: %d\n", ret);
        goto test_cfb_end;
    }

    /* now the kernel crypto part */

    enc2 = malloc(sizeof(p_vector));
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_cfb_end;
    }

    dec2 = malloc(sizeof(p_vector));
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_cfb_end;
    }
    memcpy(dec2, p_vector, sizeof(p_vector));

    tfm = crypto_alloc_skcipher(WOLFKM_AESCFB_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        pr_err("error: allocating AES skcipher algorithm %s failed: %ld\n",
               WOLFKM_AESCFB_DRIVER, PTR_ERR(tfm));
        ret = -ENOMEM;
        tfm = NULL;
        goto test_cfb_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESCFB_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESCFB_NAME, driver_name, WOLFKM_AESCFB_DRIVER);
            ret = -ENOENT;
            goto test_cfb_end;
        }
    }
#endif

    ret = crypto_skcipher_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
    if (ret) {
        pr_err("error: crypto_skcipher_setkey returned: %d\n", ret);
        goto test_cfb_end;
    }

    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (!req) {
        pr_err("error: allocating AES skcipher request %s failed\n",
               WOLFKM_AESCFB_DRIVER);
        ret = -ENOMEM;
        goto test_cfb_end;
    }

    sg_init_one(&src, dec2, sizeof(p_vector));
    sg_init_one(&dst, enc2, sizeof(p_vector));

    XMEMCPY(iv_copy, iv, sizeof(iv));
    skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_cfb_end;
    }

    ret = XMEMCMP(enc, enc2, sizeof(p_vector));
    if (ret) {
        pr_err("error: enc and enc2 do not match: %d\n", ret);
        goto test_cfb_end;
    }

    memset(dec2, 0, sizeof(p_vector));
    sg_init_one(&src, enc2, sizeof(p_vector));
    sg_init_one(&dst, dec2, sizeof(p_vector));

    XMEMCPY(iv_copy, iv, sizeof(iv));
    skcipher_request_set_crypt(req, &src, &dst, sizeof(p_vector), iv_copy);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_decrypt returned: %d\n", ret);
        goto test_cfb_end;
    }

    ret = XMEMCMP(dec, dec2, sizeof(p_vector));
    if (ret) {
        pr_err("error: dec and dec2 do not match: %d\n", ret);
        goto test_cfb_end;
    }

test_cfb_end:

    if (enc2) { free(enc2); }
    if (dec2) { free(dec2); }
    if (req) { skcipher_request_free(req); }
    if (tfm) { crypto_free_skcipher(tfm); }

    if (aes_inited)
        wc_AesFree(aes);
    free(aes);

    return ret;
}

#endif /* WOLFSSL_AES_CFB &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESCFB)
        */


#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))
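
/* Self-test for the registered AES-GCM AEAD: a wolfCrypt streaming
 * known-answer test (ciphertext and auth tag), followed by an AEAD
 * encrypt/decrypt round trip through the kernel crypto API handle for
 * WOLFKM_AESGCM_NAME, with the AAD carried in the first scatterlist entry.
 */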
static int linuxkm_test_aesgcm(void)
{
    int     ret = 0;
    struct crypto_aead *  tfm = NULL;
    struct aead_request * req = NULL;
    struct scatterlist *  src = NULL;
    struct scatterlist *  dst = NULL;
    Aes     *aes;
    int     aes_inited = 0;
    static const byte key32[] =
    {
        0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
        0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
        0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
        0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66
    };
    static const byte p_vector[] =
    /* Now is the time for all w/o trailing 0 */
    {
        0x4e,0x6f,0x77,0x20,0x69,0x73,0x20,0x74,
        0x68,0x65,0x20,0x74,0x69,0x6d,0x65,0x20,
        0x66,0x6f,0x72,0x20,0x61,0x6c,0x6c,0x20
    };
    static const byte assoc[] =
    {
        0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
        0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
        0xab, 0xad, 0xda, 0xd2
    };
    static const byte ivstr[] = "1234567890abcdef";
    static const byte c_vector[] =
    {
        0x0c,0x97,0x05,0x3c,0xef,0x5c,0x63,0x6b,
        0x15,0xe4,0x00,0x63,0xf8,0x8c,0xd0,0x95,
        0x27,0x81,0x90,0x9c,0x9f,0xe6,0x98,0xe9
    };
    static const byte KAT_authTag[] =
    {
        0xc9,0xd5,0x7a,0x77,0xac,0x28,0xc2,0xe7,
        0xe4,0x28,0x90,0xaa,0x09,0xab,0xf9,0x7c
    };
    byte    enc[sizeof(p_vector)];
    byte    authTag[AES_BLOCK_SIZE];
    byte    dec[sizeof(p_vector)];
    u8 *    assoc2 = NULL;
    u8 *    enc2 = NULL;
    u8 *    dec2 = NULL;
    u8 *    iv = NULL;
    size_t  encryptLen = sizeof(p_vector);
    size_t  decryptLen = sizeof(p_vector) + sizeof(authTag);

    /* Init stack variables. */
    XMEMSET(enc, 0, sizeof(p_vector));
    XMEMSET(dec, 0, sizeof(p_vector));
    XMEMSET(authTag, 0, AES_BLOCK_SIZE);

    aes = (Aes *)malloc(sizeof(*aes));
    if (aes == NULL)
        return -ENOMEM;

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret) {
        pr_err("error: wc_AesInit failed with return code %d.\n", ret);
        goto test_gcm_end;
    }
    aes_inited = 1;

    ret = wc_AesGcmInit(aes, key32, sizeof(key32)/sizeof(byte), ivstr,
                        AES_BLOCK_SIZE);
    if (ret) {
        pr_err("error: wc_AesGcmInit failed with return code %d.\n", ret);
        goto test_gcm_end;
    }

    ret = wc_AesGcmEncryptUpdate(aes, NULL, NULL, 0, assoc, sizeof(assoc));
    if (ret) {
        pr_err("error: wc_AesGcmEncryptUpdate failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }

    ret = wc_AesGcmEncryptUpdate(aes, enc, p_vector, sizeof(p_vector), NULL, 0);
    if (ret) {
        pr_err("error: wc_AesGcmEncryptUpdate failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }

    if (XMEMCMP(enc, c_vector, sizeof(c_vector)) != 0) {
        pr_err("wolfcrypt AES-GCM KAT mismatch on ciphertext\n");
        ret = LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E;
        goto test_gcm_end;
    }

    ret = wc_AesGcmEncryptFinal(aes, authTag, AES_BLOCK_SIZE);
    if (ret) {
        pr_err("error: wc_AesGcmEncryptFinal failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }

    if (XMEMCMP(authTag, KAT_authTag, sizeof(KAT_authTag)) != 0) {
        pr_err("wolfcrypt AES-GCM KAT mismatch on authTag\n");
        ret = LINUXKM_LKCAPI_AESGCM_KAT_MISMATCH_E;
        goto test_gcm_end;
    }

    ret = wc_AesGcmInit(aes, key32, sizeof(key32)/sizeof(byte), ivstr,
                        AES_BLOCK_SIZE);
    if (ret) {
        pr_err("error: wc_AesGcmInit failed with return code %d.\n", ret);
        goto test_gcm_end;
    }

    ret = wc_AesGcmDecryptUpdate(aes, dec, enc, sizeof(p_vector),
                                 assoc, sizeof(assoc));
    if (ret) {
        pr_err("error: wc_AesGcmDecryptUpdate failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }

    ret = wc_AesGcmDecryptFinal(aes, authTag, AES_BLOCK_SIZE);
    if (ret) {
        pr_err("error: wc_AesGcmDecryptFinal failed with return code %d\n",
               ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(p_vector, dec, sizeof(p_vector));
    if (ret) {
        pr_err("error: gcm: p_vector and dec do not match: %d\n", ret);
        goto test_gcm_end;
    }

    /* now the kernel crypto part */

    assoc2 = malloc(sizeof(assoc));
    if (assoc2 == NULL) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }
    memset(assoc2, 0, sizeof(assoc));
    memcpy(assoc2, assoc, sizeof(assoc));

    iv = malloc(AES_BLOCK_SIZE);
    if (iv == NULL) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }
    memset(iv, 0, AES_BLOCK_SIZE);
    memcpy(iv, ivstr, AES_BLOCK_SIZE);

    enc2 = malloc(decryptLen);
    if (enc2 == NULL) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }

    dec2 = malloc(decryptLen);
    if (dec2 == NULL) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }
    memset(enc2, 0, decryptLen);
    memset(dec2, 0, decryptLen);
    memcpy(dec2, p_vector, sizeof(p_vector));

    tfm = crypto_alloc_aead(WOLFKM_AESGCM_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        ret = PTR_ERR(tfm);
        pr_err("error: allocating AES AEAD algorithm %s failed: %d\n",
               WOLFKM_AESGCM_DRIVER, ret);
        tfm = NULL;
        goto test_gcm_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESGCM_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESGCM_NAME, driver_name, WOLFKM_AESGCM_DRIVER);
            ret = -ENOENT;
            goto test_gcm_end;
        }
    }
#endif

    ret = crypto_aead_setkey(tfm, key32, AES_BLOCK_SIZE * 2);
    if (ret) {
        pr_err("error: crypto_aead_setkey returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = crypto_aead_setauthsize(tfm, sizeof(authTag));
    if (ret) {
        pr_err("error: crypto_aead_setauthsize returned: %d\n", ret);
        goto test_gcm_end;
    }

    req = aead_request_alloc(tfm, GFP_KERNEL);
    if (req == NULL) {
        ret = -ENOMEM;
        pr_err("error: allocating AES AEAD request %s failed\n",
               WOLFKM_AESGCM_DRIVER);
        goto test_gcm_end;
    }

    src = malloc(sizeof(struct scatterlist) * 2);
    dst = malloc(sizeof(struct scatterlist) * 2);
    if ((src == NULL) || (dst == NULL)) {
        pr_err("error: malloc src or dst failed\n");
        ret = -ENOMEM;
        goto test_gcm_end;
    }

    sg_init_table(src, 2);
    sg_set_buf(src, assoc2, sizeof(assoc));
    sg_set_buf(&src[1], dec2, sizeof(p_vector));

    sg_init_table(dst, 2);
    sg_set_buf(dst, assoc2, sizeof(assoc));
    sg_set_buf(&dst[1], enc2, decryptLen);

    aead_request_set_callback(req, 0, NULL, NULL);
    aead_request_set_ad(req, sizeof(assoc));
    aead_request_set_crypt(req, src, dst, sizeof(p_vector), iv);

    ret = crypto_aead_encrypt(req);
    if (ret) {
        pr_err("error: crypto_aead_encrypt returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(enc, enc2, sizeof(p_vector));
    if (ret) {
        pr_err("error: enc and enc2 do not match: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(authTag, enc2 + encryptLen, sizeof(authTag));
    if (ret) {
        pr_err("error: authTags do not match: %d\n", ret);
        goto test_gcm_end;
    }

    /* Now decrypt crypto request. Reverse src and dst. */
    memset(dec2, 0, decryptLen);
    aead_request_set_ad(req, sizeof(assoc));
    aead_request_set_crypt(req, dst, src, decryptLen, iv);

    ret = crypto_aead_decrypt(req);
    if (ret) {
        pr_err("error: crypto_aead_decrypt returned: %d\n", ret);
        goto test_gcm_end;
    }

    ret = XMEMCMP(dec, dec2, sizeof(p_vector));
    if (ret) {
        pr_err("error: dec and dec2 do not match: %d\n", ret);
        goto test_gcm_end;
    }

test_gcm_end:

    if (req) { aead_request_free(req); req = NULL; }
    if (tfm) { crypto_free_aead(tfm); tfm = NULL; }

    if (src) { free(src); src = NULL; }
    if (dst) { free(dst); dst = NULL; }

    if (dec2) { free(dec2); dec2 = NULL; }
    if (enc2) { free(enc2); enc2 = NULL; }

    if (assoc2) { free(assoc2); assoc2 = NULL; }
    if (iv) { free(iv); iv = NULL; }

    if (aes_inited)
        wc_AesFree(aes);
    free(aes);

    return ret;
}

#endif /* HAVE_AESGCM &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESGCM)
        */

#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))

/* test vectors from
 * http://csrc.nist.gov/groups/STM/cavp/block-cipher-modes.html
 */

#ifdef WOLFSSL_AES_128
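
/* AES-128-XTS self-test: wolfCrypt known-answer tests (one-shot and
 * streaming, including partial final blocks and in-place ciphertext
 * stealing), a large-buffer streaming/one-shot cross-check, then an
 * encrypt/decrypt round trip through the registered WOLFKM_AESXTS_NAME
 * skcipher.
 */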
static int aes_xts_128_test(void)
{
    XtsAes *aes = NULL;
    int aes_inited = 0;
    int ret = 0;
#define AES_XTS_128_TEST_BUF_SIZ (AES_BLOCK_SIZE * 2 + 8)
    unsigned char *buf = NULL;
    unsigned char *cipher = NULL;
    u8 * enc2 = NULL;
    u8 * dec2 = NULL;
    struct scatterlist * src = NULL;
    struct scatterlist * dst = NULL;
    struct crypto_skcipher *tfm = NULL;
    struct skcipher_request *req = NULL;
    struct XtsAesStreamData stream;
    byte* large_input = NULL;

    /* 128 key tests */
    static const unsigned char k1[] = {
        0xa1, 0xb9, 0x0c, 0xba, 0x3f, 0x06, 0xac, 0x35,
        0x3b, 0x2c, 0x34, 0x38, 0x76, 0x08, 0x17, 0x62,
        0x09, 0x09, 0x23, 0x02, 0x6e, 0x91, 0x77, 0x18,
        0x15, 0xf2, 0x9d, 0xab, 0x01, 0x93, 0x2f, 0x2f
    };

    static const unsigned char i1[] = {
        0x4f, 0xae, 0xf7, 0x11, 0x7c, 0xda, 0x59, 0xc6,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };

    static const unsigned char p1[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c
    };

    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char pp[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };

    static const unsigned char c1[] = {
        0x77, 0x8a, 0xe8, 0xb4, 0x3c, 0xb9, 0x8d, 0x5a,
        0x82, 0x50, 0x81, 0xd5, 0xbe, 0x47, 0x1c, 0x63
    };

    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char cp[] = {
        0x2b, 0xf7, 0x2c, 0xf3, 0xeb, 0x85, 0xef, 0x7b,
        0x0b, 0x76, 0xa0, 0xaa, 0xf3, 0x3f, 0x25, 0x8b,
        0x77, 0x8a, 0xe8, 0xb4, 0x3c, 0xb9, 0x8d, 0x5a
    };

    static const unsigned char k2[] = {
        0x39, 0x25, 0x79, 0x05, 0xdf, 0xcc, 0x77, 0x76,
        0x6c, 0x87, 0x0a, 0x80, 0x6a, 0x60, 0xe3, 0xc0,
        0x93, 0xd1, 0x2a, 0xcf, 0xcb, 0x51, 0x42, 0xfa,
        0x09, 0x69, 0x89, 0x62, 0x5b, 0x60, 0xdb, 0x16
    };

    static const unsigned char i2[] = {
        0x5c, 0xf7, 0x9d, 0xb6, 0xc5, 0xcd, 0x99, 0x1a,
        0x1c, 0x78, 0x81, 0x42, 0x24, 0x95, 0x1e, 0x84
    };

    static const unsigned char p2[] = {
        0xbd, 0xc5, 0x46, 0x8f, 0xbc, 0x8d, 0x50, 0xa1,
        0x0d, 0x1c, 0x85, 0x7f, 0x79, 0x1c, 0x5c, 0xba,
        0xb3, 0x81, 0x0d, 0x0d, 0x73, 0xcf, 0x8f, 0x20,
        0x46, 0xb1, 0xd1, 0x9e, 0x7d, 0x5d, 0x8a, 0x56
    };

    static const unsigned char c2[] = {
        0xd6, 0xbe, 0x04, 0x6d, 0x41, 0xf2, 0x3b, 0x5e,
        0xd7, 0x0b, 0x6b, 0x3d, 0x5c, 0x8e, 0x66, 0x23,
        0x2b, 0xe6, 0xb8, 0x07, 0xd4, 0xdc, 0xc6, 0x0e,
        0xff, 0x8d, 0xbc, 0x1d, 0x9f, 0x7f, 0xc8, 0x22
    };

#ifndef HAVE_FIPS /* FIPS requires different keys for main and tweak. */
    static const unsigned char k3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
    };
    static const unsigned char i3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
    };
    static const unsigned char p3[] = {
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0xff, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
        0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20
    };
    static const unsigned char c3[] = {
        0xA2, 0x07, 0x47, 0x76, 0x3F, 0xEC, 0x0C, 0x23,
        0x1B, 0xD0, 0xBD, 0x46, 0x9A, 0x27, 0x38, 0x12,
        0x95, 0x02, 0x3D, 0x5D, 0xC6, 0x94, 0x51, 0x36,
        0xA0, 0x85, 0xD2, 0x69, 0x6E, 0x87, 0x0A, 0xBF,
        0xB5, 0x5A, 0xDD, 0xCB, 0x80, 0xE0, 0xFC, 0xCD
    };
#endif /* HAVE_FIPS */

    if ((aes = (XtsAes *)XMALLOC(sizeof(*aes), NULL, DYNAMIC_TYPE_AES))
        == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((buf = (unsigned char *)XMALLOC(AES_XTS_128_TEST_BUF_SIZ, NULL,
                                        DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }
    if ((cipher = (unsigned char *)XMALLOC(AES_XTS_128_TEST_BUF_SIZ, NULL,
                                           DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;
    else
        aes_inited = 1;

    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(WC_NO_ERR_TRACE(SYSLIB_FAILED_E));
    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);

    ret = wc_AesXtsEncryptInit(aes, i2, sizeof(i2), &stream);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncryptUpdate(aes, buf, p2, AES_BLOCK_SIZE, &stream);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncryptFinal(aes, buf + AES_BLOCK_SIZE,
                                p2 + AES_BLOCK_SIZE,
                                sizeof(p2) - AES_BLOCK_SIZE, &stream);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);

    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(WC_NO_ERR_TRACE(SYSLIB_FAILED_E));
    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* partial block encryption test */
    XMEMSET(cipher, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(cp, cipher, sizeof(cp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(WC_NO_ERR_TRACE(SYSLIB_FAILED_E));
    XMEMSET(cipher, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(cp, cipher, sizeof(cp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* partial block decrypt test */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(WC_NO_ERR_TRACE(SYSLIB_FAILED_E));
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* NIST decrypt test vector */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#if defined(DEBUG_VECTOR_REGISTER_ACCESS) && defined(WC_C_DYNAMIC_FALLBACK)
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(WC_NO_ERR_TRACE(SYSLIB_FAILED_E));
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    WC_DEBUG_SET_VECTOR_REGISTERS_RETVAL(0);
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif

    /* fail case with decrypting using wrong key */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2)) == 0) { /* fail case with wrong key */
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* set correct key and retest */
    XMEMSET(buf, 0, AES_XTS_128_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

#ifndef HAVE_FIPS
    /* Test ciphertext stealing in-place. */
    XMEMCPY(buf, p3, sizeof(p3));
    ret = wc_AesXtsSetKeyNoInit(aes, k3, sizeof(k3), AES_ENCRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncrypt(aes, buf, buf, sizeof(p3), i3, sizeof(i3));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c3, buf, sizeof(c3))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    ret = wc_AesXtsSetKeyNoInit(aes, k3, sizeof(k3), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, buf, sizeof(c3), i3, sizeof(i3));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p3, buf, sizeof(p3))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }
#endif /* HAVE_FIPS */

    {
    #define LARGE_XTS_SZ 1024
        int i;
        int j;
        int k;

        large_input = (byte *)XMALLOC(LARGE_XTS_SZ, NULL,
                                      DYNAMIC_TYPE_TMP_BUFFER);
        if (large_input == NULL) {
            ret = MEMORY_E;
            goto out;
        }

        for (i = 0; i < (int)LARGE_XTS_SZ; i++)
            large_input[i] = (byte)i;

        /* first, encrypt block by block then decrypt with a one-shot call. */
        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncryptInit(aes, i1, sizeof(i1), &stream);
            if (ret != 0)
                goto out;
            for (k = 0; k < j; k += AES_BLOCK_SIZE) {
                if ((j - k) < AES_BLOCK_SIZE*2)
                    ret = wc_AesXtsEncryptFinal(aes, large_input + k,
                                                large_input + k, j - k,
                                                &stream);
                else
                    ret = wc_AesXtsEncryptUpdate(aes, large_input + k,
                                                 large_input + k,
                                                 AES_BLOCK_SIZE, &stream);
                if (ret != 0)
                    goto out;
                if ((j - k) < AES_BLOCK_SIZE*2)
                    break;
            }
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;
            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }

        /* second, encrypt with a one-shot call then decrypt block by block. */
        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecryptInit(aes, i1, sizeof(i1), &stream);
            if (ret != 0)
                goto out;
            for (k = 0; k < j; k += AES_BLOCK_SIZE) {
                if ((j - k) < AES_BLOCK_SIZE*2)
                    ret = wc_AesXtsDecryptFinal(aes, large_input + k,
                                                large_input + k, j - k,
                                                &stream);
                else
                    ret = wc_AesXtsDecryptUpdate(aes, large_input + k,
                                                 large_input + k,
                                                 AES_BLOCK_SIZE, &stream);
                if (ret != 0)
                    goto out;
                if ((j - k) < AES_BLOCK_SIZE*2)
                    break;
            }
            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }
    }

    /* now the kernel crypto part */

    enc2 = XMALLOC(sizeof(pp), NULL, DYNAMIC_TYPE_AES);
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dec2 = XMALLOC(sizeof(pp), NULL, DYNAMIC_TYPE_AES);
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    src = XMALLOC(sizeof(*src) * 2, NULL, DYNAMIC_TYPE_AES);
    if (!src) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dst = XMALLOC(sizeof(*dst) * 2, NULL, DYNAMIC_TYPE_AES);
    if (!dst) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    tfm = crypto_alloc_skcipher(WOLFKM_AESXTS_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        ret = PTR_ERR(tfm);
        pr_err("error: allocating AES skcipher algorithm %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        tfm = NULL;
        goto test_xts_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESXTS_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESXTS_NAME, driver_name, WOLFKM_AESXTS_DRIVER);
            ret = -ENOENT;
            goto test_xts_end;
        }
    }
#endif

    ret = crypto_skcipher_ivsize(tfm);
    if (ret != sizeof(stream.tweak_block)) {
        pr_err("error: AES skcipher algorithm %s crypto_skcipher_ivsize()"
               " returned %d but expected %d\n",
               WOLFKM_AESXTS_DRIVER, ret, (int)sizeof(stream.tweak_block));
        ret = -EINVAL;
        goto test_xts_end;
    }

    ret = crypto_skcipher_setkey(tfm, k1, sizeof(k1));
    if (ret) {
        pr_err("error: crypto_skcipher_setkey for %s returned: %d\n",
               WOLFKM_AESXTS_NAME, ret);
        goto test_xts_end;
    }

    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (req == NULL) {
        ret = -ENOMEM;
        pr_err("error: allocating AES skcipher request %s failed\n",
               WOLFKM_AESXTS_DRIVER);
        goto test_xts_end;
    }

    memcpy(dec2, p1, sizeof(p1));
    memset(enc2, 0, sizeof(p1));

    sg_init_one(src, dec2, sizeof(p1));
    sg_init_one(dst, enc2, sizeof(p1));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), stream.tweak_block);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(c1, enc2, sizeof(c1));
    if (ret) {
        pr_err("error: c1 and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(p1));
    sg_init_one(src, enc2, sizeof(p1));
    sg_init_one(dst, dec2, sizeof(p1));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), stream.tweak_block);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(p1, dec2, sizeof(p1));
    if (ret) {
        pr_err("error: p1 and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memcpy(dec2, pp, sizeof(pp));
    memset(enc2, 0, sizeof(pp));

    sg_init_one(src, dec2, sizeof(pp));
    sg_init_one(dst, enc2, sizeof(pp));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), stream.tweak_block);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(cp, enc2, sizeof(cp));
    if (ret) {
        pr_err("error: cp and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(pp));
    sg_init_one(src, enc2, sizeof(pp));
    sg_init_one(dst, dec2, sizeof(pp));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), stream.tweak_block);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(pp, dec2, sizeof(pp));
    if (ret) {
        pr_err("error: pp and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

test_xts_end:

    XFREE(enc2, NULL, DYNAMIC_TYPE_AES);
    XFREE(dec2, NULL, DYNAMIC_TYPE_AES);
    XFREE(src, NULL, DYNAMIC_TYPE_AES);
    XFREE(dst, NULL, DYNAMIC_TYPE_AES);
    if (req)
        skcipher_request_free(req);
    if (tfm)
        crypto_free_skcipher(tfm);

out:

    XFREE(large_input, NULL, DYNAMIC_TYPE_TMP_BUFFER);

    if (aes_inited)
        wc_AesXtsFree(aes);

    XFREE(buf, NULL, DYNAMIC_TYPE_AES);
    XFREE(cipher, NULL, DYNAMIC_TYPE_AES);
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);

#undef AES_XTS_128_TEST_BUF_SIZ

    return ret;
}

#endif /* WOLFSSL_AES_128 */

#ifdef WOLFSSL_AES_256
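
/* AES-256-XTS self-test: same structure as the 128-bit test above, with
 * 512-bit XTS keys: wolfCrypt known-answer vectors (one-shot and streaming),
 * a large-buffer streaming/one-shot cross-check, then a round trip through
 * the registered WOLFKM_AESXTS_NAME skcipher.
 */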
static int aes_xts_256_test(void)
{
    XtsAes *aes = NULL;
    int aes_inited = 0;
    int ret = 0;
#define AES_XTS_256_TEST_BUF_SIZ (AES_BLOCK_SIZE * 3)
    unsigned char *buf = NULL;
    unsigned char *cipher = NULL;
    u8 * enc2 = NULL;
    u8 * dec2 = NULL;
    struct scatterlist * src = NULL;
    struct scatterlist * dst = NULL;
    struct crypto_skcipher *tfm = NULL;
    struct skcipher_request *req = NULL;
    struct XtsAesStreamData stream;
    byte* large_input = NULL;

    /* 256 key tests */
    static const unsigned char k1[] = {
        0x1e, 0xa6, 0x61, 0xc5, 0x8d, 0x94, 0x3a, 0x0e,
        0x48, 0x01, 0xe4, 0x2f, 0x4b, 0x09, 0x47, 0x14,
        0x9e, 0x7f, 0x9f, 0x8e, 0x3e, 0x68, 0xd0, 0xc7,
        0x50, 0x52, 0x10, 0xbd, 0x31, 0x1a, 0x0e, 0x7c,
        0xd6, 0xe1, 0x3f, 0xfd, 0xf2, 0x41, 0x8d, 0x8d,
        0x19, 0x11, 0xc0, 0x04, 0xcd, 0xa5, 0x8d, 0xa3,
        0xd6, 0x19, 0xb7, 0xe2, 0xb9, 0x14, 0x1e, 0x58,
        0x31, 0x8e, 0xea, 0x39, 0x2c, 0xf4, 0x1b, 0x08
    };

    static const unsigned char i1[] = {
        0xad, 0xf8, 0xd9, 0x26, 0x27, 0x46, 0x4a, 0xd2,
        0xf0, 0x42, 0x8e, 0x84, 0xa9, 0xf8, 0x75, 0x64
    };

    static const unsigned char p1[] = {
        0x2e, 0xed, 0xea, 0x52, 0xcd, 0x82, 0x15, 0xe1,
        0xac, 0xc6, 0x47, 0xe8, 0x10, 0xbb, 0xc3, 0x64,
        0x2e, 0x87, 0x28, 0x7f, 0x8d, 0x2e, 0x57, 0xe3,
        0x6c, 0x0a, 0x24, 0xfb, 0xc1, 0x2a, 0x20, 0x2e
    };

    static const unsigned char c1[] = {
        0xcb, 0xaa, 0xd0, 0xe2, 0xf6, 0xce, 0xa3, 0xf5,
        0x0b, 0x37, 0xf9, 0x34, 0xd4, 0x6a, 0x9b, 0x13,
        0x0b, 0x9d, 0x54, 0xf0, 0x7e, 0x34, 0xf3, 0x6a,
        0xf7, 0x93, 0xe8, 0x6f, 0x73, 0xc6, 0xd7, 0xdb
    };

    /* plain text test of partial block is not from NIST test vector list */
    static const unsigned char pp[] = {
        0xeb, 0xab, 0xce, 0x95, 0xb1, 0x4d, 0x3c, 0x8d,
        0x6f, 0xb3, 0x50, 0x39, 0x07, 0x90, 0x31, 0x1c,
        0x6e, 0x4b, 0x92, 0x01, 0x3e, 0x76, 0x8a, 0xd5
    };

    static const unsigned char cp[] = {
        0x65, 0x5e, 0x1d, 0x37, 0x4a, 0x91, 0xe7, 0x6c,
        0x4f, 0x83, 0x92, 0xbc, 0x5a, 0x10, 0x55, 0x27,
        0x61, 0x0e, 0x5a, 0xde, 0xca, 0xc5, 0x12, 0xd8
    };

    static const unsigned char k2[] = {
        0xad, 0x50, 0x4b, 0x85, 0xd7, 0x51, 0xbf, 0xba,
        0x69, 0x13, 0xb4, 0xcc, 0x79, 0xb6, 0x5a, 0x62,
        0xf7, 0xf3, 0x9d, 0x36, 0x0f, 0x35, 0xb5, 0xec,
        0x4a, 0x7e, 0x95, 0xbd, 0x9b, 0xa5, 0xf2, 0xec,
        0xc1, 0xd7, 0x7e, 0xa3, 0xc3, 0x74, 0xbd, 0x4b,
        0x13, 0x1b, 0x07, 0x83, 0x87, 0xdd, 0x55, 0x5a,
        0xb5, 0xb0, 0xc7, 0xe5, 0x2d, 0xb5, 0x06, 0x12,
        0xd2, 0xb5, 0x3a, 0xcb, 0x47, 0x8a, 0x53, 0xb4
    };

    static const unsigned char i2[] = {
        0xe6, 0x42, 0x19, 0xed, 0xe0, 0xe1, 0xc2, 0xa0,
        0x0e, 0xf5, 0x58, 0x6a, 0xc4, 0x9b, 0xeb, 0x6f
    };

    static const unsigned char p2[] = {
        0x24, 0xcb, 0x76, 0x22, 0x55, 0xb5, 0xa8, 0x00,
        0xf4, 0x6e, 0x80, 0x60, 0x56, 0x9e, 0x05, 0x53,
        0xbc, 0xfe, 0x86, 0x55, 0x3b, 0xca, 0xd5, 0x89,
        0xc7, 0x54, 0x1a, 0x73, 0xac, 0xc3, 0x9a, 0xbd,
        0x53, 0xc4, 0x07, 0x76, 0xd8, 0xe8, 0x22, 0x61,
        0x9e, 0xa9, 0xad, 0x77, 0xa0, 0x13, 0x4c, 0xfc
    };

    static const unsigned char c2[] = {
        0xa3, 0xc6, 0xf3, 0xf3, 0x82, 0x79, 0x5b, 0x10,
        0x87, 0xd7, 0x02, 0x50, 0xdb, 0x2c, 0xd3, 0xb1,
        0xa1, 0x62, 0xa8, 0xb6, 0xdc, 0x12, 0x60, 0x61,
        0xc1, 0x0a, 0x84, 0xa5, 0x85, 0x3f, 0x3a, 0x89,
        0xe6, 0x6c, 0xdb, 0xb7, 0x9a, 0xb4, 0x28, 0x9b,
        0xc3, 0xea, 0xd8, 0x10, 0xe9, 0xc0, 0xaf, 0x92
    };

    if ((aes = (XtsAes *)XMALLOC(sizeof(*aes), NULL, DYNAMIC_TYPE_AES))
        == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    if ((buf = (unsigned char *)XMALLOC(AES_XTS_256_TEST_BUF_SIZ, NULL,
                                        DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }
    if ((cipher = (unsigned char *)XMALLOC(AES_XTS_256_TEST_BUF_SIZ, NULL,
                                           DYNAMIC_TYPE_AES)) == NULL)
    {
        ret = MEMORY_E;
        goto out;
    }

    ret = wc_AesXtsInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;
    else
        aes_inited = 1;

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);

    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, p2, sizeof(p2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);

    ret = wc_AesXtsEncryptInit(aes, i2, sizeof(i2), &stream);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncryptUpdate(aes, buf, p2, AES_BLOCK_SIZE, &stream);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsEncryptFinal(aes, buf + AES_BLOCK_SIZE,
                                p2 + AES_BLOCK_SIZE,
                                sizeof(p2) - AES_BLOCK_SIZE, &stream);
    if (ret != 0)
        goto out;
    if (XMEMCMP(c2, buf, sizeof(c2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);

    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
    if (ret != 0)
        goto out;

    ret = wc_AesXtsEncrypt(aes, buf, p1, sizeof(p1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(c1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* partial block encryption test */
    XMEMSET(cipher, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsEncrypt(aes, cipher, pp, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;

    /* partial block decrypt test */
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, cipher, sizeof(pp), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(pp, buf, sizeof(pp))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    /* NIST decrypt test vector */
    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsDecrypt(aes, buf, c1, sizeof(c1), i1, sizeof(i1));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p1, buf, AES_BLOCK_SIZE)) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    XMEMSET(buf, 0, AES_XTS_256_TEST_BUF_SIZ);
    ret = wc_AesXtsSetKeyNoInit(aes, k2, sizeof(k2), AES_DECRYPTION);
    if (ret != 0)
        goto out;
    ret = wc_AesXtsDecrypt(aes, buf, c2, sizeof(c2), i2, sizeof(i2));
    if (ret != 0)
        goto out;
    if (XMEMCMP(p2, buf, sizeof(p2))) {
        ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
        goto out;
    }

    {
    #define LARGE_XTS_SZ 1024
        int i;
        int j;
        int k;

        large_input = (byte *)XMALLOC(LARGE_XTS_SZ, NULL,
                                      DYNAMIC_TYPE_TMP_BUFFER);
        if (large_input == NULL) {
            ret = MEMORY_E;
            goto out;
        }

        for (i = 0; i < (int)LARGE_XTS_SZ; i++)
            large_input[i] = (byte)i;

        /* first, encrypt block by block then decrypt with a one-shot call. */
        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncryptInit(aes, i1, sizeof(i1), &stream);
            if (ret != 0)
                goto out;
            for (k = 0; k < j; k += AES_BLOCK_SIZE) {
                if ((j - k) < AES_BLOCK_SIZE*2)
                    ret = wc_AesXtsEncryptFinal(aes, large_input + k,
                                                large_input + k, j - k,
                                                &stream);
                else
                    ret = wc_AesXtsEncryptUpdate(aes, large_input + k,
                                                 large_input + k,
                                                 AES_BLOCK_SIZE, &stream);
                if (ret != 0)
                    goto out;
                if ((j - k) < AES_BLOCK_SIZE*2)
                    break;
            }
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;
            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }

        /* second, encrypt with a one-shot call then decrypt block by block. */
        for (j = 16; j < (int)LARGE_XTS_SZ; j++) {
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_ENCRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsEncrypt(aes, large_input, large_input, j, i1,
                                   sizeof(i1));
            if (ret != 0)
                goto out;
            ret = wc_AesXtsSetKeyNoInit(aes, k1, sizeof(k1), AES_DECRYPTION);
            if (ret != 0)
                goto out;
            ret = wc_AesXtsDecryptInit(aes, i1, sizeof(i1), &stream);
            if (ret != 0)
                goto out;
            for (k = 0; k < j; k += AES_BLOCK_SIZE) {
                if ((j - k) < AES_BLOCK_SIZE*2)
                    ret = wc_AesXtsDecryptFinal(aes, large_input + k,
                                                large_input + k, j - k,
                                                &stream);
                else
                    ret = wc_AesXtsDecryptUpdate(aes, large_input + k,
                                                 large_input + k,
                                                 AES_BLOCK_SIZE, &stream);
                if (ret != 0)
                    goto out;
                if ((j - k) < AES_BLOCK_SIZE*2)
                    break;
            }
            for (i = 0; i < j; i++) {
                if (large_input[i] != (byte)i) {
                    ret = LINUXKM_LKCAPI_AES_KAT_MISMATCH_E;
                    goto out;
                }
            }
        }
    }

    /* now the kernel crypto part */

    enc2 = XMALLOC(sizeof(p1), NULL, DYNAMIC_TYPE_AES);
    if (!enc2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dec2 = XMALLOC(sizeof(p1), NULL, DYNAMIC_TYPE_AES);
    if (!dec2) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    src = XMALLOC(sizeof(*src) * 2, NULL, DYNAMIC_TYPE_AES);
    if (!src) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    dst = XMALLOC(sizeof(*dst) * 2, NULL, DYNAMIC_TYPE_AES);
    if (!dst) {
        pr_err("error: malloc failed\n");
        ret = -ENOMEM;
        goto test_xts_end;
    }

    tfm = crypto_alloc_skcipher(WOLFKM_AESXTS_NAME, 0, 0);
    if (IS_ERR(tfm)) {
        ret = PTR_ERR(tfm);
        pr_err("error: allocating AES skcipher algorithm %s failed: %d\n",
               WOLFKM_AESXTS_DRIVER, ret);
        tfm = NULL;
        goto test_xts_end;
    }

#ifndef LINUXKM_LKCAPI_PRIORITY_ALLOW_MASKING
    {
        const char *driver_name =
            crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        if (strcmp(driver_name, WOLFKM_AESXTS_DRIVER)) {
            pr_err("error: unexpected implementation for %s: %s (expected %s)\n",
                   WOLFKM_AESXTS_NAME, driver_name, WOLFKM_AESXTS_DRIVER);
            ret = -ENOENT;
            goto test_xts_end;
        }
    }
#endif

    ret = crypto_skcipher_ivsize(tfm);
    if (ret != sizeof(stream.tweak_block)) {
        pr_err("error: AES skcipher algorithm %s crypto_skcipher_ivsize()"
               " returned %d but expected %d\n",
               WOLFKM_AESXTS_DRIVER, ret, (int)sizeof(stream.tweak_block));
        ret = -EINVAL;
        goto test_xts_end;
    }

    ret = crypto_skcipher_setkey(tfm, k1, sizeof(k1));
    if (ret) {
        pr_err("error: crypto_skcipher_setkey for %s returned: %d\n",
               WOLFKM_AESXTS_NAME, ret);
        goto test_xts_end;
    }

    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    if (req == NULL) {
        ret = -ENOMEM;
        pr_err("error: allocating AES skcipher request %s failed\n",
               WOLFKM_AESXTS_DRIVER);
        goto test_xts_end;
    }

    memcpy(dec2, p1, sizeof(p1));
    memset(enc2, 0, sizeof(p1));

    sg_init_one(src, dec2, sizeof(p1));
    sg_init_one(dst, enc2, sizeof(p1));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), stream.tweak_block);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(c1, enc2, sizeof(c1));
    if (ret) {
        pr_err("error: c1 and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(p1));
    sg_init_one(src, enc2, sizeof(p1));
    sg_init_one(dst, dec2, sizeof(p1));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(p1), stream.tweak_block);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(p1, dec2, sizeof(p1));
    if (ret) {
        pr_err("error: p1 and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memcpy(dec2, pp, sizeof(pp));
    memset(enc2, 0, sizeof(pp));

    sg_init_one(src, dec2, sizeof(pp));
    sg_init_one(dst, enc2, sizeof(pp));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), stream.tweak_block);

    ret = crypto_skcipher_encrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_encrypt returned: %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(cp, enc2, sizeof(cp));
    if (ret) {
        pr_err("error: cp and enc2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

    memset(dec2, 0, sizeof(pp));
    sg_init_one(src, enc2, sizeof(pp));
    sg_init_one(dst, dec2, sizeof(pp));

    memcpy(stream.tweak_block, i1, sizeof(stream.tweak_block));
    skcipher_request_set_crypt(req, src, dst, sizeof(pp), stream.tweak_block);

    ret = crypto_skcipher_decrypt(req);
    if (ret) {
        pr_err("error: crypto_skcipher_decrypt returned %d\n", ret);
        goto test_xts_end;
    }

    ret = XMEMCMP(pp, dec2, sizeof(pp));
    if (ret) {
        pr_err("error: pp and dec2 do not match: %d\n", ret);
        ret = -EINVAL;
        goto test_xts_end;
    }

test_xts_end:

    XFREE(enc2, NULL, DYNAMIC_TYPE_AES);
    XFREE(dec2, NULL, DYNAMIC_TYPE_AES);
    XFREE(src, NULL, DYNAMIC_TYPE_AES);
    XFREE(dst, NULL, DYNAMIC_TYPE_AES);
    if (req)
        skcipher_request_free(req);
    if (tfm)
        crypto_free_skcipher(tfm);

out:

    XFREE(large_input, NULL, DYNAMIC_TYPE_TMP_BUFFER);

    if (aes_inited)
        wc_AesXtsFree(aes);

    XFREE(buf, NULL, DYNAMIC_TYPE_AES);
    XFREE(cipher, NULL, DYNAMIC_TYPE_AES);
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);

#undef AES_XTS_256_TEST_BUF_SIZ

    return ret;
}

#endif /* WOLFSSL_AES_256 */
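
/* Top-level AES-XTS self-test entry point: runs the 128-bit and/or 256-bit
 * key-size tests that are enabled in this build.
 */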
static int linuxkm_test_aesxts(void) {
    int ret = 0;

#ifdef WOLFSSL_AES_128
    ret = aes_xts_128_test();
    if (ret != 0) {
        pr_err("aes_xts_128_test() failed with retval %d.\n", ret);
        goto out;
    }
#endif
#ifdef WOLFSSL_AES_256
    ret = aes_xts_256_test();
    if (ret != 0) {
        pr_err("aes_xts_256_test() failed with retval %d.\n", ret);
        goto out;
    }
#endif

out:
    return ret;
}

#endif /* WOLFSSL_AES_XTS &&
        * (LINUXKM_LKCAPI_REGISTER_ALL || LINUXKM_LKCAPI_REGISTER_AESXTS)
        */

#endif /* !NO_AES */

#if defined(HAVE_FIPS) && defined(CONFIG_CRYPTO_MANAGER) && \
    !defined(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
    #ifdef CONFIG_CRYPTO_FIPS
        #include <linux/fips.h>
    #else
        #error wolfCrypt FIPS with LINUXKM_LKCAPI_REGISTER and CONFIG_CRYPTO_MANAGER requires CONFIG_CRYPTO_FIPS
    #endif
#endif
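
/* Register each enabled wolfCrypt-backed algorithm with the kernel crypto
 * API and run its self-test.  When wolfCrypt FIPS is combined with
 * CONFIG_CRYPTO_MANAGER, fips_enabled is asserted for the duration of
 * registration so the crypto manager skips FIPS-forbidden test vectors
 * and fuzzing.
 */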
static int linuxkm_lkcapi_register(void)
{
    int ret = 0;
#if defined(HAVE_FIPS) && defined(CONFIG_CRYPTO_MANAGER) && \
    !defined(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
    int enabled_fips = 0;
#endif

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
    /* temporarily disable warnings around setkey failures, which are expected
     * from the crypto fuzzer in FIPS configs, and potentially in others.
     * unexpected setkey failures are fatal errors returned by the fuzzer.
     */
    disable_setkey_warnings = 1;
#endif
#if defined(HAVE_FIPS) && defined(CONFIG_CRYPTO_MANAGER) && \
    !defined(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
    if (! fips_enabled) {
        /* temporarily assert system-wide FIPS status, to disable
         * FIPS-forbidden test vectors and fuzzing from the CRYPTO_MANAGER.
         */
        enabled_fips = fips_enabled = 1;
    }
#endif
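
    /* REGISTER_ALG(): guard against double registration, install the
     * algorithm with the given crypto_register_*() call, then run the
     * matching self-test; any failure leaves ret set and jumps to out.
     */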
#define REGISTER_ALG(alg, installer, tester) do {             \
        if (alg ## _loaded) {                                 \
            pr_err("ERROR: %s is already registered.\n",      \
                   (alg).base.cra_driver_name);               \
            ret = -EEXIST;                                    \
            goto out;                                         \
        }                                                     \
                                                              \
        ret = (installer)(&(alg));                            \
                                                              \
        if (ret) {                                            \
            pr_err("ERROR: " #installer " for %s failed "     \
                   "with return code %d.\n",                  \
                   (alg).base.cra_driver_name, ret);          \
            goto out;                                         \
        }                                                     \
                                                              \
        alg ## _loaded = 1;                                   \
                                                              \
        ret = (tester());                                     \
                                                              \
        if (ret) {                                            \
            pr_err("ERROR: self-test for %s failed "          \
                   "with return code %d.\n",                  \
                   (alg).base.cra_driver_name, ret);          \
            goto out;                                         \
        }                                                     \
        pr_info("%s self-test OK -- "                         \
                "registered for %s with priority %d.\n",      \
                (alg).base.cra_driver_name,                   \
                (alg).base.cra_name,                          \
                (alg).base.cra_priority);                     \
    } while (0)

#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))

    REGISTER_ALG(cbcAesAlg, crypto_register_skcipher, linuxkm_test_aescbc);
#endif

#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))

    REGISTER_ALG(cfbAesAlg, crypto_register_skcipher, linuxkm_test_aescfb);
#endif

#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))

    REGISTER_ALG(gcmAesAead, crypto_register_aead, linuxkm_test_aesgcm);
#endif

#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))

    REGISTER_ALG(xtsAesAlg, crypto_register_skcipher, linuxkm_test_aesxts);
#endif

#undef REGISTER_ALG

out:

#if defined(HAVE_FIPS) && defined(CONFIG_CRYPTO_MANAGER) && \
    !defined(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
    if (enabled_fips)
        fips_enabled = 0;
#endif
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
    disable_setkey_warnings = 0;
#endif

    return ret;
}
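
/* Unregister every algorithm that linuxkm_lkcapi_register() successfully
 * installed, clearing each *_loaded flag as it goes.
 */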
static void linuxkm_lkcapi_unregister(void)
{
#define UNREGISTER_ALG(alg, uninstaller) do {   \
        if (alg ## _loaded) {                   \
            (uninstaller)(&(alg));              \
            alg ## _loaded = 0;                 \
        }                                       \
    } while (0)

#if defined(HAVE_AES_CBC) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCBC))

    UNREGISTER_ALG(cbcAesAlg, crypto_unregister_skcipher);
#endif
#if defined(WOLFSSL_AES_CFB) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESCFB))

    UNREGISTER_ALG(cfbAesAlg, crypto_unregister_skcipher);
#endif
#if defined(HAVE_AESGCM) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESGCM))

    UNREGISTER_ALG(gcmAesAead, crypto_unregister_aead);
#endif
#if defined(WOLFSSL_AES_XTS) && \
    (defined(LINUXKM_LKCAPI_REGISTER_ALL) || \
     defined(LINUXKM_LKCAPI_REGISTER_AESXTS))

    UNREGISTER_ALG(xtsAesAlg, crypto_unregister_skcipher);
#endif

#undef UNREGISTER_ALG
}