/* stm32_saes.c — STM32 SAES (secure AES) peripheral driver */
  1. /*
  2. * Copyright (c) 2022, STMicroelectronics - All Rights Reserved
  3. *
  4. * SPDX-License-Identifier: BSD-3-Clause
  5. */
#include <assert.h>
#include <endian.h>
#include <errno.h>
#include <stdint.h>
#include <string.h>

#include <drivers/clk.h>
#include <drivers/delay_timer.h>
#include <drivers/st/stm32_saes.h>
#include <drivers/st/stm32mp_reset.h>
#include <lib/mmio.h>
#include <lib/utils_def.h>
#include <libfdt.h>

#include <platform_def.h>
/* Bit/byte geometry of the AES algorithm */
#define UINT8_BIT 8U
#define AES_BLOCK_SIZE_BIT 128U
#define AES_BLOCK_SIZE (AES_BLOCK_SIZE_BIT / UINT8_BIT)
#define AES_KEYSIZE_128 16U
#define AES_KEYSIZE_256 32U
#define AES_IVSIZE 16U

/* SAES control register */
#define _SAES_CR 0x0U
/* SAES status register */
#define _SAES_SR 0x04U
/* SAES data input register */
#define _SAES_DINR 0x08U
/* SAES data output register */
#define _SAES_DOUTR 0x0CU
/* SAES key registers [0-3] */
#define _SAES_KEYR0 0x10U
#define _SAES_KEYR1 0x14U
#define _SAES_KEYR2 0x18U
#define _SAES_KEYR3 0x1CU
/* SAES initialization vector registers [0-3] */
#define _SAES_IVR0 0x20U
#define _SAES_IVR1 0x24U
#define _SAES_IVR2 0x28U
#define _SAES_IVR3 0x2CU
/* SAES key registers [4-7] (upper half of a 256-bit key) */
#define _SAES_KEYR4 0x30U
#define _SAES_KEYR5 0x34U
#define _SAES_KEYR6 0x38U
#define _SAES_KEYR7 0x3CU
/* SAES suspend registers [0-7] */
#define _SAES_SUSPR0 0x40U
#define _SAES_SUSPR1 0x44U
#define _SAES_SUSPR2 0x48U
#define _SAES_SUSPR3 0x4CU
#define _SAES_SUSPR4 0x50U
#define _SAES_SUSPR5 0x54U
#define _SAES_SUSPR6 0x58U
#define _SAES_SUSPR7 0x5CU
/* SAES Interrupt Enable Register */
#define _SAES_IER 0x300U
/* SAES Interrupt Status Register */
#define _SAES_ISR 0x304U
/* SAES Interrupt Clear Register */
#define _SAES_ICR 0x308U

/* SAES control register fields */
#define _SAES_CR_RESET_VALUE 0x0U
#define _SAES_CR_IPRST BIT(31)
#define _SAES_CR_KEYSEL_MASK GENMASK(30, 28)
#define _SAES_CR_KEYSEL_SHIFT 28U
#define _SAES_CR_KEYSEL_SOFT 0x0U
#define _SAES_CR_KEYSEL_DHUK 0x1U
#define _SAES_CR_KEYSEL_BHK 0x2U
#define _SAES_CR_KEYSEL_BHU_XOR_BH_K 0x4U
#define _SAES_CR_KEYSEL_TEST 0x7U
#define _SAES_CR_KSHAREID_MASK GENMASK(27, 26)
#define _SAES_CR_KSHAREID_SHIFT 26U
#define _SAES_CR_KSHAREID_CRYP 0x0U
#define _SAES_CR_KEYMOD_MASK GENMASK(25, 24)
#define _SAES_CR_KEYMOD_SHIFT 24U
#define _SAES_CR_KEYMOD_NORMAL 0x0U
#define _SAES_CR_KEYMOD_WRAPPED 0x1U
#define _SAES_CR_KEYMOD_SHARED 0x2U
#define _SAES_CR_NPBLB_MASK GENMASK(23, 20)
#define _SAES_CR_NPBLB_SHIFT 20U
#define _SAES_CR_KEYPROT BIT(19)
#define _SAES_CR_KEYSIZE BIT(18)
/* GCM/CCM phase field: init, header (AAD), payload, final (tag) */
#define _SAES_CR_GCMPH_MASK GENMASK(14, 13)
#define _SAES_CR_GCMPH_SHIFT 13U
#define _SAES_CR_GCMPH_INIT 0U
#define _SAES_CR_GCMPH_HEADER 1U
#define _SAES_CR_GCMPH_PAYLOAD 2U
#define _SAES_CR_GCMPH_FINAL 3U
#define _SAES_CR_DMAOUTEN BIT(12)
#define _SAES_CR_DMAINEN BIT(11)
/* CHMOD is a split field: CHMOD[1:0] at bits 6:5, CHMOD[2] at bit 16 */
#define _SAES_CR_CHMOD_MASK (BIT(16) | GENMASK(6, 5))
#define _SAES_CR_CHMOD_SHIFT 5U
#define _SAES_CR_CHMOD_ECB 0x0U
#define _SAES_CR_CHMOD_CBC 0x1U
#define _SAES_CR_CHMOD_CTR 0x2U
#define _SAES_CR_CHMOD_GCM 0x3U
#define _SAES_CR_CHMOD_GMAC 0x3U
/* 0x800 << CHMOD_SHIFT(5) == BIT(16), i.e. CCM sets CHMOD[2] only */
#define _SAES_CR_CHMOD_CCM 0x800U
#define _SAES_CR_MODE_MASK GENMASK(4, 3)
#define _SAES_CR_MODE_SHIFT 3U
#define _SAES_CR_MODE_ENC 0U
#define _SAES_CR_MODE_KEYPREP 1U
#define _SAES_CR_MODE_DEC 2U
#define _SAES_CR_DATATYPE_MASK GENMASK(2, 1)
#define _SAES_CR_DATATYPE_SHIFT 1U
#define _SAES_CR_DATATYPE_NONE 0U
#define _SAES_CR_DATATYPE_HALF_WORD 1U
#define _SAES_CR_DATATYPE_BYTE 2U
#define _SAES_CR_DATATYPE_BIT 3U
#define _SAES_CR_EN BIT(0)

/* SAES status register fields */
#define _SAES_SR_KEYVALID BIT(7)
#define _SAES_SR_BUSY BIT(3)
#define _SAES_SR_WRERR BIT(2)
#define _SAES_SR_RDERR BIT(1)
#define _SAES_SR_CCF BIT(0)

/* SAES interrupt registers fields (shared layout of IER/ISR/ICR) */
#define _SAES_I_RNG_ERR BIT(3)
#define _SAES_I_KEY_ERR BIT(2)
#define _SAES_I_RW_ERR BIT(1)
#define _SAES_I_CC BIT(0)

/* Polling timeouts and reset pulse width (microseconds) */
#define SAES_TIMEOUT_US 100000U
#define TIMEOUT_US_1MS 1000U
#define SAES_RESET_DELAY 20U

/* Compare/set the split CHMOD field; 'mod' is an _SAES_CR_CHMOD_* suffix */
#define IS_CHAINING_MODE(mod, cr) \
	(((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mod << _SAES_CR_CHMOD_SHIFT))

#define SET_CHAINING_MODE(mod, cr) \
	mmio_clrsetbits_32((cr), _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mod << _SAES_CR_CHMOD_SHIFT)
  130. static struct stm32_saes_platdata saes_pdata;
  131. static int stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata)
  132. {
  133. int node;
  134. struct dt_node_info info;
  135. void *fdt;
  136. if (fdt_get_address(&fdt) == 0) {
  137. return -FDT_ERR_NOTFOUND;
  138. }
  139. node = dt_get_node(&info, -1, DT_SAES_COMPAT);
  140. if (node < 0) {
  141. ERROR("No SAES entry in DT\n");
  142. return -FDT_ERR_NOTFOUND;
  143. }
  144. if (info.status == DT_DISABLED) {
  145. return -FDT_ERR_NOTFOUND;
  146. }
  147. if ((info.base == 0U) || (info.clock < 0) || (info.reset < 0)) {
  148. return -FDT_ERR_BADVALUE;
  149. }
  150. pdata->base = (uintptr_t)info.base;
  151. pdata->clock_id = (unsigned long)info.clock;
  152. pdata->reset_id = (unsigned int)info.reset;
  153. return 0;
  154. }
  155. static bool does_chaining_mode_need_iv(uint32_t cr)
  156. {
  157. return !(IS_CHAINING_MODE(ECB, cr));
  158. }
  159. static bool is_encrypt(uint32_t cr)
  160. {
  161. return (cr & _SAES_CR_MODE_MASK) == (_SAES_CR_MODE_ENC << _SAES_CR_MODE_SHIFT);
  162. }
  163. static bool is_decrypt(uint32_t cr)
  164. {
  165. return (cr & _SAES_CR_MODE_MASK) == (_SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
  166. }
  167. static int wait_computation_completed(uintptr_t base)
  168. {
  169. uint64_t timeout = timeout_init_us(SAES_TIMEOUT_US);
  170. while ((mmio_read_32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) {
  171. if (timeout_elapsed(timeout)) {
  172. WARN("%s: timeout\n", __func__);
  173. return -ETIMEDOUT;
  174. }
  175. }
  176. return 0;
  177. }
/* Write 1 to ICR.CCF to acknowledge/clear the completion flag in SR. */
static void clear_computation_completed(uintptr_t base)
{
	mmio_setbits_32(base + _SAES_ICR, _SAES_I_CC);
}
  182. static int saes_start(struct stm32_saes_context *ctx)
  183. {
  184. uint64_t timeout;
  185. /* Reset IP */
  186. mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
  187. udelay(SAES_RESET_DELAY);
  188. mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
  189. timeout = timeout_init_us(SAES_TIMEOUT_US);
  190. while ((mmio_read_32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) == _SAES_SR_BUSY) {
  191. if (timeout_elapsed(timeout)) {
  192. WARN("%s: timeout\n", __func__);
  193. return -ETIMEDOUT;
  194. }
  195. }
  196. return 0;
  197. }
  198. static void saes_end(struct stm32_saes_context *ctx, int prev_error)
  199. {
  200. if (prev_error != 0) {
  201. /* Reset IP */
  202. mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
  203. udelay(SAES_RESET_DELAY);
  204. mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
  205. }
  206. /* Disable the SAES peripheral */
  207. mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
  208. }
  209. static void saes_write_iv(struct stm32_saes_context *ctx)
  210. {
  211. /* If chaining mode need to restore IV */
  212. if (does_chaining_mode_need_iv(ctx->cr)) {
  213. uint8_t i;
  214. /* Restore the _SAES_IVRx */
  215. for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
  216. mmio_write_32(ctx->base + _SAES_IVR0 + i * sizeof(uint32_t), ctx->iv[i]);
  217. }
  218. }
  219. }
  220. static void saes_write_key(struct stm32_saes_context *ctx)
  221. {
  222. /* Restore the _SAES_KEYRx if SOFTWARE key */
  223. if ((ctx->cr & _SAES_CR_KEYSEL_MASK) == (_SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT)) {
  224. uint8_t i;
  225. for (i = 0U; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++) {
  226. mmio_write_32(ctx->base + _SAES_KEYR0 + i * sizeof(uint32_t), ctx->key[i]);
  227. }
  228. if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) {
  229. for (i = 0U; i < (AES_KEYSIZE_256 / 2U) / sizeof(uint32_t); i++) {
  230. mmio_write_32(ctx->base + _SAES_KEYR4 + i * sizeof(uint32_t),
  231. ctx->key[i + 4U]);
  232. }
  233. }
  234. }
  235. }
/*
 * Program the key size and key registers for the operation described by
 * ctx->cr. For ECB/CBC decryption the hardware additionally requires a
 * key-preparation pass (Mode 2) to derive the decryption round keys before
 * switching back to decryption (Mode 3).
 * Returns 0 on success, a negative errno value on timeout.
 */
static int saes_prepare_key(struct stm32_saes_context *ctx)
{
	/* Disable the SAES peripheral */
	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

	/* Set key size */
	if ((ctx->cr & _SAES_CR_KEYSIZE) != 0U) {
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	} else {
		mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	}

	saes_write_key(ctx);

	/* For ECB/CBC decryption, key preparation mode must be selected to populate the key */
	if ((IS_CHAINING_MODE(ECB, ctx->cr) || IS_CHAINING_MODE(CBC, ctx->cr)) &&
	    is_decrypt(ctx->cr)) {
		int ret;

		/* Select Mode 2 (key preparation) */
		mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_KEYPREP << _SAES_CR_MODE_SHIFT);

		/* Enable SAES: this starts the key schedule computation */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

		/* Wait Computation completed */
		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			return ret;
		}

		clear_computation_completed(ctx->base);

		/* Set Mode 3 (decryption) for the actual data processing */
		mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
	}

	return 0;
}
  268. static int save_context(struct stm32_saes_context *ctx)
  269. {
  270. if ((mmio_read_32(ctx->base + _SAES_SR) & _SAES_SR_CCF) != 0U) {
  271. /* Device should not be in a processing phase */
  272. return -EINVAL;
  273. }
  274. /* Save CR */
  275. ctx->cr = mmio_read_32(ctx->base + _SAES_CR);
  276. /* If chaining mode need to save current IV */
  277. if (does_chaining_mode_need_iv(ctx->cr)) {
  278. uint8_t i;
  279. /* Save IV */
  280. for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
  281. ctx->iv[i] = mmio_read_32(ctx->base + _SAES_IVR0 + i * sizeof(uint32_t));
  282. }
  283. }
  284. /* Disable the SAES peripheral */
  285. mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
  286. return 0;
  287. }
  288. /* To resume the processing of a message */
  289. static int restore_context(struct stm32_saes_context *ctx)
  290. {
  291. int ret;
  292. /* IP should be disabled */
  293. if ((mmio_read_32(ctx->base + _SAES_CR) & _SAES_CR_EN) != 0U) {
  294. VERBOSE("%s: Device is still enabled\n", __func__);
  295. return -EINVAL;
  296. }
  297. /* Reset internal state */
  298. mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
  299. /* Restore the _SAES_CR */
  300. mmio_write_32(ctx->base + _SAES_CR, ctx->cr);
  301. /* Preparation decrypt key */
  302. ret = saes_prepare_key(ctx);
  303. if (ret != 0) {
  304. return ret;
  305. }
  306. saes_write_iv(ctx);
  307. /* Enable the SAES peripheral */
  308. mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
  309. return 0;
  310. }
  311. /**
  312. * @brief Initialize SAES driver.
  313. * @param None.
  314. * @retval 0 if OK; negative value else.
  315. */
  316. int stm32_saes_driver_init(void)
  317. {
  318. int err;
  319. err = stm32_saes_parse_fdt(&saes_pdata);
  320. if (err != 0) {
  321. return err;
  322. }
  323. clk_enable(saes_pdata.clock_id);
  324. if (stm32mp_reset_assert(saes_pdata.reset_id, TIMEOUT_US_1MS) != 0) {
  325. panic();
  326. }
  327. udelay(SAES_RESET_DELAY);
  328. if (stm32mp_reset_deassert(saes_pdata.reset_id, TIMEOUT_US_1MS) != 0) {
  329. panic();
  330. }
  331. return 0;
  332. }
/**
 * @brief Start an AES computation.
 * @param ctx: SAES process context
 * @param is_dec: true if decryption, false if encryption
 * @param ch_mode: define the chaining mode
 * @param key_select: define where the key comes from.
 * @param key: pointer to key (if key_select is KEY_SOFT, else unused)
 * @param key_size: key size in bytes (16 or 32)
 * @param iv: pointer to initialization vector (unused if ch_mode is ECB)
 * @param iv_size: iv size in bytes
 * @note this function doesn't program the configuration into hardware; it
 *       stores the values in ctx (only saes_start() touches the IP, to
 *       reset it).
 *
 * @retval 0 if OK; negative value else.
 */
int stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec,
		    enum stm32_saes_chaining_mode ch_mode, enum stm32_saes_key_selection key_select,
		    const void *key, size_t key_size, const void *iv, size_t iv_size)
{
	unsigned int i;
	const uint32_t *iv_u32;
	const uint32_t *key_u32;

	ctx->assoc_len = 0U;
	ctx->load_len = 0U;
	ctx->base = saes_pdata.base;
	ctx->cr = _SAES_CR_RESET_VALUE;

	/* We want buffer to be u32 aligned */
	assert((uintptr_t)key % __alignof__(uint32_t) == 0);
	assert((uintptr_t)iv % __alignof__(uint32_t) == 0);

	iv_u32 = iv;
	key_u32 = key;

	/*
	 * NOTE: ctx->cr is a plain RAM word; the mmio_* helpers are reused on
	 * its address purely for the convenience of the field accessors.
	 */
	if (is_dec) {
		/* Save Mode 3 = decrypt */
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
	} else {
		/* Save Mode 1 = crypt */
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_ENC << _SAES_CR_MODE_SHIFT);
	}

	/* Save chaining mode */
	switch (ch_mode) {
	case STM32_SAES_MODE_ECB:
		SET_CHAINING_MODE(ECB, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_CBC:
		SET_CHAINING_MODE(CBC, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_CTR:
		SET_CHAINING_MODE(CTR, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_GCM:
		SET_CHAINING_MODE(GCM, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_CCM:
		SET_CHAINING_MODE(CCM, (uintptr_t)&(ctx->cr));
		break;
	default:
		return -EINVAL;
	}

	/* We will use HW Byte swap (_SAES_CR_DATATYPE_BYTE) for data.
	 * so we won't need to
	 * htobe32(data) before write to DINR
	 * nor
	 * be32toh after reading from DOUTR
	 *
	 * But note that wrap key only accept _SAES_CR_DATATYPE_NONE
	 */
	mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_DATATYPE_MASK,
			   _SAES_CR_DATATYPE_BYTE << _SAES_CR_DATATYPE_SHIFT);

	/* Configure keysize */
	switch (key_size) {
	case AES_KEYSIZE_128:
		mmio_clrbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSIZE);
		break;
	case AES_KEYSIZE_256:
		mmio_setbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSIZE);
		break;
	default:
		return -EINVAL;
	}

	/* Configure key */
	switch (key_select) {
	case STM32_SAES_KEY_SOFT:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT);
		/* Save key */
		switch (key_size) {
		case AES_KEYSIZE_128:
			/* First 16 bytes == 4 u32 */
			for (i = 0U; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++) {
				mmio_write_32((uintptr_t)(ctx->key + i), htobe32(key_u32[3 - i]));
				/* /!\ we save the key in HW byte order
				 * and word order : key[i] is for _SAES_KEYRi
				 */
			}
			break;
		case AES_KEYSIZE_256:
			for (i = 0U; i < AES_KEYSIZE_256 / sizeof(uint32_t); i++) {
				mmio_write_32((uintptr_t)(ctx->key + i), htobe32(key_u32[7 - i]));
				/* /!\ we save the key in HW byte order
				 * and word order : key[i] is for _SAES_KEYRi
				 */
			}
			break;
		default:
			return -EINVAL;
		}
		break;
	case STM32_SAES_KEY_DHU:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_DHUK << _SAES_CR_KEYSEL_SHIFT);
		break;
	case STM32_SAES_KEY_BH:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_BHK << _SAES_CR_KEYSEL_SHIFT);
		break;
	case STM32_SAES_KEY_BHU_XOR_BH:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_BHU_XOR_BH_K << _SAES_CR_KEYSEL_SHIFT);
		break;
	case STM32_SAES_KEY_WRAPPED:
		/* NOTE(review): wrapped key programs KEYSEL_SOFT here; KEYMOD
		 * is presumably what distinguishes wrapped mode — confirm
		 * against the SAES reference manual.
		 */
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT);
		break;
	default:
		return -EINVAL;
	}

	/* Save IV */
	if (ch_mode != STM32_SAES_MODE_ECB) {
		if ((iv == NULL) || (iv_size != AES_IVSIZE)) {
			return -EINVAL;
		}

		for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			mmio_write_32((uintptr_t)(ctx->iv + i), htobe32(iv_u32[3 - i]));
			/* /!\ We save the iv in HW byte order */
		}
	}

	return saes_start(ctx);
}
  472. /**
  473. * @brief Update (or start) a AES authentificate process of associated data (CCM or GCM).
  474. * @param ctx: SAES process context
  475. * @param last_block: true if last assoc data block
  476. * @param data: pointer to associated data
  477. * @param data_size: data size
  478. *
  479. * @retval 0 if OK; negative value else.
  480. */
  481. int stm32_saes_update_assodata(struct stm32_saes_context *ctx, bool last_block,
  482. uint8_t *data, size_t data_size)
  483. {
  484. int ret;
  485. uint32_t *data_u32;
  486. unsigned int i = 0U;
  487. /* We want buffers to be u32 aligned */
  488. assert((uintptr_t)data % __alignof__(uint32_t) == 0);
  489. data_u32 = (uint32_t *)data;
  490. /* Init phase */
  491. ret = restore_context(ctx);
  492. if (ret != 0) {
  493. goto out;
  494. }
  495. ret = wait_computation_completed(ctx->base);
  496. if (ret != 0) {
  497. return ret;
  498. }
  499. clear_computation_completed(ctx->base);
  500. if ((data == NULL) || (data_size == 0U)) {
  501. /* No associated data */
  502. /* ret already = 0 */
  503. goto out;
  504. }
  505. /* There is an header/associated data phase */
  506. mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
  507. _SAES_CR_GCMPH_HEADER << _SAES_CR_GCMPH_SHIFT);
  508. /* Enable the SAES peripheral */
  509. mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
  510. while (i < round_down(data_size, AES_BLOCK_SIZE)) {
  511. unsigned int w; /* Word index */
  512. w = i / sizeof(uint32_t);
  513. /* No need to htobe() as we configure the HW to swap bytes */
  514. mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 0U]);
  515. mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 1U]);
  516. mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 2U]);
  517. mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 3U]);
  518. ret = wait_computation_completed(ctx->base);
  519. if (ret != 0) {
  520. goto out;
  521. }
  522. clear_computation_completed(ctx->base);
  523. /* Process next block */
  524. i += AES_BLOCK_SIZE;
  525. ctx->assoc_len += AES_BLOCK_SIZE_BIT;
  526. }
  527. /* Manage last block if not a block size multiple */
  528. if ((last_block) && (i < data_size)) {
  529. /* We don't manage unaligned last block yet */
  530. ret = -ENODEV;
  531. goto out;
  532. }
  533. out:
  534. if (ret != 0) {
  535. saes_end(ctx, ret);
  536. }
  537. return ret;
  538. }
/**
 * @brief Update (or start) an AES authenticate and de/encrypt with payload data (CCM or GCM).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size in bytes
 *
 * @note uses memcpy(); requires <string.h>.
 * @retval 0 if OK; negative value else.
 */
int stm32_saes_update_load(struct stm32_saes_context *ctx, bool last_block,
			   uint8_t *data_in, uint8_t *data_out, size_t data_size)
{
	int ret = 0;
	uint32_t *data_in_u32;
	uint32_t *data_out_u32;
	unsigned int i = 0U;
	uint32_t prev_cr;

	/* We want buffers to be u32 aligned */
	assert((uintptr_t)data_in % __alignof__(uint32_t) == 0);
	assert((uintptr_t)data_out % __alignof__(uint32_t) == 0);
	data_in_u32 = (uint32_t *)data_in;
	data_out_u32 = (uint32_t *)data_out;

	/* Sampled before switching GCMPH, to detect a skipped header phase */
	prev_cr = mmio_read_32(ctx->base + _SAES_CR);

	if ((data_in == NULL) || (data_size == 0U)) {
		/* there is no data */
		goto out;
	}

	/* There is a load phase */
	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			   _SAES_CR_GCMPH_PAYLOAD << _SAES_CR_GCMPH_SHIFT);

	if ((prev_cr & _SAES_CR_GCMPH_MASK) ==
	    (_SAES_CR_GCMPH_INIT << _SAES_CR_GCMPH_SHIFT)) {
		/* Still in initialization phase, no header
		 * We need to enable the SAES peripheral
		 */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
	}

	/* Feed/read back one 128-bit block (4 words) per iteration */
	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
		unsigned int w; /* Word index */

		w = i / sizeof(uint32_t);
		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 0U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 1U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 2U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		data_out_u32[w + 0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->load_len += AES_BLOCK_SIZE_BIT;
	}

	/* Manage last block if not a block size multiple: zero-pad the input
	 * into a full block, process it, and copy back only the valid bytes.
	 */
	if ((last_block) && (i < data_size)) {
		uint32_t block_in[AES_BLOCK_SIZE / sizeof(uint32_t)] = {0};
		uint32_t block_out[AES_BLOCK_SIZE / sizeof(uint32_t)] = {0};

		memcpy(block_in, data_in + i, data_size - i);

		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, block_in[0U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[1U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[2U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			VERBOSE("%s %d\n", __func__, __LINE__);
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		block_out[0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		memcpy(data_out + i, block_out, data_size - i);
		/* Only the real payload bits count towards the GCM length */
		ctx->load_len += (data_size - i) * UINT8_BIT;
	}

out:
	if (ret != 0) {
		saes_end(ctx, ret);
	}

	return ret;
}
/**
 * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
 * @param ctx: SAES process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size in bytes (output is truncated to at most 16)
 *
 * @note always ends the SAES process (saes_end() on every path).
 * @retval 0 if OK; negative value else.
 */
int stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag,
		     size_t tag_size)
{
	int ret;
	uint32_t tag_u32[4];
	uint32_t prev_cr;

	/* Sampled before switching GCMPH, to detect a skipped header phase */
	prev_cr = mmio_read_32(ctx->base + _SAES_CR);

	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			   _SAES_CR_GCMPH_FINAL << _SAES_CR_GCMPH_SHIFT);

	if ((prev_cr & _SAES_CR_GCMPH_MASK) == (_SAES_CR_GCMPH_INIT << _SAES_CR_GCMPH_SHIFT)) {
		/* Still in initialization phase, no header
		 * We need to enable the SAES peripheral
		 */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
	}

	/* GCM final block: 64-bit AAD length then 64-bit payload length (bits).
	 * No need to htobe() as we configure the HW to swap bytes.
	 */
	mmio_write_32(ctx->base + _SAES_DINR, 0);
	mmio_write_32(ctx->base + _SAES_DINR, ctx->assoc_len);
	mmio_write_32(ctx->base + _SAES_DINR, 0);
	mmio_write_32(ctx->base + _SAES_DINR, ctx->load_len);

	ret = wait_computation_completed(ctx->base);
	if (ret != 0) {
		goto out;
	}

	/* Read the 128-bit tag.
	 * No need to htobe() as we configure the HW to swap bytes.
	 */
	tag_u32[0] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[1] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[2] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[3] = mmio_read_32(ctx->base + _SAES_DOUTR);

	clear_computation_completed(ctx->base);

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	saes_end(ctx, ret);

	return ret;
}
/**
 * @brief Update (or start) an AES de/encrypt process (ECB, CBC or CTR).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size in bytes
 *
 * @note non-last chunks must be a multiple of 16 bytes; context is saved
 *       across chunks and the SAES process is ended on the last one.
 * @retval 0 if OK; negative value else.
 */
int stm32_saes_update(struct stm32_saes_context *ctx, bool last_block,
		      uint8_t *data_in, uint8_t *data_out, size_t data_size)
{
	int ret;
	uint32_t *data_in_u32;
	uint32_t *data_out_u32;
	unsigned int i = 0U;

	/* We want buffers to be u32 aligned */
	assert((uintptr_t)data_in % __alignof__(uint32_t) == 0);
	assert((uintptr_t)data_out % __alignof__(uint32_t) == 0);
	data_in_u32 = (uint32_t *)data_in;
	data_out_u32 = (uint32_t *)data_out;

	if ((!last_block) &&
	    (round_down(data_size, AES_BLOCK_SIZE) != data_size)) {
		ERROR("%s: non last block must be multiple of 128 bits\n",
		      __func__);
		ret = -EINVAL;
		goto out;
	}

	/* In CBC encryption we need to manage specifically last 2 128bits
	 * blocks if total size in not a block size aligned
	 * work TODO. Currently return ENODEV.
	 * Moreover as we need to know last 2 block, if unaligned and
	 * call with less than two block, return -EINVAL.
	 */
	if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) && is_encrypt(ctx->cr) &&
	    (round_down(data_size, AES_BLOCK_SIZE) != data_size)) {
		if (data_size < AES_BLOCK_SIZE * 2U) {
			ERROR("if CBC, last part size should be at least 2 * AES_BLOCK_SIZE\n");
			ret = -EINVAL;
			goto out;
		}
		/* Moreover the CBC specific padding for encrypt is not yet implemented */
		ret = -ENODEV;
		goto out;
	}

	ret = restore_context(ctx);
	if (ret != 0) {
		goto out;
	}

	/* Feed/read back one 128-bit block (4 words) per iteration */
	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
		unsigned int w; /* Word index */

		w = i / sizeof(uint32_t);
		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 0U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 1U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 2U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		data_out_u32[w + 0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
	}

	/* Manage last block if not a block size multiple */
	if ((last_block) && (i < data_size)) {
		/* In and out buffer have same size so should be AES_BLOCK_SIZE multiple */
		ret = -ENODEV;
		goto out;
	}

	/* Keep the running state so the next chunk can resume */
	if (!last_block) {
		ret = save_context(ctx);
	}

out:
	/* If last block or error, end of SAES process */
	if (last_block || (ret != 0)) {
		saes_end(ctx, ret);
	}

	return ret;
}