/*
 * Copyright 2017-2018 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include "internal/cryptlib.h"

#ifndef OPENSSL_NO_ARIA
# include <openssl/evp.h>
# include <openssl/modes.h>
# include <openssl/rand.h>
# include <openssl/rand_drbg.h>
# include "internal/aria.h"
# include "internal/evp_int.h"
# include "modes_lcl.h"
# include "evp_locl.h"

/* ARIA subkey Structure */
typedef struct {
    ARIA_KEY ks;
} EVP_ARIA_KEY;

/* ARIA GCM context */
typedef struct {
    union {
        double align;
        ARIA_KEY ks;
    } ks;                       /* ARIA subkey to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
} EVP_ARIA_GCM_CTX;

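/*
 * Note: gctx->iv normally points at the EVP_CIPHER_CTX's built-in IV buffer;
 * aria_gcm_ctrl() below swaps it for an OPENSSL_malloc()ed buffer when an IV
 * longer than EVP_MAX_IV_LENGTH is requested via EVP_CTRL_AEAD_SET_IVLEN.
 */
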
/* ARIA CCM context */
typedef struct {
    union {
        double align;
        ARIA_KEY ks;
    } ks;                       /* ARIA key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_ARIA_CCM_CTX;

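/*
 * In RFC 3610 terms, L is the size in bytes of the length field (so the
 * nonce occupies the remaining 15 - L bytes of the first block) and M is the
 * size in bytes of the authentication tag.  EVP_CTRL_INIT below defaults
 * these to L = 8 and M = 12.
 */
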
/* The subkey for ARIA is generated. */
static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                         const unsigned char *iv, int enc)
{
    int ret;
    int mode = EVP_CIPHER_CTX_mode(ctx);

    if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
        ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   EVP_CIPHER_CTX_get_cipher_data(ctx));
    else
        ret = aria_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   EVP_CIPHER_CTX_get_cipher_data(ctx));
    if (ret < 0) {
        EVPerr(EVP_F_ARIA_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
        return 0;
    }
    return 1;
}

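/*
 * Note: the decrypt key schedule is only needed for ECB and CBC decryption.
 * The feedback, counter and AEAD modes below always run the block cipher in
 * the forward direction (they all pass (block128_f) aria_encrypt to the
 * CRYPTO_* mode helpers), which is why aria_init_key() selects
 * aria_set_encrypt_key() in those cases even when decrypting.
 */
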
static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
                             size_t len, const ARIA_KEY *key,
                             unsigned char *ivec, const int enc)
{
    if (enc)
        CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
                              (block128_f) aria_encrypt);
    else
        CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
                              (block128_f) aria_encrypt);
}

static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
                          (block128_f) aria_encrypt);
}

static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) aria_encrypt);
}

static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) aria_encrypt);
}

static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
                             const ARIA_KEY *key, const int enc)
{
    aria_encrypt(in, out, key);
}

static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num)
{
    CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
                          (block128_f) aria_encrypt);
}

IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
                        NID_aria_128, 16, 16, 16, 128,
                        0, aria_init_key, NULL,
                        EVP_CIPHER_set_asn1_iv,
                        EVP_CIPHER_get_asn1_iv,
                        NULL)
IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
                        NID_aria_192, 16, 24, 16, 128,
                        0, aria_init_key, NULL,
                        EVP_CIPHER_set_asn1_iv,
                        EVP_CIPHER_get_asn1_iv,
                        NULL)
IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
                        NID_aria_256, 16, 32, 16, 128,
                        0, aria_init_key, NULL,
                        EVP_CIPHER_set_asn1_iv,
                        EVP_CIPHER_get_asn1_iv,
                        NULL)

# define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
                IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
IMPLEMENT_ARIA_CFBR(128,1)
IMPLEMENT_ARIA_CFBR(192,1)
IMPLEMENT_ARIA_CFBR(256,1)
IMPLEMENT_ARIA_CFBR(128,8)
IMPLEMENT_ARIA_CFBR(192,8)
IMPLEMENT_ARIA_CFBR(256,8)

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aria_init_key,                  \
        aria_##mode##_cipher,           \
        NULL,                           \
        sizeof(EVP_ARIA_KEY),           \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }

static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    unsigned int num = EVP_CIPHER_CTX_num(ctx);
    EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY,ctx);

    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx),
                          EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                          (block128_f) aria_encrypt);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)

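/*
 * Minimal usage sketch (not part of this file's build): encrypting a buffer
 * with ARIA-128-CTR through the generic EVP interface.  The key, iv and
 * buffer names below are placeholders and error handling is abbreviated; a
 * real caller should check every return value individually.
 */
#if 0
static int aria_ctr_example(const unsigned char key[16],
                            const unsigned char iv[16],
                            const unsigned char *pt, int ptlen,
                            unsigned char *ct)
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok;

    ok = ctx != NULL
         && EVP_EncryptInit_ex(ctx, EVP_aria_128_ctr(), NULL, key, iv)
         && EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(ctx, ct + outl, &tmplen);
    EVP_CIPHER_CTX_free(ctx);
    return ok ? outl + tmplen : -1;
}
#endif
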
/* Authenticated cipher modes (GCM/CCM) */

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}

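/*
 * For example, starting from the 8-byte counter 00 00 00 00 00 00 00 ff,
 * ctr64_inc() produces 00 00 00 00 00 00 01 00: the last byte wraps to zero
 * and the carry propagates one byte to the left before the loop stops.
 */
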
static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aria_encrypt);
        if (ret < 0) {
            EVPerr(EVP_F_ARIA_GCM_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
        gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_ARIA_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (EVP_CIPHER_CTX_encrypting(c)
            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0
            || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];

            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX,out);

            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
                gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_ARIA_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;
    }
}

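/*
 * Typical caller ordering (an assumption about usage, not enforced here):
 * EVP_CTRL_AEAD_SET_IVLEN, if a non-default IV length is wanted, is issued
 * before the key and IV are supplied; EVP_CTRL_AEAD_GET_TAG is issued after
 * EVP_EncryptFinal_ex() and EVP_CTRL_AEAD_SET_TAG before
 * EVP_DecryptFinal_ex().  The EVP_CTRL_GCM_SET_IV_FIXED, EVP_CTRL_GCM_IV_GEN
 * and EVP_CTRL_GCM_SET_IV_INV controls are intended for the TLS record layer
 * together with EVP_CTRL_AEAD_TLS1_AAD.
 */
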
static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Encrypt payload */
        if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
            goto err;
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
            goto err;
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aria_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
            if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                return -1;
        } else {
            if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                return -1;
        }
        return len;
    }
    if (!EVP_CIPHER_CTX_encrypting(ctx)) {
        if (gctx->taglen < 0)
            return -1;
        if (CRYPTO_gcm128_finish(&gctx->gcm,
                                 EVP_CIPHER_CTX_buf_noconst(ctx),
                                 gctx->taglen) != 0)
            return -1;
        gctx->iv_set = 0;
        return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
}

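/*
 * Minimal AEAD usage sketch (not part of this file's build): ARIA-256-GCM
 * encryption through the generic EVP interface.  AAD is supplied by calling
 * EVP_EncryptUpdate() with a NULL output pointer, which reaches the
 * out == NULL branch of aria_gcm_cipher() above, and the tag is fetched with
 * EVP_CTRL_AEAD_GET_TAG after EVP_EncryptFinal_ex().  Buffer names are
 * placeholders and error checking is abbreviated.
 */
#if 0
static int aria_gcm_example(const unsigned char key[32],
                            const unsigned char iv[12],
                            const unsigned char *aad, int aadlen,
                            const unsigned char *pt, int ptlen,
                            unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok;

    ok = ctx != NULL
         && EVP_EncryptInit_ex(ctx, EVP_aria_256_gcm(), NULL, key, iv)
         && EVP_EncryptUpdate(ctx, NULL, &tmplen, aad, aadlen)
         && EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(ctx, ct + outl, &tmplen)
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(ctx);
    return ok ? outl + tmplen : -1;
}
#endif
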
static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aria_encrypt);
        if (ret < 0) {
            EVPerr(EVP_F_ARIA_CCM_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];

            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX,out);

            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
            : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];

            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}

static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aria_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;

        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];

            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}

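/*
 * Minimal AEAD usage sketch (not part of this file's build): ARIA-128-CCM
 * encryption through the generic EVP interface.  With CCM the tag length
 * (ptr == NULL when encrypting) and the nonce length are set before the key
 * and nonce are supplied, and the total plaintext length is passed in a
 * dedicated EVP_EncryptUpdate() call before any AAD; this reaches the
 * in == NULL / out == NULL branches of aria_ccm_cipher() above.  Buffer
 * names are placeholders and error checking is abbreviated.
 */
#if 0
static int aria_ccm_example(const unsigned char key[16],
                            const unsigned char nonce[12],
                            const unsigned char *aad, int aadlen,
                            const unsigned char *pt, int ptlen,
                            unsigned char *ct, unsigned char tag[12])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok;

    ok = ctx != NULL
         && EVP_EncryptInit_ex(ctx, EVP_aria_128_ccm(), NULL, NULL, NULL)
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL)
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 12, NULL)
         && EVP_EncryptInit_ex(ctx, NULL, NULL, key, nonce)
         && EVP_EncryptUpdate(ctx, NULL, &tmplen, NULL, ptlen) /* total length */
         && EVP_EncryptUpdate(ctx, NULL, &tmplen, aad, aadlen) /* AAD */
         && EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(ctx, ct + outl, &tmplen)
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 12, tag);
    EVP_CIPHER_CTX_free(ctx);
    return ok ? outl : -1;
}
#endif
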
#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER)

#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode, \
        blocksize, keylen/8, ivlen, \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        aria_##mode##_init_key, \
        aria_##mode##_cipher, \
        NULL, \
        sizeof(EVP_ARIA_##MODE##_CTX), \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, gcm, gcm, GCM, 0)

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, ccm, ccm, CCM, 0)

#endif