cipher_aes_gcm_siv_hw.c

/*
 * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * AES low level APIs are deprecated for public use, but still ok for internal
 * use where we're using them to implement the higher level EVP interface, as is
 * the case here.
 */

#include "internal/deprecated.h"

#include <openssl/evp.h>
#include "internal/endian.h"
#include "prov/implementations.h"
#include "cipher_aes_gcm_siv.h"

static int aes_gcm_siv_ctr32(PROV_AES_GCM_SIV_CTX *ctx, const unsigned char *init_counter,
                             unsigned char *out, const unsigned char *in, size_t len);

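/*
 * Derive the per-nonce keys as described in RFC 8452: the 16-byte message
 * authentication key and the message encryption key (same length as the AES
 * key) are built by encrypting successive little-endian counter blocks
 * (counter || nonce) with the key-generation key and keeping the first 8
 * bytes of each ECB output.  The ECB context is then re-keyed with the
 * derived message encryption key for the tag and CTR32 computations below.
 */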
static int aes_gcm_siv_initkey(void *vctx)
{
    PROV_AES_GCM_SIV_CTX *ctx = (PROV_AES_GCM_SIV_CTX *)vctx;
    uint8_t output[BLOCK_SIZE];
    uint32_t counter = 0x0;
    size_t i;
    union {
        uint32_t counter;
        uint8_t block[BLOCK_SIZE];
    } data;
    int out_len;
    EVP_CIPHER *ecb = NULL;
    DECLARE_IS_ENDIAN;

    switch (ctx->key_len) {
    case 16:
        ecb = EVP_CIPHER_fetch(ctx->libctx, "AES-128-ECB", NULL);
        break;
    case 24:
        ecb = EVP_CIPHER_fetch(ctx->libctx, "AES-192-ECB", NULL);
        break;
    case 32:
        ecb = EVP_CIPHER_fetch(ctx->libctx, "AES-256-ECB", NULL);
        break;
    default:
        goto err;
    }
    if (ctx->ecb_ctx == NULL && (ctx->ecb_ctx = EVP_CIPHER_CTX_new()) == NULL)
        goto err;
    if (!EVP_EncryptInit_ex2(ctx->ecb_ctx, ecb, ctx->key_gen_key, NULL, NULL))
        goto err;

    memset(&data, 0, sizeof(data));
    memcpy(&data.block[sizeof(data.counter)], ctx->nonce, NONCE_SIZE);

    /* msg_auth_key is always 16 bytes in size, regardless of AES128/AES256 */
    /* counter is stored little-endian */
    for (i = 0; i < BLOCK_SIZE; i += 8) {
        if (IS_LITTLE_ENDIAN) {
            data.counter = counter;
        } else {
            data.counter = GSWAP4(counter);
        }
        /* Block size is 16 (128 bits), but only 8 bytes are used */
        out_len = BLOCK_SIZE;
        if (!EVP_EncryptUpdate(ctx->ecb_ctx, output, &out_len, data.block, BLOCK_SIZE))
            goto err;
        memcpy(&ctx->msg_auth_key[i], output, 8);
        counter++;
    }

    /* msg_enc_key length is directly tied to key length AES128/AES256 */
    for (i = 0; i < ctx->key_len; i += 8) {
        if (IS_LITTLE_ENDIAN) {
            data.counter = counter;
        } else {
            data.counter = GSWAP4(counter);
        }
        /* Block size is 16 bytes (128 bits), but only 8 bytes are used */
        out_len = BLOCK_SIZE;
        if (!EVP_EncryptUpdate(ctx->ecb_ctx, output, &out_len, data.block, BLOCK_SIZE))
            goto err;
        memcpy(&ctx->msg_enc_key[i], output, 8);
        counter++;
    }
    if (!EVP_EncryptInit_ex2(ctx->ecb_ctx, ecb, ctx->msg_enc_key, NULL, NULL))
        goto err;

    /* Freshen up the state */
    ctx->used_enc = 0;
    ctx->used_dec = 0;
    EVP_CIPHER_free(ecb);
    return 1;

 err:
    EVP_CIPHER_CTX_free(ctx->ecb_ctx);
    EVP_CIPHER_free(ecb);
    ctx->ecb_ctx = NULL;
    return 0;
}

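/*
 * Buffer the AAD for later POLYVAL processing.  The buffer is kept
 * zero-padded to a 16-byte boundary and is limited to 2^36 bytes per
 * RFC 8452; a zero-length call discards any AAD collected so far.
 */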
static int aes_gcm_siv_aad(PROV_AES_GCM_SIV_CTX *ctx,
                           const unsigned char *aad, size_t len)
{
    size_t to_alloc;
    uint8_t *ptr;
    uint64_t len64;

    /* length of 0 resets the AAD */
    if (len == 0) {
        OPENSSL_free(ctx->aad);
        ctx->aad = NULL;
        ctx->aad_len = 0;
        return 1;
    }
    to_alloc = UP16(ctx->aad_len + len);
    /* need to check the size of the AAD per RFC8452 */
    len64 = to_alloc;
    if (len64 > ((uint64_t)1 << 36))
        return 0;
    ptr = OPENSSL_realloc(ctx->aad, to_alloc);
    if (ptr == NULL)
        return 0;
    ctx->aad = ptr;
    memcpy(&ctx->aad[ctx->aad_len], aad, len);
    ctx->aad_len += len;
    if (to_alloc > ctx->aad_len)
        memset(&ctx->aad[ctx->aad_len], 0, to_alloc - ctx->aad_len);
    return 1;
}

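/*
 * Final step: when encrypting, report whether a tag was generated; when
 * decrypting, compare the computed tag against the user-supplied one in
 * constant time and only succeed if a user tag was actually provided.
 */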
static int aes_gcm_siv_finish(PROV_AES_GCM_SIV_CTX *ctx)
{
    int ret = 0;

    if (ctx->enc)
        return ctx->generated_tag;
    ret = !CRYPTO_memcmp(ctx->tag, ctx->user_tag, sizeof(ctx->tag));
    ret &= ctx->have_user_tag;
    return ret;
}

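/*
 * Encrypt: compute POLYVAL over the padded AAD, the padded plaintext and the
 * length block, XOR in the nonce and clear the top bit, then encrypt that
 * value to obtain the tag.  The ciphertext is produced with AES-CTR32 using
 * the tag (with its top bit forced to 1) as the initial counter block.
 */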
static int aes_gcm_siv_encrypt(PROV_AES_GCM_SIV_CTX *ctx, const unsigned char *in,
                               unsigned char *out, size_t len)
{
    uint64_t len_blk[2];
    uint8_t S_s[TAG_SIZE];
    uint8_t counter_block[TAG_SIZE];
    uint8_t padding[BLOCK_SIZE];
    size_t i;
    int64_t len64 = len;
    int out_len;
    int error = 0;
    DECLARE_IS_ENDIAN;

    ctx->generated_tag = 0;
    if (!ctx->speed && ctx->used_enc)
        return 0;
    /* need to check the size of the input! */
    if (len64 > ((int64_t)1 << 36) || len == 0)
        return 0;

    if (IS_LITTLE_ENDIAN) {
        len_blk[0] = (uint64_t)ctx->aad_len * 8;
        len_blk[1] = (uint64_t)len * 8;
    } else {
        len_blk[0] = GSWAP8((uint64_t)ctx->aad_len * 8);
        len_blk[1] = GSWAP8((uint64_t)len * 8);
    }
    memset(S_s, 0, TAG_SIZE);
    ossl_polyval_ghash_init(ctx->Htable, (const uint64_t*)ctx->msg_auth_key);
    if (ctx->aad != NULL) {
        /* AAD is allocated with padding, but need to adjust length */
        ossl_polyval_ghash_hash(ctx->Htable, S_s, ctx->aad, UP16(ctx->aad_len));
    }
    if (DOWN16(len) > 0)
        ossl_polyval_ghash_hash(ctx->Htable, S_s, (uint8_t *) in, DOWN16(len));
    if (!IS16(len)) {
        /* deal with padding - probably easier to memset the padding first rather than calculate */
        memset(padding, 0, sizeof(padding));
        memcpy(padding, &in[DOWN16(len)], REMAINDER16(len));
        ossl_polyval_ghash_hash(ctx->Htable, S_s, padding, sizeof(padding));
    }
    ossl_polyval_ghash_hash(ctx->Htable, S_s, (uint8_t *) len_blk, sizeof(len_blk));
    for (i = 0; i < NONCE_SIZE; i++)
        S_s[i] ^= ctx->nonce[i];
    S_s[TAG_SIZE - 1] &= 0x7f;
    out_len = sizeof(ctx->tag);
    error |= !EVP_EncryptUpdate(ctx->ecb_ctx, ctx->tag, &out_len, S_s, sizeof(S_s));
    memcpy(counter_block, ctx->tag, TAG_SIZE);
    counter_block[TAG_SIZE - 1] |= 0x80;
    error |= !aes_gcm_siv_ctr32(ctx, counter_block, out, in, len);
    ctx->generated_tag = !error;
    /* Regardless of error */
    ctx->used_enc = 1;
    return !error;
}

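/*
 * Decrypt: recover the plaintext first with AES-CTR32, using the
 * user-supplied tag (top bit forced to 1) as the initial counter block, then
 * recompute the expected tag over the recovered plaintext.  The actual tag
 * comparison happens later, in aes_gcm_siv_finish().
 */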
static int aes_gcm_siv_decrypt(PROV_AES_GCM_SIV_CTX *ctx, const unsigned char *in,
                               unsigned char *out, size_t len)
{
    uint8_t counter_block[TAG_SIZE];
    uint64_t len_blk[2];
    uint8_t S_s[TAG_SIZE];
    size_t i;
    uint64_t padding[2];
    int64_t len64 = len;
    int out_len;
    int error = 0;
    DECLARE_IS_ENDIAN;

    ctx->generated_tag = 0;
    if (!ctx->speed && ctx->used_dec)
        return 0;
    /* need to check the size of the input! */
    if (len64 > ((int64_t)1 << 36) || len == 0)
        return 0;

    memcpy(counter_block, ctx->user_tag, sizeof(counter_block));
    counter_block[TAG_SIZE - 1] |= 0x80;
    error |= !aes_gcm_siv_ctr32(ctx, counter_block, out, in, len);

    if (IS_LITTLE_ENDIAN) {
        len_blk[0] = (uint64_t)ctx->aad_len * 8;
        len_blk[1] = (uint64_t)len * 8;
    } else {
        len_blk[0] = GSWAP8((uint64_t)ctx->aad_len * 8);
        len_blk[1] = GSWAP8((uint64_t)len * 8);
    }
    memset(S_s, 0, TAG_SIZE);
    ossl_polyval_ghash_init(ctx->Htable, (const uint64_t*)ctx->msg_auth_key);
    if (ctx->aad != NULL) {
        /* AAD allocated with padding, but need to adjust length */
        ossl_polyval_ghash_hash(ctx->Htable, S_s, ctx->aad, UP16(ctx->aad_len));
    }
    if (DOWN16(len) > 0)
        ossl_polyval_ghash_hash(ctx->Htable, S_s, out, DOWN16(len));
    if (!IS16(len)) {
        /* deal with padding - probably easier to "memset" the padding first rather than calculate */
        padding[0] = padding[1] = 0;
        memcpy(padding, &out[DOWN16(len)], REMAINDER16(len));
        ossl_polyval_ghash_hash(ctx->Htable, S_s, (uint8_t *)padding, sizeof(padding));
    }
    ossl_polyval_ghash_hash(ctx->Htable, S_s, (uint8_t *)len_blk, TAG_SIZE);
    for (i = 0; i < NONCE_SIZE; i++)
        S_s[i] ^= ctx->nonce[i];
    S_s[TAG_SIZE - 1] &= 0x7f;
    /*
     * In the ctx, user_tag is the one received/set by the user,
     * and tag is generated from the input
     */
    out_len = sizeof(ctx->tag);
    error |= !EVP_EncryptUpdate(ctx->ecb_ctx, ctx->tag, &out_len, S_s, sizeof(S_s));
    ctx->generated_tag = !error;
    /* Regardless of error */
    ctx->used_dec = 1;
    return !error;
}

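/*
 * Single update entry point: in == NULL means EncryptFinal/DecryptFinal,
 * out == NULL means the input is AAD, otherwise encrypt or decrypt the
 * whole message in one call.
 */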
static int aes_gcm_siv_cipher(void *vctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    PROV_AES_GCM_SIV_CTX *ctx = (PROV_AES_GCM_SIV_CTX *)vctx;

    /* EncryptFinal or DecryptFinal */
    if (in == NULL)
        return aes_gcm_siv_finish(ctx);

    /* Deal with associated data */
    if (out == NULL)
        return aes_gcm_siv_aad(ctx, in, len);

    if (ctx->enc)
        return aes_gcm_siv_encrypt(ctx, in, out, len);

    return aes_gcm_siv_decrypt(ctx, in, out, len);
}

static void aes_gcm_siv_clean_ctx(void *vctx)
{
    PROV_AES_GCM_SIV_CTX *ctx = (PROV_AES_GCM_SIV_CTX *)vctx;

    EVP_CIPHER_CTX_free(ctx->ecb_ctx);
    ctx->ecb_ctx = NULL;
}

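/* Duplicate a context, giving the copy its own ECB cipher context */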
static int aes_gcm_siv_dup_ctx(void *vdst, void *vsrc)
{
    PROV_AES_GCM_SIV_CTX *dst = (PROV_AES_GCM_SIV_CTX *)vdst;
    PROV_AES_GCM_SIV_CTX *src = (PROV_AES_GCM_SIV_CTX *)vsrc;

    dst->ecb_ctx = NULL;
    if (src->ecb_ctx != NULL) {
        if ((dst->ecb_ctx = EVP_CIPHER_CTX_new()) == NULL)
            goto err;
        if (!EVP_CIPHER_CTX_copy(dst->ecb_ctx, src->ecb_ctx))
            goto err;
    }
    return 1;

 err:
    EVP_CIPHER_CTX_free(dst->ecb_ctx);
    dst->ecb_ctx = NULL;
    return 0;
}

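/* The same dispatch table is returned for all key sizes; the key length lives in the ctx */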
static const PROV_CIPHER_HW_AES_GCM_SIV aes_gcm_siv_hw =
{
    aes_gcm_siv_initkey,
    aes_gcm_siv_cipher,
    aes_gcm_siv_dup_ctx,
    aes_gcm_siv_clean_ctx,
};

const PROV_CIPHER_HW_AES_GCM_SIV *ossl_prov_cipher_hw_aes_gcm_siv(size_t keybits)
{
    return &aes_gcm_siv_hw;
}

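/*
 * Keystream generation shared by encrypt and decrypt.  Per RFC 8452, only the
 * low 32 bits of the counter block are incremented, treated as a
 * little-endian integer.
 */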
/* AES-GCM-SIV needs AES-CTR32, which is different from the AES-CTR implementation */
static int aes_gcm_siv_ctr32(PROV_AES_GCM_SIV_CTX *ctx, const unsigned char *init_counter,
                             unsigned char *out, const unsigned char *in, size_t len)
{
    uint8_t keystream[BLOCK_SIZE];
    int out_len;
    size_t i;
    size_t j;
    size_t todo;
    uint32_t counter;
    int error = 0;
    union {
        uint32_t x32[BLOCK_SIZE / sizeof(uint32_t)];
        uint8_t x8[BLOCK_SIZE];
    } block;
    DECLARE_IS_ENDIAN;

    memcpy(&block, init_counter, sizeof(block));
    if (IS_BIG_ENDIAN) {
        counter = GSWAP4(block.x32[0]);
    }

    for (i = 0; i < len; i += sizeof(block)) {
        out_len = BLOCK_SIZE;
        error |= !EVP_EncryptUpdate(ctx->ecb_ctx, keystream, &out_len, (uint8_t*)&block, sizeof(block));
        if (IS_LITTLE_ENDIAN) {
            block.x32[0]++;
        } else {
            counter++;
            block.x32[0] = GSWAP4(counter);
        }
        todo = len - i;
        if (todo > sizeof(keystream))
            todo = sizeof(keystream);
        /* Non optimal, but avoids alignment issues */
        for (j = 0; j < todo; j++)
            out[i + j] = in[i + j] ^ keystream[j];
    }
    return !error;
}

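/*
 * Illustrative sketch (not part of this translation unit): roughly how an
 * application would reach this implementation through the public EVP
 * interface.  It assumes the "AES-128-GCM-SIV" algorithm name registered by
 * the provider, a 16-byte key and 12-byte nonce, and omits error handling.
 * Note the cipher is one-shot: AAD and message are each passed in a single
 * update call.
 *
 *     EVP_CIPHER *siv = EVP_CIPHER_fetch(NULL, "AES-128-GCM-SIV", NULL);
 *     EVP_CIPHER_CTX *cctx = EVP_CIPHER_CTX_new();
 *     unsigned char tag[16];
 *     int outl;
 *
 *     EVP_EncryptInit_ex2(cctx, siv, key, nonce, NULL);
 *     EVP_EncryptUpdate(cctx, NULL, &outl, aad, aad_len);   // AAD (out == NULL path)
 *     EVP_EncryptUpdate(cctx, ct, &outl, pt, pt_len);       // one-shot encrypt
 *     EVP_EncryptFinal_ex(cctx, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(cctx, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *
 *     EVP_CIPHER_CTX_free(cctx);
 *     EVP_CIPHER_free(siv);
 */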