cipher_aes_gcm_hw_vaes_avx512.inc

/*
 * Copyright 2021-2022 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2021, Intel Corporation. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*-
 * AVX512 VAES + VPCLMULQDQ support for AES GCM.
 * This file is included by cipher_aes_gcm_hw_aesni.inc
 */

#undef VAES_GCM_ENABLED
#if (defined(__x86_64) || defined(__x86_64__) || \
     defined(_M_AMD64) || defined(_M_X64))
# define VAES_GCM_ENABLED

/* Returns non-zero when the AVX512F + VAES + VPCLMULQDQ combination is available */
int ossl_vaes_vpclmulqdq_capable(void);

# define OSSL_AES_GCM_UPDATE(direction)                                 \
    void ossl_aes_gcm_ ## direction ## _avx512(const void *ks,          \
                                               void *gcm128ctx,         \
                                               unsigned int *pblocklen, \
                                               const unsigned char *in, \
                                               size_t len,              \
                                               unsigned char *out);

OSSL_AES_GCM_UPDATE(encrypt)
OSSL_AES_GCM_UPDATE(decrypt)

void ossl_aes_gcm_init_avx512(const void *ks, void *gcm128ctx);
void ossl_aes_gcm_setiv_avx512(const void *ks, void *gcm128ctx,
                               const unsigned char *iv, size_t ivlen);
void ossl_aes_gcm_update_aad_avx512(void *gcm128ctx, const unsigned char *aad,
                                    size_t aadlen);
void ossl_aes_gcm_finalize_avx512(void *gcm128ctx, unsigned int pblocklen);
void ossl_gcm_gmult_avx512(u64 Xi[2], const void *gcm128ctx);
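
/*
 * The declarations above are implemented in assembly; in the OpenSSL tree
 * they are generated from crypto/modes/asm/aes-gcm-avx512.pl, which
 * interleaves VAES counter-mode encryption with VPCLMULQDQ-based GHASH
 * (the path is given for orientation and may move between releases).
 */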

static int vaes_gcm_setkey(PROV_GCM_CTX *ctx, const unsigned char *key,
                           size_t keylen)
{
    GCM128_CONTEXT *gcmctx = &ctx->gcm;
    PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
    AES_KEY *ks = &actx->ks.ks;

    ctx->ks = ks;
    aesni_set_encrypt_key(key, keylen * 8, ks);
    memset(gcmctx, 0, sizeof(*gcmctx));
    gcmctx->key = ks;
    ctx->key_set = 1;

    /* Derive the GHASH hash key material from the expanded AES key */
    ossl_aes_gcm_init_avx512(ks, gcmctx);

    return 1;
}

static int vaes_gcm_setiv(PROV_GCM_CTX *ctx, const unsigned char *iv,
                          size_t ivlen)
{
    GCM128_CONTEXT *gcmctx = &ctx->gcm;

    gcmctx->Yi.u[0] = 0;  /* Current counter */
    gcmctx->Yi.u[1] = 0;
    gcmctx->Xi.u[0] = 0;  /* AAD hash */
    gcmctx->Xi.u[1] = 0;
    gcmctx->len.u[0] = 0; /* AAD length */
    gcmctx->len.u[1] = 0; /* Message length */
    gcmctx->ares = 0;
    gcmctx->mres = 0;

    /* IV is limited to 2^64 bits, thus 2^61 bytes */
    if (ivlen > (U64(1) << 61))
        return 0;

    ossl_aes_gcm_setiv_avx512(ctx->ks, gcmctx, iv, ivlen);

    return 1;
}

static int vaes_gcm_aadupdate(PROV_GCM_CTX *ctx,
                              const unsigned char *aad,
                              size_t aad_len)
{
    GCM128_CONTEXT *gcmctx = &ctx->gcm;
    u64 alen = gcmctx->len.u[0];
    unsigned int ares;
    size_t i, lenBlks;

    /* Bad sequence: AAD update called after message processing has begun */
    if (gcmctx->len.u[1] > 0)
        return 0;

    alen += aad_len;
    /* AAD is limited to 2^64 bits, thus 2^61 bytes */
    if ((alen > (U64(1) << 61)) || (alen < aad_len))
        return 0;
    gcmctx->len.u[0] = alen;

    ares = gcmctx->ares;
    /* Partial AAD block left over from previous AAD update calls */
    if (ares > 0) {
        /*
         * Fill the partial block buffer up to a full block
         * (note, the hash is stored reflected)
         */
        while (ares > 0 && aad_len > 0) {
            gcmctx->Xi.c[15 - ares] ^= *(aad++);
            --aad_len;
            ares = (ares + 1) % AES_BLOCK_SIZE;
        }

        /* Full block gathered */
        if (ares == 0) {
            ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
        } else { /* no more AAD */
            gcmctx->ares = ares;
            return 1;
        }
    }

    /* Bulk AAD processing: round aad_len down to a whole number of blocks */
    lenBlks = aad_len & ((size_t)(-AES_BLOCK_SIZE));
    if (lenBlks > 0) {
        ossl_aes_gcm_update_aad_avx512(gcmctx, aad, lenBlks);
        aad += lenBlks;
        aad_len -= lenBlks;
    }

    /* Add remaining AAD to the hash (note, the hash is stored reflected) */
    if (aad_len > 0) {
        ares = aad_len;
        for (i = 0; i < aad_len; i++)
            gcmctx->Xi.c[15 - i] ^= aad[i];
    }

    gcmctx->ares = ares;

    return 1;
}

static int vaes_gcm_cipherupdate(PROV_GCM_CTX *ctx, const unsigned char *in,
                                 size_t len, unsigned char *out)
{
    GCM128_CONTEXT *gcmctx = &ctx->gcm;
    u64 mlen = gcmctx->len.u[1];

    /* NIST SP 800-38D limits the plaintext to 2^39 - 256 bits, i.e. 2^36 - 32 bytes */
    mlen += len;
    if (mlen > ((U64(1) << 36) - 32) || (mlen < len))
        return 0;

    gcmctx->len.u[1] = mlen;

    /* Finalize GHASH(AAD) if a partial AAD block was left unprocessed */
    if (gcmctx->ares > 0) {
        ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
        gcmctx->ares = 0;
    }

    if (ctx->enc)
        ossl_aes_gcm_encrypt_avx512(ctx->ks, gcmctx, &gcmctx->mres, in, len, out);
    else
        ossl_aes_gcm_decrypt_avx512(ctx->ks, gcmctx, &gcmctx->mres, in, len, out);

    return 1;
}
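
/*
 * Compute or verify the tag. gcmctx->mres holds the byte count of the last
 * partial ciphertext block; when only AAD was processed, gcmctx->ares holds
 * the pending AAD byte count instead, and that count is what the assembly
 * finalizer needs to fold the remaining block and the lengths into the hash.
 */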
static int vaes_gcm_cipherfinal(PROV_GCM_CTX *ctx, unsigned char *tag)
{
    GCM128_CONTEXT *gcmctx = &ctx->gcm;
    unsigned int *res = &gcmctx->mres;

    /* Finalize AAD processing */
    if (gcmctx->ares > 0)
        res = &gcmctx->ares;

    ossl_aes_gcm_finalize_avx512(gcmctx, *res);

    if (ctx->enc) {
        ctx->taglen = GCM_TAG_MAX_SIZE;
        memcpy(tag, gcmctx->Xi.c,
               ctx->taglen <= sizeof(gcmctx->Xi.c) ? ctx->taglen :
               sizeof(gcmctx->Xi.c));
        *res = 0;
    } else {
        /* Decryption: constant-time compare of computed vs. expected tag */
        return !CRYPTO_memcmp(gcmctx->Xi.c, tag, ctx->taglen);
    }

    return 1;
}

static const PROV_GCM_HW vaes_gcm = {
    vaes_gcm_setkey,
    vaes_gcm_setiv,
    vaes_gcm_aadupdate,
    vaes_gcm_cipherupdate,
    vaes_gcm_cipherfinal,
    ossl_gcm_one_shot
};
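
/*
 * How this table is consumed: cipher_aes_gcm_hw_aesni.inc, which includes
 * this file, selects a PROV_GCM_HW vtable at runtime. A minimal sketch of
 * that dispatch, assuming the aesni_gcm fallback table the including file
 * defines:
 *
 *     const PROV_GCM_HW *ossl_prov_aes_hw_gcm(size_t keybits)
 *     {
 *     #ifdef VAES_GCM_ENABLED
 *         if (ossl_vaes_vpclmulqdq_capable())
 *             return &vaes_gcm;
 *     #endif
 *         return &aesni_gcm;
 *     }
 */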

#endif