/*
 * Copyright 2021-2022 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2021, Intel Corporation. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
/*-
 * AVX512 VAES + VPCLMULQDQ support for AES GCM.
 * This file is included by cipher_aes_gcm_hw_aesni.inc
 */
#undef VAES_GCM_ENABLED
#if (defined(__x86_64) || defined(__x86_64__) || \
     defined(_M_AMD64) || defined(_M_X64))
# define VAES_GCM_ENABLED

/* Returns non-zero when the AVX512F + VAES + VPCLMULQDQ combination is available */
int ossl_vaes_vpclmulqdq_capable(void);

/*
 * Expands to the prototype of one bulk AVX512 cipher entry point,
 * ossl_aes_gcm_<direction>_avx512(); the definitions live outside this file.
 */
# define OSSL_AES_GCM_UPDATE(direction)                                 \
    void ossl_aes_gcm_ ## direction ## _avx512(const void *ks,          \
                                               void *gcm128ctx,         \
                                               unsigned int *pblocklen, \
                                               const unsigned char *in, \
                                               size_t len,              \
                                               unsigned char *out);

OSSL_AES_GCM_UPDATE(encrypt)
OSSL_AES_GCM_UPDATE(decrypt)

/* Remaining AVX512 GCM primitives; defined elsewhere (assembly back end) */
void ossl_aes_gcm_init_avx512(const void *ks, void *gcm128ctx);
void ossl_aes_gcm_setiv_avx512(const void *ks, void *gcm128ctx,
                               const unsigned char *iv, size_t ivlen);
void ossl_aes_gcm_update_aad_avx512(void *gcm128ctx, const unsigned char *aad,
                                    size_t aadlen);
void ossl_aes_gcm_finalize_avx512(void *gcm128ctx, unsigned int pblocklen);
void ossl_gcm_gmult_avx512(u64 Xi[2], const void *gcm128ctx);
  36. static int vaes_gcm_setkey(PROV_GCM_CTX *ctx, const unsigned char *key,
  37. size_t keylen)
  38. {
  39. GCM128_CONTEXT *gcmctx = &ctx->gcm;
  40. PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
  41. AES_KEY *ks = &actx->ks.ks;
  42. aesni_set_encrypt_key(key, keylen * 8, ks);
  43. memset(gcmctx, 0, sizeof(*gcmctx));
  44. gcmctx->key = ks;
  45. ctx->key_set = 1;
  46. ossl_aes_gcm_init_avx512(ks, gcmctx);
  47. return 1;
  48. }
  49. static int vaes_gcm_setiv(PROV_GCM_CTX *ctx, const unsigned char *iv,
  50. size_t ivlen)
  51. {
  52. GCM128_CONTEXT *gcmctx = &ctx->gcm;
  53. gcmctx->Yi.u[0] = 0; /* Current counter */
  54. gcmctx->Yi.u[1] = 0;
  55. gcmctx->Xi.u[0] = 0; /* AAD hash */
  56. gcmctx->Xi.u[1] = 0;
  57. gcmctx->len.u[0] = 0; /* AAD length */
  58. gcmctx->len.u[1] = 0; /* Message length */
  59. gcmctx->ares = 0;
  60. gcmctx->mres = 0;
  61. /* IV is limited by 2^64 bits, thus 2^61 bytes */
  62. if (ivlen > (U64(1) << 61))
  63. return 0;
  64. ossl_aes_gcm_setiv_avx512(gcmctx->key, gcmctx, iv, ivlen);
  65. return 1;
  66. }
  67. static int vaes_gcm_aadupdate(PROV_GCM_CTX *ctx,
  68. const unsigned char *aad,
  69. size_t aad_len)
  70. {
  71. GCM128_CONTEXT *gcmctx = &ctx->gcm;
  72. u64 alen = gcmctx->len.u[0];
  73. unsigned int ares;
  74. size_t i, lenBlks;
  75. /* Bad sequence: call of AAD update after message processing */
  76. if (gcmctx->len.u[1] > 0)
  77. return 0;
  78. alen += aad_len;
  79. /* AAD is limited by 2^64 bits, thus 2^61 bytes */
  80. if ((alen > (U64(1) << 61)) || (alen < aad_len))
  81. return 0;
  82. gcmctx->len.u[0] = alen;
  83. ares = gcmctx->ares;
  84. /* Partial AAD block left from previous AAD update calls */
  85. if (ares > 0) {
  86. /*
  87. * Fill partial block buffer till full block
  88. * (note, the hash is stored reflected)
  89. */
  90. while (ares > 0 && aad_len > 0) {
  91. gcmctx->Xi.c[15 - ares] ^= *(aad++);
  92. --aad_len;
  93. ares = (ares + 1) % AES_BLOCK_SIZE;
  94. }
  95. /* Full block gathered */
  96. if (ares == 0) {
  97. ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
  98. } else { /* no more AAD */
  99. gcmctx->ares = ares;
  100. return 1;
  101. }
  102. }
  103. /* Bulk AAD processing */
  104. lenBlks = aad_len & ((size_t)(-AES_BLOCK_SIZE));
  105. if (lenBlks > 0) {
  106. ossl_aes_gcm_update_aad_avx512(gcmctx, aad, lenBlks);
  107. aad += lenBlks;
  108. aad_len -= lenBlks;
  109. }
  110. /* Add remaining AAD to the hash (note, the hash is stored reflected) */
  111. if (aad_len > 0) {
  112. ares = aad_len;
  113. for (i = 0; i < aad_len; i++)
  114. gcmctx->Xi.c[15 - i] ^= aad[i];
  115. }
  116. gcmctx->ares = ares;
  117. return 1;
  118. }
  119. static int vaes_gcm_cipherupdate(PROV_GCM_CTX *ctx, const unsigned char *in,
  120. size_t len, unsigned char *out)
  121. {
  122. GCM128_CONTEXT *gcmctx = &ctx->gcm;
  123. u64 mlen = gcmctx->len.u[1];
  124. mlen += len;
  125. if (mlen > ((U64(1) << 36) - 32) || (mlen < len))
  126. return 0;
  127. gcmctx->len.u[1] = mlen;
  128. /* Finalize GHASH(AAD) if AAD partial blocks left unprocessed */
  129. if (gcmctx->ares > 0) {
  130. ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
  131. gcmctx->ares = 0;
  132. }
  133. if (ctx->enc)
  134. ossl_aes_gcm_encrypt_avx512(gcmctx->key, gcmctx, &gcmctx->mres, in, len, out);
  135. else
  136. ossl_aes_gcm_decrypt_avx512(gcmctx->key, gcmctx, &gcmctx->mres, in, len, out);
  137. return 1;
  138. }
  139. static int vaes_gcm_cipherfinal(PROV_GCM_CTX *ctx, unsigned char *tag)
  140. {
  141. GCM128_CONTEXT *gcmctx = &ctx->gcm;
  142. unsigned int *res = &gcmctx->mres;
  143. /* Finalize AAD processing */
  144. if (gcmctx->ares > 0)
  145. res = &gcmctx->ares;
  146. ossl_aes_gcm_finalize_avx512(gcmctx, *res);
  147. if (ctx->enc) {
  148. ctx->taglen = GCM_TAG_MAX_SIZE;
  149. memcpy(tag, gcmctx->Xi.c,
  150. ctx->taglen <= sizeof(gcmctx->Xi.c) ? ctx->taglen :
  151. sizeof(gcmctx->Xi.c));
  152. *res = 0;
  153. } else {
  154. return !CRYPTO_memcmp(gcmctx->Xi.c, tag, ctx->taglen);
  155. }
  156. return 1;
  157. }
/*
 * Dispatch table exposing the AVX512 VAES implementation to the GCM
 * provider layer; one-shot is delegated to the generic ossl_gcm_one_shot.
 */
static const PROV_GCM_HW vaes_gcm = {
    vaes_gcm_setkey,
    vaes_gcm_setiv,
    vaes_gcm_aadupdate,
    vaes_gcm_cipherupdate,
    vaes_gcm_cipherfinal,
    ossl_gcm_one_shot
};
#endif /* x86-64: VAES_GCM_ENABLED */