/*
 * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * Crypto extension support for AES GCM.
 * This file is included by cipher_aes_gcm_hw.c
 */
  13. size_t armv8_aes_gcm_encrypt(const unsigned char *in, unsigned char *out, size_t len,
  14. const void *key, unsigned char ivec[16], u64 *Xi)
  15. {
  16. size_t align_bytes = 0;
  17. align_bytes = len - len % 16;
  18. AES_KEY *aes_key = (AES_KEY *)key;
  19. switch(aes_key->rounds) {
  20. case 10:
  21. if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
  22. unroll8_eor3_aes_gcm_enc_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  23. } else {
  24. aes_gcm_enc_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  25. }
  26. break;
  27. case 12:
  28. if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
  29. unroll8_eor3_aes_gcm_enc_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  30. } else {
  31. aes_gcm_enc_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  32. }
  33. break;
  34. case 14:
  35. if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
  36. unroll8_eor3_aes_gcm_enc_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  37. } else {
  38. aes_gcm_enc_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  39. }
  40. break;
  41. }
  42. return align_bytes;
  43. }
  44. size_t armv8_aes_gcm_decrypt(const unsigned char *in, unsigned char *out, size_t len,
  45. const void *key, unsigned char ivec[16], u64 *Xi)
  46. {
  47. size_t align_bytes = 0;
  48. align_bytes = len - len % 16;
  49. AES_KEY *aes_key = (AES_KEY *)key;
  50. switch(aes_key->rounds) {
  51. case 10:
  52. if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
  53. unroll8_eor3_aes_gcm_dec_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  54. } else {
  55. aes_gcm_dec_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  56. }
  57. break;
  58. case 12:
  59. if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
  60. unroll8_eor3_aes_gcm_dec_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  61. } else {
  62. aes_gcm_dec_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  63. }
  64. break;
  65. case 14:
  66. if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
  67. unroll8_eor3_aes_gcm_dec_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  68. } else {
  69. aes_gcm_dec_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  70. }
  71. break;
  72. }
  73. return align_bytes;
  74. }
  75. static int armv8_aes_gcm_initkey(PROV_GCM_CTX *ctx, const unsigned char *key,
  76. size_t keylen)
  77. {
  78. PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
  79. AES_KEY *ks = &actx->ks.ks;
  80. GCM_HW_SET_KEY_CTR_FN(ks, aes_v8_set_encrypt_key, aes_v8_encrypt,
  81. aes_v8_ctr32_encrypt_blocks);
  82. return 1;
  83. }
  84. static const PROV_GCM_HW armv8_aes_gcm = {
  85. armv8_aes_gcm_initkey,
  86. ossl_gcm_setiv,
  87. ossl_gcm_aad_update,
  88. generic_aes_gcm_cipher_update,
  89. ossl_gcm_cipher_final,
  90. ossl_gcm_one_shot
  91. };
  92. const PROV_GCM_HW *ossl_prov_aes_hw_gcm(size_t keybits)
  93. {
  94. return AES_PMULL_CAPABLE ? &armv8_aes_gcm : &aes_gcm;
  95. }