/*
 * Copyright 2001-2020 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*-
 * IBM S390X support for AES GCM.
 * This file is included by cipher_aes_gcm_hw.c
 */
/*
 * iv + padding length for iv lengths != 12:
 * round the IV length up to a multiple of 16 bytes, then add one extra
 * 16-byte block that holds 8 bytes of zeros followed by the 64-bit IV
 * bit-length, as required for the GHASH-based J0 derivation.
 */
#define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
  15. static int s390x_aes_gcm_initkey(PROV_GCM_CTX *ctx,
  16. const unsigned char *key, size_t keylen)
  17. {
  18. PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
  19. ctx->key_set = 1;
  20. memcpy(&actx->plat.s390x.param.kma.k, key, keylen);
  21. actx->plat.s390x.fc = S390X_AES_FC(keylen);
  22. if (!ctx->enc)
  23. actx->plat.s390x.fc |= S390X_DECRYPT;
  24. return 1;
  25. }
  26. static int s390x_aes_gcm_setiv(PROV_GCM_CTX *ctx, const unsigned char *iv,
  27. size_t ivlen)
  28. {
  29. PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
  30. S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
  31. kma->t.g[0] = 0;
  32. kma->t.g[1] = 0;
  33. kma->tpcl = 0;
  34. kma->taadl = 0;
  35. actx->plat.s390x.mreslen = 0;
  36. actx->plat.s390x.areslen = 0;
  37. actx->plat.s390x.kreslen = 0;
  38. if (ivlen == GCM_IV_DEFAULT_SIZE) {
  39. memcpy(&kma->j0, iv, ivlen);
  40. kma->j0.w[3] = 1;
  41. kma->cv.w = 1;
  42. } else {
  43. unsigned long long ivbits = ivlen << 3;
  44. size_t len = S390X_gcm_ivpadlen(ivlen);
  45. unsigned char iv_zero_pad[S390X_gcm_ivpadlen(GCM_IV_MAX_SIZE)];
  46. /*
  47. * The IV length needs to be zero padded to be a multiple of 16 bytes
  48. * followed by 8 bytes of zeros and 8 bytes for the IV length.
  49. * The GHASH of this value can then be calculated.
  50. */
  51. memcpy(iv_zero_pad, iv, ivlen);
  52. memset(iv_zero_pad + ivlen, 0, len - ivlen);
  53. memcpy(iv_zero_pad + len - sizeof(ivbits), &ivbits, sizeof(ivbits));
  54. /*
  55. * Calculate the ghash of the iv - the result is stored into the tag
  56. * param.
  57. */
  58. s390x_kma(iv_zero_pad, len, NULL, 0, NULL, actx->plat.s390x.fc, kma);
  59. actx->plat.s390x.fc |= S390X_KMA_HS; /* The hash subkey is set */
  60. /* Copy the 128 bit GHASH result into J0 and clear the tag */
  61. kma->j0.g[0] = kma->t.g[0];
  62. kma->j0.g[1] = kma->t.g[1];
  63. kma->t.g[0] = 0;
  64. kma->t.g[1] = 0;
  65. /* Set the 32 bit counter */
  66. kma->cv.w = kma->j0.w[3];
  67. }
  68. return 1;
  69. }
  70. static int s390x_aes_gcm_cipher_final(PROV_GCM_CTX *ctx, unsigned char *tag)
  71. {
  72. PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
  73. S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
  74. unsigned char out[AES_BLOCK_SIZE];
  75. int rc;
  76. kma->taadl <<= 3;
  77. kma->tpcl <<= 3;
  78. s390x_kma(actx->plat.s390x.ares, actx->plat.s390x.areslen,
  79. actx->plat.s390x.mres, actx->plat.s390x.mreslen, out,
  80. actx->plat.s390x.fc | S390X_KMA_LAAD | S390X_KMA_LPC, kma);
  81. /* gctx->mres already returned to the caller */
  82. OPENSSL_cleanse(out, actx->plat.s390x.mreslen);
  83. if (ctx->enc) {
  84. ctx->taglen = GCM_TAG_MAX_SIZE;
  85. memcpy(tag, kma->t.b, ctx->taglen);
  86. rc = 1;
  87. } else {
  88. rc = (CRYPTO_memcmp(tag, kma->t.b, ctx->taglen) == 0);
  89. }
  90. return rc;
  91. }
  92. static int s390x_aes_gcm_one_shot(PROV_GCM_CTX *ctx,
  93. unsigned char *aad, size_t aad_len,
  94. const unsigned char *in, size_t in_len,
  95. unsigned char *out,
  96. unsigned char *tag, size_t taglen)
  97. {
  98. PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
  99. S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
  100. int rc;
  101. kma->taadl = aad_len << 3;
  102. kma->tpcl = in_len << 3;
  103. s390x_kma(aad, aad_len, in, in_len, out,
  104. actx->plat.s390x.fc | S390X_KMA_LAAD | S390X_KMA_LPC, kma);
  105. if (ctx->enc) {
  106. memcpy(tag, kma->t.b, taglen);
  107. rc = 1;
  108. } else {
  109. rc = (CRYPTO_memcmp(tag, kma->t.b, taglen) == 0);
  110. }
  111. return rc;
  112. }
/*
 * Process additional authenticated data. Returns 1 on success. Code is
 * big-endian.
 *
 * AAD may be fed in arbitrary-sized chunks; partial blocks are buffered
 * in ares/areslen and flushed once 16 bytes have accumulated.
 */
static int s390x_aes_gcm_aad_update(PROV_GCM_CTX *ctx,
                                    const unsigned char *aad, size_t len)
{
    PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
    S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
    unsigned long long alen;
    int n, rem;

    /* If already processed pt/ct then error: AAD must precede payload */
    if (kma->tpcl != 0)
        return 0;

    /*
     * update the total aad length, rejecting additions past the
     * 2^61-byte (= 2^64-bit) GCM limit and 64-bit wrap-around
     */
    alen = kma->taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return 0;
    kma->taadl = alen;

    /* check if there is any existing aad data from a previous add */
    n = actx->plat.s390x.areslen;
    if (n) {
        /* add additional data to a buffer until it has 16 bytes */
        while (n && len) {
            actx->plat.s390x.ares[n] = *aad;
            ++aad;
            --len;
            n = (n + 1) & 0xf; /* offset wraps to 0 when the block is full */
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(actx->plat.s390x.ares, 16, NULL, 0, NULL,
                      actx->plat.s390x.fc, kma);
            /* KMA has derived the hash subkey; don't recompute it */
            actx->plat.s390x.fc |= S390X_KMA_HS;
        }
        actx->plat.s390x.areslen = n;
    }

    /* If there are leftover bytes (< 128 bits) save them for next time */
    rem = len & 0xf;
    /* Add any remaining 16 byte blocks (128 bit each) */
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, actx->plat.s390x.fc, kma);
        actx->plat.s390x.fc |= S390X_KMA_HS;
        aad += len;
    }
    if (rem) {
        /* buffer the tail (copied back to front) for the next call */
        actx->plat.s390x.areslen = rem;
        do {
            --rem;
            actx->plat.s390x.ares[rem] = aad[rem];
        } while (rem);
    }
    return 1;
}
/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 1 for
 * success. Code is big-endian.
 *
 * Input may arrive in arbitrary-sized chunks. Complete 16-byte blocks are
 * handed to KMA; a trailing partial block is encrypted/decrypted here with
 * a cached keystream block (kres) and buffered in mres so that KMA can
 * authenticate it on a later call (or at final()).
 */
static int s390x_aes_gcm_cipher_update(PROV_GCM_CTX *ctx,
                                       const unsigned char *in, size_t len,
                                       unsigned char *out)
{
    PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
    S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    /* Enforce the GCM plaintext limit (2^36 - 32 bytes) and catch wrap */
    mlen = kma->tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return 0;
    kma->tpcl = mlen;

    /* First, top up any partial block buffered by a previous call */
    n = actx->plat.s390x.mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            actx->plat.s390x.mres[n] = *inptr;
            n = (n + 1) & 0xf; /* offset wraps to 0 when the block is full */
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            /* Authenticate (and re-process) the completed block via KMA */
            s390x_kma(actx->plat.s390x.ares, actx->plat.s390x.areslen,
                      actx->plat.s390x.mres, 16, buf.b,
                      actx->plat.s390x.fc | S390X_KMA_LAAD, kma);
            actx->plat.s390x.fc |= S390X_KMA_HS;
            actx->plat.s390x.areslen = 0;
            /* previous call already encrypted/decrypted its remainder,
             * see comment below; only bytes [mreslen..15] of the block
             * are emitted here */
            n = actx->plat.s390x.mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            actx->plat.s390x.mreslen = 0;
        }
    }

    /* A remainder (< 16 bytes) is handled after the bulk KMA call below */
    rem = len & 0xf;
    /* Process all complete 16-byte blocks in one KMA invocation */
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(actx->plat.s390x.ares, actx->plat.s390x.areslen, in, len, out,
                  actx->plat.s390x.fc | S390X_KMA_LAAD, kma);
        in += len;
        out += len;
        actx->plat.s390x.fc |= S390X_KMA_HS;
        actx->plat.s390x.areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!actx->plat.s390x.mreslen) {
            /*
             * No cached keystream yet: encrypt the next counter block
             * (J0 words 0-2 with counter cv + 1) using plain KM to get a
             * keystream block in kres.
             * NOTE(review): "fc & 0x1f" presumably masks the KMA modifier
             * bits down to the bare KM function code - confirm against the
             * z/Architecture KM/KMA instruction definitions.
             */
            buf.w[0] = kma->j0.w[0];
            buf.w[1] = kma->j0.w[1];
            buf.w[2] = kma->j0.w[2];
            buf.w[3] = kma->cv.w + 1;
            s390x_km(buf.b, 16, actx->plat.s390x.kres,
                     actx->plat.s390x.fc & 0x1f, &kma->k);
        }
        /* XOR the tail with the cached keystream; buffer the input bytes
         * in mres so KMA can authenticate them later */
        n = actx->plat.s390x.mreslen;
        for (i = 0; i < rem; i++) {
            actx->plat.s390x.mres[n + i] = in[i];
            out[i] = in[i] ^ actx->plat.s390x.kres[n + i];
        }
        actx->plat.s390x.mreslen += rem;
    }
    return 1;
}
/*
 * Method table wiring the s390x KMA-based routines into the generic GCM
 * provider machinery; entry order must match the PROV_GCM_HW definition.
 */
static const PROV_GCM_HW s390x_aes_gcm = {
    s390x_aes_gcm_initkey,
    s390x_aes_gcm_setiv,
    s390x_aes_gcm_aad_update,
    s390x_aes_gcm_cipher_update,
    s390x_aes_gcm_cipher_final,
    s390x_aes_gcm_one_shot
};
  261. const PROV_GCM_HW *ossl_prov_aes_hw_gcm(size_t keybits)
  262. {
  263. if ((keybits == 128 && S390X_aes_128_gcm_CAPABLE)
  264. || (keybits == 192 && S390X_aes_192_gcm_CAPABLE)
  265. || (keybits == 256 && S390X_aes_256_gcm_CAPABLE))
  266. return &s390x_aes_gcm;
  267. return &aes_gcm;
  268. }