e_aes.c 84 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455
  1. /* ====================================================================
  2. * Copyright (c) 2001-2014 The OpenSSL Project. All rights reserved.
  3. *
  4. * Redistribution and use in source and binary forms, with or without
  5. * modification, are permitted provided that the following conditions
  6. * are met:
  7. *
  8. * 1. Redistributions of source code must retain the above copyright
  9. * notice, this list of conditions and the following disclaimer.
  10. *
  11. * 2. Redistributions in binary form must reproduce the above copyright
  12. * notice, this list of conditions and the following disclaimer in
  13. * the documentation and/or other materials provided with the
  14. * distribution.
  15. *
  16. * 3. All advertising materials mentioning features or use of this
  17. * software must display the following acknowledgment:
  18. * "This product includes software developed by the OpenSSL Project
  19. * for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
  20. *
  21. * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
  22. * endorse or promote products derived from this software without
  23. * prior written permission. For written permission, please contact
  24. * openssl-core@openssl.org.
  25. *
  26. * 5. Products derived from this software may not be called "OpenSSL"
  27. * nor may "OpenSSL" appear in their names without prior written
  28. * permission of the OpenSSL Project.
  29. *
  30. * 6. Redistributions of any form whatsoever must retain the following
  31. * acknowledgment:
  32. * "This product includes software developed by the OpenSSL Project
  33. * for use in the OpenSSL Toolkit (http://www.openssl.org/)"
  34. *
  35. * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
  36. * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  37. * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  38. * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
  39. * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  40. * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
  41. * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
  42. * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
  43. * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
  44. * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  45. * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
  46. * OF THE POSSIBILITY OF SUCH DAMAGE.
  47. * ====================================================================
  48. *
  49. */
  50. #include <openssl/opensslconf.h>
  51. #ifndef OPENSSL_NO_AES
  52. # include <openssl/evp.h>
  53. # include <openssl/err.h>
  54. # include <string.h>
  55. # include <assert.h>
  56. # include <openssl/aes.h>
  57. # include "evp_locl.h"
  58. # include "modes_lcl.h"
  59. # include <openssl/rand.h>
/*
 * Per-context state for the plain AES modes (ECB/CBC/CFB/OFB/CTR).
 */
typedef struct {
    union {
        double align;           /* unused; forces alignment of ks */
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    block128_f block;           /* single-block (de|en)crypt routine */
    union {
        cbc128_f cbc;           /* optimised CBC routine, or NULL */
        ctr128_f ctr;           /* optimised CTR routine, or NULL */
    } stream;                   /* optional bulk routine for the mode */
} EVP_AES_KEY;
/*
 * Per-context state for AES-GCM.
 */
typedef struct {
    union {
        double align;           /* unused; forces alignment of ks */
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;                 /* expected/produced tag length */
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;               /* optimised CTR routine, or NULL */
} EVP_AES_GCM_CTX;
/*
 * Per-context state for AES-XTS.  XTS uses two independent AES keys:
 * ks1 for the data units and ks2 for the tweak.
 */
typedef struct {
    union {
        double align;           /* unused; forces alignment of ks */
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    /* optional assembly fast path covering the whole XTS operation */
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;
/*
 * Per-context state for AES-CCM.
 */
typedef struct {
    union {
        double align;           /* unused; forces alignment of ks */
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    CCM128_CONTEXT ccm;
    ccm128_f str;               /* optional bulk CCM routine, or NULL */
} EVP_AES_CCM_CTX;
  110. # ifndef OPENSSL_NO_OCB
/*
 * Per-context state for AES-OCB.  OCB decryption needs both the forward
 * and inverse key schedules, hence ksenc and ksdec.
 */
typedef struct {
    union {
        double align;           /* unused; forces alignment of ks */
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        double align;           /* unused; forces alignment of ks */
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;           /* bytes currently held in data_buf */
    int aad_buf_len;            /* bytes currently held in aad_buf */
    int ivlen;                  /* IV length */
    int taglen;                 /* tag length */
} EVP_AES_OCB_CTX;
  132. # endif
  133. # define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
  134. # ifdef VPAES_ASM
  135. int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
  136. AES_KEY *key);
  137. int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
  138. AES_KEY *key);
  139. void vpaes_encrypt(const unsigned char *in, unsigned char *out,
  140. const AES_KEY *key);
  141. void vpaes_decrypt(const unsigned char *in, unsigned char *out,
  142. const AES_KEY *key);
  143. void vpaes_cbc_encrypt(const unsigned char *in,
  144. unsigned char *out,
  145. size_t length,
  146. const AES_KEY *key, unsigned char *ivec, int enc);
  147. # endif
  148. # ifdef BSAES_ASM
  149. void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
  150. size_t length, const AES_KEY *key,
  151. unsigned char ivec[16], int enc);
  152. void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
  153. size_t len, const AES_KEY *key,
  154. const unsigned char ivec[16]);
  155. void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
  156. size_t len, const AES_KEY *key1,
  157. const AES_KEY *key2, const unsigned char iv[16]);
  158. void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
  159. size_t len, const AES_KEY *key1,
  160. const AES_KEY *key2, const unsigned char iv[16]);
  161. # endif
  162. # ifdef AES_CTR_ASM
  163. void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
  164. size_t blocks, const AES_KEY *key,
  165. const unsigned char ivec[AES_BLOCK_SIZE]);
  166. # endif
  167. # ifdef AES_XTS_ASM
  168. void AES_xts_encrypt(const char *inp, char *out, size_t len,
  169. const AES_KEY *key1, const AES_KEY *key2,
  170. const unsigned char iv[16]);
  171. void AES_xts_decrypt(const char *inp, char *out, size_t len,
  172. const AES_KEY *key1, const AES_KEY *key2,
  173. const unsigned char iv[16]);
  174. # endif
  175. # if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
  176. # include "ppc_arch.h"
  177. # ifdef VPAES_ASM
  178. # define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
  179. # endif
  180. # define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
  181. # define HWAES_set_encrypt_key aes_p8_set_encrypt_key
  182. # define HWAES_set_decrypt_key aes_p8_set_decrypt_key
  183. # define HWAES_encrypt aes_p8_encrypt
  184. # define HWAES_decrypt aes_p8_decrypt
  185. # define HWAES_cbc_encrypt aes_p8_cbc_encrypt
  186. # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
  187. # endif
  188. # if defined(AES_ASM) && !defined(I386_ONLY) && ( \
  189. ((defined(__i386) || defined(__i386__) || \
  190. defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
  191. defined(__x86_64) || defined(__x86_64__) || \
  192. defined(_M_AMD64) || defined(_M_X64) || \
  193. defined(__INTEL__) )
  194. extern unsigned int OPENSSL_ia32cap_P[];
  195. # ifdef VPAES_ASM
  196. # define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
  197. # endif
  198. # ifdef BSAES_ASM
  199. # define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
  200. # endif
  201. /*
  202. * AES-NI section
  203. */
  204. # define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
  205. int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
  206. AES_KEY *key);
  207. int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
  208. AES_KEY *key);
  209. void aesni_encrypt(const unsigned char *in, unsigned char *out,
  210. const AES_KEY *key);
  211. void aesni_decrypt(const unsigned char *in, unsigned char *out,
  212. const AES_KEY *key);
  213. void aesni_ecb_encrypt(const unsigned char *in,
  214. unsigned char *out,
  215. size_t length, const AES_KEY *key, int enc);
  216. void aesni_cbc_encrypt(const unsigned char *in,
  217. unsigned char *out,
  218. size_t length,
  219. const AES_KEY *key, unsigned char *ivec, int enc);
  220. void aesni_ctr32_encrypt_blocks(const unsigned char *in,
  221. unsigned char *out,
  222. size_t blocks,
  223. const void *key, const unsigned char *ivec);
  224. void aesni_xts_encrypt(const unsigned char *in,
  225. unsigned char *out,
  226. size_t length,
  227. const AES_KEY *key1, const AES_KEY *key2,
  228. const unsigned char iv[16]);
  229. void aesni_xts_decrypt(const unsigned char *in,
  230. unsigned char *out,
  231. size_t length,
  232. const AES_KEY *key1, const AES_KEY *key2,
  233. const unsigned char iv[16]);
  234. void aesni_ccm64_encrypt_blocks(const unsigned char *in,
  235. unsigned char *out,
  236. size_t blocks,
  237. const void *key,
  238. const unsigned char ivec[16],
  239. unsigned char cmac[16]);
  240. void aesni_ccm64_decrypt_blocks(const unsigned char *in,
  241. unsigned char *out,
  242. size_t blocks,
  243. const void *key,
  244. const unsigned char ivec[16],
  245. unsigned char cmac[16]);
  246. # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
  247. size_t aesni_gcm_encrypt(const unsigned char *in,
  248. unsigned char *out,
  249. size_t len,
  250. const void *key, unsigned char ivec[16], u64 *Xi);
  251. # define AES_gcm_encrypt aesni_gcm_encrypt
  252. size_t aesni_gcm_decrypt(const unsigned char *in,
  253. unsigned char *out,
  254. size_t len,
  255. const void *key, unsigned char ivec[16], u64 *Xi);
  256. # define AES_gcm_decrypt aesni_gcm_decrypt
  257. void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
  258. size_t len);
  259. # define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
  260. gctx->gcm.ghash==gcm_ghash_avx)
  261. # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
  262. gctx->gcm.ghash==gcm_ghash_avx)
  263. # undef AES_GCM_ASM2 /* minor size optimization */
  264. # endif
  265. static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  266. const unsigned char *iv, int enc)
  267. {
  268. int ret, mode;
  269. EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
  270. mode = ctx->cipher->flags & EVP_CIPH_MODE;
  271. if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
  272. && !enc) {
  273. ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
  274. dat->block = (block128_f) aesni_decrypt;
  275. dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
  276. (cbc128_f) aesni_cbc_encrypt : NULL;
  277. } else {
  278. ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
  279. dat->block = (block128_f) aesni_encrypt;
  280. if (mode == EVP_CIPH_CBC_MODE)
  281. dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
  282. else if (mode == EVP_CIPH_CTR_MODE)
  283. dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
  284. else
  285. dat->stream.cbc = NULL;
  286. }
  287. if (ret < 0) {
  288. EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
  289. return 0;
  290. }
  291. return 1;
  292. }
  293. static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  294. const unsigned char *in, size_t len)
  295. {
  296. aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);
  297. return 1;
  298. }
  299. static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  300. const unsigned char *in, size_t len)
  301. {
  302. size_t bl = ctx->cipher->block_size;
  303. if (len < bl)
  304. return 1;
  305. aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
  306. return 1;
  307. }
  308. # define aesni_ofb_cipher aes_ofb_cipher
  309. static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  310. const unsigned char *in, size_t len);
  311. # define aesni_cfb_cipher aes_cfb_cipher
  312. static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  313. const unsigned char *in, size_t len);
  314. # define aesni_cfb8_cipher aes_cfb8_cipher
  315. static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  316. const unsigned char *in, size_t len);
  317. # define aesni_cfb1_cipher aes_cfb1_cipher
  318. static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  319. const unsigned char *in, size_t len);
  320. # define aesni_ctr_cipher aes_ctr_cipher
  321. static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  322. const unsigned char *in, size_t len);
  323. static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  324. const unsigned char *iv, int enc)
  325. {
  326. EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  327. if (!iv && !key)
  328. return 1;
  329. if (key) {
  330. aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
  331. CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
  332. gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
  333. /*
  334. * If we have an iv can set it directly, otherwise use saved IV.
  335. */
  336. if (iv == NULL && gctx->iv_set)
  337. iv = gctx->iv;
  338. if (iv) {
  339. CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
  340. gctx->iv_set = 1;
  341. }
  342. gctx->key_set = 1;
  343. } else {
  344. /* If key set use IV, otherwise copy */
  345. if (gctx->key_set)
  346. CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
  347. else
  348. memcpy(gctx->iv, iv, gctx->ivlen);
  349. gctx->iv_set = 1;
  350. gctx->iv_gen = 0;
  351. }
  352. return 1;
  353. }
  354. # define aesni_gcm_cipher aes_gcm_cipher
  355. static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  356. const unsigned char *in, size_t len);
  357. static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  358. const unsigned char *iv, int enc)
  359. {
  360. EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
  361. if (!iv && !key)
  362. return 1;
  363. if (key) {
  364. /* key_len is two AES keys */
  365. if (enc) {
  366. aesni_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
  367. xctx->xts.block1 = (block128_f) aesni_encrypt;
  368. xctx->stream = aesni_xts_encrypt;
  369. } else {
  370. aesni_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
  371. xctx->xts.block1 = (block128_f) aesni_decrypt;
  372. xctx->stream = aesni_xts_decrypt;
  373. }
  374. aesni_set_encrypt_key(key + ctx->key_len / 2,
  375. ctx->key_len * 4, &xctx->ks2.ks);
  376. xctx->xts.block2 = (block128_f) aesni_encrypt;
  377. xctx->xts.key1 = &xctx->ks1;
  378. }
  379. if (iv) {
  380. xctx->xts.key2 = &xctx->ks2;
  381. memcpy(ctx->iv, iv, 16);
  382. }
  383. return 1;
  384. }
  385. # define aesni_xts_cipher aes_xts_cipher
  386. static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  387. const unsigned char *in, size_t len);
  388. static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  389. const unsigned char *iv, int enc)
  390. {
  391. EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
  392. if (!iv && !key)
  393. return 1;
  394. if (key) {
  395. aesni_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
  396. CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
  397. &cctx->ks, (block128_f) aesni_encrypt);
  398. cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
  399. (ccm128_f) aesni_ccm64_decrypt_blocks;
  400. cctx->key_set = 1;
  401. }
  402. if (iv) {
  403. memcpy(ctx->iv, iv, 15 - cctx->L);
  404. cctx->iv_set = 1;
  405. }
  406. return 1;
  407. }
  408. # define aesni_ccm_cipher aes_ccm_cipher
  409. static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  410. const unsigned char *in, size_t len);
  411. # ifndef OPENSSL_NO_OCB
  412. static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  413. const unsigned char *iv, int enc)
  414. {
  415. EVP_AES_OCB_CTX *octx = ctx->cipher_data;
  416. if (!iv && !key)
  417. return 1;
  418. if (key) {
  419. do {
  420. /*
  421. * We set both the encrypt and decrypt key here because decrypt
  422. * needs both. We could possibly optimise to remove setting the
  423. * decrypt for an encryption operation.
  424. */
  425. aesni_set_encrypt_key(key, ctx->key_len * 8, &octx->ksenc.ks);
  426. aesni_set_decrypt_key(key, ctx->key_len * 8, &octx->ksdec.ks);
  427. if (!CRYPTO_ocb128_init(&octx->ocb,
  428. &octx->ksenc.ks, &octx->ksdec.ks,
  429. (block128_f) aesni_encrypt,
  430. (block128_f) aesni_decrypt))
  431. return 0;
  432. }
  433. while (0);
  434. /*
  435. * If we have an iv we can set it directly, otherwise use saved IV.
  436. */
  437. if (iv == NULL && octx->iv_set)
  438. iv = octx->iv;
  439. if (iv) {
  440. if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
  441. != 1)
  442. return 0;
  443. octx->iv_set = 1;
  444. }
  445. octx->key_set = 1;
  446. } else {
  447. /* If key set use IV, otherwise copy */
  448. if (octx->key_set)
  449. CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
  450. else
  451. memcpy(octx->iv, iv, octx->ivlen);
  452. octx->iv_set = 1;
  453. }
  454. return 1;
  455. }
  456. # define aesni_ocb_cipher aes_ocb_cipher
  457. static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  458. const unsigned char *in, size_t len);
  459. # endif /* OPENSSL_NO_OCB */
/*
 * BLOCK_CIPHER_generic: instantiate an EVP_CIPHER pair (AES-NI and plain
 * software AES) for a basic mode, plus a public EVP_aes_<keylen>_<mode>()
 * getter that picks the AES-NI table at runtime when AESNI_CAPABLE.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_init_key,                 \
        aesni_##mode##_cipher,          \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
/*
 * BLOCK_CIPHER_custom: like BLOCK_CIPHER_generic but for the AEAD/custom
 * modes (GCM/XTS/CCM/OCB) that carry their own context struct, init, ctrl
 * and cleanup.  XTS doubles the reported key length (two AES keys).
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_##mode##_init_key,        \
        aesni_##mode##_cipher,          \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
  501. # elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
  502. # include "sparc_arch.h"
  503. extern unsigned int OPENSSL_sparcv9cap_P[];
  504. # define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
  505. void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
  506. void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
  507. void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
  508. const AES_KEY *key);
  509. void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
  510. const AES_KEY *key);
  511. /*
  512. * Key-length specific subroutines were chosen for following reason.
  513. * Each SPARC T4 core can execute up to 8 threads which share core's
  514. * resources. Loading as much key material to registers allows to
  515. * minimize references to shared memory interface, as well as amount
  516. * of instructions in inner loops [much needed on T4]. But then having
  517. * non-key-length specific routines would require conditional branches
  518. * either in inner loops or on subroutines' entries. Former is hardly
  519. * acceptable, while latter means code size increase to size occupied
  520. * by multiple key-length specfic subroutines, so why fight?
  521. */
/*
 * Prototypes for the SPARC T4 AES assembler subroutines (implemented
 * outside this file).  One routine per key length for CBC and CTR; XTS
 * only comes in 128- and 256-bit flavours because XTS never uses
 * 192-bit AES keys.
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
/* CTR routines take a count of whole 16-byte blocks (see 'blocks' param). */
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
/* XTS: key1 processes the data, key2 generates the tweak from ivec. */
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
  561. static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  562. const unsigned char *iv, int enc)
  563. {
  564. int ret, mode, bits;
  565. EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
  566. mode = ctx->cipher->flags & EVP_CIPH_MODE;
  567. bits = ctx->key_len * 8;
  568. if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
  569. && !enc) {
  570. ret = 0;
  571. aes_t4_set_decrypt_key(key, bits, ctx->cipher_data);
  572. dat->block = (block128_f) aes_t4_decrypt;
  573. switch (bits) {
  574. case 128:
  575. dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
  576. (cbc128_f) aes128_t4_cbc_decrypt : NULL;
  577. break;
  578. case 192:
  579. dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
  580. (cbc128_f) aes192_t4_cbc_decrypt : NULL;
  581. break;
  582. case 256:
  583. dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
  584. (cbc128_f) aes256_t4_cbc_decrypt : NULL;
  585. break;
  586. default:
  587. ret = -1;
  588. }
  589. } else {
  590. ret = 0;
  591. aes_t4_set_encrypt_key(key, bits, ctx->cipher_data);
  592. dat->block = (block128_f) aes_t4_encrypt;
  593. switch (bits) {
  594. case 128:
  595. if (mode == EVP_CIPH_CBC_MODE)
  596. dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
  597. else if (mode == EVP_CIPH_CTR_MODE)
  598. dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
  599. else
  600. dat->stream.cbc = NULL;
  601. break;
  602. case 192:
  603. if (mode == EVP_CIPH_CBC_MODE)
  604. dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
  605. else if (mode == EVP_CIPH_CTR_MODE)
  606. dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
  607. else
  608. dat->stream.cbc = NULL;
  609. break;
  610. case 256:
  611. if (mode == EVP_CIPH_CBC_MODE)
  612. dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
  613. else if (mode == EVP_CIPH_CTR_MODE)
  614. dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
  615. else
  616. dat->stream.cbc = NULL;
  617. break;
  618. default:
  619. ret = -1;
  620. }
  621. }
  622. if (ret < 0) {
  623. EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
  624. return 0;
  625. }
  626. return 1;
  627. }
/*
 * For the non-AEAD modes the T4 build shares the generic per-call cipher
 * routines defined later in this file: only key init differs, hence the
 * aliasing defines; the prototypes keep the EVP_CIPHER tables compiling.
 */
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);
# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);
# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
  649. static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  650. const unsigned char *iv, int enc)
  651. {
  652. EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  653. if (!iv && !key)
  654. return 1;
  655. if (key) {
  656. int bits = ctx->key_len * 8;
  657. aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
  658. CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
  659. (block128_f) aes_t4_encrypt);
  660. switch (bits) {
  661. case 128:
  662. gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
  663. break;
  664. case 192:
  665. gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
  666. break;
  667. case 256:
  668. gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
  669. break;
  670. default:
  671. return 0;
  672. }
  673. /*
  674. * If we have an iv can set it directly, otherwise use saved IV.
  675. */
  676. if (iv == NULL && gctx->iv_set)
  677. iv = gctx->iv;
  678. if (iv) {
  679. CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
  680. gctx->iv_set = 1;
  681. }
  682. gctx->key_set = 1;
  683. } else {
  684. /* If key set use IV, otherwise copy */
  685. if (gctx->key_set)
  686. CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
  687. else
  688. memcpy(gctx->iv, iv, gctx->ivlen);
  689. gctx->iv_set = 1;
  690. gctx->iv_gen = 0;
  691. }
  692. return 1;
  693. }
/* GCM per-call handler is shared with the generic implementation. */
# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
  697. static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  698. const unsigned char *iv, int enc)
  699. {
  700. EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
  701. if (!iv && !key)
  702. return 1;
  703. if (key) {
  704. int bits = ctx->key_len * 4;
  705. xctx->stream = NULL;
  706. /* key_len is two AES keys */
  707. if (enc) {
  708. aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
  709. xctx->xts.block1 = (block128_f) aes_t4_encrypt;
  710. switch (bits) {
  711. case 128:
  712. xctx->stream = aes128_t4_xts_encrypt;
  713. break;
  714. case 256:
  715. xctx->stream = aes256_t4_xts_encrypt;
  716. break;
  717. default:
  718. return 0;
  719. }
  720. } else {
  721. aes_t4_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
  722. xctx->xts.block1 = (block128_f) aes_t4_decrypt;
  723. switch (bits) {
  724. case 128:
  725. xctx->stream = aes128_t4_xts_decrypt;
  726. break;
  727. case 256:
  728. xctx->stream = aes256_t4_xts_decrypt;
  729. break;
  730. default:
  731. return 0;
  732. }
  733. }
  734. aes_t4_set_encrypt_key(key + ctx->key_len / 2,
  735. ctx->key_len * 4, &xctx->ks2.ks);
  736. xctx->xts.block2 = (block128_f) aes_t4_encrypt;
  737. xctx->xts.key1 = &xctx->ks1;
  738. }
  739. if (iv) {
  740. xctx->xts.key2 = &xctx->ks2;
  741. memcpy(ctx->iv, iv, 16);
  742. }
  743. return 1;
  744. }
/* XTS per-call handler is shared with the generic implementation. */
# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
  748. static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  749. const unsigned char *iv, int enc)
  750. {
  751. EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
  752. if (!iv && !key)
  753. return 1;
  754. if (key) {
  755. int bits = ctx->key_len * 8;
  756. aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
  757. CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
  758. &cctx->ks, (block128_f) aes_t4_encrypt);
  759. cctx->str = NULL;
  760. cctx->key_set = 1;
  761. }
  762. if (iv) {
  763. memcpy(ctx->iv, iv, 15 - cctx->L);
  764. cctx->iv_set = 1;
  765. }
  766. return 1;
  767. }
/* CCM per-call handler is shared with the generic implementation. */
# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
/*
 * OCB key/IV initialisation for the SPARC T4 path.  Either argument may
 * be NULL; calling with neither is a successful no-op.  Returns 1 on
 * success, 0 if OCB context or IV setup fails.
 */
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, ctx->key_len * 8, &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, ctx->key_len * 8, &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt))
                return 0;
        }
        while (0);              /* single-pass block, mirrors generic init */
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
/* OCB per-call handler is shared with the generic implementation. */
# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */
/*
 * BLOCK_CIPHER_generic (T4 variant): emits two EVP_CIPHER tables per
 * (keylen, mode) pair -- a T4-accelerated one and the generic one --
 * plus the public EVP_aes_* accessor that selects between them at run
 * time via SPARC_AES_CAPABLE.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
/*
 * BLOCK_CIPHER_custom (T4 variant): like the above but for modes with a
 * dedicated context, cleanup and ctrl handler (GCM/XTS/CCM/OCB).  Note
 * the XTS key length is doubled: it carries two AES keys.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
# else
/* No SPARC T4 support: only the generic tables are emitted. */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
# endif
/*
 * ARM capability plumbing: on ARMv7+ the NEON unit enables the
 * bit-sliced and vector-permutation AES paths, and the ARMv8 Crypto
 * Extensions are exposed through the generic HWAES_* names.
 */
# if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
# include "arm_arch.h"
# if __ARM_MAX_ARCH__>=7
# if defined(BSAES_ASM)
# define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
# endif
# if defined(VPAES_ASM)
# define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
# endif
# define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
# define HWAES_set_encrypt_key aes_v8_set_encrypt_key
# define HWAES_set_decrypt_key aes_v8_set_decrypt_key
# define HWAES_encrypt aes_v8_encrypt
# define HWAES_decrypt aes_v8_decrypt
# define HWAES_cbc_encrypt aes_v8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
# endif
# endif
# if defined(HWAES_CAPABLE)
/* Prototypes for whichever hardware AES the HWAES_* macros mapped in. */
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
# endif
/*
 * Instantiate all seven generic modes for one key length.  CBC/ECB/OFB/
 * CFB128 carry the default-ASN1 flag; CFB1, CFB8 and CTR do not.
 */
# define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
/*
 * Generic AES key schedule initialisation.  The unusual layout -- a
 * braceless outer if/else whose branches are themselves chains of
 * #ifdef'd if/else -- picks, at run time, the best implementation that
 * was compiled in (hardware AES, bit-sliced, vector-permutation, or the
 * reference tables).  Which branches exist depends on compile-time
 * macros; the "(void)0;" statement closes an 'else' that is left
 * dangling when optional branches are compiled out.
 */
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    /* Only ECB and CBC use the decryption direction of the cipher. */
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc)
# ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
# endif
# ifdef BSAES_CAPABLE
        /* Bit-sliced AES only accelerates CBC decrypt on this side. */
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
# endif
# ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
# endif
        {
            /* Reference implementation fallback. */
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        } else
    /* Encryption direction: also covers all stream-like modes. */
# ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) HWAES_encrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
            else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
            if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
            else
# endif
                (void)0;        /* terminate potentially open 'else' */
        } else
# endif
# ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
            ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) AES_encrypt;
            dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
        } else
# endif
# ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) vpaes_encrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
# endif
        {
            ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) AES_encrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
# ifdef AES_CTR_ASM
            if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
# endif
        }
    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }
    return 1;
}
  1017. static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  1018. const unsigned char *in, size_t len)
  1019. {
  1020. EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
  1021. if (dat->stream.cbc)
  1022. (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
  1023. else if (ctx->encrypt)
  1024. CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  1025. else
  1026. CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  1027. return 1;
  1028. }
  1029. static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  1030. const unsigned char *in, size_t len)
  1031. {
  1032. size_t bl = ctx->cipher->block_size;
  1033. size_t i;
  1034. EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
  1035. if (len < bl)
  1036. return 1;
  1037. for (i = 0, len -= bl; i <= len; i += bl)
  1038. (*dat->block) (in + i, out + i, &dat->ks);
  1039. return 1;
  1040. }
  1041. static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  1042. const unsigned char *in, size_t len)
  1043. {
  1044. EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
  1045. CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
  1046. ctx->iv, &ctx->num, dat->block);
  1047. return 1;
  1048. }
  1049. static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  1050. const unsigned char *in, size_t len)
  1051. {
  1052. EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
  1053. CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
  1054. ctx->iv, &ctx->num, ctx->encrypt, dat->block);
  1055. return 1;
  1056. }
  1057. static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  1058. const unsigned char *in, size_t len)
  1059. {
  1060. EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
  1061. CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
  1062. ctx->iv, &ctx->num, ctx->encrypt, dat->block);
  1063. return 1;
  1064. }
  1065. static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  1066. const unsigned char *in, size_t len)
  1067. {
  1068. EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
  1069. if (ctx->flags & EVP_CIPH_FLAG_LENGTH_BITS) {
  1070. CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
  1071. ctx->iv, &ctx->num, ctx->encrypt, dat->block);
  1072. return 1;
  1073. }
  1074. while (len >= MAXBITCHUNK) {
  1075. CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
  1076. ctx->iv, &ctx->num, ctx->encrypt, dat->block);
  1077. len -= MAXBITCHUNK;
  1078. }
  1079. if (len)
  1080. CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
  1081. ctx->iv, &ctx->num, ctx->encrypt, dat->block);
  1082. return 1;
  1083. }
  1084. static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  1085. const unsigned char *in, size_t len)
  1086. {
  1087. unsigned int num = ctx->num;
  1088. EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
  1089. if (dat->stream.ctr)
  1090. CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
  1091. ctx->iv, ctx->buf, &num, dat->stream.ctr);
  1092. else
  1093. CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
  1094. ctx->iv, ctx->buf, &num, dat->block);
  1095. ctx->num = (size_t)num;
  1096. return 1;
  1097. }
/* Emit the EVP_CIPHER tables and accessors for AES-128/192/256. */
BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
  1101. static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
  1102. {
  1103. EVP_AES_GCM_CTX *gctx = c->cipher_data;
  1104. OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  1105. if (gctx->iv != c->iv)
  1106. OPENSSL_free(gctx->iv);
  1107. return 1;
  1108. }
  1109. /* increment counter (64-bit int) by 1 */
  1110. static void ctr64_inc(unsigned char *counter)
  1111. {
  1112. int n = 8;
  1113. unsigned char c;
  1114. do {
  1115. --n;
  1116. c = counter[n];
  1117. ++c;
  1118. counter[n] = c;
  1119. if (c)
  1120. return;
  1121. } while (n);
  1122. }
/*
 * Control operations for the GCM cipher context.  Returns 1 on success,
 * 0 on failure and -1 for unknown control types -- except
 * EVP_CTRL_AEAD_TLS1_AAD, which returns the tag length on success.
 */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = c->cipher_data;
    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset all GCM state; IV initially aliases the EVP context IV. */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = c->cipher->iv_len;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;
    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            gctx->iv = OPENSSL_malloc(arg);
            if (!gctx->iv)
                return 0;
        }
        gctx->ivlen = arg;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        /* The expected tag may only be supplied for decryption. */
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;
    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag is only available after encryption has produced one. */
        if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;
    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        /* Encrypt side gets a random invocation field. */
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;
    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        /* Hand the trailing 'arg' IV bytes back to the caller. */
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;
    case EVP_CTRL_GCM_SET_IV_INV:
        /* Decrypt-only: splice a received invocation field into the IV. */
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;
    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        {
            /* Last two AAD bytes hold the TLS record length, big-endian. */
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt)
                len -= EVP_GCM_TLS_TAG_LEN;
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;
    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
            /* Re-point internal pointers at the copy's own storage. */
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
                if (!gctx_out->iv)
                    return 0;
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }
    default:
        return -1;
    }
}
/*
 * Generic GCM key/IV initialisation.  The do { ... } while (0) wrapper
 * lets each #ifdef'd implementation branch 'break' past the fallback
 * code, and the "(void)0;" closes an 'else' that may be left dangling
 * when optional branches are compiled out.
 */
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        do {
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
# endif
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */
            /* Reference implementation fallback. */
            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
# ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
# else
            gctx->ctr = NULL;
# endif
        } while (0);
        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
  1311. /*
  1312. * Handle TLS GCM packet format. This consists of the last portion of the IV
  1313. * followed by the payload and finally the tag. On encrypt generate IV,
  1314. * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
  1315. * and verify tag.
  1316. */
  1317. static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  1318. const unsigned char *in, size_t len)
  1319. {
  1320. EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  1321. int rv = -1;
  1322. /* Encrypt/decrypt must be performed in place */
  1323. if (out != in
  1324. || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
  1325. return -1;
  1326. /*
  1327. * Set IV from start of buffer or generate IV and write to start of
  1328. * buffer.
  1329. */
  1330. if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
  1331. EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
  1332. EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
  1333. goto err;
  1334. /* Use saved AAD */
  1335. if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
  1336. goto err;
  1337. /* Fix buffer and length to point to payload */
  1338. in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
  1339. out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
  1340. len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
  1341. if (ctx->encrypt) {
  1342. /* Encrypt payload */
  1343. if (gctx->ctr) {
  1344. size_t bulk = 0;
  1345. # if defined(AES_GCM_ASM)
  1346. if (len >= 32 && AES_GCM_ASM(gctx)) {
  1347. if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
  1348. return -1;
  1349. bulk = AES_gcm_encrypt(in, out, len,
  1350. gctx->gcm.key,
  1351. gctx->gcm.Yi.c, gctx->gcm.Xi.u);
  1352. gctx->gcm.len.u[1] += bulk;
  1353. }
  1354. # endif
  1355. if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
  1356. in + bulk,
  1357. out + bulk,
  1358. len - bulk, gctx->ctr))
  1359. goto err;
  1360. } else {
  1361. size_t bulk = 0;
  1362. # if defined(AES_GCM_ASM2)
  1363. if (len >= 32 && AES_GCM_ASM2(gctx)) {
  1364. if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
  1365. return -1;
  1366. bulk = AES_gcm_encrypt(in, out, len,
  1367. gctx->gcm.key,
  1368. gctx->gcm.Yi.c, gctx->gcm.Xi.u);
  1369. gctx->gcm.len.u[1] += bulk;
  1370. }
  1371. # endif
  1372. if (CRYPTO_gcm128_encrypt(&gctx->gcm,
  1373. in + bulk, out + bulk, len - bulk))
  1374. goto err;
  1375. }
  1376. out += len;
  1377. /* Finally write tag */
  1378. CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
  1379. rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
  1380. } else {
  1381. /* Decrypt */
  1382. if (gctx->ctr) {
  1383. size_t bulk = 0;
  1384. # if defined(AES_GCM_ASM)
  1385. if (len >= 16 && AES_GCM_ASM(gctx)) {
  1386. if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
  1387. return -1;
  1388. bulk = AES_gcm_decrypt(in, out, len,
  1389. gctx->gcm.key,
  1390. gctx->gcm.Yi.c, gctx->gcm.Xi.u);
  1391. gctx->gcm.len.u[1] += bulk;
  1392. }
  1393. # endif
  1394. if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
  1395. in + bulk,
  1396. out + bulk,
  1397. len - bulk, gctx->ctr))
  1398. goto err;
  1399. } else {
  1400. size_t bulk = 0;
  1401. # if defined(AES_GCM_ASM2)
  1402. if (len >= 16 && AES_GCM_ASM2(gctx)) {
  1403. if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
  1404. return -1;
  1405. bulk = AES_gcm_decrypt(in, out, len,
  1406. gctx->gcm.key,
  1407. gctx->gcm.Yi.c, gctx->gcm.Xi.u);
  1408. gctx->gcm.len.u[1] += bulk;
  1409. }
  1410. # endif
  1411. if (CRYPTO_gcm128_decrypt(&gctx->gcm,
  1412. in + bulk, out + bulk, len - bulk))
  1413. goto err;
  1414. }
  1415. /* Retrieve tag */
  1416. CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
  1417. /* If tag mismatch wipe buffer */
  1418. if (memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
  1419. OPENSSL_cleanse(out, len);
  1420. goto err;
  1421. }
  1422. rv = len;
  1423. }
  1424. err:
  1425. gctx->iv_set = 0;
  1426. gctx->tls_aad_len = -1;
  1427. return rv;
  1428. }
/*
 * AES-GCM Update/Final handler for the generic (non-TLS) EVP path.
 *
 * EVP custom-cipher calling convention:
 *   in != NULL, out == NULL -> absorb 'len' bytes of AAD
 *   in != NULL, out != NULL -> encrypt or decrypt 'len' payload bytes
 *   in == NULL              -> Final: generate the 16-byte tag (encrypt)
 *                              into ctx->buf, or verify the tag previously
 *                              supplied via ctrl (decrypt)
 * Returns the number of bytes processed, 0 for a successful Final, or -1
 * on error (key/IV not set, GHASH failure, tag mismatch).
 */
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;
    /* A non-negative AAD length means the TLS record fast path is active */
    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);
    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
# if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    /*
                     * Flush any buffered partial block first so the
                     * stitched assembler starts on a block boundary.
                     */
                    size_t res = (16 - gctx->gcm.mres) % 16;
                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;
                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    /* Credit assembler-processed bytes to the GCM length */
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                /* Any remaining tail goes through the ctr32-based path */
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
# if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;
                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;
                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            /* Decrypt payload; structure mirrors the encrypt paths above */
            if (gctx->ctr) {
                size_t bulk = 0;
# if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;
                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;
                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
# if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;
                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;
                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            /* Final (decrypt): verify tag set via EVP_CTRL_AEAD_SET_TAG */
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        /* Final (encrypt): stash the 16-byte tag for later GET_TAG */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }
}
/*
 * Flags shared by the custom-cipher modes in this file: the cipher manages
 * its own IV and buffering, wants init/ctrl callbacks invoked
 * unconditionally, and copies its context manually.
 */
# define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)
/* AES-GCM methods: stream-like block size 1, default IV length 12 bytes */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
  1552. static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
  1553. {
  1554. EVP_AES_XTS_CTX *xctx = c->cipher_data;
  1555. if (type == EVP_CTRL_COPY) {
  1556. EVP_CIPHER_CTX *out = ptr;
  1557. EVP_AES_XTS_CTX *xctx_out = out->cipher_data;
  1558. if (xctx->xts.key1) {
  1559. if (xctx->xts.key1 != &xctx->ks1)
  1560. return 0;
  1561. xctx_out->xts.key1 = &xctx_out->ks1;
  1562. }
  1563. if (xctx->xts.key2) {
  1564. if (xctx->xts.key2 != &xctx->ks2)
  1565. return 0;
  1566. xctx_out->xts.key2 = &xctx_out->ks2;
  1567. }
  1568. return 1;
  1569. } else if (type != EVP_CTRL_INIT)
  1570. return -1;
  1571. /* key1 and key2 are used as an indicator both key and IV are set */
  1572. xctx->xts.key1 = NULL;
  1573. xctx->xts.key2 = NULL;
  1574. return 1;
  1575. }
/*
 * Schedule the two AES keys used by XTS and/or record the 16-byte tweak IV.
 *
 * 'key' holds two concatenated AES keys: the first half is the data key
 * (ks1, scheduled for encrypt or decrypt per 'enc'), the second half is
 * the tweak key (ks2, always an encrypt schedule).  ctx->key_len covers
 * both halves, hence "ctx->key_len * 4" for the bit size of one half.
 * Backends are probed in priority order (hardware AES, bitsliced, vector
 * permutation, plain C); the do/while(0) lets a taken branch 'break'
 * over the software fallback.  Always returns 1.
 */
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key)
        do {
# ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
# else
            xctx->stream = NULL;
# endif
            /* key_len is two AES keys */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
                } else {
                    HWAES_set_decrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
                }
                /* Second half of the key is the (always-encrypt) tweak key */
                HWAES_set_encrypt_key(key + ctx->key_len / 2,
                                      ctx->key_len * 4, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;
                /* Setting key1 marks the key as fully scheduled */
                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                /* Bitsliced backend only overrides the bulk stream routine */
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }
                vpaes_set_encrypt_key(key + ctx->key_len / 2,
                                      ctx->key_len * 4, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;
                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
            (void)0;            /* terminate potentially open 'else' */
            /* Plain C fallback */
            if (enc) {
                AES_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }
            AES_set_encrypt_key(key + ctx->key_len / 2,
                                ctx->key_len * 4, &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;
            xctx->xts.key1 = &xctx->ks1;
        } while (0);
    if (iv) {
        /* key2 doubles as the "IV has been set" indicator */
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }
    return 1;
}
  1650. static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  1651. const unsigned char *in, size_t len)
  1652. {
  1653. EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
  1654. if (!xctx->xts.key1 || !xctx->xts.key2)
  1655. return 0;
  1656. if (!out || !in || len < AES_BLOCK_SIZE)
  1657. return 0;
  1658. if (xctx->stream)
  1659. (*xctx->stream) (in, out, len,
  1660. xctx->xts.key1, xctx->xts.key2, ctx->iv);
  1661. else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
  1662. ctx->encrypt))
  1663. return 0;
  1664. return 1;
  1665. }
/* XTS needs no per-context cleanup */
# define aes_xts_cleanup NULL
# define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)
/* AES-XTS methods: block size 1, 16-byte tweak IV; only 128/256 exist */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
/*
 * Control operations for AES-CCM contexts.
 *
 * Returns 1 on success, 0 on invalid arguments, -1 for unknown controls.
 */
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = c->cipher_data;
    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        /* CCM defaults: L (length-field size) = 8, M (tag length) = 12 */
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return 1;
    case EVP_CTRL_AEAD_SET_IVLEN:
        /* Nonce length is 15 - L, so convert the IV length to an L value */
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be even and between 4 and 16 */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        /* Encrypt may only set the length; decrypt must supply the tag */
        if ((c->encrypt && ptr) || (!c->encrypt && !ptr))
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(c->buf, ptr, arg);
        }
        cctx->M = arg;
        return 1;
    case EVP_CTRL_AEAD_GET_TAG:
        /* Only valid after encryption has produced a tag */
        if (!c->encrypt || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* Retrieving the tag ends the operation: force fresh IV/length */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;
    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = out->cipher_data;
            /* Re-point the key schedule if it lives inside this context */
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }
    default:
        return -1;
    }
}
/*
 * Schedule the AES key for CCM and/or load the nonce.
 *
 * CCM only ever uses the encrypt direction of the block cipher.  Backends
 * are probed in priority order (hardware AES, vector permutation, plain C);
 * the do/while(0) lets a taken branch 'break' past the fallback.  The
 * nonce occupies 15 - L bytes of ctx->iv.  Always returns 1.
 */
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key)
        do {
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
# endif
            /* Plain C fallback */
            AES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        /* Nonce length is determined by L: 15 - L bytes */
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
/*
 * AES-CCM Update/Final handler.
 *
 * CCM is not an online mode: the total payload length must be declared
 * before any data is processed.  EVP convention used here:
 *   out == NULL, in == NULL -> 'len' declares the total payload length
 *                              (programs the formatted nonce)
 *   out == NULL, in != NULL -> absorb 'len' bytes of AAD
 *   in == NULL              -> Final; CCM emits no extra output
 *   otherwise               -> encrypt/decrypt 'len' bytes
 * Returns bytes processed, 0 on Final, or -1 on error.  On decrypt the
 * computed tag is compared against c->buf and the plaintext is wiped on
 * mismatch.
 */
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->iv_set && !cctx->key_set)
        return -1;
    /* Decryption needs the expected tag before any data arrives */
    if (!ctx->encrypt && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            /* Declare total message length; formats nonce + length field */
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* EVP_*Final() doesn't return any data */
    if (!in)
        return 0;
    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (ctx->encrypt) {
        /* Prefer the platform ccm64 stream routine when available */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                /* Accept only if the computed tag matches the expected one */
                if (!memcmp(tag, ctx->buf, cctx->M))
                    rv = len;
            }
        }
        /* On any failure wipe the (untrusted) plaintext */
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
/* CCM needs no per-context cleanup */
# define aes_ccm_cleanup NULL
/* AES-CCM methods: stream-like block size 1, default nonce length 12 */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM, CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM, CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM, CUSTOM_FLAGS)
/* Per-context state for the AES key-wrap ciphers */
typedef struct {
    union {
        double align;           /* forces alignment of the key schedule */
        AES_KEY ks;
    } ks;
    /*
     * Points at ctx->iv once an IV has been supplied, NULL otherwise;
     * a NULL here tells the wrap routines to use their default IV.
     */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;
  1836. static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  1837. const unsigned char *iv, int enc)
  1838. {
  1839. EVP_AES_WRAP_CTX *wctx = ctx->cipher_data;
  1840. if (!iv && !key)
  1841. return 1;
  1842. if (key) {
  1843. if (ctx->encrypt)
  1844. AES_set_encrypt_key(key, ctx->key_len * 8, &wctx->ks.ks);
  1845. else
  1846. AES_set_decrypt_key(key, ctx->key_len * 8, &wctx->ks.ks);
  1847. if (!iv)
  1848. wctx->iv = NULL;
  1849. }
  1850. if (iv) {
  1851. memcpy(ctx->iv, iv, EVP_CIPHER_CTX_iv_length(ctx));
  1852. wctx->iv = ctx->iv;
  1853. }
  1854. return 1;
  1855. }
/*
 * AES key (un)wrap Update handler covering both RFC 3394 and the RFC 5649
 * padded variant; 'pad' is inferred from the configured IV length
 * (4 bytes => padded, 8 bytes => plain).
 *
 * With out == NULL this only reports the required output size: wrapped
 * size is inlen (rounded up to a multiple of 8 when padding) plus the
 * 8-byte integrity prefix; unwrapped size is inlen - 8 (an upper bound
 * for the padded variant).  Returns the byte count or -1 on error.
 *
 * NOTE(review): inlen is not bounded here, so 'inlen + 8' and the (int)
 * conversion of rv could overflow for very large inputs; later OpenSSL
 * versions add an explicit range check -- confirm callers constrain inlen.
 */
static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = ctx->cipher_data;
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!ctx->encrypt && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (!out) {
        /* Length query only: no data processed */
        if (ctx->encrypt) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (ctx->encrypt)
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (ctx->encrypt)
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    /* The wrap routines return 0 on failure */
    return rv ? (int)rv : -1;
}
/* Key wrap is a custom mode with its own IV handling and one-shot cipher */
# define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
/* AES-128 key wrap (RFC 3394): block size 8, key 16, IV 8 */
static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}
/* AES-192 key wrap (RFC 3394): block size 8, key 24, IV 8 */
static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

/* AES-256 key wrap (RFC 3394): block size 8, key 32, IV 8 */
static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}
/* Padded key wrap (RFC 5649): the 4-byte IV length selects padding mode */
static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
  1985. # ifndef OPENSSL_NO_OCB
/*
 * Control operations for AES-OCB contexts.
 *
 * Returns 1 on success, 0 on invalid arguments, -1 for unknown controls.
 */
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = c->cipher_data;
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;
    switch (type) {
    case EVP_CTRL_INIT:
        octx->key_set = 0;
        octx->iv_set = 0;
        /* Default to the cipher's nominal IV length and a 16-byte tag */
        octx->ivlen = c->cipher->iv_len;
        octx->iv = c->iv;
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;
    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;
        octx->ivlen = arg;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        if (!ptr) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;
            octx->taglen = arg;
            return 1;
        }
        /* Supplying an expected tag is only valid when decrypting */
        if (arg != octx->taglen || c->encrypt)
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;
    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag retrieval is only valid when encrypting */
        if (arg != octx->taglen || !c->encrypt)
            return 0;
        memcpy(ptr, octx->tag, arg);
        return 1;
    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = newc->cipher_data;
        /* Deep-copy OCB state, re-pointing at the new key schedules */
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);
    default:
        return -1;
    }
}
/*
 * Schedule the AES keys for OCB and/or set the nonce.
 *
 * Both encrypt and decrypt schedules are always built because OCB
 * decryption needs both.  When only an IV is supplied it is either applied
 * directly (key already set) or saved for when the key arrives.
 * Returns 1 on success, 0 on failure.
 */
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, ctx->key_len * 8, &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt))
                    return 0;
                break;
            }
# endif
            /* Plain C fallback */
            AES_set_encrypt_key(key, ctx->key_len * 8, &octx->ksenc.ks);
            AES_set_decrypt_key(key, ctx->key_len * 8, &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt))
                return 0;
        }
        while (0);
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
/*
 * AES-OCB Update/Final handler.
 *
 * The low-level OCB routines only accept whole 16-byte blocks, so partial
 * blocks of payload (data_buf) and AAD (aad_buf) are buffered here between
 * calls.  EVP convention:
 *   in != NULL, out == NULL -> absorb 'len' bytes of AAD
 *   in != NULL, out != NULL -> encrypt/decrypt 'len' bytes
 *   in == NULL              -> Final: flush buffered partial blocks, then
 *                              generate the tag (encrypt) or verify it
 *                              (decrypt)
 * Returns the number of bytes written to 'out' (possibly 0 while data is
 * buffered), or -1 on error.
 */
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = ctx->cipher_data;
    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;
    if (!octx->key_set)
        return -1;
    if (in) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */
        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);
        }
        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len) {
            unsigned int remaining;
            remaining = 16 - (*buf_len);
            if (remaining > len) {
                /* Still not a full block: just accumulate and return */
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);
            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, 16))
                    return -1;
            } else if (ctx->encrypt) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out, 16))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out, 16))
                    return -1;
            }
            written_len = 16;
            *buf_len = 0;
        }
        /* Do we have a partial block to handle at the end? */
        trailing_len = len % 16;
        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (ctx->encrypt) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }
        /* Handle any trailing partial block */
        if (trailing_len) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }
        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len) {
            if (ctx->encrypt) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!ctx->encrypt) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
/* Release the OCB mode state held inside the context; always returns 1 */
static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = c->cipher_data;
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}
/* AES-OCB methods: block size 16, default nonce length 12 */
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB, CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB, CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB, CUSTOM_FLAGS)
# endif                         /* OPENSSL_NO_OCB */
#endif