- /*
- * Copyright 2004-2021 The OpenSSL Project Authors. All Rights Reserved.
- *
- * Licensed under the Apache License 2.0 (the "License"). You may not use
- * this file except in compliance with the License. You can obtain a copy
- * in the file LICENSE in the source distribution or at
- * https://www.openssl.org/source/license.html
- */
- /*
- * This file uses the low-level AES and engine functions (which are deprecated
- * for non-internal use) in order to implement the padlock engine AES ciphers.
- */
- #define OPENSSL_SUPPRESS_DEPRECATED
- #include <stdio.h>
- #include <string.h>
- #include <openssl/opensslconf.h>
- #include <openssl/crypto.h>
- #include <openssl/engine.h>
- #include <openssl/evp.h>
- #include <openssl/aes.h>
- #include <openssl/rand.h>
- #include <openssl/err.h>
- #include <openssl/modes.h>
- #ifndef OPENSSL_NO_PADLOCKENG
- /*
- * VIA PadLock AES is available *ONLY* on some x86 CPUs. Not only is it
- * unavailable elsewhere, it cannot even be compiled on other platforms!
- */
- # undef COMPILE_PADLOCKENG
- # if defined(PADLOCK_ASM)
- # define COMPILE_PADLOCKENG
- # ifdef OPENSSL_NO_DYNAMIC_ENGINE
- static ENGINE *ENGINE_padlock(void);
- # endif
- # endif
- # ifdef OPENSSL_NO_DYNAMIC_ENGINE
- void engine_load_padlock_int(void);
- void engine_load_padlock_int(void)
- {
- /* On non-x86 CPUs it just returns. */
- # ifdef COMPILE_PADLOCKENG
- ENGINE *toadd = ENGINE_padlock();
- if (!toadd)
- return;
- ERR_set_mark();
- ENGINE_add(toadd);
- /*
- * If the "add" worked, it gets a structural reference. So either way, we
- * release our just-created reference.
- */
- ENGINE_free(toadd);
- /*
- * If the "add" didn't work, it was probably a conflict because it was
- * already added (e.g. someone calling ENGINE_load_blah and then calling
- * ENGINE_load_builtin_engines()).
- */
- ERR_pop_to_mark();
- # endif
- }
- # endif
- # ifdef COMPILE_PADLOCKENG
- /* Functions for ENGINE detection and control */
- static int padlock_available(void);
- static int padlock_init(ENGINE *e);
- /* RNG Stuff */
- static RAND_METHOD padlock_rand;
- /* Cipher Stuff */
- static int padlock_ciphers(ENGINE *e, const EVP_CIPHER **cipher,
- const int **nids, int nid);
- /* Engine names */
- static const char *padlock_id = "padlock";
- static char padlock_name[100];
- /* Available features */
- static int padlock_use_ace = 0; /* Advanced Cryptography Engine */
- static int padlock_use_rng = 0; /* Random Number Generator */
- /* ===== Engine "management" functions ===== */
- /* Prepare the ENGINE structure for registration */
- static int padlock_bind_helper(ENGINE *e)
- {
- /* Check available features */
- padlock_available();
- /*
- * The RNG is currently disabled for the reasons discussed in the
- * commentary just before the padlock_rand_bytes function.
- */
- padlock_use_rng = 0;
- /* Generate a nice engine name with available features */
- BIO_snprintf(padlock_name, sizeof(padlock_name),
- "VIA PadLock (%s, %s)",
- padlock_use_rng ? "RNG" : "no-RNG",
- padlock_use_ace ? "ACE" : "no-ACE");
- /* Register everything or return with an error */
- if (!ENGINE_set_id(e, padlock_id) ||
- !ENGINE_set_name(e, padlock_name) ||
- !ENGINE_set_init_function(e, padlock_init) ||
- (padlock_use_ace && !ENGINE_set_ciphers(e, padlock_ciphers)) ||
- (padlock_use_rng && !ENGINE_set_RAND(e, &padlock_rand))) {
- return 0;
- }
- /* Everything looks good */
- return 1;
- }
- # ifdef OPENSSL_NO_DYNAMIC_ENGINE
- /* Constructor */
- static ENGINE *ENGINE_padlock(void)
- {
- ENGINE *eng = ENGINE_new();
- if (eng == NULL) {
- return NULL;
- }
- if (!padlock_bind_helper(eng)) {
- ENGINE_free(eng);
- return NULL;
- }
- return eng;
- }
- # endif
- /* Check availability of the engine */
- static int padlock_init(ENGINE *e)
- {
- return (padlock_use_rng || padlock_use_ace);
- }
- /*
- * This is needed if this ENGINE is being compiled into a self-contained
- * shared library.
- */
- # ifndef OPENSSL_NO_DYNAMIC_ENGINE
- static int padlock_bind_fn(ENGINE *e, const char *id)
- {
- if (id && (strcmp(id, padlock_id) != 0)) {
- return 0;
- }
- if (!padlock_bind_helper(e)) {
- return 0;
- }
- return 1;
- }
- IMPLEMENT_DYNAMIC_CHECK_FN()
- IMPLEMENT_DYNAMIC_BIND_FN(padlock_bind_fn)
- # endif /* !OPENSSL_NO_DYNAMIC_ENGINE */
- /* ===== Here comes the "real" engine ===== */
- /* Some AES-related constants */
- # define AES_BLOCK_SIZE 16
- # define AES_KEY_SIZE_128 16
- # define AES_KEY_SIZE_192 24
- # define AES_KEY_SIZE_256 32
- /*
- * Here we store the status information relevant to the current context.
- */
- /*
- * BIG FAT WARNING: Inline assembler in PADLOCK_XCRYPT_ASM() depends on
- * the order of items in this structure. Don't blindly modify, reorder,
- * etc!
- */
- struct padlock_cipher_data {
- unsigned char iv[AES_BLOCK_SIZE]; /* Initialization vector */
- union {
- unsigned int pad[4];
- struct {
- int rounds:4;
- int dgst:1; /* n/a in C3 */
- int align:1; /* n/a in C3 */
- int ciphr:1; /* n/a in C3 */
- unsigned int keygen:1;
- int interm:1;
- unsigned int encdec:1;
- int ksize:2;
- } b;
- } cword; /* Control word */
- AES_KEY ks; /* Encryption key */
- };
- /* Interface to assembler module */
- unsigned int padlock_capability(void);
- void padlock_key_bswap(AES_KEY *key);
- void padlock_verify_context(struct padlock_cipher_data *ctx);
- void padlock_reload_key(void);
- void padlock_aes_block(void *out, const void *inp,
- struct padlock_cipher_data *ctx);
- int padlock_ecb_encrypt(void *out, const void *inp,
- struct padlock_cipher_data *ctx, size_t len);
- int padlock_cbc_encrypt(void *out, const void *inp,
- struct padlock_cipher_data *ctx, size_t len);
- int padlock_cfb_encrypt(void *out, const void *inp,
- struct padlock_cipher_data *ctx, size_t len);
- int padlock_ofb_encrypt(void *out, const void *inp,
- struct padlock_cipher_data *ctx, size_t len);
- int padlock_ctr32_encrypt(void *out, const void *inp,
- struct padlock_cipher_data *ctx, size_t len);
- int padlock_xstore(void *out, int edx);
- void padlock_sha1_oneshot(void *ctx, const void *inp, size_t len);
- void padlock_sha1(void *ctx, const void *inp, size_t len);
- void padlock_sha256_oneshot(void *ctx, const void *inp, size_t len);
- void padlock_sha256(void *ctx, const void *inp, size_t len);
- /*
- * Load supported features of the CPU to see if the PadLock is available.
- */
- static int padlock_available(void)
- {
- unsigned int edx = padlock_capability();
- /* Fill up some flags */
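- /*
- * padlock_capability() returns the Centaur extended CPUID feature flags:
- * bits 2/3 report the RNG as present/enabled and bits 6/7 report ACE as
- * present/enabled, hence the two-bit masks below.
- */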
- padlock_use_ace = ((edx & (0x3 << 6)) == (0x3 << 6));
- padlock_use_rng = ((edx & (0x3 << 2)) == (0x3 << 2));
- return padlock_use_ace + padlock_use_rng;
- }
- /* ===== AES encryption/decryption ===== */
- # if defined(NID_aes_128_cfb128) && ! defined (NID_aes_128_cfb)
- # define NID_aes_128_cfb NID_aes_128_cfb128
- # endif
- # if defined(NID_aes_128_ofb128) && ! defined (NID_aes_128_ofb)
- # define NID_aes_128_ofb NID_aes_128_ofb128
- # endif
- # if defined(NID_aes_192_cfb128) && ! defined (NID_aes_192_cfb)
- # define NID_aes_192_cfb NID_aes_192_cfb128
- # endif
- # if defined(NID_aes_192_ofb128) && ! defined (NID_aes_192_ofb)
- # define NID_aes_192_ofb NID_aes_192_ofb128
- # endif
- # if defined(NID_aes_256_cfb128) && ! defined (NID_aes_256_cfb)
- # define NID_aes_256_cfb NID_aes_256_cfb128
- # endif
- # if defined(NID_aes_256_ofb128) && ! defined (NID_aes_256_ofb)
- # define NID_aes_256_ofb NID_aes_256_ofb128
- # endif
- /* List of supported ciphers. */
- static const int padlock_cipher_nids[] = {
- NID_aes_128_ecb,
- NID_aes_128_cbc,
- NID_aes_128_cfb,
- NID_aes_128_ofb,
- NID_aes_128_ctr,
- NID_aes_192_ecb,
- NID_aes_192_cbc,
- NID_aes_192_cfb,
- NID_aes_192_ofb,
- NID_aes_192_ctr,
- NID_aes_256_ecb,
- NID_aes_256_cbc,
- NID_aes_256_cfb,
- NID_aes_256_ofb,
- NID_aes_256_ctr
- };
- static int padlock_cipher_nids_num = (sizeof(padlock_cipher_nids) /
- sizeof(padlock_cipher_nids[0]));
- /* Function prototypes ... */
- static int padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
- const unsigned char *iv, int enc);
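- /*
- * The PadLock unit expects its control word, key and IV block to be
- * 16-byte aligned. EVP only guarantees malloc alignment for the cipher
- * data, so the macros below round the pointer up to the next 16-byte
- * boundary; the extra 16 bytes are reserved via
- * EVP_CIPHER_meth_set_impl_ctx_size() further down.
- */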
- # define NEAREST_ALIGNED(ptr) ( (unsigned char *)(ptr) + \
- ( (0x10 - ((size_t)(ptr) & 0x0F)) & 0x0F ) )
- # define ALIGNED_CIPHER_DATA(ctx) ((struct padlock_cipher_data *)\
- NEAREST_ALIGNED(EVP_CIPHER_CTX_get_cipher_data(ctx)))
- static int
- padlock_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
- const unsigned char *in_arg, size_t nbytes)
- {
- return padlock_ecb_encrypt(out_arg, in_arg,
- ALIGNED_CIPHER_DATA(ctx), nbytes);
- }
- static int
- padlock_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
- const unsigned char *in_arg, size_t nbytes)
- {
- struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
- int ret;
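- /*
- * The hardware reads the IV from, and writes the chaining value back to,
- * cdata->iv, so copy the EVP IV in beforehand and back out on success.
- */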
- memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
- if ((ret = padlock_cbc_encrypt(out_arg, in_arg, cdata, nbytes)))
- memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
- return ret;
- }
- static int
- padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
- const unsigned char *in_arg, size_t nbytes)
- {
- struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
- size_t chunk;
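- /*
- * A nonzero ctx->num means a previous call left a partial block; finish
- * it byte-by-byte through the EVP IV buffer before handing whole blocks
- * to the hardware.
- */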
- if ((chunk = EVP_CIPHER_CTX_get_num(ctx))) { /* borrow chunk variable */
- unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);
- if (chunk >= AES_BLOCK_SIZE)
- return 0; /* bogus value */
- if (EVP_CIPHER_CTX_is_encrypting(ctx))
- while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
- ivp[chunk] = *(out_arg++) = *(in_arg++) ^ ivp[chunk];
- chunk++, nbytes--;
- } else
- while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
- unsigned char c = *(in_arg++);
- *(out_arg++) = c ^ ivp[chunk];
- ivp[chunk++] = c, nbytes--;
- }
- EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
- }
- if (nbytes == 0)
- return 1;
- memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
- if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
- if (!padlock_cfb_encrypt(out_arg, in_arg, cdata, chunk))
- return 0;
- nbytes -= chunk;
- }
- if (nbytes) {
- unsigned char *ivp = cdata->iv;
- out_arg += chunk;
- in_arg += chunk;
- EVP_CIPHER_CTX_set_num(ctx, nbytes);
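- /*
- * CFB produces its keystream by encrypting the IV, so when decrypting
- * the control word is temporarily flipped to "encrypt" for the single
- * block below and restored afterwards.
- */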
- if (cdata->cword.b.encdec) {
- cdata->cword.b.encdec = 0;
- padlock_reload_key();
- padlock_aes_block(ivp, ivp, cdata);
- cdata->cword.b.encdec = 1;
- padlock_reload_key();
- while (nbytes) {
- unsigned char c = *(in_arg++);
- *(out_arg++) = c ^ *ivp;
- *(ivp++) = c, nbytes--;
- }
- } else {
- padlock_reload_key();
- padlock_aes_block(ivp, ivp, cdata);
- padlock_reload_key();
- while (nbytes) {
- *ivp = *(out_arg++) = *(in_arg++) ^ *ivp;
- ivp++, nbytes--;
- }
- }
- }
- memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
- return 1;
- }
- static int
- padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
- const unsigned char *in_arg, size_t nbytes)
- {
- struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
- size_t chunk;
- /*
- * ctx->num is maintained in byte-oriented modes, such as CFB and OFB...
- */
- if ((chunk = EVP_CIPHER_CTX_get_num(ctx))) { /* borrow chunk variable */
- unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);
- if (chunk >= AES_BLOCK_SIZE)
- return 0; /* bogus value */
- while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
- *(out_arg++) = *(in_arg++) ^ ivp[chunk];
- chunk++, nbytes--;
- }
- EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
- }
- if (nbytes == 0)
- return 1;
- memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
- if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
- if (!padlock_ofb_encrypt(out_arg, in_arg, cdata, chunk))
- return 0;
- nbytes -= chunk;
- }
- if (nbytes) {
- unsigned char *ivp = cdata->iv;
- out_arg += chunk;
- in_arg += chunk;
- EVP_CIPHER_CTX_set_num(ctx, nbytes);
- padlock_reload_key(); /* empirically found */
- padlock_aes_block(ivp, ivp, cdata);
- padlock_reload_key(); /* empirically found */
- while (nbytes) {
- *(out_arg++) = *(in_arg++) ^ *ivp;
- ivp++, nbytes--;
- }
- }
- memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
- return 1;
- }
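- /*
- * Glue for CRYPTO_ctr128_encrypt_ctr32(): adapts the ctr128_f argument
- * order and loads the counter block into the aligned PadLock context
- * before each hardware call.
- */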
- static void padlock_ctr32_encrypt_glue(const unsigned char *in,
- unsigned char *out, size_t blocks,
- struct padlock_cipher_data *ctx,
- const unsigned char *ivec)
- {
- memcpy(ctx->iv, ivec, AES_BLOCK_SIZE);
- padlock_ctr32_encrypt(out, in, ctx, AES_BLOCK_SIZE * blocks);
- }
- static int
- padlock_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
- const unsigned char *in_arg, size_t nbytes)
- {
- struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
- int n = EVP_CIPHER_CTX_get_num(ctx);
- unsigned int num;
- if (n < 0)
- return 0;
- num = (unsigned int)n;
- CRYPTO_ctr128_encrypt_ctr32(in_arg, out_arg, nbytes,
- cdata, EVP_CIPHER_CTX_iv_noconst(ctx),
- EVP_CIPHER_CTX_buf_noconst(ctx), &num,
- (ctr128_f) padlock_ctr32_encrypt_glue);
- EVP_CIPHER_CTX_set_num(ctx, (size_t)num);
- return 1;
- }
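- /*
- * EVP block sizes: ECB and CBC are genuine block modes, while CFB, OFB
- * and CTR behave as stream ciphers at the EVP layer and therefore use 1.
- */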
- # define EVP_CIPHER_block_size_ECB AES_BLOCK_SIZE
- # define EVP_CIPHER_block_size_CBC AES_BLOCK_SIZE
- # define EVP_CIPHER_block_size_OFB 1
- # define EVP_CIPHER_block_size_CFB 1
- # define EVP_CIPHER_block_size_CTR 1
- /*
- * Declaring so many ciphers by hand would be a pain. Instead introduce a bit
- * of preprocessor magic :-)
- */
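- /*
- * Each DECLARE_AES_EVP(ksize, lmode, umode) expands to a function that
- * lazily builds (and caches) a single EVP_CIPHER object for that key
- * size and mode, wired to the PadLock init and do_cipher routines.
- */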
- # define DECLARE_AES_EVP(ksize,lmode,umode) \
- static EVP_CIPHER *_hidden_aes_##ksize##_##lmode = NULL; \
- static const EVP_CIPHER *padlock_aes_##ksize##_##lmode(void) \
- { \
- if (_hidden_aes_##ksize##_##lmode == NULL \
- && ((_hidden_aes_##ksize##_##lmode = \
- EVP_CIPHER_meth_new(NID_aes_##ksize##_##lmode, \
- EVP_CIPHER_block_size_##umode, \
- AES_KEY_SIZE_##ksize)) == NULL \
- || !EVP_CIPHER_meth_set_iv_length(_hidden_aes_##ksize##_##lmode, \
- AES_BLOCK_SIZE) \
- || !EVP_CIPHER_meth_set_flags(_hidden_aes_##ksize##_##lmode, \
- 0 | EVP_CIPH_##umode##_MODE) \
- || !EVP_CIPHER_meth_set_init(_hidden_aes_##ksize##_##lmode, \
- padlock_aes_init_key) \
- || !EVP_CIPHER_meth_set_do_cipher(_hidden_aes_##ksize##_##lmode, \
- padlock_##lmode##_cipher) \
- || !EVP_CIPHER_meth_set_impl_ctx_size(_hidden_aes_##ksize##_##lmode, \
- sizeof(struct padlock_cipher_data) + 16) \
- || !EVP_CIPHER_meth_set_set_asn1_params(_hidden_aes_##ksize##_##lmode, \
- EVP_CIPHER_set_asn1_iv) \
- || !EVP_CIPHER_meth_set_get_asn1_params(_hidden_aes_##ksize##_##lmode, \
- EVP_CIPHER_get_asn1_iv))) { \
- EVP_CIPHER_meth_free(_hidden_aes_##ksize##_##lmode); \
- _hidden_aes_##ksize##_##lmode = NULL; \
- } \
- return _hidden_aes_##ksize##_##lmode; \
- }
- DECLARE_AES_EVP(128, ecb, ECB)
- DECLARE_AES_EVP(128, cbc, CBC)
- DECLARE_AES_EVP(128, cfb, CFB)
- DECLARE_AES_EVP(128, ofb, OFB)
- DECLARE_AES_EVP(128, ctr, CTR)
- DECLARE_AES_EVP(192, ecb, ECB)
- DECLARE_AES_EVP(192, cbc, CBC)
- DECLARE_AES_EVP(192, cfb, CFB)
- DECLARE_AES_EVP(192, ofb, OFB)
- DECLARE_AES_EVP(192, ctr, CTR)
- DECLARE_AES_EVP(256, ecb, ECB)
- DECLARE_AES_EVP(256, cbc, CBC)
- DECLARE_AES_EVP(256, cfb, CFB)
- DECLARE_AES_EVP(256, ofb, OFB)
- DECLARE_AES_EVP(256, ctr, CTR)
- static int
- padlock_ciphers(ENGINE *e, const EVP_CIPHER **cipher, const int **nids,
- int nid)
- {
- /* No specific cipher => return a list of supported nids ... */
- if (!cipher) {
- *nids = padlock_cipher_nids;
- return padlock_cipher_nids_num;
- }
- /* ... or the requested "cipher" otherwise */
- switch (nid) {
- case NID_aes_128_ecb:
- *cipher = padlock_aes_128_ecb();
- break;
- case NID_aes_128_cbc:
- *cipher = padlock_aes_128_cbc();
- break;
- case NID_aes_128_cfb:
- *cipher = padlock_aes_128_cfb();
- break;
- case NID_aes_128_ofb:
- *cipher = padlock_aes_128_ofb();
- break;
- case NID_aes_128_ctr:
- *cipher = padlock_aes_128_ctr();
- break;
- case NID_aes_192_ecb:
- *cipher = padlock_aes_192_ecb();
- break;
- case NID_aes_192_cbc:
- *cipher = padlock_aes_192_cbc();
- break;
- case NID_aes_192_cfb:
- *cipher = padlock_aes_192_cfb();
- break;
- case NID_aes_192_ofb:
- *cipher = padlock_aes_192_ofb();
- break;
- case NID_aes_192_ctr:
- *cipher = padlock_aes_192_ctr();
- break;
- case NID_aes_256_ecb:
- *cipher = padlock_aes_256_ecb();
- break;
- case NID_aes_256_cbc:
- *cipher = padlock_aes_256_cbc();
- break;
- case NID_aes_256_cfb:
- *cipher = padlock_aes_256_cfb();
- break;
- case NID_aes_256_ofb:
- *cipher = padlock_aes_256_ofb();
- break;
- case NID_aes_256_ctr:
- *cipher = padlock_aes_256_ctr();
- break;
- default:
- /* Sorry, we don't support this NID */
- *cipher = NULL;
- return 0;
- }
- return 1;
- }
- /* Prepare the encryption key for PadLock usage */
- static int
- padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
- const unsigned char *iv, int enc)
- {
- struct padlock_cipher_data *cdata;
- int key_len = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
- unsigned long mode = EVP_CIPHER_CTX_get_mode(ctx);
- if (key == NULL)
- return 0; /* ERROR */
- cdata = ALIGNED_CIPHER_DATA(ctx);
- memset(cdata, 0, sizeof(*cdata));
- /* Prepare Control word. */
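- /*
- * OFB and CTR only ever encrypt the keystream, so the hardware is kept
- * in encrypt mode regardless of the EVP direction; otherwise encdec
- * follows the EVP flag (0 = encrypt, 1 = decrypt).
- */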
- if (mode == EVP_CIPH_OFB_MODE || mode == EVP_CIPH_CTR_MODE)
- cdata->cword.b.encdec = 0;
- else
- cdata->cword.b.encdec = (EVP_CIPHER_CTX_is_encrypting(ctx) == 0);
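- /* 128/192/256-bit keys use 10/12/14 rounds; ksize encodes them as 0/1/2. */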
- cdata->cword.b.rounds = 10 + (key_len - 128) / 32;
- cdata->cword.b.ksize = (key_len - 128) / 64;
- switch (key_len) {
- case 128:
- /*
- * PadLock can generate an extended key for AES128 in hardware
- */
- memcpy(cdata->ks.rd_key, key, AES_KEY_SIZE_128);
- cdata->cword.b.keygen = 0;
- break;
- case 192:
- case 256:
- /*
- * Generate an extended AES key in software. Needed for AES192/AES256
- */
- /*
- * The above applies to stepping 8 CPUs and is listed as a hardware
- * erratum. It will most likely be fixed at some point, and a check for
- * the CPU stepping would then be due here.
- */
- if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
- && !enc)
- AES_set_decrypt_key(key, key_len, &cdata->ks);
- else
- AES_set_encrypt_key(key, key_len, &cdata->ks);
- # ifndef AES_ASM
- /*
- * OpenSSL C functions use byte-swapped extended key.
- */
- padlock_key_bswap(&cdata->ks);
- # endif
- cdata->cword.b.keygen = 1;
- break;
- default:
- /* ERROR */
- return 0;
- }
- /*
- * This is done to cover cases where the user reuses the context for a
- * new key. Without it, the padlock_*_cipher routines might proceed with
- * the old key...
- */
- padlock_reload_key();
- return 1;
- }
- /* ===== Random Number Generator ===== */
- /*
- * This code is not engaged. The reason is that it neither complies with
- * the recommendations for VIA RNG usage in secure applications (posted
- * at http://www.via.com.tw/en/viac3/c3.jsp) nor provides meaningful
- * error control...
- */
- /*
- * Wrapper that provides an interface between the API and the raw PadLock
- * RNG
- */
- static int padlock_rand_bytes(unsigned char *output, int count)
- {
- unsigned int eax, buf;
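- /*
- * padlock_xstore() wraps the XSTORE instruction; the returned status has
- * the stored byte count in its low 5 bits and "RNG enabled" in bit 6.
- * EDX selects the quality level: 0 yields an 8-byte chunk per call,
- * 3 yields a single, more heavily whitened byte.
- */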
- while (count >= 8) {
- eax = padlock_xstore(output, 0);
- if (!(eax & (1 << 6)))
- return 0; /* RNG disabled */
- /* this ---vv--- covers DC bias, Raw Bits and String Filter */
- if (eax & (0x1F << 10))
- return 0;
- if ((eax & 0x1F) == 0)
- continue; /* no data, retry... */
- if ((eax & 0x1F) != 8)
- return 0; /* fatal failure... */
- output += 8;
- count -= 8;
- }
- while (count > 0) {
- eax = padlock_xstore(&buf, 3);
- if (!(eax & (1 << 6)))
- return 0; /* RNG disabled */
- /* this ---vv--- covers DC bias, Raw Bits and String Filter */
- if (eax & (0x1F << 10))
- return 0;
- if ((eax & 0x1F) == 0)
- continue; /* no data, retry... */
- if ((eax & 0x1F) != 1)
- return 0; /* fatal failure... */
- *output++ = (unsigned char)buf;
- count--;
- }
- OPENSSL_cleanse(&buf, sizeof(buf));
- return 1;
- }
- /* Dummy but necessary function */
- static int padlock_rand_status(void)
- {
- return 1;
- }
- /* Prepare structure for registration */
- static RAND_METHOD padlock_rand = {
- NULL, /* seed */
- padlock_rand_bytes, /* bytes */
- NULL, /* cleanup */
- NULL, /* add */
- padlock_rand_bytes, /* pseudorand */
- padlock_rand_status, /* rand status */
- };
- # endif /* COMPILE_PADLOCKENG */
- #endif /* !OPENSSL_NO_PADLOCKENG */
- #if defined(OPENSSL_NO_PADLOCKENG) || !defined(COMPILE_PADLOCKENG)
- # ifndef OPENSSL_NO_DYNAMIC_ENGINE
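- /*
- * Stub bind function so that the dynamic engine still links on platforms
- * where PadLock support is not compiled in; returning 0 makes loading
- * fail cleanly.
- */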
- OPENSSL_EXPORT
- int bind_engine(ENGINE *e, const char *id, const dynamic_fns *fns);
- OPENSSL_EXPORT
- int bind_engine(ENGINE *e, const char *id, const dynamic_fns *fns)
- {
- return 0;
- }
- IMPLEMENT_DYNAMIC_CHECK_FN()
- # endif
- #endif