
crypto: qat - Fix incorrect uses of memzero_explicit

memzero_explicit should only be used on stack variables that get
zapped just before they go out of scope.

This patch replaces all unnecessary uses of memzero_explicit with
memset, removes two memzero_explicit calls altogether as the tfm
context comes pre-zeroed, and adds a missing memzero_explicit of
the stack variable buff in qat_alg_do_precomputes.  The memzeros
on ipad/opad + digest_size/auth_keylen are also removed as the
entire auth_state is already zeroed on entry.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Acked-by: Tadeusz Struk <tadeusz.struk@intel.com>
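As an aside for readers less familiar with the rule stated above, the following is a minimal illustrative sketch, not part of this patch; the function and buffer names (example_setkey, tmp, ctx_buf) are hypothetical. It shows when memzero_explicit() is warranted versus plain memset():

/* Illustrative sketch only, not part of this commit. */
#include <linux/string.h>
#include <linux/types.h>

static void example_setkey(const u8 *key, unsigned int keylen,
                           u8 *ctx_buf, size_t ctx_len)
{
        u8 tmp[64];                     /* sensitive stack copy */

        if (keylen > sizeof(tmp))
                keylen = sizeof(tmp);
        memcpy(tmp, key, keylen);

        /* ... use tmp to derive material into ctx_buf ... */

        /*
         * tmp is about to go out of scope, so a plain memset() could be
         * optimized away; memzero_explicit() forces the wipe.
         */
        memzero_explicit(tmp, sizeof(tmp));

        /*
         * ctx_buf outlives this function (e.g. a tfm context), so the
         * store remains visible to later code and memset() is sufficient.
         */
        memset(ctx_buf, 0, ctx_len);
}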
Herbert Xu 2015-01-06 07:54:41 +11:00
parent 15acabfd02
commit ad511e260a
1 changed file with 20 additions and 25 deletions

drivers/crypto/qat/qat_common/qat_algs.c

@@ -173,7 +173,7 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 	__be64 *hash512_state_out;
 	int i, offset;
 
-	memzero_explicit(auth_state.data, MAX_AUTH_STATE_SIZE + 64);
+	memset(auth_state.data, 0, sizeof(auth_state.data));
 	shash->tfm = ctx->hash_tfm;
 	shash->flags = 0x0;
@@ -186,13 +186,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 		memcpy(ipad, buff, digest_size);
 		memcpy(opad, buff, digest_size);
-		memzero_explicit(ipad + digest_size, block_size - digest_size);
-		memzero_explicit(opad + digest_size, block_size - digest_size);
+		memzero_explicit(buff, sizeof(buff));
 	} else {
 		memcpy(ipad, auth_key, auth_keylen);
 		memcpy(opad, auth_key, auth_keylen);
-		memzero_explicit(ipad + auth_keylen, block_size - auth_keylen);
-		memzero_explicit(opad + auth_keylen, block_size - auth_keylen);
 	}
 
 	for (i = 0; i < block_size; i++) {
@@ -582,10 +579,10 @@ static int qat_alg_aead_setkey(struct crypto_aead *tfm, const uint8_t *key,
 	if (ctx->enc_cd) {
 		/* rekeying */
 		dev = &GET_DEV(ctx->inst->accel_dev);
-		memzero_explicit(ctx->enc_cd, sizeof(*ctx->enc_cd));
-		memzero_explicit(ctx->dec_cd, sizeof(*ctx->dec_cd));
-		memzero_explicit(&ctx->enc_fw_req, sizeof(ctx->enc_fw_req));
-		memzero_explicit(&ctx->dec_fw_req, sizeof(ctx->dec_fw_req));
+		memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd));
+		memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd));
+		memset(&ctx->enc_fw_req, 0, sizeof(ctx->enc_fw_req));
+		memset(&ctx->dec_fw_req, 0, sizeof(ctx->dec_fw_req));
 	} else {
 		/* new key */
 		int node = get_current_node();
@@ -620,12 +617,12 @@ static int qat_alg_aead_setkey(struct crypto_aead *tfm, const uint8_t *key,
 	return 0;
 
 out_free_all:
-	memzero_explicit(ctx->dec_cd, sizeof(struct qat_alg_cd));
+	memset(ctx->dec_cd, 0, sizeof(struct qat_alg_cd));
 	dma_free_coherent(dev, sizeof(struct qat_alg_cd),
 			  ctx->dec_cd, ctx->dec_cd_paddr);
 	ctx->dec_cd = NULL;
 out_free_enc:
-	memzero_explicit(ctx->enc_cd, sizeof(struct qat_alg_cd));
+	memset(ctx->enc_cd, 0, sizeof(struct qat_alg_cd));
 	dma_free_coherent(dev, sizeof(struct qat_alg_cd),
 			  ctx->enc_cd, ctx->enc_cd_paddr);
 	ctx->enc_cd = NULL;
@@ -969,10 +966,10 @@ static int qat_alg_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
 	if (ctx->enc_cd) {
 		/* rekeying */
 		dev = &GET_DEV(ctx->inst->accel_dev);
-		memzero_explicit(ctx->enc_cd, sizeof(*ctx->enc_cd));
-		memzero_explicit(ctx->dec_cd, sizeof(*ctx->dec_cd));
-		memzero_explicit(&ctx->enc_fw_req, sizeof(ctx->enc_fw_req));
-		memzero_explicit(&ctx->dec_fw_req, sizeof(ctx->dec_fw_req));
+		memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd));
+		memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd));
+		memset(&ctx->enc_fw_req, 0, sizeof(ctx->enc_fw_req));
+		memset(&ctx->dec_fw_req, 0, sizeof(ctx->dec_fw_req));
 	} else {
 		/* new key */
 		int node = get_current_node();
@@ -1007,12 +1004,12 @@ static int qat_alg_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
 	return 0;
 
 out_free_all:
-	memzero_explicit(ctx->dec_cd, sizeof(*ctx->enc_cd));
+	memset(ctx->dec_cd, 0, sizeof(*ctx->enc_cd));
 	dma_free_coherent(dev, sizeof(*ctx->enc_cd),
 			  ctx->dec_cd, ctx->dec_cd_paddr);
 	ctx->dec_cd = NULL;
 out_free_enc:
-	memzero_explicit(ctx->enc_cd, sizeof(*ctx->dec_cd));
+	memset(ctx->enc_cd, 0, sizeof(*ctx->dec_cd));
 	dma_free_coherent(dev, sizeof(*ctx->dec_cd),
 			  ctx->enc_cd, ctx->enc_cd_paddr);
 	ctx->enc_cd = NULL;
@@ -1101,7 +1098,6 @@ static int qat_alg_aead_init(struct crypto_tfm *tfm,
 {
 	struct qat_alg_aead_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	memzero_explicit(ctx, sizeof(*ctx));
 	ctx->hash_tfm = crypto_alloc_shash(hash_name, 0, 0);
 	if (IS_ERR(ctx->hash_tfm))
 		return -EFAULT;
@@ -1142,12 +1138,12 @@ static void qat_alg_aead_exit(struct crypto_tfm *tfm)
 	dev = &GET_DEV(inst->accel_dev);
 
 	if (ctx->enc_cd) {
-		memzero_explicit(ctx->enc_cd, sizeof(struct qat_alg_cd));
+		memset(ctx->enc_cd, 0, sizeof(struct qat_alg_cd));
 		dma_free_coherent(dev, sizeof(struct qat_alg_cd),
 				  ctx->enc_cd, ctx->enc_cd_paddr);
 	}
 	if (ctx->dec_cd) {
-		memzero_explicit(ctx->dec_cd, sizeof(struct qat_alg_cd));
+		memset(ctx->dec_cd, 0, sizeof(struct qat_alg_cd));
 		dma_free_coherent(dev, sizeof(struct qat_alg_cd),
 				  ctx->dec_cd, ctx->dec_cd_paddr);
 	}
@@ -1158,7 +1154,6 @@ static int qat_alg_ablkcipher_init(struct crypto_tfm *tfm)
 {
 	struct qat_alg_ablkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	memzero_explicit(ctx, sizeof(*ctx));
 	spin_lock_init(&ctx->lock);
 	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
 					sizeof(struct qat_crypto_request);
@@ -1177,15 +1172,15 @@ static void qat_alg_ablkcipher_exit(struct crypto_tfm *tfm)
 	dev = &GET_DEV(inst->accel_dev);
 
 	if (ctx->enc_cd) {
-		memzero_explicit(ctx->enc_cd,
-				 sizeof(struct icp_qat_hw_cipher_algo_blk));
+		memset(ctx->enc_cd, 0,
+		       sizeof(struct icp_qat_hw_cipher_algo_blk));
 		dma_free_coherent(dev,
 				  sizeof(struct icp_qat_hw_cipher_algo_blk),
 				  ctx->enc_cd, ctx->enc_cd_paddr);
 	}
 	if (ctx->dec_cd) {
-		memzero_explicit(ctx->dec_cd,
-				 sizeof(struct icp_qat_hw_cipher_algo_blk));
+		memset(ctx->dec_cd, 0,
+		       sizeof(struct icp_qat_hw_cipher_algo_blk));
 		dma_free_coherent(dev,
 				  sizeof(struct icp_qat_hw_cipher_algo_blk),
 				  ctx->dec_cd, ctx->dec_cd_paddr);