author		Christian Lamparter <chunkeey@gmail.com>	2018-01-07 21:27:50 +0100
committer	Mathias Kresin <dev@kresin.me>			2018-01-12 08:00:04 +0100
commit		780477d17c0e73e54397a0f3cebff08634e7231a
tree		098826be0c3cbfb24fc1fc15219eed83d915e84a
parent		e4371779d2691553e6321e87d86212beaaa8b89d
apm821xx: backport crypto4xx patches from 4.15
This patch backports changes to crypto4xx in order to get the crypto4xx
driver operational.

Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
Diffstat (limited to 'target/linux/apm821xx/patches-4.14/020-0023-crypto-crypto4xx-prepare-for-AEAD-support.patch')
-rw-r--r--	target/linux/apm821xx/patches-4.14/020-0023-crypto-crypto4xx-prepare-for-AEAD-support.patch	617
1 file changed, 617 insertions(+), 0 deletions(-)
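
The centerpiece of the interface changes below is the new trailing assoclen
argument to crypto4xx_build_pd(): every existing ablkcipher and ahash call
site now passes 0, while the AEAD code added by the follow-up patches can
pass the associated-data length so the engine hashes, but does not encrypt,
the leading assoclen bytes of the request. A minimal sketch of what an AEAD
call site could look like under that convention (example_aead_encrypt() is
hypothetical and not part of this patch; the build_pd signature and helpers
are taken from the diff below):

	/* Hypothetical AEAD encrypt path, for illustration only.
	 * Associated data precedes the payload in req->src; the new
	 * trailing argument tells crypto4xx_build_pd() how many leading
	 * bytes are associated data rather than payload.
	 */
	static int example_aead_encrypt(struct aead_request *req)
	{
		struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
		__le32 iv[AES_IV_SIZE / 4];

		/* same byte-swap helper the ablkcipher paths use */
		crypto4xx_memcpy_to_le32(iv, req->iv, AES_IV_SIZE);

		return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
					  req->cryptlen, iv, AES_IV_SIZE,
					  ctx->sa_out, ctx->sa_len,
					  req->assoclen);
	}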
diff --git a/target/linux/apm821xx/patches-4.14/020-0023-crypto-crypto4xx-prepare-for-AEAD-support.patch b/target/linux/apm821xx/patches-4.14/020-0023-crypto-crypto4xx-prepare-for-AEAD-support.patch
new file mode 100644
index 0000000000..ff19c4f54e
--- /dev/null
+++ b/target/linux/apm821xx/patches-4.14/020-0023-crypto-crypto4xx-prepare-for-AEAD-support.patch
@@ -0,0 +1,617 @@
+From a0aae821ba3d35a49d4d0143dfb0c07eee22130e Mon Sep 17 00:00:00 2001
+From: Christian Lamparter <chunkeey@gmail.com>
+Date: Wed, 4 Oct 2017 01:00:15 +0200
+Subject: [PATCH 23/25] crypto: crypto4xx - prepare for AEAD support
+
+This patch enhances existing interfaces and
+functions to support AEAD ciphers in the next
+patches.
+
+Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
+Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
+---
+ drivers/crypto/amcc/crypto4xx_alg.c | 19 +--
+ drivers/crypto/amcc/crypto4xx_core.c | 217 +++++++++++++++++++++++++++--------
+ drivers/crypto/amcc/crypto4xx_core.h | 22 ++--
+ drivers/crypto/amcc/crypto4xx_sa.h | 41 +++++++
+ 4 files changed, 226 insertions(+), 73 deletions(-)
+
+--- a/drivers/crypto/amcc/crypto4xx_alg.c
++++ b/drivers/crypto/amcc/crypto4xx_alg.c
+@@ -26,6 +26,7 @@
+ #include <crypto/internal/hash.h>
+ #include <linux/dma-mapping.h>
+ #include <crypto/algapi.h>
++#include <crypto/aead.h>
+ #include <crypto/aes.h>
+ #include <crypto/sha.h>
+ #include <crypto/ctr.h>
+@@ -83,7 +84,7 @@ int crypto4xx_encrypt(struct ablkcipher_
+ crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
+
+ return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
+- req->nbytes, iv, ivlen, ctx->sa_out, ctx->sa_len);
++ req->nbytes, iv, ivlen, ctx->sa_out, ctx->sa_len, 0);
+ }
+
+ int crypto4xx_decrypt(struct ablkcipher_request *req)
+@@ -97,7 +98,7 @@ int crypto4xx_decrypt(struct ablkcipher_
+ crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
+
+ return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
+- req->nbytes, iv, ivlen, ctx->sa_in, ctx->sa_len);
++ req->nbytes, iv, ivlen, ctx->sa_in, ctx->sa_len, 0);
+ }
+
+ /**
+@@ -213,7 +214,7 @@ int crypto4xx_rfc3686_encrypt(struct abl
+
+ return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
+ req->nbytes, iv, AES_IV_SIZE,
+- ctx->sa_out, ctx->sa_len);
++ ctx->sa_out, ctx->sa_len, 0);
+ }
+
+ int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
+@@ -227,7 +228,7 @@ int crypto4xx_rfc3686_decrypt(struct abl
+
+ return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
+ req->nbytes, iv, AES_IV_SIZE,
+- ctx->sa_out, ctx->sa_len);
++ ctx->sa_out, ctx->sa_len, 0);
+ }
+
+ /**
+@@ -239,11 +240,13 @@ static int crypto4xx_hash_alg_init(struc
+ unsigned char hm)
+ {
+ struct crypto_alg *alg = tfm->__crt_alg;
+- struct crypto4xx_alg *my_alg = crypto_alg_to_crypto4xx_alg(alg);
++ struct crypto4xx_alg *my_alg;
+ struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct dynamic_sa_hash160 *sa;
+ int rc;
+
++ my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
++ alg.u.hash);
+ ctx->dev = my_alg->dev;
+
+ /* Create SA */
+@@ -300,7 +303,7 @@ int crypto4xx_hash_update(struct ahash_r
+
+ return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
+ req->nbytes, NULL, 0, ctx->sa_in,
+- ctx->sa_len);
++ ctx->sa_len, 0);
+ }
+
+ int crypto4xx_hash_final(struct ahash_request *req)
+@@ -319,7 +322,7 @@ int crypto4xx_hash_digest(struct ahash_r
+
+ return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
+ req->nbytes, NULL, 0, ctx->sa_in,
+- ctx->sa_len);
++ ctx->sa_len, 0);
+ }
+
+ /**
+@@ -330,5 +333,3 @@ int crypto4xx_sha1_alg_init(struct crypt
+ return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
+ SA_HASH_MODE_HASH);
+ }
+-
+-
+--- a/drivers/crypto/amcc/crypto4xx_core.c
++++ b/drivers/crypto/amcc/crypto4xx_core.c
+@@ -35,10 +35,12 @@
+ #include <asm/dcr.h>
+ #include <asm/dcr-regs.h>
+ #include <asm/cacheflush.h>
++#include <crypto/aead.h>
+ #include <crypto/aes.h>
+ #include <crypto/ctr.h>
+ #include <crypto/sha.h>
+ #include <crypto/scatterwalk.h>
++#include <crypto/internal/aead.h>
+ #include <crypto/internal/skcipher.h>
+ #include "crypto4xx_reg_def.h"
+ #include "crypto4xx_core.h"
+@@ -518,7 +520,7 @@ static void crypto4xx_ret_sg_desc(struct
+ }
+ }
+
+-static u32 crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
++static void crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
+ struct pd_uinfo *pd_uinfo,
+ struct ce_pd *pd)
+ {
+@@ -543,11 +545,9 @@ static u32 crypto4xx_ablkcipher_done(str
+ if (pd_uinfo->state & PD_ENTRY_BUSY)
+ ablkcipher_request_complete(ablk_req, -EINPROGRESS);
+ ablkcipher_request_complete(ablk_req, 0);
+-
+- return 0;
+ }
+
+-static u32 crypto4xx_ahash_done(struct crypto4xx_device *dev,
++static void crypto4xx_ahash_done(struct crypto4xx_device *dev,
+ struct pd_uinfo *pd_uinfo)
+ {
+ struct crypto4xx_ctx *ctx;
+@@ -563,20 +563,88 @@ static u32 crypto4xx_ahash_done(struct c
+ if (pd_uinfo->state & PD_ENTRY_BUSY)
+ ahash_request_complete(ahash_req, -EINPROGRESS);
+ ahash_request_complete(ahash_req, 0);
++}
+
+- return 0;
++static void crypto4xx_aead_done(struct crypto4xx_device *dev,
++ struct pd_uinfo *pd_uinfo,
++ struct ce_pd *pd)
++{
++ struct aead_request *aead_req;
++ struct crypto4xx_ctx *ctx;
++ struct scatterlist *dst = pd_uinfo->dest_va;
++ int err = 0;
++
++ aead_req = container_of(pd_uinfo->async_req, struct aead_request,
++ base);
++ ctx = crypto_tfm_ctx(aead_req->base.tfm);
++
++ if (pd_uinfo->using_sd) {
++ crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo,
++ pd->pd_ctl_len.bf.pkt_len,
++ dst);
++ } else {
++ __dma_sync_page(sg_page(dst), dst->offset, dst->length,
++ DMA_FROM_DEVICE);
++ }
++
++ if (pd_uinfo->sa_va->sa_command_0.bf.dir == DIR_OUTBOUND) {
++ /* append icv at the end */
++ size_t cp_len = crypto_aead_authsize(
++ crypto_aead_reqtfm(aead_req));
++ u32 icv[cp_len];
++
++ crypto4xx_memcpy_from_le32(icv, pd_uinfo->sr_va->save_digest,
++ cp_len);
++
++ scatterwalk_map_and_copy(icv, dst, aead_req->cryptlen,
++ cp_len, 1);
++ }
++
++ crypto4xx_ret_sg_desc(dev, pd_uinfo);
++
++ if (pd->pd_ctl.bf.status & 0xff) {
++ if (pd->pd_ctl.bf.status & 0x1) {
++ /* authentication error */
++ err = -EBADMSG;
++ } else {
++ if (!__ratelimit(&dev->aead_ratelimit)) {
++ if (pd->pd_ctl.bf.status & 2)
++ pr_err("pad fail error\n");
++ if (pd->pd_ctl.bf.status & 4)
++ pr_err("seqnum fail\n");
++ if (pd->pd_ctl.bf.status & 8)
++ pr_err("error _notify\n");
++ pr_err("aead return err status = 0x%02x\n",
++ pd->pd_ctl.bf.status & 0xff);
++ pr_err("pd pad_ctl = 0x%08x\n",
++ pd->pd_ctl.bf.pd_pad_ctl);
++ }
++ err = -EINVAL;
++ }
++ }
++
++ if (pd_uinfo->state & PD_ENTRY_BUSY)
++ aead_request_complete(aead_req, -EINPROGRESS);
++
++ aead_request_complete(aead_req, err);
+ }
+
+-static u32 crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx)
++static void crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx)
+ {
+ struct ce_pd *pd = &dev->pdr[idx];
+ struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx];
+
+- if (crypto_tfm_alg_type(pd_uinfo->async_req->tfm) ==
+- CRYPTO_ALG_TYPE_ABLKCIPHER)
+- return crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
+- else
+- return crypto4xx_ahash_done(dev, pd_uinfo);
++ switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {
++ case CRYPTO_ALG_TYPE_ABLKCIPHER:
++ crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
++ break;
++ case CRYPTO_ALG_TYPE_AEAD:
++ crypto4xx_aead_done(dev, pd_uinfo, pd);
++ break;
++ case CRYPTO_ALG_TYPE_AHASH:
++ crypto4xx_ahash_done(dev, pd_uinfo);
++ break;
++ }
+ }
+
+ static void crypto4xx_stop_all(struct crypto4xx_core_device *core_dev)
+@@ -612,8 +680,10 @@ int crypto4xx_build_pd(struct crypto_asy
+ const unsigned int datalen,
+ const __le32 *iv, const u32 iv_len,
+ const struct dynamic_sa_ctl *req_sa,
+- const unsigned int sa_len)
++ const unsigned int sa_len,
++ const unsigned int assoclen)
+ {
++ struct scatterlist _dst[2];
+ struct crypto4xx_device *dev = ctx->dev;
+ struct dynamic_sa_ctl *sa;
+ struct ce_gd *gd;
+@@ -627,18 +697,25 @@ int crypto4xx_build_pd(struct crypto_asy
+ unsigned int nbytes = datalen;
+ size_t offset_to_sr_ptr;
+ u32 gd_idx = 0;
++ int tmp;
+ bool is_busy;
+
+- /* figure how many gd is needed */
+- num_gd = sg_nents_for_len(src, datalen);
+- if ((int)num_gd < 0) {
++ /* figure how many gd are needed */
++ tmp = sg_nents_for_len(src, assoclen + datalen);
++ if (tmp < 0) {
+ dev_err(dev->core_dev->device, "Invalid number of src SG.\n");
+- return -EINVAL;
++ return tmp;
+ }
+- if (num_gd == 1)
+- num_gd = 0;
++ if (tmp == 1)
++ tmp = 0;
++ num_gd = tmp;
+
+- /* figure how many sd is needed */
++ if (assoclen) {
++ nbytes += assoclen;
++ dst = scatterwalk_ffwd(_dst, dst, assoclen);
++ }
++
++ /* figure how many sd are needed */
+ if (sg_is_last(dst)) {
+ num_sd = 0;
+ } else {
+@@ -724,6 +801,7 @@ int crypto4xx_build_pd(struct crypto_asy
+ sa = pd_uinfo->sa_va;
+ memcpy(sa, req_sa, sa_len * 4);
+
++ sa->sa_command_1.bf.hash_crypto_offset = (assoclen >> 2);
+ offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(sa);
+ *(u32 *)((unsigned long)sa + offset_to_sr_ptr) = pd_uinfo->sr_pa;
+
+@@ -830,7 +908,7 @@ int crypto4xx_build_pd(struct crypto_asy
+ ((crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AHASH) |
+ (crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AEAD) ?
+ PD_CTL_HASH_FINAL : 0);
+- pd->pd_ctl_len.w = 0x00400000 | datalen;
++ pd->pd_ctl_len.w = 0x00400000 | (assoclen + datalen);
+ pd_uinfo->state = PD_ENTRY_INUSE | (is_busy ? PD_ENTRY_BUSY : 0);
+
+ wmb();
+@@ -843,40 +921,68 @@ int crypto4xx_build_pd(struct crypto_asy
+ /**
+ * Algorithm Registration Functions
+ */
+-static int crypto4xx_alg_init(struct crypto_tfm *tfm)
++static void crypto4xx_ctx_init(struct crypto4xx_alg *amcc_alg,
++ struct crypto4xx_ctx *ctx)
+ {
+- struct crypto_alg *alg = tfm->__crt_alg;
+- struct crypto4xx_alg *amcc_alg = crypto_alg_to_crypto4xx_alg(alg);
+- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
+-
+ ctx->dev = amcc_alg->dev;
+ ctx->sa_in = NULL;
+ ctx->sa_out = NULL;
+ ctx->sa_len = 0;
++}
+
+- switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
+- default:
+- tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
+- break;
+- case CRYPTO_ALG_TYPE_AHASH:
+- crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
+- sizeof(struct crypto4xx_ctx));
+- break;
+- }
++static int crypto4xx_ablk_init(struct crypto_tfm *tfm)
++{
++ struct crypto_alg *alg = tfm->__crt_alg;
++ struct crypto4xx_alg *amcc_alg;
++ struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
+
++ amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
++ crypto4xx_ctx_init(amcc_alg, ctx);
++ tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
+ return 0;
+ }
+
+-static void crypto4xx_alg_exit(struct crypto_tfm *tfm)
++static void crypto4xx_common_exit(struct crypto4xx_ctx *ctx)
+ {
+- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
+-
+ crypto4xx_free_sa(ctx);
+ }
+
+-int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
+- struct crypto4xx_alg_common *crypto_alg,
+- int array_size)
++static void crypto4xx_ablk_exit(struct crypto_tfm *tfm)
++{
++ crypto4xx_common_exit(crypto_tfm_ctx(tfm));
++}
++
++static int crypto4xx_aead_init(struct crypto_aead *tfm)
++{
++ struct aead_alg *alg = crypto_aead_alg(tfm);
++ struct crypto4xx_ctx *ctx = crypto_aead_ctx(tfm);
++ struct crypto4xx_alg *amcc_alg;
++
++ ctx->sw_cipher.aead = crypto_alloc_aead(alg->base.cra_name, 0,
++ CRYPTO_ALG_NEED_FALLBACK |
++ CRYPTO_ALG_ASYNC);
++ if (IS_ERR(ctx->sw_cipher.aead))
++ return PTR_ERR(ctx->sw_cipher.aead);
++
++ amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.aead);
++ crypto4xx_ctx_init(amcc_alg, ctx);
++ crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
++ max(sizeof(struct crypto4xx_ctx), 32 +
++ crypto_aead_reqsize(ctx->sw_cipher.aead)));
++ return 0;
++}
++
++static void crypto4xx_aead_exit(struct crypto_aead *tfm)
++{
++ struct crypto4xx_ctx *ctx = crypto_aead_ctx(tfm);
++
++ crypto4xx_common_exit(ctx);
++ crypto_free_aead(ctx->sw_cipher.aead);
++}
++
++static int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
++ struct crypto4xx_alg_common *crypto_alg,
++ int array_size)
+ {
+ struct crypto4xx_alg *alg;
+ int i;
+@@ -891,6 +997,10 @@ int crypto4xx_register_alg(struct crypto
+ alg->dev = sec_dev;
+
+ switch (alg->alg.type) {
++ case CRYPTO_ALG_TYPE_AEAD:
++ rc = crypto_register_aead(&alg->alg.u.aead);
++ break;
++
+ case CRYPTO_ALG_TYPE_AHASH:
+ rc = crypto_register_ahash(&alg->alg.u.hash);
+ break;
+@@ -920,6 +1030,10 @@ static void crypto4xx_unregister_alg(str
+ crypto_unregister_ahash(&alg->alg.u.hash);
+ break;
+
++ case CRYPTO_ALG_TYPE_AEAD:
++ crypto_unregister_aead(&alg->alg.u.aead);
++ break;
++
+ default:
+ crypto_unregister_alg(&alg->alg.u.cipher);
+ }
+@@ -973,7 +1087,7 @@ static irqreturn_t crypto4xx_ce_interrup
+ /**
+ * Supported Crypto Algorithms
+ */
+-struct crypto4xx_alg_common crypto4xx_alg[] = {
++static struct crypto4xx_alg_common crypto4xx_alg[] = {
+ /* Crypto AES modes */
+ { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
+ .cra_name = "cbc(aes)",
+@@ -985,8 +1099,8 @@ struct crypto4xx_alg_common crypto4xx_al
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+ .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_alg_init,
+- .cra_exit = crypto4xx_alg_exit,
++ .cra_init = crypto4xx_ablk_init,
++ .cra_exit = crypto4xx_ablk_exit,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .ablkcipher = {
+@@ -1009,8 +1123,8 @@ struct crypto4xx_alg_common crypto4xx_al
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+ .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_alg_init,
+- .cra_exit = crypto4xx_alg_exit,
++ .cra_init = crypto4xx_ablk_init,
++ .cra_exit = crypto4xx_ablk_exit,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .ablkcipher = {
+@@ -1033,8 +1147,8 @@ struct crypto4xx_alg_common crypto4xx_al
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+ .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_alg_init,
+- .cra_exit = crypto4xx_alg_exit,
++ .cra_init = crypto4xx_ablk_init,
++ .cra_exit = crypto4xx_ablk_exit,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .ablkcipher = {
+@@ -1059,8 +1173,8 @@ struct crypto4xx_alg_common crypto4xx_al
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+ .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_alg_init,
+- .cra_exit = crypto4xx_alg_exit,
++ .cra_init = crypto4xx_ablk_init,
++ .cra_exit = crypto4xx_ablk_exit,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .ablkcipher = {
+@@ -1082,8 +1196,8 @@ struct crypto4xx_alg_common crypto4xx_al
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+ .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_alg_init,
+- .cra_exit = crypto4xx_alg_exit,
++ .cra_init = crypto4xx_ablk_init,
++ .cra_exit = crypto4xx_ablk_exit,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .ablkcipher = {
+@@ -1149,6 +1263,7 @@ static int crypto4xx_probe(struct platfo
+ core_dev->device = dev;
+ spin_lock_init(&core_dev->lock);
+ INIT_LIST_HEAD(&core_dev->dev->alg_list);
++ ratelimit_default_init(&core_dev->dev->aead_ratelimit);
+ rc = crypto4xx_build_pdr(core_dev->dev);
+ if (rc)
+ goto err_build_pdr;
+--- a/drivers/crypto/amcc/crypto4xx_core.h
++++ b/drivers/crypto/amcc/crypto4xx_core.h
+@@ -22,7 +22,9 @@
+ #ifndef __CRYPTO4XX_CORE_H__
+ #define __CRYPTO4XX_CORE_H__
+
++#include <linux/ratelimit.h>
+ #include <crypto/internal/hash.h>
++#include <crypto/internal/aead.h>
+ #include "crypto4xx_reg_def.h"
+ #include "crypto4xx_sa.h"
+
+@@ -106,6 +108,7 @@ struct crypto4xx_device {
+ struct pd_uinfo *pdr_uinfo;
+ struct list_head alg_list; /* List of algorithm supported
+ by this device */
++ struct ratelimit_state aead_ratelimit;
+ };
+
+ struct crypto4xx_core_device {
+@@ -125,6 +128,9 @@ struct crypto4xx_ctx {
+ struct dynamic_sa_ctl *sa_out;
+ __le32 iv_nonce;
+ u32 sa_len;
++ union {
++ struct crypto_aead *aead;
++ } sw_cipher;
+ };
+
+ struct crypto4xx_alg_common {
+@@ -132,6 +138,7 @@ struct crypto4xx_alg_common {
+ union {
+ struct crypto_alg cipher;
+ struct ahash_alg hash;
++ struct aead_alg aead;
+ } u;
+ };
+
+@@ -141,18 +148,6 @@ struct crypto4xx_alg {
+ struct crypto4xx_device *dev;
+ };
+
+-static inline struct crypto4xx_alg *crypto_alg_to_crypto4xx_alg(
+- struct crypto_alg *x)
+-{
+- switch (x->cra_flags & CRYPTO_ALG_TYPE_MASK) {
+- case CRYPTO_ALG_TYPE_AHASH:
+- return container_of(__crypto_ahash_alg(x),
+- struct crypto4xx_alg, alg.u.hash);
+- }
+-
+- return container_of(x, struct crypto4xx_alg, alg.u.cipher);
+-}
+-
+ int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size);
+ void crypto4xx_free_sa(struct crypto4xx_ctx *ctx);
+ void crypto4xx_free_ctx(struct crypto4xx_ctx *ctx);
+@@ -163,7 +158,8 @@ int crypto4xx_build_pd(struct crypto_asy
+ const unsigned int datalen,
+ const __le32 *iv, const u32 iv_len,
+ const struct dynamic_sa_ctl *sa,
+- const unsigned int sa_len);
++ const unsigned int sa_len,
++ const unsigned int assoclen);
+ int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
+ const u8 *key, unsigned int keylen);
+ int crypto4xx_setkey_aes_cfb(struct crypto_ablkcipher *cipher,
+--- a/drivers/crypto/amcc/crypto4xx_sa.h
++++ b/drivers/crypto/amcc/crypto4xx_sa.h
+@@ -55,6 +55,8 @@ union dynamic_sa_contents {
+ #define SA_OP_GROUP_BASIC 0
+ #define SA_OPCODE_ENCRYPT 0
+ #define SA_OPCODE_DECRYPT 0
++#define SA_OPCODE_ENCRYPT_HASH 1
++#define SA_OPCODE_HASH_DECRYPT 1
+ #define SA_OPCODE_HASH 3
+ #define SA_CIPHER_ALG_DES 0
+ #define SA_CIPHER_ALG_3DES 1
+@@ -65,6 +67,8 @@ union dynamic_sa_contents {
+
+ #define SA_HASH_ALG_MD5 0
+ #define SA_HASH_ALG_SHA1 1
++#define SA_HASH_ALG_GHASH 12
++#define SA_HASH_ALG_CBC_MAC 14
+ #define SA_HASH_ALG_NULL 15
+ #define SA_HASH_ALG_SHA1_DIGEST_SIZE 20
+
+@@ -234,6 +238,36 @@ struct dynamic_sa_aes256 {
+ #define SA_AES_CONTENTS 0x3e000002
+
+ /**
++ * Security Association (SA) for AES128 CCM
++ */
++struct dynamic_sa_aes128_ccm {
++ struct dynamic_sa_ctl ctrl;
++ __le32 key[4];
++ __le32 iv[4];
++ u32 state_ptr;
++ u32 reserved;
++} __packed;
++#define SA_AES128_CCM_LEN (sizeof(struct dynamic_sa_aes128_ccm)/4)
++#define SA_AES128_CCM_CONTENTS 0x3e000042
++#define SA_AES_CCM_CONTENTS 0x3e000002
++
++/**
++ * Security Association (SA) for AES128_GCM
++ */
++struct dynamic_sa_aes128_gcm {
++ struct dynamic_sa_ctl ctrl;
++ __le32 key[4];
++ __le32 inner_digest[4];
++ __le32 iv[4];
++ u32 state_ptr;
++ u32 reserved;
++} __packed;
++
++#define SA_AES128_GCM_LEN (sizeof(struct dynamic_sa_aes128_gcm)/4)
++#define SA_AES128_GCM_CONTENTS 0x3e000442
++#define SA_AES_GCM_CONTENTS 0x3e000402
++
++/**
+ * Security Association (SA) for HASH160: HMAC-SHA1
+ */
+ struct dynamic_sa_hash160 {
+@@ -274,4 +308,11 @@ static inline __le32 *get_dynamic_sa_key
+ return (__le32 *) ((unsigned long)cts + sizeof(struct dynamic_sa_ctl));
+ }
+
++static inline __le32 *get_dynamic_sa_inner_digest(struct dynamic_sa_ctl *cts)
++{
++ return (__le32 *) ((unsigned long)cts +
++ sizeof(struct dynamic_sa_ctl) +
++ cts->sa_contents.bf.key_size * 4);
++}
++
+ #endif