#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/sha.h>
+#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
enum safexcel_cipher_alg alg;
bool aead;
- __le32 key[8];
+ __le32 key[16];
u32 nonce;
- unsigned int key_len;
+ unsigned int key_len, xts;
/* All the below is AEAD specific */
u32 hash_alg;
} else if (ctx->alg == SAFEXCEL_3DES) {
cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
} else if (ctx->alg == SAFEXCEL_AES) {
- switch (ctx->key_len) {
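+ /* key_len holds both keys for XTS, so shift by ctx->xts to get the AES key size */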
+ switch (ctx->key_len >> ctx->xts) {
case AES_KEYSIZE_128:
cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
break;
case AES_KEYSIZE_192:
cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
break;
case AES_KEYSIZE_256:
cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
break;
default:
dev_err(priv->dev, "aes keysize not supported: %u\n",
- ctx->key_len);
+ ctx->key_len >> ctx->xts);
return -EINVAL;
}
}
},
},
};
+
+static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
+ const u8 *key, unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct safexcel_crypto_priv *priv = ctx->priv;
+ struct crypto_aes_ctx aes;
+ int ret, i;
+ unsigned int keylen;
+
+ /* Check for illegal XTS keys */
+ ret = xts_verify_key(ctfm, key, len);
+ if (ret)
+ return ret;
+
+ /* Only half of the key data is cipher key */
+ keylen = (len >> 1);
+ ret = aes_expandkey(&aes, key, keylen);
+ if (ret) {
+ crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ return ret;
+ }
+
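+ /* if a context record may be cached in hardware, a changed key means it must be invalidated */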
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
+ for (i = 0; i < keylen / sizeof(u32); i++) {
+ if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+ ctx->base.needs_inv = true;
+ break;
+ }
+ }
+ }
+
+ for (i = 0; i < keylen / sizeof(u32); i++)
+ ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
+
+ /* The other half is the tweak key */
+ ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
+ if (ret) {
+ crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ return ret;
+ }
+
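+ /* same cache-invalidation check for the tweak key half of the context */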
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
+ for (i = 0; i < keylen / sizeof(u32); i++) {
+ if (ctx->key[i + keylen / sizeof(u32)] !=
+ cpu_to_le32(aes.key_enc[i])) {
+ ctx->base.needs_inv = true;
+ break;
+ }
+ }
+ }
+
+ for (i = 0; i < keylen / sizeof(u32); i++)
+ ctx->key[i + keylen / sizeof(u32)] =
+ cpu_to_le32(aes.key_enc[i]);
+
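+ /* key_len covers the cipher key and the tweak key combined */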
+ ctx->key_len = keylen << 1;
+
+ memzero_explicit(&aes, sizeof(aes));
+ return 0;
+}
+
+static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_skcipher_cra_init(tfm);
+ ctx->alg = SAFEXCEL_AES;
+ ctx->xts = 1;
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
+ return 0;
+}
+
+static int safexcel_encrypt_xts(struct skcipher_request *req)
+{
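+ /* XTS needs at least one full AES block of input */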
+ if (req->cryptlen < XTS_BLOCK_SIZE)
+ return -EINVAL;
+ return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+ SAFEXCEL_ENCRYPT);
+}
+
+static int safexcel_decrypt_xts(struct skcipher_request *req)
+{
+ if (req->cryptlen < XTS_BLOCK_SIZE)
+ return -EINVAL;
+ return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+ SAFEXCEL_DECRYPT);
+}
+
+struct safexcel_alg_template safexcel_alg_xts_aes = {
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = safexcel_skcipher_aesxts_setkey,
+ .encrypt = safexcel_encrypt_xts,
+ .decrypt = safexcel_decrypt_xts,
+ /* XTS actually uses 2 AES keys glued together */
+ .min_keysize = AES_MIN_KEY_SIZE * 2,
+ .max_keysize = AES_MAX_KEY_SIZE * 2,
+ .ivsize = XTS_BLOCK_SIZE,
+ .base = {
+ .cra_name = "xts(aes)",
+ .cra_driver_name = "safexcel-xts-aes",
+ .cra_priority = 300,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = XTS_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_skcipher_aes_xts_cra_init,
+ .cra_exit = safexcel_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};