[CRYPTO] skcipher: Add skcipher_geniv_alloc/skcipher_geniv_free

This patch adds the infrastructure needed to construct givcipher templates
that wrap an existing blkcipher/ablkcipher algorithm and extend it with an
IV generator.
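
For reference (not part of this patch), a hypothetical "myiv" template
sketches roughly how the new helpers are meant to be wired together.  The
myiv_* names and the placeholder givencrypt body are illustrative
assumptions only; real generators such as chainiv or eseqiv supply their
own IV generation logic and per-tfm context size:

	#include <crypto/internal/skcipher.h>
	#include <linux/err.h>
	#include <linux/errno.h>
	#include <linux/init.h>
	#include <linux/kernel.h>
	#include <linux/module.h>

	static struct crypto_template myiv_tmpl;

	static int myiv_givencrypt(struct skcipher_givcrypt_request *req)
	{
		/*
		 * Generate req->giv here, then forward the request to the
		 * underlying cipher that skcipher_geniv_init() stored in
		 * crt_ablkcipher.base.  Omitted in this sketch.
		 */
		return -ENOSYS;
	}

	static struct crypto_instance *myiv_alloc(struct rtattr **tb)
	{
		struct crypto_instance *inst;

		/*
		 * Wrap the requested blkcipher/ablkcipher; keysize, ivsize,
		 * priority etc. are inherited from the spawned algorithm.
		 */
		inst = skcipher_geniv_alloc(&myiv_tmpl, tb, 0, 0);
		if (IS_ERR(inst))
			return inst;

		inst->alg.cra_ablkcipher.givencrypt = myiv_givencrypt;
		inst->alg.cra_init = skcipher_geniv_init;
		inst->alg.cra_exit = skcipher_geniv_exit;
		/* A real template would also set inst->alg.cra_ctxsize
		 * for its per-tfm state. */

		return inst;
	}

	static struct crypto_template myiv_tmpl = {
		.name = "myiv",
		.alloc = myiv_alloc,
		.free = skcipher_geniv_free,
		.module = THIS_MODULE,
	};

	static int __init myiv_module_init(void)
	{
		return crypto_register_template(&myiv_tmpl);
	}

	static void __exit myiv_module_exit(void)
	{
		crypto_unregister_template(&myiv_tmpl);
	}

	module_init(myiv_module_init);
	module_exit(myiv_module_exit);
	MODULE_LICENSE("GPL");

When the template name matches the wrapped algorithm's declared (or
default) IV generator, i.e. the CRYPTO_ALG_GENIV path, skcipher_geniv_alloc()
elides the template name and registers the instance under the plain
algorithm name after double-checking the generator.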

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index 8f48b4d..092d965 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -80,6 +80,7 @@
 	crt->setkey = setkey;
 	crt->encrypt = alg->encrypt;
 	crt->decrypt = alg->decrypt;
+	crt->base = __crypto_ablkcipher_cast(tfm);
 	crt->ivsize = alg->ivsize;
 
 	return 0;
@@ -122,11 +123,13 @@
 	if (alg->ivsize > PAGE_SIZE / 8)
 		return -EINVAL;
 
-	crt->setkey = setkey;
+	crt->setkey = tfm->__crt_alg->cra_flags & CRYPTO_ALG_GENIV ?
+		      alg->setkey : setkey;
 	crt->encrypt = alg->encrypt;
 	crt->decrypt = alg->decrypt;
 	crt->givencrypt = alg->givencrypt;
 	crt->givdecrypt = alg->givdecrypt ?: no_givdecrypt;
+	crt->base = __crypto_ablkcipher_cast(tfm);
 	crt->ivsize = alg->ivsize;
 
 	return 0;
@@ -155,6 +158,11 @@
 };
 EXPORT_SYMBOL_GPL(crypto_givcipher_type);
 
+const char *crypto_default_geniv(const struct crypto_alg *alg)
+{
+	return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv";
+}
+
 int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
 			 u32 type, u32 mask)
 {
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index 7a3e1a3..ca6ef06 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -14,8 +14,8 @@
  *
  */
 
+#include <crypto/internal/skcipher.h>
 #include <crypto/scatterwalk.h>
-#include <linux/crypto.h>
 #include <linux/errno.h>
 #include <linux/hardirq.h>
 #include <linux/kernel.h>
@@ -450,6 +450,7 @@
 	crt->setkey = async_setkey;
 	crt->encrypt = async_encrypt;
 	crt->decrypt = async_decrypt;
+	crt->base = __crypto_ablkcipher_cast(tfm);
 	crt->ivsize = alg->ivsize;
 
 	return 0;
@@ -509,5 +510,187 @@
 };
 EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
 
+static int crypto_grab_nivcipher(struct crypto_skcipher_spawn *spawn,
+				const char *name, u32 type, u32 mask)
+{
+	struct crypto_alg *alg;
+	int err;
+
+	type = crypto_skcipher_type(type);
+	mask = crypto_skcipher_mask(mask) | CRYPTO_ALG_GENIV;
+
+	alg = crypto_alg_mod_lookup(name, type, mask);
+	if (IS_ERR(alg))
+		return PTR_ERR(alg);
+
+	err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
+	crypto_mod_put(alg);
+	return err;
+}
+
+struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
+					     struct rtattr **tb, u32 type,
+					     u32 mask)
+{
+	struct {
+		int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
+			      unsigned int keylen);
+		int (*encrypt)(struct ablkcipher_request *req);
+		int (*decrypt)(struct ablkcipher_request *req);
+
+		unsigned int min_keysize;
+		unsigned int max_keysize;
+		unsigned int ivsize;
+
+		const char *geniv;
+	} balg;
+	const char *name;
+	struct crypto_skcipher_spawn *spawn;
+	struct crypto_attr_type *algt;
+	struct crypto_instance *inst;
+	struct crypto_alg *alg;
+	int err;
+
+	algt = crypto_get_attr_type(tb);
+	err = PTR_ERR(algt);
+	if (IS_ERR(algt))
+		return ERR_PTR(err);
+
+	if ((algt->type ^ (CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV)) &
+	    algt->mask)
+		return ERR_PTR(-EINVAL);
+
+	name = crypto_attr_alg_name(tb[1]);
+	err = PTR_ERR(name);
+	if (IS_ERR(name))
+		return ERR_PTR(err);
+
+	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+	if (!inst)
+		return ERR_PTR(-ENOMEM);
+
+	spawn = crypto_instance_ctx(inst);
+
+	/* Ignore async algorithms if necessary. */
+	mask |= crypto_requires_sync(algt->type, algt->mask);
+
+	crypto_set_skcipher_spawn(spawn, inst);
+	err = crypto_grab_nivcipher(spawn, name, type, mask);
+	if (err)
+		goto err_free_inst;
+
+	alg = crypto_skcipher_spawn_alg(spawn);
+
+	if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	    CRYPTO_ALG_TYPE_BLKCIPHER) {
+		balg.ivsize = alg->cra_blkcipher.ivsize;
+		balg.min_keysize = alg->cra_blkcipher.min_keysize;
+		balg.max_keysize = alg->cra_blkcipher.max_keysize;
+
+		balg.setkey = async_setkey;
+		balg.encrypt = async_encrypt;
+		balg.decrypt = async_decrypt;
+
+		balg.geniv = alg->cra_blkcipher.geniv;
+	} else {
+		balg.ivsize = alg->cra_ablkcipher.ivsize;
+		balg.min_keysize = alg->cra_ablkcipher.min_keysize;
+		balg.max_keysize = alg->cra_ablkcipher.max_keysize;
+
+		balg.setkey = alg->cra_ablkcipher.setkey;
+		balg.encrypt = alg->cra_ablkcipher.encrypt;
+		balg.decrypt = alg->cra_ablkcipher.decrypt;
+
+		balg.geniv = alg->cra_ablkcipher.geniv;
+	}
+
+	err = -EINVAL;
+	if (!balg.ivsize)
+		goto err_drop_alg;
+
+	/*
+	 * This is only true if we're constructing an algorithm with its
+	 * default IV generator.  For the default generator we elide the
+	 * template name and double-check the IV generator.
+	 */
+	if (algt->mask & CRYPTO_ALG_GENIV) {
+		if (!balg.geniv)
+			balg.geniv = crypto_default_geniv(alg);
+		err = -EAGAIN;
+		if (strcmp(tmpl->name, balg.geniv))
+			goto err_drop_alg;
+
+		memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
+		memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
+		       CRYPTO_MAX_ALG_NAME);
+	} else {
+		err = -ENAMETOOLONG;
+		if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
+			     "%s(%s)", tmpl->name, alg->cra_name) >=
+		    CRYPTO_MAX_ALG_NAME)
+			goto err_drop_alg;
+		if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+			     "%s(%s)", tmpl->name, alg->cra_driver_name) >=
+		    CRYPTO_MAX_ALG_NAME)
+			goto err_drop_alg;
+	}
+
+	inst->alg.cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV;
+	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
+	inst->alg.cra_priority = alg->cra_priority;
+	inst->alg.cra_blocksize = alg->cra_blocksize;
+	inst->alg.cra_alignmask = alg->cra_alignmask;
+	inst->alg.cra_type = &crypto_givcipher_type;
+
+	inst->alg.cra_ablkcipher.ivsize = balg.ivsize;
+	inst->alg.cra_ablkcipher.min_keysize = balg.min_keysize;
+	inst->alg.cra_ablkcipher.max_keysize = balg.max_keysize;
+	inst->alg.cra_ablkcipher.geniv = balg.geniv;
+
+	inst->alg.cra_ablkcipher.setkey = balg.setkey;
+	inst->alg.cra_ablkcipher.encrypt = balg.encrypt;
+	inst->alg.cra_ablkcipher.decrypt = balg.decrypt;
+
+out:
+	return inst;
+
+err_drop_alg:
+	crypto_drop_skcipher(spawn);
+err_free_inst:
+	kfree(inst);
+	inst = ERR_PTR(err);
+	goto out;
+}
+EXPORT_SYMBOL_GPL(skcipher_geniv_alloc);
+
+void skcipher_geniv_free(struct crypto_instance *inst)
+{
+	crypto_drop_skcipher(crypto_instance_ctx(inst));
+	kfree(inst);
+}
+EXPORT_SYMBOL_GPL(skcipher_geniv_free);
+
+int skcipher_geniv_init(struct crypto_tfm *tfm)
+{
+	struct crypto_instance *inst = (void *)tfm->__crt_alg;
+	struct crypto_ablkcipher *cipher;
+
+	cipher = crypto_spawn_skcipher(crypto_instance_ctx(inst));
+	if (IS_ERR(cipher))
+		return PTR_ERR(cipher);
+
+	tfm->crt_ablkcipher.base = cipher;
+	tfm->crt_ablkcipher.reqsize += crypto_ablkcipher_reqsize(cipher);
+
+	return 0;
+}
+EXPORT_SYMBOL_GPL(skcipher_geniv_init);
+
+void skcipher_geniv_exit(struct crypto_tfm *tfm)
+{
+	crypto_free_ablkcipher(tfm->crt_ablkcipher.base);
+}
+EXPORT_SYMBOL_GPL(skcipher_geniv_exit);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Generic block chaining cipher type");