Update Linux to v5.10.109

Sourced from [1]

[1] https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-5.10.109.tar.xz

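The API exposed by include/crypto/algapi.h changes shape with this import:
the legacy blkcipher/ablkcipher helpers are removed, spawns are grabbed with
the owning instance passed up front, and the sync/inherited-flags handling
moves to crypto_check_attr_type()/crypto_algt_inherited_mask(). As a rough,
caller-side sketch only (example_create() and its cipher template are
hypothetical and not part of this patch; see the diff below for the actual
header changes):

#include <crypto/algapi.h>
#include <linux/slab.h>

static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct crypto_cipher_spawn *spawn;
        struct crypto_instance *inst;
        u32 mask;
        int err;

        /* Replaces the old crypto_check_attr_type()/crypto_requires_sync()
         * pair: mask now carries any inherited-flag restrictions the user
         * requested.
         */
        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_CIPHER, &mask);
        if (err)
                return err;

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;
        spawn = crypto_instance_ctx(inst);

        /* Replaces crypto_init_spawn()/crypto_set_spawn(): the instance is
         * now an explicit argument and the spawn attaches before
         * registration.
         */
        err = crypto_grab_cipher(spawn, inst, crypto_attr_alg_name(tb[1]),
                                 0, mask);
        if (err) {
                kfree(inst);
                return err;
        }

        /* ... fill in inst->alg, then crypto_register_instance(tmpl, inst);
         * on failure, crypto_drop_cipher(spawn) and kfree(inst).
         */
        return 0;
}
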
Change-Id: I19bca9fc6762d4e63bcf3e4cba88bbe560d9c76c
Signed-off-by: Olivier Deprez <olivier.deprez@arm.com>
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index e5bd302..18dd7a4 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -10,7 +10,6 @@
 #include <linux/crypto.h>
 #include <linux/list.h>
 #include <linux/kernel.h>
-#include <linux/skbuff.h>
 
 /*
  * Maximum values for blocksize and alignmask, used to allocate
@@ -27,6 +26,7 @@
 struct module;
 struct rtattr;
 struct seq_file;
+struct sk_buff;
 
 struct crypto_type {
 	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
@@ -47,7 +47,13 @@
 	struct crypto_alg alg;
 
 	struct crypto_template *tmpl;
-	struct hlist_node list;
+
+	union {
+		/* Node in list of instances after registration. */
+		struct hlist_node list;
+		/* List of attached spawns before registration. */
+		struct crypto_spawn *spawns;
+	};
 
 	void *__ctx[] CRYPTO_MINALIGN_ATTR;
 };
@@ -57,8 +63,6 @@
 	struct hlist_head instances;
 	struct module *module;
 
-	struct crypto_instance *(*alloc)(struct rtattr **tb);
-	void (*free)(struct crypto_instance *inst);
 	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);
 
 	char name[CRYPTO_MAX_ALG_NAME];
@@ -67,9 +71,16 @@
 struct crypto_spawn {
 	struct list_head list;
 	struct crypto_alg *alg;
-	struct crypto_instance *inst;
+	union {
+		/* Back pointer to instance after registration.*/
+		struct crypto_instance *inst;
+		/* Spawn list pointer prior to registration. */
+		struct crypto_spawn *next;
+	};
 	const struct crypto_type *frontend;
 	u32 mask;
+	bool dead;
+	bool registered;
 };
 
 struct crypto_queue {
@@ -85,56 +96,6 @@
 	unsigned int offset;
 };
 
-struct blkcipher_walk {
-	union {
-		struct {
-			struct page *page;
-			unsigned long offset;
-		} phys;
-
-		struct {
-			u8 *page;
-			u8 *addr;
-		} virt;
-	} src, dst;
-
-	struct scatter_walk in;
-	unsigned int nbytes;
-
-	struct scatter_walk out;
-	unsigned int total;
-
-	void *page;
-	u8 *buffer;
-	u8 *iv;
-	unsigned int ivsize;
-
-	int flags;
-	unsigned int walk_blocksize;
-	unsigned int cipher_blocksize;
-	unsigned int alignmask;
-};
-
-struct ablkcipher_walk {
-	struct {
-		struct page *page;
-		unsigned int offset;
-	} src, dst;
-
-	struct scatter_walk	in;
-	unsigned int		nbytes;
-	struct scatter_walk	out;
-	unsigned int		total;
-	struct list_head	buffers;
-	u8			*iv_buffer;
-	u8			*iv;
-	int			flags;
-	unsigned int		blocksize;
-};
-
-extern const struct crypto_type crypto_ablkcipher_type;
-extern const struct crypto_type crypto_blkcipher_type;
-
 void crypto_mod_put(struct crypto_alg *alg);
 
 int crypto_register_template(struct crypto_template *tmpl);
@@ -145,49 +106,27 @@
 
 int crypto_register_instance(struct crypto_template *tmpl,
 			     struct crypto_instance *inst);
-int crypto_unregister_instance(struct crypto_instance *inst);
+void crypto_unregister_instance(struct crypto_instance *inst);
 
-int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
-		      struct crypto_instance *inst, u32 mask);
-int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
-		       struct crypto_instance *inst,
-		       const struct crypto_type *frontend);
-int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
-		      u32 type, u32 mask);
-
+int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
+		      const char *name, u32 type, u32 mask);
 void crypto_drop_spawn(struct crypto_spawn *spawn);
 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
 				    u32 mask);
 void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
 
-static inline void crypto_set_spawn(struct crypto_spawn *spawn,
-				    struct crypto_instance *inst)
-{
-	spawn->inst = inst;
-}
-
 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
-int crypto_check_attr_type(struct rtattr **tb, u32 type);
+int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
 const char *crypto_attr_alg_name(struct rtattr *rta);
-struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
-				    const struct crypto_type *frontend,
-				    u32 type, u32 mask);
-
-static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
-						 u32 type, u32 mask)
-{
-	return crypto_attr_alg2(rta, NULL, type, mask);
-}
-
 int crypto_attr_u32(struct rtattr *rta, u32 *num);
 int crypto_inst_setname(struct crypto_instance *inst, const char *name,
 			struct crypto_alg *alg);
-void *crypto_alloc_instance(const char *name, struct crypto_alg *alg,
-			    unsigned int head);
 
 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
 int crypto_enqueue_request(struct crypto_queue *queue,
 			   struct crypto_async_request *request);
+void crypto_enqueue_request_head(struct crypto_queue *queue,
+				 struct crypto_async_request *request);
 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
 static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
 {
@@ -233,26 +172,6 @@
 	}
 }
 
-int blkcipher_walk_done(struct blkcipher_desc *desc,
-			struct blkcipher_walk *walk, int err);
-int blkcipher_walk_virt(struct blkcipher_desc *desc,
-			struct blkcipher_walk *walk);
-int blkcipher_walk_phys(struct blkcipher_desc *desc,
-			struct blkcipher_walk *walk);
-int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
-			      struct blkcipher_walk *walk,
-			      unsigned int blocksize);
-int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
-				   struct blkcipher_walk *walk,
-				   struct crypto_aead *tfm,
-				   unsigned int blocksize);
-
-int ablkcipher_walk_done(struct ablkcipher_request *req,
-			 struct ablkcipher_walk *walk, int err);
-int ablkcipher_walk_phys(struct ablkcipher_request *req,
-			 struct ablkcipher_walk *walk);
-void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);
-
 static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
 {
 	return PTR_ALIGN(crypto_tfm_ctx(tfm),
@@ -270,48 +189,38 @@
 	return inst->__ctx;
 }
 
-static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
-	struct crypto_ablkcipher *tfm)
+struct crypto_cipher_spawn {
+	struct crypto_spawn base;
+};
+
+static inline int crypto_grab_cipher(struct crypto_cipher_spawn *spawn,
+				     struct crypto_instance *inst,
+				     const char *name, u32 type, u32 mask)
 {
-	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
+	type &= ~CRYPTO_ALG_TYPE_MASK;
+	type |= CRYPTO_ALG_TYPE_CIPHER;
+	mask |= CRYPTO_ALG_TYPE_MASK;
+	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
 }
 
-static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
+static inline void crypto_drop_cipher(struct crypto_cipher_spawn *spawn)
 {
-	return crypto_tfm_ctx(&tfm->base);
+	crypto_drop_spawn(&spawn->base);
 }
 
-static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
+static inline struct crypto_alg *crypto_spawn_cipher_alg(
+	struct crypto_cipher_spawn *spawn)
 {
-	return crypto_tfm_ctx_aligned(&tfm->base);
-}
-
-static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
-	struct crypto_spawn *spawn)
-{
-	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK;
-
-	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
-}
-
-static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
-{
-	return crypto_tfm_ctx(&tfm->base);
-}
-
-static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
-{
-	return crypto_tfm_ctx_aligned(&tfm->base);
+	return spawn->base.alg;
 }
 
 static inline struct crypto_cipher *crypto_spawn_cipher(
-	struct crypto_spawn *spawn)
+	struct crypto_cipher_spawn *spawn)
 {
 	u32 type = CRYPTO_ALG_TYPE_CIPHER;
 	u32 mask = CRYPTO_ALG_TYPE_MASK;
 
-	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
+	return __crypto_cipher_cast(crypto_spawn_tfm(&spawn->base, type, mask));
 }
 
 static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
@@ -319,33 +228,6 @@
 	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
 }
 
-static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
-				       struct scatterlist *dst,
-				       struct scatterlist *src,
-				       unsigned int nbytes)
-{
-	walk->in.sg = src;
-	walk->out.sg = dst;
-	walk->total = nbytes;
-}
-
-static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
-					struct scatterlist *dst,
-					struct scatterlist *src,
-					unsigned int nbytes)
-{
-	walk->in.sg = src;
-	walk->out.sg = dst;
-	walk->total = nbytes;
-	INIT_LIST_HEAD(&walk->buffers);
-}
-
-static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
-{
-	if (unlikely(!list_empty(&walk->buffers)))
-		__ablkcipher_walk_complete(walk);
-}
-
 static inline struct crypto_async_request *crypto_get_backlog(
 	struct crypto_queue *queue)
 {
@@ -353,41 +235,29 @@
 	       container_of(queue->backlog, struct crypto_async_request, list);
 }
 
-static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
-					     struct ablkcipher_request *request)
+static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
 {
-	return crypto_enqueue_request(queue, &request->base);
-}
-
-static inline struct ablkcipher_request *ablkcipher_dequeue_request(
-	struct crypto_queue *queue)
-{
-	return ablkcipher_request_cast(crypto_dequeue_request(queue));
-}
-
-static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
-{
-	return req->__ctx;
-}
-
-static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
-						     u32 type, u32 mask)
-{
-	return crypto_attr_alg(tb[1], type, mask);
-}
-
-static inline int crypto_requires_off(u32 type, u32 mask, u32 off)
-{
-	return (type ^ off) & mask & off;
+	return (algt->type ^ off) & algt->mask & off;
 }
 
 /*
- * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
- * Otherwise returns zero.
+ * When an algorithm uses another algorithm (e.g., if it's an instance of a
+ * template), these are the flags that should always be set on the "outer"
+ * algorithm if any "inner" algorithm has them set.
  */
-static inline int crypto_requires_sync(u32 type, u32 mask)
+#define CRYPTO_ALG_INHERITED_FLAGS	\
+	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
+	 CRYPTO_ALG_ALLOCATES_MEMORY)
+
+/*
+ * Given the type and mask that specify the flags restrictions on a template
+ * instance being created, return the mask that should be passed to
+ * crypto_grab_*() (along with type=0) to honor any request the user made to
+ * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
+ */
+static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
 {
-	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
+	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
 }
 
 noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);
@@ -407,12 +277,6 @@
 	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
 }
 
-static inline void crypto_yield(u32 flags)
-{
-	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
-		cond_resched();
-}
-
 int crypto_register_notifier(struct notifier_block *nb);
 int crypto_unregister_notifier(struct notifier_block *nb);