Update Linux to v5.4.2

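This picks up the upstream conversion of the radix tree test suite's
iteration_check.c to the XArray API: the tree, its tree_lock mutex and
integer tags are replaced by a DEFINE_XARRAY() with XA_MARK_* marks,
insertion moves into a my_item_insert() helper built on XA_STATE
operations, and the iterator threads use xas_for_each() and
xas_for_each_marked() with xas_retry()/xas_pause() in place of the
radix_tree iterator helpers.
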
Change-Id: Idf6911045d9d382da2cfe01b1edff026404ac8fd
diff --git a/tools/testing/radix-tree/iteration_check.c b/tools/testing/radix-tree/iteration_check.c
index a92bab5..e9908bc 100644
--- a/tools/testing/radix-tree/iteration_check.c
+++ b/tools/testing/radix-tree/iteration_check.c
@@ -1,52 +1,64 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
- * iteration_check.c: test races having to do with radix tree iteration
+ * iteration_check.c: test races having to do with xarray iteration
  * Copyright (c) 2016 Intel Corporation
  * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms and conditions of the GNU General Public License,
- * version 2, as published by the Free Software Foundation.
- *
- * This program is distributed in the hope it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
- * more details.
  */
-#include <linux/radix-tree.h>
 #include <pthread.h>
 #include "test.h"
 
 #define NUM_THREADS	5
 #define MAX_IDX		100
-#define TAG		0
-#define NEW_TAG		1
+#define TAG		XA_MARK_0
+#define NEW_TAG		XA_MARK_1
 
-static pthread_mutex_t tree_lock = PTHREAD_MUTEX_INITIALIZER;
 static pthread_t threads[NUM_THREADS];
 static unsigned int seeds[3];
-static RADIX_TREE(tree, GFP_KERNEL);
+static DEFINE_XARRAY(array);
 static bool test_complete;
 static int max_order;
 
-/* relentlessly fill the tree with tagged entries */
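+/*
+ * Insert an item at @index, trying each order from max_order down until one
+ * does not conflict with an existing entry, and mark the entry with TAG.
+ */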
+void my_item_insert(struct xarray *xa, unsigned long index)
+{
+	XA_STATE(xas, xa, index);
+	struct item *item = item_create(index, 0);
+	int order;
+
+retry:
+	xas_lock(&xas);
+	for (order = max_order; order >= 0; order--) {
+		xas_set_order(&xas, index, order);
+		item->order = order;
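+		/* an existing entry overlaps this order; try a smaller one */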
+		if (xas_find_conflict(&xas))
+			continue;
+		xas_store(&xas, item);
+		xas_set_mark(&xas, TAG);
+		break;
+	}
+	xas_unlock(&xas);
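+	/* on ENOMEM, xas_nomem() allocates memory outside the lock; retry */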
+	if (xas_nomem(&xas, GFP_KERNEL))
+		goto retry;
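+	/* every order conflicted, so the item was never stored */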
+	if (order < 0)
+		free(item);
+}
+
+/* relentlessly fill the array with tagged entries */
 static void *add_entries_fn(void *arg)
 {
 	rcu_register_thread();
 
 	while (!test_complete) {
 		unsigned long pgoff;
-		int order;
 
 		for (pgoff = 0; pgoff < MAX_IDX; pgoff++) {
-			pthread_mutex_lock(&tree_lock);
-			for (order = max_order; order >= 0; order--) {
-				if (item_insert_order(&tree, pgoff, order)
-						== 0) {
-					item_tag_set(&tree, pgoff, TAG);
-					break;
-				}
-			}
-			pthread_mutex_unlock(&tree_lock);
+			my_item_insert(&array, pgoff);
 		}
 	}
 
@@ -56,33 +68,26 @@
 }
 
 /*
- * Iterate over the tagged entries, doing a radix_tree_iter_retry() as we find
- * things that have been removed and randomly resetting our iteration to the
- * next chunk with radix_tree_iter_resume().  Both radix_tree_iter_retry() and
- * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
- * NULL 'slot' variable.
+ * Iterate over tagged entries, retrying when we find ourselves in a deleted
+ * node and randomly pausing the iteration.
  */
 static void *tagged_iteration_fn(void *arg)
 {
-	struct radix_tree_iter iter;
-	void **slot;
+	XA_STATE(xas, &array, 0);
+	void *entry;
 
 	rcu_register_thread();
 
 	while (!test_complete) {
+		xas_set(&xas, 0);
 		rcu_read_lock();
-		radix_tree_for_each_tagged(slot, &tree, &iter, 0, TAG) {
-			void *entry = radix_tree_deref_slot(slot);
-			if (unlikely(!entry))
+		xas_for_each_marked(&xas, entry, ULONG_MAX, TAG) {
+			if (xas_retry(&xas, entry))
 				continue;
 
-			if (radix_tree_deref_retry(entry)) {
-				slot = radix_tree_iter_retry(&iter);
-				continue;
-			}
-
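+			/* occasionally pause, let pending RCU frees run, then resume */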
 			if (rand_r(&seeds[0]) % 50 == 0) {
-				slot = radix_tree_iter_resume(slot, &iter);
+				xas_pause(&xas);
 				rcu_read_unlock();
 				rcu_barrier();
 				rcu_read_lock();
@@ -97,33 +102,26 @@
 }
 
 /*
- * Iterate over the entries, doing a radix_tree_iter_retry() as we find things
- * that have been removed and randomly resetting our iteration to the next
- * chunk with radix_tree_iter_resume().  Both radix_tree_iter_retry() and
- * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
- * NULL 'slot' variable.
+ * Iterate over the entries, retrying when we find ourselves in a deleted
+ * node and randomly pausing the iteration.
  */
 static void *untagged_iteration_fn(void *arg)
 {
-	struct radix_tree_iter iter;
-	void **slot;
+	XA_STATE(xas, &array, 0);
+	void *entry;
 
 	rcu_register_thread();
 
 	while (!test_complete) {
+		xas_set(&xas, 0);
 		rcu_read_lock();
-		radix_tree_for_each_slot(slot, &tree, &iter, 0) {
-			void *entry = radix_tree_deref_slot(slot);
-			if (unlikely(!entry))
+		xas_for_each(&xas, entry, ULONG_MAX) {
+			if (xas_retry(&xas, entry))
 				continue;
 
-			if (radix_tree_deref_retry(entry)) {
-				slot = radix_tree_iter_retry(&iter);
-				continue;
-			}
-
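+			/* occasionally pause, let pending RCU frees run, then resume */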
 			if (rand_r(&seeds[1]) % 50 == 0) {
-				slot = radix_tree_iter_resume(slot, &iter);
+				xas_pause(&xas);
 				rcu_read_unlock();
 				rcu_barrier();
 				rcu_read_lock();
@@ -138,7 +136,7 @@
 }
 
 /*
- * Randomly remove entries to help induce radix_tree_iter_retry() calls in the
+ * Randomly remove entries to help induce retries in the
  * two iteration functions.
  */
 static void *remove_entries_fn(void *arg)
@@ -147,12 +145,14 @@
 
 	while (!test_complete) {
 		int pgoff;
+		struct item *item;
 
 		pgoff = rand_r(&seeds[2]) % MAX_IDX;
 
-		pthread_mutex_lock(&tree_lock);
-		item_delete(&tree, pgoff);
-		pthread_mutex_unlock(&tree_lock);
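+		/* xa_erase() returns the removed entry, or NULL if none was present */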
+		item = xa_erase(&array, pgoff);
+		if (item)
+			item_free(item, pgoff);
 	}
 
 	rcu_unregister_thread();
@@ -165,8 +165,7 @@
 	rcu_register_thread();
 
 	while (!test_complete) {
-		tag_tagged_items(&tree, &tree_lock, 0, MAX_IDX, 10, TAG,
-					NEW_TAG);
+		tag_tagged_items(&array, 0, MAX_IDX, 10, TAG, NEW_TAG);
 	}
 	rcu_unregister_thread();
 	return NULL;
@@ -217,5 +216,5 @@
 		}
 	}
 
-	item_kill_tree(&tree);
+	item_kill_tree(&array);
 }