// SPDX-License-Identifier: GPL-2.0-only
/*
 * benchmark.c:
 * Author: Konstantin Khlebnikov <koct9i@gmail.com>
 */
#include <linux/radix-tree.h>
#include <linux/slab.h>
#include <linux/errno.h>
#include <time.h>
#include "test.h"

#define NSEC_PER_SEC	1000000000L

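/*
 * Time one full walk of the tree: either every populated slot, or only
 * the slots with tag 0 set.  The volatile sink is XORed with each slot
 * pointer so the walk cannot be optimised away.  When built with
 * BENCHMARK defined, a walk that completes in under a fifth of a second
 * is repeated until roughly a quarter of a second has been spent, and
 * the average time per walk is returned in nanoseconds.
 */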
static long long benchmark_iter(struct radix_tree_root *root, bool tagged)
{
	volatile unsigned long sink = 0;
	struct radix_tree_iter iter;
	struct timespec start, finish;
	long long nsec;
	int l, loops = 1;
	void **slot;

#ifdef BENCHMARK
again:
#endif
	clock_gettime(CLOCK_MONOTONIC, &start);
	for (l = 0; l < loops; l++) {
		if (tagged) {
			radix_tree_for_each_tagged(slot, root, &iter, 0, 0)
				sink ^= (unsigned long)slot;
		} else {
			radix_tree_for_each_slot(slot, root, &iter, 0)
				sink ^= (unsigned long)slot;
		}
	}
	clock_gettime(CLOCK_MONOTONIC, &finish);

	nsec = (finish.tv_sec - start.tv_sec) * NSEC_PER_SEC +
	       (finish.tv_nsec - start.tv_nsec);

#ifdef BENCHMARK
	if (loops == 1 && nsec * 5 < NSEC_PER_SEC) {
		loops = NSEC_PER_SEC / nsec / 4 + 1;
		goto again;
	}
#endif

	nsec /= loops;
	return nsec;
}

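/*
 * Time the insertion of a test item at every 'step'th index below
 * 'size' and report the total in nanoseconds.
 */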
static void benchmark_insert(struct radix_tree_root *root,
			     unsigned long size, unsigned long step)
{
	struct timespec start, finish;
	unsigned long index;
	long long nsec;

	clock_gettime(CLOCK_MONOTONIC, &start);

	for (index = 0 ; index < size ; index += step)
		item_insert(root, index);

	clock_gettime(CLOCK_MONOTONIC, &finish);

	nsec = (finish.tv_sec - start.tv_sec) * NSEC_PER_SEC +
	       (finish.tv_nsec - start.tv_nsec);

	printv(2, "Size: %8ld, step: %8ld, insertion: %15lld ns\n",
		size, step, nsec);
}

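/*
 * Time setting tag 0 on every 'step'th index below 'size' and report
 * the total in nanoseconds.
 */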
static void benchmark_tagging(struct radix_tree_root *root,
			      unsigned long size, unsigned long step)
{
	struct timespec start, finish;
	unsigned long index;
	long long nsec;

	clock_gettime(CLOCK_MONOTONIC, &start);

	for (index = 0 ; index < size ; index += step)
		radix_tree_tag_set(root, index, 0);

	clock_gettime(CLOCK_MONOTONIC, &finish);

	nsec = (finish.tv_sec - start.tv_sec) * NSEC_PER_SEC +
	       (finish.tv_nsec - start.tv_nsec);

	printv(2, "Size: %8ld, step: %8ld, tagging: %17lld ns\n",
		size, step, nsec);
}

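/*
 * Time the deletion of the item at every 'step'th index below 'size'
 * and report the total in nanoseconds.
 */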
static void benchmark_delete(struct radix_tree_root *root,
			     unsigned long size, unsigned long step)
{
	struct timespec start, finish;
	unsigned long index;
	long long nsec;

	clock_gettime(CLOCK_MONOTONIC, &start);

	for (index = 0 ; index < size ; index += step)
		item_delete(root, index);

	clock_gettime(CLOCK_MONOTONIC, &finish);

	nsec = (finish.tv_sec - start.tv_sec) * NSEC_PER_SEC +
	       (finish.tv_nsec - start.tv_nsec);

	printv(2, "Size: %8ld, step: %8ld, deletion: %16lld ns\n",
		size, step, nsec);
}

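/*
 * Run the whole benchmark sequence for one size/step combination:
 * populate the tree, tag the populated indices, time tagged and normal
 * iteration, delete everything again and finally tear the tree down.
 */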
static void benchmark_size(unsigned long size, unsigned long step)
{
	RADIX_TREE(tree, GFP_KERNEL);
	long long normal, tagged;

	benchmark_insert(&tree, size, step);
	benchmark_tagging(&tree, size, step);

	tagged = benchmark_iter(&tree, true);
	normal = benchmark_iter(&tree, false);

	printv(2, "Size: %8ld, step: %8ld, tagged iteration: %8lld ns\n",
		size, step, tagged);
	printv(2, "Size: %8ld, step: %8ld, normal iteration: %8lld ns\n",
		size, step, normal);

	benchmark_delete(&tree, size, step);

	item_kill_tree(&tree);
	rcu_barrier();
}

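/*
 * Run the benchmarks for every combination of the size and step tables
 * below; both tables are terminated by a zero entry.
 */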
void benchmark(void)
{
	unsigned long size[] = {1 << 10, 1 << 20, 0};
	unsigned long step[] = {1, 2, 7, 15, 63, 64, 65,
				128, 256, 512, 12345, 0};
	int c, s;

	printv(1, "starting benchmarks\n");
	printv(1, "RADIX_TREE_MAP_SHIFT = %d\n", RADIX_TREE_MAP_SHIFT);

	for (c = 0; size[c]; c++)
		for (s = 0; step[s]; s++)
			benchmark_size(size[c], step[s]);
}