/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __LINUX_BITMAP_H
#define __LINUX_BITMAP_H

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <linux/bitops.h>
#include <linux/string.h>
#include <linux/kernel.h>

/*
 * bitmaps provide bit arrays that consume one or more unsigned
 * longs.  The bitmap interface and available operations are listed
 * here, in bitmap.h.
 *
 * Function implementations generic to all architectures are in
 * lib/bitmap.c.  Function implementations that are architecture
 * specific are in various include/asm-<arch>/bitops.h headers
 * and other arch/<arch> specific files.
 *
 * See lib/bitmap.c for more details.
 */

/**
 * DOC: bitmap overview
 *
 * The available bitmap operations and their rough meaning in the
 * case that the bitmap is a single unsigned long are thus:
 *
 * The generated code is more efficient when nbits is known at
 * compile-time and at most BITS_PER_LONG.
 *
 * ::
 *
 *  bitmap_zero(dst, nbits)                     *dst = 0UL
 *  bitmap_fill(dst, nbits)                     *dst = ~0UL
 *  bitmap_copy(dst, src, nbits)                *dst = *src
 *  bitmap_and(dst, src1, src2, nbits)          *dst = *src1 & *src2
 *  bitmap_or(dst, src1, src2, nbits)           *dst = *src1 | *src2
 *  bitmap_xor(dst, src1, src2, nbits)          *dst = *src1 ^ *src2
 *  bitmap_andnot(dst, src1, src2, nbits)       *dst = *src1 & ~(*src2)
 *  bitmap_complement(dst, src, nbits)          *dst = ~(*src)
 *  bitmap_equal(src1, src2, nbits)             Are *src1 and *src2 equal?
 *  bitmap_intersects(src1, src2, nbits)        Do *src1 and *src2 overlap?
 *  bitmap_subset(src1, src2, nbits)            Is *src1 a subset of *src2?
 *  bitmap_empty(src, nbits)                    Are all bits zero in *src?
 *  bitmap_full(src, nbits)                     Are all bits set in *src?
 *  bitmap_weight(src, nbits)                   Hamming Weight: number of set bits
 *  bitmap_set(dst, pos, nbits)                 Set specified bit area
 *  bitmap_clear(dst, pos, nbits)               Clear specified bit area
 *  bitmap_find_next_zero_area(buf, len, pos, n, mask)  Find bit free area
 *  bitmap_find_next_zero_area_off(buf, len, pos, n, mask, mask_off)  as above
 *  bitmap_next_clear_region(map, &start, &end, nbits)  Find next clear region
 *  bitmap_next_set_region(map, &start, &end, nbits)  Find next set region
 *  bitmap_for_each_clear_region(map, rs, re, start, end)
 *                                              Iterate over all clear regions
 *  bitmap_for_each_set_region(map, rs, re, start, end)
 *                                              Iterate over all set regions
 *  bitmap_shift_right(dst, src, n, nbits)      *dst = *src >> n
 *  bitmap_shift_left(dst, src, n, nbits)       *dst = *src << n
 *  bitmap_cut(dst, src, first, n, nbits)       Cut n bits from first, copy rest
 *  bitmap_replace(dst, old, new, mask, nbits)  *dst = (*old & ~(*mask)) | (*new & *mask)
 *  bitmap_remap(dst, src, old, new, nbits)     *dst = map(old, new)(src)
 *  bitmap_bitremap(oldbit, old, new, nbits)    newbit = map(old, new)(oldbit)
 *  bitmap_onto(dst, orig, relmap, nbits)       *dst = orig relative to relmap
 *  bitmap_fold(dst, orig, sz, nbits)           dst bits = orig bits mod sz
 *  bitmap_parse(buf, buflen, dst, nbits)       Parse bitmap dst from kernel buf
 *  bitmap_parse_user(ubuf, ulen, dst, nbits)   Parse bitmap dst from user buf
 *  bitmap_parselist(buf, dst, nbits)           Parse bitmap dst from kernel buf
 *  bitmap_parselist_user(buf, dst, nbits)      Parse bitmap dst from user buf
 *  bitmap_find_free_region(bitmap, bits, order)  Find and allocate bit region
 *  bitmap_release_region(bitmap, pos, order)   Free specified bit region
 *  bitmap_allocate_region(bitmap, pos, order)  Allocate specified bit region
 *  bitmap_from_arr32(dst, buf, nbits)          Copy nbits from u32[] buf to dst
 *  bitmap_to_arr32(buf, src, nbits)            Copy nbits from src to u32[] buf
 *  bitmap_get_value8(map, start)               Get 8-bit value from map at start
 *  bitmap_set_value8(map, value, start)        Set 8-bit value to map at start
 *
 * Note: bitmap_zero() and bitmap_fill() operate over whole unsigned
 * longs; that is, bits past nbits, up to the next unsigned long
 * boundary, will be zeroed or filled as well.  Use bitmap_clear() or
 * bitmap_set() when zeroing or filling must be limited to exactly the
 * requested bit range.
 */

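/*
 * Example: a minimal sketch of how the operations above compose; the
 * 'irq_mask' name and the 128-bit size are arbitrary choices made only
 * for this illustration.
 *
 *      DECLARE_BITMAP(irq_mask, 128);
 *
 *      bitmap_zero(irq_mask, 128);
 *      bitmap_set(irq_mask, 8, 16);                    // mark bits 8..23 as used
 *      WARN_ON(bitmap_weight(irq_mask, 128) != 16);    // exactly those 16 bits are set
 */
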
87/**
88 * DOC: bitmap bitops
89 *
90 * Also the following operations in asm/bitops.h apply to bitmaps.::
91 *
92 * set_bit(bit, addr) *addr |= bit
93 * clear_bit(bit, addr) *addr &= ~bit
94 * change_bit(bit, addr) *addr ^= bit
95 * test_bit(bit, addr) Is bit set in *addr?
96 * test_and_set_bit(bit, addr) Set bit and return old value
97 * test_and_clear_bit(bit, addr) Clear bit and return old value
98 * test_and_change_bit(bit, addr) Change bit and return old value
99 * find_first_zero_bit(addr, nbits) Position first zero bit in *addr
100 * find_first_bit(addr, nbits) Position first set bit in *addr
101 * find_next_zero_bit(addr, nbits, bit)
102 * Position next zero bit in *addr >= bit
103 * find_next_bit(addr, nbits, bit) Position next set bit in *addr >= bit
104 * find_next_and_bit(addr1, addr2, nbits, bit)
105 * Same as find_next_bit, but in
106 * (*addr1 & *addr2)
107 *
108 */
109
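/*
 * Example: a minimal sketch of the asm/bitops.h interface on a bitmap;
 * the 'pending' name and bit indices are arbitrary.  set_bit() and
 * test_and_clear_bit() are the atomic variants.
 *
 *      DECLARE_BITMAP(pending, 64);
 *      unsigned long first;
 *
 *      bitmap_zero(pending, 64);
 *      set_bit(3, pending);                            // atomically set bit 3
 *      first = find_first_bit(pending, 64);            // first == 3
 *      if (test_and_clear_bit(3, pending))
 *              first = find_first_bit(pending, 64);    // now 64: nothing set
 */
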
/**
 * DOC: declare bitmap
 * The DECLARE_BITMAP(name,bits) macro, in linux/types.h, can be used
 * to declare an array named 'name' of just enough unsigned longs to
 * contain all bit positions from 0 to 'bits' - 1.
 */

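/*
 * Example: DECLARE_BITMAP() is shorthand for sizing the array by hand;
 * the two declarations below are equivalent (the 'cpu_map' name is
 * arbitrary).
 *
 *      DECLARE_BITMAP(cpu_map, 70);
 *      unsigned long cpu_map[BITS_TO_LONGS(70)];       // 2 longs on 64-bit, 3 on 32-bit
 */
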
/*
 * Allocation and deallocation of bitmaps.
 * Provided in lib/bitmap.c to avoid circular dependency.
 */
extern unsigned long *bitmap_alloc(unsigned int nbits, gfp_t flags);
extern unsigned long *bitmap_zalloc(unsigned int nbits, gfp_t flags);
extern void bitmap_free(const unsigned long *bitmap);

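/*
 * Example: a minimal allocation sketch; error handling is reduced to a
 * NULL check and the 1024-bit size is arbitrary.
 *
 *      unsigned long *map = bitmap_zalloc(1024, GFP_KERNEL);
 *
 *      if (!map)
 *              return -ENOMEM;
 *      ...
 *      bitmap_free(map);
 */
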
/*
 * lib/bitmap.c provides these functions:
 */

extern int __bitmap_empty(const unsigned long *bitmap, unsigned int nbits);
extern int __bitmap_full(const unsigned long *bitmap, unsigned int nbits);
extern int __bitmap_equal(const unsigned long *bitmap1,
                          const unsigned long *bitmap2, unsigned int nbits);
extern bool __pure __bitmap_or_equal(const unsigned long *src1,
                                     const unsigned long *src2,
                                     const unsigned long *src3,
                                     unsigned int nbits);
extern void __bitmap_complement(unsigned long *dst, const unsigned long *src,
                                unsigned int nbits);
extern void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
                                 unsigned int shift, unsigned int nbits);
extern void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
                                unsigned int shift, unsigned int nbits);
extern void bitmap_cut(unsigned long *dst, const unsigned long *src,
                       unsigned int first, unsigned int cut,
                       unsigned int nbits);
extern int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
                        const unsigned long *bitmap2, unsigned int nbits);
extern void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
                        const unsigned long *bitmap2, unsigned int nbits);
extern void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
                         const unsigned long *bitmap2, unsigned int nbits);
extern int __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
                           const unsigned long *bitmap2, unsigned int nbits);
extern void __bitmap_replace(unsigned long *dst,
                             const unsigned long *old, const unsigned long *new,
                             const unsigned long *mask, unsigned int nbits);
extern int __bitmap_intersects(const unsigned long *bitmap1,
                               const unsigned long *bitmap2, unsigned int nbits);
extern int __bitmap_subset(const unsigned long *bitmap1,
                           const unsigned long *bitmap2, unsigned int nbits);
extern int __bitmap_weight(const unsigned long *bitmap, unsigned int nbits);
extern void __bitmap_set(unsigned long *map, unsigned int start, int len);
extern void __bitmap_clear(unsigned long *map, unsigned int start, int len);

extern unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
                                                    unsigned long size,
                                                    unsigned long start,
                                                    unsigned int nr,
                                                    unsigned long align_mask,
                                                    unsigned long align_offset);

/**
 * bitmap_find_next_zero_area - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 *
 * The @align_mask should be one less than a power of 2; the effect is
 * that the bit offsets of all zero areas this function finds are
 * multiples of that power of 2.  An @align_mask of 0 means no alignment
 * is required.
 */
static inline unsigned long
bitmap_find_next_zero_area(unsigned long *map,
                           unsigned long size,
                           unsigned long start,
                           unsigned int nr,
                           unsigned long align_mask)
{
        return bitmap_find_next_zero_area_off(map, size, start, nr,
                                              align_mask, 0);
}

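/*
 * Example: a minimal sketch of claiming a 4-bit, 4-aligned run from a
 * bitmap; 'map' and 'map_size' stand in for the caller's own bitmap
 * and its size in bits.
 *
 *      unsigned long pos;
 *
 *      pos = bitmap_find_next_zero_area(map, map_size, 0, 4, 3);
 *      if (pos >= map_size)
 *              return -ENOSPC;                 // no suitable free area
 *      bitmap_set(map, pos, 4);                // claim it
 */
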
extern int bitmap_parse(const char *buf, unsigned int buflen,
                        unsigned long *dst, int nbits);
extern int bitmap_parse_user(const char __user *ubuf, unsigned int ulen,
                        unsigned long *dst, int nbits);
extern int bitmap_parselist(const char *buf, unsigned long *maskp,
                        int nmaskbits);
extern int bitmap_parselist_user(const char __user *ubuf, unsigned int ulen,
                        unsigned long *dst, int nbits);
extern void bitmap_remap(unsigned long *dst, const unsigned long *src,
                const unsigned long *old, const unsigned long *new, unsigned int nbits);
extern int bitmap_bitremap(int oldbit,
                const unsigned long *old, const unsigned long *new, int bits);
extern void bitmap_onto(unsigned long *dst, const unsigned long *orig,
                const unsigned long *relmap, unsigned int bits);
extern void bitmap_fold(unsigned long *dst, const unsigned long *orig,
                unsigned int sz, unsigned int nbits);
extern int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order);
extern void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order);
extern int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order);

#ifdef __BIG_ENDIAN
extern void bitmap_copy_le(unsigned long *dst, const unsigned long *src, unsigned int nbits);
#else
#define bitmap_copy_le bitmap_copy
#endif
extern unsigned int bitmap_ord_to_pos(const unsigned long *bitmap, unsigned int ord, unsigned int nbits);
extern int bitmap_print_to_pagebuf(bool list, char *buf,
                                   const unsigned long *maskp, int nmaskbits);

#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))

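/*
 * Example: illustrative values of the two masks, assuming
 * BITS_PER_LONG == 64.
 *
 *      BITMAP_FIRST_WORD_MASK(68) == ~0UL << 4  == 0xfffffffffffffff0
 *      BITMAP_LAST_WORD_MASK(68)  == ~0UL >> 60 == 0x000000000000000f
 */
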
/*
 * The static inlines below do not handle constant nbits==0 correctly,
 * so make such users (should any ever turn up) call the out-of-line
 * versions.
 */
#define small_const_nbits(nbits) \
        (__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG && (nbits) > 0)

static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
{
        unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
        memset(dst, 0, len);
}

static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
{
        unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
        memset(dst, 0xff, len);
}

static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
                               unsigned int nbits)
{
        unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
        memcpy(dst, src, len);
}

/*
 * Copy bitmap and clear tail bits in last word.
 */
static inline void bitmap_copy_clear_tail(unsigned long *dst,
                const unsigned long *src, unsigned int nbits)
{
        bitmap_copy(dst, src, nbits);
        if (nbits % BITS_PER_LONG)
                dst[nbits / BITS_PER_LONG] &= BITMAP_LAST_WORD_MASK(nbits);
}

/*
 * On 32-bit systems bitmaps are represented as u32 arrays internally, and
 * therefore conversion is not needed when copying data from/to arrays of u32.
 */
#if BITS_PER_LONG == 64
extern void bitmap_from_arr32(unsigned long *bitmap, const u32 *buf,
                              unsigned int nbits);
extern void bitmap_to_arr32(u32 *buf, const unsigned long *bitmap,
                            unsigned int nbits);
#else
#define bitmap_from_arr32(bitmap, buf, nbits)                   \
        bitmap_copy_clear_tail((unsigned long *) (bitmap),      \
                               (const unsigned long *) (buf), (nbits))
#define bitmap_to_arr32(buf, bitmap, nbits)                     \
        bitmap_copy_clear_tail((unsigned long *) (buf),         \
                               (const unsigned long *) (bitmap), (nbits))
#endif

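/*
 * Example: a minimal round-trip sketch between a u32 array and a
 * bitmap; the names and the 64-bit width are arbitrary.
 *
 *      u32 words[2] = { 0xdeadbeef, 0x000000ff };
 *      DECLARE_BITMAP(map, 64);
 *
 *      bitmap_from_arr32(map, words, 64);      // bit 0 of map == bit 0 of words[0]
 *      bitmap_to_arr32(words, map, 64);        // and back again
 */
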
static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0;
        return __bitmap_and(dst, src1, src2, nbits);
}

static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = *src1 | *src2;
        else
                __bitmap_or(dst, src1, src2, nbits);
}

static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = *src1 ^ *src2;
        else
                __bitmap_xor(dst, src1, src2, nbits);
}

static inline int bitmap_andnot(unsigned long *dst, const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return (*dst = *src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
        return __bitmap_andnot(dst, src1, src2, nbits);
}

static inline void bitmap_complement(unsigned long *dst, const unsigned long *src,
                        unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = ~(*src);
        else
                __bitmap_complement(dst, src, nbits);
}

#ifdef __LITTLE_ENDIAN
#define BITMAP_MEM_ALIGNMENT 8
#else
#define BITMAP_MEM_ALIGNMENT (8 * sizeof(unsigned long))
#endif
#define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1)

static inline int bitmap_equal(const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return !((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
        if (__builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
            IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
                return !memcmp(src1, src2, nbits / 8);
        return __bitmap_equal(src1, src2, nbits);
}

/**
 * bitmap_or_equal - Check whether the or of two bitmaps is equal to a third
 * @src1: Pointer to bitmap 1
 * @src2: Pointer to bitmap 2; will be or'ed with bitmap 1
 * @src3: Pointer to bitmap 3. Compare to the result of *@src1 | *@src2
 * @nbits: number of bits in each of these bitmaps
 *
 * Returns: True if (*@src1 | *@src2) == *@src3, false otherwise
 */
static inline bool bitmap_or_equal(const unsigned long *src1,
                                   const unsigned long *src2,
                                   const unsigned long *src3,
                                   unsigned int nbits)
{
        if (!small_const_nbits(nbits))
                return __bitmap_or_equal(src1, src2, src3, nbits);

        return !(((*src1 | *src2) ^ *src3) & BITMAP_LAST_WORD_MASK(nbits));
}

static inline int bitmap_intersects(const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
        else
                return __bitmap_intersects(src1, src2, nbits);
}

static inline int bitmap_subset(const unsigned long *src1,
                        const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return !((*src1 & ~(*src2)) & BITMAP_LAST_WORD_MASK(nbits));
        else
                return __bitmap_subset(src1, src2, nbits);
}

static inline int bitmap_empty(const unsigned long *src, unsigned nbits)
{
        if (small_const_nbits(nbits))
                return !(*src & BITMAP_LAST_WORD_MASK(nbits));

        return find_first_bit(src, nbits) == nbits;
}

static inline int bitmap_full(const unsigned long *src, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return !(~(*src) & BITMAP_LAST_WORD_MASK(nbits));

        return find_first_zero_bit(src, nbits) == nbits;
}

static __always_inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
        return __bitmap_weight(src, nbits);
}

static __always_inline void bitmap_set(unsigned long *map, unsigned int start,
                unsigned int nbits)
{
        if (__builtin_constant_p(nbits) && nbits == 1)
                __set_bit(start, map);
        else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
                 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
                 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
                 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
                memset((char *)map + start / 8, 0xff, nbits / 8);
        else
                __bitmap_set(map, start, nbits);
}

static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
                unsigned int nbits)
{
        if (__builtin_constant_p(nbits) && nbits == 1)
                __clear_bit(start, map);
        else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
                 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
                 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
                 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
                memset((char *)map + start / 8, 0, nbits / 8);
        else
                __bitmap_clear(map, start, nbits);
}

static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
                                      unsigned int shift, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = (*src & BITMAP_LAST_WORD_MASK(nbits)) >> shift;
        else
                __bitmap_shift_right(dst, src, shift, nbits);
}

static inline void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
                                     unsigned int shift, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = (*src << shift) & BITMAP_LAST_WORD_MASK(nbits);
        else
                __bitmap_shift_left(dst, src, shift, nbits);
}

static inline void bitmap_replace(unsigned long *dst,
                                  const unsigned long *old,
                                  const unsigned long *new,
                                  const unsigned long *mask,
                                  unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = (*old & ~(*mask)) | (*new & *mask);
        else
                __bitmap_replace(dst, old, new, mask, nbits);
}

static inline void bitmap_next_clear_region(unsigned long *bitmap,
                                            unsigned int *rs, unsigned int *re,
                                            unsigned int end)
{
        *rs = find_next_zero_bit(bitmap, end, *rs);
        *re = find_next_bit(bitmap, end, *rs + 1);
}

static inline void bitmap_next_set_region(unsigned long *bitmap,
                                          unsigned int *rs, unsigned int *re,
                                          unsigned int end)
{
        *rs = find_next_bit(bitmap, end, *rs);
        *re = find_next_zero_bit(bitmap, end, *rs + 1);
}

/*
 * Bitmap region iterators.  They iterate over the bitmap between
 * [@start, @end).  @rs and @re should be integer variables and will be
 * set to the start and end index of the current clear or set region.
 */
#define bitmap_for_each_clear_region(bitmap, rs, re, start, end)        \
        for ((rs) = (start),                                            \
             bitmap_next_clear_region((bitmap), &(rs), &(re), (end));   \
             (rs) < (re);                                               \
             (rs) = (re) + 1,                                           \
             bitmap_next_clear_region((bitmap), &(rs), &(re), (end)))

#define bitmap_for_each_set_region(bitmap, rs, re, start, end)          \
        for ((rs) = (start),                                            \
             bitmap_next_set_region((bitmap), &(rs), &(re), (end));     \
             (rs) < (re);                                               \
             (rs) = (re) + 1,                                           \
             bitmap_next_set_region((bitmap), &(rs), &(re), (end)))

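/*
 * Example: a minimal sketch of walking every set region of a bitmap;
 * 'map' and 'nbits' stand in for the caller's bitmap and its size.
 *
 *      unsigned int rs, re;
 *
 *      bitmap_for_each_set_region(map, rs, re, 0, nbits)
 *              pr_info("bits [%u, %u) are set\n", rs, re);
 */
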
/**
 * BITMAP_FROM_U64() - Represent u64 value in the format suitable for bitmap.
 * @n: u64 value
 *
 * Linux bitmaps are internally arrays of unsigned longs, i.e. 32-bit
 * integers in a 32-bit environment and 64-bit integers in a 64-bit one.
 *
 * There are four combinations of endianness and word length in Linux
 * ABIs: LE64, BE64, LE32 and BE32.
 *
 * On 64-bit kernels 64-bit LE and BE numbers are naturally ordered in
 * bitmaps and therefore don't require any special handling.
 *
 * On 32-bit kernels the 32-bit LE ABI orders the lo word of a 64-bit number
 * in memory prior to the hi word, and 32-bit BE orders hi prior to lo.  The
 * bitmap, on the other hand, is represented as an array of 32-bit words and
 * the position of bit N may therefore be calculated as: word #(N/32) and
 * bit #(N%32) in that word.  For example, bit #42 is located at the 10th
 * position of the 2nd word.  This matches the 32-bit LE ABI, so we can simply
 * let the compiler store 64-bit values in memory as it usually does.  But for
 * BE we need to swap the hi and lo words manually.
 *
 * With all that, the macro BITMAP_FROM_U64() does explicit reordering of the
 * hi and lo parts of a u64.  For LE32 it does nothing, and for a BE
 * environment it swaps the hi and lo words, as is expected by bitmap.
 */
#if __BITS_PER_LONG == 64
#define BITMAP_FROM_U64(n) (n)
#else
#define BITMAP_FROM_U64(n) ((unsigned long) ((u64)(n) & ULONG_MAX)), \
                                ((unsigned long) ((u64)(n) >> 32))
#endif

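/*
 * Example: BITMAP_FROM_U64() is typically used in bitmap initializers,
 * where it expands to one or two array elements depending on the word
 * size; the 'mask' name and the value are arbitrary.
 *
 *      static const unsigned long mask[] = {
 *              BITMAP_FROM_U64(0x00000000ffffff00ULL),
 *      };
 */
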
/**
 * bitmap_from_u64 - Check and swap words within u64.
 * @mask: source bitmap
 * @dst: destination bitmap
 *
 * On a 32-bit big-endian kernel, reading a u64 mask via
 * ``(u32 *)(&val)[*]`` returns the wrong word: ``(u32 *)(&val)[0]``
 * gets the upper 32 bits, while the lower 32 bits of the u64 are
 * expected.
 */
static inline void bitmap_from_u64(unsigned long *dst, u64 mask)
{
        dst[0] = mask & ULONG_MAX;

        if (sizeof(mask) > sizeof(unsigned long))
                dst[1] = mask >> 32;
}

/**
 * bitmap_get_value8 - get an 8-bit value within a memory region
 * @map: address to the bitmap memory region
 * @start: bit offset of the 8-bit value; must be a multiple of 8
 *
 * Returns the 8-bit value located at the @start bit offset within the @map
 * memory region.
 */
static inline unsigned long bitmap_get_value8(const unsigned long *map,
                                              unsigned long start)
{
        const size_t index = BIT_WORD(start);
        const unsigned long offset = start % BITS_PER_LONG;

        return (map[index] >> offset) & 0xFF;
}

/**
 * bitmap_set_value8 - set an 8-bit value within a memory region
 * @map: address to the bitmap memory region
 * @value: the 8-bit value; values wider than 8 bits may clobber the bitmap
 * @start: bit offset of the 8-bit value; must be a multiple of 8
 */
static inline void bitmap_set_value8(unsigned long *map, unsigned long value,
                                     unsigned long start)
{
        const size_t index = BIT_WORD(start);
        const unsigned long offset = start % BITS_PER_LONG;

        map[index] &= ~(0xFFUL << offset);
        map[index] |= value << offset;
}

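/*
 * Example: a minimal sketch of byte-wise access to a bitmap, e.g. for an
 * 8-bit port backed by a bitmap; the names and offsets are arbitrary,
 * and the start offset must be a multiple of 8.
 *
 *      DECLARE_BITMAP(port_state, 32);
 *      unsigned long old;
 *
 *      bitmap_zero(port_state, 32);
 *      bitmap_set_value8(port_state, 0xa5, 8);         // write bits 8..15
 *      old = bitmap_get_value8(port_state, 8);         // old == 0xa5
 */
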
#endif /* __ASSEMBLY__ */

#endif /* __LINUX_BITMAP_H */