/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __LINUX_BITMAP_H
#define __LINUX_BITMAP_H

#ifndef __ASSEMBLY__

#include <linux/align.h>
#include <linux/bitops.h>
#include <linux/cleanup.h>
#include <linux/errno.h>
#include <linux/find.h>
#include <linux/limits.h>
#include <linux/string.h>
#include <linux/types.h>
#include <linux/bitmap-str.h>

struct device;

/*
 * Bitmaps provide bit arrays that consume one or more unsigned
 * longs.  The bitmap interface and available operations are listed
 * here, in bitmap.h.
 *
 * Function implementations generic to all architectures are in
 * lib/bitmap.c.  Function implementations that are architecture
 * specific are in various include/asm-<arch>/bitops.h headers
 * and other arch/<arch> specific files.
 *
 * See lib/bitmap.c for more details.
 */

/**
 * DOC: bitmap overview
 *
 * The available bitmap operations and their rough meaning in the
 * case that the bitmap is a single unsigned long are thus:
 *
 * The generated code is more efficient when nbits is known at
 * compile-time and at most BITS_PER_LONG.
 *
 * ::
 *
 *  bitmap_zero(dst, nbits)                     *dst = 0UL
 *  bitmap_fill(dst, nbits)                     *dst = ~0UL
 *  bitmap_copy(dst, src, nbits)                *dst = *src
 *  bitmap_and(dst, src1, src2, nbits)          *dst = *src1 & *src2
 *  bitmap_or(dst, src1, src2, nbits)           *dst = *src1 | *src2
 *  bitmap_xor(dst, src1, src2, nbits)          *dst = *src1 ^ *src2
 *  bitmap_andnot(dst, src1, src2, nbits)       *dst = *src1 & ~(*src2)
 *  bitmap_complement(dst, src, nbits)          *dst = ~(*src)
 *  bitmap_equal(src1, src2, nbits)             Are *src1 and *src2 equal?
 *  bitmap_intersects(src1, src2, nbits)        Do *src1 and *src2 overlap?
 *  bitmap_subset(src1, src2, nbits)            Is *src1 a subset of *src2?
 *  bitmap_empty(src, nbits)                    Are all bits zero in *src?
 *  bitmap_full(src, nbits)                     Are all bits set in *src?
 *  bitmap_weight(src, nbits)                   Hamming Weight: number of set bits
 *  bitmap_weight_and(src1, src2, nbits)        Hamming Weight of and'ed bitmap
 *  bitmap_weight_andnot(src1, src2, nbits)     Hamming Weight of andnot'ed bitmap
 *  bitmap_set(dst, pos, nbits)                 Set specified bit area
 *  bitmap_clear(dst, pos, nbits)               Clear specified bit area
 *  bitmap_find_next_zero_area(buf, len, pos, n, mask)  Find bit free area
 *  bitmap_find_next_zero_area_off(buf, len, pos, n, mask, mask_off)  as above
 *  bitmap_shift_right(dst, src, n, nbits)      *dst = *src >> n
 *  bitmap_shift_left(dst, src, n, nbits)       *dst = *src << n
 *  bitmap_cut(dst, src, first, n, nbits)       Cut n bits from first, copy rest
 *  bitmap_replace(dst, old, new, mask, nbits)  *dst = (*old & ~(*mask)) | (*new & *mask)
 *  bitmap_scatter(dst, src, mask, nbits)	*dst = map(dense, sparse)(src)
 *  bitmap_gather(dst, src, mask, nbits)	*dst = map(sparse, dense)(src)
 *  bitmap_remap(dst, src, old, new, nbits)     *dst = map(old, new)(src)
 *  bitmap_bitremap(oldbit, old, new, nbits)    newbit = map(old, new)(oldbit)
 *  bitmap_onto(dst, orig, relmap, nbits)       *dst = orig relative to relmap
 *  bitmap_fold(dst, orig, sz, nbits)           dst bits = orig bits mod sz
 *  bitmap_parse(buf, buflen, dst, nbits)       Parse bitmap dst from kernel buf
 *  bitmap_parse_user(ubuf, ulen, dst, nbits)   Parse bitmap dst from user buf
 *  bitmap_parselist(buf, dst, nbits)           Parse bitmap dst from kernel buf
 *  bitmap_parselist_user(buf, dst, nbits)      Parse bitmap dst from user buf
 *  bitmap_find_free_region(bitmap, bits, order)  Find and allocate bit region
 *  bitmap_release_region(bitmap, pos, order)   Free specified bit region
 *  bitmap_allocate_region(bitmap, pos, order)  Allocate specified bit region
 *  bitmap_from_arr32(dst, buf, nbits)          Copy nbits from u32[] buf to dst
 *  bitmap_from_arr64(dst, buf, nbits)          Copy nbits from u64[] buf to dst
 *  bitmap_to_arr32(buf, src, nbits)            Copy nbits from src to u32[] buf
 *  bitmap_to_arr64(buf, src, nbits)            Copy nbits from src to u64[] buf
 *  bitmap_get_value8(map, start)               Get 8bit value from map at start
 *  bitmap_set_value8(map, value, start)        Set 8bit value to map at start
 *  bitmap_read(map, start, nbits)              Read an nbits-sized value from
 *                                              map at start
 *  bitmap_write(map, value, start, nbits)      Write an nbits-sized value to
 *                                              map at start
 *
 * Note, bitmap_zero() and bitmap_fill() operate over the whole region of
 * unsigned longs, that is, bits past nbits and up to the unsigned long
 * boundary will be zeroed or filled as well. Consider using
 * bitmap_clear() or bitmap_set() for explicit zeroing or filling,
 * respectively.
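 *
 * For example, with nbits = 72 on a 64-bit kernel (an illustrative
 * sketch; the map name and sizes are assumptions, not part of the
 * API)::
 *
 *  DECLARE_BITMAP(map, 72);
 *
 *  bitmap_fill(map, 72);      sets all 128 bits of the two words
 *  bitmap_clear(map, 0, 72);  clears exactly bits 0..71; 72..127 stay set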
 */

/**
 * DOC: bitmap bitops
 *
 * Also the following operations in asm/bitops.h apply to bitmaps::
 *
 *  set_bit(bit, addr)                  *addr |= bit
 *  clear_bit(bit, addr)                *addr &= ~bit
 *  change_bit(bit, addr)               *addr ^= bit
 *  test_bit(bit, addr)                 Is bit set in *addr?
 *  test_and_set_bit(bit, addr)         Set bit and return old value
 *  test_and_clear_bit(bit, addr)       Clear bit and return old value
 *  test_and_change_bit(bit, addr)      Change bit and return old value
 *  find_first_zero_bit(addr, nbits)    Position first zero bit in *addr
 *  find_first_bit(addr, nbits)         Position first set bit in *addr
 *  find_next_zero_bit(addr, nbits, bit)
 *                                      Position next zero bit in *addr >= bit
 *  find_next_bit(addr, nbits, bit)     Position next set bit in *addr >= bit
 *  find_next_and_bit(addr1, addr2, nbits, bit)
 *                                      Same as find_next_bit, but in
 *                                      (*addr1 & *addr2)
 *
 */

/**
 * DOC: declare bitmap
 * The DECLARE_BITMAP(name, bits) macro, in linux/types.h, can be used
 * to declare an array named 'name' of just enough unsigned longs to
 * contain all bit positions from 0 to 'bits' - 1.
 */

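/*
 * For example (an illustrative sketch; 'pending' and its size are
 * assumptions):
 *
 *	DECLARE_BITMAP(pending, 256);
 *
 *	bitmap_zero(pending, 256);
 *	set_bit(42, pending);
 *	if (test_bit(42, pending))
 *		...
 */
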
/*
 * Allocation and deallocation of bitmap.
 * Provided in lib/bitmap.c to avoid circular dependency.
 */
unsigned long *bitmap_alloc(unsigned int nbits, gfp_t flags);
unsigned long *bitmap_zalloc(unsigned int nbits, gfp_t flags);
unsigned long *bitmap_alloc_node(unsigned int nbits, gfp_t flags, int node);
unsigned long *bitmap_zalloc_node(unsigned int nbits, gfp_t flags, int node);
void bitmap_free(const unsigned long *bitmap);

DEFINE_FREE(bitmap, unsigned long *, if (_T) bitmap_free(_T))

/* Managed variants of the above. */
unsigned long *devm_bitmap_alloc(struct device *dev,
				 unsigned int nbits, gfp_t flags);
unsigned long *devm_bitmap_zalloc(struct device *dev,
				  unsigned int nbits, gfp_t flags);

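/*
 * Example (an illustrative sketch): scope-bound allocation using the
 * DEFINE_FREE(bitmap, ...) hook above; 'nbits' is an assumption.
 *
 *	unsigned long *map __free(bitmap) = bitmap_zalloc(nbits, GFP_KERNEL);
 *
 *	if (!map)
 *		return -ENOMEM;
 *	...
 *	return 0;	map is freed automatically on scope exit
 */
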
/*
 * lib/bitmap.c provides these functions:
 */

bool __bitmap_equal(const unsigned long *bitmap1,
		    const unsigned long *bitmap2, unsigned int nbits);
bool __pure __bitmap_or_equal(const unsigned long *src1,
			      const unsigned long *src2,
			      const unsigned long *src3,
			      unsigned int nbits);
void __bitmap_complement(unsigned long *dst, const unsigned long *src,
			 unsigned int nbits);
void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
			  unsigned int shift, unsigned int nbits);
void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
			 unsigned int shift, unsigned int nbits);
void bitmap_cut(unsigned long *dst, const unsigned long *src,
		unsigned int first, unsigned int cut, unsigned int nbits);
bool __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
		  const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
		 const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
		  const unsigned long *bitmap2, unsigned int nbits);
bool __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
		     const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_replace(unsigned long *dst,
		      const unsigned long *old, const unsigned long *new,
		      const unsigned long *mask, unsigned int nbits);
bool __bitmap_intersects(const unsigned long *bitmap1,
			 const unsigned long *bitmap2, unsigned int nbits);
bool __bitmap_subset(const unsigned long *bitmap1,
		     const unsigned long *bitmap2, unsigned int nbits);
unsigned int __bitmap_weight(const unsigned long *bitmap, unsigned int nbits);
unsigned int __bitmap_weight_and(const unsigned long *bitmap1,
				 const unsigned long *bitmap2, unsigned int nbits);
unsigned int __bitmap_weight_andnot(const unsigned long *bitmap1,
				    const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_set(unsigned long *map, unsigned int start, int len);
void __bitmap_clear(unsigned long *map, unsigned int start, int len);

unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
					     unsigned long size,
					     unsigned long start,
					     unsigned int nr,
					     unsigned long align_mask,
					     unsigned long align_offset);

/**
 * bitmap_find_next_zero_area - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 *
 * The @align_mask should be one less than a power of 2; the effect is that
 * the bit offsets of all zero areas this function finds are multiples of
 * that power of 2. An @align_mask of 0 means no alignment is required.
 */
static __always_inline
unsigned long bitmap_find_next_zero_area(unsigned long *map,
					 unsigned long size,
					 unsigned long start,
					 unsigned int nr,
					 unsigned long align_mask)
{
	return bitmap_find_next_zero_area_off(map, size, start, nr,
					      align_mask, 0);
}

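/*
 * Example (an illustrative sketch): find 8 zero bits aligned to an
 * 8-bit boundary and claim them; 'map' and its size are assumptions.
 *
 *	unsigned long pos;
 *
 *	pos = bitmap_find_next_zero_area(map, 128, 0, 8, 7);
 *	if (pos >= 128)
 *		return -ENOSPC;
 *	bitmap_set(map, pos, 8);
 */
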
void bitmap_remap(unsigned long *dst, const unsigned long *src,
		const unsigned long *old, const unsigned long *new, unsigned int nbits);
int bitmap_bitremap(int oldbit,
		const unsigned long *old, const unsigned long *new, int bits);
void bitmap_onto(unsigned long *dst, const unsigned long *orig,
		const unsigned long *relmap, unsigned int bits);
void bitmap_fold(unsigned long *dst, const unsigned long *orig,
		unsigned int sz, unsigned int nbits);

#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))

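/*
 * For example, on a 64-bit kernel (worked out from the definitions
 * above, for a region ending at bit 68):
 *
 *	BITMAP_FIRST_WORD_MASK(68) == ~0UL << 4   start bit 68 is bit 4 of its word
 *	BITMAP_LAST_WORD_MASK(68)  == ~0UL >> 60  keeps the low 4 bits of the last word
 */
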
#define bitmap_size(nbits)	(ALIGN(nbits, BITS_PER_LONG) / BITS_PER_BYTE)

static __always_inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
{
	unsigned int len = bitmap_size(nbits);

	if (small_const_nbits(nbits))
		*dst = 0;
	else
		memset(dst, 0, len);
}

static __always_inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
{
	unsigned int len = bitmap_size(nbits);

	if (small_const_nbits(nbits))
		*dst = ~0UL;
	else
		memset(dst, 0xff, len);
}

static __always_inline
void bitmap_copy(unsigned long *dst, const unsigned long *src, unsigned int nbits)
{
	unsigned int len = bitmap_size(nbits);

	if (small_const_nbits(nbits))
		*dst = *src;
	else
		memcpy(dst, src, len);
}

/*
 * Copy bitmap and clear tail bits in last word.
 */
static __always_inline
void bitmap_copy_clear_tail(unsigned long *dst, const unsigned long *src, unsigned int nbits)
{
	bitmap_copy(dst, src, nbits);
	if (nbits % BITS_PER_LONG)
		dst[nbits / BITS_PER_LONG] &= BITMAP_LAST_WORD_MASK(nbits);
}

static inline void bitmap_copy_and_extend(unsigned long *to,
					  const unsigned long *from,
					  unsigned int count, unsigned int size)
{
	unsigned int copy = BITS_TO_LONGS(count);

	memcpy(to, from, copy * sizeof(long));
	if (count % BITS_PER_LONG)
		to[copy - 1] &= BITMAP_LAST_WORD_MASK(count);
	memset(to + copy, 0, bitmap_size(size) - copy * sizeof(long));
}

/*
 * On 32-bit systems bitmaps are represented as u32 arrays internally. On LE64
 * machines the order of hi and lo parts of numbers matches the bitmap
 * structure. In both cases no conversion is needed when copying data from/to
 * arrays of u32. But in the LE64 case, the typecast in bitmap_copy_clear_tail()
 * may lead to out-of-bounds access. To avoid that, both the LE and BE variants
 * of 64-bit architectures avoid bitmap_copy_clear_tail().
 */
#if BITS_PER_LONG == 64
void bitmap_from_arr32(unsigned long *bitmap, const u32 *buf,
							unsigned int nbits);
void bitmap_to_arr32(u32 *buf, const unsigned long *bitmap,
							unsigned int nbits);
#else
#define bitmap_from_arr32(bitmap, buf, nbits)			\
	bitmap_copy_clear_tail((unsigned long *) (bitmap),	\
			(const unsigned long *) (buf), (nbits))
#define bitmap_to_arr32(buf, bitmap, nbits)			\
	bitmap_copy_clear_tail((unsigned long *) (buf),		\
			(const unsigned long *) (bitmap), (nbits))
#endif

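/*
 * Example (an illustrative sketch; 'regs' and the widths are
 * assumptions): import 48 bits from a pair of u32 words.
 *
 *	u32 regs[2] = { 0xdeadbeef, 0xbaad };
 *	DECLARE_BITMAP(map, 48);
 *
 *	bitmap_from_arr32(map, regs, 48);
 *	bits 0..31 come from regs[0], bits 32..47 from the low half of regs[1]
 */
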
/*
 * On 64-bit systems bitmaps are represented as u64 arrays internally. So,
 * the conversion is not needed when copying data from/to arrays of u64.
 */
#if BITS_PER_LONG == 32
void bitmap_from_arr64(unsigned long *bitmap, const u64 *buf, unsigned int nbits);
void bitmap_to_arr64(u64 *buf, const unsigned long *bitmap, unsigned int nbits);
#else
#define bitmap_from_arr64(bitmap, buf, nbits)			\
	bitmap_copy_clear_tail((unsigned long *)(bitmap), (const unsigned long *)(buf), (nbits))
#define bitmap_to_arr64(buf, bitmap, nbits)			\
	bitmap_copy_clear_tail((unsigned long *)(buf), (const unsigned long *)(bitmap), (nbits))
#endif

static __always_inline
bool bitmap_and(unsigned long *dst, const unsigned long *src1,
		const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0;
	return __bitmap_and(dst, src1, src2, nbits);
}

static __always_inline
void bitmap_or(unsigned long *dst, const unsigned long *src1,
	       const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = *src1 | *src2;
	else
		__bitmap_or(dst, src1, src2, nbits);
}

static __always_inline
void bitmap_xor(unsigned long *dst, const unsigned long *src1,
		const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = *src1 ^ *src2;
	else
		__bitmap_xor(dst, src1, src2, nbits);
}

static __always_inline
bool bitmap_andnot(unsigned long *dst, const unsigned long *src1,
		   const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return (*dst = *src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
	return __bitmap_andnot(dst, src1, src2, nbits);
}

static __always_inline
void bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = ~(*src);
	else
		__bitmap_complement(dst, src, nbits);
}

#ifdef __LITTLE_ENDIAN
#define BITMAP_MEM_ALIGNMENT 8
#else
#define BITMAP_MEM_ALIGNMENT (8 * sizeof(unsigned long))
#endif
#define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1)

static __always_inline
bool bitmap_equal(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return !((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
	if (__builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
	    IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		return !memcmp(src1, src2, nbits / 8);
	return __bitmap_equal(src1, src2, nbits);
}

/**
 * bitmap_or_equal - Check whether the or of two bitmaps is equal to a third
 * @src1:	Pointer to bitmap 1
 * @src2:	Pointer to bitmap 2; will be or'ed with bitmap 1
 * @src3:	Pointer to bitmap 3. Compared to the result of *@src1 | *@src2
 * @nbits:	number of bits in each of these bitmaps
 *
 * Returns: True if (*@src1 | *@src2) == *@src3, false otherwise
 */
static __always_inline
bool bitmap_or_equal(const unsigned long *src1, const unsigned long *src2,
		     const unsigned long *src3, unsigned int nbits)
{
	if (!small_const_nbits(nbits))
		return __bitmap_or_equal(src1, src2, src3, nbits);

	return !(((*src1 | *src2) ^ *src3) & BITMAP_LAST_WORD_MASK(nbits));
}

static __always_inline
bool bitmap_intersects(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
	else
		return __bitmap_intersects(src1, src2, nbits);
}

static __always_inline
bool bitmap_subset(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return !((*src1 & ~(*src2)) & BITMAP_LAST_WORD_MASK(nbits));
	else
		return __bitmap_subset(src1, src2, nbits);
}

static __always_inline
bool bitmap_empty(const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return !(*src & BITMAP_LAST_WORD_MASK(nbits));

	return find_first_bit(src, nbits) == nbits;
}

static __always_inline
bool bitmap_full(const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return !(~(*src) & BITMAP_LAST_WORD_MASK(nbits));

	return find_first_zero_bit(src, nbits) == nbits;
}

static __always_inline
unsigned int bitmap_weight(const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
	return __bitmap_weight(src, nbits);
}

static __always_inline
unsigned long bitmap_weight_and(const unsigned long *src1,
				const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return hweight_long(*src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits));
	return __bitmap_weight_and(src1, src2, nbits);
}

static __always_inline
unsigned long bitmap_weight_andnot(const unsigned long *src1,
				   const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return hweight_long(*src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits));
	return __bitmap_weight_andnot(src1, src2, nbits);
}

static __always_inline
void bitmap_set(unsigned long *map, unsigned int start, unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__set_bit(start, map);
	else if (small_const_nbits(start + nbits))
		*map |= GENMASK(start + nbits - 1, start);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		memset((char *)map + start / 8, 0xff, nbits / 8);
	else
		__bitmap_set(map, start, nbits);
}

static __always_inline
void bitmap_clear(unsigned long *map, unsigned int start, unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__clear_bit(start, map);
	else if (small_const_nbits(start + nbits))
		*map &= ~GENMASK(start + nbits - 1, start);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		memset((char *)map + start / 8, 0, nbits / 8);
	else
		__bitmap_clear(map, start, nbits);
}

static __always_inline
void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
			unsigned int shift, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = (*src & BITMAP_LAST_WORD_MASK(nbits)) >> shift;
	else
		__bitmap_shift_right(dst, src, shift, nbits);
}

static __always_inline
void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
		       unsigned int shift, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = (*src << shift) & BITMAP_LAST_WORD_MASK(nbits);
	else
		__bitmap_shift_left(dst, src, shift, nbits);
}

static __always_inline
void bitmap_replace(unsigned long *dst,
		    const unsigned long *old,
		    const unsigned long *new,
		    const unsigned long *mask,
		    unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = (*old & ~(*mask)) | (*new & *mask);
	else
		__bitmap_replace(dst, old, new, mask, nbits);
}

/**
 * bitmap_scatter - Scatter a bitmap according to the given mask
 * @dst: scattered bitmap
 * @src: gathered bitmap
 * @mask: mask representing bits to assign to in the scattered bitmap
 * @nbits: number of bits in each of these bitmaps
 *
 * Scatters bitmap with sequential bits according to the given @mask.
 *
 * Example:
 * If @src bitmap = 0x005a, with @mask = 0x1313, @dst will be 0x0302.
 *
 * Or in binary form
 * @src			@mask			@dst
 * 0000000001011010	0001001100010011	0000001100000010
 *
 * (Bits 0, 1, 2, 3, 4, 5 are copied to the bits 0, 1, 4, 8, 9, 12)
 *
 * A more 'visual' description of the operation::
 *
 *	src:  0000000001011010
 *	                ||||||
 *	         +------+|||||
 *	         |  +----+||||
 *	         |  |+----+|||
 *	         |  ||   +-+||
 *	         |  ||   |  ||
 *	mask: ...v..vv...v..vv
 *	      ...0..11...0..10
 *	dst:  0000001100000010
 *
 * A relationship exists between bitmap_scatter() and bitmap_gather().
 * bitmap_gather() can be seen as the 'reverse' bitmap_scatter() operation.
 * See bitmap_gather() for details related to this relationship.
 */
static __always_inline
void bitmap_scatter(unsigned long *dst, const unsigned long *src,
		    const unsigned long *mask, unsigned int nbits)
{
	unsigned int n = 0;
	unsigned int bit;

	bitmap_zero(dst, nbits);

	for_each_set_bit(bit, mask, nbits)
		__assign_bit(bit, dst, test_bit(n++, src));
}

/**
 * bitmap_gather - Gather a bitmap according to given mask
 * @dst: gathered bitmap
 * @src: scattered bitmap
 * @mask: mask representing bits to extract from in the scattered bitmap
 * @nbits: number of bits in each of these bitmaps
 *
 * Gathers bitmap with sparse bits according to the given @mask.
 *
 * Example:
 * If @src bitmap = 0x0302, with @mask = 0x1313, @dst will be 0x001a.
 *
 * Or in binary form
 * @src			@mask			@dst
 * 0000001100000010	0001001100010011	0000000000011010
 *
 * (Bits 0, 1, 4, 8, 9, 12 are copied to the bits 0, 1, 2, 3, 4, 5)
 *
 * A more 'visual' description of the operation::
 *
 *	mask: ...v..vv...v..vv
 *	src:  0000001100000010
 *	         ^  ^^   ^   0
 *	         |  ||   |  10
 *	         |  ||   > 010
 *	         |  |+--> 1010
 *	         |  +--> 11010
 *	         +----> 011010
 *	dst:  0000000000011010
 *
 * A relationship exists between bitmap_gather() and bitmap_scatter(). See
 * bitmap_scatter() for the bitmap scatter detailed operations.
 * Suppose 'scattered' was computed using bitmap_scatter(scattered, src,
 * mask, n). The operation bitmap_gather(result, scattered, mask, n) leads
 * to a result equal or equivalent to src.
 *
 * The result can be 'equivalent' because bitmap_scatter() and bitmap_gather()
 * are not bijective.
 * The result and src values are equivalent in the sense that a call to
 * bitmap_scatter(res, src, mask, n) and a call to
 * bitmap_scatter(res, result, mask, n) will lead to the same res value.
 */
static __always_inline
void bitmap_gather(unsigned long *dst, const unsigned long *src,
		   const unsigned long *mask, unsigned int nbits)
{
	unsigned int n = 0;
	unsigned int bit;

	bitmap_zero(dst, nbits);

	for_each_set_bit(bit, mask, nbits)
		__assign_bit(n++, dst, test_bit(bit, src));
}

static __always_inline
void bitmap_next_set_region(unsigned long *bitmap, unsigned int *rs,
			    unsigned int *re, unsigned int end)
{
	*rs = find_next_bit(bitmap, end, *rs);
	*re = find_next_zero_bit(bitmap, end, *rs + 1);
}

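/*
 * Example (an illustrative sketch): visit every contiguous run of set
 * bits in 'map'; the names are assumptions.
 *
 *	unsigned int rs = 0, re;
 *
 *	while (rs < nbits) {
 *		bitmap_next_set_region(map, &rs, &re, nbits);
 *		if (rs >= nbits)
 *			break;
 *		... bits [rs, re) are all set ...
 *		rs = re;
 *	}
 */
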
/**
 * bitmap_release_region - release allocated bitmap region
 *	@bitmap: array of unsigned longs corresponding to the bitmap
 *	@pos: beginning of bit region to release
 *	@order: region size (log base 2 of number of bits) to release
 *
 * This is the complement to bitmap_find_free_region() and releases
 * the found region (by clearing it in the bitmap).
 */
static __always_inline
void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
{
	bitmap_clear(bitmap, pos, BIT(order));
}

/**
 * bitmap_allocate_region - allocate bitmap region
 *	@bitmap: array of unsigned longs corresponding to the bitmap
 *	@pos: beginning of bit region to allocate
 *	@order: region size (log base 2 of number of bits) to allocate
 *
 * Allocate (set bits in) a specified region of a bitmap.
 *
 * Returns: 0 on success, or %-EBUSY if specified region wasn't
 * free (not all bits were zero).
 */
static __always_inline
int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
{
	unsigned int len = BIT(order);

	if (find_next_bit(bitmap, pos + len, pos) < pos + len)
		return -EBUSY;
	bitmap_set(bitmap, pos, len);
	return 0;
}

/**
 * bitmap_find_free_region - find a contiguous aligned mem region
 *	@bitmap: array of unsigned longs corresponding to the bitmap
 *	@bits: number of bits in the bitmap
 *	@order: region size (log base 2 of number of bits) to find
 *
 * Find a region of free (zero) bits in a @bitmap of @bits bits and
 * allocate them (set them to one).  Only consider regions of length
 * a power (@order) of two, aligned to that power of two, which
 * makes the search algorithm much faster.
 *
 * Returns: the bit offset in bitmap of the allocated region,
 * or -errno on failure.
 */
static __always_inline
int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
{
	unsigned int pos, end;		/* scans bitmap by regions of size order */

	for (pos = 0; (end = pos + BIT(order)) <= bits; pos = end) {
		if (!bitmap_allocate_region(bitmap, pos, order))
			return pos;
	}
	return -ENOMEM;
}

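/*
 * Example (an illustrative sketch): claim a naturally aligned run of
 * four bits (order 2), use it, then return it; 'map' and 'nbits' are
 * assumptions.
 *
 *	int pos = bitmap_find_free_region(map, nbits, 2);
 *
 *	if (pos < 0)
 *		return pos;
 *	...
 *	bitmap_release_region(map, pos, 2);
 */
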
/**
 * BITMAP_FROM_U64() - Represent u64 value in the format suitable for bitmap.
 * @n: u64 value
 *
 * Linux bitmaps are internally arrays of unsigned longs, i.e. 32-bit
 * integers in 32-bit environment, and 64-bit integers in 64-bit one.
 *
 * There are four combinations of endianness and length of the word in linux
 * ABIs: LE64, BE64, LE32 and BE32.
 *
 * On 64-bit kernels 64-bit LE and BE numbers are naturally ordered in
 * bitmaps and therefore don't require any special handling.
 *
 * On 32-bit kernels 32-bit LE ABI orders lo word of 64-bit number in memory
 * prior to hi, and 32-bit BE orders hi word prior to lo. The bitmap on the
 * other hand is represented as an array of 32-bit words and the position of
 * bit N may therefore be calculated as: word #(N/32) and bit #(N%32) in that
 * word.  For example, bit #42 is located at 10th position of 2nd word.
 * It matches 32-bit LE ABI, and we can simply let the compiler store 64-bit
 * values in memory as it usually does. But for BE we need to swap hi and lo
 * words manually.
 *
 * With all that, the macro BITMAP_FROM_U64() does explicit reordering of hi and
 * lo parts of u64.  For LE32 it does nothing, and for BE environment it swaps
 * hi and lo words, as is expected by bitmap.
 */
#if __BITS_PER_LONG == 64
#define BITMAP_FROM_U64(n) (n)
#else
#define BITMAP_FROM_U64(n) ((unsigned long) ((u64)(n) & ULONG_MAX)), \
				((unsigned long) ((u64)(n) >> 32))
#endif

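/*
 * Example (an illustrative sketch): a 64-bit constant as a bitmap
 * initializer that is laid out correctly on both 32-bit and 64-bit
 * kernels; the name 'mask' is an assumption.
 *
 *	static const unsigned long mask[] = {
 *		BITMAP_FROM_U64(0xdeadbeefdeadbeefULL),
 *	};
 */
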
/**
 * bitmap_from_u64 - Check and swap words within u64.
 *  @mask: source bitmap
 *  @dst:  destination bitmap
 *
 * On 32-bit big-endian kernels, reading a u64 mask via
 * ``((u32 *)(&val))[0]`` yields the upper 32 bits rather than the
 * expected lower 32 bits of the u64, so the words must be swapped.
 */
static __always_inline void bitmap_from_u64(unsigned long *dst, u64 mask)
{
	bitmap_from_arr64(dst, &mask, 64);
}

/**
 * bitmap_read - read a value of n-bits from the memory region
 * @map: address to the bitmap memory region
 * @start: bit offset of the n-bit value
 * @nbits: size of value in bits, nonzero, up to BITS_PER_LONG
 *
 * Returns: value of @nbits bits located at the @start bit offset within the
 * @map memory region. For @nbits = 0 and @nbits > BITS_PER_LONG the return
 * value is undefined.
 */
static __always_inline
unsigned long bitmap_read(const unsigned long *map, unsigned long start, unsigned long nbits)
{
	size_t index = BIT_WORD(start);
	unsigned long offset = start % BITS_PER_LONG;
	unsigned long space = BITS_PER_LONG - offset;
	unsigned long value_low, value_high;

	if (unlikely(!nbits || nbits > BITS_PER_LONG))
		return 0;

	if (space >= nbits)
		return (map[index] >> offset) & BITMAP_LAST_WORD_MASK(nbits);

	value_low = map[index] & BITMAP_FIRST_WORD_MASK(start);
	value_high = map[index + 1] & BITMAP_LAST_WORD_MASK(start + nbits);
	return (value_low >> offset) | (value_high << space);
}

/**
 * bitmap_write - write n-bit value within a memory region
 * @map: address to the bitmap memory region
 * @value: value to write, clamped to nbits
 * @start: bit offset of the n-bit value
 * @nbits: size of value in bits, nonzero, up to BITS_PER_LONG.
 *
 * bitmap_write() behaves as-if implemented as @nbits calls of __assign_bit(),
 * i.e. bits beyond @nbits are ignored:
 *
 *   for (bit = 0; bit < nbits; bit++)
 *           __assign_bit(start + bit, map, value & BIT(bit));
 *
 * For @nbits == 0 and @nbits > BITS_PER_LONG no writes are performed.
 */
static __always_inline
void bitmap_write(unsigned long *map, unsigned long value,
		  unsigned long start, unsigned long nbits)
{
	size_t index;
	unsigned long offset;
	unsigned long space;
	unsigned long mask;
	bool fit;

	if (unlikely(!nbits || nbits > BITS_PER_LONG))
		return;

	mask = BITMAP_LAST_WORD_MASK(nbits);
	value &= mask;
	offset = start % BITS_PER_LONG;
	space = BITS_PER_LONG - offset;
	fit = space >= nbits;
	index = BIT_WORD(start);

	map[index] &= (fit ? (~(mask << offset)) : ~BITMAP_FIRST_WORD_MASK(start));
	map[index] |= value << offset;
	if (fit)
		return;

	map[index + 1] &= BITMAP_FIRST_WORD_MASK(start + nbits);
	map[index + 1] |= (value >> space);
}

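/*
 * Example (an illustrative sketch): pack two hypothetical 5-bit fields
 * into one bitmap and read one back; the names and layout are
 * assumptions.
 *
 *	DECLARE_BITMAP(map, 10);
 *	unsigned long val;
 *
 *	bitmap_write(map, 0x15, 0, 5);	field A at bits 0..4
 *	bitmap_write(map, 0x0a, 5, 5);	field B at bits 5..9
 *	val = bitmap_read(map, 5, 5);	val == 0x0a
 */
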
#define bitmap_get_value8(map, start)			\
	bitmap_read(map, start, BITS_PER_BYTE)
#define bitmap_set_value8(map, value, start)		\
	bitmap_write(map, value, start, BITS_PER_BYTE)

#endif /* __ASSEMBLY__ */

#endif /* __LINUX_BITMAP_H */