/*
 * Copyright (c) 2015-2016 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 *
 * Bit manipulation functions
 */

#ifndef __BITS_H__
#define __BITS_H__

#ifdef KERNEL
#include <kern/assert.h>
#include <kern/kalloc.h>
#else
#include <assert.h>
#include <stdlib.h>
/* Userspace shims: the kalloc flags argument (e.g. Z_WAITOK_ZERO) is ignored;
 * calloc() provides the zero-fill that flag implies. */
#define kalloc_data(x, y) calloc(1, x)
#define kfree_data(x, y) free(x)
#endif
#include <stdbool.h>
#include <stdint.h>
#include <stdatomic.h>
#include <string.h>

#ifndef __DARWIN_UINT
typedef unsigned int uint;
#define __DARWIN_UINT
#endif

#define BIT(b) (1ULL << (b))

#define mask(width) (width >= 64 ? -1ULL : (BIT(width) - 1))
#define extract(x, shift, width) ((((uint64_t)(x)) >> (shift)) & mask(width))
#define bits(x, hi, lo) extract((x), (lo), (hi) - (lo) + 1)
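
/*
 * Example (illustrative): extracting a bit field from a word.
 *   uint64_t word = 0x00F0;
 *   extract(word, 4, 4);   // 4-bit field starting at bit 4  -> 0xF
 *   bits(word, 7, 4);      // same field, named by [hi:lo]   -> 0xF
 */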

#define bit_set(x, b) ((x) |= BIT(b))
#define bit_clear(x, b) ((x) &= ~BIT(b))
#define bit_test(x, b) ((bool)((x) & BIT(b)))

inline static uint64_t
bit_ror64(uint64_t bitmap, uint n)
{
#if defined(__arm64__)
	uint64_t result;
	uint64_t _n = (uint64_t)n;
	asm volatile ("ror %0, %1, %2" : "=r" (result) : "r" (bitmap), "r" (_n));
	return result;
#else
	n = n & 63;
	/* mask the left-shift count as well so that n == 0 does not shift by 64 (undefined behavior) */
	return (bitmap >> n) | (bitmap << ((64 - n) & 63));
#endif
}

inline static uint64_t
bit_rol64(uint64_t bitmap, uint n)
{
#if defined(__arm64__)
	return bit_ror64(bitmap, 64U - n);
#else
	n = n & 63;
	/* mask the right-shift count as well so that n == 0 does not shift by 64 (undefined behavior) */
	return (bitmap << n) | (bitmap >> ((64 - n) & 63));
#endif
}
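
/*
 * Example (illustrative): rotations are modulo 64 and lossless.
 *   bit_ror64(0x1ULL, 1);                  // -> 0x8000000000000000
 *   bit_rol64(0x8000000000000000ULL, 1);   // -> 0x1
 */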

/* Non-atomically clears the bit and returns whether the bit value was changed */
#define bit_clear_if_set(bitmap, bit) \
({ \
	int _n = (bit); \
	__auto_type _map = &(bitmap); \
	bool _bit_is_set = bit_test(*_map, _n); \
	bit_clear(*_map, _n); \
	_bit_is_set; \
})

/* Non-atomically sets the bit and returns whether the bit value was changed */
#define bit_set_if_clear(bitmap, bit) \
({ \
	int _n = (bit); \
	__auto_type _map = &(bitmap); \
	bool _bit_is_set = bit_test(*_map, _n); \
	bit_set(*_map, _n); \
	!_bit_is_set; \
})
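
/*
 * Example (illustrative):
 *   uint64_t flags = 0x5;          // bits 0 and 2 set
 *   bit_clear_if_set(flags, 2);    // true,  flags becomes 0x1
 *   bit_set_if_clear(flags, 3);    // true,  flags becomes 0x9
 *   bit_set_if_clear(flags, 0);    // false, bit 0 was already set
 */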

/* Returns the most significant '1' bit, or -1 if all zeros */
inline static int
bit_first(uint64_t bitmap)
{
#if defined(__arm64__)
	int64_t result;
	asm volatile ("clz %0, %1" : "=r" (result) : "r" (bitmap));
	return 63 - (int)result;
#else
	return (bitmap == 0) ? -1 : 63 - __builtin_clzll(bitmap);
#endif
}
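
/*
 * Example (illustrative):
 *   bit_first(0x90);   // -> 7   (0x90 == 0b10010000)
 *   bit_first(0x1);    // -> 0
 *   bit_first(0x0);    // -> -1
 */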

inline static int
__bit_next(uint64_t bitmap, int previous_bit)
{
	uint64_t mask = previous_bit ? mask(previous_bit) : ~0ULL;

	return bit_first(bitmap & mask);
}

/* Returns the most significant '1' bit that is less significant than previous_bit,
 * or -1 if no such bit exists.
 */
inline static int
bit_next(uint64_t bitmap, int previous_bit)
{
	if (previous_bit == 0) {
		return -1;
	} else {
		return __bit_next(bitmap, previous_bit);
	}
}
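
/*
 * Example (illustrative): walking set bits from high to low.
 *   bit_next(0x90, 7);   // -> 4, the next '1' below bit 7
 *   bit_next(0x90, 4);   // -> -1, no '1' below bit 4
 */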

/* Returns the least significant '1' bit, or -1 if all zeros */
inline static int
lsb_first(uint64_t bitmap)
{
	return __builtin_ffsll((long long)bitmap) - 1;
}

/* Returns the least significant '1' bit that is more significant than previous_bit,
 * or -1 if no such bit exists.
 * previous_bit may be -1, in which case this is equivalent to lsb_first()
 */
inline static int
lsb_next(uint64_t bitmap, int previous_bit)
{
	uint64_t mask = mask(previous_bit + 1);

	return lsb_first(bitmap & ~mask);
}
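
/*
 * Example (illustrative): walking set bits from low to high.
 *   lsb_first(0x90);     // -> 4
 *   lsb_next(0x90, 4);   // -> 7
 *   lsb_next(0x90, 7);   // -> -1
 */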

inline static int
bit_count(uint64_t x)
{
	return __builtin_popcountll(x);
}

/* Returns the exponent (log2) of the highest power of 2 that is <= n, or -1 if n == 0 */
inline static int
bit_floor(uint64_t n)
{
	return bit_first(n);
}

/* Returns the exponent (log2) of the lowest power of 2 that is >= n, or -1 if n == 0 */
inline static int
bit_ceiling(uint64_t n)
{
	if (n == 0) {
		return -1;
	}
	return bit_first(n - 1) + 1;
}

/* If n is a power of 2, bit_log2(n) == bit_floor(n) == bit_ceiling(n) */
#define bit_log2(n) bit_floor((uint64_t)(n))
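
/*
 * Example (illustrative): these return exponents, not the powers themselves.
 *   bit_floor(9);     // -> 3   (2^3 == 8  <= 9)
 *   bit_ceiling(9);   // -> 4   (2^4 == 16 >= 9)
 *   bit_log2(8);      // -> 3   (for a power of 2, floor == ceiling == log2)
 */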

typedef uint64_t bitmap_t;

inline static bool
atomic_bit_set(_Atomic bitmap_t *__single map, int n, int mem_order)
{
	bitmap_t prev;
	prev = __c11_atomic_fetch_or(map, BIT(n), mem_order);
	return bit_test(prev, n);
}

inline static bool
atomic_bit_clear(_Atomic bitmap_t *__single map, int n, int mem_order)
{
	bitmap_t prev;
	prev = __c11_atomic_fetch_and(map, ~BIT(n), mem_order);
	return bit_test(prev, n);
}
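
/*
 * Example (illustrative): claiming a slot with the atomic bit operations.
 *   static _Atomic bitmap_t busy;
 *   if (!atomic_bit_set(&busy, 3, memory_order_acq_rel)) {
 *           // bit 3 was clear before this call; the caller now owns slot 3
 *   }
 */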

#define BITMAP_LEN(n) (((uint)(n) + 63) >> 6) /* Round to 64bit bitmap_t */
#define BITMAP_SIZE(n) (size_t)(BITMAP_LEN(n) << 3) /* Round to 64bit bitmap_t, then convert to bytes */
#define bitmap_bit(n) bits(n, 5, 0)
#define bitmap_index(n) bits(n, 63, 6)
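
/*
 * Example (illustrative): bit n lives at word bitmap_index(n), bit bitmap_bit(n).
 *   BITMAP_LEN(70);     // -> 2 words
 *   BITMAP_SIZE(70);    // -> 16 bytes
 *   bitmap_index(70);   // -> 1
 *   bitmap_bit(70);     // -> 6   (bit 70 is bit 6 of map[1])
 */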

inline static bitmap_t *__header_indexable
bitmap_zero(bitmap_t *__header_indexable map, uint nbits)
{
	memset((void *)map, 0, BITMAP_SIZE(nbits));
	return map;
}

inline static bitmap_t *__header_indexable
bitmap_full(bitmap_t *__header_indexable map, uint nbits)
{
	uint i;

	for (i = 0; i < bitmap_index(nbits - 1); i++) {
		map[i] = ~((uint64_t)0);
	}

	uint nbits_filled = i * 64;

	if (nbits > nbits_filled) {
		map[i] = mask(nbits - nbits_filled);
	}

	return map;
}

inline static bool
bitmap_is_empty(bitmap_t *__header_indexable map, uint nbits)
{
	for (uint i = 0; i < BITMAP_LEN(nbits); i++) {
		if (map[i]) {
			return false;
		}
	}

	return true;
}

inline static bool
bitmap_is_full(bitmap_t *__header_indexable map, uint nbits)
{
	uint i;

	for (i = 0; i < bitmap_index(nbits - 1); i++) {
		if (map[i] != ~((uint64_t)0)) {
			return false;
		}
	}

	uint nbits_filled = i * 64;

	if (nbits > nbits_filled) {
		return map[i] == mask(nbits - nbits_filled);
	}

	return true;
}
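
/*
 * Worked example (illustrative): for nbits == 70, bitmap_full() sets map[0] to
 * ~0ULL and map[1] to mask(6) == 0x3F, so only the 70 in-range bits are set and
 * bitmap_is_full(map, 70) returns true.
 */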

inline static bitmap_t *__header_indexable
bitmap_alloc(uint nbits)
{
	assert(nbits > 0);
	return (bitmap_t *)kalloc_data(BITMAP_SIZE(nbits), Z_WAITOK_ZERO);
}

inline static void
bitmap_free(bitmap_t *map, uint nbits)
{
	assert(nbits > 0);
	kfree_data(map, BITMAP_SIZE(nbits));
}
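
/*
 * Example (illustrative): typical allocate/set/test/free sequence.
 *   bitmap_t *map = bitmap_alloc(128);   // two zero-filled 64-bit words
 *   bitmap_set(map, 70);
 *   assert(bitmap_test(map, 70));
 *   bitmap_free(map, 128);
 */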

inline static void
bitmap_set(bitmap_t *__header_indexable map, uint n)
{
	bit_set(map[bitmap_index(n)], bitmap_bit(n));
}

inline static void
bitmap_clear(bitmap_t *__header_indexable map, uint n)
{
	bit_clear(map[bitmap_index(n)], bitmap_bit(n));
}

inline static bool
atomic_bitmap_set(_Atomic bitmap_t *__header_indexable map, uint n, int mem_order)
{
	return atomic_bit_set(&map[bitmap_index(n)], bitmap_bit(n), mem_order);
}

inline static bool
atomic_bitmap_clear(_Atomic bitmap_t *__header_indexable map, uint n, int mem_order)
{
	return atomic_bit_clear(&map[bitmap_index(n)], bitmap_bit(n), mem_order);
}

inline static bool
bitmap_test(const bitmap_t *__header_indexable map, uint n)
{
	return bit_test(map[bitmap_index(n)], bitmap_bit(n));
}

inline static int
bitmap_first(bitmap_t *__header_indexable map, uint nbits)
{
	for (int i = (int)bitmap_index(nbits - 1); i >= 0; i--) {
		if (map[i] == 0) {
			continue;
		}
		return (i << 6) + bit_first(map[i]);
	}

	return -1;
}

inline static void
bitmap_not(
	bitmap_t *__header_indexable out,
	const bitmap_t *__header_indexable in,
	uint nbits)
{
	uint i;

	for (i = 0; i < bitmap_index(nbits - 1); i++) {
		out[i] = ~in[i];
	}

	uint nbits_complete = i * 64;

	if (nbits > nbits_complete) {
		out[i] = ~in[i] & mask(nbits - nbits_complete);
	}
}

inline static void
bitmap_and(
	bitmap_t *__header_indexable out,
	const bitmap_t *__header_indexable in1,
	const bitmap_t *__header_indexable in2,
	uint nbits)
{
	for (uint i = 0; i <= bitmap_index(nbits - 1); i++) {
		out[i] = in1[i] & in2[i];
	}
}

inline static void
bitmap_and_not(
	bitmap_t *__header_indexable out,
	const bitmap_t *__header_indexable in1,
	const bitmap_t *__header_indexable in2,
	uint nbits)
{
	uint i;

	for (i = 0; i <= bitmap_index(nbits - 1); i++) {
		out[i] = in1[i] & ~in2[i];
	}
}

inline static void
bitmap_or(
	bitmap_t *__header_indexable out,
	const bitmap_t *__header_indexable in1,
	const bitmap_t *__header_indexable in2,
	uint nbits)
{
	for (uint i = 0; i <= bitmap_index(nbits - 1); i++) {
		out[i] = in1[i] | in2[i];
	}
}

inline static bool
bitmap_equal(
	const bitmap_t *__header_indexable in1,
	const bitmap_t *__header_indexable in2,
	uint nbits)
{
	for (uint i = 0; i <= bitmap_index(nbits - 1); i++) {
		if (in1[i] != in2[i]) {
			return false;
		}
	}

	return true;
}
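
/*
 * Example (illustrative): set difference over two 128-bit bitmaps.
 *   bitmap_t a[BITMAP_LEN(128)], b[BITMAP_LEN(128)], diff[BITMAP_LEN(128)];
 *   bitmap_zero(a, 128); bitmap_zero(b, 128);
 *   bitmap_set(a, 3); bitmap_set(a, 100);
 *   bitmap_set(b, 100);
 *   bitmap_and_not(diff, a, b, 128);   // diff == a \ b: only bit 3 remains set
 */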

inline static int
bitmap_and_not_mask_first(
	bitmap_t *__header_indexable map,
	const bitmap_t *__header_indexable mask,
	uint nbits)
{
	for (int i = (int)bitmap_index(nbits - 1); i >= 0; i--) {
		if ((map[i] & ~mask[i]) == 0) {
			continue;
		}
		return (i << 6) + bit_first(map[i] & ~mask[i]);
	}

	return -1;
}

inline static int
bitmap_lsb_first(const bitmap_t *__header_indexable map, uint nbits)
{
	for (uint i = 0; i <= bitmap_index(nbits - 1); i++) {
		if (map[i] == 0) {
			continue;
		}
		return (int)((i << 6) + (uint32_t)lsb_first(map[i]));
	}

	return -1;
}

inline static int
bitmap_next(const bitmap_t *__header_indexable map, uint prev)
{
	if (prev == 0) {
		return -1;
	}

	int64_t i = bitmap_index(prev - 1);
	int res = __bit_next(map[i], bits(prev, 5, 0));
	if (res >= 0) {
		return (int)(res + (i << 6));
	}

	for (i = i - 1; i >= 0; i--) {
		if (map[i] == 0) {
			continue;
		}
		return (int)((i << 6) + bit_first(map[i]));
	}

	return -1;
}
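
/*
 * Example (illustrative): iterating all set bits from most to least significant.
 *   for (int b = bitmap_first(map, nbits); b >= 0; b = bitmap_next(map, (uint)b)) {
 *           // visit bit b
 *   }
 */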

inline static int
bitmap_lsb_next(const bitmap_t *__header_indexable map, uint nbits, uint prev)
{
	if ((prev + 1) >= nbits) {
		return -1;
	}

	uint64_t i = bitmap_index(prev + 1);
	uint b = bits((prev + 1), 5, 0) - 1;
	int32_t res = lsb_next((uint64_t)map[i], (int)b);
	if (res >= 0) {
		return (int)((uint64_t)res + (i << 6));
	}

	for (i = i + 1; i <= bitmap_index(nbits - 1); i++) {
		if (map[i] == 0) {
			continue;
		}
		return (int)((i << 6) + (uint64_t)lsb_first(map[i]));
	}

	return -1;
}
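
/*
 * Example (illustrative): iterating all set bits from least to most significant.
 *   for (int b = bitmap_lsb_first(map, nbits); b >= 0;
 *       b = bitmap_lsb_next(map, nbits, (uint)b)) {
 *           // visit bit b
 *   }
 */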

#endif /* __BITS_H__ */