1 | #ifndef _OS_REFCNT_INTERNAL_H |
2 | #define _OS_REFCNT_INTERNAL_H |
3 | |
/*
 * A reference count. On debug builds it is optionally associated with a
 * group (struct os_refgrp) that aggregates statistics for all refcounts
 * that belong to it.
 */
struct os_refcnt {
	os_ref_atomic_t ref_count;
#if OS_REFCNT_DEBUG
	struct os_refgrp *ref_group;
#endif
};
10 | |
11 | #if OS_REFCNT_DEBUG |
12 | |
/* Flag values for struct os_refgrp::grp_flags. */
__options_closed_decl(os_refgrp_flags_t, uint64_t, {
	OS_REFGRP_F_NONE = 0x0,
	OS_REFGRP_F_ALWAYS_ENABLED = 0x1, /* presumably forces tracking for this group — TODO confirm */
});
17 | |
/*
 * A named group of reference counts, used for debugging/accounting
 * (only defined when OS_REFCNT_DEBUG is set).
 */
struct os_refgrp {
	const char *grp_name;
	os_ref_atomic_t grp_children; /* number of refcount objects in group */
	os_ref_atomic_t grp_count; /* current reference count of group */
	_Atomic uint64_t grp_retain_total; /* cumulative retains */
	_Atomic uint64_t grp_release_total; /* cumulative releases */
	struct os_refgrp *grp_parent; /* parent group, if any */
	void *grp_log; /* refcount logging context */
	uint64_t grp_flags; /* os_refgrp_flags_t values (set by os_refgrp_initializer) */
};
28 | |
29 | #endif |
30 | |
/* Static initializers for refcounts that are not set up via os_ref_init_count(). */
# define OS_REF_ATOMIC_INITIALIZER 0
#if OS_REFCNT_DEBUG
# define OS_REF_INITIALIZER { .ref_count = OS_REF_ATOMIC_INITIALIZER, .ref_group = NULL }
#else
# define OS_REF_INITIALIZER { .ref_count = OS_REF_ATOMIC_INITIALIZER }
#endif
37 | |
38 | __BEGIN_DECLS |
39 | |
/*
 * Expands to `x` on debug builds and to `y` otherwise. Because this is a
 * macro, the unused argument is never evaluated (so `x` may reference the
 * ref_group field that only exists under OS_REFCNT_DEBUG).
 */
#if OS_REFCNT_DEBUG
# define os_ref_if_debug(x, y) x
#else
# define os_ref_if_debug(x, y) y
#endif
45 | |
/*
 * Externally visible refcount primitives. The group argument may be NULL
 * (callers pass NULL when refcount debugging is disabled).
 */
void os_ref_init_count_external(os_ref_atomic_t *, struct os_refgrp *, os_ref_count_t);
void os_ref_retain_external(os_ref_atomic_t *, struct os_refgrp *);
void os_ref_retain_locked_external(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_external(os_ref_atomic_t *, struct os_refgrp *,
    memory_order release_order, memory_order dealloc_order);
os_ref_count_t os_ref_release_relaxed_external(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_barrier_external(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_locked_external(os_ref_atomic_t *, struct os_refgrp *);
bool os_ref_retain_try_external(os_ref_atomic_t *, struct os_refgrp *);
55 | |
#if XNU_KERNEL_PRIVATE
/* Kernel-private implementations of the refcount primitives. */
void os_ref_init_count_internal(os_ref_atomic_t *, struct os_refgrp *, os_ref_count_t);
void os_ref_retain_internal(os_ref_atomic_t *, struct os_refgrp *);
void os_ref_retain_floor_internal(os_ref_atomic_t *, os_ref_count_t, struct os_refgrp *);
os_ref_count_t os_ref_release_relaxed_internal(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_barrier_internal(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_internal(os_ref_atomic_t *, struct os_refgrp *,
    memory_order release_order, memory_order dealloc_order);
bool os_ref_retain_try_internal(os_ref_atomic_t *, struct os_refgrp *);
bool os_ref_retain_floor_try_internal(os_ref_atomic_t *, os_ref_count_t, struct os_refgrp *);
void os_ref_retain_locked_internal(os_ref_atomic_t *, struct os_refgrp *);
void os_ref_retain_floor_locked_internal(os_ref_atomic_t *, os_ref_count_t, struct os_refgrp *);
os_ref_count_t os_ref_release_locked_internal(os_ref_atomic_t *, struct os_refgrp *);
#else
/* For now, the internal and external variants are identical */
#define os_ref_init_count_internal os_ref_init_count_external
#define os_ref_retain_internal os_ref_retain_external
#define os_ref_retain_locked_internal os_ref_retain_locked_external
#define os_ref_release_internal os_ref_release_external
#define os_ref_release_barrier_internal os_ref_release_barrier_external
#define os_ref_release_relaxed_internal os_ref_release_relaxed_external
#define os_ref_release_locked_internal os_ref_release_locked_external
#define os_ref_retain_try_internal os_ref_retain_try_external
#endif
80 | |
81 | static inline void |
82 | os_ref_init_count(struct os_refcnt *rc, struct os_refgrp * __unused grp, os_ref_count_t count) |
83 | { |
84 | #if OS_REFCNT_DEBUG |
85 | rc->ref_group = grp; |
86 | #endif |
87 | os_ref_init_count_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL), count); |
88 | } |
89 | |
90 | static inline void |
91 | os_ref_retain(struct os_refcnt *rc) |
92 | { |
93 | os_ref_retain_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL)); |
94 | } |
95 | |
96 | static inline os_ref_count_t |
97 | os_ref_release_locked(struct os_refcnt *rc) |
98 | { |
99 | return os_ref_release_locked_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL)); |
100 | } |
101 | |
102 | static inline void |
103 | os_ref_retain_locked(struct os_refcnt *rc) |
104 | { |
105 | os_ref_retain_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL)); |
106 | } |
107 | |
108 | static inline bool |
109 | os_ref_retain_try(struct os_refcnt *rc) |
110 | { |
111 | return os_ref_retain_try_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL)); |
112 | } |
113 | |
114 | __deprecated_msg("inefficient codegen, prefer os_ref_release / os_ref_release_relaxed" ) |
115 | static inline os_ref_count_t OS_WARN_RESULT |
116 | os_ref_release_explicit(struct os_refcnt *rc, memory_order release_order, memory_order dealloc_order) |
117 | { |
118 | return os_ref_release_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL), |
119 | release_order, dealloc_order); |
120 | } |
121 | |
#if OS_REFCNT_DEBUG
/* Designated initializer for a refgroup; all counters start at zero. */
# define os_refgrp_initializer(name, parent, flags) \
	{ \
	.grp_name = (name), \
	.grp_children = (0u), \
	.grp_count = (0u), \
	.grp_retain_total = (0u), \
	.grp_release_total = (0u), \
	.grp_parent = (parent), \
	.grp_log = NULL, \
	.grp_flags = flags, \
	}

/* Define a refgroup variable, placed in the __DATA,__refgrps section so
 * tooling can enumerate all groups. */
# define os_refgrp_decl_flags(qual, var, name, parent, flags) \
	qual struct os_refgrp __attribute__((section("__DATA,__refgrps"))) var = \
	os_refgrp_initializer(name, parent, flags)

# define os_refgrp_decl(qual, var, name, parent) \
	os_refgrp_decl_flags(qual, var, name, parent, OS_REFGRP_F_NONE)

# define os_refgrp_decl_extern(var) \
	extern struct os_refgrp var

/* Create a default group based on the init() callsite if no explicit group
 * is provided. */
# define os_ref_init_count(rc, grp, count) ({ \
	os_refgrp_decl(static, __grp, __func__, NULL); \
	(os_ref_init_count)((rc), (grp) ? (grp) : &__grp, (count)); \
})

#else /* OS_REFCNT_DEBUG */

/* Non-debug builds: groups are never instantiated and the group argument
 * is replaced with NULL at the callsite. */
# define os_refgrp_decl(qual, var, name, parent) extern struct os_refgrp var __attribute__((unused))
# define os_refgrp_decl_extern(var) os_refgrp_decl(, var, ,)
# define os_ref_init_count(rc, grp, count) (os_ref_init_count)((rc), NULL, (count))

#endif /* OS_REFCNT_DEBUG */
159 | |
#if XNU_KERNEL_PRIVATE
/* Aborts the system: a reference that had to stay live hit zero. */
void os_ref_panic_live(void *rc) __abortlike;
#else
/* Non-kernel-private builds get an inline panic with the same contract. */
__abortlike
static inline void
os_ref_panic_live(void *rc)
{
	panic("os_refcnt: unexpected release of final reference (rc=%p)\n" , rc);
	__builtin_unreachable();
}
#endif
171 | |
172 | static inline os_ref_count_t OS_WARN_RESULT |
173 | os_ref_release(struct os_refcnt *rc) |
174 | { |
175 | return os_ref_release_barrier_internal(&rc->ref_count, |
176 | os_ref_if_debug(rc->ref_group, NULL)); |
177 | } |
178 | |
179 | static inline os_ref_count_t OS_WARN_RESULT |
180 | os_ref_release_relaxed(struct os_refcnt *rc) |
181 | { |
182 | return os_ref_release_relaxed_internal(&rc->ref_count, |
183 | os_ref_if_debug(rc->ref_group, NULL)); |
184 | } |
185 | |
186 | static inline void |
187 | os_ref_release_live(struct os_refcnt *rc) |
188 | { |
189 | if (__improbable(os_ref_release(rc) == 0)) { |
190 | os_ref_panic_live(rc); |
191 | } |
192 | } |
193 | |
/* Read the raw counter with relaxed ordering (no synchronization implied). */
static inline os_ref_count_t
os_ref_get_count_internal(os_ref_atomic_t *rc)
{
	return atomic_load_explicit(rc, memory_order_relaxed);
}
199 | |
200 | static inline os_ref_count_t |
201 | os_ref_get_count(struct os_refcnt *rc) |
202 | { |
203 | return os_ref_get_count_internal(rc: &rc->ref_count); |
204 | } |
205 | |
206 | #if XNU_KERNEL_PRIVATE |
207 | #pragma GCC visibility push(hidden) |
208 | |
209 | /* |
210 | * Raw API |
211 | */ |
212 | |
/* Initialize a raw (bare os_ref_atomic_t) refcount to `count`. */
static inline void
os_ref_init_count_raw(os_ref_atomic_t *rc, struct os_refgrp *grp, os_ref_count_t count)
{
	os_ref_init_count_internal(rc, grp, count);
}
218 | |
/* Retain with a minimum expected current count of `f`
 * (floor semantics enforced by os_ref_retain_floor_internal — see its impl). */
static inline void
os_ref_retain_floor(struct os_refcnt *rc, os_ref_count_t f)
{
	os_ref_retain_floor_internal(&rc->ref_count, f, os_ref_if_debug(rc->ref_group, NULL));
}
224 | |
/* Raw-counter variant of os_ref_retain(). */
static inline void
os_ref_retain_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	os_ref_retain_internal(rc, grp);
}
230 | |
/* Raw-counter variant of os_ref_retain_floor(). */
static inline void
os_ref_retain_floor_raw(os_ref_atomic_t *rc, os_ref_count_t f, struct os_refgrp *grp)
{
	os_ref_retain_floor_internal(rc, f, grp);
}
236 | |
/* Raw-counter release with barrier semantics; returns remaining count. */
static inline os_ref_count_t
os_ref_release_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	return os_ref_release_barrier_internal(rc, grp);
}
242 | |
/* Raw-counter release with relaxed ordering; returns remaining count. */
static inline os_ref_count_t
os_ref_release_raw_relaxed(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	return os_ref_release_relaxed_internal(rc, grp);
}
248 | |
/* Raw-counter release of a reference that must not be the last;
 * panics if the count reaches zero. */
static inline void
os_ref_release_live_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	if (__improbable(os_ref_release_barrier_internal(rc, grp) == 0)) {
		os_ref_panic_live(rc);
	}
}
256 | |
/* Raw-counter variant of os_ref_retain_try(). */
static inline bool
os_ref_retain_try_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	return os_ref_retain_try_internal(rc, grp);
}
262 | |
/* Raw-counter try-retain with floor `f`; semantics per the _internal impl. */
static inline bool
os_ref_retain_floor_try_raw(os_ref_atomic_t *rc, os_ref_count_t f,
    struct os_refgrp *grp)
{
	return os_ref_retain_floor_try_internal(rc, f, grp);
}
269 | |
/* Raw-counter retain under the caller's lock. */
static inline void
os_ref_retain_locked_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	os_ref_retain_locked_internal(rc, grp);
}
275 | |
/* Raw-counter floor-retain under the caller's lock. */
static inline void
os_ref_retain_floor_locked_raw(os_ref_atomic_t *rc, os_ref_count_t f,
    struct os_refgrp *grp)
{
	os_ref_retain_floor_locked_internal(rc, f, grp);
}
282 | |
/* Raw-counter release under the caller's lock; returns remaining count. */
static inline os_ref_count_t
os_ref_release_locked_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	return os_ref_release_locked_internal(rc, grp);
}
288 | |
/* Snapshot the raw counter (relaxed load). */
static inline os_ref_count_t
os_ref_get_count_raw(os_ref_atomic_t *rc)
{
	return os_ref_get_count_internal(rc);
}
294 | |
#if !OS_REFCNT_DEBUG
/* remove the group argument for non-debug: the parenthesized names below
 * call the inline functions while preventing recursive macro expansion. */
#define os_ref_init_count_raw(rc, grp, count) (os_ref_init_count_raw)((rc), NULL, (count))
#define os_ref_retain_raw(rc, grp) (os_ref_retain_raw)((rc), NULL)
#define os_ref_retain_floor_raw(rc, f, grp) (os_ref_retain_floor_raw)((rc), f, NULL)
#define os_ref_release_raw(rc, grp) (os_ref_release_raw)((rc), NULL)
#define os_ref_release_raw_relaxed(rc, grp) (os_ref_release_raw_relaxed)((rc), NULL)
#define os_ref_release_live_raw(rc, grp) (os_ref_release_live_raw)((rc), NULL)
#define os_ref_retain_try_raw(rc, grp) (os_ref_retain_try_raw)((rc), NULL)
#define os_ref_retain_floor_try_raw(rc, f, grp) (os_ref_retain_floor_try_raw)((rc), f, NULL)
#define os_ref_retain_locked_raw(rc, grp) (os_ref_retain_locked_raw)((rc), NULL)
#define os_ref_retain_floor_locked_raw(rc, f, grp) (os_ref_retain_floor_locked_raw)((rc), f, NULL)
#define os_ref_release_locked_raw(rc, grp) (os_ref_release_locked_raw)((rc), NULL)
#endif
309 | |
/* Set up / tear down the logging context (grp_log) of a group. */
extern void
os_ref_log_fini(struct os_refgrp *grp);

extern void
os_ref_log_init(struct os_refgrp *grp);

/*
 * Masked-refcount primitives: the reference count lives in the high bits
 * of the atomic word (one reference == `n`, typically 1 << b), while the
 * low bits hold caller-defined flag bits.
 */
extern void
os_ref_retain_mask_internal(os_ref_atomic_t *rc, uint32_t n, struct os_refgrp *grp);
extern void
os_ref_retain_acquire_mask_internal(os_ref_atomic_t *rc, uint32_t n, struct os_refgrp *grp);
extern uint32_t
os_ref_retain_try_mask_internal(os_ref_atomic_t *, uint32_t n,
    uint32_t reject_mask, struct os_refgrp *grp) OS_WARN_RESULT;
extern bool
os_ref_retain_try_acquire_mask_internal(os_ref_atomic_t *, uint32_t n,
    uint32_t reject_mask, struct os_refgrp *grp) OS_WARN_RESULT;

extern uint32_t
os_ref_release_barrier_mask_internal(os_ref_atomic_t *rc, uint32_t n, struct os_refgrp *grp);
extern uint32_t
os_ref_release_relaxed_mask_internal(os_ref_atomic_t *rc, uint32_t n, struct os_refgrp *grp);
331 | |
/* Read the whole raw word (count bits and low flag bits), relaxed. */
static inline uint32_t
os_ref_get_raw_mask(os_ref_atomic_t *rc)
{
	return os_ref_get_count_internal(rc);
}
337 | |
338 | static inline uint32_t |
339 | os_ref_get_bits_mask(os_ref_atomic_t *rc, uint32_t b) |
340 | { |
341 | return os_ref_get_raw_mask(rc) & ((1u << b) - 1); |
342 | } |
343 | |
/* Extract the reference count stored above the low `b` flag bits. */
static inline os_ref_count_t
os_ref_get_count_mask(os_ref_atomic_t *rc, uint32_t b)
{
	return os_ref_get_raw_mask(rc) >> b;
}
349 | |
350 | static inline void |
351 | os_ref_retain_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp) |
352 | { |
353 | os_ref_retain_mask_internal(rc, n: 1u << b, grp); |
354 | } |
355 | |
356 | static inline void |
357 | os_ref_retain_acquire_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp) |
358 | { |
359 | os_ref_retain_acquire_mask_internal(rc, n: 1u << b, grp); |
360 | } |
361 | |
362 | static inline uint32_t |
363 | os_ref_retain_try_mask(os_ref_atomic_t *rc, uint32_t b, |
364 | uint32_t reject_mask, struct os_refgrp *grp) |
365 | { |
366 | return os_ref_retain_try_mask_internal(rc, n: 1u << b, reject_mask, grp); |
367 | } |
368 | |
369 | static inline bool |
370 | os_ref_retain_try_acquire_mask(os_ref_atomic_t *rc, uint32_t b, |
371 | uint32_t reject_mask, struct os_refgrp *grp) |
372 | { |
373 | return os_ref_retain_try_acquire_mask_internal(rc, n: 1u << b, reject_mask, grp); |
374 | } |
375 | |
376 | static inline uint32_t |
377 | os_ref_release_raw_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp) |
378 | { |
379 | return os_ref_release_barrier_mask_internal(rc, n: 1u << b, grp); |
380 | } |
381 | |
382 | static inline uint32_t |
383 | os_ref_release_raw_relaxed_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp) |
384 | { |
385 | return os_ref_release_relaxed_mask_internal(rc, n: 1u << b, grp); |
386 | } |
387 | |
388 | static inline os_ref_count_t |
389 | os_ref_release_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp) |
390 | { |
391 | return os_ref_release_barrier_mask_internal(rc, n: 1u << b, grp) >> b; |
392 | } |
393 | |
394 | static inline os_ref_count_t |
395 | os_ref_release_relaxed_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp) |
396 | { |
397 | return os_ref_release_relaxed_mask_internal(rc, n: 1u << b, grp) >> b; |
398 | } |
399 | |
400 | static inline uint32_t |
401 | os_ref_release_live_raw_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp) |
402 | { |
403 | uint32_t val = os_ref_release_barrier_mask_internal(rc, n: 1u << b, grp); |
404 | if (__improbable(val < 1u << b)) { |
405 | os_ref_panic_live(rc); |
406 | } |
407 | return val; |
408 | } |
409 | |
/* As os_ref_release_live_raw_mask(), discarding the returned raw value. */
static inline void
os_ref_release_live_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp)
{
	os_ref_release_live_raw_mask(rc, b, grp);
}
415 | |
#if !OS_REFCNT_DEBUG
/* remove the group argument for non-debug
 *
 * Fixes: the relaxed-release shim was named os_ref_release_relaxed_raw_mask,
 * which matches no declared function (the inline above is
 * os_ref_release_raw_mask's sibling os_ref_release_raw_relaxed_mask); and the
 * os_ref_retain_try_acquire_mask shim dropped the reject-mask parameter,
 * expanding to a wrong-arity call. */
#define os_ref_init_count_mask(rc, b, grp, init_c, init_b) (os_ref_init_count_mask)(rc, b, NULL, init_c, init_b)
#define os_ref_retain_mask(rc, b, grp) (os_ref_retain_mask)((rc), (b), NULL)
#define os_ref_retain_acquire_mask(rc, b, grp) (os_ref_retain_acquire_mask)((rc), (b), NULL)
#define os_ref_retain_try_mask(rc, b, m, grp) (os_ref_retain_try_mask)((rc), (b), (m), NULL)
#define os_ref_retain_try_acquire_mask(rc, b, m, grp) (os_ref_retain_try_acquire_mask)((rc), (b), (m), NULL)
#define os_ref_release_mask(rc, b, grp) (os_ref_release_mask)((rc), (b), NULL)
#define os_ref_release_relaxed_mask(rc, b, grp) (os_ref_release_relaxed_mask)((rc), (b), NULL)
#define os_ref_release_raw_mask(rc, b, grp) (os_ref_release_raw_mask)((rc), (b), NULL)
#define os_ref_release_raw_relaxed_mask(rc, b, grp) (os_ref_release_raw_relaxed_mask)((rc), (b), NULL)
#define os_ref_release_live_raw_mask(rc, b, grp) (os_ref_release_live_raw_mask)((rc), (b), NULL)
#define os_ref_release_live_mask(rc, b, grp) (os_ref_release_live_mask)((rc), (b), NULL)
#endif
430 | |
431 | #pragma GCC visibility pop |
432 | #endif |
433 | |
434 | __END_DECLS |
435 | |
436 | #endif /* _OS_REFCNT_INTERNAL_H */ |
437 | |