This source file includes the following definitions:
- xa_lock_type
- xas_lock_type
- xas_unlock_type
- xa_track_free
- xa_zero_busy
- xa_mark_set
- xa_mark_clear
- node_marks
- node_get_mark
- node_set_mark
- node_clear_mark
- node_any_mark
- node_mark_all
- xas_squash_marks
- get_offset
- xas_set_offset
- xas_move_index
- xas_advance
- set_bounds
- xas_start
- xas_descend
- xas_load
- xas_destroy
- xas_nomem
- __xas_nomem
- xas_update
- xas_alloc
- xas_size
- xas_max
- max_index
- xas_shrink
- xas_delete_node
- xas_free_nodes
- xas_expand
- xas_create
- xas_create_range
- update_node
- xas_store
- xas_get_mark
- xas_set_mark
- xas_clear_mark
- xas_init_marks
- xas_pause
- __xas_prev
- __xas_next
- xas_find
- xas_find_marked
- xas_find_conflict
- xa_load
- xas_result
- __xa_erase
- xa_erase
- __xa_store
- xa_store
- __xa_cmpxchg
- __xa_insert
- xas_set_range
- xa_store_range
- __xa_alloc
- __xa_alloc_cyclic
- __xa_set_mark
- __xa_clear_mark
- xa_get_mark
- xa_set_mark
- xa_clear_mark
- xa_find
- xas_sibling
- xa_find_after
- xas_extract_present
- xas_extract_marked
- xa_extract
- xa_destroy
- xa_dump_node
- xa_dump_index
- xa_dump_entry
- xa_dump
/* XArray implementation */

#include <linux/bitmap.h>
#include <linux/export.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/xarray.h>

/*
 * Naming conventions used below: "xa" is an xarray; "xas" is an xa_state
 * (the XArray operation state); "node" is an xa_node; "entry" is something
 * stored in a slot; "offset" is the index into a node's slots array;
 * "index" is the user-visible index; "mark" is an xa_mark_t.
 */
static inline unsigned int xa_lock_type(const struct xarray *xa)
{
        return (__force unsigned int)xa->xa_flags & 3;
}

static inline void xas_lock_type(struct xa_state *xas, unsigned int lock_type)
{
        if (lock_type == XA_LOCK_IRQ)
                xas_lock_irq(xas);
        else if (lock_type == XA_LOCK_BH)
                xas_lock_bh(xas);
        else
                xas_lock(xas);
}

static inline void xas_unlock_type(struct xa_state *xas, unsigned int lock_type)
{
        if (lock_type == XA_LOCK_IRQ)
                xas_unlock_irq(xas);
        else if (lock_type == XA_LOCK_BH)
                xas_unlock_bh(xas);
        else
                xas_unlock(xas);
}

static inline bool xa_track_free(const struct xarray *xa)
{
        return xa->xa_flags & XA_FLAGS_TRACK_FREE;
}

static inline bool xa_zero_busy(const struct xarray *xa)
{
        return xa->xa_flags & XA_FLAGS_ZERO_BUSY;
}

static inline void xa_mark_set(struct xarray *xa, xa_mark_t mark)
{
        if (!(xa->xa_flags & XA_FLAGS_MARK(mark)))
                xa->xa_flags |= XA_FLAGS_MARK(mark);
}

static inline void xa_mark_clear(struct xarray *xa, xa_mark_t mark)
{
        if (xa->xa_flags & XA_FLAGS_MARK(mark))
                xa->xa_flags &= ~(XA_FLAGS_MARK(mark));
}

static inline unsigned long *node_marks(struct xa_node *node, xa_mark_t mark)
{
        return node->marks[(__force unsigned)mark];
}

static inline bool node_get_mark(struct xa_node *node,
                unsigned int offset, xa_mark_t mark)
{
        return test_bit(offset, node_marks(node, mark));
}

/* returns true if the bit was set */
static inline bool node_set_mark(struct xa_node *node, unsigned int offset,
                                xa_mark_t mark)
{
        return __test_and_set_bit(offset, node_marks(node, mark));
}

/* returns true if the bit was set */
static inline bool node_clear_mark(struct xa_node *node, unsigned int offset,
                                xa_mark_t mark)
{
        return __test_and_clear_bit(offset, node_marks(node, mark));
}

static inline bool node_any_mark(struct xa_node *node, xa_mark_t mark)
{
        return !bitmap_empty(node_marks(node, mark), XA_CHUNK_SIZE);
}

static inline void node_mark_all(struct xa_node *node, xa_mark_t mark)
{
        bitmap_fill(node_marks(node, mark), XA_CHUNK_SIZE);
}

#define mark_inc(mark) do { \
        mark = (__force xa_mark_t)((__force unsigned)(mark) + 1); \
} while (0)

/*
 * xas_squash_marks() - Merge all marks to the first entry
 *
 * For each mark, set it on the first slot if any of the following sibling
 * slots has it set, then clear it from those sibling slots.
 */
static void xas_squash_marks(const struct xa_state *xas)
{
        unsigned int mark = 0;
        unsigned int limit = xas->xa_offset + xas->xa_sibs + 1;

        if (!xas->xa_sibs)
                return;

        do {
                unsigned long *marks = xas->xa_node->marks[mark];
                if (find_next_bit(marks, limit, xas->xa_offset + 1) == limit)
                        continue;
                __set_bit(xas->xa_offset, marks);
                bitmap_clear(marks, xas->xa_offset + 1, xas->xa_sibs);
        } while (mark++ != (__force unsigned)XA_MARK_MAX);
}

/* extracts the offset within this node from the index */
static unsigned int get_offset(unsigned long index, struct xa_node *node)
{
        return (index >> node->shift) & XA_CHUNK_MASK;
}

static void xas_set_offset(struct xa_state *xas)
{
        xas->xa_offset = get_offset(xas->xa_index, xas->xa_node);
}

/* move the index either forwards (find) or backwards (sibling slot) */
static void xas_move_index(struct xa_state *xas, unsigned long offset)
{
        unsigned int shift = xas->xa_node->shift;
        xas->xa_index &= ~XA_CHUNK_MASK << shift;
        xas->xa_index += offset << shift;
}

static void xas_advance(struct xa_state *xas)
{
        xas->xa_offset++;
        xas_move_index(xas, xas->xa_offset);
}

static void *set_bounds(struct xa_state *xas)
{
        xas->xa_node = XAS_BOUNDS;
        return NULL;
}

/*
 * Starts a walk.  If the @xas is already valid, we assume that it's on
 * the right path and just carry on.  If we're in an error state, return
 * NULL.  If the index is outside the current scope of the xarray,
 * we have either a bounds error or a walk restart.
 */
static void *xas_start(struct xa_state *xas)
{
        void *entry;

        if (xas_valid(xas))
                return xas_reload(xas);
        if (xas_error(xas))
                return NULL;

        entry = xa_head(xas->xa);
        if (!xa_is_node(entry)) {
                if (xas->xa_index)
                        return set_bounds(xas);
        } else {
                if ((xas->xa_index >> xa_to_node(entry)->shift) > XA_CHUNK_MASK)
                        return set_bounds(xas);
        }

        xas->xa_node = NULL;
        return entry;
}

static void *xas_descend(struct xa_state *xas, struct xa_node *node)
{
        unsigned int offset = get_offset(xas->xa_index, node);
        void *entry = xa_entry(xas->xa, node, offset);

        xas->xa_node = node;
        if (xa_is_sibling(entry)) {
                offset = xa_to_sibling(entry);
                entry = xa_entry(xas->xa, node, offset);
        }

        xas->xa_offset = offset;
        return entry;
}

/**
 * xas_load() - Load an entry from the XArray (advanced).
 * @xas: XArray operation state.
 *
 * Usually walks the @xas to the appropriate state to load the entry
 * stored at xa_index.  However, it will do nothing and return %NULL if
 * @xas is in an error state, and it will never expand the tree.  If the
 * xa_state is set up to operate on a multi-index entry, it may return
 * %NULL or an internal entry even if entries are present in the range.
 *
 * Context: The caller should hold the xa_lock or the RCU lock.
 * Return: Usually an entry in the XArray, but see description for exceptions.
 */
void *xas_load(struct xa_state *xas)
{
        void *entry = xas_start(xas);

        while (xa_is_node(entry)) {
                struct xa_node *node = xa_to_node(entry);

                if (xas->xa_shift > node->shift)
                        break;
                entry = xas_descend(xas, node);
                if (node->shift == 0)
                        break;
        }
        return entry;
}
EXPORT_SYMBOL_GPL(xas_load);
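
/*
 * Illustrative sketch (not part of the original file): a typical read with
 * the advanced API.  The caller supplies RCU protection and retries if it
 * races with a node being freed.  The xarray "sketch_xa" and the helper
 * name are hypothetical.
 */
static inline void *sketch_xas_load(struct xarray *sketch_xa,
                unsigned long index)
{
        XA_STATE(xas, sketch_xa, index);
        void *entry;

        rcu_read_lock();
        do {
                entry = xas_load(&xas);
        } while (xas_retry(&xas, entry));       /* skip XA_RETRY_ENTRY races */
        rcu_read_unlock();
        return entry;
}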

extern struct kmem_cache *radix_tree_node_cachep;
extern void radix_tree_node_rcu_free(struct rcu_head *head);

#define XA_RCU_FREE     ((struct xarray *)1)

static void xa_node_free(struct xa_node *node)
{
        XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
        node->array = XA_RCU_FREE;
        call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * xas_destroy() - Free any resources allocated during the XArray operation.
 * @xas: XArray operation state.
 *
 * Most users will not need to call this function; it is called for you
 * by xas_nomem().
 */
static void xas_destroy(struct xa_state *xas)
{
        struct xa_node *node = xas->xa_alloc;

        if (!node)
                return;
        XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
        kmem_cache_free(radix_tree_node_cachep, node);
        xas->xa_alloc = NULL;
}

/**
 * xas_nomem() - Allocate memory if needed.
 * @xas: XArray operation state.
 * @gfp: Memory allocation flags.
 *
 * If we need to add new nodes to the XArray, we try to allocate memory
 * with GFP_NOWAIT while holding the lock, which will usually succeed.
 * If it fails, @xas is flagged as needing memory to continue.  The caller
 * should drop the lock and call xas_nomem().  If xas_nomem() succeeds,
 * the caller should retry the operation.
 *
 * Forward progress is guaranteed as one node is allocated here and
 * stored in the xa_state where it will be found by xas_alloc().
 *
 * Return: true if memory was needed, and was successfully allocated.
 */
bool xas_nomem(struct xa_state *xas, gfp_t gfp)
{
        if (xas->xa_node != XA_ERROR(-ENOMEM)) {
                xas_destroy(xas);
                return false;
        }
        if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
                gfp |= __GFP_ACCOUNT;
        xas->xa_alloc = kmem_cache_alloc(radix_tree_node_cachep, gfp);
        if (!xas->xa_alloc)
                return false;
        XA_NODE_BUG_ON(xas->xa_alloc, !list_empty(&xas->xa_alloc->private_list));
        xas->xa_node = XAS_RESTART;
        return true;
}
EXPORT_SYMBOL_GPL(xas_nomem);
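
/*
 * Illustrative sketch (not part of the original file): the canonical
 * xas_nomem() retry loop.  The store is attempted with the lock held; if
 * it fails with -ENOMEM, xas_nomem() allocates a node while the lock is
 * dropped and the loop retries.  "sketch_xa" and "item" are hypothetical.
 */
static inline int sketch_store_retry(struct xarray *sketch_xa,
                unsigned long index, void *item)
{
        XA_STATE(xas, sketch_xa, index);

        do {
                xas_lock(&xas);
                xas_store(&xas, item);
                xas_unlock(&xas);
        } while (xas_nomem(&xas, GFP_KERNEL));

        return xas_error(&xas); /* 0 on success, e.g. -ENOMEM on failure */
}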

/*
 * __xas_nomem() - Drop locks and allocate memory if needed.
 * @xas: XArray operation state.
 * @gfp: Memory allocation flags.
 *
 * Internal variant of xas_nomem().
 *
 * Return: true if memory was needed, and was successfully allocated.
 */
static bool __xas_nomem(struct xa_state *xas, gfp_t gfp)
        __must_hold(xas->xa->xa_lock)
{
        unsigned int lock_type = xa_lock_type(xas->xa);

        if (xas->xa_node != XA_ERROR(-ENOMEM)) {
                xas_destroy(xas);
                return false;
        }
        if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
                gfp |= __GFP_ACCOUNT;
        if (gfpflags_allow_blocking(gfp)) {
                xas_unlock_type(xas, lock_type);
                xas->xa_alloc = kmem_cache_alloc(radix_tree_node_cachep, gfp);
                xas_lock_type(xas, lock_type);
        } else {
                xas->xa_alloc = kmem_cache_alloc(radix_tree_node_cachep, gfp);
        }
        if (!xas->xa_alloc)
                return false;
        XA_NODE_BUG_ON(xas->xa_alloc, !list_empty(&xas->xa_alloc->private_list));
        xas->xa_node = XAS_RESTART;
        return true;
}

static void xas_update(struct xa_state *xas, struct xa_node *node)
{
        if (xas->xa_update)
                xas->xa_update(node);
        else
                XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
}

static void *xas_alloc(struct xa_state *xas, unsigned int shift)
{
        struct xa_node *parent = xas->xa_node;
        struct xa_node *node = xas->xa_alloc;

        if (xas_invalid(xas))
                return NULL;

        if (node) {
                xas->xa_alloc = NULL;
        } else {
                gfp_t gfp = GFP_NOWAIT | __GFP_NOWARN;

                if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
                        gfp |= __GFP_ACCOUNT;

                node = kmem_cache_alloc(radix_tree_node_cachep, gfp);
                if (!node) {
                        xas_set_err(xas, -ENOMEM);
                        return NULL;
                }
        }

        if (parent) {
                node->offset = xas->xa_offset;
                parent->count++;
                XA_NODE_BUG_ON(node, parent->count > XA_CHUNK_SIZE);
                xas_update(xas, parent);
        }
        XA_NODE_BUG_ON(node, shift > BITS_PER_LONG);
        XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
        node->shift = shift;
        node->count = 0;
        node->nr_values = 0;
        RCU_INIT_POINTER(node->parent, xas->xa_node);
        node->array = xas->xa;

        return node;
}

#ifdef CONFIG_XARRAY_MULTI
/* Returns the number of indices covered by a given xa_state */
static unsigned long xas_size(const struct xa_state *xas)
{
        return (xas->xa_sibs + 1UL) << xas->xa_shift;
}
#endif

/*
 * Use this to calculate the maximum index that will need to be created
 * in order to add the entry described by @xas.  Because we cannot store a
 * multi-index entry at index 0, the calculation is a little more complex
 * than you might expect.
 */
static unsigned long xas_max(struct xa_state *xas)
{
        unsigned long max = xas->xa_index;

#ifdef CONFIG_XARRAY_MULTI
        if (xas->xa_shift || xas->xa_sibs) {
                unsigned long mask = xas_size(xas) - 1;
                max |= mask;
                if (mask == max)
                        max++;
        }
#endif

        return max;
}

/* The maximum index that can be contained in the array without expanding it */
static unsigned long max_index(void *entry)
{
        if (!xa_is_node(entry))
                return 0;
        return (XA_CHUNK_SIZE << xa_to_node(entry)->shift) - 1;
}

static void xas_shrink(struct xa_state *xas)
{
        struct xarray *xa = xas->xa;
        struct xa_node *node = xas->xa_node;

        for (;;) {
                void *entry;

                XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE);
                if (node->count != 1)
                        break;
                entry = xa_entry_locked(xa, node, 0);
                if (!entry)
                        break;
                if (!xa_is_node(entry) && node->shift)
                        break;
                if (xa_is_zero(entry) && xa_zero_busy(xa))
                        entry = NULL;
                xas->xa_node = XAS_BOUNDS;

                RCU_INIT_POINTER(xa->xa_head, entry);
                if (xa_track_free(xa) && !node_get_mark(node, 0, XA_FREE_MARK))
                        xa_mark_clear(xa, XA_FREE_MARK);

                node->count = 0;
                node->nr_values = 0;
                if (!xa_is_node(entry))
                        RCU_INIT_POINTER(node->slots[0], XA_RETRY_ENTRY);
                xas_update(xas, node);
                xa_node_free(node);
                if (!xa_is_node(entry))
                        break;
                node = xa_to_node(entry);
                node->parent = NULL;
        }
}

/*
 * xas_delete_node() - Attempt to delete an xa_node
 * @xas: Array operation state.
 *
 * If @xas->xa_node is empty, deletes it, then walks up the tree deleting
 * any parents which become empty, finally shrinking the tree if only the
 * root remains.
 */
static void xas_delete_node(struct xa_state *xas)
{
        struct xa_node *node = xas->xa_node;

        for (;;) {
                struct xa_node *parent;

                XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE);
                if (node->count)
                        break;

                parent = xa_parent_locked(xas->xa, node);
                xas->xa_node = parent;
                xas->xa_offset = node->offset;
                xa_node_free(node);

                if (!parent) {
                        xas->xa->xa_head = NULL;
                        xas->xa_node = XAS_BOUNDS;
                        return;
                }

                parent->slots[xas->xa_offset] = NULL;
                parent->count--;
                XA_NODE_BUG_ON(parent, parent->count > XA_CHUNK_SIZE);
                node = parent;
                xas_update(xas, node);
        }

        if (!node->parent)
                xas_shrink(xas);
}

/**
 * xas_free_nodes() - Free this node and all nodes that it references
 * @xas: Array operation state.
 * @top: Node to free
 *
 * This node has been removed from the tree.  We must now free it and all
 * of its subnodes.  There may be RCU walkers with references into the tree,
 * so we must replace all entries with retry markers.
 */
static void xas_free_nodes(struct xa_state *xas, struct xa_node *top)
{
        unsigned int offset = 0;
        struct xa_node *node = top;

        for (;;) {
                void *entry = xa_entry_locked(xas->xa, node, offset);

                if (node->shift && xa_is_node(entry)) {
                        node = xa_to_node(entry);
                        offset = 0;
                        continue;
                }
                if (entry)
                        RCU_INIT_POINTER(node->slots[offset], XA_RETRY_ENTRY);
                offset++;
                while (offset == XA_CHUNK_SIZE) {
                        struct xa_node *parent;

                        parent = xa_parent_locked(xas->xa, node);
                        offset = node->offset + 1;
                        node->count = 0;
                        node->nr_values = 0;
                        xas_update(xas, node);
                        xa_node_free(node);
                        if (node == top)
                                return;
                        node = parent;
                }
        }
}

/*
 * xas_expand adds nodes to the head of the tree until it has reached
 * sufficient height to be able to contain @xas->xa_index
 */
static int xas_expand(struct xa_state *xas, void *head)
{
        struct xarray *xa = xas->xa;
        struct xa_node *node = NULL;
        unsigned int shift = 0;
        unsigned long max = xas_max(xas);

        if (!head) {
                if (max == 0)
                        return 0;
                while ((max >> shift) >= XA_CHUNK_SIZE)
                        shift += XA_CHUNK_SHIFT;
                return shift + XA_CHUNK_SHIFT;
        } else if (xa_is_node(head)) {
                node = xa_to_node(head);
                shift = node->shift + XA_CHUNK_SHIFT;
        }
        xas->xa_node = NULL;

        while (max > max_index(head)) {
                xa_mark_t mark = 0;

                XA_NODE_BUG_ON(node, shift > BITS_PER_LONG);
                node = xas_alloc(xas, shift);
                if (!node)
                        return -ENOMEM;

                node->count = 1;
                if (xa_is_value(head))
                        node->nr_values = 1;
                RCU_INIT_POINTER(node->slots[0], head);

                /* Propagate the aggregated mark info to the new child */
                for (;;) {
                        if (xa_track_free(xa) && mark == XA_FREE_MARK) {
                                node_mark_all(node, XA_FREE_MARK);
                                if (!xa_marked(xa, XA_FREE_MARK)) {
                                        node_clear_mark(node, 0, XA_FREE_MARK);
                                        xa_mark_set(xa, XA_FREE_MARK);
                                }
                        } else if (xa_marked(xa, mark)) {
                                node_set_mark(node, 0, mark);
                        }
                        if (mark == XA_MARK_MAX)
                                break;
                        mark_inc(mark);
                }

                /*
                 * Now that the new node is fully initialised, we can add
                 * it to the tree
                 */
                if (xa_is_node(head)) {
                        xa_to_node(head)->offset = 0;
                        rcu_assign_pointer(xa_to_node(head)->parent, node);
                }
                head = xa_mk_node(node);
                rcu_assign_pointer(xa->xa_head, head);
                xas_update(xas, node);

                shift += XA_CHUNK_SHIFT;
        }

        xas->xa_node = node;
        return shift;
}

/*
 * xas_create() - Create a slot to store an entry in.
 * @xas: XArray operation state.
 * @allow_root: %true if we can store the entry in the root directly
 *
 * Most users will not need to call this function directly, as it is called
 * by xas_store().  It is useful for doing conditional store operations
 * (see the xa_cmpxchg() implementation for an example).
 *
 * Return: If the slot already existed, returns the contents of this slot.
 * If the slot was newly created, returns %NULL.  If it failed to create the
 * slot, returns %NULL and indicates the error in @xas.
 */
static void *xas_create(struct xa_state *xas, bool allow_root)
{
        struct xarray *xa = xas->xa;
        void *entry;
        void __rcu **slot;
        struct xa_node *node = xas->xa_node;
        int shift;
        unsigned int order = xas->xa_shift;

        if (xas_top(node)) {
                entry = xa_head_locked(xa);
                xas->xa_node = NULL;
                if (!entry && xa_zero_busy(xa))
                        entry = XA_ZERO_ENTRY;
                shift = xas_expand(xas, entry);
                if (shift < 0)
                        return NULL;
                if (!shift && !allow_root)
                        shift = XA_CHUNK_SHIFT;
                entry = xa_head_locked(xa);
                slot = &xa->xa_head;
        } else if (xas_error(xas)) {
                return NULL;
        } else if (node) {
                unsigned int offset = xas->xa_offset;

                shift = node->shift;
                entry = xa_entry_locked(xa, node, offset);
                slot = &node->slots[offset];
        } else {
                shift = 0;
                entry = xa_head_locked(xa);
                slot = &xa->xa_head;
        }

        while (shift > order) {
                shift -= XA_CHUNK_SHIFT;
                if (!entry) {
                        node = xas_alloc(xas, shift);
                        if (!node)
                                break;
                        if (xa_track_free(xa))
                                node_mark_all(node, XA_FREE_MARK);
                        rcu_assign_pointer(*slot, xa_mk_node(node));
                } else if (xa_is_node(entry)) {
                        node = xa_to_node(entry);
                } else {
                        break;
                }
                entry = xas_descend(xas, node);
                slot = &node->slots[xas->xa_offset];
        }

        return entry;
}

/**
 * xas_create_range() - Ensure that stores to this range will succeed
 * @xas: XArray operation state.
 *
 * Creates all of the slots in the range covered by @xas.  Sets @xas to
 * create single-index entries and positions it at the beginning of the
 * range.  This is for the benefit of users which have not yet been
 * converted to use multi-index entries.
 */
void xas_create_range(struct xa_state *xas)
{
        unsigned long index = xas->xa_index;
        unsigned char shift = xas->xa_shift;
        unsigned char sibs = xas->xa_sibs;

        xas->xa_index |= ((sibs + 1) << shift) - 1;
        if (xas_is_node(xas) && xas->xa_node->shift == xas->xa_shift)
                xas->xa_offset |= sibs;
        xas->xa_shift = 0;
        xas->xa_sibs = 0;

        for (;;) {
                xas_create(xas, true);
                if (xas_error(xas))
                        goto restore;
                if (xas->xa_index <= (index | XA_CHUNK_MASK))
                        goto success;
                xas->xa_index -= XA_CHUNK_SIZE;

                for (;;) {
                        struct xa_node *node = xas->xa_node;
                        xas->xa_node = xa_parent_locked(xas->xa, node);
                        xas->xa_offset = node->offset - 1;
                        if (node->offset != 0)
                                break;
                }
        }

restore:
        xas->xa_shift = shift;
        xas->xa_sibs = sibs;
        xas->xa_index = index;
        return;
success:
        xas->xa_index = index;
        if (xas->xa_node)
                xas_set_offset(xas);
}
EXPORT_SYMBOL_GPL(xas_create_range);

static void update_node(struct xa_state *xas, struct xa_node *node,
                int count, int values)
{
        if (!node || (!count && !values))
                return;

        node->count += count;
        node->nr_values += values;
        XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE);
        XA_NODE_BUG_ON(node, node->nr_values > XA_CHUNK_SIZE);
        xas_update(xas, node);
        if (count < 0)
                xas_delete_node(xas);
}

/**
 * xas_store() - Store this entry in the XArray.
 * @xas: XArray operation state.
 * @entry: New entry.
 *
 * If @xas is operating on a multi-index entry, the entry returned by this
 * function is essentially meaningless (it may be an internal entry or it
 * may be %NULL, even if there are non-NULL entries at some of the indices
 * covered by the range).  This is not a problem for any current users,
 * and can be changed if needed.
 *
 * Return: The old entry at this index.
 */
void *xas_store(struct xa_state *xas, void *entry)
{
        struct xa_node *node;
        void __rcu **slot = &xas->xa->xa_head;
        unsigned int offset, max;
        int count = 0;
        int values = 0;
        void *first, *next;
        bool value = xa_is_value(entry);

        if (entry) {
                bool allow_root = !xa_is_node(entry) && !xa_is_zero(entry);
                first = xas_create(xas, allow_root);
        } else {
                first = xas_load(xas);
        }

        if (xas_invalid(xas))
                return first;
        node = xas->xa_node;
        if (node && (xas->xa_shift < node->shift))
                xas->xa_sibs = 0;
        if ((first == entry) && !xas->xa_sibs)
                return first;

        next = first;
        offset = xas->xa_offset;
        max = xas->xa_offset + xas->xa_sibs;
        if (node) {
                slot = &node->slots[offset];
                if (xas->xa_sibs)
                        xas_squash_marks(xas);
        }
        if (!entry)
                xas_init_marks(xas);

        for (;;) {
                /*
                 * Must clear the marks before setting the entry to NULL,
                 * otherwise xas_for_each_marked may find a NULL entry and
                 * stop early.  rcu_assign_pointer contains a release barrier
                 * so the marks must be up to date before the entry is set
                 * to NULL.
                 */
                rcu_assign_pointer(*slot, entry);
                if (xa_is_node(next) && (!node || node->shift))
                        xas_free_nodes(xas, xa_to_node(next));
                if (!node)
                        break;
                count += !next - !entry;
                values += !xa_is_value(first) - !value;
                if (entry) {
                        if (offset == max)
                                break;
                        if (!xa_is_sibling(entry))
                                entry = xa_mk_sibling(xas->xa_offset);
                } else {
                        if (offset == XA_CHUNK_MASK)
                                break;
                }
                next = xa_entry_locked(xas->xa, node, ++offset);
                if (!xa_is_sibling(next)) {
                        if (!entry && (offset > max))
                                break;
                        first = next;
                }
                slot++;
        }

        update_node(xas, node, count, values);
        return first;
}
EXPORT_SYMBOL_GPL(xas_store);
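
/*
 * Illustrative sketch (not part of the original file): small integers can
 * be stored directly in the array as "value" entries instead of pointers,
 * avoiding any allocation for the entry itself.  "sketch_xa" is
 * hypothetical.
 */
static inline unsigned long sketch_value_entries(struct xarray *sketch_xa)
{
        void *entry;

        /* Tag the integer 42 so it is stored as a value, not dereferenced. */
        xa_store(sketch_xa, 1, xa_mk_value(42), GFP_KERNEL);

        entry = xa_load(sketch_xa, 1);
        if (xa_is_value(entry))
                return xa_to_value(entry);      /* 42 */
        return 0;
}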

/**
 * xas_get_mark() - Returns the state of this mark.
 * @xas: XArray operation state.
 * @mark: Mark number.
 *
 * Return: true if the mark is set, false if the mark is clear or @xas
 * is in an error state.
 */
bool xas_get_mark(const struct xa_state *xas, xa_mark_t mark)
{
        if (xas_invalid(xas))
                return false;
        if (!xas->xa_node)
                return xa_marked(xas->xa, mark);
        return node_get_mark(xas->xa_node, xas->xa_offset, mark);
}
EXPORT_SYMBOL_GPL(xas_get_mark);

/**
 * xas_set_mark() - Sets the mark on this entry and its parents.
 * @xas: XArray operation state.
 * @mark: Mark number.
 *
 * Sets the specified mark on this entry, and walks up the tree setting it
 * on all the ancestor entries.  Does nothing if @xas has not been walked to
 * an entry, or is in an error state.
 */
void xas_set_mark(const struct xa_state *xas, xa_mark_t mark)
{
        struct xa_node *node = xas->xa_node;
        unsigned int offset = xas->xa_offset;

        if (xas_invalid(xas))
                return;

        while (node) {
                if (node_set_mark(node, offset, mark))
                        return;
                offset = node->offset;
                node = xa_parent_locked(xas->xa, node);
        }

        if (!xa_marked(xas->xa, mark))
                xa_mark_set(xas->xa, mark);
}
EXPORT_SYMBOL_GPL(xas_set_mark);

/**
 * xas_clear_mark() - Clears the mark on this entry and its parents.
 * @xas: XArray operation state.
 * @mark: Mark number.
 *
 * Clears the specified mark on this entry, and walks back to the head
 * attempting to clear it on all the ancestor entries.  Does nothing if
 * @xas has not been walked to an entry, or is in an error state.
 */
void xas_clear_mark(const struct xa_state *xas, xa_mark_t mark)
{
        struct xa_node *node = xas->xa_node;
        unsigned int offset = xas->xa_offset;

        if (xas_invalid(xas))
                return;

        while (node) {
                if (!node_clear_mark(node, offset, mark))
                        return;
                if (node_any_mark(node, mark))
                        return;

                offset = node->offset;
                node = xa_parent_locked(xas->xa, node);
        }

        if (xa_marked(xas->xa, mark))
                xa_mark_clear(xas->xa, mark);
}
EXPORT_SYMBOL_GPL(xas_clear_mark);

/**
 * xas_init_marks() - Initialise all marks for the entry
 * @xas: Array operations state.
 *
 * Initialise all marks for the entry specified by @xas.  If we're tracking
 * free entries with a mark, we need to set it on all entries.  All other
 * marks are cleared.
 *
 * This implementation is not as efficient as it could be; we may walk
 * up the tree multiple times.
 */
void xas_init_marks(const struct xa_state *xas)
{
        xa_mark_t mark = 0;

        for (;;) {
                if (xa_track_free(xas->xa) && mark == XA_FREE_MARK)
                        xas_set_mark(xas, mark);
                else
                        xas_clear_mark(xas, mark);
                if (mark == XA_MARK_MAX)
                        break;
                mark_inc(mark);
        }
}
EXPORT_SYMBOL_GPL(xas_init_marks);

/**
 * xas_pause() - Pause a walk to drop a lock.
 * @xas: XArray operation state.
 *
 * Some users need to pause a walk and drop the lock they're holding in
 * order to yield to a higher priority thread or carry out an operation
 * on an entry.  Those users should call this function before they drop
 * the lock.  It resets the @xas to be suitable for the next iteration
 * of the loop after the user has reacquired the lock.  If most entries
 * found during a walk require you to call xas_pause(), the xa_for_each()
 * iterator may be more appropriate.
 */
void xas_pause(struct xa_state *xas)
{
        struct xa_node *node = xas->xa_node;

        if (xas_invalid(xas))
                return;

        xas->xa_node = XAS_RESTART;
        if (node) {
                unsigned long offset = xas->xa_offset;
                while (++offset < XA_CHUNK_SIZE) {
                        if (!xa_is_sibling(xa_entry(xas->xa, node, offset)))
                                break;
                }
                xas->xa_index += (offset - xas->xa_offset) << node->shift;
                if (xas->xa_index == 0)
                        xas->xa_node = XAS_BOUNDS;
        } else {
                xas->xa_index++;
        }
}
EXPORT_SYMBOL_GPL(xas_pause);
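
/*
 * Illustrative sketch (not part of the original file): pausing an RCU
 * iteration to reschedule.  xas_pause() fixes up the state so the loop
 * can continue safely after the RCU lock is dropped and retaken.
 * "sketch_xa" is hypothetical.
 */
static inline unsigned long sketch_count_entries(struct xarray *sketch_xa)
{
        XA_STATE(xas, sketch_xa, 0);
        unsigned long count = 0;
        void *entry;

        rcu_read_lock();
        xas_for_each(&xas, entry, ULONG_MAX) {
                count++;
                if (need_resched()) {
                        xas_pause(&xas);        /* make it safe to drop RCU */
                        rcu_read_unlock();
                        cond_resched();
                        rcu_read_lock();
                }
        }
        rcu_read_unlock();
        return count;
}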

/*
 * __xas_prev() - Find the previous entry in the XArray.
 * @xas: XArray operation state.
 *
 * Helper function for xas_prev() which handles all the complex cases
 * out of line.
 */
void *__xas_prev(struct xa_state *xas)
{
        void *entry;

        if (!xas_frozen(xas->xa_node))
                xas->xa_index--;
        if (!xas->xa_node)
                return set_bounds(xas);
        if (xas_not_node(xas->xa_node))
                return xas_load(xas);

        if (xas->xa_offset != get_offset(xas->xa_index, xas->xa_node))
                xas->xa_offset--;

        /* xa_offset is an unsigned char: decrementing 0 wraps to 255 */
        while (xas->xa_offset == 255) {
                xas->xa_offset = xas->xa_node->offset - 1;
                xas->xa_node = xa_parent(xas->xa, xas->xa_node);
                if (!xas->xa_node)
                        return set_bounds(xas);
        }

        for (;;) {
                entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
                if (!xa_is_node(entry))
                        return entry;

                xas->xa_node = xa_to_node(entry);
                xas_set_offset(xas);
        }
}
EXPORT_SYMBOL_GPL(__xas_prev);

/*
 * __xas_next() - Find the next entry in the XArray.
 * @xas: XArray operation state.
 *
 * Helper function for xas_next() which handles all the complex cases
 * out of line.
 */
void *__xas_next(struct xa_state *xas)
{
        void *entry;

        if (!xas_frozen(xas->xa_node))
                xas->xa_index++;
        if (!xas->xa_node)
                return set_bounds(xas);
        if (xas_not_node(xas->xa_node))
                return xas_load(xas);

        if (xas->xa_offset != get_offset(xas->xa_index, xas->xa_node))
                xas->xa_offset++;

        while (xas->xa_offset == XA_CHUNK_SIZE) {
                xas->xa_offset = xas->xa_node->offset + 1;
                xas->xa_node = xa_parent(xas->xa, xas->xa_node);
                if (!xas->xa_node)
                        return set_bounds(xas);
        }

        for (;;) {
                entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
                if (!xa_is_node(entry))
                        return entry;

                xas->xa_node = xa_to_node(entry);
                xas_set_offset(xas);
        }
}
EXPORT_SYMBOL_GPL(__xas_next);

/**
 * xas_find() - Find the next present entry in the XArray.
 * @xas: XArray operation state.
 * @max: Highest index to return.
 *
 * If the @xas has not yet been walked to an entry, return the entry
 * which has an index >= xas.xa_index.  If it has been walked, the entry
 * currently being pointed at has been processed, and so we move to the
 * next entry.
 *
 * If no entry is found and the array is smaller than @max, the iterator
 * is set to the smallest index not yet in the array.  This allows @xas
 * to be immediately passed to xas_store().
 *
 * Return: The entry, if found, otherwise %NULL.
 */
void *xas_find(struct xa_state *xas, unsigned long max)
{
        void *entry;

        if (xas_error(xas) || xas->xa_node == XAS_BOUNDS)
                return NULL;
        if (xas->xa_index > max)
                return set_bounds(xas);

        if (!xas->xa_node) {
                xas->xa_index = 1;
                return set_bounds(xas);
        } else if (xas->xa_node == XAS_RESTART) {
                entry = xas_load(xas);
                if (entry || xas_not_node(xas->xa_node))
                        return entry;
        } else if (!xas->xa_node->shift &&
                    xas->xa_offset != (xas->xa_index & XA_CHUNK_MASK)) {
                xas->xa_offset = ((xas->xa_index - 1) & XA_CHUNK_MASK) + 1;
        }

        xas_advance(xas);

        while (xas->xa_node && (xas->xa_index <= max)) {
                if (unlikely(xas->xa_offset == XA_CHUNK_SIZE)) {
                        xas->xa_offset = xas->xa_node->offset + 1;
                        xas->xa_node = xa_parent(xas->xa, xas->xa_node);
                        continue;
                }

                entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
                if (xa_is_node(entry)) {
                        xas->xa_node = xa_to_node(entry);
                        xas->xa_offset = 0;
                        continue;
                }
                if (entry && !xa_is_sibling(entry))
                        return entry;

                xas_advance(xas);
        }

        if (!xas->xa_node)
                xas->xa_node = XAS_BOUNDS;
        return NULL;
}
EXPORT_SYMBOL_GPL(xas_find);
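
/*
 * Illustrative sketch (not part of the original file): xas_find() is the
 * engine behind the xas_for_each() iterator.  An explicit loop over it
 * looks like this; most callers should use xas_for_each() instead.
 * "sketch_xa" and "process" are hypothetical.
 */
static inline void sketch_iterate(struct xarray *sketch_xa,
                void (*process)(unsigned long index, void *entry))
{
        XA_STATE(xas, sketch_xa, 0);
        void *entry;

        rcu_read_lock();
        for (entry = xas_find(&xas, ULONG_MAX); entry;
             entry = xas_find(&xas, ULONG_MAX)) {
                if (xas_retry(&xas, entry))
                        continue;
                process(xas.xa_index, entry);
        }
        rcu_read_unlock();
}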

/**
 * xas_find_marked() - Find the next marked entry in the XArray.
 * @xas: XArray operation state.
 * @max: Highest index to return.
 * @mark: Mark number to search for.
 *
 * If the @xas has not yet been walked to an entry, return the marked entry
 * which has an index >= xas.xa_index.  If it has been walked, return the
 * first marked entry with an index > xas.xa_index.
 *
 * If no marked entry is found and the array is smaller than @max, @xas is
 * set to the bounds state and xas->xa_index is set to the smallest index
 * not yet in the array.  This allows @xas to be immediately passed to
 * xas_store().
 *
 * If no entry is found before @max is reached, @xas is set to the restart
 * state.
 *
 * Return: The entry, if found, otherwise %NULL.
 */
void *xas_find_marked(struct xa_state *xas, unsigned long max, xa_mark_t mark)
{
        bool advance = true;
        unsigned int offset;
        void *entry;

        if (xas_error(xas))
                return NULL;
        if (xas->xa_index > max)
                goto max;

        if (!xas->xa_node) {
                xas->xa_index = 1;
                goto out;
        } else if (xas_top(xas->xa_node)) {
                advance = false;
                entry = xa_head(xas->xa);
                xas->xa_node = NULL;
                if (xas->xa_index > max_index(entry))
                        goto out;
                if (!xa_is_node(entry)) {
                        if (xa_marked(xas->xa, mark))
                                return entry;
                        xas->xa_index = 1;
                        goto out;
                }
                xas->xa_node = xa_to_node(entry);
                xas->xa_offset = xas->xa_index >> xas->xa_node->shift;
        }

        while (xas->xa_index <= max) {
                if (unlikely(xas->xa_offset == XA_CHUNK_SIZE)) {
                        xas->xa_offset = xas->xa_node->offset + 1;
                        xas->xa_node = xa_parent(xas->xa, xas->xa_node);
                        if (!xas->xa_node)
                                break;
                        advance = false;
                        continue;
                }

                if (!advance) {
                        entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
                        if (xa_is_sibling(entry)) {
                                xas->xa_offset = xa_to_sibling(entry);
                                xas_move_index(xas, xas->xa_offset);
                        }
                }

                offset = xas_find_chunk(xas, advance, mark);
                if (offset > xas->xa_offset) {
                        advance = false;
                        xas_move_index(xas, offset);
                        /* Mind the wrap */
                        if ((xas->xa_index - 1) >= max)
                                goto max;
                        xas->xa_offset = offset;
                        if (offset == XA_CHUNK_SIZE)
                                continue;
                }

                entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
                if (!entry && !(xa_track_free(xas->xa) && mark == XA_FREE_MARK))
                        continue;
                if (!xa_is_node(entry))
                        return entry;
                xas->xa_node = xa_to_node(entry);
                xas_set_offset(xas);
        }

out:
        if (xas->xa_index > max)
                goto max;
        return set_bounds(xas);
max:
        xas->xa_node = XAS_RESTART;
        return NULL;
}
EXPORT_SYMBOL_GPL(xas_find_marked);

/**
 * xas_find_conflict() - Find the next present entry in a range.
 * @xas: XArray operation state.
 *
 * The @xas describes both a range and a position within that range.
 *
 * Context: Any context.  Expects xa_lock to be held.
 * Return: The next entry in the range covered by @xas or %NULL.
 */
void *xas_find_conflict(struct xa_state *xas)
{
        void *curr;

        if (xas_error(xas))
                return NULL;

        if (!xas->xa_node)
                return NULL;

        if (xas_top(xas->xa_node)) {
                curr = xas_start(xas);
                if (!curr)
                        return NULL;
                while (xa_is_node(curr)) {
                        struct xa_node *node = xa_to_node(curr);
                        curr = xas_descend(xas, node);
                }
                if (curr)
                        return curr;
        }

        if (xas->xa_node->shift > xas->xa_shift)
                return NULL;

        for (;;) {
                if (xas->xa_node->shift == xas->xa_shift) {
                        if ((xas->xa_offset & xas->xa_sibs) == xas->xa_sibs)
                                break;
                } else if (xas->xa_offset == XA_CHUNK_MASK) {
                        xas->xa_offset = xas->xa_node->offset;
                        xas->xa_node = xa_parent_locked(xas->xa, xas->xa_node);
                        if (!xas->xa_node)
                                break;
                        continue;
                }
                curr = xa_entry_locked(xas->xa, xas->xa_node, ++xas->xa_offset);
                if (xa_is_sibling(curr))
                        continue;
                while (xa_is_node(curr)) {
                        xas->xa_node = xa_to_node(curr);
                        xas->xa_offset = 0;
                        curr = xa_entry_locked(xas->xa, xas->xa_node, 0);
                }
                if (curr)
                        return curr;
        }
        xas->xa_offset -= xas->xa_sibs;
        return NULL;
}
EXPORT_SYMBOL_GPL(xas_find_conflict);

/**
 * xa_load() - Load an entry from an XArray.
 * @xa: XArray.
 * @index: Index into array.
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The entry at @index in @xa.
 */
void *xa_load(struct xarray *xa, unsigned long index)
{
        XA_STATE(xas, xa, index);
        void *entry;

        rcu_read_lock();
        do {
                entry = xas_load(&xas);
                if (xa_is_zero(entry))
                        entry = NULL;
        } while (xas_retry(&xas, entry));
        rcu_read_unlock();

        return entry;
}
EXPORT_SYMBOL(xa_load);
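
/*
 * Illustrative sketch (not part of the original file): xa_load() needs no
 * locking from the caller; a NULL return means nothing is stored there.
 * "sketch_xa" is hypothetical.
 */
static inline bool sketch_index_present(struct xarray *sketch_xa,
                unsigned long index)
{
        return xa_load(sketch_xa, index) != NULL;
}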

static void *xas_result(struct xa_state *xas, void *curr)
{
        if (xa_is_zero(curr))
                return NULL;
        if (xas_error(xas))
                curr = xas->xa_node;
        return curr;
}

/**
 * __xa_erase() - Erase this entry from the XArray while locked.
 * @xa: XArray.
 * @index: Index into array.
 *
 * After this function returns, loading from @index will return %NULL.
 * If the index is part of a multi-index entry, all indices will be erased
 * and none of the entries will be part of a multi-index entry.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.
 * Return: The entry which used to be at this index.
 */
void *__xa_erase(struct xarray *xa, unsigned long index)
{
        XA_STATE(xas, xa, index);
        return xas_result(&xas, xas_store(&xas, NULL));
}
EXPORT_SYMBOL(__xa_erase);

/**
 * xa_erase() - Erase this entry from the XArray.
 * @xa: XArray.
 * @index: Index of entry.
 *
 * After this function returns, loading from @index will return %NULL.
 * If the index is part of a multi-index entry, all indices will be erased
 * and none of the entries will be part of a multi-index entry.
 *
 * Context: Any context.  Takes and releases the xa_lock.
 * Return: The entry which used to be at this index.
 */
void *xa_erase(struct xarray *xa, unsigned long index)
{
        void *entry;

        xa_lock(xa);
        entry = __xa_erase(xa, index);
        xa_unlock(xa);

        return entry;
}
EXPORT_SYMBOL(xa_erase);

/**
 * __xa_store() - Store this entry in the XArray.
 * @xa: XArray.
 * @index: Index into array.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * You must already be holding the xa_lock when calling this function.
 * It will drop the lock if needed to allocate memory, and then reacquire
 * it afterwards.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: The old entry at this index or xa_err() if an error happened.
 */
void *__xa_store(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp)
{
        XA_STATE(xas, xa, index);
        void *curr;

        if (WARN_ON_ONCE(xa_is_advanced(entry)))
                return XA_ERROR(-EINVAL);
        if (xa_track_free(xa) && !entry)
                entry = XA_ZERO_ENTRY;

        do {
                curr = xas_store(&xas, entry);
                if (xa_track_free(xa))
                        xas_clear_mark(&xas, XA_FREE_MARK);
        } while (__xas_nomem(&xas, gfp));

        return xas_result(&xas, curr);
}
EXPORT_SYMBOL(__xa_store);

/**
 * xa_store() - Store this entry in the XArray.
 * @xa: XArray.
 * @index: Index into array.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * After this function returns, loads from this index will return @entry.
 * Storing into an existing multi-index entry updates the entry of every index.
 * The marks associated with @index are unaffected unless @entry is %NULL.
 *
 * Context: Any context.  Takes and releases the xa_lock.
 * May sleep if the @gfp flags permit.
 * Return: The old entry at this index on success, xa_err(-EINVAL) if @entry
 * cannot be stored in an XArray, or xa_err(-ENOMEM) if memory allocation
 * failed.
 */
void *xa_store(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp)
{
        void *curr;

        xa_lock(xa);
        curr = __xa_store(xa, index, entry, gfp);
        xa_unlock(xa);

        return curr;
}
EXPORT_SYMBOL(xa_store);
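
/*
 * Illustrative sketch (not part of the original file): the basic
 * store/load/erase life cycle.  Note that xa_store() returns the
 * *previous* entry, and errors are encoded in the returned pointer.
 * "sketch_xa" and "item" are hypothetical.
 */
static inline int sketch_basic_usage(struct xarray *sketch_xa, void *item)
{
        void *old = xa_store(sketch_xa, 10, item, GFP_KERNEL);

        if (xa_is_err(old))
                return xa_err(old);     /* e.g. -ENOMEM */

        WARN_ON(xa_load(sketch_xa, 10) != item);
        xa_erase(sketch_xa, 10);        /* returns item; slot now reads NULL */
        return 0;
}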

/**
 * __xa_cmpxchg() - Conditionally replace an entry in the XArray.
 * @xa: XArray.
 * @index: Index into array.
 * @old: Old value to test against.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * You must already be holding the xa_lock when calling this function.
 * It will drop the lock if needed to allocate memory, and then reacquire
 * it afterwards.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: The old entry at this index or xa_err() if an error happened.
 */
void *__xa_cmpxchg(struct xarray *xa, unsigned long index,
                        void *old, void *entry, gfp_t gfp)
{
        XA_STATE(xas, xa, index);
        void *curr;

        if (WARN_ON_ONCE(xa_is_advanced(entry)))
                return XA_ERROR(-EINVAL);

        do {
                curr = xas_load(&xas);
                if (curr == old) {
                        xas_store(&xas, entry);
                        if (xa_track_free(xa) && entry && !curr)
                                xas_clear_mark(&xas, XA_FREE_MARK);
                }
        } while (__xas_nomem(&xas, gfp));

        return xas_result(&xas, curr);
}
EXPORT_SYMBOL(__xa_cmpxchg);
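
/*
 * Illustrative sketch (not part of the original file): compare-and-exchange
 * succeeds only when the returned entry equals the expected old value.
 * The unlocked wrapper xa_cmpxchg() is used here; "sketch_xa", "expected"
 * and "replacement" are hypothetical.
 */
static inline bool sketch_cmpxchg(struct xarray *sketch_xa,
                unsigned long index, void *expected, void *replacement)
{
        void *curr = xa_cmpxchg(sketch_xa, index, expected, replacement,
                                GFP_KERNEL);

        /* Success iff the slot held the expected entry (and no error). */
        return !xa_is_err(curr) && curr == expected;
}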

/**
 * __xa_insert() - Store this entry in the XArray if no entry is present.
 * @xa: XArray.
 * @index: Index into array.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * Inserting a NULL entry will store a reserved entry (like xa_reserve())
 * if no entry is present.  Inserting will fail if a reserved entry is
 * present, even though loading from this index will return NULL.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: 0 if the store succeeded.  -EBUSY if another entry was present.
 * -ENOMEM if memory could not be allocated.
 */
int __xa_insert(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp)
{
        XA_STATE(xas, xa, index);
        void *curr;

        if (WARN_ON_ONCE(xa_is_advanced(entry)))
                return -EINVAL;
        if (!entry)
                entry = XA_ZERO_ENTRY;

        do {
                curr = xas_load(&xas);
                if (!curr) {
                        xas_store(&xas, entry);
                        if (xa_track_free(xa))
                                xas_clear_mark(&xas, XA_FREE_MARK);
                } else {
                        xas_set_err(&xas, -EBUSY);
                }
        } while (__xas_nomem(&xas, gfp));

        return xas_error(&xas);
}
EXPORT_SYMBOL(__xa_insert);
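
/*
 * Illustrative sketch (not part of the original file): xa_insert() (the
 * unlocked wrapper) fails with -EBUSY instead of overwriting, which makes
 * it handy for "claim this index exactly once" logic.  "sketch_xa" and
 * "item" are hypothetical.
 */
static inline bool sketch_claim_index(struct xarray *sketch_xa,
                unsigned long index, void *item)
{
        int err = xa_insert(sketch_xa, index, item, GFP_KERNEL);

        return err == 0;        /* -EBUSY: someone else got there first */
}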

#ifdef CONFIG_XARRAY_MULTI
static void xas_set_range(struct xa_state *xas, unsigned long first,
                unsigned long last)
{
        unsigned int shift = 0;
        unsigned long sibs = last - first;
        unsigned int offset = XA_CHUNK_MASK;

        xas_set(xas, first);

        while ((first & XA_CHUNK_MASK) == 0) {
                if (sibs < XA_CHUNK_MASK)
                        break;
                if ((sibs == XA_CHUNK_MASK) && (offset < XA_CHUNK_MASK))
                        break;
                shift += XA_CHUNK_SHIFT;
                if (offset == XA_CHUNK_MASK)
                        offset = sibs & XA_CHUNK_MASK;
                sibs >>= XA_CHUNK_SHIFT;
                first >>= XA_CHUNK_SHIFT;
        }

        offset = first & XA_CHUNK_MASK;
        if (offset + sibs > XA_CHUNK_MASK)
                sibs = XA_CHUNK_MASK - offset;
        if ((((first + sibs + 1) << shift) - 1) > last)
                sibs -= 1;

        xas->xa_shift = shift;
        xas->xa_sibs = sibs;
}

/**
 * xa_store_range() - Store this entry at a range of indices.
 * @xa: XArray.
 * @first: First index to affect.
 * @last: Last index to affect.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * After this function returns, loads from any index between @first and
 * @last, inclusive will return @entry.  Storing into an existing
 * multi-index entry updates the entry of every index.
 *
 * Context: Process context.  Takes and releases the xa_lock.  May sleep
 * if the @gfp flags permit.
 * Return: %NULL on success, xa_err(-EINVAL) if @entry cannot be stored in
 * an XArray, or xa_err(-ENOMEM) if memory allocation failed.
 */
void *xa_store_range(struct xarray *xa, unsigned long first,
                unsigned long last, void *entry, gfp_t gfp)
{
        XA_STATE(xas, xa, 0);

        if (WARN_ON_ONCE(xa_is_internal(entry)))
                return XA_ERROR(-EINVAL);
        if (last < first)
                return XA_ERROR(-EINVAL);

        do {
                xas_lock(&xas);
                if (entry) {
                        unsigned int order = BITS_PER_LONG;
                        if (last + 1)
                                order = __ffs(last + 1);
                        xas_set_order(&xas, last, order);
                        xas_create(&xas, true);
                        if (xas_error(&xas))
                                goto unlock;
                }
                do {
                        xas_set_range(&xas, first, last);
                        xas_store(&xas, entry);
                        if (xas_error(&xas))
                                goto unlock;
                        first += xas_size(&xas);
                } while (first <= last);
unlock:
                xas_unlock(&xas);
        } while (xas_nomem(&xas, gfp));

        return xas_result(&xas, NULL);
}
EXPORT_SYMBOL(xa_store_range);
#endif /* CONFIG_XARRAY_MULTI */
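
/*
 * Illustrative sketch (not part of the original file): with
 * CONFIG_XARRAY_MULTI enabled, one entry can cover a whole range of
 * indices, and every index in the range loads the same pointer.
 * "sketch_xa" and "item" are hypothetical.
 */
static inline int sketch_store_range(struct xarray *sketch_xa, void *item)
{
        /* Indices 64..127 all map to item afterwards. */
        void *ret = xa_store_range(sketch_xa, 64, 127, item, GFP_KERNEL);

        if (xa_is_err(ret))
                return xa_err(ret);
        WARN_ON(xa_load(sketch_xa, 100) != item);
        return 0;
}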

/**
 * __xa_alloc() - Find somewhere to store this entry in the XArray.
 * @xa: XArray.
 * @id: Pointer to ID.
 * @entry: New entry.
 * @limit: Range for allocated ID.
 * @gfp: Memory allocation flags.
 *
 * Finds an empty entry in @xa between @limit.min and @limit.max,
 * stores the index into the @id pointer, then stores the entry at
 * that index.  A concurrent lookup will not see an uninitialised @id.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: 0 on success, -ENOMEM if memory could not be allocated or
 * -EBUSY if there are no free entries in @limit.
 */
int __xa_alloc(struct xarray *xa, u32 *id, void *entry,
                struct xa_limit limit, gfp_t gfp)
{
        XA_STATE(xas, xa, 0);

        if (WARN_ON_ONCE(xa_is_advanced(entry)))
                return -EINVAL;
        if (WARN_ON_ONCE(!xa_track_free(xa)))
                return -EINVAL;

        if (!entry)
                entry = XA_ZERO_ENTRY;

        do {
                xas.xa_index = limit.min;
                xas_find_marked(&xas, limit.max, XA_FREE_MARK);
                if (xas.xa_node == XAS_RESTART)
                        xas_set_err(&xas, -EBUSY);
                else
                        *id = xas.xa_index;
                xas_store(&xas, entry);
                xas_clear_mark(&xas, XA_FREE_MARK);
        } while (__xas_nomem(&xas, gfp));

        return xas_error(&xas);
}
EXPORT_SYMBOL(__xa_alloc);
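
/*
 * Illustrative sketch (not part of the original file): ID allocation
 * requires an allocating XArray (XA_FLAGS_ALLOC / DEFINE_XARRAY_ALLOC)
 * so that free slots are tracked with XA_FREE_MARK.  The unlocked
 * wrapper xa_alloc() is used; "sketch_ids" and "item" are hypothetical.
 */
static DEFINE_XARRAY_ALLOC(sketch_ids);

static inline int sketch_alloc_id(void *item, u32 *id)
{
        /* Picks the lowest free index in [0, UINT_MAX] and stores item. */
        return xa_alloc(&sketch_ids, id, item, xa_limit_32b, GFP_KERNEL);
}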

/**
 * __xa_alloc_cyclic() - Find somewhere to store this entry in the XArray.
 * @xa: XArray.
 * @id: Pointer to ID.
 * @entry: New entry.
 * @limit: Range of allocated ID.
 * @next: Pointer to next ID to allocate.
 * @gfp: Memory allocation flags.
 *
 * Finds an empty entry in @xa between @limit.min and @limit.max,
 * stores the index into the @id pointer, then stores the entry at
 * that index.  The search for an empty entry will start at @next and
 * will wrap around if necessary.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: 0 if the allocation succeeded without wrapping.  1 if the
 * allocation succeeded after wrapping, -ENOMEM if memory could not be
 * allocated or -EBUSY if there are no free entries in @limit.
 */
int __xa_alloc_cyclic(struct xarray *xa, u32 *id, void *entry,
                struct xa_limit limit, u32 *next, gfp_t gfp)
{
        u32 min = limit.min;
        int ret;

        limit.min = max(min, *next);
        ret = __xa_alloc(xa, id, entry, limit, gfp);
        if ((xa->xa_flags & XA_FLAGS_ALLOC_WRAPPED) && ret == 0) {
                xa->xa_flags &= ~XA_FLAGS_ALLOC_WRAPPED;
                ret = 1;
        }

        if (ret < 0 && limit.min > min) {
                /* No free entries above *next; wrap and retry from min */
                limit.min = min;
                ret = __xa_alloc(xa, id, entry, limit, gfp);
                if (ret == 0)
                        ret = 1;
        }

        if (ret >= 0) {
                *next = *id + 1;
                if (*next == 0)
                        xa->xa_flags |= XA_FLAGS_ALLOC_WRAPPED;
        }
        return ret;
}
EXPORT_SYMBOL(__xa_alloc_cyclic);
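
/*
 * Illustrative sketch (not part of the original file): cyclic allocation
 * remembers the next candidate ID, so recently freed low IDs are not
 * reused immediately.  xa_alloc_cyclic() is the unlocked wrapper;
 * "sketch_ids2" and "item" are hypothetical.
 */
static DEFINE_XARRAY_ALLOC(sketch_ids2);
static u32 sketch_next_id;

static inline int sketch_alloc_cyclic(void *item, u32 *id)
{
        /* Returns 0, or 1 if the search wrapped, or a negative errno. */
        return xa_alloc_cyclic(&sketch_ids2, id, item, xa_limit_32b,
                               &sketch_next_id, GFP_KERNEL);
}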

/**
 * __xa_set_mark() - Set this mark on this entry while locked.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Attempting to set a mark on a %NULL entry does not succeed.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.
 */
void __xa_set_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
        XA_STATE(xas, xa, index);
        void *entry = xas_load(&xas);

        if (entry)
                xas_set_mark(&xas, mark);
}
EXPORT_SYMBOL(__xa_set_mark);

/**
 * __xa_clear_mark() - Clear this mark on this entry while locked.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.
 */
void __xa_clear_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
        XA_STATE(xas, xa, index);
        void *entry = xas_load(&xas);

        if (entry)
                xas_clear_mark(&xas, mark);
}
EXPORT_SYMBOL(__xa_clear_mark);

/**
 * xa_get_mark() - Inquire whether this mark is set on this entry.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * This function uses the RCU read lock, so the result may be out of date
 * by the time it returns.  If you need the result to be stable, use a lock.
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: True if the entry at @index has this mark set, false if it doesn't.
 */
bool xa_get_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
        XA_STATE(xas, xa, index);
        void *entry;

        rcu_read_lock();
        entry = xas_start(&xas);
        while (xas_get_mark(&xas, mark)) {
                if (!xa_is_node(entry))
                        goto found;
                entry = xas_descend(&xas, xa_to_node(entry));
        }
        rcu_read_unlock();
        return false;
found:
        rcu_read_unlock();
        return true;
}
EXPORT_SYMBOL(xa_get_mark);

/**
 * xa_set_mark() - Set this mark on this entry.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Attempting to set a mark on a %NULL entry does not succeed.
 *
 * Context: Process context.  Takes and releases the xa_lock.
 */
void xa_set_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
        xa_lock(xa);
        __xa_set_mark(xa, index, mark);
        xa_unlock(xa);
}
EXPORT_SYMBOL(xa_set_mark);

/**
 * xa_clear_mark() - Clear this mark on this entry.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Clearing a mark always succeeds.
 *
 * Context: Process context.  Takes and releases the xa_lock.
 */
void xa_clear_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
        xa_lock(xa);
        __xa_clear_mark(xa, index, mark);
        xa_unlock(xa);
}
EXPORT_SYMBOL(xa_clear_mark);
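
/*
 * Illustrative sketch (not part of the original file): marks are per-entry
 * flag bits (XA_MARK_0..XA_MARK_2) that can later drive a filtered search.
 * "sketch_xa" is hypothetical.
 */
static inline bool sketch_mark_and_test(struct xarray *sketch_xa,
                unsigned long index)
{
        xa_set_mark(sketch_xa, index, XA_MARK_0);  /* no-op if slot is NULL */
        return xa_get_mark(sketch_xa, index, XA_MARK_0);
}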

/**
 * xa_find() - Search the XArray for an entry.
 * @xa: XArray.
 * @indexp: Pointer to an index.
 * @max: Maximum index to search to.
 * @filter: Selection criterion.
 *
 * Finds the entry in @xa which matches the @filter, and has the lowest
 * index that is at least @indexp and no more than @max.
 * If an entry is found, @indexp is updated to be the index of the entry.
 * This function is protected by the RCU read lock, so it may not find
 * entries which are being simultaneously added.  It will not return an
 * %XA_RETRY_ENTRY; if you need to see retry entries, use xas_find().
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The entry, if found, otherwise %NULL.
 */
void *xa_find(struct xarray *xa, unsigned long *indexp,
                        unsigned long max, xa_mark_t filter)
{
        XA_STATE(xas, xa, *indexp);
        void *entry;

        rcu_read_lock();
        do {
                if ((__force unsigned int)filter < XA_MAX_MARKS)
                        entry = xas_find_marked(&xas, max, filter);
                else
                        entry = xas_find(&xas, max);
        } while (xas_retry(&xas, entry));
        rcu_read_unlock();

        if (entry)
                *indexp = xas.xa_index;
        return entry;
}
EXPORT_SYMBOL(xa_find);

static bool xas_sibling(struct xa_state *xas)
{
        struct xa_node *node = xas->xa_node;
        unsigned long mask;

        if (!node)
                return false;
        mask = (XA_CHUNK_SIZE << node->shift) - 1;
        return (xas->xa_index & mask) >
                ((unsigned long)xas->xa_offset << node->shift);
}

/**
 * xa_find_after() - Search the XArray for a present entry.
 * @xa: XArray.
 * @indexp: Pointer to an index.
 * @max: Maximum index to search to.
 * @filter: Selection criterion.
 *
 * Finds the entry in @xa which matches the @filter and has the lowest
 * index that is above @indexp and no more than @max.
 * If an entry is found, @indexp is updated to be the index of the entry.
 * This function is protected by the RCU read lock, so it may miss entries
 * which are being simultaneously added.  It will not return an
 * %XA_RETRY_ENTRY; if you need to see retry entries, use xas_find().
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The pointer, if found, otherwise %NULL.
 */
void *xa_find_after(struct xarray *xa, unsigned long *indexp,
                        unsigned long max, xa_mark_t filter)
{
        XA_STATE(xas, xa, *indexp + 1);
        void *entry;

        if (xas.xa_index == 0)
                return NULL;

        rcu_read_lock();
        for (;;) {
                if ((__force unsigned int)filter < XA_MAX_MARKS)
                        entry = xas_find_marked(&xas, max, filter);
                else
                        entry = xas_find(&xas, max);

                if (xas_invalid(&xas))
                        break;
                if (xas_sibling(&xas))
                        continue;
                if (!xas_retry(&xas, entry))
                        break;
        }
        rcu_read_unlock();

        if (entry)
                *indexp = xas.xa_index;
        return entry;
}
EXPORT_SYMBOL(xa_find_after);
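
/*
 * Illustrative sketch (not part of the original file): xa_find() and
 * xa_find_after() are the building blocks of the xa_for_each() iterator;
 * an explicit loop looks like this.  Passing XA_PRESENT visits all
 * entries; passing a mark (e.g. XA_MARK_0) visits only marked ones.
 * "sketch_xa" and "process" are hypothetical.
 */
static inline void sketch_walk(struct xarray *sketch_xa,
                void (*process)(unsigned long index, void *entry))
{
        unsigned long index = 0;
        void *entry;

        for (entry = xa_find(sketch_xa, &index, ULONG_MAX, XA_PRESENT);
             entry;
             entry = xa_find_after(sketch_xa, &index, ULONG_MAX, XA_PRESENT))
                process(index, entry);
}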

static unsigned int xas_extract_present(struct xa_state *xas, void **dst,
                        unsigned long max, unsigned int n)
{
        void *entry;
        unsigned int i = 0;

        rcu_read_lock();
        xas_for_each(xas, entry, max) {
                if (xas_retry(xas, entry))
                        continue;
                dst[i++] = entry;
                if (i == n)
                        break;
        }
        rcu_read_unlock();

        return i;
}

static unsigned int xas_extract_marked(struct xa_state *xas, void **dst,
                        unsigned long max, unsigned int n, xa_mark_t mark)
{
        void *entry;
        unsigned int i = 0;

        rcu_read_lock();
        xas_for_each_marked(xas, entry, max, mark) {
                if (xas_retry(xas, entry))
                        continue;
                dst[i++] = entry;
                if (i == n)
                        break;
        }
        rcu_read_unlock();

        return i;
}

/**
 * xa_extract() - Copy selected entries from the XArray into a normal array.
 * @xa: The source XArray to copy from.
 * @dst: The buffer to copy entries into.
 * @start: The first index in the XArray eligible to be selected.
 * @max: The last index in the XArray eligible to be selected.
 * @n: The maximum number of entries to copy.
 * @filter: Selection criterion.
 *
 * Copies up to @n entries that match @filter from the XArray.  The
 * copied entries will have indices between @start and @max, inclusive.
 *
 * The @filter may be an XArray mark value, in which case entries which are
 * marked with that mark will be copied.  It may also be %XA_PRESENT, in
 * which case all entries which are not %NULL will be copied.
 *
 * The entries returned may not represent a snapshot of the XArray at a
 * moment in time.  For example, if another thread stores to index 5, then
 * index 10, calling xa_extract() may return the old contents of index 5
 * and the new contents of index 10.  Indices not modified while this
 * function is running will not be skipped.
 *
 * If you need stronger guarantees, holding the xa_lock across calls to this
 * function will prevent concurrent modification.
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The number of entries copied.
 */
unsigned int xa_extract(struct xarray *xa, void **dst, unsigned long start,
                        unsigned long max, unsigned int n, xa_mark_t filter)
{
        XA_STATE(xas, xa, start);

        if (!n)
                return 0;

        if ((__force unsigned int)filter < XA_MAX_MARKS)
                return xas_extract_marked(&xas, dst, max, n, filter);
        return xas_extract_present(&xas, dst, max, n);
}
EXPORT_SYMBOL(xa_extract);
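
/*
 * Illustrative sketch (not part of the original file): batch up to 16
 * present entries starting at index 0 into a plain array.  "sketch_xa"
 * is hypothetical.
 */
static inline unsigned int sketch_batch(struct xarray *sketch_xa, void **batch)
{
        /* Returns how many slots of batch[] were filled (at most 16). */
        return xa_extract(sketch_xa, batch, 0, ULONG_MAX, 16, XA_PRESENT);
}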

/**
 * xa_destroy() - Free all internal data structures.
 * @xa: XArray.
 *
 * After calling this function, the XArray is empty and has freed all memory
 * allocated for its internal data structures.  You are responsible for
 * freeing the objects referenced by the XArray.
 *
 * Context: Any context.  Takes and releases the xa_lock, interrupt-safe.
 */
void xa_destroy(struct xarray *xa)
{
        XA_STATE(xas, xa, 0);
        unsigned long flags;
        void *entry;

        xas.xa_node = NULL;
        xas_lock_irqsave(&xas, flags);
        entry = xa_head_locked(xa);
        RCU_INIT_POINTER(xa->xa_head, NULL);
        xas_init_marks(&xas);
        if (xa_zero_busy(xa))
                xa_mark_clear(xa, XA_FREE_MARK);
        /* lockdep checks we're still holding the lock in xas_free_nodes() */
        if (xa_is_node(entry))
                xas_free_nodes(&xas, xa_to_node(entry));
        xas_unlock_irqrestore(&xas, flags);
}
EXPORT_SYMBOL(xa_destroy);
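
/*
 * Illustrative sketch (not part of the original file): xa_destroy() frees
 * only the tree nodes, so callers must free the stored objects themselves,
 * e.g. by iterating before destroying.  "sketch_xa" is hypothetical, and
 * the kfree() assumes the entries were kmalloc'ed.
 */
static inline void sketch_teardown(struct xarray *sketch_xa)
{
        unsigned long index;
        void *entry;

        xa_for_each(sketch_xa, index, entry)
                kfree(entry);   /* assumption: entries came from kmalloc() */
        xa_destroy(sketch_xa);
}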

#ifdef XA_DEBUG
void xa_dump_node(const struct xa_node *node)
{
        unsigned i, j;

        if (!node)
                return;
        if ((unsigned long)node & 3) {
                pr_cont("node %px\n", node);
                return;
        }

        pr_cont("node %px %s %d parent %px shift %d count %d values %d "
                "array %px list %px %px marks",
                node, node->parent ? "offset" : "max", node->offset,
                node->parent, node->shift, node->count, node->nr_values,
                node->array, node->private_list.prev, node->private_list.next);
        for (i = 0; i < XA_MAX_MARKS; i++)
                for (j = 0; j < XA_MARK_LONGS; j++)
                        pr_cont(" %lx", node->marks[i][j]);
        pr_cont("\n");
}

void xa_dump_index(unsigned long index, unsigned int shift)
{
        if (!shift)
                pr_info("%lu: ", index);
        else if (shift >= BITS_PER_LONG)
                pr_info("0-%lu: ", ~0UL);
        else
                pr_info("%lu-%lu: ", index, index | ((1UL << shift) - 1));
}

void xa_dump_entry(const void *entry, unsigned long index, unsigned long shift)
{
        if (!entry)
                return;

        xa_dump_index(index, shift);

        if (xa_is_node(entry)) {
                if (shift == 0) {
                        pr_cont("%px\n", entry);
                } else {
                        unsigned long i;
                        struct xa_node *node = xa_to_node(entry);
                        xa_dump_node(node);
                        for (i = 0; i < XA_CHUNK_SIZE; i++)
                                xa_dump_entry(node->slots[i],
                                      index + (i << node->shift), node->shift);
                }
        } else if (xa_is_value(entry))
                pr_cont("value %ld (0x%lx) [%px]\n", xa_to_value(entry),
                                                xa_to_value(entry), entry);
        else if (!xa_is_internal(entry))
                pr_cont("%px\n", entry);
        else if (xa_is_retry(entry))
                pr_cont("retry (%ld)\n", xa_to_internal(entry));
        else if (xa_is_sibling(entry))
                pr_cont("sibling (slot %ld)\n", xa_to_sibling(entry));
        else if (xa_is_zero(entry))
                pr_cont("zero (%ld)\n", xa_to_internal(entry));
        else
                pr_cont("UNKNOWN ENTRY (%px)\n", entry);
}

void xa_dump(const struct xarray *xa)
{
        void *entry = xa->xa_head;
        unsigned int shift = 0;

        pr_info("xarray: %px head %px flags %x marks %d %d %d\n", xa, entry,
                        xa->xa_flags, xa_marked(xa, XA_MARK_0),
                        xa_marked(xa, XA_MARK_1), xa_marked(xa, XA_MARK_2));
        if (xa_is_node(entry))
                shift = xa_to_node(entry)->shift + XA_CHUNK_SHIFT;
        xa_dump_entry(entry, 0, shift);
}
#endif