/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/kmemleak.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/preempt.h>		/* in_interrupt() */


/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
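/*
 * As a worked example (figures are illustrative and depend on the
 * configuration in linux/radix-tree.h): on 64-bit with
 * RADIX_TREE_MAP_SHIFT == 6, RADIX_TREE_MAX_PATH is
 * DIV_ROUND_UP(64, 6) == 11, so each CPU preloads at most
 * 2 * 11 - 1 == 21 nodes.
 */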

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;
	/* nodes->private_data points to next preallocated node */
	struct radix_tree_node *nodes;
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline void *ptr_to_indirect(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	int idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading in interrupt context anyway, as all the allocations
	 * have to be atomic. So just do a normal allocation when in
	 * interrupt.
	 */
	if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes;
			rtp->nodes = ret->private_data;
			ret->private_data = NULL;
			rtp->nr--;
		}
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
	}
	if (ret == NULL)
		ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);

	BUG_ON(radix_tree_is_indirect_ptr(ret));
	return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
			node->private_data = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		} else {
			kmem_cache_free(radix_tree_node_cachep, node);
		}
	}
	ret = 0;
out:
	return ret;
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on non-sensical use... */
	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	return __radix_tree_preload(gfp_mask);
}
EXPORT_SYMBOL(radix_tree_preload);
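
/*
 * Typical usage (an illustrative sketch, not a contract; my_tree,
 * my_lock, index and item are placeholders):
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&my_lock);
 *	error = radix_tree_insert(&my_tree, index, item);
 *	spin_unlock(&my_lock);
 *	radix_tree_preload_end();	(re-enables preemption)
 */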

/*
 * The same as the function above, except preloading is not guaranteed:
 * we preload only if we decide it would help. On success, return zero
 * with preemption disabled. On error, return -ENOMEM with preemption
 * not disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfpflags_allow_blocking(gfp_mask))
		return __radix_tree_preload(gfp_mask);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);

/*
 *	Return the maximum key which can be stored in a
 *	radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}
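
/*
 * For example (illustrative; the actual constants come from
 * linux/radix-tree.h): with RADIX_TREE_MAP_SHIFT == 6, a height-1 tree
 * covers indices 0..63, height 2 covers 0..4095, and each extra level
 * multiplies the range by RADIX_TREE_MAP_SIZE.
 */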

/*
 *	Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_node *node;
	struct radix_tree_node *slot;
	unsigned int height;
	int tag;

	/* Figure out what the height should be.  */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	if (root->rnode == NULL) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;
		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		/* Increase the height.  */
		newheight = root->height+1;
		BUG_ON(newheight & ~RADIX_TREE_HEIGHT_MASK);
		node->path = newheight;
		node->count = 1;
		node->parent = NULL;
		slot = root->rnode;
		if (newheight > 1) {
			slot = indirect_to_ptr(slot);
			slot->parent = node;
		}
		node->slots[0] = slot;
		node = ptr_to_indirect(node);
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}

/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift, offset;
	int error;

	/* Make sure the tree is high enough.  */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index);
		if (error)
			return error;
	}

	slot = indirect_to_ptr(root->rnode);

	height = root->height;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (height > 0) {
		if (slot == NULL) {
			/* Have to add a child node.  */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->path = height;
			slot->parent = node;
			if (node) {
				rcu_assign_pointer(node->slots[offset], slot);
				node->count++;
				slot->path |= offset << RADIX_TREE_HEIGHT_SHIFT;
			} else
				rcu_assign_pointer(root->rnode, ptr_to_indirect(slot));
		}

		/* Go a level down */
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = node->slots[offset];
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = node ? node->slots + offset : (void **)&root->rnode;
	return 0;
}

/**
 *	radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int radix_tree_insert(struct radix_tree_root *root,
			unsigned long index, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_indirect_ptr(item));

	error = __radix_tree_create(root, index, &node, &slot);
	if (error)
		return error;
	if (*slot != NULL)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);

	if (node) {
		node->count++;
		BUG_ON(tag_get(node, 0, index & RADIX_TREE_MAP_MASK));
		BUG_ON(tag_get(node, 1, index & RADIX_TREE_MAP_MASK));
	} else {
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(radix_tree_insert);

/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned int height, shift;
	void **slot;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		if (index > 0)
			return NULL;

		if (nodep)
			*nodep = NULL;
		if (slotp)
			*slotp = (void **)&root->rnode;
		return node;
	}
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	do {
		parent = node;
		slot = node->slots + ((index >> shift) & RADIX_TREE_MAP_MASK);
		node = rcu_dereference_raw(*slot);
		if (node == NULL)
			return NULL;

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}

/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	with exclusion from other writers. Any dereference of the slot
 *	must be done using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);

/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);
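
/*
 * A typical RCU-side caller looks like this (an illustrative sketch;
 * my_tree and index are placeholders):
 *
 *	rcu_read_lock();
 *	item = radix_tree_lookup(&my_tree, index);
 *	if (item)
 *		... use item, subject to the lifetime rules above ...
 *	rcu_read_unlock();
 */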

/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree, from the root all
 *	the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a
 *	not-present item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = indirect_to_ptr(root->rnode);
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);
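
/*
 * Illustrative tag round-trip (a sketch; my_tree, index and MY_TAG are
 * placeholders, and the item at index must already be present):
 *
 *	radix_tree_tag_set(&my_tree, index, MY_TAG);
 *	if (radix_tree_tag_get(&my_tree, index, MY_TAG))
 *		radix_tree_tag_clear(&my_tree, index, MY_TAG);
 */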

/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If
 *	this causes the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL; i.e. it
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = height * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	while (shift) {
		if (slot == NULL)
			goto out;

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	}

	if (slot == NULL)
		goto out;

	while (node) {
		if (!tag_get(node, tag, offset))
			goto out;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			goto out;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return 0;

	if (!radix_tree_is_indirect_ptr(node))
		return (index == 0);
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(node, tag, offset))
			return 0;
		if (height == 1)
			return 1;
		node = rcu_dereference_raw(node->slots[offset]);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *rnode, *node;
	unsigned long index, offset, height;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

	rnode = rcu_dereference_raw(root->rnode);
	if (radix_tree_is_indirect_ptr(rnode)) {
		rnode = indirect_to_ptr(rnode);
	} else if (rnode && !index) {
		/* Single-slot tree */
		iter->index = 0;
		iter->next_index = 1;
		iter->tags = 1;
		return (void **)&root->rnode;
	} else
		return NULL;

restart:
	height = rnode->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	offset = index >> shift;

	/* Index outside of the tree */
	if (offset >= RADIX_TREE_MAP_SIZE)
		return NULL;

	node = rnode;
	while (1) {
		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!test_bit(offset, node->tags[tag]) :
				!node->slots[offset]) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					if (node->slots[offset])
						break;
				}
			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
			index += offset << shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
		}

		/* This is a leaf node */
		if (!shift)
			break;

		node = rcu_dereference_raw(node->slots[offset]);
		if (node == NULL)
			goto restart;
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
	}

	/* Update the iterator state */
	iter->index = index;
	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
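
/*
 * radix_tree_next_chunk() is normally reached through the iterator
 * macros in linux/radix-tree.h rather than called directly, e.g.
 * (an illustrative sketch; my_tree and start are placeholders):
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	radix_tree_for_each_slot(slot, &my_tree, &iter, start) {
 *		void *item = radix_tree_deref_slot(slot);
 *		...
 *	}
 */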

/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				   tag if item has another tag set
 * @root:		radix tree root
 * @first_indexp:	pointer to a starting index of a range to scan
 * @last_index:		last index of a range to scan
 * @nr_to_tag:		maximum number of items to tag
 * @iftag:		tag index to test
 * @settag:		tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive).  For each item in the range that has iftag
 * set, the function also sets settag. The function stops either after
 * tagging nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	unsigned int height = root->height;
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot;
	unsigned int shift;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	last_index = min(last_index, radix_tree_maxindex(height));
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (height == 0) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	for (;;) {
		unsigned long upindex;
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!slot->slots[offset])
			goto next;
		if (!tag_get(slot, iftag, offset))
			goto next;
		if (shift) {
			/* Go down one level */
			shift -= RADIX_TREE_MAP_SHIFT;
			node = slot;
			slot = slot->slots[offset];
			continue;
		}

		/* tag the leaf */
		tagged++;
		tag_set(slot, settag, offset);

		/* walk back up the path tagging interior nodes */
		upindex = index;
		while (node) {
			upindex >>= RADIX_TREE_MAP_SHIFT;
			offset = upindex & RADIX_TREE_MAP_MASK;

			/* stop if we find a node with the tag already set */
			if (tag_get(node, settag, offset))
				break;
			tag_set(node, settag, offset);
			node = node->parent;
		}

		/*
		 * Small optimization: now clear that node pointer.
		 * Since all of this slot's ancestors now have the tag set
		 * from setting it above, we have no further need to walk
		 * back up the tree setting tags, until we update slot to
		 * point to another radix_tree_node.
		 */
		node = NULL;

next:
		/* Go to next item at level determined by 'shift' */
		index = ((index >> shift) + 1) << shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		if (tagged >= nr_to_tag)
			break;
		while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			slot = slot->parent;
			shift += RADIX_TREE_MAP_SHIFT;
		}
	}
	/*
	 * The root tag needs to be set only if some entry in the range
	 * [*first_indexp, last_index] actually had settag set on it.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
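
/*
 * Example (an illustrative sketch of a writeback-style user that marks
 * entries carrying one tag with a second "to do" tag; my_tree, start,
 * end, batch, MY_TAG_DIRTY and MY_TAG_TOWRITE are placeholders):
 *
 *	unsigned long index = start;
 *	radix_tree_range_tag_if_tagged(&my_tree, &index, end, batch,
 *				       MY_TAG_DIRTY, MY_TAG_TOWRITE);
 */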

/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the
 *	semantics of an RCU protected gang lookup are as though multiple
 *	radix_tree_lookups have been issued under individual locks, with the
 *	results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_indirect_ptr(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
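
/*
 * Example (an illustrative sketch; my_tree and first are placeholders):
 *
 *	void *batch[16];
 *	unsigned int n;
 *
 *	rcu_read_lock();
 *	n = radix_tree_gang_lookup(&my_tree, batch, first, ARRAY_SIZE(batch));
 *	rcu_read_unlock();
 */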

/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_indirect_ptr(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);

#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, unsigned long *found_index)
{
	unsigned int shift, height;
	unsigned long i;

	height = slot->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		shift -= RADIX_TREE_MAP_SHIFT;
		slot = rcu_dereference_raw(slot->slots[i]);
		if (slot == NULL)
			goto out;
	}

	/* Bottom level: check items */
	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		if (slot->slots[i] == item) {
			*found_index = index + i;
			index = 0;
			goto out;
		}
	}
	index += RADIX_TREE_MAP_SIZE;
out:
	return index;
}

/**
 *	radix_tree_locate_item - search through radix tree for item
 *	@root:		radix tree root
 *	@item:		item to be found
 *
 *	Returns index where item was found, or -1 if not found.
 *	Caller must hold no lock (since this time-consuming function needs
 *	to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	unsigned long found_index = -1;

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_indirect_ptr(node)) {
			rcu_read_unlock();
			if (node == item)
				found_index = 0;
			break;
		}

		node = indirect_to_ptr(node);
		max_index = radix_tree_maxindex(node->path &
						RADIX_TREE_HEIGHT_MASK);
		if (cur_index > max_index) {
			rcu_read_unlock();
			break;
		}

		cur_index = __locate(node, item, cur_index, &found_index);
		rcu_read_unlock();
		cond_resched();
	} while (cur_index != 0 && cur_index <= max_index);

	return found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */

/**
 *	radix_tree_shrink    -    shrink the height of a radix tree to the minimum
 *	@root:		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0) {
		struct radix_tree_node *to_free = root->rnode;
		struct radix_tree_node *slot;

		BUG_ON(!radix_tree_is_indirect_ptr(to_free));
		to_free = indirect_to_ptr(to_free);

		/*
		 * If the candidate node has more than one child, or its
		 * child is not in the leftmost slot, we cannot shrink.
		 */
		if (to_free->count != 1)
			break;
		if (!to_free->slots[0])
			break;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		slot = to_free->slots[0];
		if (root->height > 1) {
			slot->parent = NULL;
			slot = ptr_to_indirect(slot);
		}
		root->rnode = slot;
		root->height--;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If the item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page has a zero refcount it
		 * means it was concurrently deleted from pagecache, so try
		 * the deref again. Fortunately there is already a requirement
		 * for logic to retry the entire slot lookup -- the indirect
		 * pointer problem (replacing a direct root node with an
		 * indirect pointer also results in a stale slot). So tag the
		 * slot as indirect to force callers to retry.
		 */
		if (root->height == 0)
			*((unsigned long *)&to_free->slots[0]) |=
						RADIX_TREE_INDIRECT_PTR;

		radix_tree_node_free(to_free);
	}
}

/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 *
 *	Returns %true if @node was freed, %false otherwise.
 */
bool __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node)
{
	bool deleted = false;

	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == indirect_to_ptr(root->rnode)) {
				radix_tree_shrink(root);
				if (root->height == 0)
					deleted = true;
			}
			return deleted;
		}

		parent = node->parent;
		if (parent) {
			unsigned int offset;

			offset = node->path >> RADIX_TREE_HEIGHT_SHIFT;
			parent->slots[offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->height = 0;
			root->rnode = NULL;
		}

		radix_tree_node_free(node);
		deleted = true;

		node = parent;
	} while (node);

	return deleted;
}

/**
 *	radix_tree_delete_item    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		expected item
 *
 *	Remove @item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present
 *	or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = index & RADIX_TREE_MAP_MASK;

	/*
	 * Clear all tags associated with the item to be deleted.
	 * This way of doing it is inefficient, but tags are seldom set.
	 */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
		if (tag_get(node, tag, offset))
			radix_tree_tag_clear(root, index, tag);
	}

	node->slots[offset] = NULL;
	node->count--;

	__radix_tree_delete_node(root, node);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);

/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
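
/*
 * Deletion is a write operation and must be serialized against other
 * updaters, e.g. (an illustrative sketch; my_tree, my_lock and index
 * are placeholders):
 *
 *	spin_lock(&my_lock);
 *	item = radix_tree_delete(&my_tree, index);
 *	spin_unlock(&my_lock);
 */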

/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}

static __init void radix_tree_init_maxindex(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
}

static int radix_tree_callback(struct notifier_block *nfb,
			       unsigned long action,
			       void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;

	/* Free the per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			node = rtp->nodes;
			rtp->nodes = node->private_data;
			kmem_cache_free(radix_tree_node_cachep, node);
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}

void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxindex();
	hotcpu_notifier(radix_tree_callback, 0);
}