LCOV - code coverage report
Current view:  top level - lib - radix-tree.c
Test:          coverage.info
Date:          2023-08-24 13:40:31

                 Hit    Total    Coverage
Lines:           276      505      54.7 %
Functions:        23       41      56.1 %

          Line data    Source code
       1             : // SPDX-License-Identifier: GPL-2.0-or-later
       2             : /*
       3             :  * Copyright (C) 2001 Momchil Velikov
       4             :  * Portions Copyright (C) 2001 Christoph Hellwig
       5             :  * Copyright (C) 2005 SGI, Christoph Lameter
       6             :  * Copyright (C) 2006 Nick Piggin
       7             :  * Copyright (C) 2012 Konstantin Khlebnikov
       8             :  * Copyright (C) 2016 Intel, Matthew Wilcox
       9             :  * Copyright (C) 2016 Intel, Ross Zwisler
      10             :  */
      11             : 
      12             : #include <linux/bitmap.h>
      13             : #include <linux/bitops.h>
      14             : #include <linux/bug.h>
      15             : #include <linux/cpu.h>
      16             : #include <linux/errno.h>
      17             : #include <linux/export.h>
      18             : #include <linux/idr.h>
      19             : #include <linux/init.h>
      20             : #include <linux/kernel.h>
      21             : #include <linux/kmemleak.h>
      22             : #include <linux/percpu.h>
      23             : #include <linux/preempt.h>                /* in_interrupt() */
      24             : #include <linux/radix-tree.h>
      25             : #include <linux/rcupdate.h>
      26             : #include <linux/slab.h>
      27             : #include <linux/string.h>
      28             : #include <linux/xarray.h>
      29             : 
      30             : #include "radix-tree.h"
      31             : 
      32             : /*
      33             :  * Radix tree node cache.
      34             :  */
      35             : struct kmem_cache *radix_tree_node_cachep;
      36             : 
      37             : /*
      38             :  * The radix tree is variable-height, so an insert operation not only has
      39             :  * to build the branch to its corresponding item, it also has to build the
      40             :  * branch to existing items if the size has to be increased (by
      41             :  * radix_tree_extend).
      42             :  *
      43             :  * The worst case is a zero height tree with just a single item at index 0,
      44             :  * and then inserting an item at index ULONG_MAX. This requires 2 new branches
      45             :  * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
      46             :  * Hence:
      47             :  */
      48             : #define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
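
For concreteness, assuming 64-bit longs and the default RADIX_TREE_MAP_SHIFT of 6
(64 slots per node), RADIX_TREE_MAX_PATH = DIV_ROUND_UP(64, 6) = 11, so the preload
size above works out to 2 * 11 - 1 = 21 nodes: one full-depth branch for the new
item at index ULONG_MAX, a second full-depth branch to push the existing index-0
item down, minus the single new root node the two branches share.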
      49             : 
      50             : /*
      51             :  * The IDR does not have to be as high as the radix tree since it uses
      52             :  * signed integers, not unsigned longs.
      53             :  */
      54             : #define IDR_INDEX_BITS          (8 /* CHAR_BIT */ * sizeof(int) - 1)
      55             : #define IDR_MAX_PATH            (DIV_ROUND_UP(IDR_INDEX_BITS, \
      56             :                                                 RADIX_TREE_MAP_SHIFT))
      57             : #define IDR_PRELOAD_SIZE        (IDR_MAX_PATH * 2 - 1)
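
Under the same assumption (RADIX_TREE_MAP_SHIFT = 6), IDR_INDEX_BITS is 31, so
IDR_MAX_PATH = DIV_ROUND_UP(31, 6) = 6 and IDR_PRELOAD_SIZE = 2 * 6 - 1 = 11,
roughly half the radix tree's preload requirement.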
      58             : 
      59             : /*
      60             :  * Per-cpu pool of preloaded nodes
      61             :  */
      62             : DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = {
      63             :         .lock = INIT_LOCAL_LOCK(lock),
      64             : };
      65             : EXPORT_PER_CPU_SYMBOL_GPL(radix_tree_preloads);
      66             : 
      67             : static inline struct radix_tree_node *entry_to_node(void *ptr)
      68             : {
      69       33891 :         return (void *)((unsigned long)ptr & ~RADIX_TREE_INTERNAL_NODE);
      70             : }
      71             : 
      72             : static inline void *node_to_entry(void *ptr)
      73             : {
      74        9841 :         return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
      75             : }
      76             : 
      77             : #define RADIX_TREE_RETRY        XA_RETRY_ENTRY
      78             : 
      79             : static inline unsigned long
      80             : get_slot_offset(const struct radix_tree_node *parent, void __rcu **slot)
      81             : {
      82        9480 :         return parent ? slot - parent->slots : 0;
      83             : }
      84             : 
      85             : static unsigned int radix_tree_descend(const struct radix_tree_node *parent,
      86             :                         struct radix_tree_node **nodep, unsigned long index)
      87             : {
      88       23254 :         unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK;
      89       23254 :         void __rcu **entry = rcu_dereference_raw(parent->slots[offset]);
      90             : 
      91       23254 :         *nodep = (void *)entry;
      92             :         return offset;
      93             : }
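
The offset computation above is a plain bit-field extraction: each tree level
consumes RADIX_TREE_MAP_SHIFT bits of the index, most-significant chunk first.
The following is a minimal userspace sketch of the same arithmetic, assuming the
default shift of 6 (64 slots per node); MAP_SHIFT, MAP_MASK and the example index
are illustrative only, not taken from this file:

    #include <stdio.h>

    #define MAP_SHIFT 6                        /* assumed RADIX_TREE_MAP_SHIFT */
    #define MAP_MASK  ((1UL << MAP_SHIFT) - 1)

    int main(void)
    {
            unsigned long index = 0x3041;      /* arbitrary example index */
            unsigned int shift;

            /* Walk from the top level (shift 12) down to the leaves (shift 0),
             * printing the slot offset chosen at each level: 3, 1, 1. */
            for (shift = 2 * MAP_SHIFT; ; shift -= MAP_SHIFT) {
                    printf("shift %2u -> offset %lu\n",
                           shift, (index >> shift) & MAP_MASK);
                    if (shift == 0)
                            break;
            }
            return 0;
    }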
      94             : 
      95             : static inline gfp_t root_gfp_mask(const struct radix_tree_root *root)
      96             : {
      97           0 :         return root->xa_flags & (__GFP_BITS_MASK & ~GFP_ZONEMASK);
      98             : }
      99             : 
     100             : static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
     101             :                 int offset)
     102             : {
     103        1058 :         __set_bit(offset, node->tags[tag]);
     104             : }
     105             : 
     106             : static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
     107             :                 int offset)
     108             : {
     109       17796 :         __clear_bit(offset, node->tags[tag]);
     110             : }
     111             : 
     112       40598 : static inline int tag_get(const struct radix_tree_node *node, unsigned int tag,
     113             :                 int offset)
     114             : {
     115       81420 :         return test_bit(offset, node->tags[tag]);
     116             : }
     117             : 
     118             : static inline void root_tag_set(struct radix_tree_root *root, unsigned tag)
     119             : {
     120          10 :         root->xa_flags |= (__force gfp_t)(1 << (tag + ROOT_TAG_SHIFT));
     121             : }
     122             : 
     123             : static inline void root_tag_clear(struct radix_tree_root *root, unsigned tag)
     124             : {
     125           0 :         root->xa_flags &= (__force gfp_t)~(1 << (tag + ROOT_TAG_SHIFT));
     126             : }
     127             : 
     128             : static inline void root_tag_clear_all(struct radix_tree_root *root)
     129             : {
     130           0 :         root->xa_flags &= (__force gfp_t)((1 << ROOT_TAG_SHIFT) - 1);
     131             : }
     132             : 
     133             : static inline int root_tag_get(const struct radix_tree_root *root, unsigned tag)
     134             : {
     135        9267 :         return (__force int)root->xa_flags & (1 << (tag + ROOT_TAG_SHIFT));
     136             : }
     137             : 
     138             : static inline unsigned root_tags_get(const struct radix_tree_root *root)
     139             : {
     140           0 :         return (__force unsigned)root->xa_flags >> ROOT_TAG_SHIFT;
     141             : }
     142             : 
     143             : static inline bool is_idr(const struct radix_tree_root *root)
     144             : {
     145        9724 :         return !!(root->xa_flags & ROOT_IS_IDR);
     146             : }
     147             : 
     148             : /*
     149             :  * Returns 1 if any slot in the node has this tag set.
     150             :  * Otherwise returns 0.
     151             :  */
     152             : static inline int any_tag_set(const struct radix_tree_node *node,
     153             :                                                         unsigned int tag)
     154             : {
     155             :         unsigned idx;
     156         125 :         for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
     157        8898 :                 if (node->tags[tag][idx])
     158             :                         return 1;
     159             :         }
     160             :         return 0;
     161             : }
     162             : 
     163             : static inline void all_tag_set(struct radix_tree_node *node, unsigned int tag)
     164             : {
     165         376 :         bitmap_fill(node->tags[tag], RADIX_TREE_MAP_SIZE);
     166             : }
     167             : 
     168             : /**
     169             :  * radix_tree_find_next_bit - find the next set bit in a memory region
     170             :  *
     171             :  * @node: where to begin the search
     172             :  * @tag: the tag index
     173             :  * @offset: the bit number to start searching at
     174             :  *
     175             :  * Unrollable variant of find_next_bit() for constant size arrays.
     176             :  * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
     177             :  * Returns next bit offset, or size if nothing found.
     178             :  */
     179             : static __always_inline unsigned long
     180             : radix_tree_find_next_bit(struct radix_tree_node *node, unsigned int tag,
     181             :                          unsigned long offset)
     182             : {
     183         147 :         const unsigned long *addr = node->tags[tag];
     184             : 
     185         147 :         if (offset < RADIX_TREE_MAP_SIZE) {
     186             :                 unsigned long tmp;
     187             : 
     188         147 :                 addr += offset / BITS_PER_LONG;
     189         147 :                 tmp = *addr >> (offset % BITS_PER_LONG);
     190         147 :                 if (tmp)
     191         294 :                         return __ffs(tmp) + offset;
     192           0 :                 offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
     193           0 :                 while (offset < RADIX_TREE_MAP_SIZE) {
     194           0 :                         tmp = *++addr;
     195           0 :                         if (tmp)
     196           0 :                                 return __ffs(tmp) + offset;
     197           0 :                         offset += BITS_PER_LONG;
     198             :                 }
     199             :         }
     200             :         return RADIX_TREE_MAP_SIZE;
     201             : }
     202             : 
     203             : static unsigned int iter_offset(const struct radix_tree_iter *iter)
     204             : {
     205        8773 :         return iter->index & RADIX_TREE_MAP_MASK;
     206             : }
     207             : 
     208             : /*
     209             :  * The maximum index which can be stored in a radix tree
     210             :  */
     211             : static inline unsigned long shift_maxindex(unsigned int shift)
     212             : {
     213       18683 :         return (RADIX_TREE_MAP_SIZE << shift) - 1;
     214             : }
     215             : 
     216             : static inline unsigned long node_maxindex(const struct radix_tree_node *node)
     217             : {
     218       37134 :         return shift_maxindex(node->shift);
     219             : }
     220             : 
     221             : static unsigned long next_index(unsigned long index,
     222             :                                 const struct radix_tree_node *node,
     223             :                                 unsigned long offset)
     224             : {
     225         294 :         return (index & ~node_maxindex(node)) + (offset << node->shift);
     226             : }
     227             : 
     228             : /*
     229             :  * This assumes that the caller has performed appropriate preallocation, and
     230             :  * that the caller has pinned this thread of control to the current CPU.
     231             :  */
     232             : static struct radix_tree_node *
     233         376 : radix_tree_node_alloc(gfp_t gfp_mask, struct radix_tree_node *parent,
     234             :                         struct radix_tree_root *root,
     235             :                         unsigned int shift, unsigned int offset,
     236             :                         unsigned int count, unsigned int nr_values)
     237             : {
     238         376 :         struct radix_tree_node *ret = NULL;
     239             : 
     240             :         /*
     241             :          * Preload code isn't irq safe and it doesn't make sense to use
     242             :          * preloading during an interrupt anyway as all the allocations have
     243             :          * to be atomic. So just do normal allocation when in interrupt.
     244             :          */
     245         746 :         if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
     246             :                 struct radix_tree_preload *rtp;
     247             : 
     248             :                 /*
     249             :                  * Even if the caller has preloaded, try to allocate from the
     250             :                  * cache first for the new node to get accounted to the memory
     251             :                  * cgroup.
     252             :                  */
     253         370 :                 ret = kmem_cache_alloc(radix_tree_node_cachep,
     254             :                                        gfp_mask | __GFP_NOWARN);
     255         370 :                 if (ret)
     256             :                         goto out;
     257             : 
     258             :                 /*
     259             :                  * Provided the caller has preloaded here, we will always
     260             :                  * succeed in getting a node here (and never reach
     261             :                  * kmem_cache_alloc)
     262             :                  */
     263           0 :                 rtp = this_cpu_ptr(&radix_tree_preloads);
     264           0 :                 if (rtp->nr) {
     265           0 :                         ret = rtp->nodes;
     266           0 :                         rtp->nodes = ret->parent;
     267           0 :                         rtp->nr--;
     268             :                 }
     269             :                 /*
     270             :                  * Update the allocation stack trace as this is more useful
     271             :                  * for debugging.
     272             :                  */
     273             :                 kmemleak_update_trace(ret);
     274             :                 goto out;
     275             :         }
     276           6 :         ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
     277             : out:
     278         376 :         BUG_ON(radix_tree_is_internal_node(ret));
     279         376 :         if (ret) {
     280         376 :                 ret->shift = shift;
     281         376 :                 ret->offset = offset;
     282         376 :                 ret->count = count;
     283         376 :                 ret->nr_values = nr_values;
     284         376 :                 ret->parent = parent;
     285         376 :                 ret->array = root;
     286             :         }
     287         376 :         return ret;
     288             : }
     289             : 
     290         237 : void radix_tree_node_rcu_free(struct rcu_head *head)
     291             : {
     292         237 :         struct radix_tree_node *node =
     293         237 :                         container_of(head, struct radix_tree_node, rcu_head);
     294             : 
     295             :         /*
     296             :          * Must only free zeroed nodes into the slab.  We can be left with
     297             :          * non-NULL entries by radix_tree_free_nodes, so clear the entries
     298             :          * and tags here.
     299             :          */
     300         474 :         memset(node->slots, 0, sizeof(node->slots));
     301         474 :         memset(node->tags, 0, sizeof(node->tags));
     302         474 :         INIT_LIST_HEAD(&node->private_list);
     303             : 
     304         237 :         kmem_cache_free(radix_tree_node_cachep, node);
     305         237 : }
     306             : 
     307             : static inline void
     308             : radix_tree_node_free(struct radix_tree_node *node)
     309             : {
     310         239 :         call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
     311             : }
     312             : 
     313             : /*
     314             :  * Load up this CPU's radix_tree_node buffer with sufficient objects to
     315             :  * ensure that the addition of a single element in the tree cannot fail.  On
     316             :  * success, return zero, with preemption disabled.  On error, return -ENOMEM
     317             :  * with preemption not disabled.
     318             :  *
     319             :  * To make use of this facility, the radix tree must be initialised without
     320             :  * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
     321             :  */
     322        8620 : static __must_check int __radix_tree_preload(gfp_t gfp_mask, unsigned nr)
     323             : {
     324             :         struct radix_tree_preload *rtp;
     325             :         struct radix_tree_node *node;
     326        8620 :         int ret = -ENOMEM;
     327             : 
     328             :         /*
     329             :          * Nodes preloaded by one cgroup can be used by another cgroup, so
     330             :          * they should never be accounted to any particular memory cgroup.
     331             :          */
     332        8620 :         gfp_mask &= ~__GFP_ACCOUNT;
     333             : 
     334        8620 :         local_lock(&radix_tree_preloads.lock);
     335        8620 :         rtp = this_cpu_ptr(&radix_tree_preloads);
     336       17251 :         while (rtp->nr < nr) {
     337          11 :                 local_unlock(&radix_tree_preloads.lock);
     338          11 :                 node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
     339          11 :                 if (node == NULL)
     340             :                         goto out;
     341          11 :                 local_lock(&radix_tree_preloads.lock);
     342          11 :                 rtp = this_cpu_ptr(&radix_tree_preloads);
     343          11 :                 if (rtp->nr < nr) {
     344          11 :                         node->parent = rtp->nodes;
     345          11 :                         rtp->nodes = node;
     346          11 :                         rtp->nr++;
     347             :                 } else {
     348           0 :                         kmem_cache_free(radix_tree_node_cachep, node);
     349             :                 }
     350             :         }
     351             :         ret = 0;
     352             : out:
     353        8620 :         return ret;
     354             : }
     355             : 
     356             : /*
     357             :  * Load up this CPU's radix_tree_node buffer with sufficient objects to
     358             :  * ensure that the addition of a single element in the tree cannot fail.  On
     359             :  * success, return zero, with preemption disabled.  On error, return -ENOMEM
     360             :  * with preemption not disabled.
     361             :  *
     362             :  * To make use of this facility, the radix tree must be initialised without
     363             :  * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
     364             :  */
     365           0 : int radix_tree_preload(gfp_t gfp_mask)
     366             : {
     367             :         /* Warn on non-sensical use... */
     368           0 :         WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
     369           0 :         return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
     370             : }
     371             : EXPORT_SYMBOL(radix_tree_preload);
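
As a usage illustration of the preload API documented above, here is a hedged
sketch of the conventional caller pattern: preload outside the tree lock so the
subsequent insertion cannot fail for lack of memory, insert under the caller's
own lock, then call radix_tree_preload_end() (from <linux/radix-tree.h>) to drop
the per-CPU preload lock. my_tree, my_lock and add_item() are hypothetical
caller-side names, not part of this file:

    #include <linux/gfp.h>
    #include <linux/radix-tree.h>
    #include <linux/spinlock.h>

    /* Hypothetical caller: insert 'item' at 'index' with preloading. */
    static int add_item(struct radix_tree_root *my_tree, spinlock_t *my_lock,
                        unsigned long index, void *item)
    {
            int err = radix_tree_preload(GFP_KERNEL);

            if (err)
                    return err;             /* -ENOMEM, preemption left enabled */

            spin_lock(my_lock);
            err = radix_tree_insert(my_tree, index, item);
            spin_unlock(my_lock);

            radix_tree_preload_end();       /* releases the preload local lock */
            return err;
    }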
     372             : 
     373             : /*
     374             :  * Same as the function above, except preloading is not guaranteed; we do it
     375             :  * only if we decide it helps. On success, return zero with preemption
     376             :  * disabled. On error, return -ENOMEM with preemption not disabled.
     377             :  */
     378           0 : int radix_tree_maybe_preload(gfp_t gfp_mask)
     379             : {
     380           0 :         if (gfpflags_allow_blocking(gfp_mask))
     381           0 :                 return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
     382             :         /* Preloading doesn't help anything with this gfp mask, skip it */
     383           0 :         local_lock(&radix_tree_preloads.lock);
     384           0 :         return 0;
     385             : }
     386             : EXPORT_SYMBOL(radix_tree_maybe_preload);
     387             : 
     388             : static unsigned radix_tree_load_root(const struct radix_tree_root *root,
     389             :                 struct radix_tree_node **nodep, unsigned long *maxindex)
     390             : {
     391        9660 :         struct radix_tree_node *node = rcu_dereference_raw(root->xa_head);
     392             : 
     393        9660 :         *nodep = node;
     394             : 
     395        9660 :         if (likely(radix_tree_is_internal_node(node))) {
     396        9647 :                 node = entry_to_node(node);
     397        9647 :                 *maxindex = node_maxindex(node);
     398        8760 :                 return node->shift + RADIX_TREE_MAP_SHIFT;
     399             :         }
     400             : 
     401             :         *maxindex = 0;
     402             :         return 0;
     403             : }
     404             : 
     405             : /*
     406             :  *      Extend a radix tree so it can store key @index.
     407             :  */
     408         116 : static int radix_tree_extend(struct radix_tree_root *root, gfp_t gfp,
     409             :                                 unsigned long index, unsigned int shift)
     410             : {
     411             :         void *entry;
     412             :         unsigned int maxshift;
     413             :         int tag;
     414             : 
     415             :         /* Figure out what the shift should be.  */
     416         116 :         maxshift = shift;
     417         232 :         while (index > shift_maxindex(maxshift))
     418           0 :                 maxshift += RADIX_TREE_MAP_SHIFT;
     419             : 
     420         116 :         entry = rcu_dereference_raw(root->xa_head);
     421         120 :         if (!entry && (!is_idr(root) || root_tag_get(root, IDR_FREE)))
     422             :                 goto out;
     423             : 
     424             :         do {
     425         114 :                 struct radix_tree_node *node = radix_tree_node_alloc(gfp, NULL,
     426             :                                                         root, shift, 0, 1, 0);
     427         114 :                 if (!node)
     428             :                         return -ENOMEM;
     429             : 
     430         228 :                 if (is_idr(root)) {
     431         114 :                         all_tag_set(node, IDR_FREE);
     432         228 :                         if (!root_tag_get(root, IDR_FREE)) {
     433           0 :                                 tag_clear(node, IDR_FREE, 0);
     434           0 :                                 root_tag_set(root, IDR_FREE);
     435             :                         }
     436             :                 } else {
     437             :                         /* Propagate the aggregated tag info to the new child */
     438           0 :                         for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
     439           0 :                                 if (root_tag_get(root, tag))
     440           0 :                                         tag_set(node, tag, 0);
     441             :                         }
     442             :                 }
     443             : 
     444         114 :                 BUG_ON(shift > BITS_PER_LONG);
     445         114 :                 if (radix_tree_is_internal_node(entry)) {
     446         114 :                         entry_to_node(entry)->parent = node;
     447           0 :                 } else if (xa_is_value(entry)) {
     448             :                         /* Moving a value entry root->xa_head to a node */
     449           0 :                         node->nr_values = 1;
     450             :                 }
     451             :                 /*
     452             :                  * entry was already in the radix tree, so we do not need
     453             :                  * rcu_assign_pointer here
     454             :                  */
     455         114 :                 node->slots[0] = (void __rcu *)entry;
     456         114 :                 entry = node_to_entry(node);
     457         114 :                 rcu_assign_pointer(root->xa_head, entry);
     458         114 :                 shift += RADIX_TREE_MAP_SHIFT;
     459         114 :         } while (shift <= maxshift);
     460             : out:
     461         116 :         return maxshift + RADIX_TREE_MAP_SHIFT;
     462             : }
     463             : 
     464             : /**
     465             :  *      radix_tree_shrink    -    shrink radix tree to minimum height
     466             :  *      @root:          radix tree root
     467             :  */
     468         652 : static inline bool radix_tree_shrink(struct radix_tree_root *root)
     469             : {
     470         652 :         bool shrunk = false;
     471             : 
     472         112 :         for (;;) {
     473         764 :                 struct radix_tree_node *node = rcu_dereference_raw(root->xa_head);
     474             :                 struct radix_tree_node *child;
     475             : 
     476         764 :                 if (!radix_tree_is_internal_node(node))
     477             :                         break;
     478         764 :                 node = entry_to_node(node);
     479             : 
     480             :                 /*
     481             :                  * If the candidate node has more than one child, or its child
     482             :                  * is not at the leftmost slot, we cannot shrink.
     483             :                  */
     484         764 :                 if (node->count != 1)
     485             :                         break;
     486         131 :                 child = rcu_dereference_raw(node->slots[0]);
     487         131 :                 if (!child)
     488             :                         break;
     489             : 
     490             :                 /*
     491             :                  * For an IDR, we must not shrink entry 0 into the root in
     492             :                  * case somebody calls idr_replace() with a pointer that
     493             :                  * appears to be an internal entry
     494             :                  */
     495         124 :                 if (!node->shift && is_idr(root))
     496             :                         break;
     497             : 
     498         112 :                 if (radix_tree_is_internal_node(child))
     499         112 :                         entry_to_node(child)->parent = NULL;
     500             : 
     501             :                 /*
     502             :                  * We don't need rcu_assign_pointer(), since we are simply
     503             :                  * moving the node from one part of the tree to another: if it
     504             :                  * was safe to dereference the old pointer to it
     505             :                  * (node->slots[0]), it will be safe to dereference the new
     506             :                  * one (root->xa_head) as far as dependent read barriers go.
     507             :                  */
     508         112 :                 root->xa_head = (void __rcu *)child;
     509         336 :                 if (is_idr(root) && !tag_get(node, IDR_FREE, 0))
     510           0 :                         root_tag_clear(root, IDR_FREE);
     511             : 
     512             :                 /*
     513             :                  * We have a dilemma here. The node's slot[0] must not be
     514             :                  * NULLed in case there are concurrent lookups expecting to
     515             :                  * find the item. However if this was a bottom-level node,
     516             :                  * then it may be subject to the slot pointer being visible
     517             :                  * to callers dereferencing it. If item corresponding to
     518             :                  * slot[0] is subsequently deleted, these callers would expect
     519             :                  * their slot to become empty sooner or later.
     520             :                  *
     521             :                  * For example, lockless pagecache will look up a slot, deref
     522             :                  * the page pointer, and if the page has 0 refcount it means it
     523             :                  * was concurrently deleted from pagecache so try the deref
     524             :                  * again. Fortunately there is already a requirement for logic
     525             :                  * to retry the entire slot lookup -- the indirect pointer
     526             :                  * problem (replacing direct root node with an indirect pointer
     527             :                  * also results in a stale slot). So tag the slot as indirect
     528             :                  * to force callers to retry.
     529             :                  */
     530         112 :                 node->count = 0;
     531         112 :                 if (!radix_tree_is_internal_node(child)) {
     532           0 :                         node->slots[0] = (void __rcu *)RADIX_TREE_RETRY;
     533             :                 }
     534             : 
     535         224 :                 WARN_ON_ONCE(!list_empty(&node->private_list));
     536         112 :                 radix_tree_node_free(node);
     537         112 :                 shrunk = true;
     538             :         }
     539             : 
     540         652 :         return shrunk;
     541             : }
     542             : 
     543        9475 : static bool delete_node(struct radix_tree_root *root,
     544             :                         struct radix_tree_node *node)
     545             : {
     546        9475 :         bool deleted = false;
     547             : 
     548             :         do {
     549             :                 struct radix_tree_node *parent;
     550             : 
     551        9592 :                 if (node->count) {
     552       18930 :                         if (node_to_entry(node) ==
     553        9465 :                                         rcu_dereference_raw(root->xa_head))
     554         652 :                                 deleted |= radix_tree_shrink(root);
     555             :                         return deleted;
     556             :                 }
     557             : 
     558         127 :                 parent = node->parent;
     559         127 :                 if (parent) {
     560         117 :                         parent->slots[node->offset] = NULL;
     561         117 :                         parent->count--;
     562             :                 } else {
     563             :                         /*
     564             :                          * Shouldn't the tags already have all been cleared
     565             :                          * by the caller?
     566             :                          */
     567          20 :                         if (!is_idr(root))
     568           0 :                                 root_tag_clear_all(root);
     569          10 :                         root->xa_head = NULL;
     570             :                 }
     571             : 
     572         254 :                 WARN_ON_ONCE(!list_empty(&node->private_list));
     573         127 :                 radix_tree_node_free(node);
     574         127 :                 deleted = true;
     575             : 
     576         127 :                 node = parent;
     577         127 :         } while (node);
     578             : 
     579             :         return deleted;
     580             : }
     581             : 
     582             : /**
     583             :  *      __radix_tree_create     -       create a slot in a radix tree
     584             :  *      @root:          radix tree root
     585             :  *      @index:         index key
     586             :  *      @nodep:         returns node
     587             :  *      @slotp:         returns slot
     588             :  *
     589             :  *      Create, if necessary, and return the node and slot for an item
     590             :  *      at position @index in the radix tree @root.
     591             :  *
     592             :  *      Until there is more than one item in the tree, no nodes are
     593             :  *      allocated and @root->xa_head is used as a direct slot instead of
     594             :  *      pointing to a node, in which case *@nodep will be NULL.
     595             :  *
     596             :  *      Returns -ENOMEM, or 0 for success.
     597             :  */
     598           0 : static int __radix_tree_create(struct radix_tree_root *root,
     599             :                 unsigned long index, struct radix_tree_node **nodep,
     600             :                 void __rcu ***slotp)
     601             : {
     602           0 :         struct radix_tree_node *node = NULL, *child;
     603           0 :         void __rcu **slot = (void __rcu **)&root->xa_head;
     604             :         unsigned long maxindex;
     605           0 :         unsigned int shift, offset = 0;
     606           0 :         unsigned long max = index;
     607           0 :         gfp_t gfp = root_gfp_mask(root);
     608             : 
     609           0 :         shift = radix_tree_load_root(root, &child, &maxindex);
     610             : 
     611             :         /* Make sure the tree is high enough.  */
     612           0 :         if (max > maxindex) {
     613           0 :                 int error = radix_tree_extend(root, gfp, max, shift);
     614           0 :                 if (error < 0)
     615             :                         return error;
     616           0 :                 shift = error;
     617           0 :                 child = rcu_dereference_raw(root->xa_head);
     618             :         }
     619             : 
     620           0 :         while (shift > 0) {
     621           0 :                 shift -= RADIX_TREE_MAP_SHIFT;
     622           0 :                 if (child == NULL) {
     623             :                         /* Have to add a child node.  */
     624           0 :                         child = radix_tree_node_alloc(gfp, node, root, shift,
     625             :                                                         offset, 0, 0);
     626           0 :                         if (!child)
     627             :                                 return -ENOMEM;
     628           0 :                         rcu_assign_pointer(*slot, node_to_entry(child));
     629           0 :                         if (node)
     630           0 :                                 node->count++;
     631           0 :                 } else if (!radix_tree_is_internal_node(child))
     632             :                         break;
     633             : 
     634             :                 /* Go a level down */
     635           0 :                 node = entry_to_node(child);
     636           0 :                 offset = radix_tree_descend(node, &child, index);
     637           0 :                 slot = &node->slots[offset];
     638             :         }
     639             : 
     640           0 :         if (nodep)
     641           0 :                 *nodep = node;
     642           0 :         if (slotp)
     643           0 :                 *slotp = slot;
     644             :         return 0;
     645             : }
     646             : 
     647             : /*
     648             :  * Free any nodes below this node.  The tree is presumed to not need
     649             :  * shrinking, and any user data in the tree is presumed to not need a
     650             :  * destructor called on it.  If we need to add a destructor, we can
     651             :  * add that functionality later.  Note that we may not clear tags or
     652             :  * slots from the tree as an RCU walker may still have a pointer into
     653             :  * this subtree.  We could replace the entries with RADIX_TREE_RETRY,
     654             :  * but we'll still have to clear those in rcu_free.
     655             :  */
     656           0 : static void radix_tree_free_nodes(struct radix_tree_node *node)
     657             : {
     658           0 :         unsigned offset = 0;
     659           0 :         struct radix_tree_node *child = entry_to_node(node);
     660             : 
     661             :         for (;;) {
     662           0 :                 void *entry = rcu_dereference_raw(child->slots[offset]);
     663           0 :                 if (xa_is_node(entry) && child->shift) {
     664           0 :                         child = entry_to_node(entry);
     665           0 :                         offset = 0;
     666           0 :                         continue;
     667             :                 }
     668           0 :                 offset++;
     669           0 :                 while (offset == RADIX_TREE_MAP_SIZE) {
     670           0 :                         struct radix_tree_node *old = child;
     671           0 :                         offset = child->offset + 1;
     672           0 :                         child = child->parent;
     673           0 :                         WARN_ON_ONCE(!list_empty(&old->private_list));
     674           0 :                         radix_tree_node_free(old);
     675           0 :                         if (old == entry_to_node(node))
     676           0 :                                 return;
     677             :                 }
     678             :         }
     679             : }
     680             : 
     681             : static inline int insert_entries(struct radix_tree_node *node,
     682             :                 void __rcu **slot, void *item)
     683             : {
     684           0 :         if (*slot)
     685             :                 return -EEXIST;
     686           0 :         rcu_assign_pointer(*slot, item);
     687           0 :         if (node) {
     688           0 :                 node->count++;
     689           0 :                 if (xa_is_value(item))
     690           0 :                         node->nr_values++;
     691             :         }
     692             :         return 1;
     693             : }
     694             : 
     695             : /**
     696             :  *      radix_tree_insert    -    insert into a radix tree
     697             :  *      @root:          radix tree root
     698             :  *      @index:         index key
     699             :  *      @item:          item to insert
     700             :  *
     701             :  *      Insert an item into the radix tree at position @index.
     702             :  */
     703           0 : int radix_tree_insert(struct radix_tree_root *root, unsigned long index,
     704             :                         void *item)
     705             : {
     706             :         struct radix_tree_node *node;
     707             :         void __rcu **slot;
     708             :         int error;
     709             : 
     710           0 :         BUG_ON(radix_tree_is_internal_node(item));
     711             : 
     712           0 :         error = __radix_tree_create(root, index, &node, &slot);
     713           0 :         if (error)
     714             :                 return error;
     715             : 
     716           0 :         error = insert_entries(node, slot, item);
     717           0 :         if (error < 0)
     718             :                 return error;
     719             : 
     720           0 :         if (node) {
     721           0 :                 unsigned offset = get_slot_offset(node, slot);
     722           0 :                 BUG_ON(tag_get(node, 0, offset));
     723           0 :                 BUG_ON(tag_get(node, 1, offset));
     724           0 :                 BUG_ON(tag_get(node, 2, offset));
     725             :         } else {
     726           0 :                 BUG_ON(root_tags_get(root));
     727             :         }
     728             : 
     729             :         return 0;
     730             : }
     731             : EXPORT_SYMBOL(radix_tree_insert);
     732             : 
     733             : /**
     734             :  *      __radix_tree_lookup     -       lookup an item in a radix tree
     735             :  *      @root:          radix tree root
     736             :  *      @index:         index key
     737             :  *      @nodep:         returns node
     738             :  *      @slotp:         returns slot
     739             :  *
     740             :  *      Lookup and return the item at position @index in the radix
     741             :  *      tree @root.
     742             :  *
     743             :  *      Until there is more than one item in the tree, no nodes are
     744             :  *      allocated and @root->xa_head is used as a direct slot instead of
     745             :  *      pointing to a node, in which case *@nodep will be NULL.
     746             :  */
     747         712 : void *__radix_tree_lookup(const struct radix_tree_root *root,
     748             :                           unsigned long index, struct radix_tree_node **nodep,
     749             :                           void __rcu ***slotp)
     750             : {
     751             :         struct radix_tree_node *node, *parent;
     752             :         unsigned long maxindex;
     753             :         void __rcu **slot;
     754             : 
     755             :  restart:
     756         712 :         parent = NULL;
     757         712 :         slot = (void __rcu **)&root->xa_head;
     758         712 :         radix_tree_load_root(root, &node, &maxindex);
     759         712 :         if (index > maxindex)
     760             :                 return NULL;
     761             : 
     762        2720 :         while (radix_tree_is_internal_node(node)) {
     763             :                 unsigned offset;
     764             : 
     765        2720 :                 parent = entry_to_node(node);
     766        1360 :                 offset = radix_tree_descend(parent, &node, index);
     767        1360 :                 slot = parent->slots + offset;
     768        1360 :                 if (node == RADIX_TREE_RETRY)
     769             :                         goto restart;
     770        1360 :                 if (parent->shift == 0)
     771             :                         break;
     772             :         }
     773             : 
     774         712 :         if (nodep)
     775         702 :                 *nodep = parent;
     776         712 :         if (slotp)
     777         702 :                 *slotp = slot;
     778             :         return node;
     779             : }
     780             : 
     781             : /**
     782             :  *      radix_tree_lookup_slot    -    lookup a slot in a radix tree
     783             :  *      @root:          radix tree root
     784             :  *      @index:         index key
     785             :  *
     786             :  *      Returns:  the slot corresponding to the position @index in the
     787             :  *      radix tree @root. This is useful for update-if-exists operations.
     788             :  *
     789             :  *      This function can be called under rcu_read_lock iff the slot is not
     790             :  *      modified by radix_tree_replace_slot, otherwise it must be called
     791             :  *      exclusive from other writers. Any dereference of the slot must be done
     792             :  *      using radix_tree_deref_slot.
     793             :  */
     794           0 : void __rcu **radix_tree_lookup_slot(const struct radix_tree_root *root,
     795             :                                 unsigned long index)
     796             : {
     797             :         void __rcu **slot;
     798             : 
     799           0 :         if (!__radix_tree_lookup(root, index, NULL, &slot))
     800             :                 return NULL;
     801           0 :         return slot;
     802             : }
     803             : EXPORT_SYMBOL(radix_tree_lookup_slot);
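
A hedged sketch of the update-if-exists pattern the comment above describes:
look up the slot and replace the existing entry under the caller's own tree
lock, respecting the restriction documented on radix_tree_replace_slot() further
down (the replacement must not change the entry's kind or delete it). my_tree,
my_lock and update_item() are hypothetical caller-side names:

    #include <linux/radix-tree.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>

    /* Hypothetical caller: swap in 'new_item' if an entry exists at 'index'. */
    static bool update_item(struct radix_tree_root *my_tree, spinlock_t *my_lock,
                            unsigned long index, void *new_item)
    {
            void __rcu **slot;
            bool replaced = false;

            spin_lock(my_lock);
            slot = radix_tree_lookup_slot(my_tree, index);
            if (slot) {
                    /* A slot was found, so an entry is present; replace it. */
                    radix_tree_replace_slot(my_tree, slot, new_item);
                    replaced = true;
            }
            spin_unlock(my_lock);

            return replaced;
    }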
     804             : 
     805             : /**
     806             :  *      radix_tree_lookup    -    perform lookup operation on a radix tree
     807             :  *      @root:          radix tree root
     808             :  *      @index:         index key
     809             :  *
     810             :  *      Lookup the item at the position @index in the radix tree @root.
     811             :  *
     812             :  *      This function can be called under rcu_read_lock, however the caller
     813             :  *      must manage lifetimes of leaf nodes (e.g. RCU may also be used to free
     814             :  *      them safely). No RCU barriers are required to access or modify the
     815             :  *      returned item, however.
     816             :  */
     817          10 : void *radix_tree_lookup(const struct radix_tree_root *root, unsigned long index)
     818             : {
     819          10 :         return __radix_tree_lookup(root, index, NULL, NULL);
     820             : }
     821             : EXPORT_SYMBOL(radix_tree_lookup);
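
A hedged sketch of the RCU-side lookup pattern the comment above permits: the
tree walk itself is safe under rcu_read_lock(), but the caller remains
responsible for the returned item's lifetime (for example, taking a reference
before leaving the read-side critical section). peek_item() and my_tree are
hypothetical caller-side names:

    #include <linux/radix-tree.h>
    #include <linux/rcupdate.h>

    /* Hypothetical caller: look up 'index' from an RCU reader. */
    static void *peek_item(struct radix_tree_root *my_tree, unsigned long index)
    {
            void *item;

            rcu_read_lock();
            item = radix_tree_lookup(my_tree, index);
            /* Take any reference needed on 'item' here, while still inside
             * the read-side critical section. */
            rcu_read_unlock();

            return item;
    }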
     822             : 
     823             : static void replace_slot(void __rcu **slot, void *item,
     824             :                 struct radix_tree_node *node, int count, int values)
     825             : {
     826        9475 :         if (node && (count || values)) {
     827        9300 :                 node->count += count;
     828        9300 :                 node->nr_values += values;
     829             :         }
     830             : 
     831        9475 :         rcu_assign_pointer(*slot, item);
     832             : }
     833             : 
     834             : static bool node_tag_get(const struct radix_tree_root *root,
     835             :                                 const struct radix_tree_node *node,
     836             :                                 unsigned int tag, unsigned int offset)
     837             : {
     838        8953 :         if (node)
     839        8953 :                 return tag_get(node, tag, offset);
     840           0 :         return root_tag_get(root, tag);
     841             : }
     842             : 
     843             : /*
     844             :  * IDR users want to be able to store NULL in the tree, so if the slot isn't
     845             :  * free, don't adjust the count, even if it's transitioning between NULL and
     846             :  * non-NULL.  For the IDA, we mark slots as being IDR_FREE while they still
     847             :  * have empty bits, but it only stores NULL in slots when they're being
     848             :  * deleted.
     849             :  */
     850        8948 : static int calculate_count(struct radix_tree_root *root,
     851             :                                 struct radix_tree_node *node, void __rcu **slot,
     852             :                                 void *item, void *old)
     853             : {
     854       17896 :         if (is_idr(root)) {
     855        8948 :                 unsigned offset = get_slot_offset(node, slot);
     856        8948 :                 bool free = node_tag_get(root, node, IDR_FREE, offset);
     857        8948 :                 if (!free)
     858             :                         return 0;
     859        8773 :                 if (!old)
     860             :                         return 1;
     861             :         }
     862           0 :         return !!item - !!old;
     863             : }
     864             : 
     865             : /**
     866             :  * __radix_tree_replace         - replace item in a slot
     867             :  * @root:               radix tree root
     868             :  * @node:               pointer to tree node
     869             :  * @slot:               pointer to slot in @node
     870             :  * @item:               new item to store in the slot.
     871             :  *
     872             :  * For use with __radix_tree_lookup().  Caller must hold tree write locked
     873             :  * across slot lookup and replacement.
     874             :  */
     875        8948 : void __radix_tree_replace(struct radix_tree_root *root,
     876             :                           struct radix_tree_node *node,
     877             :                           void __rcu **slot, void *item)
     878             : {
     879        8948 :         void *old = rcu_dereference_raw(*slot);
     880       17896 :         int values = !!xa_is_value(item) - !!xa_is_value(old);
     881        8948 :         int count = calculate_count(root, node, slot, item, old);
     882             : 
     883             :         /*
     884             :          * This function supports replacing value entries and
     885             :          * deleting entries, but that needs accounting against the
     886             :          * node unless the slot is root->xa_head.
     887             :          */
     888        8948 :         WARN_ON_ONCE(!node && (slot != (void __rcu **)&root->xa_head) &&
     889             :                         (count || values));
     890        8948 :         replace_slot(slot, item, node, count, values);
     891             : 
     892        8948 :         if (!node)
     893             :                 return;
     894             : 
     895        8948 :         delete_node(root, node);
     896             : }
     897             : 
     898             : /**
     899             :  * radix_tree_replace_slot      - replace item in a slot
     900             :  * @root:       radix tree root
     901             :  * @slot:       pointer to slot
     902             :  * @item:       new item to store in the slot.
     903             :  *
     904             :  * For use with radix_tree_lookup_slot() and
     905             :  * radix_tree_gang_lookup_tag_slot().  Caller must hold tree write locked
     906             :  * across slot lookup and replacement.
     907             :  *
     908             :  * NOTE: This cannot be used to switch between non-entries (empty slots),
     909             :  * regular entries, and value entries, as that requires accounting
     910             :  * inside the radix tree node. When switching from one type of entry or
     911             :  * deleting, use __radix_tree_lookup() and __radix_tree_replace() or
     912             :  * radix_tree_iter_replace().
     913             :  */
     914           0 : void radix_tree_replace_slot(struct radix_tree_root *root,
     915             :                              void __rcu **slot, void *item)
     916             : {
     917           0 :         __radix_tree_replace(root, NULL, slot, item);
     918           0 : }
     919             : EXPORT_SYMBOL(radix_tree_replace_slot);
     920             : 
     921             : /**
     922             :  * radix_tree_iter_replace - replace item in a slot
     923             :  * @root:       radix tree root
     924             :  * @iter:       iterator state
     925             :  * @slot:       pointer to slot
     926             :  * @item:       new item to store in the slot.
     927             :  *
     928             :  * For use with radix_tree_for_each_slot().
     929             :  * Caller must hold tree write locked.
     930             :  */
     931        8773 : void radix_tree_iter_replace(struct radix_tree_root *root,
     932             :                                 const struct radix_tree_iter *iter,
     933             :                                 void __rcu **slot, void *item)
     934             : {
     935        8773 :         __radix_tree_replace(root, iter->node, slot, item);
     936        8773 : }
     937             : 
     938         527 : static void node_tag_set(struct radix_tree_root *root,
     939             :                                 struct radix_tree_node *node,
     940             :                                 unsigned int tag, unsigned int offset)
     941             : {
     942        1583 :         while (node) {
     943         853 :                 if (tag_get(node, tag, offset))
     944             :                         return;
     945        1058 :                 tag_set(node, tag, offset);
     946         529 :                 offset = node->offset;
     947         529 :                 node = node->parent;
     948             :         }
     949             : 
     950         406 :         if (!root_tag_get(root, tag))
     951           0 :                 root_tag_set(root, tag);
     952             : }
     953             : 
     954             : /**
     955             :  *      radix_tree_tag_set - set a tag on a radix tree node
     956             :  *      @root:          radix tree root
     957             :  *      @index:         index key
     958             :  *      @tag:           tag index
     959             :  *
     960             :  *      Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
     961             :  *      corresponding to @index in the radix tree, from the root
     962             :  *      all the way down to the leaf node.
     963             :  *
     964             :  *      Returns the address of the tagged item.  Setting a tag on a not-present
     965             :  *      item is a bug.
     966             :  */
     967           0 : void *radix_tree_tag_set(struct radix_tree_root *root,
     968             :                         unsigned long index, unsigned int tag)
     969             : {
     970             :         struct radix_tree_node *node, *parent;
     971             :         unsigned long maxindex;
     972             : 
     973           0 :         radix_tree_load_root(root, &node, &maxindex);
     974           0 :         BUG_ON(index > maxindex);
     975             : 
     976           0 :         while (radix_tree_is_internal_node(node)) {
     977             :                 unsigned offset;
     978             : 
     979           0 :                 parent = entry_to_node(node);
     980           0 :                 offset = radix_tree_descend(parent, &node, index);
     981           0 :                 BUG_ON(!node);
     982             : 
     983           0 :                 if (!tag_get(parent, tag, offset))
     984           0 :                         tag_set(parent, tag, offset);
     985             :         }
     986             : 
     987             :         /* set the root's tag bit */
     988           0 :         if (!root_tag_get(root, tag))
     989           0 :                 root_tag_set(root, tag);
     990             : 
     991           0 :         return node;
     992             : }
     993             : EXPORT_SYMBOL(radix_tree_tag_set);
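                      : 
                      : /*
                      :  * Example (editorial sketch, not part of radix-tree.c): tagging an entry
                      :  * right after inserting it.  The tree, lock and tag index below are
                      :  * hypothetical stand-ins (the tag must be < RADIX_TREE_MAX_TAGS), and
                      :  * <linux/spinlock.h> is assumed in addition to the includes above.
                      :  * Callers must serialise modifications themselves, hence the spinlock.
                      :  */
                      : #define EXAMPLE_TAG_DIRTY       0
                      : 
                      : static RADIX_TREE(example_tree, GFP_ATOMIC);
                      : static DEFINE_SPINLOCK(example_lock);
                      : 
                      : static int example_insert_dirty(unsigned long index, void *item)
                      : {
                      :         int err;
                      : 
                      :         spin_lock(&example_lock);
                      :         err = radix_tree_insert(&example_tree, index, item);
                      :         if (!err)
                      :                 radix_tree_tag_set(&example_tree, index, EXAMPLE_TAG_DIRTY);
                      :         spin_unlock(&example_lock);
                      :         return err;
                      : }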
     994             : 
     995        8773 : static void node_tag_clear(struct radix_tree_root *root,
     996             :                                 struct radix_tree_node *node,
     997             :                                 unsigned int tag, unsigned int offset)
     998             : {
     999       17671 :         while (node) {
    1000        8898 :                 if (!tag_get(node, tag, offset))
    1001             :                         return;
    1002        8898 :                 tag_clear(node, tag, offset);
    1003        8898 :                 if (any_tag_set(node, tag))
    1004             :                         return;
    1005             : 
    1006         125 :                 offset = node->offset;
    1007         125 :                 node = node->parent;
    1008             :         }
    1009             : 
    1010             :         /* clear the root's tag bit */
    1011           0 :         if (root_tag_get(root, tag))
    1012           0 :                 root_tag_clear(root, tag);
    1013             : }
    1014             : 
    1015             : /**
    1016             :  *      radix_tree_tag_clear - clear a tag on a radix tree node
    1017             :  *      @root:          radix tree root
    1018             :  *      @index:         index key
    1019             :  *      @tag:           tag index
    1020             :  *
    1021             :  *      Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
    1022             :  *      corresponding to @index in the radix tree.  If this causes
    1023             :  *      the leaf node to have no tags set then clear the tag in the
    1024             :  *      next-to-leaf node, etc.
    1025             :  *
     1026             :  *      Returns the address of the tagged item on success, else NULL; i.e.
     1027             :  *      it has the same return value and semantics as radix_tree_lookup().
    1028             :  */
    1029           0 : void *radix_tree_tag_clear(struct radix_tree_root *root,
    1030             :                         unsigned long index, unsigned int tag)
    1031             : {
    1032             :         struct radix_tree_node *node, *parent;
    1033             :         unsigned long maxindex;
    1034           0 :         int offset = 0;
    1035             : 
    1036           0 :         radix_tree_load_root(root, &node, &maxindex);
    1037           0 :         if (index > maxindex)
    1038             :                 return NULL;
    1039             : 
    1040             :         parent = NULL;
    1041             : 
    1042           0 :         while (radix_tree_is_internal_node(node)) {
    1043           0 :                 parent = entry_to_node(node);
    1044           0 :                 offset = radix_tree_descend(parent, &node, index);
    1045             :         }
    1046             : 
    1047           0 :         if (node)
    1048           0 :                 node_tag_clear(root, parent, tag, offset);
    1049             : 
    1050             :         return node;
    1051             : }
    1052             : EXPORT_SYMBOL(radix_tree_tag_clear);
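                      : 
                      : /*
                      :  * Example (editorial sketch): clearing the tag once the entry has been
                      :  * processed, reusing the hypothetical example_tree/example_lock/
                      :  * EXAMPLE_TAG_DIRTY from the sketch above.  node_tag_clear() propagates
                      :  * towards the root, so no extra bookkeeping is needed here.
                      :  */
                      : static void example_mark_clean(unsigned long index)
                      : {
                      :         spin_lock(&example_lock);
                      :         radix_tree_tag_clear(&example_tree, index, EXAMPLE_TAG_DIRTY);
                      :         spin_unlock(&example_lock);
                      : }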
    1053             : 
    1054             : /**
     1055             :  * radix_tree_iter_tag_clear - clear a tag on the current iterator entry
     1056             :  * @root: radix tree root
     1057             :  * @iter: iterator state
     1058             :  * @tag: tag to clear
     1059             :  */
    1060        8773 : void radix_tree_iter_tag_clear(struct radix_tree_root *root,
    1061             :                         const struct radix_tree_iter *iter, unsigned int tag)
    1062             : {
    1063       17546 :         node_tag_clear(root, iter->node, tag, iter_offset(iter));
    1064        8773 : }
    1065             : 
    1066             : /**
    1067             :  * radix_tree_tag_get - get a tag on a radix tree node
    1068             :  * @root:               radix tree root
    1069             :  * @index:              index key
    1070             :  * @tag:                tag index (< RADIX_TREE_MAX_TAGS)
    1071             :  *
    1072             :  * Return values:
    1073             :  *
    1074             :  *  0: tag not present or not set
    1075             :  *  1: tag set
    1076             :  *
    1077             :  * Note that the return value of this function may not be relied on, even if
    1078             :  * the RCU lock is held, unless tag modification and node deletion are excluded
    1079             :  * from concurrency.
    1080             :  */
    1081         175 : int radix_tree_tag_get(const struct radix_tree_root *root,
    1082             :                         unsigned long index, unsigned int tag)
    1083             : {
    1084             :         struct radix_tree_node *node, *parent;
    1085             :         unsigned long maxindex;
    1086             : 
    1087         350 :         if (!root_tag_get(root, tag))
    1088             :                 return 0;
    1089             : 
    1090         175 :         radix_tree_load_root(root, &node, &maxindex);
    1091         175 :         if (index > maxindex)
    1092             :                 return 0;
    1093             : 
    1094         574 :         while (radix_tree_is_internal_node(node)) {
    1095             :                 unsigned offset;
    1096             : 
    1097         574 :                 parent = entry_to_node(node);
    1098         287 :                 offset = radix_tree_descend(parent, &node, index);
    1099             : 
    1100         287 :                 if (!tag_get(parent, tag, offset))
    1101             :                         return 0;
    1102         112 :                 if (node == RADIX_TREE_RETRY)
    1103             :                         break;
    1104             :         }
    1105             : 
    1106             :         return 1;
    1107             : }
    1108             : EXPORT_SYMBOL(radix_tree_tag_get);
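                      : 
                      : /*
                      :  * Example (editorial sketch): querying the hypothetical dirty tag.  As the
                      :  * comment above warns, the answer is only stable if tag updates and node
                      :  * deletion are excluded, so the example takes the same lock as the writers.
                      :  */
                      : static bool example_is_dirty(unsigned long index)
                      : {
                      :         bool dirty;
                      : 
                      :         spin_lock(&example_lock);
                      :         dirty = radix_tree_tag_get(&example_tree, index, EXAMPLE_TAG_DIRTY);
                      :         spin_unlock(&example_lock);
                      :         return dirty;
                      : }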
    1109             : 
    1110             : /* Construct iter->tags bit-mask from node->tags[tag] array */
    1111             : static void set_iter_tags(struct radix_tree_iter *iter,
    1112             :                                 struct radix_tree_node *node, unsigned offset,
    1113             :                                 unsigned tag)
    1114             : {
    1115        8773 :         unsigned tag_long = offset / BITS_PER_LONG;
    1116        8773 :         unsigned tag_bit  = offset % BITS_PER_LONG;
    1117             : 
    1118        8773 :         if (!node) {
    1119           0 :                 iter->tags = 1;
    1120             :                 return;
    1121             :         }
    1122             : 
    1123        8773 :         iter->tags = node->tags[tag][tag_long] >> tag_bit;
    1124             : 
    1125             :         /* This never happens if RADIX_TREE_TAG_LONGS == 1 */
    1126             :         if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
    1127             :                 /* Pick tags from next element */
    1128             :                 if (tag_bit)
    1129             :                         iter->tags |= node->tags[tag][tag_long + 1] <<
    1130             :                                                 (BITS_PER_LONG - tag_bit);
    1131             :                 /* Clip chunk size, here only BITS_PER_LONG tags */
    1132             :                 iter->next_index = __radix_tree_iter_add(iter, BITS_PER_LONG);
    1133             :         }
    1134             : }
    1135             : 
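                      : /**
                      :  * radix_tree_iter_resume - resume iterating when the chunk may be invalid
                      :  * @slot: pointer to the current slot
                      :  * @iter: iterator state
                      :  *
                      :  * If an iteration has to drop and reacquire a lock, the chunk cached in
                      :  * @iter may be invalidated by a concurrent insertion or deletion.  Call
                      :  * this before releasing the lock; the next loop iteration will then fetch
                      :  * a fresh chunk starting at the following index.  Returns NULL so the
                      :  * caller can assign the result straight back to its slot variable.
                      :  */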
    1136           0 : void __rcu **radix_tree_iter_resume(void __rcu **slot,
    1137             :                                         struct radix_tree_iter *iter)
    1138             : {
    1139           0 :         slot++;
    1140           0 :         iter->index = __radix_tree_iter_add(iter, 1);
    1141           0 :         iter->next_index = iter->index;
    1142           0 :         iter->tags = 0;
    1143           0 :         return NULL;
    1144             : }
    1145             : EXPORT_SYMBOL(radix_tree_iter_resume);
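                      : 
                      : /*
                      :  * Example (editorial sketch): dropping the hypothetical example_lock in
                      :  * the middle of a walk.  After radix_tree_iter_resume() the cached chunk
                      :  * is abandoned, so the loop safely refetches from the next index once the
                      :  * lock has been retaken (<linux/sched.h> assumed for cond_resched()).
                      :  */
                      : static void example_walk_resched(void)
                      : {
                      :         struct radix_tree_iter iter;
                      :         void __rcu **slot;
                      : 
                      :         spin_lock(&example_lock);
                      :         radix_tree_for_each_slot(slot, &example_tree, &iter, 0) {
                      :                 /* ... process the entry at iter.index here ... */
                      :                 if (need_resched()) {
                      :                         slot = radix_tree_iter_resume(slot, &iter);
                      :                         spin_unlock(&example_lock);
                      :                         cond_resched();
                      :                         spin_lock(&example_lock);
                      :                 }
                      :         }
                      :         spin_unlock(&example_lock);
                      : }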
    1146             : 
    1147             : /**
    1148             :  * radix_tree_next_chunk - find next chunk of slots for iteration
    1149             :  *
    1150             :  * @root:       radix tree root
    1151             :  * @iter:       iterator state
    1152             :  * @flags:      RADIX_TREE_ITER_* flags and tag index
    1153             :  * Returns:     pointer to chunk first slot, or NULL if iteration is over
    1154             :  */
    1155           0 : void __rcu **radix_tree_next_chunk(const struct radix_tree_root *root,
    1156             :                              struct radix_tree_iter *iter, unsigned flags)
    1157             : {
    1158           0 :         unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
    1159             :         struct radix_tree_node *node, *child;
    1160             :         unsigned long index, offset, maxindex;
    1161             : 
    1162           0 :         if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
    1163             :                 return NULL;
    1164             : 
    1165             :         /*
    1166             :          * Catch next_index overflow after ~0UL. iter->index never overflows
    1167             :          * during iterating; it can be zero only at the beginning.
    1168             :          * And we cannot overflow iter->next_index in a single step,
    1169             :          * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
    1170             :          *
     1171             :          * This condition is also used by radix_tree_next_slot() to stop
     1172             :          * contiguous iterating, and to forbid switching to the next chunk.
    1173             :          */
    1174           0 :         index = iter->next_index;
    1175           0 :         if (!index && iter->index)
    1176             :                 return NULL;
    1177             : 
    1178             :  restart:
    1179           0 :         radix_tree_load_root(root, &child, &maxindex);
    1180           0 :         if (index > maxindex)
    1181             :                 return NULL;
    1182           0 :         if (!child)
    1183             :                 return NULL;
    1184             : 
    1185           0 :         if (!radix_tree_is_internal_node(child)) {
    1186             :                 /* Single-slot tree */
    1187           0 :                 iter->index = index;
    1188           0 :                 iter->next_index = maxindex + 1;
    1189           0 :                 iter->tags = 1;
    1190           0 :                 iter->node = NULL;
    1191           0 :                 return (void __rcu **)&root->xa_head;
    1192             :         }
    1193             : 
    1194             :         do {
    1195           0 :                 node = entry_to_node(child);
    1196           0 :                 offset = radix_tree_descend(node, &child, index);
    1197             : 
    1198           0 :                 if ((flags & RADIX_TREE_ITER_TAGGED) ?
    1199           0 :                                 !tag_get(node, tag, offset) : !child) {
    1200             :                         /* Hole detected */
    1201           0 :                         if (flags & RADIX_TREE_ITER_CONTIG)
    1202             :                                 return NULL;
    1203             : 
    1204           0 :                         if (flags & RADIX_TREE_ITER_TAGGED)
    1205           0 :                                 offset = radix_tree_find_next_bit(node, tag,
    1206             :                                                 offset + 1);
    1207             :                         else
    1208           0 :                                 while (++offset < RADIX_TREE_MAP_SIZE) {
    1209           0 :                                         void *slot = rcu_dereference_raw(
    1210             :                                                         node->slots[offset]);
    1211           0 :                                         if (slot)
    1212             :                                                 break;
    1213             :                                 }
    1214           0 :                         index &= ~node_maxindex(node);
    1215           0 :                         index += offset << node->shift;
    1216             :                         /* Overflow after ~0UL */
    1217           0 :                         if (!index)
    1218             :                                 return NULL;
    1219           0 :                         if (offset == RADIX_TREE_MAP_SIZE)
    1220             :                                 goto restart;
    1221           0 :                         child = rcu_dereference_raw(node->slots[offset]);
    1222             :                 }
    1223             : 
    1224           0 :                 if (!child)
    1225             :                         goto restart;
    1226           0 :                 if (child == RADIX_TREE_RETRY)
    1227             :                         break;
    1228           0 :         } while (node->shift && radix_tree_is_internal_node(child));
    1229             : 
    1230             :         /* Update the iterator state */
    1231           0 :         iter->index = (index &~ node_maxindex(node)) | offset;
    1232           0 :         iter->next_index = (index | node_maxindex(node)) + 1;
    1233           0 :         iter->node = node;
    1234             : 
    1235           0 :         if (flags & RADIX_TREE_ITER_TAGGED)
    1236           0 :                 set_iter_tags(iter, node, offset, tag);
    1237             : 
    1238           0 :         return node->slots + offset;
    1239             : }
    1240             : EXPORT_SYMBOL(radix_tree_next_chunk);
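                      : 
                      : /*
                      :  * Example (editorial sketch): radix_tree_next_chunk() is rarely called
                      :  * directly; the radix_tree_for_each_slot()/_tagged() macros use it to
                      :  * refill the iterator one chunk at a time.  A read-only walk of the
                      :  * hypothetical example_tree under RCU might look like this.
                      :  */
                      : static void example_dump_tree(void)
                      : {
                      :         struct radix_tree_iter iter;
                      :         void __rcu **slot;
                      : 
                      :         rcu_read_lock();
                      :         radix_tree_for_each_slot(slot, &example_tree, &iter, 0) {
                      :                 void *entry = radix_tree_deref_slot(slot);
                      : 
                      :                 if (radix_tree_deref_retry(entry)) {
                      :                         slot = radix_tree_iter_retry(&iter);
                      :                         continue;
                      :                 }
                      :                 pr_info("index %lu -> %p\n", iter.index, entry);
                      :         }
                      :         rcu_read_unlock();
                      : }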
    1241             : 
    1242             : /**
    1243             :  *      radix_tree_gang_lookup - perform multiple lookup on a radix tree
    1244             :  *      @root:          radix tree root
    1245             :  *      @results:       where the results of the lookup are placed
    1246             :  *      @first_index:   start the lookup from this key
    1247             :  *      @max_items:     place up to this many items at *results
    1248             :  *
    1249             :  *      Performs an index-ascending scan of the tree for present items.  Places
    1250             :  *      them at *@results and returns the number of items which were placed at
    1251             :  *      *@results.
    1252             :  *
     1253             :  *      The implementation is naive: it simply walks the tree slot by slot.
    1254             :  *
    1255             :  *      Like radix_tree_lookup, radix_tree_gang_lookup may be called under
    1256             :  *      rcu_read_lock. In this case, rather than the returned results being
    1257             :  *      an atomic snapshot of the tree at a single point in time, the
     1258             :  *      semantics of an RCU protected gang lookup are as though multiple
     1259             :  *      radix_tree_lookups had been issued under individual RCU read locks,
     1260             :  *      with the results stored in 'results'.
    1261             :  */
    1262             : unsigned int
    1263           0 : radix_tree_gang_lookup(const struct radix_tree_root *root, void **results,
    1264             :                         unsigned long first_index, unsigned int max_items)
    1265             : {
    1266             :         struct radix_tree_iter iter;
    1267             :         void __rcu **slot;
    1268           0 :         unsigned int ret = 0;
    1269             : 
    1270           0 :         if (unlikely(!max_items))
    1271             :                 return 0;
    1272             : 
    1273           0 :         radix_tree_for_each_slot(slot, root, &iter, first_index) {
    1274           0 :                 results[ret] = rcu_dereference_raw(*slot);
    1275           0 :                 if (!results[ret])
    1276           0 :                         continue;
    1277           0 :                 if (radix_tree_is_internal_node(results[ret])) {
    1278           0 :                         slot = radix_tree_iter_retry(&iter);
    1279           0 :                         continue;
    1280             :                 }
    1281           0 :                 if (++ret == max_items)
    1282             :                         break;
    1283             :         }
    1284             : 
    1285             :         return ret;
    1286             : }
    1287             : EXPORT_SYMBOL(radix_tree_gang_lookup);
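                      : 
                      : /*
                      :  * Example (editorial sketch): batched lookup under rcu_read_lock().  The
                      :  * struct example_item and its ->index field are hypothetical; because only
                      :  * items (not indices) are returned, the caller restarts each batch from
                      :  * the index of the last item seen, as find_get_pages()-style code does.
                      :  * Item lifetime across batches must be guaranteed by the caller.
                      :  */
                      : struct example_item {
                      :         unsigned long index;    /* index the item was inserted at */
                      :         /* ... payload ... */
                      : };
                      : 
                      : static void example_visit_all(void (*visit)(struct example_item *))
                      : {
                      :         struct example_item *batch[16];
                      :         unsigned long start = 0;
                      :         unsigned int i, nr;
                      : 
                      :         do {
                      :                 rcu_read_lock();
                      :                 nr = radix_tree_gang_lookup(&example_tree, (void **)batch,
                      :                                             start, ARRAY_SIZE(batch));
                      :                 for (i = 0; i < nr; i++) {
                      :                         visit(batch[i]);
                      :                         start = batch[i]->index + 1;
                      :                 }
                      :                 rcu_read_unlock();
                      :         } while (nr == ARRAY_SIZE(batch));
                      : }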
    1288             : 
    1289             : /**
    1290             :  *      radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
    1291             :  *                                   based on a tag
    1292             :  *      @root:          radix tree root
    1293             :  *      @results:       where the results of the lookup are placed
    1294             :  *      @first_index:   start the lookup from this key
    1295             :  *      @max_items:     place up to this many items at *results
    1296             :  *      @tag:           the tag index (< RADIX_TREE_MAX_TAGS)
    1297             :  *
    1298             :  *      Performs an index-ascending scan of the tree for present items which
    1299             :  *      have the tag indexed by @tag set.  Places the items at *@results and
    1300             :  *      returns the number of items which were placed at *@results.
    1301             :  */
    1302             : unsigned int
    1303           0 : radix_tree_gang_lookup_tag(const struct radix_tree_root *root, void **results,
    1304             :                 unsigned long first_index, unsigned int max_items,
    1305             :                 unsigned int tag)
    1306             : {
    1307             :         struct radix_tree_iter iter;
    1308             :         void __rcu **slot;
    1309           0 :         unsigned int ret = 0;
    1310             : 
    1311           0 :         if (unlikely(!max_items))
    1312             :                 return 0;
    1313             : 
    1314           0 :         radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
    1315           0 :                 results[ret] = rcu_dereference_raw(*slot);
    1316           0 :                 if (!results[ret])
    1317           0 :                         continue;
    1318           0 :                 if (radix_tree_is_internal_node(results[ret])) {
    1319           0 :                         slot = radix_tree_iter_retry(&iter);
    1320           0 :                         continue;
    1321             :                 }
    1322           0 :                 if (++ret == max_items)
    1323             :                         break;
    1324             :         }
    1325             : 
    1326             :         return ret;
    1327             : }
    1328             : EXPORT_SYMBOL(radix_tree_gang_lookup_tag);
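                      : 
                      : /*
                      :  * Example (editorial sketch): the tagged variant only reports entries that
                      :  * currently carry the hypothetical EXAMPLE_TAG_DIRTY, which is how
                      :  * writeback-style "find everything dirty" scans avoid touching clean
                      :  * entries at all.
                      :  */
                      : static unsigned int example_collect_dirty(void **results, unsigned int max)
                      : {
                      :         unsigned int nr;
                      : 
                      :         rcu_read_lock();
                      :         nr = radix_tree_gang_lookup_tag(&example_tree, results, 0, max,
                      :                                         EXAMPLE_TAG_DIRTY);
                      :         rcu_read_unlock();
                      :         return nr;
                      : }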
    1329             : 
    1330             : /**
    1331             :  *      radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
    1332             :  *                                        radix tree based on a tag
    1333             :  *      @root:          radix tree root
    1334             :  *      @results:       where the results of the lookup are placed
    1335             :  *      @first_index:   start the lookup from this key
    1336             :  *      @max_items:     place up to this many items at *results
    1337             :  *      @tag:           the tag index (< RADIX_TREE_MAX_TAGS)
    1338             :  *
    1339             :  *      Performs an index-ascending scan of the tree for present items which
    1340             :  *      have the tag indexed by @tag set.  Places the slots at *@results and
    1341             :  *      returns the number of slots which were placed at *@results.
    1342             :  */
    1343             : unsigned int
    1344           0 : radix_tree_gang_lookup_tag_slot(const struct radix_tree_root *root,
    1345             :                 void __rcu ***results, unsigned long first_index,
    1346             :                 unsigned int max_items, unsigned int tag)
    1347             : {
    1348             :         struct radix_tree_iter iter;
    1349             :         void __rcu **slot;
    1350           0 :         unsigned int ret = 0;
    1351             : 
    1352           0 :         if (unlikely(!max_items))
    1353             :                 return 0;
    1354             : 
    1355           0 :         radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
    1356           0 :                 results[ret] = slot;
    1357           0 :                 if (++ret == max_items)
    1358             :                         break;
    1359             :         }
    1360             : 
    1361             :         return ret;
    1362             : }
    1363             : EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
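                      : 
                      : /*
                      :  * Example (editorial sketch): the _slot variant hands back slot pointers,
                      :  * so a caller holding the tree lock can patch entries in place with
                      :  * radix_tree_replace_slot().  The example_new_item callback is a
                      :  * hypothetical helper that produces the (non-NULL) replacement entry.
                      :  */
                      : static void example_replace_dirty(void *(*example_new_item)(void *old))
                      : {
                      :         void __rcu **slots[16];
                      :         unsigned int i, nr;
                      : 
                      :         spin_lock(&example_lock);
                      :         nr = radix_tree_gang_lookup_tag_slot(&example_tree, slots, 0,
                      :                                              ARRAY_SIZE(slots),
                      :                                              EXAMPLE_TAG_DIRTY);
                      :         for (i = 0; i < nr; i++) {
                      :                 void *old = rcu_dereference_protected(*slots[i],
                      :                                 lockdep_is_held(&example_lock));
                      : 
                      :                 radix_tree_replace_slot(&example_tree, slots[i],
                      :                                         example_new_item(old));
                      :         }
                      :         spin_unlock(&example_lock);
                      : }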
    1364             : 
    1365         527 : static bool __radix_tree_delete(struct radix_tree_root *root,
    1366             :                                 struct radix_tree_node *node, void __rcu **slot)
    1367             : {
    1368         527 :         void *old = rcu_dereference_raw(*slot);
    1369         527 :         int values = xa_is_value(old) ? -1 : 0;
    1370         527 :         unsigned offset = get_slot_offset(node, slot);
    1371             :         int tag;
    1372             : 
    1373        1054 :         if (is_idr(root))
    1374         527 :                 node_tag_set(root, node, IDR_FREE, offset);
    1375             :         else
    1376           0 :                 for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
    1377           0 :                         node_tag_clear(root, node, tag, offset);
    1378             : 
    1379         527 :         replace_slot(slot, NULL, node, -1, values);
    1380         527 :         return node && delete_node(root, node);
    1381             : }
    1382             : 
    1383             : /**
    1384             :  * radix_tree_iter_delete - delete the entry at this iterator position
    1385             :  * @root: radix tree root
    1386             :  * @iter: iterator state
    1387             :  * @slot: pointer to slot
    1388             :  *
    1389             :  * Delete the entry at the position currently pointed to by the iterator.
    1390             :  * This may result in the current node being freed; if it is, the iterator
    1391             :  * is advanced so that it will not reference the freed memory.  This
    1392             :  * function may be called without any locking if there are no other threads
    1393             :  * which can access this tree.
    1394             :  */
    1395           0 : void radix_tree_iter_delete(struct radix_tree_root *root,
    1396             :                                 struct radix_tree_iter *iter, void __rcu **slot)
    1397             : {
    1398           0 :         if (__radix_tree_delete(root, iter->node, slot))
    1399           0 :                 iter->index = iter->next_index;
    1400           0 : }
    1401             : EXPORT_SYMBOL(radix_tree_iter_delete);
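                      : 
                      : /*
                      :  * Example (editorial sketch): emptying the hypothetical example_tree.
                      :  * radix_tree_iter_delete() keeps the iterator valid even when removing
                      :  * the last entry of a node frees that node, so deletion inside the loop
                      :  * is safe.  Freeing the items themselves is left to the caller's policy.
                      :  */
                      : static void example_clear_tree(void)
                      : {
                      :         struct radix_tree_iter iter;
                      :         void __rcu **slot;
                      : 
                      :         spin_lock(&example_lock);
                      :         radix_tree_for_each_slot(slot, &example_tree, &iter, 0) {
                      :                 /* release the item here if it was dynamically allocated */
                      :                 radix_tree_iter_delete(&example_tree, &iter, slot);
                      :         }
                      :         spin_unlock(&example_lock);
                      : }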
    1402             : 
    1403             : /**
    1404             :  * radix_tree_delete_item - delete an item from a radix tree
    1405             :  * @root: radix tree root
    1406             :  * @index: index key
    1407             :  * @item: expected item
    1408             :  *
    1409             :  * Remove @item at @index from the radix tree rooted at @root.
    1410             :  *
    1411             :  * Return: the deleted entry, or %NULL if it was not present
    1412             :  * or the entry at the given @index was not @item.
    1413             :  */
    1414         527 : void *radix_tree_delete_item(struct radix_tree_root *root,
    1415             :                              unsigned long index, void *item)
    1416             : {
    1417         527 :         struct radix_tree_node *node = NULL;
    1418         527 :         void __rcu **slot = NULL;
    1419             :         void *entry;
    1420             : 
    1421         527 :         entry = __radix_tree_lookup(root, index, &node, &slot);
    1422         527 :         if (!slot)
    1423             :                 return NULL;
    1424         542 :         if (!entry && (!is_idr(root) || node_tag_get(root, node, IDR_FREE,
    1425          10 :                                                 get_slot_offset(node, slot))))
    1426             :                 return NULL;
    1427             : 
    1428         527 :         if (item && entry != item)
    1429             :                 return NULL;
    1430             : 
    1431         527 :         __radix_tree_delete(root, node, slot);
    1432             : 
    1433         527 :         return entry;
    1434             : }
    1435             : EXPORT_SYMBOL(radix_tree_delete_item);
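                      : 
                      : /*
                      :  * Example (editorial sketch): conditional removal from the hypothetical
                      :  * example_tree.  Passing the expected pointer means an entry that was
                      :  * concurrently replaced (under the same lock) is left alone; the function
                      :  * returns true only if @expected really was removed.
                      :  */
                      : static bool example_remove_if_same(unsigned long index, void *expected)
                      : {
                      :         void *old;
                      : 
                      :         spin_lock(&example_lock);
                      :         old = radix_tree_delete_item(&example_tree, index, expected);
                      :         spin_unlock(&example_lock);
                      :         return old != NULL;
                      : }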
    1436             : 
    1437             : /**
    1438             :  * radix_tree_delete - delete an entry from a radix tree
    1439             :  * @root: radix tree root
    1440             :  * @index: index key
    1441             :  *
    1442             :  * Remove the entry at @index from the radix tree rooted at @root.
    1443             :  *
    1444             :  * Return: The deleted entry, or %NULL if it was not present.
    1445             :  */
    1446           0 : void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
    1447             : {
    1448           0 :         return radix_tree_delete_item(root, index, NULL);
    1449             : }
    1450             : EXPORT_SYMBOL(radix_tree_delete);
    1451             : 
    1452             : /**
    1453             :  *      radix_tree_tagged - test whether any items in the tree are tagged
    1454             :  *      @root:          radix tree root
    1455             :  *      @tag:           tag to test
    1456             :  */
    1457           0 : int radix_tree_tagged(const struct radix_tree_root *root, unsigned int tag)
    1458             : {
    1459       17546 :         return root_tag_get(root, tag);
    1460             : }
    1461             : EXPORT_SYMBOL(radix_tree_tagged);
    1462             : 
    1463             : /**
    1464             :  * idr_preload - preload for idr_alloc()
    1465             :  * @gfp_mask: allocation mask to use for preloading
    1466             :  *
    1467             :  * Preallocate memory to use for the next call to idr_alloc().  This function
    1468             :  * returns with preemption disabled.  It will be enabled by idr_preload_end().
    1469             :  */
    1470        8620 : void idr_preload(gfp_t gfp_mask)
    1471             : {
    1472        8620 :         if (__radix_tree_preload(gfp_mask, IDR_PRELOAD_SIZE))
    1473           0 :                 local_lock(&radix_tree_preloads.lock);
    1474        8620 : }
    1475             : EXPORT_SYMBOL(idr_preload);
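                      : 
                      : /*
                      :  * Example (editorial sketch): the usual idr_preload() pattern.  The idr,
                      :  * lock and object are hypothetical; preloading with GFP_KERNEL lets
                      :  * idr_alloc() run with GFP_NOWAIT inside the spinlock because the node
                      :  * memory was set aside up front.
                      :  */
                      : static DEFINE_IDR(example_idr);
                      : static DEFINE_SPINLOCK(example_idr_lock);
                      : 
                      : static int example_assign_id(void *object)
                      : {
                      :         int id;
                      : 
                      :         idr_preload(GFP_KERNEL);
                      :         spin_lock(&example_idr_lock);
                      :         id = idr_alloc(&example_idr, object, 0, 0, GFP_NOWAIT);
                      :         spin_unlock(&example_idr_lock);
                      :         idr_preload_end();
                      : 
                      :         return id;      /* new id, or a negative errno on failure */
                      : }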
    1476             : 
    1477        8773 : void __rcu **idr_get_free(struct radix_tree_root *root,
    1478             :                               struct radix_tree_iter *iter, gfp_t gfp,
    1479             :                               unsigned long max)
    1480             : {
    1481        8773 :         struct radix_tree_node *node = NULL, *child;
    1482        8773 :         void __rcu **slot = (void __rcu **)&root->xa_head;
    1483        8773 :         unsigned long maxindex, start = iter->next_index;
    1484        8773 :         unsigned int shift, offset = 0;
    1485             : 
    1486             :  grow:
    1487        8773 :         shift = radix_tree_load_root(root, &child, &maxindex);
    1488        8773 :         if (!radix_tree_tagged(root, IDR_FREE))
    1489           0 :                 start = max(start, maxindex + 1);
    1490        8773 :         if (start > max)
    1491             :                 return ERR_PTR(-ENOSPC);
    1492             : 
    1493        8773 :         if (start > maxindex) {
    1494         116 :                 int error = radix_tree_extend(root, gfp, start, shift);
    1495         116 :                 if (error < 0)
    1496           0 :                         return ERR_PTR(error);
    1497         116 :                 shift = error;
    1498         116 :                 child = rcu_dereference_raw(root->xa_head);
    1499             :         }
    1500        8773 :         if (start == 0 && shift == 0)
    1501          11 :                 shift = RADIX_TREE_MAP_SHIFT;
    1502             : 
    1503       30380 :         while (shift) {
    1504       21607 :                 shift -= RADIX_TREE_MAP_SHIFT;
    1505       21607 :                 if (child == NULL) {
    1506             :                         /* Have to add a child node.  */
    1507         262 :                         child = radix_tree_node_alloc(gfp, node, root, shift,
    1508             :                                                         offset, 0, 0);
    1509         262 :                         if (!child)
    1510             :                                 return ERR_PTR(-ENOMEM);
    1511         524 :                         all_tag_set(child, IDR_FREE);
    1512         524 :                         rcu_assign_pointer(*slot, node_to_entry(child));
    1513         262 :                         if (node)
    1514         249 :                                 node->count++;
    1515       42690 :                 } else if (!radix_tree_is_internal_node(child))
    1516             :                         break;
    1517             : 
    1518       43214 :                 node = entry_to_node(child);
    1519       21607 :                 offset = radix_tree_descend(node, &child, start);
    1520       21607 :                 if (!tag_get(node, IDR_FREE, offset)) {
    1521         294 :                         offset = radix_tree_find_next_bit(node, IDR_FREE,
    1522         147 :                                                         offset + 1);
    1523         294 :                         start = next_index(start, node, offset);
    1524         147 :                         if (start > max || start == 0)
    1525             :                                 return ERR_PTR(-ENOSPC);
    1526         147 :                         while (offset == RADIX_TREE_MAP_SIZE) {
    1527           0 :                                 offset = node->offset + 1;
    1528           0 :                                 node = node->parent;
    1529           0 :                                 if (!node)
    1530             :                                         goto grow;
    1531           0 :                                 shift = node->shift;
    1532             :                         }
    1533         147 :                         child = rcu_dereference_raw(node->slots[offset]);
    1534             :                 }
    1535       21607 :                 slot = &node->slots[offset];
    1536             :         }
    1537             : 
    1538        8773 :         iter->index = start;
    1539        8773 :         if (node)
    1540       17546 :                 iter->next_index = 1 + min(max, (start | node_maxindex(node)));
    1541             :         else
    1542           0 :                 iter->next_index = 1;
    1543        8773 :         iter->node = node;
    1544        8773 :         set_iter_tags(iter, node, offset, IDR_FREE);
    1545             : 
    1546             :         return slot;
    1547             : }
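                      : 
                      : /*
                      :  * Example (editorial sketch): a simplified paraphrase of how lib/idr.c
                      :  * consumes idr_get_free() (see idr_alloc_u32() for the real thing; the
                      :  * idr_base handling and overflow checks are omitted here).  The returned
                      :  * slot is claimed by storing the pointer and clearing IDR_FREE.
                      :  */
                      : static int example_idr_alloc_like(struct idr *idr, void *ptr,
                      :                                   unsigned long max, gfp_t gfp)
                      : {
                      :         struct radix_tree_iter iter;
                      :         void __rcu **slot;
                      : 
                      :         radix_tree_iter_init(&iter, 0);
                      :         slot = idr_get_free(&idr->idr_rt, &iter, gfp, max);
                      :         if (IS_ERR(slot))
                      :                 return PTR_ERR(slot);
                      : 
                      :         radix_tree_iter_replace(&idr->idr_rt, &iter, slot, ptr);
                      :         radix_tree_iter_tag_clear(&idr->idr_rt, &iter, IDR_FREE);
                      : 
                      :         return iter.index;      /* the allocated id */
                      : }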
    1548             : 
    1549             : /**
    1550             :  * idr_destroy - release all internal memory from an IDR
    1551             :  * @idr: idr handle
    1552             :  *
    1553             :  * After this function is called, the IDR is empty, and may be reused or
    1554             :  * the data structure containing it may be freed.
    1555             :  *
    1556             :  * A typical clean-up sequence for objects stored in an idr tree will use
    1557             :  * idr_for_each() to free all objects, if necessary, then idr_destroy() to
    1558             :  * free the memory used to keep track of those objects.
    1559             :  */
    1560          10 : void idr_destroy(struct idr *idr)
    1561             : {
    1562          10 :         struct radix_tree_node *node = rcu_dereference_raw(idr->idr_rt.xa_head);
    1563          10 :         if (radix_tree_is_internal_node(node))
    1564           0 :                 radix_tree_free_nodes(node);
    1565          10 :         idr->idr_rt.xa_head = NULL;
    1566          20 :         root_tag_set(&idr->idr_rt, IDR_FREE);
    1567          10 : }
    1568             : EXPORT_SYMBOL(idr_destroy);
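                      : 
                      : /*
                      :  * Example (editorial sketch): the clean-up sequence described above, for
                      :  * the hypothetical example_idr whose entries were kmalloc()ed.
                      :  * idr_for_each() frees the objects, then idr_destroy() releases the tree
                      :  * nodes themselves.
                      :  */
                      : static int example_free_one(int id, void *p, void *data)
                      : {
                      :         kfree(p);
                      :         return 0;
                      : }
                      : 
                      : static void example_teardown(void)
                      : {
                      :         idr_for_each(&example_idr, example_free_one, NULL);
                      :         idr_destroy(&example_idr);
                      : }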
    1569             : 
    1570             : static void
    1571         168 : radix_tree_node_ctor(void *arg)
    1572             : {
    1573         168 :         struct radix_tree_node *node = arg;
    1574             : 
    1575         336 :         memset(node, 0, sizeof(*node));
    1576         336 :         INIT_LIST_HEAD(&node->private_list);
    1577         168 : }
    1578             : 
    1579           0 : static int radix_tree_cpu_dead(unsigned int cpu)
    1580             : {
    1581             :         struct radix_tree_preload *rtp;
    1582             :         struct radix_tree_node *node;
    1583             : 
    1584             :         /* Free per-cpu pool of preloaded nodes */
    1585           0 :         rtp = &per_cpu(radix_tree_preloads, cpu);
    1586           0 :         while (rtp->nr) {
    1587           0 :                 node = rtp->nodes;
    1588           0 :                 rtp->nodes = node->parent;
    1589           0 :                 kmem_cache_free(radix_tree_node_cachep, node);
    1590           0 :                 rtp->nr--;
    1591             :         }
    1592           0 :         return 0;
    1593             : }
    1594             : 
    1595           1 : void __init radix_tree_init(void)
    1596             : {
    1597             :         int ret;
    1598             : 
    1599             :         BUILD_BUG_ON(RADIX_TREE_MAX_TAGS + __GFP_BITS_SHIFT > 32);
    1600             :         BUILD_BUG_ON(ROOT_IS_IDR & ~GFP_ZONEMASK);
    1601             :         BUILD_BUG_ON(XA_CHUNK_SIZE > 255);
    1602           1 :         radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
    1603             :                         sizeof(struct radix_tree_node), 0,
    1604             :                         SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
    1605             :                         radix_tree_node_ctor);
    1606           1 :         ret = cpuhp_setup_state_nocalls(CPUHP_RADIX_DEAD, "lib/radix:dead",
    1607             :                                         NULL, radix_tree_cpu_dead);
    1608           1 :         WARN_ON(ret < 0);
    1609           1 : }

Generated by: LCOV version 1.14