author     David S. Miller <davem@davemloft.net>	2012-04-10 20:30:45 +0200
committer  David S. Miller <davem@davemloft.net>	2012-04-10 20:30:45 +0200
commit     06eb4eafbdc0796d741d139a44f1253278da8611 (patch)
tree       fbdb44317130c371928154c9e6903e699fe2b995 /lib
parent     wiznet: Fix Kconfig dependencies. (diff)
parent     Make the "word-at-a-time" helper functions more commonly usable (diff)
download   linux-06eb4eafbdc0796d741d139a44f1253278da8611.tar.xz
           linux-06eb4eafbdc0796d741d139a44f1253278da8611.zip
Merge git://git.kernel.org/pub/scm/linux/kernel/git/davem/net
Diffstat (limited to 'lib')
-rw-r--r--	lib/Kconfig		  9
-rw-r--r--	lib/Kconfig.debug	  1
-rw-r--r--	lib/cpumask.c		 12
-rw-r--r--	lib/llist.c		  1
-rw-r--r--	lib/radix-tree.c	442
-rw-r--r--	lib/raid6/altivec.uc	  2
6 files changed, 195 insertions, 272 deletions
diff --git a/lib/Kconfig b/lib/Kconfig
index 43359bb1ca90..4a8aba2e5cc0 100644
--- a/lib/Kconfig
+++ b/lib/Kconfig
@@ -29,6 +29,10 @@ config GENERIC_IOMAP
 	bool
 	select GENERIC_PCI_IOMAP

+config GENERIC_IO
+	boolean
+	default n
+
 config CRC_CCITT
 	tristate "CRC-CCITT functions"
 	help
@@ -84,6 +88,10 @@ choice
 	prompt "CRC32 implementation"
 	depends on CRC32
 	default CRC32_SLICEBY8
+	help
+	  This option allows a kernel builder to override the default choice
+	  of CRC32 algorithm. Choose the default ("slice by 8") unless you
+	  know that you need one of the others.

 config CRC32_SLICEBY8
 	bool "Slice by 8 bytes"
@@ -277,6 +285,7 @@ config BTREE
 config HAS_IOMEM
 	boolean
 	depends on !NO_IOMEM
+	select GENERIC_IO
 	default y

 config HAS_IOPORT
diff --git a/lib/Kconfig.debug b/lib/Kconfig.debug
index f7af95d304c5..6777153f18f3 100644
--- a/lib/Kconfig.debug
+++ b/lib/Kconfig.debug
@@ -499,6 +499,7 @@ config RT_MUTEX_TESTER
 config DEBUG_SPINLOCK
 	bool "Spinlock and rw-lock debugging: basic checks"
 	depends on DEBUG_KERNEL
+	select UNINLINE_SPIN_UNLOCK
 	help
 	  Say Y here and build SMP to catch missing spinlock initialization
 	  and certain other kinds of spinlock errors commonly made. This is
diff --git a/lib/cpumask.c b/lib/cpumask.c
index 0b660118ed91..402a54ac35cb 100644
--- a/lib/cpumask.c
+++ b/lib/cpumask.c
@@ -26,18 +26,6 @@ int __next_cpu_nr(int n, const cpumask_t *srcp)
 EXPORT_SYMBOL(__next_cpu_nr);
 #endif

-int __any_online_cpu(const cpumask_t *mask)
-{
-	int cpu;
-
-	for_each_cpu(cpu, mask) {
-		if (cpu_online(cpu))
-			break;
-	}
-	return cpu;
-}
-EXPORT_SYMBOL(__any_online_cpu);
-
 /**
  * cpumask_next_and - get the next cpu in *src1p & *src2p
  * @n: the cpu prior to the place to search (ie. return will be > @n)
diff --git a/lib/llist.c b/lib/llist.c
index 8221b3d2fbf0..4a15115e90f8 100644
--- a/lib/llist.c
+++ b/lib/llist.c
@@ -27,7 +27,6 @@
 #include <linux/interrupt.h>
 #include <linux/llist.h>

-#include <asm/system.h>

 /**
  * llist_add_batch - add several linked entries in batch
diff --git a/lib/radix-tree.c b/lib/radix-tree.c
index 3e69c2b66c94..86516f5588e3 100644
--- a/lib/radix-tree.c
+++ b/lib/radix-tree.c
@@ -3,6 +3,7 @@
  * Portions Copyright (C) 2001 Christoph Hellwig
  * Copyright (C) 2005 SGI, Christoph Lameter
  * Copyright (C) 2006 Nick Piggin
+ * Copyright (C) 2012 Konstantin Khlebnikov
  *
  * This program is free software; you can redistribute it and/or
  * modify it under the terms of the GNU General Public License as
@@ -146,6 +147,43 @@ static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
 	}
 	return 0;
 }
+
+/**
+ * radix_tree_find_next_bit - find the next set bit in a memory region
+ *
+ * @addr: The address to base the search on
+ * @size: The bitmap size in bits
+ * @offset: The bitnumber to start searching at
+ *
+ * Unrollable variant of find_next_bit() for constant size arrays.
+ * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
+ * Returns next bit offset, or size if nothing found.
+ */
+static __always_inline unsigned long
+radix_tree_find_next_bit(const unsigned long *addr,
+			 unsigned long size, unsigned long offset)
+{
+	if (!__builtin_constant_p(size))
+		return find_next_bit(addr, size, offset);
+
+	if (offset < size) {
+		unsigned long tmp;
+
+		addr += offset / BITS_PER_LONG;
+		tmp = *addr >> (offset % BITS_PER_LONG);
+		if (tmp)
+			return __ffs(tmp) + offset;
+		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
+		while (offset < size) {
+			tmp = *++addr;
+			if (tmp)
+				return __ffs(tmp) + offset;
+			offset += BITS_PER_LONG;
+		}
+	}
+	return size;
+}
+
 /*
  * This assumes that the caller has performed appropriate preallocation, and
  * that the caller has pinned this thread of control to the current CPU.
@@ -613,6 +651,119 @@ int radix_tree_tag_get(struct radix_tree_root *root,
 EXPORT_SYMBOL(radix_tree_tag_get);

 /**
+ * radix_tree_next_chunk - find next chunk of slots for iteration
+ *
+ * @root: radix tree root
+ * @iter: iterator state
+ * @flags: RADIX_TREE_ITER_* flags and tag index
+ * Returns: pointer to chunk first slot, or NULL if iteration is over
+ */
+void **radix_tree_next_chunk(struct radix_tree_root *root,
+			     struct radix_tree_iter *iter, unsigned flags)
+{
+	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
+	struct radix_tree_node *rnode, *node;
+	unsigned long index, offset;
+
+	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
+		return NULL;
+
+	/*
+	 * Catch next_index overflow after ~0UL. iter->index never overflows
+	 * during iterating; it can be zero only at the beginning.
+	 * And we cannot overflow iter->next_index in a single step,
+	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
+	 */
+	index = iter->next_index;
+	if (!index && iter->index)
+		return NULL;
+
+	rnode = rcu_dereference_raw(root->rnode);
+	if (radix_tree_is_indirect_ptr(rnode)) {
+		rnode = indirect_to_ptr(rnode);
+	} else if (rnode && !index) {
+		/* Single-slot tree */
+		iter->index = 0;
+		iter->next_index = 1;
+		iter->tags = 1;
+		return (void **)&root->rnode;
+	} else
+		return NULL;
+
+restart:
+	shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT;
+	offset = index >> shift;
+
+	/* Index outside of the tree */
+	if (offset >= RADIX_TREE_MAP_SIZE)
+		return NULL;
+
+	node = rnode;
+	while (1) {
+		if ((flags & RADIX_TREE_ITER_TAGGED) ?
+				!test_bit(offset, node->tags[tag]) :
+				!node->slots[offset]) {
+			/* Hole detected */
+			if (flags & RADIX_TREE_ITER_CONTIG)
+				return NULL;
+
+			if (flags & RADIX_TREE_ITER_TAGGED)
+				offset = radix_tree_find_next_bit(
+						node->tags[tag],
+						RADIX_TREE_MAP_SIZE,
+						offset + 1);
+			else
+				while (++offset < RADIX_TREE_MAP_SIZE) {
+					if (node->slots[offset])
+						break;
+				}
+			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
+			index += offset << shift;
+			/* Overflow after ~0UL */
+			if (!index)
+				return NULL;
+			if (offset == RADIX_TREE_MAP_SIZE)
+				goto restart;
+		}

+		/* This is leaf-node */
+		if (!shift)
+			break;
+
+		node = rcu_dereference_raw(node->slots[offset]);
+		if (node == NULL)
+			goto restart;
+		shift -= RADIX_TREE_MAP_SHIFT;
+		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
+	}
+
+	/* Update the iterator state */
+	iter->index = index;
+	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;
+
+	/* Construct iter->tags bit-mask from node->tags[tag] array */
+	if (flags & RADIX_TREE_ITER_TAGGED) {
+		unsigned tag_long, tag_bit;
+
+		tag_long = offset / BITS_PER_LONG;
+		tag_bit = offset % BITS_PER_LONG;
+		iter->tags = node->tags[tag][tag_long] >> tag_bit;
+		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
+		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
+			/* Pick tags from next element */
+			if (tag_bit)
+				iter->tags |= node->tags[tag][tag_long + 1] <<
+						(BITS_PER_LONG - tag_bit);
+			/* Clip chunk size, here only BITS_PER_LONG tags */
+			iter->next_index = index + BITS_PER_LONG;
+		}
+	}
+
+	return node->slots + offset;
+}
+EXPORT_SYMBOL(radix_tree_next_chunk);
+
+/**
  * radix_tree_range_tag_if_tagged - for each item in given range set given
  * tag if item has another tag set
  * @root: radix tree root
@@ -817,57 +968,6 @@ unsigned long radix_tree_prev_hole(struct radix_tree_root *root,
 }
 EXPORT_SYMBOL(radix_tree_prev_hole);

-static unsigned int
-__lookup(struct radix_tree_node *slot, void ***results, unsigned long *indices,
-	unsigned long index, unsigned int max_items, unsigned long *next_index)
-{
-	unsigned int nr_found = 0;
-	unsigned int shift, height;
-	unsigned long i;
-
-	height = slot->height;
-	if (height == 0)
-		goto out;
-	shift = (height-1) * RADIX_TREE_MAP_SHIFT;
-
-	for ( ; height > 1; height--) {
-		i = (index >> shift) & RADIX_TREE_MAP_MASK;
-		for (;;) {
-			if (slot->slots[i] != NULL)
-				break;
-			index &= ~((1UL << shift) - 1);
-			index += 1UL << shift;
-			if (index == 0)
-				goto out;	/* 32-bit wraparound */
-			i++;
-			if (i == RADIX_TREE_MAP_SIZE)
-				goto out;
-		}
-
-		shift -= RADIX_TREE_MAP_SHIFT;
-		slot = rcu_dereference_raw(slot->slots[i]);
-		if (slot == NULL)
-			goto out;
-	}
-
-	/* Bottom level: grab some items */
-	for (i = index & RADIX_TREE_MAP_MASK; i < RADIX_TREE_MAP_SIZE; i++) {
-		if (slot->slots[i]) {
-			results[nr_found] = &(slot->slots[i]);
-			if (indices)
-				indices[nr_found] = index;
-			if (++nr_found == max_items) {
-				index++;
-				goto out;
-			}
-		}
-		index++;
-	}
-out:
-	*next_index = index;
-	return nr_found;
-}
-
 /**
  * radix_tree_gang_lookup - perform multiple lookup on a radix tree
  * @root: radix tree root
@@ -891,48 +991,19 @@ unsigned int
 radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
 			unsigned long first_index, unsigned int max_items)
 {
-	unsigned long max_index;
-	struct radix_tree_node *node;
-	unsigned long cur_index = first_index;
-	unsigned int ret;
+	struct radix_tree_iter iter;
+	void **slot;
+	unsigned int ret = 0;

-	node = rcu_dereference_raw(root->rnode);
-	if (!node)
+	if (unlikely(!max_items))
 		return 0;

-	if (!radix_tree_is_indirect_ptr(node)) {
-		if (first_index > 0)
-			return 0;
-		results[0] = node;
-		return 1;
-	}
-	node = indirect_to_ptr(node);
-
-	max_index = radix_tree_maxindex(node->height);
-
-	ret = 0;
-	while (ret < max_items) {
-		unsigned int nr_found, slots_found, i;
-		unsigned long next_index;	/* Index of next search */
-
-		if (cur_index > max_index)
-			break;
-		slots_found = __lookup(node, (void ***)results + ret, NULL,
-				cur_index, max_items - ret, &next_index);
-		nr_found = 0;
-		for (i = 0; i < slots_found; i++) {
-			struct radix_tree_node *slot;
-			slot = *(((void ***)results)[ret + i]);
-			if (!slot)
-				continue;
-			results[ret + nr_found] =
-				indirect_to_ptr(rcu_dereference_raw(slot));
-			nr_found++;
-		}
-		ret += nr_found;
-		if (next_index == 0)
+	radix_tree_for_each_slot(slot, root, &iter, first_index) {
+		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
+		if (!results[ret])
+			continue;
+		if (++ret == max_items)
 			break;
-		cur_index = next_index;
 	}

 	return ret;
@@ -962,112 +1033,25 @@ radix_tree_gang_lookup_slot(struct radix_tree_root *root,
 			void ***results, unsigned long *indices,
 			unsigned long first_index, unsigned int max_items)
 {
-	unsigned long max_index;
-	struct radix_tree_node *node;
-	unsigned long cur_index = first_index;
-	unsigned int ret;
+	struct radix_tree_iter iter;
+	void **slot;
+	unsigned int ret = 0;

-	node = rcu_dereference_raw(root->rnode);
-	if (!node)
+	if (unlikely(!max_items))
 		return 0;

-	if (!radix_tree_is_indirect_ptr(node)) {
-		if (first_index > 0)
-			return 0;
-		results[0] = (void **)&root->rnode;
+	radix_tree_for_each_slot(slot, root, &iter, first_index) {
+		results[ret] = slot;
 		if (indices)
-			indices[0] = 0;
-		return 1;
-	}
-	node = indirect_to_ptr(node);
-
-	max_index = radix_tree_maxindex(node->height);
-
-	ret = 0;
-	while (ret < max_items) {
-		unsigned int slots_found;
-		unsigned long next_index;	/* Index of next search */
-
-		if (cur_index > max_index)
+			indices[ret] = iter.index;
+		if (++ret == max_items)
 			break;
-		slots_found = __lookup(node, results + ret,
-				indices ? indices + ret : NULL,
-				cur_index, max_items - ret, &next_index);
-		ret += slots_found;
-		if (next_index == 0)
-			break;
-		cur_index = next_index;
 	}

 	return ret;
 }
 EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

-/*
- * FIXME: the two tag_get()s here should use find_next_bit() instead of
- * open-coding the search.
- */
-static unsigned int
-__lookup_tag(struct radix_tree_node *slot, void ***results, unsigned long index,
-	unsigned int max_items, unsigned long *next_index, unsigned int tag)
-{
-	unsigned int nr_found = 0;
-	unsigned int shift, height;
-
-	height = slot->height;
-	if (height == 0)
-		goto out;
-	shift = (height-1) * RADIX_TREE_MAP_SHIFT;
-
-	while (height > 0) {
-		unsigned long i = (index >> shift) & RADIX_TREE_MAP_MASK ;
-
-		for (;;) {
-			if (tag_get(slot, tag, i))
-				break;
-			index &= ~((1UL << shift) - 1);
-			index += 1UL << shift;
-			if (index == 0)
-				goto out;	/* 32-bit wraparound */
-			i++;
-			if (i == RADIX_TREE_MAP_SIZE)
-				goto out;
-		}
-		height--;
-		if (height == 0) {	/* Bottom level: grab some items */
-			unsigned long j = index & RADIX_TREE_MAP_MASK;
-
-			for ( ; j < RADIX_TREE_MAP_SIZE; j++) {
-				index++;
-				if (!tag_get(slot, tag, j))
-					continue;
-				/*
-				 * Even though the tag was found set, we need to
-				 * recheck that we have a non-NULL node, because
-				 * if this lookup is lockless, it may have been
-				 * subsequently deleted.
-				 *
-				 * Similar care must be taken in any place that
-				 * lookup ->slots[x] without a lock (ie. can't
-				 * rely on its value remaining the same).
-				 */
-				if (slot->slots[j]) {
-					results[nr_found++] = &(slot->slots[j]);
-					if (nr_found == max_items)
-						goto out;
-				}
-			}
-		}
-		shift -= RADIX_TREE_MAP_SHIFT;
-		slot = rcu_dereference_raw(slot->slots[i]);
-		if (slot == NULL)
-			break;
-	}
-out:
-	*next_index = index;
-	return nr_found;
-}
-
 /**
  * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
  * based on a tag
@@ -1086,52 +1070,19 @@ radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
 		unsigned long first_index, unsigned int max_items,
 		unsigned int tag)
 {
-	struct radix_tree_node *node;
-	unsigned long max_index;
-	unsigned long cur_index = first_index;
-	unsigned int ret;
-
-	/* check the root's tag bit */
-	if (!root_tag_get(root, tag))
-		return 0;
+	struct radix_tree_iter iter;
+	void **slot;
+	unsigned int ret = 0;

-	node = rcu_dereference_raw(root->rnode);
-	if (!node)
+	if (unlikely(!max_items))
 		return 0;

-	if (!radix_tree_is_indirect_ptr(node)) {
-		if (first_index > 0)
-			return 0;
-		results[0] = node;
-		return 1;
-	}
-	node = indirect_to_ptr(node);
-
-	max_index = radix_tree_maxindex(node->height);
-
-	ret = 0;
-	while (ret < max_items) {
-		unsigned int nr_found, slots_found, i;
-		unsigned long next_index;	/* Index of next search */
-
-		if (cur_index > max_index)
-			break;
-		slots_found = __lookup_tag(node, (void ***)results + ret,
-				cur_index, max_items - ret, &next_index, tag);
-		nr_found = 0;
-		for (i = 0; i < slots_found; i++) {
-			struct radix_tree_node *slot;
-			slot = *(((void ***)results)[ret + i]);
-			if (!slot)
-				continue;
-			results[ret + nr_found] =
-				indirect_to_ptr(rcu_dereference_raw(slot));
-			nr_found++;
-		}
-		ret += nr_found;
-		if (next_index == 0)
+	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
+		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
+		if (!results[ret])
+			continue;
+		if (++ret == max_items)
 			break;
-		cur_index = next_index;
 	}

 	return ret;
@@ -1156,42 +1107,17 @@ radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
 		unsigned long first_index, unsigned int max_items,
 		unsigned int tag)
 {
-	struct radix_tree_node *node;
-	unsigned long max_index;
-	unsigned long cur_index = first_index;
-	unsigned int ret;
+	struct radix_tree_iter iter;
+	void **slot;
+	unsigned int ret = 0;

-	/* check the root's tag bit */
-	if (!root_tag_get(root, tag))
-		return 0;
-
-	node = rcu_dereference_raw(root->rnode);
-	if (!node)
+	if (unlikely(!max_items))
 		return 0;

-	if (!radix_tree_is_indirect_ptr(node)) {
-		if (first_index > 0)
-			return 0;
-		results[0] = (void **)&root->rnode;
-		return 1;
-	}
-	node = indirect_to_ptr(node);
-
-	max_index = radix_tree_maxindex(node->height);
-
-	ret = 0;
-	while (ret < max_items) {
-		unsigned int slots_found;
-		unsigned long next_index;	/* Index of next search */
-
-		if (cur_index > max_index)
-			break;
-		slots_found = __lookup_tag(node, results + ret,
-				cur_index, max_items - ret, &next_index, tag);
-		ret += slots_found;
-		if (next_index == 0)
+	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
+		results[ret] = slot;
+		if (++ret == max_items)
 			break;
-		cur_index = next_index;
 	}

 	return ret;
diff --git a/lib/raid6/altivec.uc b/lib/raid6/altivec.uc
index 2654d5c854be..b71012b756f4 100644
--- a/lib/raid6/altivec.uc
+++ b/lib/raid6/altivec.uc
@@ -28,8 +28,8 @@

 #include <altivec.h>
 #ifdef __KERNEL__
-# include <asm/system.h>
 # include <asm/cputable.h>
+# include <asm/switch_to.h>
 #endif

 /*
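The gang-lookup rewrites above show the caller-side pattern for the new iterator: declare a struct radix_tree_iter on the stack and walk slots with radix_tree_for_each_slot() (or radix_tree_for_each_tagged()), which pulls one chunk at a time from radix_tree_next_chunk(). A minimal caller-side sketch, modelled on the radix_tree_gang_lookup_slot() conversion in this diff; collect_slots() is a hypothetical example, and the iterator macros are assumed to come from the <linux/radix-tree.h> half of the series, which is outside this lib/-limited diffstat:

/*
 * Sketch only, not part of the commit: collect up to max_items slot
 * pointers starting at first_index, the same way the converted
 * radix_tree_gang_lookup_slot() above does.
 */
#include <linux/radix-tree.h>

static unsigned int
collect_slots(struct radix_tree_root *root, void ***results,
	      unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;	/* carries index, next_index, tags */
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	/* Each pass of the loop body sees one populated slot in the chunk. */
	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}
	return ret;
}

Compared with the removed __lookup()/__lookup_tag() helpers, the state lives in the iterator rather than in per-call next_index bookkeeping, which is why each gang-lookup body above shrinks to a short loop.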
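radix_tree_find_next_bit(), added above, exists so that scanning a tag bitmap of RADIX_TREE_MAP_SIZE bits can be unrolled by the compiler when the size is a compile-time constant, instead of going through the generic out-of-line find_next_bit(). A standalone userspace model of the same search, under the assumption that __ffs() behaves like __builtin_ctzl(); the names bitmap_find_next and BITS_PER_LONG here are illustrative, not the kernel API:

/* Userspace model of the bitmap search used by radix_tree_find_next_bit(). */
#include <stdio.h>

#define BITS_PER_LONG	(8 * sizeof(unsigned long))

static unsigned long bitmap_find_next(const unsigned long *addr,
				      unsigned long size, unsigned long offset)
{
	if (offset < size) {
		unsigned long tmp;

		/* Start in the word containing 'offset' and mask lower bits. */
		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __builtin_ctzl(tmp) + offset;	/* __ffs() */
		/* Then scan whole words up to 'size'. */
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __builtin_ctzl(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;	/* no bit set at or after offset */
}

int main(void)
{
	unsigned long map[2] = { 1UL << 3, 1UL << 5 };

	/* First set bit at or after bit 4 is bit BITS_PER_LONG + 5. */
	printf("%lu\n", bitmap_find_next(map, 2 * BITS_PER_LONG, 4));
	return 0;
}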