Diffstat (limited to 'arch/powerpc/sysdev/msi_bitmap.c')
 arch/powerpc/sysdev/msi_bitmap.c | 25 ++++++++++---------------
 1 file changed, 10 insertions(+), 15 deletions(-)
diff --git a/arch/powerpc/sysdev/msi_bitmap.c b/arch/powerpc/sysdev/msi_bitmap.c
index 8b7d8fc2b120..2ff630267e9e 100644
--- a/arch/powerpc/sysdev/msi_bitmap.c
+++ b/arch/powerpc/sysdev/msi_bitmap.c
@@ -14,28 +14,23 @@
 #include <asm/msi_bitmap.h>
 #include <asm/setup.h>
 
-int msi_bitmap_alloc_hwirqs_from_offset(struct msi_bitmap *bmp, int offset,
-					int num)
+int msi_bitmap_alloc_hwirqs(struct msi_bitmap *bmp, int num)
 {
 	unsigned long flags;
-	int index;
-	int order = get_count_order(num);
+	int offset, order = get_count_order(num);
 
 	spin_lock_irqsave(&bmp->lock, flags);
-	index = bitmap_find_next_zero_area(bmp->bitmap, bmp->irq_count,
-					   offset, num, (1 << order) - 1);
-	bitmap_set(bmp->bitmap, index, num);
+	/*
+	 * This is fast, but stricter than we need. We might want to add
+	 * a fallback routine which does a linear search with no alignment.
+	 */
+	offset = bitmap_find_free_region(bmp->bitmap, bmp->irq_count, order);
 	spin_unlock_irqrestore(&bmp->lock, flags);
 
-	pr_debug("msi_bitmap: found %d free bits starting from offset %d at index %d\n",
-		 num, offset, index);
-
-	return index;
-}
+	pr_debug("msi_bitmap: allocated 0x%x (2^%d) at offset 0x%x\n",
+		 num, order, offset);
 
-int msi_bitmap_alloc_hwirqs(struct msi_bitmap *bmp, int num)
-{
-	return msi_bitmap_alloc_hwirqs_from_offset(bmp, 0, num);
+	return offset;
 }
 
 void msi_bitmap_free_hwirqs(struct msi_bitmap *bmp, unsigned int offset,
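The restored allocator rounds every request up to a power of two and places it on a matching boundary: num maps to order = get_count_order(num), and bitmap_find_free_region() only returns an offset that is a multiple of 1 << order, which is what the in-code comment means by "stricter than we need". Below is a minimal userspace sketch of that alignment rule, assuming nothing beyond standard C; the bitmap, the test_bit()/set_bit() helpers and find_free_region() are simplified stand-ins written for illustration, not the kernel implementations.

/*
 * Hypothetical userspace sketch, NOT kernel code: illustrates the
 * power-of-2 size and alignment that bitmap_find_free_region() enforces.
 */
#include <stdio.h>
#include <string.h>

#define NBITS 32

static int test_bit(const unsigned char *map, int bit)
{
	return (map[bit / 8] >> (bit % 8)) & 1;
}

static void set_bit(unsigned char *map, int bit)
{
	map[bit / 8] |= 1u << (bit % 8);
}

/* Mimics bitmap_find_free_region(): scan in aligned 2^order steps only. */
static int find_free_region(unsigned char *map, int bits, int order)
{
	int n = 1 << order;

	for (int pos = 0; pos + n <= bits; pos += n) {
		int i;

		for (i = 0; i < n && !test_bit(map, pos + i); i++)
			;
		if (i == n) {		/* whole aligned region is free */
			for (i = 0; i < n; i++)
				set_bit(map, pos + i);
			return pos;	/* found and claimed */
		}
	}
	return -1;			/* the kernel returns -ENOMEM here */
}

int main(void)
{
	unsigned char map[NBITS / 8];

	memset(map, 0, sizeof(map));
	set_bit(map, 0);	/* bit 0 busy: bits 1-3 free but unusable */

	/* num = 3 rounds up to order 2, i.e. 4 bits aligned to 4. */
	printf("got offset %d\n", find_free_region(map, NBITS, 2));
	return 0;
}

With bit 0 busy, a request for num = 3 (order 2) cannot use the free bits 1-3, so the sketch prints "got offset 4". Note also, as visible in the hunk above, that bitmap_find_free_region() claims the region itself, while the removed path paired bitmap_find_next_zero_area() with an explicit bitmap_set() and did so without checking whether the search had succeeded.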