author     Bob Pearson <rpearson@systemfabricworks.com>    2012-03-23 23:02:24 +0100
committer  Linus Torvalds <torvalds@linux-foundation.org>  2012-03-24 00:58:37 +0100
commit     324eb0f17d9dcead3c60c133aa244f6b3631fec9 (patch)
tree       657c564269a0fc1921f4252dbf0184725f5c85fa /lib/crc32.c
parent     crc32: make CRC_*_BITS definition correspond to actual bit counts (diff)
crc32: add slice-by-8 algorithm to existing code
Add a slicing-by-8 algorithm alongside the existing slicing-by-4 algorithm. This consists of:

 - extend the largest BITS size from 32 to 64
 - extend the tables from tab[4][256] up to tab[8][256]
 - add code for the inner loop

[djwong@us.ibm.com: Minor changelog tweaks]
Signed-off-by: Bob Pearson <rpearson@systemfabricworks.com>
Signed-off-by: Darrick J. Wong <djwong@us.ibm.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
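For readers who want to see the technique outside the kernel's macro machinery, below is a minimal, self-contained sketch of slicing-by-8 for the reflected CRC-32 polynomial (0xEDB88320). It is not the kernel's code: it assumes a little-endian host, builds its own tables at run time, and uses hypothetical names (crc32_build_tables, crc32_le_sb8). Only the shape of the inner loop mirrors the patch: two 32-bit loads per iteration, the first XORed with the running CRC and fed through tables 7..4, the second through tables 3..0, matching the DO_CRC8/DO_CRC4 split in the diff below.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define CRC32_POLY_LE 0xEDB88320u	/* reflected CRC-32 polynomial */

static uint32_t tab[8][256];

/* tab[k][b] is the CRC of byte b followed by k zero bytes */
static void crc32_build_tables(void)
{
	for (int i = 0; i < 256; i++) {
		uint32_t crc = (uint32_t)i;

		for (int j = 0; j < 8; j++)
			crc = (crc >> 1) ^ ((crc & 1) ? CRC32_POLY_LE : 0);
		tab[0][i] = crc;
	}
	for (int i = 0; i < 256; i++)
		for (int k = 1; k < 8; k++)
			tab[k][i] = (tab[k - 1][i] >> 8) ^ tab[0][tab[k - 1][i] & 255];
}

/* slicing-by-8 main loop: consume two 32-bit words (8 bytes) per iteration */
static uint32_t crc32_le_sb8(uint32_t crc, const unsigned char *buf, size_t len)
{
	while (len >= 8) {
		uint32_t q0, q1;

		/* unaligned-safe loads; byte order assumes a little-endian host */
		memcpy(&q0, buf, 4);
		memcpy(&q1, buf + 4, 4);
		q0 ^= crc;
		crc = tab[7][q0 & 255] ^ tab[6][(q0 >> 8) & 255] ^
		      tab[5][(q0 >> 16) & 255] ^ tab[4][(q0 >> 24) & 255] ^
		      tab[3][q1 & 255] ^ tab[2][(q1 >> 8) & 255] ^
		      tab[1][(q1 >> 16) & 255] ^ tab[0][(q1 >> 24) & 255];
		buf += 8;
		len -= 8;
	}
	/* byte-at-a-time tail for the remaining 0..7 bytes */
	while (len--)
		crc = tab[0][(crc ^ *buf++) & 255] ^ (crc >> 8);
	return crc;
}

int main(void)
{
	static const unsigned char msg[] = "123456789";
	uint32_t crc;

	crc32_build_tables();
	/* ~crc32(~0, ...) is the conventional CRC-32; "123456789" -> cbf43926 */
	crc = ~crc32_le_sb8(~0u, msg, sizeof(msg) - 1);
	printf("crc32(\"123456789\") = %08x\n", (unsigned int)crc);
	return 0;
}

In the patch itself, CRC_LE_BITS == 32 keeps the slicing-by-4 inner loop and 64 selects the slicing-by-8 one; the wider slice halves the number of loop iterations and gives the CPU eight independent table lookups per iteration to overlap.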
Diffstat (limited to 'lib/crc32.c')
-rw-r--r--  lib/crc32.c  38
1 file changed, 27 insertions(+), 11 deletions(-)
diff --git a/lib/crc32.c b/lib/crc32.c
index 5971f2ad46d5..826e16352e0e 100644
--- a/lib/crc32.c
+++ b/lib/crc32.c
@@ -47,25 +47,28 @@ MODULE_LICENSE("GPL");
#if CRC_LE_BITS > 8 || CRC_BE_BITS > 8
+/* implements slicing-by-4 or slicing-by-8 algorithm */
static inline u32
crc32_body(u32 crc, unsigned char const *buf, size_t len, const u32 (*tab)[256])
{
# ifdef __LITTLE_ENDIAN
# define DO_CRC(x) crc = t0[(crc ^ (x)) & 255] ^ (crc >> 8)
-# define DO_CRC4 crc = t3[(crc) & 255] ^ \
- t2[(crc >> 8) & 255] ^ \
- t1[(crc >> 16) & 255] ^ \
- t0[(crc >> 24) & 255]
+# define DO_CRC4 (t3[(q) & 255] ^ t2[(q >> 8) & 255] ^ \
+ t1[(q >> 16) & 255] ^ t0[(q >> 24) & 255])
+# define DO_CRC8 (t7[(q) & 255] ^ t6[(q >> 8) & 255] ^ \
+ t5[(q >> 16) & 255] ^ t4[(q >> 24) & 255])
# else
# define DO_CRC(x) crc = t0[((crc >> 24) ^ (x)) & 255] ^ (crc << 8)
-# define DO_CRC4 crc = t0[(crc) & 255] ^ \
- t1[(crc >> 8) & 255] ^ \
- t2[(crc >> 16) & 255] ^ \
- t3[(crc >> 24) & 255]
+# define DO_CRC4 (t0[(q) & 255] ^ t1[(q >> 8) & 255] ^ \
+ t2[(q >> 16) & 255] ^ t3[(q >> 24) & 255])
+# define DO_CRC8 (t4[(q) & 255] ^ t5[(q >> 8) & 255] ^ \
+ t6[(q >> 16) & 255] ^ t7[(q >> 24) & 255])
# endif
const u32 *b;
size_t rem_len;
const u32 *t0=tab[0], *t1=tab[1], *t2=tab[2], *t3=tab[3];
+ const u32 *t4 = tab[4], *t5 = tab[5], *t6 = tab[6], *t7 = tab[7];
+ u32 q;
/* Align it */
if (unlikely((long)buf & 3 && len)) {
@@ -73,13 +76,25 @@ crc32_body(u32 crc, unsigned char const *buf, size_t len, const u32 (*tab)[256])
DO_CRC(*buf++);
} while ((--len) && ((long)buf)&3);
}
+
+# if CRC_LE_BITS == 32
rem_len = len & 3;
- /* load data 32 bits wide, xor data 32 bits wide. */
len = len >> 2;
+# else
+ rem_len = len & 7;
+ len = len >> 3;
+# endif
+
b = (const u32 *)buf;
for (--b; len; --len) {
- crc ^= *++b; /* use pre increment for speed */
- DO_CRC4;
+ q = crc ^ *++b; /* use pre increment for speed */
+# if CRC_LE_BITS == 32
+ crc = DO_CRC4;
+# else
+ crc = DO_CRC8;
+ q = *++b;
+ crc ^= DO_CRC4;
+# endif
}
len = rem_len;
/* And the last few bytes */
@@ -92,6 +107,7 @@ crc32_body(u32 crc, unsigned char const *buf, size_t len, const u32 (*tab)[256])
return crc;
#undef DO_CRC
#undef DO_CRC4
+#undef DO_CRC8
}
#endif