author     Willy Tarreau <w@1wt.eu>   2020-10-24 18:36:27 +0200
committer  Willy Tarreau <w@1wt.eu>   2020-10-24 20:21:57 +0200
commit     c6e169bc146a76d5ccbf4d3825f705414352bd03 (patch)
tree       9974f18c3bbc38837e4dd0517bea1f3ae712e05a /lib/random32.c
parent     random32: add noise from network and scheduling activity (diff)
download   linux-c6e169bc146a76d5ccbf4d3825f705414352bd03.tar.xz
           linux-c6e169bc146a76d5ccbf4d3825f705414352bd03.zip
random32: add a selftest for the prandom32 code
Given that this code is new, let's add a selftest for it as well. It
doesn't rely on fixed sets; instead it picks 1024 numbers and verifies
that they're not more correlated than desired.

Link: https://lore.kernel.org/netdev/20200808152628.GA27941@SDF.ORG/
Cc: George Spelvin <lkml@sdf.org>
Cc: Amit Klein <aksecurity@gmail.com>
Cc: Eric Dumazet <edumazet@google.com>
Cc: "Jason A. Donenfeld" <Jason@zx2c4.com>
Cc: Andy Lutomirski <luto@kernel.org>
Cc: Kees Cook <keescook@chromium.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: tytso@mit.edu
Cc: Florian Westphal <fw@strlen.de>
Cc: Marc Plumb <lkml.mplumb@gmail.com>
Signed-off-by: Willy Tarreau <w@1wt.eu>
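For readers who want to try the metric outside the kernel, here is a minimal
userspace sketch of the same idea; it is not the patch itself. getrandom(2)
stands in for prandom_u32(), __builtin_popcount() for hweight32(), and the
sample count and the 6-bit threshold simply mirror the values used in the
patch below; everything else (file name, variable names) is arbitrary.

/* Userspace sketch of the correlation metric used by the selftest:
 * draw TEST_SIZE 32-bit values, compare every pair, and measure how far
 * the pairwise Hamming distance strays from the expected 16 bits.
 * Assumes glibc 2.25+ for getrandom() and GCC/Clang for the popcount
 * builtin.
 */
#include <stdint.h>
#include <stdio.h>
#include <math.h>
#include <sys/types.h>
#include <sys/random.h>

#define TEST_SIZE 1024

int main(void)
{
	static uint32_t data[TEST_SIZE];
	uint64_t total = 0;
	uint32_t flip = 0;
	size_t got = 0;

	/* Fill the sample buffer; getrandom() may return short reads. */
	while (got < sizeof(data)) {
		ssize_t ret = getrandom((char *)data + got, sizeof(data) - got, 0);

		if (ret <= 0)
			return 1;
		got += ret;
	}

	for (int x = 0; x < TEST_SIZE; x++) {
		for (int y = 0; y < TEST_SIZE; y++) {
			if (x == y)
				continue;

			uint32_t diff = data[x] ^ data[y];
			int bits = __builtin_popcount(diff); /* Hamming distance */

			flip  |= diff;                       /* bits seen to change at least once */
			total += (bits - 16) * (bits - 16);  /* squared deviation from 16 */
		}
	}

	/* Average deviation, doubled for resolution:
	 * 2*sqrt(total/pairs) == sqrt(4*total/pairs).
	 */
	unsigned int dev = (unsigned int)sqrt(4.0 * total /
					      ((double)TEST_SIZE * (TEST_SIZE - 1)));

	printf("deviation ~%u bits, fixed_mask=%#x\n", dev, ~flip);
	return dev > 6;
}

Compiled with something like gcc -O2 correlation_check.c -lm, a healthy
32-bit source should typically report a deviation around 5 bits and
fixed_mask=0, while a source with stuck or strongly correlated bits pushes
the deviation above the 6-bit threshold and exposes the stuck bits in
fixed_mask.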
Diffstat (limited to 'lib/random32.c')
-rw-r--r--   lib/random32.c   56
1 file changed, 56 insertions, 0 deletions
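As an aside, the 601M and ~89% figures quoted in the new comment block below
follow from the binomial distribution of the Hamming distance between two
independent 32-bit values (the 89% being the 12..20 band, i.e. within 4 bits
of the expected 16). A standalone check, assuming nothing beyond the C
standard library:

/* Sanity check of the constants in the selftest's comment block:
 * exactly C(32,16) values differ from a given 32-bit value in 16 bit
 * positions, and the Hamming distance between two independent random
 * 32-bit values falls in the 12..20 band about 89% of the time.
 */
#include <stdint.h>
#include <stdio.h>

/* Exact binomial coefficient; intermediate products stay well below
 * 2^64 for n = 32.
 */
static uint64_t binom(int n, int k)
{
	uint64_t r = 1;

	for (int i = 1; i <= k; i++)
		r = r * (n - k + i) / i;
	return r;
}

int main(void)
{
	uint64_t band = 0;

	for (int k = 12; k <= 20; k++)
		band += binom(32, k);

	printf("C(32,16)            = %llu\n",
	       (unsigned long long)binom(32, 16));	/* 601080390, ~601M */
	printf("P(12 <= dist <= 20) = %.1f%%\n",
	       100.0 * band / 4294967296.0);		/* ~89% */
	return 0;
}

Running it should print 601080390 and about 89%, matching the figures in
the comment.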
diff --git a/lib/random32.c b/lib/random32.c
index 7f047844e494..4d0e05e471d7 100644
--- a/lib/random32.c
+++ b/lib/random32.c
@@ -38,6 +38,7 @@
#include <linux/jiffies.h>
#include <linux/random.h>
#include <linux/sched.h>
+#include <linux/bitops.h>
#include <asm/unaligned.h>
#include <trace/events/random.h>
@@ -556,6 +557,61 @@ static void prandom_timer_start(struct random_ready_callback *unused)
mod_timer(&seed_timer, jiffies);
}
+#ifdef CONFIG_RANDOM32_SELFTEST
+/* Principle: True 32-bit random numbers will all have 16 differing bits on
+ * average. For each 32-bit number, there are 601M numbers differing by 16
+ * bits, and 89% of the numbers differ by at least 12 bits. Note that more
+ * than 16 differing bits also implies a correlation with inverted bits. Thus
+ * we take 1024 random numbers and compare each of them to the other ones,
+ * counting the deviation of correlated bits to 16. Constants report 32,
+ * counters 32-log2(TEST_SIZE), and pure randoms, around 6 or lower. With the
+ * u32 total, TEST_SIZE may be as large as 4096 samples.
+ */
+#define TEST_SIZE 1024
+static int __init prandom32_state_selftest(void)
+{
+ unsigned int x, y, bits, samples;
+ u32 xor, flip;
+ u32 total;
+ u32 *data;
+
+ data = kmalloc(sizeof(*data) * TEST_SIZE, GFP_KERNEL);
+ if (!data)
+ return 0;
+
+ for (samples = 0; samples < TEST_SIZE; samples++)
+ data[samples] = prandom_u32();
+
+ flip = total = 0;
+ for (x = 0; x < samples; x++) {
+ for (y = 0; y < samples; y++) {
+ if (x == y)
+ continue;
+ xor = data[x] ^ data[y];
+ flip |= xor;
+ bits = hweight32(xor);
+ total += (bits - 16) * (bits - 16);
+ }
+ }
+
+ /* We'll return the average deviation as 2*sqrt(total/pairs), which
+ * is also sqrt(4*total/pairs) and provides a better resolution.
+ */
+ bits = int_sqrt(total / (samples * (samples - 1)) * 4);
+ if (bits > 6)
+ pr_warn("prandom32: self test failed (at least %u bits"
+ " correlated, fixed_mask=%#x fixed_value=%#x\n",
+ bits, ~flip, data[0] & ~flip);
+ else
+ pr_info("prandom32: self test passed (less than %u bits"
+ " correlated)\n",
+ bits+1);
+ kfree(data);
+ return 0;
+}
+core_initcall(prandom32_state_selftest);
+#endif /* CONFIG_RANDOM32_SELFTEST */
+
/*
* Start periodic full reseeding as soon as strong
* random numbers are available.