1 // SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Unit tests and benchmarks for the CRC library functions
 *
 * Copyright 2024 Google LLC
 */
9 #include <kunit/test.h>
10 #include <linux/crc16.h>
11 #include <linux/crc-t10dif.h>
12 #include <linux/crc32.h>
13 #include <linux/crc32c.h>
14 #include <linux/crc64.h>
15 #include <linux/prandom.h>
16 #include <linux/vmalloc.h>
18 #define CRC_KUNIT_SEED 42
19 #define CRC_KUNIT_MAX_LEN 16384
20 #define CRC_KUNIT_NUM_TEST_ITERS 1000
22 static struct rnd_state rng;
23 static u8 *test_buffer;
24 static size_t test_buflen;
27 * struct crc_variant - describes a CRC variant
28 * @bits: Number of bits in the CRC, 1 <= @bits <= 64.
29 * @le: true if it's a "little endian" CRC (reversed mapping between bits and
30 * polynomial coefficients in each byte), false if it's a "big endian" CRC
31 * (natural mapping between bits and polynomial coefficients in each byte)
32 * @poly: The generator polynomial with the highest-order term omitted.
33 * Bit-reversed if @le is true.
34 * @func: The function to compute a CRC. The type signature uses u64 so that it
35 * can fit any CRC up to CRC-64.
36 * @combine_func: Optional function to combine two CRCs.
42 u64 (*func)(u64 crc, const u8 *p, size_t len);
43 u64 (*combine_func)(u64 crc1, u64 crc2, size_t len2);
46 static u32 rand32(void)
48 return prandom_u32_state(&rng);
51 static u64 rand64(void)
55 return ((u64)n << 32) | rand32();
58 static u64 crc_mask(const struct crc_variant *v)
60 return (u64)-1 >> (64 - v->bits);
63 /* Reference implementation of any CRC variant */
64 static u64 crc_ref(const struct crc_variant *v,
65 u64 crc, const u8 *p, size_t len)
69 for (i = 0; i < len; i++) {
70 for (j = 0; j < 8; j++) {
72 crc ^= (p[i] >> j) & 1;
73 crc = (crc >> 1) ^ ((crc & 1) ? v->poly : 0);
75 crc ^= (u64)((p[i] >> (7 - j)) & 1) <<
77 if (crc & (1ULL << (v->bits - 1)))
78 crc = ((crc << 1) ^ v->poly) &
88 static int crc_suite_init(struct kunit_suite *suite)
91 * Allocate the test buffer using vmalloc() with a page-aligned length
92 * so that it is immediately followed by a guard page. This allows
93 * buffer overreads to be detected, even in assembly code.
95 test_buflen = round_up(CRC_KUNIT_MAX_LEN, PAGE_SIZE);
96 test_buffer = vmalloc(test_buflen);
100 prandom_seed_state(&rng, CRC_KUNIT_SEED);
101 prandom_bytes_state(&rng, test_buffer, test_buflen);
105 static void crc_suite_exit(struct kunit_suite *suite)
111 /* Generate a random initial CRC. */
112 static u64 generate_random_initial_crc(const struct crc_variant *v)
114 switch (rand32() % 4) {
118 return crc_mask(v); /* All 1 bits */
120 return rand64() & crc_mask(v);
124 /* Generate a random length, preferring small lengths. */
125 static size_t generate_random_length(size_t max_length)
129 switch (rand32() % 3) {
131 len = rand32() % 128;
134 len = rand32() % 3072;
140 return len % (max_length + 1);
143 /* Test that v->func gives the same CRCs as a reference implementation. */
144 static void crc_main_test(struct kunit *test, const struct crc_variant *v)
148 for (i = 0; i < CRC_KUNIT_NUM_TEST_ITERS; i++) {
149 u64 init_crc, expected_crc, actual_crc;
153 init_crc = generate_random_initial_crc(v);
154 len = generate_random_length(CRC_KUNIT_MAX_LEN);
156 /* Generate a random offset. */
157 if (rand32() % 2 == 0) {
158 /* Use a random alignment mod 64 */
159 offset = rand32() % 64;
160 offset = min(offset, CRC_KUNIT_MAX_LEN - len);
162 /* Go up to the guard page, to catch buffer overreads */
163 offset = test_buflen - len;
166 if (rand32() % 8 == 0)
167 /* Refresh the data occasionally. */
168 prandom_bytes_state(&rng, &test_buffer[offset], len);
170 nosimd = rand32() % 8 == 0;
173 * Compute the CRC, and verify that it equals the CRC computed
174 * by a simple bit-at-a-time reference implementation.
176 expected_crc = crc_ref(v, init_crc, &test_buffer[offset], len);
179 actual_crc = v->func(init_crc, &test_buffer[offset], len);
182 KUNIT_EXPECT_EQ_MSG(test, expected_crc, actual_crc,
183 "Wrong result with len=%zu offset=%zu nosimd=%d",
184 len, offset, nosimd);
188 /* Test that CRC(concat(A, B)) == combine_CRCs(CRC(A), CRC(B), len(B)). */
189 static void crc_combine_test(struct kunit *test, const struct crc_variant *v)
193 for (i = 0; i < 100; i++) {
194 u64 init_crc = generate_random_initial_crc(v);
195 size_t len1 = generate_random_length(CRC_KUNIT_MAX_LEN);
196 size_t len2 = generate_random_length(CRC_KUNIT_MAX_LEN - len1);
197 u64 crc1, crc2, expected_crc, actual_crc;
199 prandom_bytes_state(&rng, test_buffer, len1 + len2);
200 crc1 = v->func(init_crc, test_buffer, len1);
201 crc2 = v->func(0, &test_buffer[len1], len2);
202 expected_crc = v->func(init_crc, test_buffer, len1 + len2);
203 actual_crc = v->combine_func(crc1, crc2, len2);
204 KUNIT_EXPECT_EQ_MSG(test, expected_crc, actual_crc,
205 "CRC combination gave wrong result with len1=%zu len2=%zu\n",
210 static void crc_test(struct kunit *test, const struct crc_variant *v)
212 crc_main_test(test, v);
214 crc_combine_test(test, v);
217 static __always_inline void
218 crc_benchmark(struct kunit *test,
219 u64 (*crc_func)(u64 crc, const u8 *p, size_t len))
221 static const size_t lens_to_test[] = {
222 1, 16, 64, 127, 128, 200, 256, 511, 512, 1024, 3173, 4096, 16384,
224 size_t len, i, j, num_iters;
226 * Some of the CRC library functions are marked as __pure, so use
227 * volatile to ensure that all calls are really made as intended.
229 volatile u64 crc = 0;
232 if (!IS_ENABLED(CONFIG_CRC_BENCHMARK))
233 kunit_skip(test, "not enabled");
236 for (i = 0; i < 10000000; i += CRC_KUNIT_MAX_LEN)
237 crc = crc_func(crc, test_buffer, CRC_KUNIT_MAX_LEN);
239 for (i = 0; i < ARRAY_SIZE(lens_to_test); i++) {
240 len = lens_to_test[i];
241 KUNIT_ASSERT_LE(test, len, CRC_KUNIT_MAX_LEN);
242 num_iters = 10000000 / (len + 128);
245 for (j = 0; j < num_iters; j++)
246 crc = crc_func(crc, test_buffer, len);
247 t = ktime_get_ns() - t;
249 kunit_info(test, "len=%zu: %llu MB/s\n",
250 len, div64_u64((u64)len * num_iters * 1000, t));
256 static u64 crc16_wrapper(u64 crc, const u8 *p, size_t len)
258 return crc16(crc, p, len);
261 static const struct crc_variant crc_variant_crc16 = {
265 .func = crc16_wrapper,
268 static void crc16_test(struct kunit *test)
270 crc_test(test, &crc_variant_crc16);
273 static void crc16_benchmark(struct kunit *test)
275 crc_benchmark(test, crc16_wrapper);
280 static u64 crc_t10dif_wrapper(u64 crc, const u8 *p, size_t len)
282 return crc_t10dif_update(crc, p, len);
285 static const struct crc_variant crc_variant_crc_t10dif = {
289 .func = crc_t10dif_wrapper,
292 static void crc_t10dif_test(struct kunit *test)
294 crc_test(test, &crc_variant_crc_t10dif);
297 static void crc_t10dif_benchmark(struct kunit *test)
299 crc_benchmark(test, crc_t10dif_wrapper);
304 static u64 crc32_le_wrapper(u64 crc, const u8 *p, size_t len)
306 return crc32_le(crc, p, len);
309 static u64 crc32_le_combine_wrapper(u64 crc1, u64 crc2, size_t len2)
311 return crc32_le_combine(crc1, crc2, len2);
314 static const struct crc_variant crc_variant_crc32_le = {
318 .func = crc32_le_wrapper,
319 .combine_func = crc32_le_combine_wrapper,
322 static void crc32_le_test(struct kunit *test)
324 crc_test(test, &crc_variant_crc32_le);
327 static void crc32_le_benchmark(struct kunit *test)
329 crc_benchmark(test, crc32_le_wrapper);
334 static u64 crc32_be_wrapper(u64 crc, const u8 *p, size_t len)
336 return crc32_be(crc, p, len);
339 static const struct crc_variant crc_variant_crc32_be = {
343 .func = crc32_be_wrapper,
346 static void crc32_be_test(struct kunit *test)
348 crc_test(test, &crc_variant_crc32_be);
351 static void crc32_be_benchmark(struct kunit *test)
353 crc_benchmark(test, crc32_be_wrapper);
358 static u64 crc32c_wrapper(u64 crc, const u8 *p, size_t len)
360 return crc32c(crc, p, len);
363 static u64 crc32c_combine_wrapper(u64 crc1, u64 crc2, size_t len2)
365 return __crc32c_le_combine(crc1, crc2, len2);
368 static const struct crc_variant crc_variant_crc32c = {
372 .func = crc32c_wrapper,
373 .combine_func = crc32c_combine_wrapper,
376 static void crc32c_test(struct kunit *test)
378 crc_test(test, &crc_variant_crc32c);
381 static void crc32c_benchmark(struct kunit *test)
383 crc_benchmark(test, crc32c_wrapper);
388 static u64 crc64_be_wrapper(u64 crc, const u8 *p, size_t len)
390 return crc64_be(crc, p, len);
393 static const struct crc_variant crc_variant_crc64_be = {
396 .poly = 0x42f0e1eba9ea3693,
397 .func = crc64_be_wrapper,
400 static void crc64_be_test(struct kunit *test)
402 crc_test(test, &crc_variant_crc64_be);
405 static void crc64_be_benchmark(struct kunit *test)
407 crc_benchmark(test, crc64_be_wrapper);
410 static struct kunit_case crc_test_cases[] = {
411 KUNIT_CASE(crc16_test),
412 KUNIT_CASE(crc16_benchmark),
413 KUNIT_CASE(crc_t10dif_test),
414 KUNIT_CASE(crc_t10dif_benchmark),
415 KUNIT_CASE(crc32_le_test),
416 KUNIT_CASE(crc32_le_benchmark),
417 KUNIT_CASE(crc32_be_test),
418 KUNIT_CASE(crc32_be_benchmark),
419 KUNIT_CASE(crc32c_test),
420 KUNIT_CASE(crc32c_benchmark),
421 KUNIT_CASE(crc64_be_test),
422 KUNIT_CASE(crc64_be_benchmark),
426 static struct kunit_suite crc_test_suite = {
428 .test_cases = crc_test_cases,
429 .suite_init = crc_suite_init,
430 .suite_exit = crc_suite_exit,
432 kunit_test_suite(crc_test_suite);
434 MODULE_DESCRIPTION("Unit tests and benchmarks for the CRC library functions");
435 MODULE_LICENSE("GPL");