* See the COPYING file in the top-level directory.
*/
-#include <glib.h>
-#include <stdarg.h>
-#include <string.h>
-#include <sys/types.h>
+#include "qemu/osdep.h"
#include "qemu/hbitmap.h"
+#include "qemu/bitmap.h"
+#include "block/block.h"
#define LOG_BITS_PER_LONG (BITS_PER_LONG == 32 ? 5 : 6)
typedef struct TestHBitmapData {
HBitmap *hb;
+ HBitmap *meta;
unsigned long *bits;
size_t size;
size_t old_size;
size_t n;
data->hb = hbitmap_alloc(size, granularity);
- n = (size + BITS_PER_LONG - 1) / BITS_PER_LONG;
+ n = DIV_ROUND_UP(size, BITS_PER_LONG);
if (n == 0) {
n = 1;
}
}
}
+/* Initialize the test bitmap as usual, then attach a meta bitmap that
+ * records changes of the main bitmap at @meta_chunk-bit granularity. */
+static void hbitmap_test_init_meta(TestHBitmapData *data,
+                                   uint64_t size, int granularity,
+                                   int meta_chunk)
+{
+    hbitmap_test_init(data, size, granularity);
+    data->meta = hbitmap_create_meta(data->hb, meta_chunk);
+}
+
+/* Number of unsigned longs backing a bitmap of @bits bits (at least 1,
+ * even for an empty bitmap). */
 static inline size_t hbitmap_test_array_size(size_t bits)
 {
-    size_t n = (bits + BITS_PER_LONG - 1) / BITS_PER_LONG;
+    size_t n = DIV_ROUND_UP(bits, BITS_PER_LONG);
     return n ? n : 1;
 }
const void *unused)
{
if (data->hb) {
+ if (data->meta) {
+ hbitmap_free_meta(data->hb);
+ }
hbitmap_free(data->hb);
data->hb = NULL;
}
- if (data->bits) {
- g_free(data->bits);
- data->bits = NULL;
- }
+ g_free(data->bits);
+ data->bits = NULL;
}
/* Set a range in the HBitmap and in the shadow "simple" bitmap.
}
}
+/* Clear the HBitmap and the shadow "simple" bitmap together, then
+ * cross-check them (only when granularity is 0, where they map 1:1). */
+static void hbitmap_test_reset_all(TestHBitmapData *data)
+{
+    size_t n;
+
+    hbitmap_reset_all(data->hb);
+
+    n = DIV_ROUND_UP(data->size, BITS_PER_LONG);
+    if (n == 0) {
+        /* Even a zero-sized bitmap is backed by one word. */
+        n = 1;
+    }
+    memset(data->bits, 0, n * sizeof(unsigned long));
+
+    if (data->granularity == 0) {
+        hbitmap_test_check(data, 0);
+    }
+}
+
static void hbitmap_test_check_get(TestHBitmapData *data)
{
uint64_t count = 0;
hbitmap_test_set(data, L3 / 2, L3);
}
+/* Exercise hbitmap_reset_all() after setting ranges chosen around the
+ * L1/L2/L3 boundaries (per-level sizes defined earlier in this file),
+ * so that every level of the hierarchy gets dirtied and cleared. */
+static void test_hbitmap_reset_all(TestHBitmapData *data,
+                                   const void *unused)
+{
+    hbitmap_test_init(data, L3 * 2, 0);
+    hbitmap_test_set(data, L1 - 1, L1 + 2);
+    hbitmap_test_reset_all(data);
+    hbitmap_test_set(data, 0, L1 * 3);
+    hbitmap_test_reset_all(data);
+    hbitmap_test_set(data, L2, L1);
+    hbitmap_test_reset_all(data);
+    hbitmap_test_set(data, L2, L3 - L2 + 1);
+    hbitmap_test_reset_all(data);
+    hbitmap_test_set(data, L3 - 1, 3);
+    hbitmap_test_reset_all(data);
+    hbitmap_test_set(data, 0, L3 * 2);
+    hbitmap_test_reset_all(data);
+    hbitmap_test_set(data, L3 / 2, L3);
+    hbitmap_test_reset_all(data);
+}
+
static void test_hbitmap_granularity(TestHBitmapData *data,
const void *unused)
{
hbitmap_test_truncate(data, size, -diff, 0);
}
+/* Assert that exactly the meta-bitmap bits in [start, start + count)
+ * are set, and every other bit up to data->size is clear.
+ * (start, count) == (0, 0) therefore asserts the meta is fully clear. */
+static void hbitmap_check_meta(TestHBitmapData *data,
+                               int64_t start, int count)
+{
+    int64_t i;
+
+    for (i = 0; i < data->size; i++) {
+        if (i >= start && i < start + count) {
+            g_assert(hbitmap_get(data->meta, i));
+        } else {
+            g_assert(!hbitmap_get(data->meta, i));
+        }
+    }
+}
+
+/* Verify that the meta bitmap is dirtied exactly when bits of the main
+ * bitmap actually change state (unset<->set), and stays clean for no-op
+ * writes.  [check_start, check_start + check_count) is the meta range
+ * expected to be marked dirty by a real transition of [start, start +
+ * count) in the main bitmap. */
+static void hbitmap_test_meta(TestHBitmapData *data,
+                              int64_t start, int count,
+                              int64_t check_start, int check_count)
+{
+    hbitmap_reset_all(data->hb);
+    hbitmap_reset_all(data->meta);
+
+    /* Test "unset" -> "unset" will not update meta. */
+    hbitmap_reset(data->hb, start, count);
+    hbitmap_check_meta(data, 0, 0);
+
+    /* Test "unset" -> "set" will update meta */
+    hbitmap_set(data->hb, start, count);
+    hbitmap_check_meta(data, check_start, check_count);
+
+    /* Test "set" -> "set" will not update meta */
+    hbitmap_reset_all(data->meta);
+    hbitmap_set(data->hb, start, count);
+    hbitmap_check_meta(data, 0, 0);
+
+    /* Test "set" -> "unset" will update meta */
+    hbitmap_reset_all(data->meta);
+    hbitmap_reset(data->hb, start, count);
+    hbitmap_check_meta(data, check_start, check_count);
+}
+
+/* Run hbitmap_test_meta() over a bitmap of 100 chunks of @chunk_size
+ * bits each, probing single bits, exact chunks, ranges straddling one
+ * or two chunk boundaries, the last bit, and the full bitmap. */
+static void hbitmap_test_meta_do(TestHBitmapData *data, int chunk_size)
+{
+    uint64_t size = chunk_size * 100;
+    hbitmap_test_init_meta(data, size, 0, chunk_size);
+
+    hbitmap_test_meta(data, 0, 1, 0, chunk_size);
+    hbitmap_test_meta(data, 0, chunk_size, 0, chunk_size);
+    hbitmap_test_meta(data, chunk_size - 1, 1, 0, chunk_size);
+    hbitmap_test_meta(data, chunk_size - 1, 2, 0, chunk_size * 2);
+    hbitmap_test_meta(data, chunk_size - 1, chunk_size + 1, 0, chunk_size * 2);
+    hbitmap_test_meta(data, chunk_size - 1, chunk_size + 2, 0, chunk_size * 3);
+    hbitmap_test_meta(data, 7 * chunk_size - 1, chunk_size + 2,
+                      6 * chunk_size, chunk_size * 3);
+    hbitmap_test_meta(data, size - 1, 1, size - chunk_size, chunk_size);
+    hbitmap_test_meta(data, 0, size, 0, size);
+}
+
+/* Meta granularity of one byte's worth of bits. */
+static void test_hbitmap_meta_byte(TestHBitmapData *data, const void *unused)
+{
+    hbitmap_test_meta_do(data, BITS_PER_BYTE);
+}
+
+/* Meta granularity of one machine word's worth of bits. */
+static void test_hbitmap_meta_word(TestHBitmapData *data, const void *unused)
+{
+    hbitmap_test_meta_do(data, BITS_PER_LONG);
+}
+
+/* Meta granularity of one 512-byte sector's worth of bits. */
+static void test_hbitmap_meta_sector(TestHBitmapData *data, const void *unused)
+{
+    hbitmap_test_meta_do(data, BDRV_SECTOR_SIZE * BITS_PER_BYTE);
+}
+
+/**
+ * Meta bitmap with a one-bit chunk (meta_chunk == 1): every bit of the
+ * main bitmap maps to its own meta bit.  Probe offsets straddling the
+ * L1/L2/L3 level boundaries with 1-, L1- and L2-bit ranges.
+ */
+static void test_hbitmap_meta_one(TestHBitmapData *data, const void *unused)
+{
+    int i;
+    int64_t offsets[] = {
+        0, 1, L1 - 1, L1, L1 + 1, L2 - 1, L2, L2 + 1, L3 - 1, L3, L3 + 1
+    };
+
+    hbitmap_test_init_meta(data, L3 * 2, 0, 1);
+    for (i = 0; i < ARRAY_SIZE(offsets); i++) {
+        hbitmap_test_meta(data, offsets[i], 1, offsets[i], 1);
+        hbitmap_test_meta(data, offsets[i], L1, offsets[i], L1);
+        hbitmap_test_meta(data, offsets[i], L2, offsets[i], L2);
+    }
+}
+
+/* With granularity 3, the serialization alignment must be one word of
+ * virtual bits: 64 << 3. */
+static void test_hbitmap_serialize_align(TestHBitmapData *data,
+                                         const void *unused)
+{
+    int r;
+
+    hbitmap_test_init(data, L3 * 2, 3);
+    g_assert(hbitmap_is_serializable(data->hb));
+
+    r = hbitmap_serialization_align(data->hb);
+    g_assert_cmpint(r, ==, 64 << 3);
+}
+
+/* A zero-sized bitmap must yield an entirely clear meta bitmap. */
+static void test_hbitmap_meta_zero(TestHBitmapData *data, const void *unused)
+{
+    hbitmap_test_init_meta(data, 0, 0, 1);
+
+    hbitmap_check_meta(data, 0, 0);
+}
+
+/* Set [pos, pos + count) in a cleared bitmap, serialize it into @buf
+ * and verify the serialized bits; then serialize again, deserialize
+ * into a cleared bitmap, and verify the bitmap contents match.
+ * Requires granularity 0 so bit positions map 1:1. */
+static void hbitmap_test_serialize_range(TestHBitmapData *data,
+                                         uint8_t *buf, size_t buf_size,
+                                         uint64_t pos, uint64_t count)
+{
+    size_t i;
+    unsigned long *el = (unsigned long *)buf;
+
+    assert(hbitmap_granularity(data->hb) == 0);
+    hbitmap_reset_all(data->hb);
+    memset(buf, 0, buf_size);
+    if (count) {
+        hbitmap_set(data->hb, pos, count);
+    }
+
+    g_assert(hbitmap_is_serializable(data->hb));
+    hbitmap_serialize_part(data->hb, buf, 0, data->size);
+
+    /* Serialized buffer is inherently LE, convert it back manually to test */
+    for (i = 0; i < buf_size / sizeof(unsigned long); i++) {
+        el[i] = (BITS_PER_LONG == 32 ? le32_to_cpu(el[i]) : le64_to_cpu(el[i]));
+    }
+
+    for (i = 0; i < data->size; i++) {
+        int is_set = test_bit(i, (unsigned long *)buf);
+        if (i >= pos && i < pos + count) {
+            g_assert(is_set);
+        } else {
+            g_assert(!is_set);
+        }
+    }
+
+    /* Re-serialize for deserialization testing */
+    memset(buf, 0, buf_size);
+    hbitmap_serialize_part(data->hb, buf, 0, data->size);
+    hbitmap_reset_all(data->hb);
+
+    g_assert(hbitmap_is_serializable(data->hb));
+    hbitmap_deserialize_part(data->hb, buf, 0, data->size, true);
+
+    for (i = 0; i < data->size; i++) {
+        int is_set = hbitmap_get(data->hb, i);
+        if (i >= pos && i < pos + count) {
+            g_assert(is_set);
+        } else {
+            g_assert(!is_set);
+        }
+    }
+}
+
+/* Serialize/deserialize round-trips for every (start, length) pair
+ * drawn from positions around the L1/L2/L3 level boundaries; lengths
+ * are clamped so ranges never run past the end of the bitmap. */
+static void test_hbitmap_serialize_basic(TestHBitmapData *data,
+                                         const void *unused)
+{
+    int i, j;
+    size_t buf_size;
+    uint8_t *buf;
+    uint64_t positions[] = { 0, 1, L1 - 1, L1, L2 - 1, L2, L2 + 1, L3 - 1 };
+    int num_positions = ARRAY_SIZE(positions);
+
+    hbitmap_test_init(data, L3, 0);
+    g_assert(hbitmap_is_serializable(data->hb));
+    buf_size = hbitmap_serialization_size(data->hb, 0, data->size);
+    buf = g_malloc0(buf_size);
+
+    for (i = 0; i < num_positions; i++) {
+        for (j = 0; j < num_positions; j++) {
+            hbitmap_test_serialize_range(data, buf, buf_size,
+                                         positions[i],
+                                         MIN(positions[j], L3 - positions[i]));
+        }
+    }
+
+    g_free(buf);
+}
+
+/* Serialize the bitmap chunk by chunk (L2 bits at a time) and verify
+ * each chunk contains exactly the single bits set at the probe
+ * positions that fall inside it. */
+static void test_hbitmap_serialize_part(TestHBitmapData *data,
+                                        const void *unused)
+{
+    int i, j, k;
+    size_t buf_size;
+    uint8_t *buf;
+    uint64_t positions[] = { 0, 1, L1 - 1, L1, L2 - 1, L2, L2 + 1, L3 - 1 };
+    int num_positions = ARRAY_SIZE(positions);
+
+    hbitmap_test_init(data, L3, 0);
+    buf_size = L2;
+    buf = g_malloc0(buf_size);
+
+    for (i = 0; i < num_positions; i++) {
+        hbitmap_set(data->hb, positions[i], 1);
+    }
+
+    g_assert(hbitmap_is_serializable(data->hb));
+
+    for (i = 0; i < data->size; i += buf_size) {
+        unsigned long *el = (unsigned long *)buf;
+        hbitmap_serialize_part(data->hb, buf, i, buf_size);
+        /* Serialized data is little-endian; convert back for test_bit(). */
+        for (j = 0; j < buf_size / sizeof(unsigned long); j++) {
+            el[j] = (BITS_PER_LONG == 32 ? le32_to_cpu(el[j]) : le64_to_cpu(el[j]));
+        }
+
+        for (j = 0; j < buf_size; j++) {
+            bool should_set = false;
+            for (k = 0; k < num_positions; k++) {
+                if (positions[k] == j + i) {
+                    should_set = true;
+                    break;
+                }
+            }
+            g_assert_cmpint(should_set, ==, test_bit(j, (unsigned long *)buf));
+        }
+    }
+
+    g_free(buf);
+}
+
+/* hbitmap_deserialize_zeroes() must clear each L1-sized set range in
+ * turn: after clearing range i, the first remaining dirty bit must be
+ * the start of range i + 1 (or none at all after the last range). */
+static void test_hbitmap_serialize_zeroes(TestHBitmapData *data,
+                                          const void *unused)
+{
+    int i;
+    HBitmapIter iter;
+    int64_t next;
+    /* Ranges are at least 64 bits apart so they never overlap. */
+    uint64_t min_l1 = MAX(L1, 64);
+    uint64_t positions[] = { 0, min_l1, L2, L3 - min_l1};
+    int num_positions = ARRAY_SIZE(positions);
+
+    hbitmap_test_init(data, L3, 0);
+
+    for (i = 0; i < num_positions; i++) {
+        hbitmap_set(data->hb, positions[i], L1);
+    }
+
+    g_assert(hbitmap_is_serializable(data->hb));
+
+    for (i = 0; i < num_positions; i++) {
+        hbitmap_deserialize_zeroes(data->hb, positions[i], min_l1, true);
+        hbitmap_iter_init(&iter, data->hb, 0);
+        next = hbitmap_iter_next(&iter);
+        if (i == num_positions - 1) {
+            g_assert_cmpint(next, ==, -1);
+        } else {
+            g_assert_cmpint(next, ==, positions[i + 1]);
+        }
+    }
+}
+
static void hbitmap_test_add(const char *testpath,
void (*test_func)(TestHBitmapData *data, const void *user_data))
{
hbitmap_test_teardown);
}
+/* Start an iterator, reset the whole bitmap, then advance the iterator
+ * again.  No assertion: the test passes if this sequence does not
+ * crash or misbehave under the sanitizers. */
+static void test_hbitmap_iter_and_reset(TestHBitmapData *data,
+                                        const void *unused)
+{
+    HBitmapIter hbi;
+
+    hbitmap_test_init(data, L1 * 2, 0);
+    hbitmap_set(data->hb, 0, data->size);
+
+    /* Start just before a word boundary so the iterator holds state. */
+    hbitmap_iter_init(&hbi, data->hb, BITS_PER_LONG - 1);
+
+    hbitmap_iter_next(&hbi);
+
+    hbitmap_reset_all(data->hb);
+    hbitmap_iter_next(&hbi);
+}
+
+/* Compare hbitmap_next_zero() over [start, start + count) against a
+ * naive linear scan of the same bitmap; both must agree on the first
+ * clear bit, or on -1 when the whole range is set. */
+static void test_hbitmap_next_zero_check_range(TestHBitmapData *data,
+                                               uint64_t start,
+                                               uint64_t count)
+{
+    int64_t ret1 = hbitmap_next_zero(data->hb, start, count);
+    int64_t ret2 = start;
+    /* Clamp the scan end to the bitmap size, avoiding overflow. */
+    int64_t end = start >= data->size || data->size - start < count ?
+                  data->size : start + count;
+
+    for ( ; ret2 < end && hbitmap_get(data->hb, ret2); ret2++) {
+        ;
+    }
+    if (ret2 == end) {
+        ret2 = -1;
+    }
+
+    g_assert_cmpint(ret1, ==, ret2);
+}
+
+/* Unbounded variant: scan from @start to the end of the bitmap. */
+static void test_hbitmap_next_zero_check(TestHBitmapData *data, int64_t start)
+{
+    test_hbitmap_next_zero_check_range(data, start, UINT64_MAX);
+}
+
+/* Probe hbitmap_next_zero() around the edges of progressively larger
+ * set regions (a single bit, a small run, the upper half, everything)
+ * at the given granularity. */
+static void test_hbitmap_next_zero_do(TestHBitmapData *data, int granularity)
+{
+    hbitmap_test_init(data, L3, granularity);
+    test_hbitmap_next_zero_check(data, 0);
+    test_hbitmap_next_zero_check(data, L3 - 1);
+    test_hbitmap_next_zero_check_range(data, 0, 1);
+    test_hbitmap_next_zero_check_range(data, L3 - 1, 1);
+
+    hbitmap_set(data->hb, L2, 1);
+    test_hbitmap_next_zero_check(data, 0);
+    test_hbitmap_next_zero_check(data, L2 - 1);
+    test_hbitmap_next_zero_check(data, L2);
+    test_hbitmap_next_zero_check(data, L2 + 1);
+    test_hbitmap_next_zero_check_range(data, 0, 1);
+    test_hbitmap_next_zero_check_range(data, 0, L2);
+    test_hbitmap_next_zero_check_range(data, L2 - 1, 1);
+    test_hbitmap_next_zero_check_range(data, L2 - 1, 2);
+    test_hbitmap_next_zero_check_range(data, L2, 1);
+    test_hbitmap_next_zero_check_range(data, L2 + 1, 1);
+
+    hbitmap_set(data->hb, L2 + 5, L1);
+    test_hbitmap_next_zero_check(data, 0);
+    test_hbitmap_next_zero_check(data, L2 + 1);
+    test_hbitmap_next_zero_check(data, L2 + 2);
+    test_hbitmap_next_zero_check(data, L2 + 5);
+    test_hbitmap_next_zero_check(data, L2 + L1 - 1);
+    test_hbitmap_next_zero_check(data, L2 + L1);
+    test_hbitmap_next_zero_check_range(data, L2, 6);
+    test_hbitmap_next_zero_check_range(data, L2 + 1, 3);
+    test_hbitmap_next_zero_check_range(data, L2 + 4, L1);
+    test_hbitmap_next_zero_check_range(data, L2 + 5, L1);
+
+    hbitmap_set(data->hb, L2 * 2, L3 - L2 * 2);
+    test_hbitmap_next_zero_check(data, L2 * 2 - L1);
+    test_hbitmap_next_zero_check(data, L2 * 2 - 2);
+    test_hbitmap_next_zero_check(data, L2 * 2 - 1);
+    test_hbitmap_next_zero_check(data, L2 * 2);
+    test_hbitmap_next_zero_check(data, L3 - 1);
+    test_hbitmap_next_zero_check_range(data, L2 * 2 - L1, L1 + 1);
+    test_hbitmap_next_zero_check_range(data, L2 * 2, L2);
+
+    hbitmap_set(data->hb, 0, L3);
+    test_hbitmap_next_zero_check(data, 0);
+}
+
+/* next_zero with granularity 0 (one bit per virtual bit). */
+static void test_hbitmap_next_zero_0(TestHBitmapData *data, const void *unused)
+{
+    test_hbitmap_next_zero_do(data, 0);
+}
+
+/* next_zero with granularity 4 (16 virtual bits per stored bit). */
+static void test_hbitmap_next_zero_4(TestHBitmapData *data, const void *unused)
+{
+    test_hbitmap_next_zero_do(data, 4);
+}
+
+/* Compare hbitmap_next_dirty_area() over [offset, offset + count)
+ * against a naive scan: first find the first dirty bit, then the
+ * length of the contiguous dirty run.  When nothing is found, the
+ * in/out arguments must be left unchanged, mirroring the API. */
+static void test_hbitmap_next_dirty_area_check(TestHBitmapData *data,
+                                               uint64_t offset,
+                                               uint64_t count)
+{
+    uint64_t off1, off2;
+    uint64_t len1 = 0, len2;
+    bool ret1, ret2;
+    int64_t end;
+
+    off1 = offset;
+    len1 = count;
+    ret1 = hbitmap_next_dirty_area(data->hb, &off1, &len1);
+
+    /* Clamp the scan end to the bitmap size, avoiding overflow. */
+    end = offset > data->size || data->size - offset < count ? data->size :
+                                                               offset + count;
+
+    /* Find the first dirty bit at or after @offset. */
+    for (off2 = offset; off2 < end && !hbitmap_get(data->hb, off2); off2++) {
+        ;
+    }
+
+    /* Measure the length of the contiguous dirty run from off2. */
+    for (len2 = 1; off2 + len2 < end && hbitmap_get(data->hb, off2 + len2);
+         len2++) {
+        ;
+    }
+
+    ret2 = off2 < end;
+    if (!ret2) {
+        /* leave unchanged */
+        off2 = offset;
+        len2 = count;
+    }
+
+    g_assert_cmpint(ret1, ==, ret2);
+    g_assert_cmpint(off1, ==, off2);
+    g_assert_cmpint(len1, ==, len2);
+}
+
+/* Probe hbitmap_next_dirty_area() around the edges of progressively
+ * larger dirty regions (one bit, a short run, the upper half, all of
+ * the bitmap), including zero-length and unbounded queries. */
+static void test_hbitmap_next_dirty_area_do(TestHBitmapData *data,
+                                            int granularity)
+{
+    hbitmap_test_init(data, L3, granularity);
+    test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
+    test_hbitmap_next_dirty_area_check(data, 0, 1);
+    test_hbitmap_next_dirty_area_check(data, L3 - 1, 1);
+
+    hbitmap_set(data->hb, L2, 1);
+    test_hbitmap_next_dirty_area_check(data, 0, 1);
+    test_hbitmap_next_dirty_area_check(data, 0, L2);
+    test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
+    test_hbitmap_next_dirty_area_check(data, L2 - 1, UINT64_MAX);
+    test_hbitmap_next_dirty_area_check(data, L2 - 1, 1);
+    test_hbitmap_next_dirty_area_check(data, L2 - 1, 2);
+    test_hbitmap_next_dirty_area_check(data, L2 - 1, 3);
+    test_hbitmap_next_dirty_area_check(data, L2, UINT64_MAX);
+    test_hbitmap_next_dirty_area_check(data, L2, 1);
+    test_hbitmap_next_dirty_area_check(data, L2 + 1, 1);
+
+    hbitmap_set(data->hb, L2 + 5, L1);
+    test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
+    test_hbitmap_next_dirty_area_check(data, L2 - 2, 8);
+    test_hbitmap_next_dirty_area_check(data, L2 + 1, 5);
+    test_hbitmap_next_dirty_area_check(data, L2 + 1, 3);
+    test_hbitmap_next_dirty_area_check(data, L2 + 4, L1);
+    test_hbitmap_next_dirty_area_check(data, L2 + 5, L1);
+    test_hbitmap_next_dirty_area_check(data, L2 + 7, L1);
+    test_hbitmap_next_dirty_area_check(data, L2 + L1, L1);
+    test_hbitmap_next_dirty_area_check(data, L2, 0);
+    test_hbitmap_next_dirty_area_check(data, L2 + 1, 0);
+
+    hbitmap_set(data->hb, L2 * 2, L3 - L2 * 2);
+    test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
+    test_hbitmap_next_dirty_area_check(data, L2, UINT64_MAX);
+    test_hbitmap_next_dirty_area_check(data, L2 + 1, UINT64_MAX);
+    test_hbitmap_next_dirty_area_check(data, L2 + 5 + L1 - 1, UINT64_MAX);
+    test_hbitmap_next_dirty_area_check(data, L2 + 5 + L1, 5);
+    test_hbitmap_next_dirty_area_check(data, L2 * 2 - L1, L1 + 1);
+    test_hbitmap_next_dirty_area_check(data, L2 * 2, L2);
+
+    hbitmap_set(data->hb, 0, L3);
+    test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
+}
+
+/* next_dirty_area at granularity 0, 1 and 4 respectively. */
+static void test_hbitmap_next_dirty_area_0(TestHBitmapData *data,
+                                           const void *unused)
+{
+    test_hbitmap_next_dirty_area_do(data, 0);
+}
+
+static void test_hbitmap_next_dirty_area_1(TestHBitmapData *data,
+                                           const void *unused)
+{
+    test_hbitmap_next_dirty_area_do(data, 1);
+}
+
+static void test_hbitmap_next_dirty_area_4(TestHBitmapData *data,
+                                           const void *unused)
+{
+    test_hbitmap_next_dirty_area_do(data, 4);
+}
+
int main(int argc, char **argv)
{
g_test_init(&argc, &argv, NULL);
hbitmap_test_add("/hbitmap/set/overlap", test_hbitmap_set_overlap);
hbitmap_test_add("/hbitmap/reset/empty", test_hbitmap_reset_empty);
hbitmap_test_add("/hbitmap/reset/general", test_hbitmap_reset);
+ hbitmap_test_add("/hbitmap/reset/all", test_hbitmap_reset_all);
hbitmap_test_add("/hbitmap/granularity", test_hbitmap_granularity);
hbitmap_test_add("/hbitmap/truncate/nop", test_hbitmap_truncate_nop);
test_hbitmap_truncate_grow_large);
hbitmap_test_add("/hbitmap/truncate/shrink/large",
test_hbitmap_truncate_shrink_large);
+
+ hbitmap_test_add("/hbitmap/meta/zero", test_hbitmap_meta_zero);
+ hbitmap_test_add("/hbitmap/meta/one", test_hbitmap_meta_one);
+ hbitmap_test_add("/hbitmap/meta/byte", test_hbitmap_meta_byte);
+ hbitmap_test_add("/hbitmap/meta/word", test_hbitmap_meta_word);
+ hbitmap_test_add("/hbitmap/meta/sector", test_hbitmap_meta_sector);
+
+ hbitmap_test_add("/hbitmap/serialize/align",
+ test_hbitmap_serialize_align);
+ hbitmap_test_add("/hbitmap/serialize/basic",
+ test_hbitmap_serialize_basic);
+ hbitmap_test_add("/hbitmap/serialize/part",
+ test_hbitmap_serialize_part);
+ hbitmap_test_add("/hbitmap/serialize/zeroes",
+ test_hbitmap_serialize_zeroes);
+
+ hbitmap_test_add("/hbitmap/iter/iter_and_reset",
+ test_hbitmap_iter_and_reset);
+
+ hbitmap_test_add("/hbitmap/next_zero/next_zero_0",
+ test_hbitmap_next_zero_0);
+ hbitmap_test_add("/hbitmap/next_zero/next_zero_4",
+ test_hbitmap_next_zero_4);
+
+ hbitmap_test_add("/hbitmap/next_dirty_area/next_dirty_area_0",
+ test_hbitmap_next_dirty_area_0);
+ hbitmap_test_add("/hbitmap/next_dirty_area/next_dirty_area_1",
+ test_hbitmap_next_dirty_area_1);
+ hbitmap_test_add("/hbitmap/next_dirty_area/next_dirty_area_4",
+ test_hbitmap_next_dirty_area_4);
+
g_test_run();
return 0;