From 1a2a7e927679d3bd003a2743b2ced9e5f7af00a2 Mon Sep 17 00:00:00 2001
From: Michal Mielewczyk
Date: Thu, 3 Oct 2024 12:21:02 +0200
Subject: [PATCH 1/5] Update TF: User-defined timeouts for FS utils

Signed-off-by: Michal Mielewczyk
---
 test/functional/test-framework | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/functional/test-framework b/test/functional/test-framework
index 42ebe34da..acedafb5a 160000
--- a/test/functional/test-framework
+++ b/test/functional/test-framework
@@ -1 +1 @@
-Subproject commit 42ebe34da3e6e10f823c24db7954675978298fe0
+Subproject commit acedafb5afec1ff346d97aa9db5486caf8acc032

From d456674c9aff908715cc87a23552b7ab1aafba33 Mon Sep 17 00:00:00 2001
From: Michal Mielewczyk
Date: Wed, 2 Oct 2024 17:03:21 +0200
Subject: [PATCH 2/5] tests: fix test_flush_over_640_gibibytes_with_fs

The test requires a significant amount of space (640G) to prepare a
file on a separate (non-cached) filesystem. Creating the file in rootfs
is prone to hard-to-diagnose errors due to limited space. To make sure
that the test has all the required space available, create the file on
a separate disk.

Signed-off-by: Michal Mielewczyk
---
 .../lazy_writes/test_flush_huge_dirty_data.py | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py b/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
index 59d3794fd..133c8ca42 100644
--- a/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
+++ b/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
@@ -1,5 +1,6 @@
 #
 # Copyright(c) 2020-2021 Intel Corporation
+# Copyright(c) 2024 Huawei Technologies
 # SPDX-License-Identifier: BSD-3-Clause
 #
 
@@ -28,6 +29,7 @@
 
 @pytest.mark.parametrizex("fs", Filesystem)
 @pytest.mark.parametrizex("cache_mode", CacheMode.with_traits(CacheModeTrait.LazyWrites))
+@pytest.mark.require_disk("separate_dev", DiskTypeSet([DiskType.optane, DiskType.nand]))
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
 @pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
 def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
@@ -39,6 +41,8 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
           - Flushing completes successfully without any errors.
     """
     with TestRun.step("Prepare devices for cache and core."):
+        separate_dev = TestRun.disks['separate_dev']
+        check_disk_size(separate_dev)
         cache_dev = TestRun.disks['cache']
         check_disk_size(cache_dev)
         cache_dev.create_partitions([required_disk_size])
@@ -59,8 +63,12 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
         cache.set_cleaning_policy(CleaningPolicy.nop)
         cache.set_seq_cutoff_policy(SeqCutOffPolicy.never)
 
-    with TestRun.step("Create test file"):
-        test_file_main = File.create_file("/tmp/test_file_main")
+    with TestRun.step("Create a test file on a separate disk"):
+        src_dir_path = "/mnt/flush_640G_test"
+        separate_dev.create_filesystem(fs)
+        separate_dev.mount(src_dir_path)
+
+        test_file_main = File.create_file(f"{src_dir_path}/test_file_main")
         fio = (
             Fio().create_command()
             .io_engine(IoEngine.libaio)
@@ -110,6 +118,10 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
         core_dev.unmount()
         remove(mnt_point, True, True, True)
 
+    with TestRun.step("Unmount the additional device."):
+        separate_dev.unmount()
+        remove(src_dir_path, True, True, True)
+
 @pytest.mark.parametrizex("cache_mode", CacheMode.with_traits(CacheModeTrait.LazyWrites))
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
 @pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))

From f2f98226d8be71038cb2416920c805923723b694 Mon Sep 17 00:00:00 2001
From: Michal Mielewczyk
Date: Thu, 3 Oct 2024 08:45:02 +0200
Subject: [PATCH 3/5] tests: Fix test_flush_over_640_gibibytes_with_fs

As crc32 is supposed to be slightly faster than md5, use it to detect
data corruption.

Signed-off-by: Michal Mielewczyk
---
 .../tests/lazy_writes/test_flush_huge_dirty_data.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py b/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
index 133c8ca42..e7d32c086 100644
--- a/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
+++ b/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
@@ -83,10 +83,10 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
         fio.run()
         test_file_main.refresh_item()
 
-    with TestRun.step("Validate test file and read its md5 sum."):
+    with TestRun.step("Validate test file and read its crc32 sum."):
         if test_file_main.size != file_size:
             TestRun.fail("Created test file hasn't reached its target size.")
-        test_file_md5sum_main = test_file_main.md5sum()
+        test_file_crc32sum_main = test_file_main.crc32sum()
 
     with TestRun.step("Write data to exported object."):
         test_file_copy = test_file_main.copy(mnt_point + "test_file_copy")
@@ -105,9 +105,9 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
         if output.exit_code != 0:
             TestRun.fail(f"Stopping cache with flush failed!\n{output.stderr}")
 
-    with TestRun.step("Mount core device and check md5 sum of test file copy."):
+    with TestRun.step("Mount core device and check crc32 sum of test file copy."):
         core_dev.mount(mnt_point)
-        if test_file_md5sum_main != test_file_copy.md5sum():
+        if test_file_crc32sum_main != test_file_copy.crc32sum():
             TestRun.LOGGER.error("Md5 sums should be equal.")
 
     with TestRun.step("Delete test files."):

From c41d5d71766ae4030d3ec798dbff0b66e08a878e Mon Sep 17 00:00:00 2001
From: Michal Mielewczyk
Date: Thu, 3 Oct 2024 08:49:44 +0200
Subject: [PATCH 4/5] tests: Fix test_flush_over_640_gibibytes_with_fs

Use a 4h timeout for copying the test file and calculating its crc.

Signed-off-by: Michal Mielewczyk
---
 .../tests/lazy_writes/test_flush_huge_dirty_data.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py b/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
index e7d32c086..7b3a19325 100644
--- a/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
+++ b/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
@@ -86,10 +86,12 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
     with TestRun.step("Validate test file and read its crc32 sum."):
         if test_file_main.size != file_size:
             TestRun.fail("Created test file hasn't reached its target size.")
-        test_file_crc32sum_main = test_file_main.crc32sum()
+        test_file_crc32sum_main = test_file_main.crc32sum(timeout=timedelta(hours=4))
 
     with TestRun.step("Write data to exported object."):
-        test_file_copy = test_file_main.copy(mnt_point + "test_file_copy")
+        test_file_copy = test_file_main.copy(
+            mnt_point + "test_file_copy", timeout=timedelta(hours=4)
+        )
         test_file_copy.refresh_item()
         sync()
 
@@ -107,7 +109,7 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
 
     with TestRun.step("Mount core device and check crc32 sum of test file copy."):
         core_dev.mount(mnt_point)
-        if test_file_crc32sum_main != test_file_copy.crc32sum():
+        if test_file_crc32sum_main != test_file_copy.crc32sum(timeout=timedelta(hours=4)):
             TestRun.LOGGER.error("Md5 sums should be equal.")
 
     with TestRun.step("Delete test files."):

From 1ae15bdb6b228303d53bde47c9e73b4cf161a2dc Mon Sep 17 00:00:00 2001
From: Michal Mielewczyk
Date: Thu, 3 Oct 2024 09:08:08 +0200
Subject: [PATCH 5/5] tests: Fix test_flush_over_640_gibibytes_with_fs

Don't interrupt the test if prerequisites are not met.

Signed-off-by: Michal Mielewczyk
---
 .../tests/lazy_writes/test_flush_huge_dirty_data.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py b/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
index 7b3a19325..5dfd3eeee 100644
--- a/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
+++ b/test/functional/tests/lazy_writes/test_flush_huge_dirty_data.py
@@ -85,7 +85,7 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
 
     with TestRun.step("Validate test file and read its crc32 sum."):
         if test_file_main.size != file_size:
-            TestRun.fail("Created test file hasn't reached its target size.")
+            TestRun.LOGGER.error(f"Expected test file size {file_size}. Got {test_file_main.size}")
         test_file_crc32sum_main = test_file_main.crc32sum(timeout=timedelta(hours=4))
 
     with TestRun.step("Write data to exported object."):
@@ -97,12 +97,16 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
 
     with TestRun.step(f"Check if dirty data exceeded {file_size * 0.98} GiB."):
         minimum_4KiB_blocks = int((file_size * 0.98).get_value(Unit.Blocks4096))
-        if int(cache.get_statistics().usage_stats.dirty) < minimum_4KiB_blocks:
-            TestRun.fail("There is not enough dirty data in the cache!")
+        actual_dirty_blocks = int(cache.get_statistics().usage_stats.dirty)
+        if actual_dirty_blocks < minimum_4KiB_blocks:
+            TestRun.LOGGER.error(
+                f"Expected at least: {minimum_4KiB_blocks} dirty blocks. "
+                f"Got {actual_dirty_blocks}"
+            )
 
     with TestRun.step("Unmount core and stop cache with flush."):
         core.unmount()
-        # this operation could take few hours, depending on core disk
+        # this operation could take a few hours, depending on the core disk
         output = TestRun.executor.run(stop_cmd(str(cache.cache_id)), timedelta(hours=12))
         if output.exit_code != 0:
            TestRun.fail(f"Stopping cache with flush failed!\n{output.stderr}")
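
Note on the timeout plumbing used above: patch 1 updates the test-framework
submodule so that filesystem utilities accept user-defined timeouts, which
patches 4 and 5 then pass as timeout=timedelta(hours=4). The sketch below is a
hypothetical illustration (not the actual test-framework code) of how such a
helper could forward the timeout to a remote executor; the executor.run(cmd,
timeout) interface and the crc32sum helper name are assumptions modeled on the
TestRun.executor.run(...) call visible in the patches.

    # Hypothetical sketch only -- not the actual test-framework implementation.
    from datetime import timedelta

    def crc32sum(executor, path, timeout=timedelta(minutes=30)):
        # 'cksum' (POSIX) prints "<crc> <byte count> <file name>"; the caller's
        # timeout is forwarded to the executor, assumed to expose run(cmd, timeout).
        output = executor.run(f"cksum '{path}'", timeout)
        if output.exit_code != 0:
            raise Exception(f"Could not calculate checksum of {path}:\n{output.stderr}")
        return output.stdout.split()[0]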