Merge pull request #1546 from mmichal10/fix-huge-test

tests: fix test_flush_over_640_gibibytes_with_fs
commit d78b9a71f7
Robert Baldyga, 2024-10-03 13:11:48 +02:00 (committed by GitHub)
2 changed files with 30 additions and 12 deletions

@@ -1 +1 @@
-Subproject commit 42ebe34da3e6e10f823c24db7954675978298fe0
+Subproject commit acedafb5afec1ff346d97aa9db5486caf8acc032


@@ -1,5 +1,6 @@
 #
 # Copyright(c) 2020-2021 Intel Corporation
+# Copyright(c) 2024 Huawei Technologies
 # SPDX-License-Identifier: BSD-3-Clause
 #
@@ -28,6 +29,7 @@ mnt_point = "/mnt/cas/"
 @pytest.mark.parametrizex("fs", Filesystem)
 @pytest.mark.parametrizex("cache_mode", CacheMode.with_traits(CacheModeTrait.LazyWrites))
+@pytest.mark.require_disk("separate_dev", DiskTypeSet([DiskType.optane, DiskType.nand]))
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
 @pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
 def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
@@ -39,6 +41,8 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
       - Flushing completes successfully without any errors.
     """
     with TestRun.step("Prepare devices for cache and core."):
+        separate_dev = TestRun.disks['separate_dev']
+        check_disk_size(separate_dev)
         cache_dev = TestRun.disks['cache']
         check_disk_size(cache_dev)
         cache_dev.create_partitions([required_disk_size])
@@ -59,8 +63,12 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
         cache.set_cleaning_policy(CleaningPolicy.nop)
         cache.set_seq_cutoff_policy(SeqCutOffPolicy.never)

-    with TestRun.step("Create test file"):
-        test_file_main = File.create_file("/tmp/test_file_main")
+    with TestRun.step("Create a test file on a separate disk"):
+        src_dir_path = "/mnt/flush_640G_test"
+        separate_dev.create_filesystem(fs)
+        separate_dev.mount(src_dir_path)
+        test_file_main = File.create_file(f"{src_dir_path}/test_file_main")
         fio = (
             Fio().create_command()
             .io_engine(IoEngine.libaio)
@@ -75,31 +83,37 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
         fio.run()
         test_file_main.refresh_item()

-    with TestRun.step("Validate test file and read its md5 sum."):
+    with TestRun.step("Validate test file and read its crc32 sum."):
         if test_file_main.size != file_size:
-            TestRun.fail("Created test file hasn't reached its target size.")
-        test_file_md5sum_main = test_file_main.md5sum()
+            TestRun.LOGGER.error(f"Expected test file size {file_size}. Got {test_file_main.size}")
+        test_file_crc32sum_main = test_file_main.crc32sum(timeout=timedelta(hours=4))

     with TestRun.step("Write data to exported object."):
-        test_file_copy = test_file_main.copy(mnt_point + "test_file_copy")
+        test_file_copy = test_file_main.copy(
+            mnt_point + "test_file_copy", timeout=timedelta(hours=4)
+        )
         test_file_copy.refresh_item()
         sync()

     with TestRun.step(f"Check if dirty data exceeded {file_size * 0.98} GiB."):
         minimum_4KiB_blocks = int((file_size * 0.98).get_value(Unit.Blocks4096))
-        if int(cache.get_statistics().usage_stats.dirty) < minimum_4KiB_blocks:
-            TestRun.fail("There is not enough dirty data in the cache!")
+        actual_dirty_blocks = int(cache.get_statistics().usage_stats.dirty)
+        if actual_dirty_blocks < minimum_4KiB_blocks:
+            TestRun.LOGGER.error(
+                f"Expected at least: {minimum_4KiB_blocks} dirty blocks. "
+                f"Got {actual_dirty_blocks}"
+            )

     with TestRun.step("Unmount core and stop cache with flush."):
         core.unmount()
-        # this operation could take few hours, depending on core disk
+        # this operation could take a few hours, depending on the core disk
         output = TestRun.executor.run(stop_cmd(str(cache.cache_id)), timedelta(hours=12))
         if output.exit_code != 0:
             TestRun.fail(f"Stopping cache with flush failed!\n{output.stderr}")

-    with TestRun.step("Mount core device and check md5 sum of test file copy."):
+    with TestRun.step("Mount core device and check crc32 sum of test file copy."):
         core_dev.mount(mnt_point)
-        if test_file_md5sum_main != test_file_copy.md5sum():
+        if test_file_crc32sum_main != test_file_copy.crc32sum(timeout=timedelta(hours=4)):
             TestRun.LOGGER.error("Md5 sums should be equal.")

     with TestRun.step("Delete test files."):
@@ -110,6 +124,10 @@ def test_flush_over_640_gibibytes_with_fs(cache_mode, fs):
         core_dev.unmount()
         remove(mnt_point, True, True, True)

+    with TestRun.step("Unmount the additional device."):
+        separate_dev.unmount()
+        remove(src_dir_path, True, True, True)
+

 @pytest.mark.parametrizex("cache_mode", CacheMode.with_traits(CacheModeTrait.LazyWrites))
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))