Merge pull request #1231 from mmichal10/fix-trim-eviction-test

tests: fix test_trim_eviction
commit 57974faa1c
Author: Robert Baldyga
Date:   2022-06-20 11:11:08 +02:00 (committed by GitHub)


@@ -1,5 +1,5 @@
 #
-# Copyright(c) 2020-2021 Intel Corporation
+# Copyright(c) 2020-2022 Intel Corporation
 # SPDX-License-Identifier: BSD-3-Clause
 #
@@ -11,7 +11,7 @@ from api.cas.cache_config import CacheMode, CacheLineSize, CleaningPolicy
 from core.test_run import TestRun
 from storage_devices.disk import DiskTypeSet, DiskType, DiskTypeLowerThan
 from test_tools import fs_utils, disk_utils
-from test_tools.ddrescue import Ddrescue
+from test_tools.dd import Dd
 from test_tools.disk_utils import Filesystem
 from test_utils import os_utils
 from test_utils.os_utils import Udev
@@ -38,11 +38,11 @@ def test_trim_eviction(cache_mode, cache_line_size, filesystem, cleaning):
     test_file_path = os.path.join(mount_point, "test_file")

     with TestRun.step("Prepare devices."):
-        cache_disk = TestRun.disks['cache']
+        cache_disk = TestRun.disks["cache"]
         cache_disk.create_partitions([Size(1, Unit.GibiByte)])
         cache_dev = cache_disk.partitions[0]
-        core_disk = TestRun.disks['core']
+        core_disk = TestRun.disks["core"]
         core_disk.create_partitions([Size(1, Unit.GibiByte)])
         core_dev = core_disk.partitions[0]
@@ -76,7 +76,9 @@ def test_trim_eviction(cache_mode, cache_line_size, filesystem, cleaning):
     with TestRun.step("Remove file and create a new one."):
         cache_iostats_before = cache_dev.get_io_stats()
         data_reads_before = cache.get_io_class_statistics(io_class_id=0).block_stats.cache.reads
-        metadata_reads_before = cache.get_io_class_statistics(io_class_id=1).block_stats.cache.reads
+        metadata_reads_before = cache.get_io_class_statistics(
+            io_class_id=1
+        ).block_stats.cache.reads
         test_file.remove()
         os_utils.sync()
         os_utils.drop_caches()
@@ -106,16 +108,19 @@ def test_trim_eviction(cache_mode, cache_line_size, filesystem, cleaning):
             )
         else:
             TestRun.LOGGER.info(
-                "Number of reads from cache before and after removing test file is the same.")
+                "Number of reads from cache before and after removing test file is the same."
+            )


 def create_file_with_ddrescue(core_dev, test_file_path):
-    ddrescue = Ddrescue() \
-        .block_size(Size(1, Unit.Blocks4096)) \
-        .size(core_dev.size * 0.9) \
-        .synchronous() \
-        .source("/dev/urandom") \
-        .destination(test_file_path)
-    ddrescue.run()
+    dd = (
+        Dd()
+        .block_size(Size(1, Unit.MebiByte))
+        .count(900)
+        .input("/dev/urandom")
+        .output(test_file_path)
+        .oflag("sync")
+    )
+    dd.run()
     return File(test_file_path)
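
For context, the chained Dd builder in the new helper amounts to a plain dd invocation that writes 900 MiB of random data in 1 MiB blocks with per-block sync. Below is a minimal standalone sketch of that command, assuming the framework's Dd wrapper ultimately shells out to dd; the create_test_file helper and the example path are illustrative, not part of the test framework.

import subprocess


def create_test_file(test_file_path):
    # Write 900 MiB of random data in 1 MiB blocks, syncing each block to disk.
    # This mirrors Dd().block_size(Size(1, Unit.MebiByte)).count(900)
    #              .input("/dev/urandom").output(test_file_path).oflag("sync")
    subprocess.run(
        [
            "dd",
            "if=/dev/urandom",
            f"of={test_file_path}",
            "bs=1M",
            "count=900",
            "oflag=sync",
        ],
        check=True,
    )


# Example usage (path is hypothetical):
# create_test_file("/mnt/test/test_file")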