Merge pull request #1063 from Deixx/hotfix-scope-fixes
Test updates addressing false-positive test failures
Commit bc5fa9bc04
@@ -186,7 +186,7 @@ def test_acp_functional(cache_mode):
     "cache_mode", CacheMode.with_any_trait(CacheModeTrait.LazyWrites)
 )
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
-@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
+@pytest.mark.require_disk("core", DiskTypeSet([DiskType.hdd, DiskType.hdd4k]))
 def test_acp_param_flush_max_buffers(cache_line_size, cache_mode):
     """
         title: Functional test for ACP flush-max-buffers parameter.
@@ -206,15 +206,16 @@ def test_acp_param_flush_max_buffers(cache_line_size, cache_mode):

     default_config = FlushParametersAcp.default_acp_params()
     acp_configs = [
-        FlushParametersAcp(flush_max_buffers=buf) for buf in buffer_values
+        FlushParametersAcp(flush_max_buffers=buf, wake_up_time=Time(seconds=1)) for buf in
+        buffer_values
     ]
     acp_configs.append(default_config)

     with TestRun.step("Prepare partitions."):
-        core_size = Size(10, Unit.GibiByte)
+        core_size = Size(5, Unit.GibiByte)
         cache_device = TestRun.disks["cache"]
         core_device = TestRun.disks["core"]
-        cache_device.create_partitions([Size(5, Unit.GibiByte)])
+        cache_device.create_partitions([Size(10, Unit.GibiByte)])
         core_device.create_partitions([core_size])

     with TestRun.step(
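Note on the hunk above: pinning wake_up_time to one second while sweeping flush_max_buffers leaves a single variable under test, so the number of core writes observed in one cleaning iteration can be compared directly against the configured buffer limit. A minimal sketch of such a pass criterion (an assumption for illustration only, not the test's actual assertion):

```python
# Illustrative only: with the ACP wake-up interval pinned, each cleaning
# iteration is assumed to issue at most flush_max_buffers writes to core.
def iteration_within_limit(flush_writes: int, flush_max_buffers: int) -> bool:
    return 0 < flush_writes <= flush_max_buffers

assert iteration_within_limit(flush_writes=100, flush_max_buffers=128)
assert not iteration_within_limit(flush_writes=200, flush_max_buffers=128)
```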
@@ -240,6 +241,7 @@ def test_acp_param_flush_max_buffers(cache_line_size, cache_mode):
     for acp_config in acp_configs:
         with TestRun.step(f"Setting {acp_config}"):
             cache.set_params_acp(acp_config)
+
         with TestRun.step(
             "Using blktrace verify if there is appropriate number of I/O requests, "
             "which depends on flush-max-buffer parameter."
@@ -252,7 +254,7 @@ def test_acp_param_flush_max_buffers(cache_line_size, cache_mode):
             cleaning_started = False
             flush_writes = 0
             for (prev, curr) in zip(blktrace_output, blktrace_output[1:]):
-                if cleaning_started and write_to_core(prev, curr):
+                if cleaning_started and write_to_core(prev):
                     flush_writes += 1
                 if new_acp_iteration(prev, curr):
                     if cleaning_started:
@@ -284,7 +286,7 @@ def test_acp_param_flush_max_buffers(cache_line_size, cache_mode):
     "cache_mode", CacheMode.with_any_trait(CacheModeTrait.LazyWrites)
 )
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
-@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
+@pytest.mark.require_disk("core", DiskTypeSet([DiskType.hdd, DiskType.hdd4k]))
 def test_acp_param_wake_up_time(cache_line_size, cache_mode):
     """
         title: Functional test for ACP wake-up parameter.
@@ -309,10 +311,10 @@ def test_acp_param_wake_up_time(cache_line_size, cache_mode):
     acp_configs.append(FlushParametersAcp.default_acp_params())

     with TestRun.step("Prepare partitions."):
-        core_size = Size(10, Unit.GibiByte)
+        core_size = Size(5, Unit.GibiByte)
         cache_device = TestRun.disks["cache"]
         core_device = TestRun.disks["core"]
-        cache_device.create_partitions([Size(5, Unit.GibiByte)])
+        cache_device.create_partitions([Size(10, Unit.GibiByte)])
         core_device.create_partitions([core_size])

     with TestRun.step(
@@ -385,8 +387,8 @@ def new_acp_iteration(prev, curr):
     )


-def write_to_core(prev, curr):
-    return prev.action == ActionKind.IoHandled and curr.rwbs & RwbsKind.W
+def write_to_core(prev):
+    return prev.action == ActionKind.IoHandled and prev.rwbs & RwbsKind.W and prev.byte_count > 0


 def get_fio_cmd(core, core_size):
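The two hunks above change how flush writes are counted from the blktrace output: the classification now looks only at the earlier record of each (prev, curr) pair and skips zero-byte requests, which also carry the W flag and previously inflated the count. A self-contained sketch of that predicate follows; BlktraceRecord and the enum members are stand-ins for the framework's blktrace types (only the attribute names action, rwbs and byte_count come from the diff):

```python
from collections import namedtuple
from enum import Enum, Flag, auto

class ActionKind(Enum):          # stand-in for the framework's ActionKind
    IoHandled = auto()
    Other = auto()

class RwbsKind(Flag):            # stand-in for the framework's RwbsKind
    R = auto()
    W = auto()
    F = auto()                   # flush

BlktraceRecord = namedtuple("BlktraceRecord", "action rwbs byte_count")

def write_to_core(record) -> bool:
    # Count only completed, non-empty writes; zero-byte flush requests would
    # otherwise be counted as flushed buffers and skew the comparison.
    return (record.action == ActionKind.IoHandled
            and bool(record.rwbs & RwbsKind.W)
            and record.byte_count > 0)

trace = [
    BlktraceRecord(ActionKind.IoHandled, RwbsKind.W, 4096),            # counted
    BlktraceRecord(ActionKind.IoHandled, RwbsKind.W | RwbsKind.F, 0),  # empty flush, skipped
    BlktraceRecord(ActionKind.IoHandled, RwbsKind.R, 4096),            # read, skipped
]
print(sum(write_to_core(r) for r in trace))  # -> 1
```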
@@ -396,10 +398,10 @@ def get_fio_cmd(core, core_size):
         .target(core)
         .read_write(ReadWrite.write)
         .io_engine(IoEngine.libaio)
-        .io_size(Size(10, Unit.TebiByte))
         .size(core_size)
         .block_size(Size(1, Unit.Blocks4096))
-        .run_time(timedelta(seconds=9999))
+        .run_time(timedelta(hours=99))
+        .time_based()
         .io_depth(32)
         .num_jobs(1)
         .direct(1)
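For context on the fio change above: dropping the fixed 10 TiB io_size in favour of a long run_time plus time_based keeps fio writing for as long as the test needs instead of finishing after a fixed amount of data. Assuming the framework's Fio builder maps roughly one-to-one onto fio's standard command-line options, the resulting job would look approximately like the sketch below (the job name and device path are made up for illustration):

```python
# Hypothetical equivalent fio invocation; flag names are fio's documented
# options, the device path below is only an example.
fio_cmd = [
    "fio",
    "--name=acp_background_writes",
    "--filename=/dev/cas1-1",   # example exported CAS device
    "--rw=write",
    "--ioengine=libaio",
    "--size=5G",                # bounded by the core partition size
    "--bs=4096",
    "--runtime=356400",         # 99 hours, i.e. "run until the test stops fio"
    "--time_based",             # loop over the region instead of stopping after --size bytes
    "--iodepth=32",
    "--numjobs=1",
    "--direct=1",
]
print(" ".join(fio_cmd))
```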
@@ -4,7 +4,9 @@
 #

 import os
+
 import pytest
+
 from api.cas import casadm
 from api.cas.cache_config import CacheMode, CacheModeTrait, CacheLineSize, CleaningPolicy, \
     FlushParametersAcp
@@ -84,6 +86,7 @@ def test_recovery_all_options(cache_mode, cache_line_size, cleaning_policy, file
     with TestRun.step("Reset platform."):
         os_utils.sync()
         core.unmount()
+        os_utils.drop_caches(DropCachesMode.ALL)
         TestRun.LOGGER.info(f"Number of dirty blocks in cache: {cache.get_dirty_blocks()}")
         power_cycle_dut()

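The drop_caches call added above clears the OS page cache after the preceding sync, so that state held only in memory does not influence the steps that follow; the commit itself does not state the exact motivation. Assuming os_utils.drop_caches wraps the standard Linux interface, it amounts to roughly:

```python
# Sketch of the standard Linux cache-drop mechanism the helper presumably
# wraps; DropCachesMode.ALL would correspond to the value 3.
import subprocess

DROP_PAGECACHE, DROP_SLAB, DROP_ALL = 1, 2, 3

def drop_caches(mode: int = DROP_ALL) -> None:
    subprocess.run(["sync"], check=True)              # write out dirty pages first
    with open("/proc/sys/vm/drop_caches", "w") as f:  # requires root
        f.write(str(mode))                            # 1=pagecache, 2=slab objects, 3=both

drop_caches(DROP_ALL)
```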
@@ -107,6 +110,9 @@ def test_recovery_all_options(cache_mode, cache_line_size, cleaning_policy, file
         else:
             TestRun.fail("Source and target file checksums are different.")

+    core_disk.remove_partitions()
+    cache_disk.remove_partitions()

+
 def file_operation(target_path, data_pattern, io_pattern):
     fio = (Fio().create_command()
@@ -4,7 +4,9 @@
 #

 import os
+
 import pytest
+
 from api.cas import casadm, cli
 from api.cas.cache_config import CacheMode, CacheModeTrait, CleaningPolicy, SeqCutOffPolicy
 from core.test_run import TestRun
@@ -12,7 +14,7 @@ from storage_devices.disk import DiskTypeSet, DiskType, DiskTypeLowerThan
 from test_tools.disk_utils import Filesystem
 from test_tools.fs_utils import readlink
 from test_utils import os_utils
-from test_utils.os_utils import Udev
+from test_utils.os_utils import Udev, DropCachesMode
 from test_utils.output import CmdException
 from test_utils.size import Size, Unit
 from tests.lazy_writes.recovery.recovery_tests_methods import create_test_files, copy_file, \
@@ -65,6 +67,7 @@ def test_recovery_flush_reset_raw(cache_mode):
             raise CmdException("Error during hdparm", output)

     with TestRun.step("Trigger flush."):
+        os_utils.drop_caches(DropCachesMode.ALL)
         TestRun.executor.run_in_background(cli.flush_cache_cmd(f"{cache.cache_id}"))

     with TestRun.step("Hard reset DUT during data flushing."):
@@ -153,6 +156,7 @@ def test_recovery_flush_reset_fs(cache_mode, fs):
         core.unmount()

     with TestRun.step("Trigger flush."):
+        os_utils.drop_caches(DropCachesMode.ALL)
         TestRun.executor.run_in_background(cli.flush_cache_cmd(f"{cache.cache_id}"))

     with TestRun.step("Hard reset DUT during data flushing."):