Merge pull request #1399 from jwirkus/add_fuzzy_tests_api
Add fuzzy tests API
commit 5dc6133a09
@@ -37,6 +37,7 @@ class Opencas(metaclass=Singleton):
         self.repo_dir = repo_dir
         self.working_dir = working_dir
         self.already_updated = False
+        self.fuzzy_iter_count = 1000


 def pytest_collection_modifyitems(config, items):
@@ -106,6 +107,8 @@ def pytest_runtest_setup(item):
     TestRun.usr = Opencas(
         repo_dir=os.path.join(os.path.dirname(__file__), "../../.."),
         working_dir=dut_config['working_dir'])
+    if item.config.getoption('--fuzzy-iter-count'):
+        TestRun.usr.fuzzy_iter_count = int(item.config.getoption('--fuzzy-iter-count'))

     TestRun.LOGGER.info(f"DUT info: {TestRun.dut}")
     TestRun.dut.plugin_manager = TestRun.plugin_manager
@@ -185,6 +188,7 @@ def pytest_addoption(parser):
     parser.addoption("--log-path", action="store",
                      default=f"{os.path.join(os.path.dirname(__file__), '../results')}")
    parser.addoption("--force-reinstall", action="store_true", default=False)
+    parser.addoption("--fuzzy-iter-count", action="store")


 def unmount_cas_devices():
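The three hunks above (pytest hooks, presumably conftest.py) wire one knob end to end: `Opencas` carries a `fuzzy_iter_count` default of 1000, `pytest_addoption` registers `--fuzzy-iter-count`, and `pytest_runtest_setup` overrides the default when the option is given. A minimal, self-contained sketch of the same default-plus-override pattern; the file names and test body are illustrative, only the option name comes from the diff:

```python
# conftest.py (sketch)
def pytest_addoption(parser):
    parser.addoption("--fuzzy-iter-count", action="store")


# test_example.py (sketch)
def test_iteration_count(request):
    # Mirrors Opencas.fuzzy_iter_count: fall back to 1000 unless the option is
    # passed, e.g. `pytest test_example.py --fuzzy-iter-count 500`.
    raw = request.config.getoption("--fuzzy-iter-count")
    iter_count = int(raw) if raw else 1000
    assert iter_count > 0
```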
@@ -0,0 +1,15 @@
+- name: String
+  attributes:
+    name: Cls
+    value: '4'
+    size: '6'
+    mutable: 'true'
+  children:
+    - name: Hint
+      attributes:
+        name: NumericalString
+        value: 'true'
+    - name: Hint
+      attributes:
+        name: ValidValues
+        value: '8;16;32;64'
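These Peach-style `String`/`Hint` templates are plain YAML, and the common.py changes later in this diff index them as ordinary Python lists and dicts (`config[0]['attributes']['value']`, `config[0]['children']`). A small stand-alone illustration of that shape, assuming PyYAML's `yaml.safe_load`; the actual loader lives in `get_fuzz_config`, whose body is not part of this diff:

```python
import yaml

# YAML text copied from the config above, trimmed to a single Hint child.
config_text = """
- name: String
  attributes:
    name: Cls
    value: '4'
    size: '6'
    mutable: 'true'
  children:
    - name: Hint
      attributes:
        name: ValidValues
        value: '8;16;32;64'
"""
config = yaml.safe_load(config_text)
assert config[0]['attributes']['name'] == 'Cls'
assert config[0]['children'][0]['attributes']['value'].split(';') == ['8', '16', '32', '64']
```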
10 test/functional/tests/security/fuzzy/config/cache_mode.yml Normal file
@@ -0,0 +1,10 @@
+- name: String
+  attributes:
+    name: ModeValue
+    value: 'wt'
+    mutable: 'true'
+  children:
+    - name: Hint
+      attributes:
+        name: ValidValues
+        value: 'pt;wa;wb;wo'
10 test/functional/tests/security/fuzzy/config/device.yml Normal file
@@ -0,0 +1,10 @@
+- name: String
+  attributes:
+    name: Device
+    value: '<DEV>'
+    mutable: 'true'
+  children:
+    - name: Hint
+      attributes:
+        name: type
+        value: 'path'
10 test/functional/tests/security/fuzzy/config/flags.yml Normal file
@@ -0,0 +1,10 @@
+- name: String
+  attributes:
+    name: Flags
+    value: '--load'
+    mutable: 'true'
+  children:
+    - name: Hint
+      attributes:
+        name: ValidValues
+        value: '-l;-f;--force;-n;--no-data-flush;-b;--by-id-path'
@@ -0,0 +1,14 @@
+- name: String
+  attributes:
+    name: IoClassAllocation
+    value: '1'
+    mutable: 'true'
+  children:
+    - name: Hint
+      attributes:
+        name: NumericalString
+        value: 'true'
+    - name: Hint
+      attributes:
+        name: ValidValues
+        value: '0;0.;0.00;0.5;0.55;1.;1.0;1.00'
@@ -0,0 +1,10 @@
+- name: String
+  attributes:
+    name: Device
+    value: '/etc/opencas/ioclass-config.csv'
+    mutable: 'true'
+  children:
+    - name: Hint
+      attributes:
+        name: type
+        value: 'path'
5 test/functional/tests/security/fuzzy/config/string.yml Normal file
@@ -0,0 +1,5 @@
+- name: String
+  attributes:
+    name: Name
+    value: 'String'
+    mutable: 'true'
11 test/functional/tests/security/fuzzy/config/uint.yml Normal file
@@ -0,0 +1,11 @@
+- name: String
+  attributes:
+    name: Uint
+    value: '0'
+    size: '32'
+    mutable: 'true'
+  children:
+    - name: Hint
+      attributes:
+        name: NumericalString
+        value: 'true'
10 test/functional/tests/security/fuzzy/config/yes_no.yml Normal file
@@ -0,0 +1,10 @@
+- name: String
+  attributes:
+    name: ModeValue
+    value: 'no'
+    mutable: 'true'
+  children:
+    - name: Hint
+      attributes:
+        name: ValidValues
+        value: 'yes'
@@ -2,9 +2,11 @@
 # Copyright(c) 2022 Intel Corporation
 # SPDX-License-Identifier: BSD-3-Clause
 #

 import os
 import posixpath
-from typing import Callable
+from collections import namedtuple
+from typing import List

 import yaml

@@ -22,43 +24,73 @@ def get_fuzz_config(config_name: str):
     return fuzz_config


-def prepare_cas_instance(cache_disk, core_disk, cache_mode: CacheMode = None,
+def get_device_fuzz_config(device_paths: List[str]):
+    if len(device_paths) == 0:
+        raise Exception("device_paths parameter cannot be empty list")
+
+    device_base_config = get_fuzz_config("device.yml")
+    device_base_config[0]['attributes']['value'] = device_paths[0]
+    if len(device_paths) > 1:
+        other_valid_devices = {
+            'name': 'Hint',
+            'attributes': {
+                'name': 'ValidValues',
+                'value': ';'.join(device_paths[1:])
+            }
+        }
+        device_base_config[0]['children'].append(other_valid_devices)
+
+    return device_base_config
+
+
+def prepare_cas_instance(cache_device, core_device, cache_mode: CacheMode = None,
                          cache_line_size: CacheLineSize = None,
                          kernel_params: KernelParameters = KernelParameters(),
-                         cleaning_policy: CleaningPolicy = None, mount_point: str = None):
+                         cleaning_policy: CleaningPolicy = None, mount_point: str = None,
+                         create_partition=True):
     # Change cleaning policy to default for Write Policy different than WB
     if cleaning_policy:
         cleaning_policy = CleaningPolicy.DEFAULT if cache_mode != CacheMode.WB \
             else cleaning_policy

-    cache_disk.create_partitions([Size(400, Unit.MebiByte)])
-    cache_device = cache_disk.partitions[0]
+    if create_partition is True:
+        cache_device.create_partitions([Size(400, Unit.MebiByte)])
+        cache_device = cache_device.partitions[0]

     cache = casadm.start_cache(cache_device, cache_mode, cache_line_size, 1, True,
                                kernel_params=kernel_params)
     if cleaning_policy:
         cache.set_cleaning_policy(cleaning_policy)

     if mount_point:
-        core_disk.create_filesystem(Filesystem.ext4)
-        core = cache.add_core(core_disk)
+        core_device.create_filesystem(Filesystem.ext4)
+        core = cache.add_core(core_device)
         core.mount(mount_point)
     else:
-        core = cache.add_core(core_disk)
+        core = cache.add_core(core_device)

     return cache, core


-def run_cmd_and_validate(cmd, value_name: str, valid_values: list,
-                         post_process_param_func: Callable = None):
+def run_cmd_and_validate(cmd, value_name: str, is_valid: bool):
     TestRun.LOGGER.info(f"{value_name}: {cmd.param}")
-    TestRun.LOGGER.info(f"Encoded command: {cmd.command}")
+    TestRun.LOGGER.info(f"Command: {cmd.command}")
     output = TestRun.executor.run(cmd.command)
-    param = cmd.param
-    if post_process_param_func:
-        param = post_process_param_func(param)

-    if output.exit_code == 0 and param not in valid_values:
-        TestRun.LOGGER.error(f" {param} value is not valid")
-    elif output.exit_code != 0 and param in valid_values:
-        TestRun.LOGGER.error(f" {param} value is valid but command returned with "
-                             f"{output.exit_code} exit code")
+    if output.exit_code == 0 and not is_valid:
+        TestRun.LOGGER.error(f"{cmd.param} value is not valid\n"
+                             f"stdout: {output.stdout}\n"
+                             f"stderr: {output.stderr}")
+    elif output.exit_code != 0 and is_valid:
+        TestRun.LOGGER.error(f"{cmd.param} value is valid but command returned with "
+                             f"{output.exit_code} exit code\n"
+                             f"stdout: {output.stdout}\n"
+                             f"stderr: {output.stderr}")
+
+    return output
+
+
+def get_cmd(command, param):
+    FuzzedCommand = namedtuple('Command', ['param', 'command'])
+
+    return FuzzedCommand(param, command)
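Taken together, the new helpers split responsibilities: `get_device_fuzz_config` specializes the generic `device.yml` template for concrete paths, `get_cmd` pairs a fuzzed parameter with the command that embeds it, and `run_cmd_and_validate` only compares the exit code against a caller-supplied validity flag and now returns the output. A rough usage sketch, assuming a running test-framework session with `TestRun.executor` initialized; the device paths and the command bytes are placeholders, not taken from this PR:

```python
from tests.security.fuzzy.kernel.common.common import (
    get_cmd, get_device_fuzz_config, run_cmd_and_validate)

# First path becomes the template's base value, the rest land in a ValidValues hint.
device_config = get_device_fuzz_config(["/dev/nvme0n1p1", "/dev/sdb1"])

# Wrap a fuzzed parameter together with the command that carries it.
fuzzed_param = b"/dev/nvme0n1p1"
cmd = get_cmd(b"casadm --start-cache --cache-device " + fuzzed_param, fuzzed_param)

# Logs the command, runs it, and flags mismatches between exit code and
# the expected validity of the fuzzed value.
output = run_cmd_and_validate(cmd, "Device", is_valid=True)
```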
@@ -16,9 +16,6 @@ from test_tools.peach_fuzzer.peach_fuzzer import PeachFuzzer
 from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_cas_instance, \
     run_cmd_and_validate

-mount_point = "/mnt/test"
-iterations_count = 1000
-

 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
 @pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@@ -51,9 +48,9 @@ def test_fuzzy_io_class_list_cache_id(cache_mode, cache_line_size, cleaning_poli
         valid_values = [str(core.cache_id).encode('ascii')]
         PeachFuzzer.generate_config(get_fuzz_config("cache_id.yml"))
         base_cmd = list_io_classes_cmd("{param}", OutputFormat.table.name).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Cache_id", valid_values)
+            run_cmd_and_validate(cmd, "Cache_id", cmd.param in valid_values)
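Note the calling-convention change that repeats through the remaining test hunks: validity is now decided at the call site with a membership test on the raw fuzzed bytes, instead of passing `valid_values` (plus an optional post-processing callable) into the helper. A tiny illustration with made-up values:

```python
# Illustrative only: the tests build valid_values from ASCII-encoded ids.
valid_values = [str(1).encode('ascii')]   # e.g. the started cache's id
fuzzed_param = b'1'
is_valid = fuzzed_param in valid_values   # True here
# run_cmd_and_validate(cmd, "Cache_id", is_valid) then only has to compare
# this boolean against the command's exit code.
```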
@@ -16,9 +16,6 @@ from test_tools.peach_fuzzer.peach_fuzzer import PeachFuzzer
 from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_cas_instance, \
     run_cmd_and_validate

-mount_point = "/mnt/test"
-iterations_count = 1000
-

 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
 @pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@@ -51,9 +48,9 @@ def test_fuzzy_io_class_list_output_format(cache_mode, cache_line_size, cleaning
         valid_values = [e.name.encode('ascii') for e in list(OutputFormat)]
         PeachFuzzer.generate_config(get_fuzz_config("output_format.yml"))
         base_cmd = list_io_classes_cmd(str(core.cache_id), "{param}").encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Output_format", valid_values)
+            run_cmd_and_validate(cmd, "Output_format", cmd.param in valid_values)
@@ -17,7 +17,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -54,12 +53,12 @@ def test_fuzzy_print_statistics_cache_id(cache_mode, cache_line_size, cleaning_p
         valid_values = [str(cache.cache_id).encode('ascii')]
         PeachFuzzer.generate_config(get_fuzz_config("cache_id.yml"))
         base_cmd = print_statistics_cmd(cache_id="{param}", by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Cache_id", valid_values)
+            run_cmd_and_validate(cmd, "Cache_id", cmd.param in valid_values)

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)
@@ -17,7 +17,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -55,12 +54,12 @@ def test_fuzzy_print_statistics_core_id(cache_mode, cache_line_size, cleaning_po
         PeachFuzzer.generate_config(get_fuzz_config("core_id.yml"))
         base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), core_id="{param}",
                                         by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Core_id", valid_values)
+            run_cmd_and_validate(cmd, "Core_id", cmd.param in valid_values)

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)
@@ -18,7 +18,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -56,12 +55,12 @@ def test_fuzzy_print_statistics_filter_cache(cache_mode, cache_line_size, cleani
         PeachFuzzer.generate_config(get_fuzz_config('filter.yml'))
         base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), filter="{param}",
                                         by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Filter", valid_values)
+            run_cmd_and_validate(cmd, "Filter", cmd.param in valid_values)

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)
@@ -18,7 +18,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -57,12 +56,12 @@ def test_fuzzy_print_statistics_filter_cache_io_class(cache_mode, cache_line_siz
         base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), io_class_id="0",
                                         per_io_class=True, filter="{param}",
                                         by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Filter", valid_values)
+            run_cmd_and_validate(cmd, "Filter", cmd.param in valid_values)

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)
@@ -18,7 +18,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -56,12 +55,12 @@ def test_fuzzy_print_statistics_filter_core(cache_mode, cache_line_size, cleanin
         PeachFuzzer.generate_config(get_fuzz_config("filter.yml"))
         base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), core_id=str(core.core_id),
                                         filter="{param}", by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Filter", valid_values)
+            run_cmd_and_validate(cmd, "Filter", cmd.param in valid_values)

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)
@ -18,7 +18,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
|
|||||||
from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload
|
from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload
|
||||||
|
|
||||||
mount_point = "/mnt/test"
|
mount_point = "/mnt/test"
|
||||||
iterations_count = 1000
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
|
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
|
||||||
@ -57,12 +56,12 @@ def test_fuzzy_print_statistics_filter_core_io_class(cache_mode, cache_line_size
|
|||||||
base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), core_id=str(core.core_id),
|
base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), core_id=str(core.core_id),
|
||||||
io_class_id="0", per_io_class=True, filter="{param}",
|
io_class_id="0", per_io_class=True, filter="{param}",
|
||||||
by_id_path=False).encode('ascii')
|
by_id_path=False).encode('ascii')
|
||||||
commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
|
commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)
|
||||||
|
|
||||||
for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
|
for index, cmd in TestRun.iteration(enumerate(commands),
|
||||||
f"times"):
|
f"Run command {TestRun.usr.fuzzy_iter_count} times"):
|
||||||
with TestRun.step(f"Iteration {index + 1}"):
|
with TestRun.step(f"Iteration {index + 1}"):
|
||||||
run_cmd_and_validate(cmd, "Filter", valid_values)
|
run_cmd_and_validate(cmd, "Filter", cmd.param in valid_values)
|
||||||
|
|
||||||
with TestRun.step("Stop 'fio'"):
|
with TestRun.step("Stop 'fio'"):
|
||||||
TestRun.executor.kill_process(fio_pid)
|
TestRun.executor.kill_process(fio_pid)
|
||||||
|
@ -17,7 +17,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
|
|||||||
from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload
|
from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload
|
||||||
|
|
||||||
mount_point = "/mnt/test"
|
mount_point = "/mnt/test"
|
||||||
iterations_count = 1000
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
|
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
|
||||||
@ -55,20 +54,20 @@ def test_fuzzy_print_statistics_io_class_id_for_cache(cache_mode, cache_line_siz
|
|||||||
PeachFuzzer.generate_config(get_fuzz_config('io_class_id.yml'))
|
PeachFuzzer.generate_config(get_fuzz_config('io_class_id.yml'))
|
||||||
base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), per_io_class=True,
|
base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), per_io_class=True,
|
||||||
io_class_id="{param}", by_id_path=False).encode('ascii')
|
io_class_id="{param}", by_id_path=False).encode('ascii')
|
||||||
commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
|
commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)
|
||||||
|
|
||||||
for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
|
for index, cmd in TestRun.iteration(enumerate(commands),
|
||||||
f"times"):
|
f"Run command {TestRun.usr.fuzzy_iter_count} times"):
|
||||||
with TestRun.step(f"Iteration {index + 1}"):
|
with TestRun.step(f"Iteration {index + 1}"):
|
||||||
run_cmd_and_validate(cmd, "Io_class_id", valid_values,
|
run_cmd_and_validate(cmd, "Io_class_id",
|
||||||
post_process_param_func=__strip_value)
|
__is_valid_io_class_id(cmd.param, valid_values))
|
||||||
|
|
||||||
with TestRun.step("Stop 'fio'"):
|
with TestRun.step("Stop 'fio'"):
|
||||||
TestRun.executor.kill_process(fio_pid)
|
TestRun.executor.kill_process(fio_pid)
|
||||||
|
|
||||||
|
|
||||||
def __strip_value(param):
|
def __is_valid_io_class_id(param, valid_values):
|
||||||
param = param.rstrip(b'\x00\x20\n\t')
|
param = param.rstrip(b'\x00\x20\n\t')
|
||||||
param = b'0' if not len(param.rstrip(b'0')) else param # treat '00' as '0'
|
param = b'0' if not len(param.rstrip(b'0')) else param # treat '00' as '0'
|
||||||
|
|
||||||
return param
|
return param in valid_values
|
||||||
|
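The renamed `__is_valid_io_class_id` helper folds the old post-processing into the validity check: trailing NUL/whitespace is stripped and an all-zero string such as `b'00'` collapses to `b'0'` before the membership test. A standalone reproduction of that normalization (the function name and the sample `valid_values` below are local to this snippet, not part of the PR):

```python
def is_valid_io_class_id(param: bytes, valid_values: list) -> bool:
    # Mirror of the helper in the diff: strip trailing NUL, space, newline, tab,
    # then treat an all-zero string like b'00' as b'0'.
    param = param.rstrip(b'\x00\x20\n\t')
    param = b'0' if not len(param.rstrip(b'0')) else param
    return param in valid_values


valid = [str(i).encode('ascii') for i in range(33)]  # e.g. IO class ids 0..32
assert is_valid_io_class_id(b'00\n', valid)          # normalized to b'0'
assert not is_valid_io_class_id(b'0x1', valid)       # left untouched, rejected
```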
@@ -17,7 +17,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -56,20 +55,20 @@ def test_fuzzy_print_statistics_io_class_id_for_core(cache_mode, cache_line_size
         base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), core_id=str(core.core_id),
                                         per_io_class=True, io_class_id="{param}",
                                         by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Io_class_id", valid_values,
-                                 post_process_param_func=__strip_value)
+            run_cmd_and_validate(cmd, "Io_class_id",
+                                 __is_valid_io_class_id(cmd.param, valid_values))

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)


-def __strip_value(param):
+def __is_valid_io_class_id(param, valid_values):
     param = param.rstrip(b'\x00\x20\n\t')
     param = b'0' if not len(param.rstrip(b'0')) else param  # treat '00' as '0'

-    return param
+    return param in valid_values
@@ -18,7 +18,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -57,12 +56,12 @@ def test_fuzzy_print_statistics_output_format_for_cache(cache_mode, cache_line_s
         PeachFuzzer.generate_config(get_fuzz_config('output_format.yml'))
         base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), output_format="{param}",
                                         by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Output_format", valid_values)
+            run_cmd_and_validate(cmd, "Output_format", cmd.param in valid_values)

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)
@@ -18,7 +18,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -58,12 +57,12 @@ def test_fuzzy_print_statistics_output_format_for_cache_io_class_id(cache_mode,
         base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), io_class_id="0",
                                         per_io_class=True, output_format="{param}",
                                         by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Output_format", valid_values)
+            run_cmd_and_validate(cmd, "Output_format", cmd.param in valid_values)

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)
@@ -18,7 +18,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -56,12 +55,12 @@ def test_fuzzy_print_statistics_output_format_for_core(cache_mode, cache_line_si
         PeachFuzzer.generate_config(get_fuzz_config("output_format.yml"))
         base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), core_id=str(core.core_id),
                                         output_format="{param}", by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Output_format", valid_values)
+            run_cmd_and_validate(cmd, "Output_format", cmd.param in valid_values)

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)
@@ -18,7 +18,6 @@ from tests.security.fuzzy.kernel.common.common import get_fuzz_config, prepare_c
 from tests.security.fuzzy.kernel.fuzzy_with_io.common.common import get_basic_workload

 mount_point = "/mnt/test"
-iterations_count = 1000


 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -58,12 +57,12 @@ def test_fuzzy_print_statistics_output_format_for_core_io_class_id(cache_mode, c
         base_cmd = print_statistics_cmd(cache_id=str(core.cache_id), core_id=str(core.core_id),
                                         io_class_id="0", per_io_class=True, output_format="{param}",
                                         by_id_path=False).encode('ascii')
-        commands = PeachFuzzer.get_fuzzed_command(base_cmd, iterations_count)
+        commands = PeachFuzzer.get_fuzzed_command(base_cmd, TestRun.usr.fuzzy_iter_count)

-    for index, cmd in TestRun.iteration(enumerate(commands), f"Run command {iterations_count} "
-                                                             f"times"):
+    for index, cmd in TestRun.iteration(enumerate(commands),
+                                        f"Run command {TestRun.usr.fuzzy_iter_count} times"):
         with TestRun.step(f"Iteration {index + 1}"):
-            run_cmd_and_validate(cmd, "Output_format", valid_values)
+            run_cmd_and_validate(cmd, "Output_format", cmd.param in valid_values)

     with TestRun.step("Stop 'fio'"):
         TestRun.executor.kill_process(fio_pid)