test-api: update cas api after reformat

Signed-off-by: Kamil Gierszewski <kamil.gierszewski@huawei.com>
This commit is contained in:
Kamil Gierszewski 2024-08-14 02:23:48 +02:00
parent d40e2a519d
commit 36903a2c67
No known key found for this signature in database
14 changed files with 241 additions and 287 deletions

View File

@ -7,7 +7,7 @@
from api.cas.casadm_parser import *
from api.cas.core import Core
from api.cas.dmesg import get_metadata_size_on_device
from api.cas.statistics import CacheStats, IoClassStats
from api.cas.statistics import CacheStats, CacheIoClassStats
from test_utils.os_utils import *
from test_utils.output import Output
@ -53,8 +53,7 @@ class Cache:
return stats.config_stats.metadata_memory_footprint
def get_metadata_size_on_disk(self) -> Size:
cache_name = f"cache{self.cache_id}"
return get_metadata_size_on_device(cache_name=cache_name)
return get_metadata_size_on_device(cache_id=self.cache_id)
def get_occupancy(self):
return self.get_statistics().usage_stats.occupancy
@ -104,11 +103,11 @@ class Cache:
def get_io_class_statistics(
self,
io_class_id: int,
io_class_id: int = None,
stat_filter: List[StatsFilter] = None,
percentage_val: bool = False,
) -> IoClassStats:
return IoClassStats(
) -> CacheIoClassStats:
return CacheIoClassStats(
cache_id=self.cache_id,
filter=stat_filter,
io_class_id=io_class_id,
@ -169,31 +168,23 @@ class Cache:
def set_params_acp(self, acp_params: FlushParametersAcp) -> Output:
return casadm.set_param_cleaning_acp(
self.cache_id,
(
int(acp_params.wake_up_time.total_milliseconds())
if acp_params.wake_up_time
else None
),
int(acp_params.wake_up_time.total_milliseconds()) if acp_params.wake_up_time else None,
int(acp_params.flush_max_buffers) if acp_params.flush_max_buffers else None,
)
def set_params_alru(self, alru_params: FlushParametersAlru) -> Output:
return casadm.set_param_cleaning_alru(
self.cache_id,
(
int(alru_params.wake_up_time.total_seconds())
if alru_params.wake_up_time is not None
else None
),
(int(alru_params.wake_up_time.total_seconds()) if alru_params.wake_up_time else None),
(
int(alru_params.staleness_time.total_seconds())
if alru_params.staleness_time is not None
if alru_params.staleness_time
else None
),
(alru_params.flush_max_buffers if alru_params.flush_max_buffers is not None else None),
(alru_params.flush_max_buffers if alru_params.flush_max_buffers else None),
(
int(alru_params.activity_threshold.total_milliseconds())
if alru_params.activity_threshold is not None
if alru_params.activity_threshold
else None
),
)

View File

@ -41,30 +41,33 @@ class CacheMode(Enum):
return self.value
@staticmethod
def get_traits(cache_mode):
if cache_mode == CacheMode.PT:
return CacheModeTrait(0)
elif cache_mode == CacheMode.WT:
return CacheModeTrait.InsertRead | CacheModeTrait.InsertWrite
elif cache_mode == CacheMode.WB:
return (
CacheModeTrait.InsertRead | CacheModeTrait.InsertWrite | CacheModeTrait.LazyWrites
)
elif cache_mode == CacheMode.WO:
return CacheModeTrait.InsertWrite | CacheModeTrait.LazyWrites
elif cache_mode == CacheMode.WA:
return CacheModeTrait.InsertRead
def get_traits(cache_mode) -> CacheModeTrait:
match cache_mode:
case CacheMode.PT:
return CacheModeTrait(0)
case CacheMode.WT:
return CacheModeTrait.InsertRead | CacheModeTrait.InsertWrite
case CacheMode.WB:
return (
CacheModeTrait.InsertRead
| CacheModeTrait.InsertWrite
| CacheModeTrait.LazyWrites
)
case CacheMode.WO:
return CacheModeTrait.InsertWrite | CacheModeTrait.LazyWrites
case CacheMode.WA:
return CacheModeTrait.InsertRead
@staticmethod
def with_traits(flags: CacheModeTrait):
def with_traits(flags: CacheModeTrait) -> list:
return [m for m in CacheMode if all(map(lambda t: t in CacheMode.get_traits(m), flags))]
@staticmethod
def without_traits(flags: CacheModeTrait):
def without_traits(flags: CacheModeTrait) -> list:
return [m for m in CacheMode if not any(map(lambda t: t in CacheMode.get_traits(m), flags))]
@staticmethod
def with_any_trait(flags: CacheModeTrait):
def with_any_trait(flags: CacheModeTrait) -> list:
return [m for m in CacheMode if any(map(lambda t: t in CacheMode.get_traits(m), flags))]
@ -127,10 +130,10 @@ class CacheStatus(Enum):
class FlushParametersAlru:
def __init__(
self,
activity_threshold=None,
flush_max_buffers=None,
staleness_time=None,
wake_up_time=None,
activity_threshold: Time = None,
flush_max_buffers: int = None,
staleness_time: Time = None,
wake_up_time: Time = None,
):
self.activity_threshold = activity_threshold
self.flush_max_buffers = flush_max_buffers
@ -213,7 +216,9 @@ class FlushParametersAcp:
class SeqCutOffParameters:
def __init__(self, policy=None, threshold=None, promotion_count=None):
def __init__(
self, policy: CleaningPolicy = None, threshold: Size = None, promotion_count: int = None
):
self.policy = policy
self.threshold = threshold
self.promotion_count = promotion_count
@ -235,7 +240,7 @@ class SeqCutOffParameters:
class PromotionParametersNhit:
def __init__(self, threshold=None, trigger=None):
def __init__(self, threshold: Size = None, trigger: int = None):
self.threshold = threshold
self.trigger = trigger
@ -362,9 +367,9 @@ class KernelParameters:
class CacheConfig:
def __init__(
self,
cache_line_size=CacheLineSize.DEFAULT,
cache_mode=CacheMode.DEFAULT,
cleaning_policy=CleaningPolicy.DEFAULT,
cache_line_size: CacheLineSize = CacheLineSize.DEFAULT,
cache_mode: CacheMode = CacheMode.DEFAULT,
cleaning_policy: CleaningPolicy = CleaningPolicy.DEFAULT,
kernel_parameters=None,
):
self.cache_line_size = cache_line_size

View File

@ -41,12 +41,12 @@ def start_cache(
reload_kernel_module("cas_cache", kernel_params.get_parameter_dictionary())
_cache_line_size = (
None
if cache_line_size is None
else str(int(cache_line_size.value.get_value(Unit.KibiByte)))
str(int(cache_line_size.value.get_value(Unit.KibiByte)))
if cache_line_size is not None
else None
)
_cache_id = None if cache_id is None else str(cache_id)
_cache_mode = None if cache_mode is None else cache_mode.name.lower()
_cache_id = str(cache_id) if cache_id is not None else None
_cache_mode = cache_mode.name.lower() if cache_mode else None
output = TestRun.executor.run(
start_cmd(
cache_dev=cache_dev.path,
@ -70,11 +70,10 @@ def load_cache(device: Device, shortcut: bool = False) -> Cache:
return Cache(device)
def attach_cache(cache_id: int, device: [Device], force: bool, shortcut: bool = False) -> Output:
cache_dev_paths = ",".join(str(devs_path.path) for devs_path in device)
def attach_cache(cache_id: int, device: Device, force: bool, shortcut: bool = False) -> Output:
output = TestRun.executor.run(
attach_cache_cmd(
cache_dev=cache_dev_paths, cache_id=str(cache_id), force=force, shortcut=shortcut
cache_dev=device.path, cache_id=str(cache_id), force=force, shortcut=shortcut
)
)
if output.exit_code != 0:
@ -106,10 +105,10 @@ def set_param_cutoff(
promotion_count: int = None,
shortcut: bool = False,
) -> Output:
_core_id = None if core_id is None else str(core_id)
_threshold = None if threshold is None else str(int(threshold.get_value(Unit.KibiByte)))
_policy = None if policy is None else policy.name
_promotion_count = None if promotion_count is None else str(promotion_count)
_core_id = str(core_id) if core_id is not None else None
_threshold = str(int(threshold.get_value(Unit.KibiByte))) if threshold else None
_policy = policy.name if policy else None
_promotion_count = str(promotion_count) if promotion_count is not None else None
command = set_param_cutoff_cmd(
cache_id=str(cache_id),
core_id=_core_id,
@ -141,13 +140,17 @@ def set_param_cleaning_alru(
activity_threshold: int = None,
shortcut: bool = False,
) -> Output:
_wake_up = str(wake_up) if wake_up is not None else None
_staleness_time = str(staleness_time) if staleness_time is not None else None
_flush_max_buffers = str(flush_max_buffers) if flush_max_buffers is not None else None
_activity_threshold = str(activity_threshold) if activity_threshold is not None else None
output = TestRun.executor.run(
set_param_cleaning_alru_cmd(
cache_id=str(cache_id),
wake_up=str(wake_up),
staleness_time=str(staleness_time),
flush_max_buffers=str(flush_max_buffers),
activity_threshold=str(activity_threshold),
wake_up=_wake_up,
staleness_time=_staleness_time,
flush_max_buffers=_flush_max_buffers,
activity_threshold=_activity_threshold,
shortcut=shortcut,
)
)
@ -159,11 +162,13 @@ def set_param_cleaning_alru(
def set_param_cleaning_acp(
cache_id: int, wake_up: int = None, flush_max_buffers: int = None, shortcut: bool = False
) -> Output:
_wake_up = str(wake_up) if wake_up is not None else None
_flush_max_buffers = str(flush_max_buffers) if flush_max_buffers is not None else None
output = TestRun.executor.run(
set_param_cleaning_acp_cmd(
cache_id=str(cache_id),
wake_up=str(wake_up) if wake_up is not None else None,
flush_max_buffers=str(flush_max_buffers) if flush_max_buffers else None,
wake_up=_wake_up,
flush_max_buffers=_flush_max_buffers,
shortcut=shortcut,
)
)
@ -175,7 +180,7 @@ def set_param_cleaning_acp(
def get_param_cutoff(
cache_id: int, core_id: int, output_format: OutputFormat = None, shortcut: bool = False
) -> Output:
_output_format = None if output_format is None else output_format.name
_output_format = output_format.name if output_format else None
output = TestRun.executor.run(
get_param_cutoff_cmd(
cache_id=str(cache_id),
@ -190,21 +195,21 @@ def get_param_cutoff(
def get_param_cleaning(cache_id: int, output_format: OutputFormat = None, shortcut: bool = False):
_output_format = None if output_format is None else output_format.name
_output_format = output_format.name if output_format else None
output = TestRun.executor.run(
get_param_cleaning_cmd(
cache_id=str(cache_id), output_format=_output_format, shortcut=shortcut
)
)
if output.exit_code != 0:
raise CmdException("Getting cleaning policy params failed.", output)
raise CmdException("Getting cleaning policy failed.", output)
return output
def get_param_cleaning_alru(
cache_id: int, output_format: OutputFormat = None, shortcut: bool = False
):
_output_format = None if output_format is None else output_format.name
_output_format = output_format.name if output_format else None
output = TestRun.executor.run(
get_param_cleaning_alru_cmd(
cache_id=str(cache_id), output_format=_output_format, shortcut=shortcut
@ -218,7 +223,7 @@ def get_param_cleaning_alru(
def get_param_cleaning_acp(
cache_id: int, output_format: OutputFormat = None, shortcut: bool = False
):
_output_format = None if output_format is None else output_format.name
_output_format = output_format.name if output_format else None
output = TestRun.executor.run(
get_param_cleaning_acp_cmd(
cache_id=str(cache_id), output_format=_output_format, shortcut=shortcut
@ -233,11 +238,8 @@ def set_cache_mode(
cache_mode: CacheMode, cache_id: int, flush=None, shortcut: bool = False
) -> Output:
flush_cache = None
if flush is True:
flush_cache = "yes"
elif flush is False:
flush_cache = "no"
if flush:
flush_cache = "yes" if flush else "no"
output = TestRun.executor.run(
set_cache_mode_cmd(
cache_mode=cache_mode.name.lower(),
@ -252,7 +254,7 @@ def set_cache_mode(
def add_core(cache: Cache, core_dev: Device, core_id: int = None, shortcut: bool = False) -> Core:
_core_id = None if core_id is None else str(core_id)
_core_id = str(core_id) if core_id is not None else None
output = TestRun.executor.run(
add_core_cmd(
cache_id=str(cache.cache_id),
@ -302,7 +304,7 @@ def remove_detached(core_device: Device, shortcut: bool = False) -> Output:
def list_caches(
output_format: OutputFormat = None, by_id_path: bool = True, shortcut: bool = False
) -> Output:
_output_format = None if output_format is None else output_format.name
_output_format = output_format.name if output_format else None
output = TestRun.executor.run(
list_caches_cmd(output_format=_output_format, by_id_path=by_id_path, shortcut=shortcut)
)
@ -321,8 +323,8 @@ def print_statistics(
shortcut: bool = False,
) -> Output:
_output_format = output_format.name if output_format else None
_core_id = str(core_id) if core_id else None
_io_class_id = str(io_class_id) if io_class_id else None
_io_class_id = str(io_class_id) if io_class_id is not None else None
_core_id = str(core_id) if core_id is not None else None
if filter is None:
_filter = filter
else:
@ -345,7 +347,7 @@ def print_statistics(
def reset_counters(cache_id: int, core_id: int = None, shortcut: bool = False) -> Output:
_core_id = None if core_id is None else str(core_id)
_core_id = str(core_id) if core_id is not None else None
output = TestRun.executor.run(
reset_counters_cmd(cache_id=str(cache_id), core_id=_core_id, shortcut=shortcut)
)
@ -362,12 +364,8 @@ def flush_cache(cache_id: int, shortcut: bool = False) -> Output:
return output
def flush_core(
cache_id: int, core_id: int, shortcut: bool = False
) -> Output:
command = flush_core_cmd(
cache_id=str(cache_id), core_id=str(core_id), shortcut=shortcut
)
def flush_core(cache_id: int, core_id: int, shortcut: bool = False) -> Output:
command = flush_core_cmd(cache_id=str(cache_id), core_id=str(core_id), shortcut=shortcut)
output = TestRun.executor.run(command)
if output.exit_code != 0:
raise CmdException("Flushing core failed.", output)
@ -384,7 +382,7 @@ def load_io_classes(cache_id: int, file: str, shortcut: bool = False) -> Output:
def list_io_classes(cache_id: int, output_format: OutputFormat, shortcut: bool = False) -> Output:
_output_format = None if output_format is None else output_format.name
_output_format = output_format.name if output_format else None
output = TestRun.executor.run(
list_io_classes_cmd(cache_id=str(cache_id), output_format=_output_format, shortcut=shortcut)
)
@ -394,7 +392,7 @@ def list_io_classes(cache_id: int, output_format: OutputFormat, shortcut: bool =
def print_version(output_format: OutputFormat = None, shortcut: bool = False) -> Output:
_output_format = None if output_format is None else output_format.name
_output_format = output_format.name if output_format else None
output = TestRun.executor.run(version_cmd(output_format=_output_format, shortcut=shortcut))
if output.exit_code != 0:
raise CmdException("Failed to print version.", output)
@ -415,12 +413,7 @@ def standby_init(
) -> Cache:
if kernel_params != KernelParameters.read_current_settings():
reload_kernel_module("cas_cache", kernel_params.get_parameter_dictionary())
_cache_line_size = (
None
if cache_line_size is None
else str(int(cache_line_size.value.get_value(Unit.KibiByte)))
)
_cache_line_size = str(int(cache_line_size.value.get_value(Unit.KibiByte)))
output = TestRun.executor.run(
standby_init_cmd(
@ -510,18 +503,18 @@ def remove_core_with_script_command(cache_id: int, core_id: int, no_flush: bool
def stop_all_caches() -> None:
from .casadm_parser import get_caches
from api.cas.casadm_parser import get_caches
caches = get_caches()
if not caches:
return
for cache in caches:
stop_cache(cache_id=cache.cache_id)
stop_cache(cache_id=cache.cache_id, no_data_flush=True)
def remove_all_detached_cores() -> None:
from api.cas import casadm_parser
from api.cas.casadm_parser import get_cas_devices_dict
devices = casadm_parser.get_cas_devices_dict()
devices = get_cas_devices_dict()
for dev in devices["core_pool"]:
TestRun.executor.run(remove_detached_cmd(dev["device"]))

View File

@ -7,6 +7,18 @@
from enum import Enum
class ParamName(Enum):
    """casadm parameter-group names accepted by the get/set-param commands."""

    seq_cutoff = "seq-cutoff"
    cleaning = "cleaning"
    cleaning_alru = "cleaning-alru"
    cleaning_acp = "cleaning-acp"
    promotion = "promotion"
    promotion_nhit = "promotion-nhit"

    def __str__(self):
        # Render the member as the literal CLI token casadm expects.
        return self.value
class OutputFormat(Enum):
table = 0
csv = 1

View File

@ -75,13 +75,14 @@ def get_cas_devices_dict() -> dict:
core_pool = False
for device in device_list:
if device["type"] == "cache":
cache_id = int(device["id"])
params = [
("id", int(device["id"])),
("id", cache_id),
("device_path", device["disk"]),
("status", device["status"]),
]
devices["caches"][int(device["id"])] = dict([(key, value) for key, value in params])
cache_id = int(device["id"])
devices["caches"][cache_id] = dict([(key, value) for key, value in params])
elif device["type"] == "core":
params = [
("cache_id", cache_id),

View File

@ -174,7 +174,6 @@ def get_param_cutoff_cmd(
command = _get_param_cmd(
name=name,
cache_id=cache_id,
core_id=core_id,
output_format=output_format,
shortcut=shortcut,
)
@ -182,6 +181,32 @@ def get_param_cutoff_cmd(
return casadm_bin + command
def get_param_promotion_cmd(
    cache_id: str, output_format: str = None, shortcut: bool = False
) -> str:
    """Build the casadm command line that prints promotion policy parameters.

    :param cache_id: cache id as a string, as required by the CLI
    :param output_format: optional output format name (e.g. "csv")
    :param shortcut: use short option switches when True
    :return: full casadm invocation string
    """
    sub_command = _get_param_cmd(
        name="promotion",
        cache_id=cache_id,
        output_format=output_format,
        shortcut=shortcut,
    )
    return casadm_bin + sub_command
def get_param_promotion_nhit_cmd(
    cache_id: str, output_format: str = None, shortcut: bool = False
) -> str:
    """Build the casadm command line that prints promotion-nhit policy parameters.

    :param cache_id: cache id as a string, as required by the CLI
    :param output_format: optional output format name (e.g. "csv")
    :param shortcut: use short option switches when True
    :return: full casadm invocation string
    """
    sub_command = _get_param_cmd(
        name="promotion-nhit",
        cache_id=cache_id,
        output_format=output_format,
        shortcut=shortcut,
    )
    return casadm_bin + sub_command
def get_param_cleaning_cmd(cache_id: str, output_format: str = None, shortcut: bool = False) -> str:
name = "cleaning"
command = _get_param_cmd(
@ -412,36 +437,36 @@ def ctl_init(force: bool = False) -> str:
# casadm script
def script_try_add_cmd(cache_id: str, core_dev: str, core_id: str = None) -> str:
def script_try_add_cmd(cache_id: str, core_dev: str, core_id: str) -> str:
command = " --script --add-core --try-add"
command += " --cache-id " + cache_id
if core_id:
command += " --core-device " + core_dev
command += " --core-device " + core_dev
command += f" --core-id " + core_id
return casadm_bin + command
def script_purge_cache_cmd(cache_id: str) -> str:
command = "--script --purge-cache"
command = " --script --purge-cache"
command += " --cache-id " + cache_id
return casadm_bin + command
def script_purge_core_cmd(cache_id: str, core_id: str) -> str:
command = "--script --purge-core"
command = " --script --purge-core"
command += " --cache-id " + cache_id
command += " --core-id " + core_id
return casadm_bin + command
def script_detach_core_cmd(cache_id: str, core_id: str) -> str:
command = "--script --remove-core --detach"
command = " --script --remove-core --detach"
command += " --cache-id " + cache_id
command += " --core-id " + core_id
return casadm_bin + command
def script_remove_core_cmd(cache_id: str, core_id: str, no_flush: bool = False) -> str:
command = "--script --remove-core"
command = " --script --remove-core"
command += " --cache-id " + cache_id
command += " --core-id " + core_id
if no_flush:

View File

@ -138,7 +138,7 @@ operation_forbiden_in_standby = [
]
mutually_exclusive_params_init = [
r"Can\'t use \'load\' and \'init\' options simultaneously\n" r"Error during options handling"
r"Can\'t use \'load\' and \'init\' options simultaneously\nError during options handling"
]
mutually_exclusive_params_load = [

View File

@ -12,7 +12,7 @@ from api.cas import casadm
from api.cas.cache_config import SeqCutOffParameters, SeqCutOffPolicy
from api.cas.casadm_params import StatsFilter
from api.cas.casadm_parser import get_seq_cut_off_parameters, get_core_info_by_path
from api.cas.statistics import CoreStats, IoClassStats
from api.cas.statistics import CoreStats, CoreIoClassStats
from core.test_run_utils import TestRun
from storage_devices.device import Device
from test_tools import fs_utils, disk_utils
@ -57,8 +57,8 @@ class Core(Device):
io_class_id: int,
stat_filter: List[StatsFilter] = None,
percentage_val: bool = False,
):
return IoClassStats(
) -> CoreIoClassStats:
return CoreIoClassStats(
cache_id=self.cache_id,
filter=stat_filter,
io_class_id=io_class_id,

View File

@ -1,5 +1,6 @@
#
# Copyright(c) 2019-2022 Intel Corporation
# Copyright(c) 2024 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: BSD-3-Clause
#
@ -9,9 +10,10 @@ from test_utils.dmesg import get_dmesg
from test_utils.size import Size, Unit
def get_metadata_size_on_device(cache_name: str) -> Size:
def get_metadata_size_on_device(cache_id: int) -> Size:
dmesg_reversed = list(reversed(get_dmesg().split("\n")))
cache_dmesg = "\n".join(line for line in dmesg_reversed if cache_name in line)
cache_name = "cache" + str(cache_id)
cache_dmesg = "\n".join(line for line in dmesg_reversed if re.search(f"{cache_name}:", line))
try:
return _get_metadata_info(dmesg=cache_dmesg, section_name="Metadata size on device")
except ValueError:

View File

@ -1,113 +0,0 @@
#
# Copyright(c) 2019-2022 Intel Corporation
# Copyright(c) 2024 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: BSD-3-Clause
#
import os
from core.test_run import TestRun
from connection.local_executor import LocalExecutor
from test_utils.output import CmdException
def get_submodules_paths(from_dut: bool = False):
    """Return the submodule paths declared in the repo's .gitmodules file.

    :param from_dut: when True query the working copy on the DUT, otherwise the local repo
    :raises CmdException: when the git invocation fails
    """
    if from_dut:
        runner, repo = TestRun.executor, TestRun.usr.working_dir
    else:
        runner, repo = LocalExecutor(), TestRun.usr.repo_dir
    git_params = "config --file .gitmodules --get-regexp path | cut -d' ' -f2"
    result = runner.run(f"git -C {repo} {git_params}")
    if result.exit_code != 0:
        raise CmdException("Failed to get submodules paths", result)
    return result.stdout.splitlines()
def get_repo_files(
    branch: str = "HEAD",
    with_submodules: bool = True,
    with_dirs: bool = False,
    from_dut: bool = False,
):
    """List files tracked by git in the CAS repo, as absolute paths.

    :param branch: tree-ish to list (defaults to the currently checked out HEAD)
    :param with_submodules: also include files of every declared submodule
    :param with_dirs: additionally append the (unique) parent directories
    :param from_dut: when True query the working copy on the DUT, otherwise the local repo
    :raises CmdException: when any git invocation fails
    """
    if from_dut:
        runner, repo = TestRun.executor, TestRun.usr.working_dir
    else:
        runner, repo = LocalExecutor(), TestRun.usr.repo_dir
    git_params = f"ls-tree -r --name-only --full-tree {branch}"

    listing = runner.run(f"git -C {repo} {git_params}")
    if listing.exit_code != 0:
        raise CmdException("Failed to get repo files list", listing)
    collected = listing.stdout.splitlines()

    if with_submodules:
        for subm_path in get_submodules_paths(from_dut):
            listing = runner.run(f"git -C {os.path.join(repo, subm_path)} {git_params}")
            if listing.exit_code != 0:
                raise CmdException(f"Failed to get {subm_path} submodule repo files list", listing)
            collected += [os.path.join(subm_path, entry) for entry in listing.stdout.splitlines()]

    if with_dirs:
        # use set() to get unique values only
        collected.extend(set(os.path.dirname(entry) for entry in collected))

    # change to absolute paths and remove empty values
    return [os.path.realpath(os.path.join(repo, entry)) for entry in collected if entry]
def get_current_commit_hash(from_dut: bool = False):
    """Return the full hash of the commit currently checked out.

    :param from_dut: when True query the working copy on the DUT, otherwise the local repo
    """
    runner = TestRun.executor if from_dut else LocalExecutor()
    repo = TestRun.usr.working_dir if from_dut else TestRun.usr.repo_dir
    # NOTE(review): "&&" abuts "git" with no space; shells accept this,
    # so the command string is preserved verbatim.
    return runner.run(f'cd {repo} &&git show HEAD -s --pretty=format:"%H"').stdout
def get_current_commit_message():
    """Return the full message of the commit checked out in the local repo."""
    # NOTE(review): "&&" abuts "git" with no space; shells accept this,
    # so the command string is preserved verbatim.
    return LocalExecutor().run(
        f'cd {TestRun.usr.repo_dir} &&git show HEAD -s --pretty=format:"%B"'
    ).stdout
def get_commit_hash(cas_version, from_dut: bool = False):
    """Resolve a CAS version (tag, branch, ref) to a commit hash.

    :param cas_version: any git rev-parse-able revision expression
    :param from_dut: when True query the working copy on the DUT, otherwise the local repo
    :raises CmdException: when the revision cannot be resolved
    """
    runner = TestRun.executor if from_dut else LocalExecutor()
    repo = TestRun.usr.working_dir if from_dut else TestRun.usr.repo_dir
    result = runner.run(f"cd {repo} && git rev-parse {cas_version}")
    if result.exit_code != 0:
        raise CmdException(f"Failed to resolve '{cas_version}' to commit hash", result)
    TestRun.LOGGER.info(f"Resolved '{cas_version}' as commit {result.stdout}")
    return result.stdout
def get_release_tags():
    """Return the git tags that look like CAS release versions."""
    git_dir = os.path.join(TestRun.usr.working_dir, ".git")
    all_tags = TestRun.executor.run_expect_success(f"git --git-dir={git_dir} tag").stdout
    # Tags containing '-' or '_' are not CAS release versions
    return [tag for tag in all_tags.splitlines() if not ("-" in tag or "_" in tag)]
def checkout_cas_version(cas_version):
    """Force-checkout the CAS working dir to the given version and sync submodules.

    :param cas_version: any git rev-parse-able revision expression
    :raises CmdException: when checkout or submodule update fails
    """
    target = get_commit_hash(cas_version)
    TestRun.LOGGER.info(f"Checkout CAS to {target}")

    result = TestRun.executor.run(
        f"cd {TestRun.usr.working_dir} && git checkout --force {target}"
    )
    if result.exit_code != 0:
        raise CmdException(f"Failed to checkout to {target}", result)

    result = TestRun.executor.run(
        f"cd {TestRun.usr.working_dir} && git submodule update --force"
    )
    if result.exit_code != 0:
        raise CmdException("Failed to update submodules", result)

View File

@ -1,17 +1,15 @@
#
# Copyright(c) 2019-2022 Intel Corporation
# Copyright(c) 2024 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: BSD-3-Clause
#
import logging
import os
from tests import conftest
from core.test_run import TestRun
from api.cas import cas_module, git
from api.cas import cas_module
from api.cas.version import get_installed_cas_version
from test_utils import os_utils
from test_utils import os_utils, git
from test_utils.output import CmdException
@ -31,14 +29,14 @@ def rsync_opencas_sources():
def clean_opencas_repo():
TestRun.LOGGER.info("Cleaning Open CAS repo")
output = TestRun.executor.run(f"cd {TestRun.usr.working_dir} && " "make distclean")
output = TestRun.executor.run(f"cd {TestRun.usr.working_dir} && make distclean")
if output.exit_code != 0:
raise CmdException("make distclean command executed with nonzero status", output)
def build_opencas():
TestRun.LOGGER.info("Building Open CAS")
output = TestRun.executor.run(f"cd {TestRun.usr.working_dir} && " "./configure && " "make -j")
output = TestRun.executor.run(f"cd {TestRun.usr.working_dir} && ./configure && make -j")
if output.exit_code != 0:
raise CmdException("Make command executed with nonzero status", output)
@ -50,7 +48,7 @@ def install_opencas(destdir: str = ""):
destdir = os.path.join(TestRun.usr.working_dir, destdir)
output = TestRun.executor.run(
f"cd {TestRun.usr.working_dir} && " f"make {'DESTDIR='+destdir if destdir else ''} install"
f"cd {TestRun.usr.working_dir} && make {'DESTDIR='+destdir if destdir else ''} install"
)
if output.exit_code != 0:
raise CmdException("Failed to install Open CAS", output)
@ -74,7 +72,7 @@ def set_up_opencas(version: str = ""):
clean_opencas_repo()
if version:
git.checkout_cas_version(version)
git.checkout_version(version)
build_opencas()
install_opencas()
@ -86,7 +84,7 @@ def uninstall_opencas():
if output.exit_code != 0:
raise CmdException("Open CAS is not properly installed", output)
else:
TestRun.executor.run(f"cd {TestRun.usr.working_dir} && " f"make uninstall")
TestRun.executor.run(f"cd {TestRun.usr.working_dir} && make uninstall")
if output.exit_code != 0:
raise CmdException("There was an error during uninstall process", output)

View File

@ -1,5 +1,6 @@
#
# Copyright(c) 2019-2022 Intel Corporation
# Copyright(c) 2024 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: BSD-3-Clause
#
@ -49,7 +50,12 @@ class IoClass:
)
def __eq__(self, other):
return (self.id, self.rule, self.priority, self.allocation) == (
return (
self.id,
self.rule,
self.priority,
self.allocation,
) == (
other.id,
other.rule,
other.priority,
@ -57,7 +63,12 @@ class IoClass:
)
def __lt__(self, other):
return (self.id, self.rule, self.priority, self.allocation) < (
return (
self.id,
self.rule,
self.priority,
self.allocation,
) < (
other.id,
other.rule,
other.priority,
@ -216,7 +227,7 @@ def remove_ioclass_config(ioclass_config_path: str = default_config_file_path):
output = TestRun.executor.run(f"rm -f {ioclass_config_path}")
if output.exit_code != 0:
raise Exception(
"Failed to remove config file. " + f"stdout: {output.stdout} \n stderr :{output.stderr}"
f"Failed to remove config file. stdout: {output.stdout} \n stderr :{output.stderr}"
)
@ -228,7 +239,7 @@ def add_ioclass(
ioclass_config_path: str = default_config_file_path,
):
new_ioclass = f"{ioclass_id},{rule},{eviction_priority},{allocation}"
TestRun.LOGGER.info(f"Adding rule {new_ioclass} " + f"to config file {ioclass_config_path}")
TestRun.LOGGER.info(f"Adding rule {new_ioclass} to config file {ioclass_config_path}")
output = TestRun.executor.run(f'echo "{new_ioclass}" >> {ioclass_config_path}')
if output.exit_code != 0:
@ -239,9 +250,7 @@ def add_ioclass(
def get_ioclass(ioclass_id: int, ioclass_config_path: str = default_config_file_path):
TestRun.LOGGER.info(
f"Retrieving rule no. {ioclass_id} " + f"from config file {ioclass_config_path}"
)
TestRun.LOGGER.info(f"Retrieving rule no. {ioclass_id} from config file {ioclass_config_path}")
output = TestRun.executor.run(f"cat {ioclass_config_path}")
if output.exit_code != 0:
raise Exception(
@ -257,9 +266,7 @@ def get_ioclass(ioclass_id: int, ioclass_config_path: str = default_config_file_
def remove_ioclass(ioclass_id: int, ioclass_config_path: str = default_config_file_path):
TestRun.LOGGER.info(
f"Removing rule no.{ioclass_id} " + f"from config file {ioclass_config_path}"
)
TestRun.LOGGER.info(f"Removing rule no.{ioclass_id} from config file {ioclass_config_path}")
output = TestRun.executor.run(f"cat {ioclass_config_path}")
if output.exit_code != 0:
raise Exception(

View File

@ -4,9 +4,9 @@
# SPDX-License-Identifier: BSD-3-Clause
#
import csv
from enum import Enum
from datetime import timedelta
from typing import List
@ -15,6 +15,27 @@ from api.cas.casadm_params import StatsFilter
from test_utils.size import Size, Unit
class UnitType(Enum):
    """Unit suffixes appearing in casadm statistics column headers."""

    requests = "[Requests]"
    percentage = "[%]"
    block_4k = "[4KiB Blocks]"
    mebibyte = "[MiB]"
    kibibyte = "[KiB]"
    gibibyte = "[GiB]"
    seconds = "[s]"

    def __str__(self):
        # Render the member exactly as the header suffix printed by casadm.
        return self.value
class OperationType(Enum):
    """I/O direction labels as printed in casadm statistics output."""

    read = "Read"
    write = "Write"

    def __str__(self):
        # Render the member as the literal label used in the stats table.
        return self.value
class CacheStats:
def __init__(
self,
@ -57,7 +78,7 @@ class CacheStats:
case StatsFilter.req:
self.request_stats = RequestStats(stats_dict, percentage_val)
case StatsFilter.blk:
self.block_stats_cache = BlockStats(stats_dict, percentage_val)
self.block_stats = BlockStats(stats_dict, percentage_val)
case StatsFilter.err:
self.error_stats = ErrorStats(stats_dict, percentage_val)
@ -101,11 +122,6 @@ class CoreStats:
).stdout.splitlines()
stat_keys, stat_values = csv.reader(csv_stats)
# Unify names in block stats for core and cache:
# cache stats: Reads from core(s)
# core stats: Reads from core
stat_keys = [x.replace("(s)", "") for x in stat_keys]
stats_dict = dict(zip(stat_keys, stat_values))
for filter in filters:
@ -117,7 +133,7 @@ class CoreStats:
case StatsFilter.req:
self.request_stats = RequestStats(stats_dict, percentage_val)
case StatsFilter.blk:
self.block_stats_cache = BlockStats(stats_dict, percentage_val)
self.block_stats = BlockStats(stats_dict, percentage_val)
case StatsFilter.err:
self.error_stats = ErrorStats(stats_dict, percentage_val)
@ -133,12 +149,7 @@ class CoreStats:
]
class IoClassStats:
def __str__(self):
# stats_list contains all Class.__str__ methods initialized in CacheStats
stats_list = [str(getattr(self, stats_item)) for stats_item in self.__dict__]
return "\n".join(stats_list)
class CoreIoClassStats:
def __init__(
self,
cache_id: int,
@ -147,7 +158,6 @@ class IoClassStats:
filter: List[StatsFilter] = None,
percentage_val: bool = False,
):
if filter is None:
filters = [
StatsFilter.conf,
@ -177,13 +187,13 @@ class IoClassStats:
for filter in filters:
match filter:
case StatsFilter.conf:
self.config_stats = IoClassConfigStats(stats_dict, percentage_val)
self.config_stats = IoClassConfigStats(stats_dict)
case StatsFilter.usage:
self.usage_stats = IoClassUsageStats(stats_dict, percentage_val)
case StatsFilter.req:
self.request_stats = RequestStats(stats_dict, percentage_val)
case StatsFilter.blk:
self.block_stats_cache = BlockStats(stats_dict, percentage_val)
self.block_stats = BlockStats(stats_dict, percentage_val)
def __eq__(self, other):
# check if all initialized variable in self(CacheStats) match other(CacheStats)
@ -191,12 +201,34 @@ class IoClassStats:
getattr(other, stats_item) for stats_item in other.__dict__
]
def __str__(self):
# stats_list contains all Class.__str__ methods initialized in CacheStats
stats_list = [str(getattr(self, stats_item)) for stats_item in self.__dict__]
return "\n".join(stats_list)
class CacheIoClassStats(CoreIoClassStats):
    """Per-io-class statistics scoped to a whole cache rather than one core.

    Thin convenience wrapper over CoreIoClassStats with core_id pinned to None.
    """

    def __init__(
        self,
        cache_id: int,
        io_class_id: int,
        filter: List[StatsFilter] = None,
        percentage_val: bool = False,
    ):
        # A cache-wide io-class query is a core query without a core id.
        super().__init__(
            core_id=None,
            cache_id=cache_id,
            io_class_id=io_class_id,
            filter=filter,
            percentage_val=percentage_val,
        )
class CacheConfigStats:
    """Parsed "configuration" section of cache statistics.

    NOTE(review): rendered-diff residue — the "@ -..." hunk marker and the
    adjacent old/new duplicated argument lines below are not live code; only
    the UnitType-based line of each pair survives in the committed file, and
    part of the body is elided by the diff.
    """

    def __init__(self, stats_dict):
        self.cache_id = stats_dict["Cache Id"]
        self.cache_size = parse_value(
            value=stats_dict["Cache Size [4KiB Blocks]"], unit_type="[4KiB Blocks]"
            value=stats_dict["Cache Size [4KiB Blocks]"], unit_type=UnitType.block_4k
        )
        self.cache_dev = stats_dict["Cache Device"]
        self.exp_obj = stats_dict["Exported Object"]
        @ -206,10 +238,10 @@ class CacheConfigStats:
        self.cleaning_policy = stats_dict["Cleaning Policy"]
        self.promotion_policy = stats_dict["Promotion Policy"]
        self.cache_line_size = parse_value(
            value=stats_dict["Cache line size [KiB]"], unit_type="[KiB]"
            value=stats_dict["Cache line size [KiB]"], unit_type=UnitType.kibibyte
        )
        self.metadata_memory_footprint = parse_value(
            value=stats_dict["Metadata Memory Footprint [MiB]"], unit_type="[MiB]"
            value=stats_dict["Metadata Memory Footprint [MiB]"], unit_type=UnitType.mebibyte
        )
        # "Dirty for" is reported in seconds by casadm.
        self.dirty_for = parse_value(value=stats_dict["Dirty for [s]"], unit_type="[s]")
        self.status = stats_dict["Status"]
@ -258,12 +290,12 @@ class CoreConfigStats:
self.core_dev = stats_dict["Core Device"]
self.exp_obj = stats_dict["Exported Object"]
self.core_size = parse_value(
value=stats_dict["Core Size [4KiB Blocks]"], unit_type=" [4KiB Blocks]"
value=stats_dict["Core Size [4KiB Blocks]"], unit_type=UnitType.block_4k
)
self.dirty_for = parse_value(value=stats_dict["Dirty for [s]"], unit_type="[s]")
self.dirty_for = parse_value(value=stats_dict["Dirty for [s]"], unit_type=UnitType.seconds)
self.status = stats_dict["Status"]
self.seq_cutoff_threshold = parse_value(
value=stats_dict["Seq cutoff threshold [KiB]"], unit_type="[KiB]"
value=stats_dict["Seq cutoff threshold [KiB]"], unit_type=UnitType.kibibyte
)
self.seq_cutoff_policy = stats_dict["Seq cutoff policy"]
@ -324,7 +356,7 @@ class IoClassConfigStats:
class UsageStats:
    """Parsed "usage" statistics section (occupancy / free / clean / ...).

    NOTE(review): truncated in this rendered-diff view — the remainder of
    __init__ is elided by the next hunk marker, and the duplicated `unit`
    lines below are an old/new diff pair (the UnitType line is the live one).
    """

    def __init__(self, stats_dict, percentage_val):
        # Values are percentages when percentage_val is set, 4 KiB blocks otherwise.
        unit = "[%]" if percentage_val else "[4KiB Blocks]"
        unit = UnitType.percentage if percentage_val else UnitType.block_4k
        self.occupancy = parse_value(value=stats_dict[f"Occupancy {unit}"], unit_type=unit)
        self.free = parse_value(value=stats_dict[f"Free {unit}"], unit_type=unit)
        self.clean = parse_value(value=stats_dict[f"Clean {unit}"], unit_type=unit)
@ -373,7 +405,7 @@ class UsageStats:
class IoClassUsageStats:
    """Per-I/O-class "usage" statistics section.

    NOTE(review): truncated in this rendered-diff view; the duplicated `unit`
    lines below are an old/new diff pair (the UnitType line is the live one).
    """

    def __init__(self, stats_dict, percentage_val):
        # Values are percentages when percentage_val is set, 4 KiB blocks otherwise.
        unit = "[%]" if percentage_val else "[4KiB Blocks]"
        unit = UnitType.percentage if percentage_val else UnitType.block_4k
        self.occupancy = parse_value(value=stats_dict[f"Occupancy {unit}"], unit_type=unit)
        self.clean = parse_value(value=stats_dict[f"Clean {unit}"], unit_type=unit)
        self.dirty = parse_value(value=stats_dict[f"Dirty {unit}"], unit_type=unit)
@ -441,12 +473,12 @@ class InactiveUsageStats:
class RequestStats:
    """Parsed "requests" statistics section.

    NOTE(review): truncated in this rendered-diff view (the last call is cut
    mid-arguments); the adjacent old/new line pairs below are diff residue —
    the UnitType / OperationType lines are the live ones.
    """

    def __init__(self, stats_dict, percentage_val):
        unit = "[%]" if percentage_val else "[Requests]"
        unit = UnitType.percentage if percentage_val else UnitType.requests
        # Read and write directions are parsed by the shared chunk helper.
        self.read = RequestStatsChunk(
            stats_dict=stats_dict, percentage_val=percentage_val, operation="Read"
            stats_dict=stats_dict, percentage_val=percentage_val, operation=OperationType.read
        )
        self.write = RequestStatsChunk(
            stats_dict=stats_dict, percentage_val=percentage_val, operation="Write"
            stats_dict=stats_dict, percentage_val=percentage_val, operation=OperationType.write
        )
        self.pass_through_reads = parse_value(
            value=stats_dict[f"Pass-Through reads {unit}"], unit_type=unit
@ -486,8 +518,8 @@ class RequestStats:
class RequestStatsChunk:
    """Hit/miss breakdown for one request direction (read or write).

    NOTE(review): the first __init__/unit pair below is the pre-commit version
    kept as rendered-diff residue; the OperationType-typed signature is the
    live one. The body is truncated by the next hunk marker.
    """

    def __init__(self, stats_dict, percentage_val, operation: str):
        unit = "[%]" if percentage_val else "[Requests]"

    def __init__(self, stats_dict, percentage_val: bool, operation: OperationType):
        unit = UnitType.percentage if percentage_val else UnitType.requests
        self.hits = parse_value(value=stats_dict[f"{operation} hits {unit}"], unit_type=unit)
        self.part_misses = parse_value(
            value=stats_dict[f"{operation} partial misses {unit}"], unit_type=unit
@ -548,7 +580,7 @@ class BlockStats:
class ErrorStats:
    """Parsed "errors" statistics section.

    NOTE(review): truncated in this rendered-diff view; the duplicated `unit`
    lines below are an old/new diff pair (the UnitType line is the live one).
    """

    def __init__(self, stats_dict, percentage_val):
        unit = "[%]" if percentage_val else "[Requests]"
        unit = UnitType.percentage if percentage_val else UnitType.requests
        # Cache-device error counters; further devices are elided by the diff.
        self.cache = BasicStatsChunkError(
            stats_dict=stats_dict, percentage_val=percentage_val, device="Cache"
        )
@ -577,7 +609,7 @@ class ErrorStats:
class BasicStatsChunk:
    """Read/write/total block counters for one device within a stats table.

    :param stats_dict: parsed casadm statistics rows keyed by column header
    :param percentage_val: True to read the percentage columns, False to read
        the 4 KiB block-count columns
    :param device: device label embedded in the column headers (e.g. "Cache")
    """

    def __init__(self, stats_dict: dict, percentage_val: bool, device: str):
        # Fix: dropped the stale pre-refactor assignment
        # (unit = "[%]" if ... else "[4KiB Blocks]") that was dead code —
        # it was unconditionally shadowed by the UnitType line below.
        unit = UnitType.percentage if percentage_val else UnitType.block_4k
        self.reads = parse_value(value=stats_dict[f"Reads from {device} {unit}"], unit_type=unit)
        self.writes = parse_value(value=stats_dict[f"Writes to {device} {unit}"], unit_type=unit)
        self.total = parse_value(value=stats_dict[f"Total to/from {device} {unit}"], unit_type=unit)
@ -595,7 +627,7 @@ class BasicStatsChunk:
class BasicStatsChunkError:
    """Read/write/total error counters for one device within a stats table.

    :param stats_dict: parsed casadm statistics rows keyed by column header
    :param percentage_val: True to read the percentage columns, False to read
        the request-count columns
    :param device: device label embedded in the column headers (e.g. "Cache")
    """

    def __init__(self, stats_dict: dict, percentage_val: bool, device: str):
        # Fix: dropped the stale pre-refactor assignment
        # (unit = "[%]" if ... else "[Requests]") that was dead code —
        # it was unconditionally shadowed by the UnitType line below.
        unit = UnitType.percentage if percentage_val else UnitType.requests
        self.reads = parse_value(value=stats_dict[f"{device} read errors {unit}"], unit_type=unit)
        self.writes = parse_value(value=stats_dict[f"{device} write errors {unit}"], unit_type=unit)
        self.total = parse_value(value=stats_dict[f"{device} total errors {unit}"], unit_type=unit)
@ -611,21 +643,21 @@ class BasicStatsChunkError:
)
def parse_value(value: str, unit_type: str) -> int | float | Size | timedelta | str:
def parse_value(value: str, unit_type: UnitType) -> int | float | Size | timedelta | str:
match unit_type:
case "[Requests]":
case UnitType.requests:
stat_unit = int(value)
case "[%]":
case UnitType.percentage:
stat_unit = float(value)
case "[4KiB Blocks]":
case UnitType.block_4k:
stat_unit = Size(float(value), Unit.Blocks4096)
case "[MiB]":
case UnitType.mebibyte:
stat_unit = Size(float(value), Unit.MebiByte)
case "[KiB]":
case UnitType.kibibyte:
stat_unit = Size(float(value), Unit.KibiByte)
case "[GiB]":
case UnitType.gibibyte:
stat_unit = Size(float(value), Unit.GibiByte)
case "[s]":
case UnitType.seconds:
stat_unit = timedelta(seconds=float(value))
case _:
stat_unit = value

View File

@ -1,11 +1,12 @@
#
# Copyright(c) 2019-2022 Intel Corporation
# Copyright(c) 2024 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: BSD-3-Clause
#
import re
from api.cas import git
from test_utils import git
from core.test_run import TestRun
from test_utils.output import CmdException