From 7765c663e535a42c2f71e15ec9923ea5f4d0e725 Mon Sep 17 00:00:00 2001 From: Jan Musial Date: Fri, 28 Jul 2023 12:58:28 +0200 Subject: [PATCH 01/15] test-framework: Properly detect virtio drives in disk_finder Signed-off-by: Jan Musial --- test_utils/disk_finder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test_utils/disk_finder.py b/test_utils/disk_finder.py index 21eb0db..90ad46d 100644 --- a/test_utils/disk_finder.py +++ b/test_utils/disk_finder.py @@ -39,7 +39,7 @@ def get_block_devices_list(): block_devices = [] for dev in devices: - if ('sd' in dev or 'nvme' in dev) and dev not in os_disks: + if any([prefix in dev for prefix in ["sd", "nvme", "vd"]]) and dev not in os_disks: block_devices.append(dev) return block_devices From 95fbb5fcf0270822eeee3dc3aa5ed2a786490940 Mon Sep 17 00:00:00 2001 From: Jan Musial Date: Mon, 31 Jul 2023 13:42:05 +0200 Subject: [PATCH 02/15] test-framework: Implement VirtioDevice Signed-off-by: Jan Musial --- storage_devices/disk.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/storage_devices/disk.py b/storage_devices/disk.py index 83b8c9d..e4ad6a7 100644 --- a/storage_devices/disk.py +++ b/storage_devices/disk.py @@ -235,3 +235,37 @@ class SataDisk(Disk): self.plug_command = f"echo '{port_id} {target_id} {lun}' > " \ f"{host_path}/host{controller_id}/scsi_host/host{controller_id}/scan" return sysfs_addr + + +class VirtioDisk(Disk): + plug_all_command = "echo 1 > /sys/bus/pci/rescan" + + def __init__(self, path, disk_type, serial_number, block_size): + Disk.__init__(self, path, disk_type, serial_number, block_size) + self.plug_command = VirtioDisk.plug_all_command + self.unplug_command = \ + f"echo 1 > {self.get_unplug_path()}" + + def get_unplug_path(self): + device_id = self.get_device_id() + + ls_command = f"$(find -H /sys/devices/ -name {device_id} -type d)" + output = fs_utils.ls_item(f"{ls_command}") + sysfs_addr = fs_utils.parse_ls_output(output)[0] + if not sysfs_addr: + raise Exception(f"Failed to find sysfs address: ls -l {ls_command}") + + dirs = sysfs_addr.full_path.split("/") + + for i, path_component in enumerate(dirs[::-1]): + # Search for scsi address in sysfs path + matches = re.search( + r"^\d+:\d+:\d+.\d+$", + path_component) + if matches: + break + else: + raise Exception(f"Failed to find controller for {device_id}") + + return "/".join(dirs[:-i]) + "/remove" + From e1401fda3440a3149a34ebf2e6663e7c6d7cefdf Mon Sep 17 00:00:00 2001 From: Jan Musial Date: Mon, 31 Jul 2023 14:07:19 +0200 Subject: [PATCH 03/15] test-framework: Add device type autodetection for TestFramework Signed-off-by: Jan Musial --- storage_devices/disk.py | 60 ++++++++++++++++++++++++++--------------- 1 file changed, 39 insertions(+), 21 deletions(-) diff --git a/storage_devices/disk.py b/storage_devices/disk.py index e4ad6a7..549a25a 100644 --- a/storage_devices/disk.py +++ b/storage_devices/disk.py @@ -173,10 +173,18 @@ class Disk(Device): disk_type: DiskType, serial_number, block_size): - if disk_type is DiskType.nand or disk_type is DiskType.optane: - return NvmeDisk(path, disk_type, serial_number, block_size) - else: - return SataDisk(path, disk_type, serial_number, block_size) + + resolved_disk_type = None + for resolved_disk_type in [NvmeDisk, SataDisk, VirtioDisk]: + try: + resolved_disk_type.get_unplug_path() + except: + continue + + if resolved_disk_type is None: + raise Exception(f"Unrecognized device type for {path}") + + return resolved_disk_type(path, disk_type, serial_number, block_size) 
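A standalone sketch of the detection pattern above, for reference. Two details are assumptions rather than part of the hunk as posted: the probe is given the device id explicitly, and resolution stops at the first class whose get_unplug_path() does not raise (the posted loop calls the probe without arguments and keeps iterating).

    def resolve_disk_type(path, device_id):
        # A probe that raises means "this is not my device type";
        # the first class that succeeds claims the device.
        for disk_class in [NvmeDisk, SataDisk, VirtioDisk]:
            try:
                disk_class.get_unplug_path(device_id)
            except Exception:
                continue
            return disk_class
        raise Exception(f"Unrecognized device type for {path}")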
class NvmeDisk(Disk): @@ -204,6 +212,20 @@ class NvmeDisk(Disk): def get_lba_format_in_use(self): return nvme_cli.get_lba_format_in_use(self) + @classmethod + def get_unplug_path(cls, device_id): + base = f"/sys/block/{device_id}/device" + for suffix in ["/remove", "/device/remove"]: + try: + output = fs_utils.ls_item(base + suffix) + fs_utils.parse_ls_output(output)[0] + except: + continue + + return base + suffix + + raise Exception(f"Couldn't create unplug path for {device_id}") + class SataDisk(Disk): plug_all_command = "for i in $(find -H /sys/devices/ -path '*/scsi_host/*/scan' -type f); " \ @@ -213,9 +235,10 @@ class SataDisk(Disk): Disk.__init__(self, path, disk_type, serial_number, block_size) self.plug_command = SataDisk.plug_all_command self.unplug_command = \ - f"echo 1 > {self.get_sysfs_properties(self.get_device_id()).full_path}/device/delete" + f"echo 1 > {self.get_unplug_path(self.get_device_id())}" - def get_sysfs_properties(self, device_id): + @classmethod + def get_unplug_path(cls, device_id): ls_command = f"$(find -H /sys/devices/ -name {device_id} -type d)" output = fs_utils.ls_item(f"{ls_command}") sysfs_addr = fs_utils.parse_ls_output(output)[0] @@ -223,18 +246,14 @@ class SataDisk(Disk): raise Exception(f"Failed to find sysfs address: ls -l {ls_command}") dirs = sysfs_addr.full_path.split('/') scsi_address = dirs[-3] - matches = re.search( - r"^(?P\d+)[-:](?P\d+)[-:](?P\d+)[-:](?P\d+)$", - scsi_address) - controller_id = matches["controller"] - port_id = matches["port"] - target_id = matches["target"] - lun = matches["lun"] + try: + re.search( + r"^\d+[-:]\d+[-:]\d+[-:]\d+$", + scsi_address) + except: + raise Exception(f"Failed to find controller for {device_id}") - host_path = "/".join(itertools.takewhile(lambda x: not x.startswith("host"), dirs)) - self.plug_command = f"echo '{port_id} {target_id} {lun}' > " \ - f"{host_path}/host{controller_id}/scsi_host/host{controller_id}/scan" - return sysfs_addr + return sysfs_addr.full_path + "/device/delete" class VirtioDisk(Disk): @@ -244,11 +263,10 @@ class VirtioDisk(Disk): Disk.__init__(self, path, disk_type, serial_number, block_size) self.plug_command = VirtioDisk.plug_all_command self.unplug_command = \ - f"echo 1 > {self.get_unplug_path()}" - - def get_unplug_path(self): - device_id = self.get_device_id() + f"echo 1 > {self.get_unplug_path(self.get_device_id())}" + @classmethod + def get_unplug_path(cls, device_id): ls_command = f"$(find -H /sys/devices/ -name {device_id} -type d)" output = fs_utils.ls_item(f"{ls_command}") sysfs_addr = fs_utils.parse_ls_output(output)[0] From d2835c1059b5d57159cf58119e67915e85c9835c Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Tue, 1 Aug 2023 11:29:48 +0200 Subject: [PATCH 04/15] test-framework: Allow remote execution using ProxyJump Uses configuration from SSH config Signed-off-by: Daniel Madej Signed-off-by: Kamil Gierszewski --- connection/ssh_executor.py | 59 +++++++++++++++++++++++++++++++------- core/test_run_utils.py | 17 +++++------ 2 files changed, 57 insertions(+), 19 deletions(-) diff --git a/connection/ssh_executor.py b/connection/ssh_executor.py index 237420f..9917df7 100644 --- a/connection/ssh_executor.py +++ b/connection/ssh_executor.py @@ -6,19 +6,21 @@ import socket import subprocess import paramiko +import os from datetime import timedelta, datetime from connection.base_executor import BaseExecutor -from core.test_run import TestRun +from core.test_run import TestRun, Blocked from test_utils.output import Output class SshExecutor(BaseExecutor): - def 
__init__(self, ip, username, port=22): - self.ip = ip + def __init__(self, host, username, port=22): + self.host = host self.user = username self.port = port self.ssh = paramiko.SSHClient() + self.ssh_config = None self._check_config_for_reboot_timeout() def __del__(self): @@ -26,26 +28,61 @@ class SshExecutor(BaseExecutor): def connect(self, user=None, port=None, timeout: timedelta = timedelta(seconds=30)): + hostname = self.host user = user or self.user port = port or self.port self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + config, sock, key_filename = None, None, None + # search for 'host' in SSH config try: - self.ssh.connect(self.ip, username=user, + path = os.path.expanduser('~/.ssh/config') + config = paramiko.SSHConfig.from_path(path) + except FileNotFoundError: + pass + + if config is not None: + target = config.lookup(self.host) + hostname = target['hostname'] + key_filename = target.get('identityfile', None) + user = target.get('user', user) + port = target.get('port', port) + if target.get('proxyjump', None) is not None: + proxy = config.lookup(target['proxyjump']) + jump = paramiko.SSHClient() + jump.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + try: + jump.connect(proxy['hostname'], username=proxy['user'], + port=int(proxy.get('port', 22)), key_filename=proxy.get('identityfile', None)) + transport = jump.get_transport() + local_addr = (proxy['hostname'], int(proxy.get('port', 22))) + dest_addr = (hostname, port) + sock = transport.open_channel("direct-tcpip", dest_addr, local_addr) + except Exception as e: + raise ConnectionError(f"An exception of type '{type(e)}' occurred while trying to " + f"connect to proxy '{proxy['hostname']}'.\n {e}") + + if user is None: + TestRun.block("There is no user given in config.") + + try: + self.ssh.connect(hostname, username=user, port=port, timeout=timeout.total_seconds(), - banner_timeout=timeout.total_seconds()) + banner_timeout=timeout.total_seconds(), + sock=sock, key_filename=key_filename) + self.ssh_config = config except paramiko.AuthenticationException as e: raise paramiko.AuthenticationException( f"Authentication exception occurred while trying to connect to DUT. 
" f"Please check your SSH key-based authentication.\n{e}") except (paramiko.SSHException, socket.timeout) as e: raise ConnectionError(f"An exception of type '{type(e)}' occurred while trying to " - f"connect to {self.ip}.\n {e}") + f"connect to {hostname}.\n {e}") def disconnect(self): try: self.ssh.close() except Exception: - raise Exception(f"An exception occurred while trying to disconnect from {self.ip}") + raise Exception(f"An exception occurred while trying to disconnect from {self.host}") def _execute(self, command, timeout): try: @@ -53,7 +90,7 @@ class SshExecutor(BaseExecutor): timeout=timeout.total_seconds()) except paramiko.SSHException as e: raise ConnectionError(f"An exception occurred while executing command '{command}' on" - f" {self.ip}\n{e}") + f" {self.host}\n{e}") return Output(stdout.read(), stderr.read(), stdout.channel.recv_exit_status()) @@ -71,8 +108,8 @@ class SshExecutor(BaseExecutor): for exclude in exclude_list: options.append(f"--exclude {exclude}") - src_to_dst = f"{self.user}@{self.ip}:{src} {dst} " if dut_to_controller else\ - f"{src} {self.user}@{self.ip}:{dst} " + src_to_dst = f"{self.user}@{self.host}:{src} {dst} " if dut_to_controller else\ + f"{src} {self.user}@{self.host}:{dst} " try: completed_process = subprocess.run( @@ -124,7 +161,7 @@ class SshExecutor(BaseExecutor): try: self.connect() return - except paramiko.AuthenticationException: + except (paramiko.AuthenticationException, Blocked): raise except Exception: continue diff --git a/core/test_run_utils.py b/core/test_run_utils.py index a27d018..ab0c26f 100644 --- a/core/test_run_utils.py +++ b/core/test_run_utils.py @@ -133,19 +133,20 @@ def __presetup(cls): if cls.config['type'] == 'ssh': try: IP(cls.config['ip']) + cls.config['host'] = cls.config['ip'] except ValueError: TestRun.block("IP address from config is in invalid format.") + except KeyError: + if 'host' not in cls.config: + TestRun.block("No IP address or host defined in config") port = cls.config.get('port', 22) - if 'user' in cls.config: - cls.executor = SshExecutor( - cls.config['ip'], - cls.config['user'], - port - ) - else: - TestRun.block("There is no user given in config.") + cls.executor = SshExecutor( + cls.config['host'], + cls.config.get('user', None), + port + ) elif cls.config['type'] == 'local': cls.executor = LocalExecutor() else: From be9286025d68e89d21e382c92316d7ec82d354aa Mon Sep 17 00:00:00 2001 From: Daniel Madej Date: Tue, 1 Aug 2023 11:46:12 +0200 Subject: [PATCH 05/15] test-framework: Resolve IP for DUT configured in SSH config Signed-off-by: Daniel Madej --- connection/base_executor.py | 3 +++ connection/ssh_executor.py | 48 +++++++++++++++++++++++++++++++++---- core/test_run_utils.py | 1 + test_utils/dut.py | 2 +- 4 files changed, 49 insertions(+), 5 deletions(-) diff --git a/connection/base_executor.py b/connection/base_executor.py index f47867e..40564a3 100644 --- a/connection/base_executor.py +++ b/connection/base_executor.py @@ -35,6 +35,9 @@ class BaseExecutor: def wait_for_connection(self, timeout: timedelta = None): pass + def resolve_ip_address(self): + return "127.0.0.1" + def run(self, command, timeout: timedelta = timedelta(minutes=30)): if TestRun.dut and TestRun.dut.env: command = f"{TestRun.dut.env} && {command}" diff --git a/connection/ssh_executor.py b/connection/ssh_executor.py index 9917df7..a05c34f 100644 --- a/connection/ssh_executor.py +++ b/connection/ssh_executor.py @@ -2,13 +2,14 @@ # Copyright(c) 2019-2021 Intel Corporation # SPDX-License-Identifier: BSD-3-Clause # - +import os 
+import re import socket import subprocess -import paramiko -import os - from datetime import timedelta, datetime + +import paramiko + from connection.base_executor import BaseExecutor from core.test_run import TestRun, Blocked from test_utils.output import Output @@ -177,3 +178,42 @@ class SshExecutor(BaseExecutor): except Exception: return raise ConnectionError("Timeout occurred before ssh connection loss") + + def resolve_ip_address(self): + user, hostname, port = self.user, self.host, self.port + key_file = None + pattern = br"^Authenticated to.+\[(\d+\.\d+\.\d+\.\d+)].*$" + param, command = " -v", "''" + try: + if self.ssh_config: + host = self.ssh_config.lookup(self.host) + if re.fullmatch(r"^\d+\.\d+\.\d+\.\d+$", host['hostname']): + return host['hostname'] + + if host.get('proxyjump', None) is not None: + proxy = self.ssh_config.lookup(host['proxyjump']) + + user = proxy.get('user', user) + hostname = proxy['hostname'] + port = proxy.get('port', port) + key_file = proxy.get('identityfile', key_file) + command = f"nslookup {host['hostname']}" + pattern = br"^Address:\s+(\d+\.\d+\.\d+\.\d+)\s*$" + param = "" + else: + user = host.get('user', user) + port = host.get('port', port) + key_file = host.get('identityfile', key_file) + user_str = f"{user}@" + identity_str = f" -i {os.path.abspath(key_file[0])}" if key_file else "" + + completed_process = subprocess.run( + f"ssh{identity_str} -p {port}{param} {user_str}{hostname} {command}", + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + timeout=30) + matches = re.findall(pattern, completed_process.stdout + completed_process.stderr, re.MULTILINE) + return matches[-1].decode('utf-8') + except: + return None diff --git a/core/test_run_utils.py b/core/test_run_utils.py index ab0c26f..889c490 100644 --- a/core/test_run_utils.py +++ b/core/test_run_utils.py @@ -171,6 +171,7 @@ def __setup(cls): except Exception as ex: raise Exception(f"Failed to setup DUT instance:\n" f"{str(ex)}\n{traceback.format_exc()}") + cls.dut.ip = cls.dut.ip or cls.executor.resolve_ip_address() cls.__setup_disks() TestRun.LOGGER.info(f"Re-seeding random number generator with seed: {cls.random_seed}") diff --git a/test_utils/dut.py b/test_utils/dut.py index 0780dcb..85cd184 100644 --- a/test_utils/dut.py +++ b/test_utils/dut.py @@ -21,7 +21,7 @@ class Dut: self.spider = dut_info['spider'] if 'spider' in dut_info else None self.wps = dut_info['wps'] if 'wps' in dut_info else None self.env = dut_info['env'] if 'env' in dut_info else None - self.ip = dut_info['ip'] if 'ip' in dut_info else "127.0.0.1" + self.ip = dut_info['ip'] if 'ip' in dut_info else None def __str__(self): dut_str = f'ip: {self.ip}\n' From d62106e850fd91fac24164a21ffba31fe324d77d Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Mon, 2 Oct 2023 12:28:22 +0200 Subject: [PATCH 06/15] test-framework: Rename get_free_memory funtion Signed-off-by: Kamil Gierszewski --- test_utils/os_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test_utils/os_utils.py b/test_utils/os_utils.py index 8d68662..bd040fc 100644 --- a/test_utils/os_utils.py +++ b/test_utils/os_utils.py @@ -199,7 +199,7 @@ def defaultize_memory_affecting_functions(): TestRun.executor.run_expect_success("swapon --all") -def get_free_memory(): +def get_mem_free(): """Returns free amount of memory in bytes""" output = TestRun.executor.run_expect_success("free -b") output = output.stdout.splitlines() From 83e8064bfb087c94516ae894380b9e0676eeb935 Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Tue, 
14 Nov 2023 16:23:45 +0100 Subject: [PATCH 07/15] test-framework: Add nullblock to test-framework Signed-off-by: Kamil Gierszewski --- test_tools/disk_utils.py | 9 ++++++++- test_tools/fs_utils.py | 8 ++++++-- test_utils/filesystem/symlink.py | 7 +++++++ 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/test_tools/disk_utils.py b/test_tools/disk_utils.py index 38ac3a7..9092900 100644 --- a/test_tools/disk_utils.py +++ b/test_tools/disk_utils.py @@ -1,5 +1,6 @@ # # Copyright(c) 2019-2022 Intel Corporation +# Copyright(c) 2023 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # @@ -375,7 +376,13 @@ def _is_by_id_path(path: str): def _is_dev_path_whitelisted(path: str): """check if given path is whitelisted""" - whitelisted_paths = [r"cas\d+-\d+", r"/dev/dm-\d+"] + whitelisted_paths = [ + r"/dev/ram\d+", + r"/nullb\d+", + r"/dev/drbd\d+", + r"cas\d+-\d+", + r"/dev/dm-\d+" + ] for whitelisted_path in whitelisted_paths: if re.search(whitelisted_path, path) is not None: diff --git a/test_tools/fs_utils.py b/test_tools/fs_utils.py index 512bacc..e620bfd 100644 --- a/test_tools/fs_utils.py +++ b/test_tools/fs_utils.py @@ -1,5 +1,6 @@ # # Copyright(c) 2019-2022 Intel Corporation +# Copyright(c) 2023 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # @@ -99,6 +100,10 @@ def check_if_regular_file_exists(path): return TestRun.executor.run(f"test -f \"{path}\"").exit_code == 0 +def check_if_special_block_exist(path): + return TestRun.executor.run(f"test -b \"{path}\"").exit_code == 0 + + def check_if_symlink_exists(path): return TestRun.executor.run(f"test -L \"{path}\"").exit_code == 0 @@ -266,7 +271,7 @@ def uncompress_archive(file, destination=None): def ls(path, options=''): default_options = "-lA --time-style=+'%Y-%m-%d %H:%M:%S'" output = TestRun.executor.run( - f"ls {default_options} {options} \"{path}\"") + f"ls {default_options} {options} {path}") return output.stdout @@ -308,7 +313,6 @@ def parse_ls_output(ls_output, dir_path=''): from test_utils.filesystem.file import File, FsItem from test_utils.filesystem.directory import Directory from test_utils.filesystem.symlink import Symlink - if file_type == '-': fs_item = File(full_path) elif file_type == 'd': diff --git a/test_utils/filesystem/symlink.py b/test_utils/filesystem/symlink.py index e67906c..7765dc8 100644 --- a/test_utils/filesystem/symlink.py +++ b/test_utils/filesystem/symlink.py @@ -1,5 +1,6 @@ # # Copyright(c) 2019-2021 Intel Corporation +# Copyright(c) 2023 Huawei Technologies Co., Ltd. 
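For reference, the whitelist entries added above are matched with re.search(), so a bare fragment such as r"/nullb\d+" also accepts full paths like /dev/nullb0 created by the null_blk driver. A minimal self-contained version of the check (the function name is illustrative):

    import re

    WHITELISTED_PATHS = [r"/dev/ram\d+", r"/nullb\d+", r"/dev/drbd\d+", r"cas\d+-\d+", r"/dev/dm-\d+"]

    def is_dev_path_whitelisted(path: str) -> bool:
        # re.search() matches anywhere in the string, not only at the start.
        return any(re.search(pattern, path) is not None for pattern in WHITELISTED_PATHS)

    assert is_dev_path_whitelisted("/dev/nullb0")
    assert not is_dev_path_whitelisted("/dev/sda")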
# SPDX-License-Identifier: BSD-3-Clause # @@ -9,6 +10,7 @@ from test_tools.fs_utils import ( create_directory, check_if_symlink_exists, check_if_directory_exists, + check_if_special_block_exist ) from test_utils.filesystem.file import File @@ -76,6 +78,11 @@ class Symlink(File): elif not create: raise FileNotFoundError("Requested symlink does not exist.") + is_special_block = check_if_special_block_exist(link_path) + if is_special_block: + if not target or readlink(link_path) == readlink(target): + return cls(link_path) + is_dir = check_if_directory_exists(link_path) if is_dir: raise IsADirectoryError( From 5e9bba66c2f7aadb1639ccb9e5bbf2e997ea6d94 Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Thu, 14 Dec 2023 03:37:33 +0100 Subject: [PATCH 08/15] test-framework: Update test-framework to work with pytest >= 7.X and python >= 3.7 Signed-off-by: Kamil Gierszewski --- core/pair_testing.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/core/pair_testing.py b/core/pair_testing.py index b7eea73..fb49128 100644 --- a/core/pair_testing.py +++ b/core/pair_testing.py @@ -1,5 +1,6 @@ # # Copyright(c) 2020-2021 Intel Corporation +# Copyright(c) 2024 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # @@ -31,6 +32,7 @@ import random from core.test_run import TestRun + def testcase_id(param_set): if len(param_set.values) == 1: return param_set.values[0] @@ -77,7 +79,6 @@ def register_testcases(metafunc, argnames, argvals): """ from _pytest.python import CallSpec2, _find_parametrized_scope from _pytest.mark import ParameterSet - from _pytest.fixtures import scope2index parameter_sets = [ParameterSet(values=val, marks=[], id=None) for val in argvals] metafunc._validate_if_using_arg_names(argnames, False) @@ -86,21 +87,20 @@ def register_testcases(metafunc, argnames, argvals): ids = [testcase_id(param_set) for param_set in parameter_sets] - scope = _find_parametrized_scope(argnames, metafunc._arg2fixturedefs, False) - scopenum = scope2index(scope, descr=f"parametrizex() call in {metafunc.function.__name__}") + scope_ = _find_parametrized_scope(argnames=argnames, arg2fixturedefs=metafunc._arg2fixturedefs, + indirect=False) calls = [] - for callspec in metafunc._calls or [CallSpec2(metafunc)]: + for callspec in metafunc._calls or [CallSpec2()]: for param_index, (param_id, param_set) in enumerate(zip(ids, parameter_sets)): - newcallspec = callspec.copy() - newcallspec.setmulti2( - arg_value_types, - argnames, - param_set.values, - param_id, - param_set.marks, - scopenum, - param_index, + newcallspec = callspec.setmulti( + valtypes=arg_value_types, + argnames=argnames, + valset=param_set.values, + id=str(param_id), + marks=param_set.marks, + scope=scope_, + param_index=param_index, ) calls.append(newcallspec) From 5051ef1f1a2f47909cec4334601d4d86870beb69 Mon Sep 17 00:00:00 2001 From: Rafal Stefanowski Date: Thu, 4 Apr 2024 11:58:34 +0200 Subject: [PATCH 09/15] test-framework: Fix Python subprocess shell Signed-off-by: Rafal Stefanowski --- connection/local_executor.py | 3 +++ connection/ssh_executor.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/connection/local_executor.py b/connection/local_executor.py index fae9e28..924ebfb 100644 --- a/connection/local_executor.py +++ b/connection/local_executor.py @@ -1,5 +1,6 @@ # # Copyright(c) 2019-2021 Intel Corporation +# Copyright(c) 2024 Huawei Technologies Co., Ltd. 
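The change that follows pins the shell used by subprocess.run() to bash instead of the platform default /bin/sh. A small illustration of the kind of difference this avoids, assuming bash is installed at /bin/bash (the pipefail example is an illustration, not something the patch itself cites):

    import subprocess

    # Under a minimal /bin/sh, bash-only options may fail to parse;
    # pinning the executable keeps the behaviour consistent across hosts.
    result = subprocess.run(
        "set -o pipefail; false | true",
        shell=True,
        executable="/bin/bash",
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    print(result.returncode)  # 1, because pipefail propagates the failing left-hand side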
# SPDX-License-Identifier: BSD-3-Clause # @@ -15,6 +16,7 @@ class LocalExecutor(BaseExecutor): completed_process = subprocess.run( command, shell=True, + executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout.total_seconds()) @@ -40,6 +42,7 @@ class LocalExecutor(BaseExecutor): completed_process = subprocess.run( f'rsync -r {src} {dst} {" ".join(options)}', shell=True, + executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout.total_seconds()) diff --git a/connection/ssh_executor.py b/connection/ssh_executor.py index a05c34f..efd60a5 100644 --- a/connection/ssh_executor.py +++ b/connection/ssh_executor.py @@ -1,5 +1,6 @@ # # Copyright(c) 2019-2021 Intel Corporation +# Copyright(c) 2024 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # import os @@ -118,6 +119,7 @@ class SshExecutor(BaseExecutor): f'-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" ' + src_to_dst + f'{" ".join(options)}', shell=True, + executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout.total_seconds()) @@ -210,6 +212,7 @@ class SshExecutor(BaseExecutor): completed_process = subprocess.run( f"ssh{identity_str} -p {port}{param} {user_str}{hostname} {command}", shell=True, + executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=30) From e0b7c56b1671c08d64e2d292636cc5061d994c6f Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Wed, 15 May 2024 23:12:10 +0200 Subject: [PATCH 10/15] test-framework: fix code style Signed-off-by: Kamil Gierszewski --- connection/local_executor.py | 25 +++++-- connection/ssh_executor.py | 139 ++++++++++++++++++++++------------- 2 files changed, 105 insertions(+), 59 deletions(-) diff --git a/connection/local_executor.py b/connection/local_executor.py index 924ebfb..9a64a19 100644 --- a/connection/local_executor.py +++ b/connection/local_executor.py @@ -19,14 +19,24 @@ class LocalExecutor(BaseExecutor): executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.PIPE, - timeout=timeout.total_seconds()) + timeout=timeout.total_seconds(), + ) - return Output(completed_process.stdout, - completed_process.stderr, - completed_process.returncode) + return Output( + completed_process.stdout, completed_process.stderr, completed_process.returncode + ) - def _rsync(self, src, dst, delete=False, symlinks=False, checksum=False, exclude_list=[], - timeout: timedelta = timedelta(seconds=90), dut_to_controller=False): + def _rsync( + self, + src, + dst, + delete=False, + symlinks=False, + checksum=False, + exclude_list=[], + timeout: timedelta = timedelta(seconds=90), + dut_to_controller=False, + ): options = [] if delete: @@ -45,7 +55,8 @@ class LocalExecutor(BaseExecutor): executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.PIPE, - timeout=timeout.total_seconds()) + timeout=timeout.total_seconds(), + ) if completed_process.returncode: raise Exception(f"rsync failed:\n{completed_process}") diff --git a/connection/ssh_executor.py b/connection/ssh_executor.py index efd60a5..c99c67d 100644 --- a/connection/ssh_executor.py +++ b/connection/ssh_executor.py @@ -28,8 +28,7 @@ class SshExecutor(BaseExecutor): def __del__(self): self.ssh.close() - def connect(self, user=None, port=None, - timeout: timedelta = timedelta(seconds=30)): + def connect(self, user=None, port=None, timeout: timedelta = timedelta(seconds=30)): hostname = self.host user = user or self.user port = port or self.port @@ -37,48 +36,62 @@ class SshExecutor(BaseExecutor): 
config, sock, key_filename = None, None, None # search for 'host' in SSH config try: - path = os.path.expanduser('~/.ssh/config') + path = os.path.expanduser("~/.ssh/config") config = paramiko.SSHConfig.from_path(path) except FileNotFoundError: pass if config is not None: target = config.lookup(self.host) - hostname = target['hostname'] - key_filename = target.get('identityfile', None) - user = target.get('user', user) - port = target.get('port', port) - if target.get('proxyjump', None) is not None: - proxy = config.lookup(target['proxyjump']) + hostname = target["hostname"] + key_filename = target.get("identityfile", None) + user = target.get("user", user) + port = target.get("port", port) + if target.get("proxyjump", None) is not None: + proxy = config.lookup(target["proxyjump"]) jump = paramiko.SSHClient() jump.set_missing_host_key_policy(paramiko.AutoAddPolicy()) try: - jump.connect(proxy['hostname'], username=proxy['user'], - port=int(proxy.get('port', 22)), key_filename=proxy.get('identityfile', None)) + jump.connect( + proxy["hostname"], + username=proxy["user"], + port=int(proxy.get("port", 22)), + key_filename=proxy.get("identityfile", None), + ) transport = jump.get_transport() - local_addr = (proxy['hostname'], int(proxy.get('port', 22))) + local_addr = (proxy["hostname"], int(proxy.get("port", 22))) dest_addr = (hostname, port) sock = transport.open_channel("direct-tcpip", dest_addr, local_addr) except Exception as e: - raise ConnectionError(f"An exception of type '{type(e)}' occurred while trying to " - f"connect to proxy '{proxy['hostname']}'.\n {e}") + raise ConnectionError( + f"An exception of type '{type(e)}' occurred while trying to " + f"connect to proxy '{proxy['hostname']}'.\n {e}" + ) if user is None: TestRun.block("There is no user given in config.") try: - self.ssh.connect(hostname, username=user, - port=port, timeout=timeout.total_seconds(), - banner_timeout=timeout.total_seconds(), - sock=sock, key_filename=key_filename) + self.ssh.connect( + hostname, + username=user, + port=port, + timeout=timeout.total_seconds(), + banner_timeout=timeout.total_seconds(), + sock=sock, + key_filename=key_filename, + ) self.ssh_config = config except paramiko.AuthenticationException as e: raise paramiko.AuthenticationException( f"Authentication exception occurred while trying to connect to DUT. 
" - f"Please check your SSH key-based authentication.\n{e}") + f"Please check your SSH key-based authentication.\n{e}" + ) except (paramiko.SSHException, socket.timeout) as e: - raise ConnectionError(f"An exception of type '{type(e)}' occurred while trying to " - f"connect to {hostname}.\n {e}") + raise ConnectionError( + f"An exception of type '{type(e)}' occurred while trying to " + f"connect to {hostname}.\n {e}" + ) def disconnect(self): try: @@ -88,16 +101,27 @@ class SshExecutor(BaseExecutor): def _execute(self, command, timeout): try: - (stdin, stdout, stderr) = self.ssh.exec_command(command, - timeout=timeout.total_seconds()) + (stdin, stdout, stderr) = self.ssh.exec_command( + command, timeout=timeout.total_seconds() + ) except paramiko.SSHException as e: - raise ConnectionError(f"An exception occurred while executing command '{command}' on" - f" {self.host}\n{e}") + raise ConnectionError( + f"An exception occurred while executing command '{command}' on" f" {self.host}\n{e}" + ) return Output(stdout.read(), stderr.read(), stdout.channel.recv_exit_status()) - def _rsync(self, src, dst, delete=False, symlinks=False, checksum=False, exclude_list=[], - timeout: timedelta = timedelta(seconds=90), dut_to_controller=False): + def _rsync( + self, + src, + dst, + delete=False, + symlinks=False, + checksum=False, + exclude_list=[], + timeout: timedelta = timedelta(seconds=90), + dut_to_controller=False, + ): options = [] if delete: @@ -110,22 +134,29 @@ class SshExecutor(BaseExecutor): for exclude in exclude_list: options.append(f"--exclude {exclude}") - src_to_dst = f"{self.user}@{self.host}:{src} {dst} " if dut_to_controller else\ - f"{src} {self.user}@{self.host}:{dst} " + src_to_dst = ( + f"{self.user}@{self.host}:{src} {dst} " + if dut_to_controller + else f"{src} {self.user}@{self.host}:{dst} " + ) try: completed_process = subprocess.run( f'rsync -r -e "ssh -p {self.port} ' f'-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" ' - + src_to_dst + f'{" ".join(options)}', + + src_to_dst + + f'{" ".join(options)}', shell=True, executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.PIPE, - timeout=timeout.total_seconds()) + timeout=timeout.total_seconds(), + ) except Exception as e: - TestRun.LOGGER.exception(f"Exception occurred during rsync process. " - f"Please check your SSH key-based authentication.\n{e}") + TestRun.LOGGER.exception( + f"Exception occurred during rsync process. 
" + f"Please check your SSH key-based authentication.\n{e}" + ) if completed_process.returncode: raise Exception(f"rsync failed:\n{completed_process}") @@ -147,12 +178,13 @@ class SshExecutor(BaseExecutor): def reboot(self): self.run("reboot") self.wait_for_connection_loss() - self.wait_for_connection(timedelta(seconds=self.reboot_timeout)) \ - if self.reboot_timeout is not None else self.wait_for_connection() + self.wait_for_connection( + timedelta(seconds=self.reboot_timeout) + ) if self.reboot_timeout is not None else self.wait_for_connection() def is_active(self): try: - self.ssh.exec_command('', timeout=5) + self.ssh.exec_command("", timeout=5) return True except Exception: return False @@ -184,28 +216,28 @@ class SshExecutor(BaseExecutor): def resolve_ip_address(self): user, hostname, port = self.user, self.host, self.port key_file = None - pattern = br"^Authenticated to.+\[(\d+\.\d+\.\d+\.\d+)].*$" + pattern = rb"^Authenticated to.+\[(\d+\.\d+\.\d+\.\d+)].*$" param, command = " -v", "''" try: if self.ssh_config: host = self.ssh_config.lookup(self.host) - if re.fullmatch(r"^\d+\.\d+\.\d+\.\d+$", host['hostname']): - return host['hostname'] + if re.fullmatch(r"^\d+\.\d+\.\d+\.\d+$", host["hostname"]): + return host["hostname"] - if host.get('proxyjump', None) is not None: - proxy = self.ssh_config.lookup(host['proxyjump']) + if host.get("proxyjump", None) is not None: + proxy = self.ssh_config.lookup(host["proxyjump"]) - user = proxy.get('user', user) - hostname = proxy['hostname'] - port = proxy.get('port', port) - key_file = proxy.get('identityfile', key_file) + user = proxy.get("user", user) + hostname = proxy["hostname"] + port = proxy.get("port", port) + key_file = proxy.get("identityfile", key_file) command = f"nslookup {host['hostname']}" - pattern = br"^Address:\s+(\d+\.\d+\.\d+\.\d+)\s*$" + pattern = rb"^Address:\s+(\d+\.\d+\.\d+\.\d+)\s*$" param = "" else: - user = host.get('user', user) - port = host.get('port', port) - key_file = host.get('identityfile', key_file) + user = host.get("user", user) + port = host.get("port", port) + key_file = host.get("identityfile", key_file) user_str = f"{user}@" identity_str = f" -i {os.path.abspath(key_file[0])}" if key_file else "" @@ -215,8 +247,11 @@ class SshExecutor(BaseExecutor): executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.PIPE, - timeout=30) - matches = re.findall(pattern, completed_process.stdout + completed_process.stderr, re.MULTILINE) - return matches[-1].decode('utf-8') + timeout=30, + ) + matches = re.findall( + pattern, completed_process.stdout + completed_process.stderr, re.MULTILINE + ) + return matches[-1].decode("utf-8") except: return None From 0a82b7a3c547fb9f9fc8c9846cce20c17c268714 Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Wed, 15 May 2024 23:42:31 +0200 Subject: [PATCH 11/15] test-framework: add bash_path to fix LocalRun on windows Signed-off-by: Kamil Gierszewski --- connection/local_executor.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/connection/local_executor.py b/connection/local_executor.py index 9a64a19..4bafeb0 100644 --- a/connection/local_executor.py +++ b/connection/local_executor.py @@ -8,15 +8,18 @@ import subprocess from datetime import timedelta from connection.base_executor import BaseExecutor +from core.test_run import TestRun from test_utils.output import Output class LocalExecutor(BaseExecutor): def _execute(self, command, timeout): + bash_path = TestRun.config.get("bash_path", "/bin/bash") + completed_process = subprocess.run( 
command, shell=True, - executable="/bin/bash", + executable=bash_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout.total_seconds(), @@ -38,6 +41,7 @@ class LocalExecutor(BaseExecutor): dut_to_controller=False, ): options = [] + bash_path = TestRun.config.get("bash_path", "/bin/bash") if delete: options.append("--delete") @@ -52,7 +56,7 @@ class LocalExecutor(BaseExecutor): completed_process = subprocess.run( f'rsync -r {src} {dst} {" ".join(options)}', shell=True, - executable="/bin/bash", + executable=bash_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout.total_seconds(), From db06ac9d3ca5bef8096ac65041bc45f92f2eac72 Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Thu, 11 Apr 2024 03:30:31 +0200 Subject: [PATCH 12/15] test-framework: update plugins to work with TIO project + fix plugins load Signed-off-by: Kamil Gierszewski --- core/test_run_utils.py | 5 +- .../power_control_libvirt/__init__.py | 47 ++++++++++---- internal_plugins/vdbench/__init__.py | 7 +- test_utils/dut.py | 65 ++++++++++++++----- 4 files changed, 88 insertions(+), 36 deletions(-) diff --git a/core/test_run_utils.py b/core/test_run_utils.py index 889c490..3871bab 100644 --- a/core/test_run_utils.py +++ b/core/test_run_utils.py @@ -1,5 +1,6 @@ # # Copyright(c) 2019-2021 Intel Corporation +# Copyright(c) 2023-2024 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # @@ -127,8 +128,6 @@ TestRun.__setup_disks = __setup_disks @classmethod def __presetup(cls): - cls.plugin_manager = PluginManager(cls.item, cls.config) - cls.plugin_manager.hook_pre_setup() if cls.config['type'] == 'ssh': try: @@ -151,6 +150,8 @@ def __presetup(cls): cls.executor = LocalExecutor() else: TestRun.block("Execution type (local/ssh) is missing in DUT config!") + cls.plugin_manager = PluginManager(cls.item, cls.config) + cls.plugin_manager.hook_pre_setup() TestRun.presetup = __presetup diff --git a/internal_plugins/power_control_libvirt/__init__.py b/internal_plugins/power_control_libvirt/__init__.py index ca2993e..2823c7f 100644 --- a/internal_plugins/power_control_libvirt/__init__.py +++ b/internal_plugins/power_control_libvirt/__init__.py @@ -1,31 +1,42 @@ # # Copyright(c) 2020-2021 Intel Corporation +# Copyright(c) 2023-2024 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # + from datetime import timedelta from connection.local_executor import LocalExecutor from connection.ssh_executor import SshExecutor from core.test_run import TestRun +DEFAULT_REBOOT_TIMEOUT = 60 + class PowerControlPlugin: def __init__(self, params, config): print("Power Control LibVirt Plugin initialization") try: - self.ip = config['ip'] - self.user = config['user'] - except Exception: - raise Exception("Missing fields in config! ('ip' and 'user' required)") + self.host = config["host"] + self.user = config["user"] + self.connection_type = config["connection_type"] + self.port = config.get("port", 22) + + except AttributeError: + raise ( + "Missing fields in config! 
('host','user','connection_type','vm_name' " + "are required fields)" + ) def pre_setup(self): print("Power Control LibVirt Plugin pre setup") - if self.config['connection_type'] == 'ssh': + if self.connection_type == "ssh": self.executor = SshExecutor( - self.ip, + self.host, self.user, - self.config.get('port', 22) + self.port, ) + self.executor.connect() else: self.executor = LocalExecutor() @@ -36,13 +47,21 @@ class PowerControlPlugin: pass def power_cycle(self): - self.executor.run(f"virsh reset {self.config['domain']}") - TestRun.executor.wait_for_connection_loss() - timeout = TestRun.config.get('reboot_timeout') - if timeout: - TestRun.executor.wait_for_connection(timedelta(seconds=int(timeout))) - else: - TestRun.executor.wait_for_connection() + self.executor.run_expect_success(f"sudo virsh reset {TestRun.dut.virsh['vm_name']}") + TestRun.executor.disconnect() + TestRun.executor.wait_for_connection(timedelta(seconds=TestRun.dut.virsh["reboot_timeout"])) + + def check_if_vm_exists(self, vm_name) -> bool: + return self.executor.run(f"sudo virsh list|grep -w {vm_name}").exit_code == 0 + + def parse_virsh_config(self, vm_name, reboot_timeout=DEFAULT_REBOOT_TIMEOUT) -> dict | None: + if not self.check_if_vm_exists(vm_name=vm_name): + raise ValueError(f"Virsh power plugin error: couldn't find VM {vm_name} on host " + f"{self.host}") + return { + "vm_name": vm_name, + "reboot_timeout": reboot_timeout, + } plugin_class = PowerControlPlugin diff --git a/internal_plugins/vdbench/__init__.py b/internal_plugins/vdbench/__init__.py index d0f7f71..ecf8d1a 100644 --- a/internal_plugins/vdbench/__init__.py +++ b/internal_plugins/vdbench/__init__.py @@ -1,5 +1,6 @@ # # Copyright(c) 2020-2021 Intel Corporation +# Copyright(c) 2023-2024 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # @@ -39,9 +40,9 @@ class Vdbench: fs_utils.create_directory(self.working_dir) TestRun.LOGGER.info("Copying vdbench to working dir.") - fs_utils.copy(posixpath.join(self.source_dir, "*"), self.working_dir, - True, True) - pass + fs_utils.copy( + source=self.source_dir, destination=self.working_dir, force=True, recursive=True + ) def teardown(self): pass diff --git a/test_utils/dut.py b/test_utils/dut.py index 85cd184..0eb0a5a 100644 --- a/test_utils/dut.py +++ b/test_utils/dut.py @@ -1,5 +1,6 @@ # # Copyright(c) 2019-2021 Intel Corporation +# Copyright(c) 2023-2024 Huawei Technologies Co., Ltd. 
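A hypothetical wiring example for the reworked plugin, showing how the settings returned by parse_virsh_config() are later consumed by power_cycle(); the VM name and timeout are made-up values, and the get_plugin() call mirrors the dut.py change later in this patch:

    from core.test_run import TestRun

    power_plugin = TestRun.plugin_manager.get_plugin("power_control")
    # Validates that the VM exists on the virtualisation host and returns
    # the dict that power_cycle() reads back from TestRun.dut.virsh.
    TestRun.dut.virsh = power_plugin.parse_virsh_config(vm_name="dut-vm", reboot_timeout=90)
    power_plugin.power_cycle()  # "sudo virsh reset dut-vm", then wait up to 90 s to reconnect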
# SPDX-License-Identifier: BSD-3-Clause # @@ -9,27 +10,43 @@ from storage_devices.disk import Disk, DiskType class Dut: def __init__(self, dut_info): self.config = dut_info - self.disks = [] - for disk_info in dut_info.get('disks', []): - self.disks.append(Disk.create_disk(disk_info['path'], - DiskType[disk_info['type']], - disk_info['serial'], - disk_info['blocksize'])) + self.disks = [ + Disk.create_disk( + disk_info["path"], + DiskType[disk_info["type"]], + disk_info["serial"], + disk_info["blocksize"], + ) + for disk_info in dut_info.get("disks", []) + ] + self.disks.sort(key=lambda disk: disk.disk_type, reverse=True) - self.ipmi = dut_info['ipmi'] if 'ipmi' in dut_info else None - self.spider = dut_info['spider'] if 'spider' in dut_info else None - self.wps = dut_info['wps'] if 'wps' in dut_info else None - self.env = dut_info['env'] if 'env' in dut_info else None - self.ip = dut_info['ip'] if 'ip' in dut_info else None + self.ipmi = dut_info.get("ipmi") + self.spider = dut_info.get("spider") + self.wps = dut_info.get("wps") + self.env = dut_info.get("env") + self.ip = dut_info.get("ip") + self.virsh = self.__parse_virsh_config(dut_info) def __str__(self): - dut_str = f'ip: {self.ip}\n' - dut_str += f'ipmi: {self.ipmi["ip"]}\n' if self.ipmi is not None else '' - dut_str += f'spider: {self.spider["ip"]}\n' if self.spider is not None else '' - dut_str += f'wps: {self.wps["ip"]} port: {self.wps["port"]}\n' \ - if self.wps is not None else '' - dut_str += f'disks:\n' + dut_str = f"ip: {self.ip}\n" + dut_str += f'ipmi: {self.ipmi["ip"]}\n' if self.ipmi is not None else "" + dut_str += f'spider: {self.spider["ip"]}\n' if self.spider is not None else "" + dut_str += ( + f'wps: {self.wps["ip"]} port: {self.wps["port"]}\n' if self.wps is not None else "" + ) + dut_str += ( + f'virsh.vm_name: {self.virsh["vm_name"]}\n' + if (self.virsh is not None) + else "" + ) + dut_str += ( + f'virsh.reboot_timeout: {self.virsh["reboot_timeout"]}\n' + if (self.virsh is not None) + else "" + ) + dut_str += f"disks:\n" for disk in self.disks: dut_str += f"\t{disk}" dut_str += "\n" @@ -41,3 +58,17 @@ class Dut: if d.disk_type == disk_type: ret_list.append(d) return ret_list + + @staticmethod + def __parse_virsh_config(dut_info) -> dict | None: + from core.test_run import TestRun + if "power_control" not in TestRun.plugin_manager.req_plugins.keys(): + return None + try: + virsh_controller = TestRun.plugin_manager.get_plugin("power_control") + return virsh_controller.parse_virsh_config( + vm_name=dut_info["vm_name"], reboot_timeout=dut_info.get("reboot_timeout") + ) + except NameError: + return None + From 0cd936ee7292428fd6c3d9cac41ae3b7473a6bc4 Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Thu, 11 Apr 2024 03:38:43 +0200 Subject: [PATCH 13/15] test-framework: fix blktrace,file copy,trim support detection Signed-off-by: Kamil Gierszewski --- storage_devices/device.py | 1 + test_tools/blktrace.py | 7 +++++++ test_tools/disk_utils.py | 10 +++++++--- test_tools/fs_utils.py | 4 ++-- 4 files changed, 17 insertions(+), 5 deletions(-) diff --git a/storage_devices/device.py b/storage_devices/device.py index 566f403..b0009b5 100644 --- a/storage_devices/device.py +++ b/storage_devices/device.py @@ -1,5 +1,6 @@ # # Copyright(c) 2019-2022 Intel Corporation +# Copyright(c) 2023-2024 Huawei Technologies Co., Ltd. 
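For context, an illustrative dut_info dictionary accepted by the Dut constructor updated above; every value here is made up, and the disk type must be a valid DiskType member on the target framework:

    dut_info = {
        "ip": "192.168.122.15",
        "vm_name": "dut-vm",        # picked up by the power_control plugin, if one is loaded
        "reboot_timeout": 90,
        "disks": [
            {"path": "/dev/vdb", "type": "hdd", "serial": "0001", "blocksize": 512},
        ],
    }
    dut = Dut(dut_info)  # disks are created via Disk.create_disk() and sorted by type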
# SPDX-License-Identifier: BSD-3-Clause # import posixpath diff --git a/test_tools/blktrace.py b/test_tools/blktrace.py index c9c5653..494cd0b 100644 --- a/test_tools/blktrace.py +++ b/test_tools/blktrace.py @@ -1,8 +1,11 @@ # # Copyright(c) 2019-2022 Intel Corporation +# Copyright(c) 2023-2024 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # + import math +import time from aenum import IntFlag, Enum @@ -133,6 +136,10 @@ class BlkTrace: drop_caches(DropCachesMode.ALL) TestRun.executor.run_expect_success(f"kill -s SIGINT {self.blktrace_pid}") + + time.sleep(3) + if TestRun.executor.check_if_process_exists(self.blktrace_pid): + TestRun.fail("blktrace monitoring for device is still active") self.blktrace_pid = -1 # dummy command for swallowing output of killed command diff --git a/test_tools/disk_utils.py b/test_tools/disk_utils.py index 9092900..56edd2a 100644 --- a/test_tools/disk_utils.py +++ b/test_tools/disk_utils.py @@ -1,6 +1,6 @@ # # Copyright(c) 2019-2022 Intel Corporation -# Copyright(c) 2023 Huawei Technologies Co., Ltd. +# Copyright(c) 2023-2024 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # @@ -341,9 +341,13 @@ def wipe_filesystem(device, force=True): def check_if_device_supports_trim(device): if device.get_device_id().startswith("nvme"): return True + command_output = TestRun.executor.run(f'hdparm -I {device.path} | grep "TRIM supported"') + if command_output.exit_code == 0: + return True command_output = TestRun.executor.run( - f'hdparm -I {device.path} | grep "TRIM supported"') - return command_output.exit_code == 0 + f"lsblk -dn {device.path} -o DISC-MAX | grep -o \'[0-9]\\+\'" + ) + return int(command_output.stdout) > 0 def get_device_filesystem_type(device_id): diff --git a/test_tools/fs_utils.py b/test_tools/fs_utils.py index e620bfd..8c2a5fb 100644 --- a/test_tools/fs_utils.py +++ b/test_tools/fs_utils.py @@ -1,6 +1,6 @@ # # Copyright(c) 2019-2022 Intel Corporation -# Copyright(c) 2023 Huawei Technologies Co., Ltd. +# Copyright(c) 2023-2024 Huawei Technologies Co., Ltd. # SPDX-License-Identifier: BSD-3-Clause # @@ -116,7 +116,7 @@ def copy(source: str, cmd = f"cp{' --force' if force else ''}" \ f"{' --recursive' if recursive else ''}" \ f"{' --dereference' if dereference else ''} " \ - f"\"{source}\" \"{destination}\"" + f"{source} {destination}" return TestRun.executor.run_expect_success(cmd) From 078f0f4f36462b08ed1b70ace09f151e91210490 Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Mon, 10 Jun 2024 10:23:55 +0200 Subject: [PATCH 14/15] test-framework: simplify return Signed-off-by: Kamil Gierszewski --- connection/base_executor.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/connection/base_executor.py b/connection/base_executor.py index 40564a3..3f5f02a 100644 --- a/connection/base_executor.py +++ b/connection/base_executor.py @@ -1,5 +1,6 @@ # # Copyright(c) 2019-2021 Intel Corporation +# Copyright(c) 2023-2024 Huawei Technologies Co., Ltd. 
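The lsblk fallback added in the previous patch treats a non-zero DISC-MAX as evidence that the device accepts discard/TRIM requests. A standalone sketch of the same probe for a single device (the device path is only an example):

    from core.test_run import TestRun

    # grep -o strips the unit suffix from lsblk's human-readable DISC-MAX
    # value ("0B", "2T", ...), leaving only the leading digits.
    output = TestRun.executor.run("lsblk -dn /dev/vdb -o DISC-MAX | grep -o '[0-9]\\+'")
    trim_supported = int(output.stdout) > 0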
# SPDX-License-Identifier: BSD-3-Clause # @@ -63,7 +64,7 @@ class BaseExecutor: def check_if_process_exists(self, pid: int): output = self.run(f"ps aux | awk '{{print $2 }}' | grep ^{pid}$", timedelta(seconds=10)) - return True if output.exit_code == 0 else False + return output.exit_code == 0 def kill_process(self, pid: int): # TERM signal should be used in preference to the KILL signal, since a From 0f36982b4c6975b5734937fafad0d61c9b015c0a Mon Sep 17 00:00:00 2001 From: Kamil Gierszewski Date: Wed, 19 Jun 2024 14:55:39 +0200 Subject: [PATCH 15/15] test-framework: separate dmesg from api Signed-off-by: Kamil Gierszewski --- test_utils/dmesg.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 test_utils/dmesg.py diff --git a/test_utils/dmesg.py b/test_utils/dmesg.py new file mode 100644 index 0000000..f5ce808 --- /dev/null +++ b/test_utils/dmesg.py @@ -0,0 +1,15 @@ +# +# Copyright(c) 2024 Huawei Technologies Co., Ltd. +# SPDX-License-Identifier: BSD-3-Clause +# + +from core.test_run import TestRun +from test_utils.output import Output + + +def get_dmesg() -> str: + return TestRun.executor.run("dmesg").stdout + + +def clear_dmesg() -> Output: + return TestRun.executor.run("dmesg -C")
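A short usage sketch for the new dmesg helpers; the string being searched for is only an example:

    from core.test_run import TestRun
    from test_utils.dmesg import clear_dmesg, get_dmesg

    clear_dmesg()                   # start the test with an empty kernel ring buffer
    # ... exercise the device under test ...
    if "I/O error" in get_dmesg():  # inspect messages emitted while the test ran
        TestRun.fail("Unexpected I/O errors found in dmesg")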