Refactor/update existing tests in test_ioclass_stats.py

Signed-off-by: Daniel Madej <daniel.madej@intel.com>
This commit is contained in:
Daniel Madej 2020-01-20 17:36:01 +01:00
parent f31587fc9d
commit 8d0650ecb3

View File

@ -1,5 +1,5 @@
#
# Copyright(c) 2019-2020 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause-Clear
#
@ -7,105 +7,145 @@
import pytest
from api.cas import casadm
from api.cas import casadm_parser
from api.cas import ioclass_config
from api.cas.cache_config import CleaningPolicy
from api.cas.casadm import StatsFilter
from api.cas.cli_messages import (
check_stderr_msg,
get_stats_ioclass_id_not_configured,
get_stats_ioclass_id_out_of_range
)
from core.test_run import TestRun
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from test_tools.disk_utils import Filesystem
from test_utils.filesystem.file import File
from test_utils.os_utils import sync, Udev
from test_utils.output import CmdException
from test_utils.size import Size, Unit
# NOTE(review): looks like pre-commit residue — the refactored tests load classes
# from ioclass_config.default_config_file_path instead; confirm against full file.
ioclass_config_path = "/tmp/opencas_ioclass.conf"
# Shorthand alias for the IO class descriptor used to build config files.
IoClass = ioclass_config.IoClass
# Mountpoint for the exported CAS device (cache 1, core 1).
mountpoint = "/tmp/cas1-1"
# Single cache instance used by every test in this module.
cache_id = 1
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_stats_basic():
    """
    title: Basic test for retrieving IO class statistics.
    description: |
        Check if statistics are retrieved only for configured IO classes.
    pass_criteria:
      - Statistics are retrieved for configured IO classes.
      - Error is displayed when retrieving statistics for non-configured IO class.
      - Error is displayed when retrieving statistics for out of range IO class id.
    """
    # Configured class ids occupy [11, 21); ids outside this range (except the
    # default class 0) must be rejected by casadm.
    min_ioclass_id = 11
    max_ioclass_id = 21

    with TestRun.step("Test prepare"):
        prepare()

    with TestRun.step("Prepare IO class config file"):
        ioclass_list = []
        for class_id in range(min_ioclass_id, max_ioclass_id):
            ioclass_list.append(IoClass(
                class_id=class_id,
                rule=f"file_size:le:{4096 * class_id}&done",
                priority=22
            ))
        IoClass.save_list_to_config_file(ioclass_list, True)

    with TestRun.step("Load IO class config file"):
        casadm.load_io_classes(cache_id, file=ioclass_config.default_config_file_path)

    with TestRun.step("Try retrieving IO class stats for all allowed id values "
                      "and one out of range id"):
        # Iterate one id past MAX_IO_CLASS_ID to also cover the out-of-range error path.
        for class_id in range(ioclass_config.MAX_IO_CLASS_ID + 2):
            out_of_range = " out of range" if class_id > ioclass_config.MAX_IO_CLASS_ID else ""
            with TestRun.group(f"Checking{out_of_range} IO class id {class_id}..."):
                # Stats are expected only for the default class (0) and configured ids.
                expected = class_id == 0 or class_id in range(min_ioclass_id, max_ioclass_id)
                try:
                    casadm.print_statistics(
                        cache_id=cache_id,
                        io_class_id=class_id,
                        per_io_class=True)
                    if not expected:
                        TestRun.LOGGER.error(
                            f"Stats retrieved for not configured IO class {class_id}")
                except CmdException as e:
                    if expected:
                        TestRun.LOGGER.error(f"Stats not retrieved for IO class id: {class_id}")
                    elif class_id <= ioclass_config.MAX_IO_CLASS_ID:
                        # In-range but unconfigured id: expect the "not configured" message.
                        if not check_stderr_msg(e.output, get_stats_ioclass_id_not_configured):
                            TestRun.LOGGER.error(
                                f"Wrong message for unused IO class id: {class_id}")
                    elif not check_stderr_msg(e.output, get_stats_ioclass_id_out_of_range):
                        TestRun.LOGGER.error(
                            f"Wrong message for out of range IO class id: {class_id}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_stats_sum():
    """
    title: Test for sum of IO class statistics.
    description: |
        Check if statistics for configured IO classes sum up to cache/core statistics.
    pass_criteria:
      - Per class cache IO class statistics sum up to cache statistics.
      - Per class core IO class statistics sum up to core statistics.
    """
    min_ioclass_id = 1
    max_ioclass_id = 11
    # Base file size granularity: one 4 KiB block.
    file_size_base = Unit.Blocks4096.value

    with TestRun.step("Test prepare"):
        cache, core = prepare()

    with TestRun.step("Prepare IO class config file"):
        ioclass_list = []
        for class_id in range(min_ioclass_id, max_ioclass_id):
            ioclass_list.append(IoClass(
                class_id=class_id,
                rule=f"file_size:le:{file_size_base * class_id}&done",
                priority=22
            ))
        IoClass.save_list_to_config_file(ioclass_list, True)

    with TestRun.step("Load IO class config file"):
        cache.load_io_class(ioclass_config.default_config_file_path)

    with TestRun.step("Generate files with particular sizes in temporary folder"):
        files_list = []
        for class_id in range(min_ioclass_id, max_ioclass_id):
            path = f"/tmp/test_file_{file_size_base * class_id}"
            File.create_file(path)
            f = File(path)
            f.padding(Size(file_size_base * class_id, Unit.Byte))
            files_list.append(f)

    with TestRun.step("Copy files to mounted core"):
        core.mount(mountpoint)
        for f in files_list:
            TestRun.LOGGER.info(f"Copying file {f.name} to mounted core")
            f.copy(mountpoint)
            sync()
        # To prevent stats pollution by filesystem requests, umount core device
        # after files are copied
        core.unmount()
        sync()

    with TestRun.step("Check if per class cache IO class statistics sum up to cache statistics"):
        # Name of stats, which should not be compared
        not_compare_stats = ["clean", "occupancy"]
        ioclass_id_list = list(range(min_ioclass_id, max_ioclass_id))
        # Append default IO class id
        ioclass_id_list.append(0)

        cache_stats = cache.get_statistics_flat(
            stat_filter=[StatsFilter.usage, StatsFilter.req, StatsFilter.blk]
        )
        # NOTE(review): this subtraction loop was hidden behind a diff hunk marker in
        # the paste; reconstructed by symmetry with the fully visible core check below.
        for ioclass_id in ioclass_id_list:
            ioclass_stats = cache.get_statistics_flat(
                stat_filter=[StatsFilter.usage, StatsFilter.req, StatsFilter.blk],
                io_class_id=ioclass_id,
            )
            for stat_name in cache_stats:
                if stat_name in not_compare_stats:
                    continue
                cache_stats[stat_name] -= ioclass_stats[stat_name]
        # After subtracting every per-class value, each remaining stat should be 0.
        for stat_name in cache_stats:
            if stat_name in not_compare_stats:
                continue
            stat_val = (
                cache_stats[stat_name].get_value()
                if isinstance(cache_stats[stat_name], Size)
                else cache_stats[stat_name]
            )
            if stat_val != 0:
                TestRun.LOGGER.error(f"{stat_name} diverged for cache!\n")

    with TestRun.step("Check if per class core IO class statistics sum up to core statistics"):
        core_stats = core.get_statistics_flat(
            stat_filter=[StatsFilter.usage, StatsFilter.req, StatsFilter.blk]
        )
        for ioclass_id in ioclass_id_list:
            ioclass_stats = core.get_statistics_flat(
                stat_filter=[StatsFilter.usage, StatsFilter.req, StatsFilter.blk],
                io_class_id=ioclass_id,
            )
            for stat_name in core_stats:
                if stat_name in not_compare_stats:
                    continue
                core_stats[stat_name] -= ioclass_stats[stat_name]
        for stat_name in core_stats:
            if stat_name in not_compare_stats:
                continue
            stat_val = (
                core_stats[stat_name].get_value()
                if isinstance(core_stats[stat_name], Size)
                else core_stats[stat_name]
            )
            if stat_val != 0:
                TestRun.LOGGER.error(f"{stat_name} diverged for core!\n")

    with TestRun.step("Test cleanup"):
        for f in files_list:
            f.remove()
def prepare():
    """Partition disks, start a cache with NOP cleaning, add one core and
    create the mountpoint directory. Returns the (cache, core) pair."""
    ioclass_config.remove_ioclass_config()
    cache_device = TestRun.disks['cache']
    core_device = TestRun.disks['core']

    # TODO(review): the cache partition size was hidden behind a diff hunk marker
    # in the paste — confirm the exact size against the full file.
    cache_device.create_partitions([Size(500, Unit.MebiByte)])
    core_device.create_partitions([Size(2, Unit.GibiByte)])

    cache_device = cache_device.partitions[0]
    core_device = core_device.partitions[0]

    core_device.create_filesystem(Filesystem.ext4)

    # Disable udev so background scans do not pollute cache statistics.
    Udev.disable()
    TestRun.LOGGER.info(f"Starting cache")
    cache = casadm.start_cache(cache_device, force=True)
    # NOP cleaning keeps the flusher from changing stats behind the tests' back.
    TestRun.LOGGER.info(f"Setting cleaning policy to NOP")
    cache.set_cleaning_policy(CleaningPolicy.nop)
    TestRun.LOGGER.info(f"Adding core device")
    core = cache.add_core(core_dev=core_device)

    TestRun.executor.run_expect_success(f"mkdir -p {mountpoint}")

    return cache, core