commit ed6cf8a76b
@@ -3,12 +3,9 @@
 # SPDX-License-Identifier: BSD-3-Clause-Clear
 #
 
-from api.cas.cache_config import *
-from api.cas.casadm_params import *
 from api.cas.casadm_parser import *
 from api.cas.cli import *
 from api.cas.statistics import CacheStats, IoClassStats
-from core.test_run import TestRun
 from storage_devices.device import Device
 from test_utils.os_utils import *
 
@@ -108,13 +105,10 @@ class Cache:
                                stat_filter, percentage_val)
         return CacheStats(stats)
 
-    # TODO: Get rid of this method below by tuning 'stats' and 'io_class' tests
-    # to utilize new statistics API with method above.
-
-    def get_statistics_deprecated(self,
-                                  io_class_id: int = None,
-                                  stat_filter: List[StatsFilter] = None,
-                                  percentage_val: bool = False):
+    def get_statistics_flat(self,
+                            io_class_id: int = None,
+                            stat_filter: List[StatsFilter] = None,
+                            percentage_val: bool = False):
         return get_statistics(self.cache_id, None, io_class_id,
                               stat_filter, percentage_val)
 
@@ -4,9 +4,6 @@
 #
 
 
-from datetime import timedelta
-
-from api.cas.cache import Device
 from api.cas.casadm_parser import *
 from api.cas.cli import *
 from api.cas.statistics import CoreStats, IoClassStats
@@ -67,13 +64,10 @@ class Core(Device):
                                stat_filter, percentage_val)
         return CoreStats(stats)
 
-    # TODO: Get rid of this method below by tuning 'stats' and 'io_class' tests
-    # to utilize new statistics API with method above.
-
-    def get_statistics_deprecated(self,
-                                  io_class_id: int = None,
-                                  stat_filter: List[StatsFilter] = None,
-                                  percentage_val: bool = False):
+    def get_statistics_flat(self,
+                            io_class_id: int = None,
+                            stat_filter: List[StatsFilter] = None,
+                            percentage_val: bool = False):
         return get_statistics(self.cache_id, self.core_id, io_class_id,
                               stat_filter, percentage_val)
 
@@ -8,13 +8,13 @@ from datetime import datetime
 
 import pytest
 
+from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
 from test_tools import fs_utils
 from test_tools.dd import Dd
 from test_tools.disk_utils import Filesystem
 from test_utils.filesystem.directory import Directory
 from test_utils.filesystem.file import File
 from test_utils.os_utils import drop_caches, DropCachesMode, sync, Udev
-from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
 from .io_class_common import *
 
 
@@ -72,7 +72,7 @@ def test_ioclass_directory_depth(filesystem):
     )
     casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
 
-    base_occupancy = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    base_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     TestRun.LOGGER.info("Reading the file in the nested directory")
     dd = (
         Dd()
@@ -82,7 +82,7 @@ def test_ioclass_directory_depth(filesystem):
     )
     dd.run()
 
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     assert new_occupancy == base_occupancy + test_file_1.size, \
         "Wrong occupancy after reading file!\n" \
         f"Expected: {base_occupancy + test_file_1.size}, actual: {new_occupancy}"
@@ -103,7 +103,7 @@ def test_ioclass_directory_depth(filesystem):
     drop_caches(DropCachesMode.ALL)
     test_file_2.refresh_item()
 
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     assert new_occupancy == base_occupancy + test_file_2.size, \
         "Wrong occupancy after creating file!\n" \
         f"Expected: {base_occupancy + test_file_2.size}, actual: {new_occupancy}"
@@ -122,7 +122,8 @@ def test_ioclass_directory_dir_operations(filesystem):
     """
     def create_files_with_classification_delay_check(directory: Directory, ioclass_id: int):
         start_time = datetime.now()
-        occupancy_after = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+        occupancy_after = cache.get_io_class_statistics(
+            io_class_id=ioclass_id).usage_stats.occupancy
         dd_blocks = 10
         dd_size = Size(dd_blocks, Unit.Blocks4096)
         file_counter = 0
@@ -135,7 +136,8 @@ def test_ioclass_directory_dir_operations(filesystem):
             time_from_start = datetime.now() - start_time
             (Dd().input("/dev/zero").output(file_path).oflag("sync")
              .block_size(Size(1, Unit.Blocks4096)).count(dd_blocks).run())
-            occupancy_after = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+            occupancy_after = cache.get_io_class_statistics(
+                io_class_id=ioclass_id).usage_stats.occupancy
             if occupancy_after - occupancy_before < dd_size:
                 unclassified_files.append(file_path)
 
@@ -151,10 +153,10 @@ def test_ioclass_directory_dir_operations(filesystem):
     def read_files_with_reclassification_check(
             target_ioclass_id: int, source_ioclass_id: int, directory: Directory, with_delay: bool):
         start_time = datetime.now()
-        target_occupancy_after = cache.get_statistics_deprecated(
-            io_class_id=target_ioclass_id)["occupancy"]
-        source_occupancy_after = cache.get_statistics_deprecated(
-            io_class_id=source_ioclass_id)["occupancy"]
+        target_occupancy_after = cache.get_io_class_statistics(
+            io_class_id=target_ioclass_id).usage_stats.occupancy
+        source_occupancy_after = cache.get_io_class_statistics(
+            io_class_id=source_ioclass_id).usage_stats.occupancy
         unclassified_files = []
 
         for file in [item for item in directory.ls() if isinstance(item, File)]:
@@ -163,10 +165,10 @@ def test_ioclass_directory_dir_operations(filesystem):
             time_from_start = datetime.now() - start_time
             (Dd().input(file.full_path).output("/dev/null")
              .block_size(Size(1, Unit.Blocks4096)).run())
-            target_occupancy_after = cache.get_statistics_deprecated(
-                io_class_id=target_ioclass_id)["occupancy"]
-            source_occupancy_after = cache.get_statistics_deprecated(
-                io_class_id=source_ioclass_id)["occupancy"]
+            target_occupancy_after = cache.get_io_class_statistics(
+                io_class_id=target_ioclass_id).usage_stats.occupancy
+            source_occupancy_after = cache.get_io_class_statistics(
+                io_class_id=source_ioclass_id).usage_stats.occupancy
             if target_occupancy_after < target_occupancy_before:
                 pytest.xfail("Target IO class occupancy lowered!")
             elif target_occupancy_after - target_occupancy_before < file.size:
@@ -323,7 +325,8 @@ def test_ioclass_directory_file_operations(filesystem):
     drop_caches(DropCachesMode.ALL)
 
     TestRun.LOGGER.info("Creating test file")
-    classified_before = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    classified_before = cache.get_io_class_statistics(
+        io_class_id=ioclass_id).usage_stats.occupancy
     file_path = f"{test_dir_path}/test_file"
     (Dd().input("/dev/urandom").output(file_path).oflag("sync")
      .block_size(Size(1, Unit.MebiByte)).count(dd_blocks).run())
@@ -332,21 +335,22 @@ def test_ioclass_directory_file_operations(filesystem):
     test_file = File(file_path).refresh_item()
 
     TestRun.LOGGER.info("Checking classified occupancy")
-    classified_after = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    classified_after = cache.get_io_class_statistics(
+        io_class_id=ioclass_id).usage_stats.occupancy
     check_occupancy(classified_before + test_file.size, classified_after)
 
     TestRun.LOGGER.info("Moving test file out of classified directory")
     classified_before = classified_after
-    non_classified_before = cache.get_statistics_deprecated(io_class_id=0)["occupancy"]
+    non_classified_before = cache.get_io_class_statistics(io_class_id=0).usage_stats.occupancy
     test_file.move(destination=mountpoint)
     sync()
     drop_caches(DropCachesMode.ALL)
 
     TestRun.LOGGER.info("Checking classified occupancy")
-    classified_after = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    classified_after = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     check_occupancy(classified_before, classified_after)
     TestRun.LOGGER.info("Checking non-classified occupancy")
-    non_classified_after = cache.get_statistics_deprecated(io_class_id=0)["occupancy"]
+    non_classified_after = cache.get_io_class_statistics(io_class_id=0).usage_stats.occupancy
     check_occupancy(non_classified_before, non_classified_after)
 
     TestRun.LOGGER.info("Reading test file")
@@ -356,10 +360,10 @@ def test_ioclass_directory_file_operations(filesystem):
      .block_size(Size(1, Unit.MebiByte)).run())
 
     TestRun.LOGGER.info("Checking classified occupancy")
-    classified_after = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    classified_after = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     check_occupancy(classified_before - test_file.size, classified_after)
     TestRun.LOGGER.info("Checking non-classified occupancy")
-    non_classified_after = cache.get_statistics_deprecated(io_class_id=0)["occupancy"]
+    non_classified_after = cache.get_io_class_statistics(io_class_id=0).usage_stats.occupancy
     check_occupancy(non_classified_before + test_file.size, non_classified_after)
 
     TestRun.LOGGER.info(f"Moving test file to {nested_dir_path}")
@@ -370,10 +374,10 @@ def test_ioclass_directory_file_operations(filesystem):
     drop_caches(DropCachesMode.ALL)
 
     TestRun.LOGGER.info("Checking classified occupancy")
-    classified_after = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    classified_after = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     check_occupancy(classified_before, classified_after)
     TestRun.LOGGER.info("Checking non-classified occupancy")
-    non_classified_after = cache.get_statistics_deprecated(io_class_id=0)["occupancy"]
+    non_classified_after = cache.get_io_class_statistics(io_class_id=0).usage_stats.occupancy
     check_occupancy(non_classified_before, non_classified_after)
 
     TestRun.LOGGER.info("Reading test file")
@@ -383,8 +387,8 @@ def test_ioclass_directory_file_operations(filesystem):
      .block_size(Size(1, Unit.MebiByte)).run())
 
     TestRun.LOGGER.info("Checking classified occupancy")
-    classified_after = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    classified_after = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     check_occupancy(classified_before + test_file.size, classified_after)
     TestRun.LOGGER.info("Checking non-classified occupancy")
-    non_classified_after = cache.get_statistics_deprecated(io_class_id=0)["occupancy"]
+    non_classified_after = cache.get_io_class_statistics(io_class_id=0).usage_stats.occupancy
     check_occupancy(non_classified_before - test_file.size, non_classified_after)
@@ -7,11 +7,11 @@ import random
 
 import pytest
 
+from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
 from test_tools.dd import Dd
 from test_tools.disk_utils import Filesystem
 from test_utils.filesystem.file import File
 from test_utils.os_utils import sync, Udev, DropCachesMode, drop_caches
-from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
 from .io_class_common import *
 
 
@@ -56,8 +56,9 @@ def test_ioclass_file_extension():
     for i in range(iterations):
         dd.run()
         sync()
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        assert stats["dirty"].get_value(Unit.Blocks4096) == (i + 1) * dd_count
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != (i + 1) * dd_count:
+            TestRun.LOGGER.error(f"Wrong amount of dirty data ({dirty}).")
 
     cache.flush_cache()
 
@@ -73,8 +74,9 @@ def test_ioclass_file_extension():
         )
         dd.run()
         sync()
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        assert stats["dirty"].get_value(Unit.Blocks4096) == 0
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != 0:
+            TestRun.LOGGER.error(f"Wrong amount of dirty data ({dirty}).")
 
 
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -213,11 +215,9 @@ def test_ioclass_file_extension_preexisting_filesystem():
         )
         dd.run()
         sync()
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        assert (
-            stats["dirty"].get_value(Unit.Blocks4096)
-            == (extensions.index(ext) + 1) * dd_count
-        )
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != (extensions.index(ext) + 1) * dd_count:
+            TestRun.LOGGER.error(f"Wrong amount of dirty data ({dirty}).")
 
 
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -269,10 +269,9 @@ def test_ioclass_file_offset():
         )
         dd.run()
         sync()
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        assert (
-            stats["dirty"].get_value(Unit.Blocks4096) == 1
-        ), f"Offset not cached: {file_offset}"
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != 1:
+            TestRun.LOGGER.error(f"Offset not cached: {file_offset}")
         cache.flush_cache()
 
     min_seek = 0
@@ -290,10 +289,9 @@ def test_ioclass_file_offset():
         )
        dd.run()
         sync()
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        assert (
-            stats["dirty"].get_value(Unit.Blocks4096) == 0
-        ), f"Inappropriately cached offset: {file_offset}"
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != 0:
+            TestRun.LOGGER.error(f"Inappropriately cached offset: {file_offset}")
 
 
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -349,10 +347,12 @@ def test_ioclass_file_size(filesystem):
     TestRun.LOGGER.info("Creating files belonging to different IO classes "
                         "(classification by writes).")
     for size, ioclass_id in size_to_class.items():
-        occupancy_before = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+        occupancy_before = cache.get_io_class_statistics(
+            io_class_id=ioclass_id).usage_stats.occupancy
         file_path = f"{mountpoint}/test_file_{size.get_value()}"
         Dd().input("/dev/zero").output(file_path).oflag("sync").block_size(size).count(1).run()
-        occupancy_after = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+        occupancy_after = cache.get_io_class_statistics(
+            io_class_id=ioclass_id).usage_stats.occupancy
         if occupancy_after != occupancy_before + size:
             pytest.xfail("File not cached properly!\n"
                          f"Expected {occupancy_before + size}\n"
@@ -366,9 +366,11 @@ def test_ioclass_file_size(filesystem):
                         "(classification by reads).")
     for file in test_files:
         ioclass_id = size_to_class[file.size]
-        occupancy_before = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+        occupancy_before = cache.get_io_class_statistics(
+            io_class_id=ioclass_id).usage_stats.occupancy
         Dd().input(file.full_path).output("/dev/null").block_size(file.size).run()
-        occupancy_after = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+        occupancy_after = cache.get_io_class_statistics(
+            io_class_id=ioclass_id).usage_stats.occupancy
         if occupancy_after != occupancy_before + file.size:
             pytest.xfail("File not reclassified properly!\n"
                          f"Expected {occupancy_before + file.size}\n"
@@ -390,10 +392,10 @@ def test_ioclass_file_size(filesystem):
         ioclass_config_path=ioclass_config_path,
     )
     casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
-    occupancy_before = cache.get_statistics_deprecated(io_class_id=0)["occupancy"]
+    occupancy_before = cache.get_io_class_statistics(io_class_id=0).usage_stats.occupancy
     for file in test_files:
         Dd().input(file.full_path).output("/dev/null").block_size(file.size).run()
-        occupancy_after = cache.get_statistics_deprecated(io_class_id=0)["occupancy"]
+        occupancy_after = cache.get_io_class_statistics(io_class_id=0).usage_stats.occupancy
         if occupancy_after != occupancy_before + file.size:
             pytest.xfail("File not reclassified properly!\n"
                          f"Expected {occupancy_before + file.size}\n"
@@ -7,9 +7,9 @@ import time
 
 import pytest
 
+from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
 from test_tools.dd import Dd
 from test_utils.os_utils import sync, Udev
-from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
 from .io_class_common import *
 
 
@@ -50,8 +50,9 @@ def test_ioclass_process_name():
         dd.run()
         sync()
         time.sleep(0.1)
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        assert stats["dirty"].get_value(Unit.Blocks4096) == (i + 1) * dd_count
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != (i + 1) * dd_count:
+            TestRun.LOGGER.error(f"Wrong amount of dirty data ({dirty}).")
 
 
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -109,7 +110,7 @@ def test_ioclass_pid():
             f"stdout: {output.stdout} \n stderr :{output.stderr}"
         )
         sync()
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        assert stats["dirty"].get_value(Unit.Blocks4096) == dd_count
-
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != dd_count:
+            TestRun.LOGGER.error(f"Wrong amount of dirty data ({dirty}).")
         ioclass_config.remove_ioclass(ioclass_id)
@@ -64,9 +64,9 @@ def test_ioclass_lba():
         sync()
         dirty_count += 1
 
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        if stats["dirty"].get_value(Unit.Blocks4096) != dirty_count:
-            TestRun.fail(f"LBA {lba} not cached")
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != dirty_count:
+            TestRun.LOGGER.error(f"LBA {lba} not cached")
 
     cache.flush_cache()
 
@@ -87,9 +87,9 @@ def test_ioclass_lba():
         dd.run()
         sync()
 
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        if stats["dirty"].get_value(Unit.Blocks4096) != 0:
-            TestRun.fail(f"Inappropriately cached lba: {rand_lba}")
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != 0:
+            TestRun.LOGGER.error(f"Inappropriately cached lba: {rand_lba}")
 
 
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -128,8 +128,8 @@ def test_ioclass_request_size():
             .oflag("direct")
         )
         dd.run()
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        if stats["dirty"].get_value(Unit.Blocks4096) != req_size.value / Unit.Blocks4096.value:
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != req_size.value / Unit.Blocks4096.value:
             TestRun.fail("Incorrect number of dirty blocks!")
 
         cache.flush_cache()
@@ -154,8 +154,8 @@ def test_ioclass_request_size():
            .oflag("direct")
         )
         dd.run()
-        stats = cache.get_statistics_deprecated(io_class_id=ioclass_id)
-        if stats["dirty"].get_value(Unit.Blocks4096) != 0:
+        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
+        if dirty.get_value(Unit.Blocks4096) != 0:
             TestRun.fail("Dirty data present!")
 
 
@@ -205,12 +205,12 @@ def test_ioclass_direct(filesystem):
     else:
         TestRun.LOGGER.info("Testing on raw exported object")
 
-    base_occupancy = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    base_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
 
     TestRun.LOGGER.info(f"Buffered writes to {'file' if filesystem else 'device'}")
     fio.run()
     sync()
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     if new_occupancy != base_occupancy:
         TestRun.fail("Buffered writes were cached!\n"
                      f"Expected: {base_occupancy}, actual: {new_occupancy}")
@@ -219,7 +219,7 @@ def test_ioclass_direct(filesystem):
     fio.direct()
     fio.run()
     sync()
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     if new_occupancy != base_occupancy + io_size:
         TestRun.fail("Wrong number of direct writes was cached!\n"
                      f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
@@ -229,7 +229,7 @@ def test_ioclass_direct(filesystem):
     fio.read_write(ReadWrite.read)
     fio.run()
     sync()
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     if new_occupancy != base_occupancy:
         TestRun.fail("Buffered reads did not cause reclassification!"
                      f"Expected occupancy: {base_occupancy}, actual: {new_occupancy}")
@@ -238,7 +238,7 @@ def test_ioclass_direct(filesystem):
     fio.direct()
     fio.run()
     sync()
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=ioclass_id)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
     if new_occupancy != base_occupancy + io_size:
         TestRun.fail("Wrong number of direct reads was cached!\n"
                      f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
@@ -273,8 +273,8 @@ def test_ioclass_metadata(filesystem):
     core.mount(mountpoint)
     sync()
 
-    requests_to_metadata_before = cache.get_statistics_deprecated(
-        io_class_id=ioclass_id)["write total"]
+    requests_to_metadata_before = cache.get_io_class_statistics(
+        io_class_id=ioclass_id).request_stats.write
     TestRun.LOGGER.info("Creating 20 test files")
     files = []
     for i in range(1, 21):
@@ -291,10 +291,10 @@ def test_ioclass_metadata(filesystem):
         files.append(File(file_path))
 
     TestRun.LOGGER.info("Checking requests to metadata")
-    requests_to_metadata_after = cache.get_statistics_deprecated(
-        io_class_id=ioclass_id)["write total"]
+    requests_to_metadata_after = cache.get_io_class_statistics(
+        io_class_id=ioclass_id).request_stats.write
     if requests_to_metadata_after == requests_to_metadata_before:
-        pytest.xfail("No requests to metadata while creating files!")
+        TestRun.fail("No requests to metadata while creating files!")
 
     requests_to_metadata_before = requests_to_metadata_after
     TestRun.LOGGER.info("Renaming all test files")
@@ -303,10 +303,10 @@ def test_ioclass_metadata(filesystem):
     sync()
 
     TestRun.LOGGER.info("Checking requests to metadata")
-    requests_to_metadata_after = cache.get_statistics_deprecated(
-        io_class_id=ioclass_id)["write total"]
+    requests_to_metadata_after = cache.get_io_class_statistics(
+        io_class_id=ioclass_id).request_stats.write
     if requests_to_metadata_after == requests_to_metadata_before:
-        pytest.xfail("No requests to metadata while renaming files!")
+        TestRun.fail("No requests to metadata while renaming files!")
 
     requests_to_metadata_before = requests_to_metadata_after
     test_dir_path = f"{mountpoint}/test_dir"
@@ -319,19 +319,19 @@ def test_ioclass_metadata(filesystem):
     sync()
 
     TestRun.LOGGER.info("Checking requests to metadata")
-    requests_to_metadata_after = cache.get_statistics_deprecated(
-        io_class_id=ioclass_id)["write total"]
+    requests_to_metadata_after = cache.get_io_class_statistics(
+        io_class_id=ioclass_id).request_stats.write
     if requests_to_metadata_after == requests_to_metadata_before:
-        pytest.xfail("No requests to metadata while moving files!")
+        TestRun.fail("No requests to metadata while moving files!")
 
     TestRun.LOGGER.info(f"Removing {test_dir_path}")
     fs_utils.remove(path=test_dir_path, force=True, recursive=True)
 
     TestRun.LOGGER.info("Checking requests to metadata")
-    requests_to_metadata_after = cache.get_statistics_deprecated(
-        io_class_id=ioclass_id)["write total"]
+    requests_to_metadata_after = cache.get_io_class_statistics(
+        io_class_id=ioclass_id).request_stats.write
     if requests_to_metadata_after == requests_to_metadata_before:
-        pytest.xfail("No requests to metadata while deleting directory with files!")
+        TestRun.fail("No requests to metadata while deleting directory with files!")
 
 
 @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@@ -408,7 +408,7 @@ def test_ioclass_id_as_condition(filesystem):
 
     # IO fulfilling IO class 1 condition (and not IO class 2)
     # Should be classified as IO class 4
-    base_occupancy = cache.get_statistics_deprecated(io_class_id=4)["occupancy"]
+    base_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
     non_ioclass_file_size = Size(random.randrange(1, 25), Unit.MebiByte)
     (Fio().create_command()
      .io_engine(IoEngine.libaio)
@@ -417,7 +417,7 @@ def test_ioclass_id_as_condition(filesystem):
      .target(f"{base_dir_path}/test_file_1")
      .run())
     sync()
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=4)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
 
     if new_occupancy != base_occupancy + non_ioclass_file_size:
         TestRun.fail("Writes were not properly cached!\n"
@@ -425,7 +425,7 @@ def test_ioclass_id_as_condition(filesystem):
 
     # IO fulfilling IO class 2 condition (and not IO class 1)
     # Should be classified as IO class 5
-    base_occupancy = cache.get_statistics_deprecated(io_class_id=5)["occupancy"]
+    base_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
     (Fio().create_command()
      .io_engine(IoEngine.libaio)
      .size(ioclass_file_size)
@@ -433,7 +433,7 @@ def test_ioclass_id_as_condition(filesystem):
      .target(f"{mountpoint}/test_file_2")
      .run())
     sync()
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=5)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
 
     if new_occupancy != base_occupancy + ioclass_file_size:
         TestRun.fail("Writes were not properly cached!\n"
@@ -449,7 +449,7 @@ def test_ioclass_id_as_condition(filesystem):
      .target(f"{base_dir_path}/test_file_3")
      .run())
     sync()
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=5)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
 
     if new_occupancy != base_occupancy + ioclass_file_size:
         TestRun.fail("Writes were not properly cached!\n"
@@ -457,7 +457,7 @@ def test_ioclass_id_as_condition(filesystem):
 
     # Same IO but direct
     # Should be classified as IO class 6
-    base_occupancy = cache.get_statistics_deprecated(io_class_id=6)["occupancy"]
+    base_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
     (Fio().create_command()
      .io_engine(IoEngine.libaio)
      .size(ioclass_file_size)
@@ -466,7 +466,7 @@ def test_ioclass_id_as_condition(filesystem):
      .direct()
      .run())
     sync()
-    new_occupancy = cache.get_statistics_deprecated(io_class_id=6)["occupancy"]
+    new_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
 
     if new_occupancy != base_occupancy + ioclass_file_size:
         TestRun.fail("Writes were not properly cached!\n"
@@ -506,7 +506,7 @@ def test_ioclass_conditions_or(filesystem):
     # Perform IO fulfilling each condition and check if occupancy raises
     for i in range(1, 6):
         file_size = Size(random.randint(25, 50), Unit.MebiByte)
-        base_occupancy = cache.get_statistics_deprecated(io_class_id=1)["occupancy"]
+        base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
         (Fio().create_command()
          .io_engine(IoEngine.libaio)
          .size(file_size)
@@ -514,7 +514,7 @@ def test_ioclass_conditions_or(filesystem):
          .target(f"{mountpoint}/dir{i}/test_file")
          .run())
         sync()
-        new_occupancy = cache.get_statistics_deprecated(io_class_id=1)["occupancy"]
+        new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
 
         if new_occupancy != base_occupancy + file_size:
             TestRun.fail("Occupancy has not increased correctly!\n"
@@ -553,7 +553,7 @@ def test_ioclass_conditions_and(filesystem):
     core.mount(mountpoint)
     sync()
 
-    base_occupancy = cache.get_statistics_deprecated(io_class_id=1)["occupancy"]
+    base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
     # Perform IO
     for size in [file_size, file_size + Size(1, Unit.MebiByte), file_size - Size(1, Unit.MebiByte)]:
         (Fio().create_command()
@@ -563,7 +563,7 @@ def test_ioclass_conditions_and(filesystem):
          .target(f"{mountpoint}/test_file")
          .run())
         sync()
-        new_occupancy = cache.get_statistics_deprecated(io_class_id=1)["occupancy"]
+        new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
 
         if new_occupancy != base_occupancy:
             TestRun.fail("Unexpected occupancy increase!\n"
@ -5,15 +5,16 @@
|
|||||||
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from api.cas.casadm import StatsFilter
|
|
||||||
from api.cas import casadm
|
from api.cas import casadm
|
||||||
from api.cas import ioclass_config
|
from api.cas import ioclass_config
|
||||||
from test_tools.dd import Dd
|
|
||||||
from api.cas.cache_config import CacheMode, CleaningPolicy
|
from api.cas.cache_config import CacheMode, CleaningPolicy
|
||||||
|
from api.cas.casadm import StatsFilter
|
||||||
from core.test_run import TestRun
|
from core.test_run import TestRun
|
||||||
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
|
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
|
||||||
from test_utils.size import Size, Unit
|
from test_tools.dd import Dd
|
||||||
from test_utils.os_utils import Udev
|
from test_utils.os_utils import Udev
|
||||||
|
from test_utils.size import Size, Unit
|
||||||
|
|
||||||
ioclass_config_path = "/tmp/opencas_ioclass.conf"
|
ioclass_config_path = "/tmp/opencas_ioclass.conf"
|
||||||
mountpoint = "/tmp/cas1-1"
|
mountpoint = "/tmp/cas1-1"
|
||||||
@ -112,8 +113,8 @@ def test_block_stats_write(cache_mode, zero_stats):
|
|||||||
for i in range(iterations):
|
for i in range(iterations):
|
||||||
dd.seek(dd_seek)
|
dd.seek(dd_seek)
|
||||||
dd.run()
|
dd.run()
|
||||||
cache_stats = cache.get_statistics_deprecated(stat_filter=[StatsFilter.blk])
|
cache_stats = cache.get_statistics_flat(stat_filter=[StatsFilter.blk])
|
||||||
core_stats = core.get_statistics_deprecated(stat_filter=[StatsFilter.blk])
|
core_stats = core.get_statistics_flat(stat_filter=[StatsFilter.blk])
|
||||||
|
|
||||||
# Check cache stats
|
# Check cache stats
|
||||||
assumed_value = (dd_size.get_value(Unit.Blocks4096) * dd_count) * (i + 1)
|
assumed_value = (dd_size.get_value(Unit.Blocks4096) * dd_count) * (i + 1)
|
||||||
@ -237,8 +238,8 @@ def test_block_stats_read(cache_mode, zero_stats):
|
|||||||
for i in range(iterations):
|
for i in range(iterations):
|
||||||
dd.skip(dd_skip)
|
dd.skip(dd_skip)
|
||||||
dd.run()
|
dd.run()
|
||||||
cache_stats = cache.get_statistics_deprecated(stat_filter=[StatsFilter.blk])
|
cache_stats = cache.get_statistics_flat(stat_filter=[StatsFilter.blk])
|
||||||
core_stats = core.get_statistics_deprecated(stat_filter=[StatsFilter.blk])
|
core_stats = core.get_statistics_flat(stat_filter=[StatsFilter.blk])
|
||||||
|
|
||||||
# Check cache stats
|
# Check cache stats
|
||||||
assumed_value = (dd_size.get_value(Unit.Blocks4096) * dd_count) * (i + 1)
|
assumed_value = (dd_size.get_value(Unit.Blocks4096) * dd_count) * (i + 1)
|
||||||
@ -283,7 +284,7 @@ def test_block_stats_read(cache_mode, zero_stats):
|
|||||||
def flush(cache):
|
def flush(cache):
|
||||||
cache.flush_cache()
|
cache.flush_cache()
|
||||||
cache.reset_counters()
|
cache.reset_counters()
|
||||||
stats = cache.get_statistics_deprecated(stat_filter=[StatsFilter.blk])
|
stats = cache.get_statistics_flat(stat_filter=[StatsFilter.blk])
|
||||||
for key, value in stats.items():
|
for key, value in stats.items():
|
||||||
assert value.get_value(Unit.Blocks4096) == 0
|
assert value.get_value(Unit.Blocks4096) == 0
|
||||||
|
|
||||||
|
@@ -5,17 +5,18 @@
 
 
 import pytest
-from api.cas.casadm import StatsFilter
+
 from api.cas import casadm
-from api.cas import ioclass_config
 from api.cas import casadm_parser
+from api.cas import ioclass_config
 from api.cas.cache_config import CleaningPolicy
+from api.cas.casadm import StatsFilter
 from core.test_run import TestRun
 from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
 from test_tools.disk_utils import Filesystem
-from test_utils.size import Size, Unit
-from test_utils.os_utils import sync, Udev
 from test_utils.filesystem.file import File
+from test_utils.os_utils import sync, Udev
+from test_utils.size import Size, Unit
 
 ioclass_config_path = "/tmp/opencas_ioclass.conf"
 mountpoint = "/tmp/cas1-1"
@@ -105,11 +106,11 @@ def test_ioclass_stats_sum():
     core.unmount()
     sync()
 
-    cache_stats = cache.get_statistics_deprecated(
+    cache_stats = cache.get_statistics_flat(
         stat_filter=[StatsFilter.usage, StatsFilter.req, StatsFilter.blk]
     )
     for ioclass_id in ioclass_id_list:
-        ioclass_stats = cache.get_statistics_deprecated(
+        ioclass_stats = cache.get_statistics_flat(
             stat_filter=[StatsFilter.usage, StatsFilter.req, StatsFilter.blk],
             io_class_id=ioclass_id,
         )
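For orientation, a minimal usage sketch of the statistics accessors this commit switches the tests to. It is not part of the commit; the cache object and IO class id are assumed to come from a test setup like the one in the hunks above, and only get_io_class_statistics, get_statistics_flat, usage_stats, request_stats, and StatsFilter are taken from the diff itself.

# Hedged sketch, Python, assuming the open-cas-linux functional test framework.
# `cache` is assumed to be an already-configured Cache object supplied by the caller.
from api.cas.casadm import StatsFilter
from test_utils.size import Unit


def log_io_class_stats(cache, io_class_id: int = 1):
    # Typed, per-IO-class statistics object (the new style used by the io_class tests).
    io_class_stats = cache.get_io_class_statistics(io_class_id=io_class_id)
    occupancy = io_class_stats.usage_stats.occupancy
    dirty_4k_blocks = io_class_stats.usage_stats.dirty.get_value(Unit.Blocks4096)
    write_requests = io_class_stats.request_stats.write
    print(occupancy, dirty_4k_blocks, write_requests)

    # Flat dictionary of counters, kept as get_statistics_flat for the block-stats tests.
    block_stats = cache.get_statistics_flat(stat_filter=[StatsFilter.blk])
    for name, value in block_stats.items():
        print(name, value.get_value(Unit.Blocks4096))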