tests: Add test for classification by file name prefix
Signed-off-by: Mariusz Barczak <mariusz.barczak@intel.com>
Signed-off-by: Robert Baldyga <robert.baldyga@intel.com>
This commit is contained in:
parent
7facb1e926
commit
7f86961447
@ -99,8 +99,9 @@ class IoClass:
|
|||||||
return random_list
|
return random_list
|
||||||
|
|
||||||
def set_random_rule(self):
|
def set_random_rule(self):
|
||||||
rules = ["metadata", "direct", "file_size", "directory", "io_class", "extension", "lba",
|
rules = ["metadata", "direct", "file_size", "directory", "io_class",
|
||||||
"pid", "process_name", "file_offset", "request_size"]
|
"extension", "file_name_prefix", "lba", "pid", "process_name",
|
||||||
|
"file_offset", "request_size"]
|
||||||
if os_utils.get_kernel_version() >= version.Version("4.13"):
|
if os_utils.get_kernel_version() >= version.Version("4.13"):
|
||||||
rules.append("wlth")
|
rules.append("wlth")
|
||||||
|
|
||||||
@ -117,7 +118,7 @@ class IoClass:
|
|||||||
rule += f":{Operator(random.randrange(len(Operator))).name}:{random.randrange(1000000)}"
|
rule += f":{Operator(random.randrange(len(Operator))).name}:{random.randrange(1000000)}"
|
||||||
elif rule == "io_class":
|
elif rule == "io_class":
|
||||||
rule += f":{random.randrange(MAX_IO_CLASS_PRIORITY + 1)}"
|
rule += f":{random.randrange(MAX_IO_CLASS_PRIORITY + 1)}"
|
||||||
elif rule in ["extension", "process_name"]:
|
elif rule in ["extension", "process_name", "file_name_prefix"]:
|
||||||
rule += f":{random_string(random.randint(1, 10))}"
|
rule += f":{random_string(random.randint(1, 10))}"
|
||||||
if random.randrange(2):
|
if random.randrange(2):
|
||||||
rule += "&done"
|
rule += "&done"
|
||||||
|
@ -77,6 +77,84 @@ def test_ioclass_file_extension():
|
|||||||
assert stats["dirty"].get_value(Unit.Blocks4096) == 0
|
assert stats["dirty"].get_value(Unit.Blocks4096) == 0
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_file_name_prefix():
    """Verify IO classification by file name prefix.

    Configures a ``file_name_prefix:test`` IO class rule, then checks that
    writes to files whose names start with "test" are cached (cache occupancy
    grows) while writes to files without that prefix are not cached
    (occupancy stays unchanged).
    """
    cache, core = prepare()
    ioclass_id = 1
    # Names starting with the "test" prefix — must be classified and cached.
    cached_files = ["test", "test.txt", "test1", "test1.txt"]
    # Names without the full prefix (note "tes" is one char short) — must not be cached.
    not_cached_files = ["file1", "file2", "file4", "file5", "tes"]
    dd_size = Size(4, Unit.KibiByte)
    dd_count = 10

    ioclass_config.remove_ioclass_config()
    ioclass_config.create_ioclass_config(False)

    # Avoid caching anything else than files with specified prefix:
    # class 0 catches all unclassified IO with allocation disabled.
    ioclass_config.add_ioclass(
        ioclass_id=0,
        eviction_priority=255,
        allocation=False,
        rule="unclassified",
        ioclass_config_path=ioclass_config_path,
    )
    # Enable files with the specified prefix to be cached.
    ioclass_config.add_ioclass(
        ioclass_id=ioclass_id,
        eviction_priority=1,
        allocation=True,
        rule="file_name_prefix:test&done",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)

    TestRun.LOGGER.info(
        f"Preparing filesystem and mounting {core.system_path} at {mountpoint}"
    )

    previous_occupancy = cache.get_occupancy()

    core.create_filesystem(Filesystem.ext3)
    core.mount(mountpoint)

    # Filesystem creation caused metadata IO which is not supposed
    # to be cached, so occupancy must not have decreased.
    assert previous_occupancy.get_value() <= cache.get_occupancy().get_value()

    # Check if files with proper prefix are cached: each write should
    # land in cache and therefore change the occupancy.
    TestRun.LOGGER.info("Writing files which are supposed to be cached.")
    for f in cached_files:
        dd = (
            Dd()
            .input("/dev/zero")
            .output(f"{mountpoint}/{f}")
            .count(dd_count)
            .block_size(dd_size)
        )
        dd.run()
        sync()
        current_occupancy = cache.get_occupancy()
        # Cached write must grow occupancy (original asserted equality here,
        # which contradicts both the rule under test and the bookkeeping below).
        assert current_occupancy != previous_occupancy
        previous_occupancy = current_occupancy

    cache.flush_cache()

    # Check if files without the required name prefix are not cached:
    # occupancy must remain unchanged after each write.
    TestRun.LOGGER.info("Writing files which are not supposed to be cached.")
    for f in not_cached_files:
        dd = (
            Dd()
            .input("/dev/zero")
            .output(f"{mountpoint}/{f}")
            .count(dd_count)
            .block_size(dd_size)
        )
        dd.run()
        sync()
        current_occupancy = cache.get_occupancy()
        # Non-cached write must leave occupancy untouched.
        assert current_occupancy == previous_occupancy
|
||||||
|
|
||||||
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
|
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
|
||||||
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
|
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
|
||||||
def test_ioclass_file_extension_preexisting_filesystem():
|
def test_ioclass_file_extension_preexisting_filesystem():
|
||||||
|
Loading…
Reference in New Issue
Block a user