Adapt all python code to PEP8 style standards

Signed-off-by: Kamil Lepek <kamil.lepek94@gmail.com>
This commit is contained in:
Kamil Lepek 2019-06-10 15:49:15 +02:00
parent 1e2b8f1980
commit e52d34c1c8
15 changed files with 822 additions and 765 deletions

View File

@ -8,5 +8,6 @@ pycodestyle:
max-line-length: 100 max-line-length: 100
ignore: ignore:
- E402 # module level import not at top of file - E402 # module level import not at top of file
- W503 # line break after binary operator
no_blank_comment: True no_blank_comment: True

View File

@ -86,6 +86,7 @@ class CacheMode(IntEnum):
def read_insert(self): def read_insert(self):
return self.value not in [CacheMode.PT, CacheMode.WO] return self.value not in [CacheMode.PT, CacheMode.WO]
class EvictionPolicy(IntEnum): class EvictionPolicy(IntEnum):
LRU = 0 LRU = 0
DEFAULT = LRU DEFAULT = LRU
@ -306,7 +307,7 @@ class Cache:
c.start_cache() c.start_cache()
try: try:
c.load_cache(device) c.load_cache(device)
except: except: # noqa E722
c.stop() c.stop()
raise raise
@ -319,7 +320,7 @@ class Cache:
c.start_cache() c.start_cache()
try: try:
c.attach_device(device, force=True) c.attach_device(device, force=True)
except: except: # noqa E722
c.stop() c.stop()
raise raise
@ -529,13 +530,12 @@ class Cache:
if c.results["error"]: if c.results["error"]:
raise OcfError("Couldn't flush cache", c.results["error"]) raise OcfError("Couldn't flush cache", c.results["error"])
def get_name(self): def get_name(self):
self.read_lock() self.read_lock()
try: try:
return str(self.owner.lib.ocf_cache_get_name(self), encoding="ascii") return str(self.owner.lib.ocf_cache_get_name(self), encoding="ascii")
except: except: # noqa E722
raise OcfError("Couldn't get cache name") raise OcfError("Couldn't get cache name")
finally: finally:
self.read_unlock() self.read_unlock()

View File

@ -56,7 +56,7 @@ class DataOps(Structure):
class Data: class Data:
DATA_POISON=0xA5 DATA_POISON = 0xA5
PAGE_SIZE = 4096 PAGE_SIZE = 4096
_instances_ = {} _instances_ = {}
@ -109,7 +109,7 @@ class Data:
def from_string(cls, source: str, encoding: str = "ascii"): def from_string(cls, source: str, encoding: str = "ascii"):
b = bytes(source, encoding) b = bytes(source, encoding)
# duplicate string to fill space up to sector boundary # duplicate string to fill space up to sector boundary
padding_len = S.from_B(len(b), sector_aligned = True).B - len(b) padding_len = S.from_B(len(b), sector_aligned=True).B - len(b)
padding = b * (padding_len // len(b) + 1) padding = b * (padding_len // len(b) + 1)
padding = padding[:padding_len] padding = padding[:padding_len]
b = b + padding b = b + padding

View File

@ -92,7 +92,7 @@ class Io(Structure):
def end(self, err): def end(self, err):
try: try:
self.callback(err) self.callback(err)
except: except: # noqa E722
pass pass
self.put() self.put()

View File

@ -36,6 +36,7 @@ def io_queue_run(*, queue: Queue, kick: Condition, stop: Event):
if stop.is_set() and not OcfLib.getInstance().ocf_queue_pending_io(queue): if stop.is_set() and not OcfLib.getInstance().ocf_queue_pending_io(queue):
break break
class Queue: class Queue:
_instances_ = {} _instances_ = {}
@ -102,4 +103,3 @@ class Queue:
self.kick_condition.notify_all() self.kick_condition.notify_all()
self.thread.join() self.thread.join()

View File

@ -102,7 +102,7 @@ class SharedOcfObject(Structure):
def get_instance(cls, ref: int): def get_instance(cls, ref: int):
try: try:
return cls._instances_[ref] return cls._instances_[ref]
except: except: # noqa E722
logging.getLogger("pyocf").error( logging.getLogger("pyocf").error(
"OcfSharedObject corruption. wanted: {} instances: {}".format( "OcfSharedObject corruption. wanted: {} instances: {}".format(
ref, cls._instances_ ref, cls._instances_

View File

@ -74,7 +74,7 @@ class VolumeIoPriv(Structure):
class Volume(Structure): class Volume(Structure):
VOLUME_POISON=0x13 VOLUME_POISON = 0x13
_fields_ = [("_storage", c_void_p)] _fields_ = [("_storage", c_void_p)]
_instances_ = {} _instances_ = {}
@ -184,7 +184,7 @@ class Volume(Structure):
uuid = str(uuid_ptr.contents._data, encoding="ascii") uuid = str(uuid_ptr.contents._data, encoding="ascii")
try: try:
volume = Volume.get_by_uuid(uuid) volume = Volume.get_by_uuid(uuid)
except: except: # noqa E722 TODO:Investigate whether this really should be so broad
print("Tried to access unallocated volume {}".format(uuid)) print("Tried to access unallocated volume {}".format(uuid))
print("{}".format(Volume._uuid_)) print("{}".format(Volume._uuid_))
return -1 return -1
@ -255,7 +255,7 @@ class Volume(Structure):
memset(dst, 0, discard.contents._bytes) memset(dst, 0, discard.contents._bytes)
discard.contents._end(discard, 0) discard.contents._end(discard, 0)
except: except: # noqa E722
discard.contents._end(discard, -5) discard.contents._end(discard, -5)
def get_stats(self): def get_stats(self):
@ -269,8 +269,7 @@ class Volume(Structure):
self.stats[IoDir(io.contents._dir)] += 1 self.stats[IoDir(io.contents._dir)] += 1
io_priv = cast( io_priv = cast(
OcfLib.getInstance().ocf_io_get_priv(io), POINTER(VolumeIoPriv) OcfLib.getInstance().ocf_io_get_priv(io), POINTER(VolumeIoPriv))
)
offset = io_priv.contents._offset offset = io_priv.contents._offset
if io.contents._dir == IoDir.WRITE: if io.contents._dir == IoDir.WRITE:
@ -286,7 +285,7 @@ class Volume(Structure):
io_priv.contents._offset += io.contents._bytes io_priv.contents._offset += io.contents._bytes
io.contents._end(io, 0) io.contents._end(io, 0)
except: except: # noqa E722
io.contents._end(io, -5) io.contents._end(io, -5)
def dump(self, offset=0, size=0, ignore=VOLUME_POISON, **kwargs): def dump(self, offset=0, size=0, ignore=VOLUME_POISON, **kwargs):
@ -325,10 +324,11 @@ class ErrorDevice(Volume):
super().reset_stats() super().reset_stats()
self.stats["errors"] = {IoDir.WRITE: 0, IoDir.READ: 0} self.stats["errors"] = {IoDir.WRITE: 0, IoDir.READ: 0}
class TraceDevice(Volume): class TraceDevice(Volume):
def __init__(self, size, trace_fcn=None, uuid=None): def __init__(self, size, trace_fcn=None, uuid=None):
super().__init__(size, uuid) super().__init__(size, uuid)
self.trace_fcn=trace_fcn self.trace_fcn = trace_fcn
def submit_io(self, io): def submit_io(self, io):
submit = True submit = True

View File

@ -6,7 +6,8 @@
from ctypes import string_at from ctypes import string_at
def print_buffer(buf, length, offset=0, width=16, ignore=0, stop_after_count_ignored=0, print_fcn=print): def print_buffer(buf, length, offset=0, width=16, ignore=0,
stop_after_count_ignored=0, print_fcn=print):
end = int(offset) + int(length) end = int(offset) + int(length)
offset = int(offset) offset = int(offset)
ignored_lines = 0 ignored_lines = 0
@ -15,16 +16,13 @@ def print_buffer(buf, length, offset=0, width=16, ignore=0, stop_after_count_ign
stop_after_count_ignored = int(stop_after_count_ignored / width) stop_after_count_ignored = int(stop_after_count_ignored / width)
for addr in range(offset, end, width): for addr in range(offset, end, width):
cur_line = buf[addr : min(end, addr + width)] cur_line = buf[addr: min(end, addr + width)]
byteline = "" byteline = ""
asciiline = "" asciiline = ""
if not any(x != ignore for x in cur_line): if not any(x != ignore for x in cur_line):
if stop_after_count_ignored and ignored_lines > stop_after_count_ignored: if stop_after_count_ignored and ignored_lines > stop_after_count_ignored:
print_fcn( print_fcn("<{} bytes of '0x{:02X}' encountered, stopping>".
"<{} bytes of '0x{:02X}' encountered, stopping>".format( format(stop_after_count_ignored * width, ignore))
stop_after_count_ignored * width, ignore
)
)
return return
ignored_lines += 1 ignored_lines += 1
continue continue
@ -71,23 +69,23 @@ class Size:
return self.bytes return self.bytes
@classmethod @classmethod
def from_B(cls, value, sector_aligned = False): def from_B(cls, value, sector_aligned=False):
return cls(value, sector_aligned) return cls(value, sector_aligned)
@classmethod @classmethod
def from_KiB(cls, value, sector_aligned = False): def from_KiB(cls, value, sector_aligned=False):
return cls(value * cls._KiB, sector_aligned) return cls(value * cls._KiB, sector_aligned)
@classmethod @classmethod
def from_MiB(cls, value, sector_aligned = False): def from_MiB(cls, value, sector_aligned=False):
return cls(value * cls._MiB, sector_aligned) return cls(value * cls._MiB, sector_aligned)
@classmethod @classmethod
def from_GiB(cls, value, sector_aligned = False): def from_GiB(cls, value, sector_aligned=False):
return cls(value * cls._GiB, sector_aligned) return cls(value * cls._GiB, sector_aligned)
@classmethod @classmethod
def from_TiB(cls, value, sector_aligned = False): def from_TiB(cls, value, sector_aligned=False):
return cls(value * cls._TiB, sector_aligned) return cls(value * cls._TiB, sector_aligned)
@classmethod @classmethod

View File

@ -3,7 +3,6 @@
# SPDX-License-Identifier: BSD-3-Clause-Clear # SPDX-License-Identifier: BSD-3-Clause-Clear
# #
import pytest
from ctypes import c_int, memmove, cast, c_void_p from ctypes import c_int, memmove, cast, c_void_p
from enum import IntEnum from enum import IntEnum
from itertools import product from itertools import product
@ -11,11 +10,12 @@ import random
from pyocf.types.cache import Cache, CacheMode from pyocf.types.cache import Cache, CacheMode
from pyocf.types.core import Core from pyocf.types.core import Core
from pyocf.types.volume import Volume, ErrorDevice from pyocf.types.volume import Volume
from pyocf.types.data import Data from pyocf.types.data import Data
from pyocf.types.io import IoDir from pyocf.types.io import IoDir
from pyocf.utils import Size from pyocf.utils import Size
from pyocf.types.shared import OcfError, OcfCompletion from pyocf.types.shared import OcfCompletion
def __io(io, queue, address, size, data, direction): def __io(io, queue, address, size, data, direction):
io.set_data(data, 0) io.set_data(data, 0)
@ -38,13 +38,16 @@ def _io(io, queue, address, size, data, offset, direction):
memmove(cast(data, c_void_p).value + offset, _data.handle, size) memmove(cast(data, c_void_p).value + offset, _data.handle, size)
return ret return ret
def io_to_core(core, address, size, data, offset, direction): def io_to_core(core, address, size, data, offset, direction):
return _io(core.new_core_io(), core.cache.get_default_queue(), address, size, return _io(core.new_core_io(), core.cache.get_default_queue(), address, size,
data, offset, direction) data, offset, direction)
def io_to_exp_obj(core, address, size, data, offset, direction): def io_to_exp_obj(core, address, size, data, offset, direction):
return _io(core.new_io(), core.cache.get_default_queue(), address, size, data, return _io(core.new_io(), core.cache.get_default_queue(), address, size, data,
offset, direction) offset, direction)
def sector_to_region(sector, region_start): def sector_to_region(sector, region_start):
i = 0 i = 0
@ -52,10 +55,12 @@ def sector_to_region(sector, region_start):
i += 1 i += 1
return i return i
class SectorStatus(IntEnum): class SectorStatus(IntEnum):
DIRTY = 0, DIRTY = 0,
CLEAN = 1, CLEAN = 1,
INVALID = 2, INVALID = 2,
I = SectorStatus.INVALID I = SectorStatus.INVALID
D = SectorStatus.DIRTY D = SectorStatus.DIRTY
@ -85,6 +90,8 @@ C = SectorStatus.CLEAN
# - if clean, exported object sector no @n is filled with 100 + @n # - if clean, exported object sector no @n is filled with 100 + @n
# - if dirty, exported object sector no @n is filled with 200 + @n # - if dirty, exported object sector no @n is filled with 200 + @n
# #
def test_wo_read_data_consistency(pyocf_ctx): def test_wo_read_data_consistency(pyocf_ctx):
# start sector for each region # start sector for each region
region_start = [0, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17] region_start = [0, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17]
@ -114,11 +121,11 @@ def test_wo_read_data_consistency(pyocf_ctx):
data = {} data = {}
# memset n-th sector of core data with n # memset n-th sector of core data with n
data[SectorStatus.INVALID] = bytes([x // SECTOR_SIZE for x in range (WORKSET_SIZE)]) data[SectorStatus.INVALID] = bytes([x // SECTOR_SIZE for x in range(WORKSET_SIZE)])
# memset n-th sector of clean data with n + 100 # memset n-th sector of clean data with n + 100
data[SectorStatus.CLEAN] = bytes([100 + x // SECTOR_SIZE for x in range (WORKSET_SIZE)]) data[SectorStatus.CLEAN] = bytes([100 + x // SECTOR_SIZE for x in range(WORKSET_SIZE)])
# memset n-th sector of dirty data with n + 200 # memset n-th sector of dirty data with n + 200
data[SectorStatus.DIRTY] = bytes([200 + x // SECTOR_SIZE for x in range (WORKSET_SIZE)]) data[SectorStatus.DIRTY] = bytes([200 + x // SECTOR_SIZE for x in range(WORKSET_SIZE)])
result_b = bytes(WORKSET_SIZE) result_b = bytes(WORKSET_SIZE)
@ -137,30 +144,30 @@ def test_wo_read_data_consistency(pyocf_ctx):
combinations.append(S) combinations.append(S)
random.shuffle(combinations) random.shuffle(combinations)
# add fixed test cases at the beginnning # add fixed test cases at the beginning
combinations = fixed_combinations + combinations combinations = fixed_combinations + combinations
for S in combinations[:ITRATION_COUNT]: for S in combinations[:ITRATION_COUNT]:
# write data to core and invalidate all CL # write data to core and invalidate all CL
cache.change_cache_mode(cache_mode = CacheMode.PT) cache.change_cache_mode(cache_mode=CacheMode.PT)
io_to_exp_obj(core, WORKSET_OFFSET, len(data[SectorStatus.INVALID]), \ io_to_exp_obj(core, WORKSET_OFFSET, len(data[SectorStatus.INVALID]),
data[SectorStatus.INVALID], 0, IoDir.WRITE) data[SectorStatus.INVALID], 0, IoDir.WRITE)
# insert clean sectors # insert clean sectors
cache.change_cache_mode(cache_mode = CacheMode.WT) cache.change_cache_mode(cache_mode=CacheMode.WT)
for sec in range(SECTOR_COUNT): for sec in range(SECTOR_COUNT):
region = sector_to_region(sec, region_start) region = sector_to_region(sec, region_start)
if S[region] == SectorStatus.CLEAN: if S[region] == SectorStatus.CLEAN:
io_to_exp_obj(core, WORKSET_OFFSET + SECTOR_SIZE * sec, SECTOR_SIZE, \ io_to_exp_obj(core, WORKSET_OFFSET + SECTOR_SIZE * sec, SECTOR_SIZE,
data[SectorStatus.CLEAN], sec * SECTOR_SIZE, IoDir.WRITE) data[SectorStatus.CLEAN], sec * SECTOR_SIZE, IoDir.WRITE)
# write dirty sectors # write dirty sectors
cache.change_cache_mode(cache_mode = CacheMode.WO) cache.change_cache_mode(cache_mode=CacheMode.WO)
for sec in range(SECTOR_COUNT): for sec in range(SECTOR_COUNT):
region = sector_to_region(sec, region_start) region = sector_to_region(sec, region_start)
if S[region] == SectorStatus.DIRTY: if S[region] == SectorStatus.DIRTY:
io_to_exp_obj(core, WORKSET_OFFSET + SECTOR_SIZE * sec, SECTOR_SIZE, \ io_to_exp_obj(core, WORKSET_OFFSET + SECTOR_SIZE * sec, SECTOR_SIZE,
data[SectorStatus.DIRTY], sec * SECTOR_SIZE, IoDir.WRITE) data[SectorStatus.DIRTY], sec * SECTOR_SIZE, IoDir.WRITE)
for s in start_sec: for s in start_sec:
for e in end_sec: for e in end_sec:
@ -171,10 +178,9 @@ def test_wo_read_data_consistency(pyocf_ctx):
START = s * SECTOR_SIZE START = s * SECTOR_SIZE
END = e * SECTOR_SIZE END = e * SECTOR_SIZE
size = (e - s + 1) * SECTOR_SIZE size = (e - s + 1) * SECTOR_SIZE
assert(0 == io_to_exp_obj(core, WORKSET_OFFSET + START, size, \ assert(0 == io_to_exp_obj(core, WORKSET_OFFSET + START, size,
result_b, START, IoDir.READ)), \ result_b, START, IoDir.READ)),\
"error reading in WO mode: S={}, start={}, end={}".format( \ "error reading in WO mode: S={}, start={}, end={}".format(S, s, e)
S, s, e)
# verify read data # verify read data
for sec in range(s, e + 1): for sec in range(s, e + 1):
@ -182,6 +188,4 @@ def test_wo_read_data_consistency(pyocf_ctx):
region = sector_to_region(sec, region_start) region = sector_to_region(sec, region_start)
check_byte = sec * SECTOR_SIZE check_byte = sec * SECTOR_SIZE
assert(result_b[check_byte] == data[S[region]][check_byte]), \ assert(result_b[check_byte] == data[S[region]][check_byte]), \
"unexpected data in sector {}, S={}, s={}, e={}\n".format( \ "unexpected data in sector {}, S={}, s={}, e={}\n".format(sec, S, s, e)
sec, S, s, e)

View File

@ -111,8 +111,9 @@ def test_start_read_first_and_check_mode(pyocf_ctx, mode: CacheMode, cls: CacheL
core_device.reset_stats() core_device.reset_stats()
test_data = Data.from_string("Changed test data") test_data = Data.from_string("Changed test data")
io_to_core(core_exported, test_data, Size.from_sector(1).B) io_to_core(core_exported, test_data, Size.from_sector(1).B)
check_stats_write_after_read(core_exported, mode, cls, True) check_stats_write_after_read(core_exported, mode, cls, True)
logger.info("[STAGE] Read from exported object after write") logger.info("[STAGE] Read from exported object after write")
@ -159,7 +160,8 @@ def test_start_params(pyocf_ctx, mode: CacheMode, cls: CacheLineSize, layout: Me
assert stats["conf"]["eviction_policy"] == EvictionPolicy.DEFAULT, "Eviction policy" assert stats["conf"]["eviction_policy"] == EvictionPolicy.DEFAULT, "Eviction policy"
assert stats["conf"]["cache_id"] == cache_id, "Cache id" assert stats["conf"]["cache_id"] == cache_id, "Cache id"
assert cache.get_name() == name, "Cache name" assert cache.get_name() == name, "Cache name"
# TODO: metadata_layout, metadata_volatile, max_queue_size, queue_unblock_size, pt_unaligned_io, use_submit_fast # TODO: metadata_layout, metadata_volatile, max_queue_size,
# queue_unblock_size, pt_unaligned_io, use_submit_fast
# TODO: test in functional tests # TODO: test in functional tests
@ -254,8 +256,9 @@ def test_100_start_stop(pyocf_ctx):
def test_start_stop_incrementally(pyocf_ctx): def test_start_stop_incrementally(pyocf_ctx):
"""Starting/stopping multiple caches incrementally. """Starting/stopping multiple caches incrementally.
Check whether OCF behaves correctly when few caches at a time are in turns added and removed (#added > #removed) Check whether OCF behaves correctly when few caches at a time are
until their number reaches limit, and then proportions are reversed and number of caches gradually falls to 0. in turns added and removed (#added > #removed) until their number reaches limit,
and then proportions are reversed and number of caches gradually falls to 0.
""" """
caches = [] caches = []
@ -292,7 +295,8 @@ def test_start_stop_incrementally(pyocf_ctx):
stats = cache.get_stats() stats = cache.get_stats()
cache_id = stats["conf"]["cache_id"] cache_id = stats["conf"]["cache_id"]
cache.stop() cache.stop()
assert get_cache_by_id(pyocf_ctx, cache_id) != 0, "Try getting cache after stopping it" assert get_cache_by_id(pyocf_ctx, cache_id) !=\
0, "Try getting cache after stopping it"
add = not add add = not add
@ -306,11 +310,17 @@ def test_start_cache_same_id(pyocf_ctx, mode, cls):
cache_device1 = Volume(Size.from_MiB(20)) cache_device1 = Volume(Size.from_MiB(20))
cache_device2 = Volume(Size.from_MiB(20)) cache_device2 = Volume(Size.from_MiB(20))
cache_id = randrange(1, 16385) cache_id = randrange(1, 16385)
cache = Cache.start_on_device(cache_device1, cache_mode=mode, cache_line_size=cls, cache_id=cache_id) cache = Cache.start_on_device(cache_device1,
cache_mode=mode,
cache_line_size=cls,
cache_id=cache_id)
cache.get_stats() cache.get_stats()
with pytest.raises(OcfError, match="OCF_ERR_CACHE_EXIST"): with pytest.raises(OcfError, match="OCF_ERR_CACHE_EXIST"):
cache = Cache.start_on_device(cache_device2, cache_mode=mode, cache_line_size=cls, cache_id=cache_id) cache = Cache.start_on_device(cache_device2,
cache_mode=mode,
cache_line_size=cls,
cache_id=cache_id)
cache.get_stats() cache.get_stats()
@ -418,14 +428,20 @@ def check_stats_write_empty(exported_obj: Core, mode: CacheMode, cls: CacheLineS
"Occupancy" "Occupancy"
def check_stats_write_after_read(exported_obj: Core, mode: CacheMode, cls: CacheLineSize, read_from_empty=False): def check_stats_write_after_read(exported_obj: Core,
mode: CacheMode,
cls: CacheLineSize,
read_from_empty=False):
stats = exported_obj.cache.get_stats() stats = exported_obj.cache.get_stats()
assert exported_obj.cache.device.get_stats()[IoDir.WRITE] == \ assert exported_obj.cache.device.get_stats()[IoDir.WRITE] == \
(0 if mode in {CacheMode.WI, CacheMode.PT} else (2 if read_from_empty and mode.lazy_write() else 1)), \ (0 if mode in {CacheMode.WI, CacheMode.PT} else
(2 if read_from_empty and mode.lazy_write() else 1)), \
"Writes to cache device" "Writes to cache device"
assert exported_obj.device.get_stats()[IoDir.WRITE] == (0 if mode.lazy_write() else 1), \ assert exported_obj.device.get_stats()[IoDir.WRITE] == (0 if mode.lazy_write() else 1), \
"Writes to core device" "Writes to core device"
assert stats["req"]["wr_hits"]["value"] == (1 if (mode.read_insert() and mode != CacheMode.WI) or (mode.write_insert() and not read_from_empty) else 0), \ assert stats["req"]["wr_hits"]["value"] == \
(1 if (mode.read_insert() and mode != CacheMode.WI)
or (mode.write_insert() and not read_from_empty) else 0), \
"Write hits" "Write hits"
assert stats["usage"]["occupancy"]["value"] == \ assert stats["usage"]["occupancy"]["value"] == \
(0 if mode in {CacheMode.WI, CacheMode.PT} else (cls / CacheLineSize.LINE_4KiB)), \ (0 if mode in {CacheMode.WI, CacheMode.PT} else (cls / CacheLineSize.LINE_4KiB)), \
@ -438,16 +454,20 @@ def check_stats_read_after_write(exported_obj, mode, cls, write_to_empty=False):
(2 if mode.lazy_write() else (0 if mode == CacheMode.PT else 1)), \ (2 if mode.lazy_write() else (0 if mode == CacheMode.PT else 1)), \
"Writes to cache device" "Writes to cache device"
assert exported_obj.cache.device.get_stats()[IoDir.READ] == \ assert exported_obj.cache.device.get_stats()[IoDir.READ] == \
(1 if mode in {CacheMode.WT, CacheMode.WB, CacheMode.WO} or (mode == CacheMode.WA and not write_to_empty) else 0), \ (1 if mode in {CacheMode.WT, CacheMode.WB, CacheMode.WO}
or (mode == CacheMode.WA and not write_to_empty) else 0), \
"Reads from cache device" "Reads from cache device"
assert exported_obj.device.get_stats()[IoDir.READ] == \ assert exported_obj.device.get_stats()[IoDir.READ] == \
(0 if mode in {CacheMode.WB, CacheMode.WO, CacheMode.WT} or (mode == CacheMode.WA and not write_to_empty) else 1), \ (0 if mode in {CacheMode.WB, CacheMode.WO, CacheMode.WT}
or (mode == CacheMode.WA and not write_to_empty) else 1), \
"Reads from core device" "Reads from core device"
assert stats["req"]["rd_full_misses"]["value"] == (1 if mode in {CacheMode.WA, CacheMode.WI} else 0) \ assert stats["req"]["rd_full_misses"]["value"] == \
(1 if mode in {CacheMode.WA, CacheMode.WI} else 0) \
+ (0 if write_to_empty or mode in {CacheMode.PT, CacheMode.WA} else 1), \ + (0 if write_to_empty or mode in {CacheMode.PT, CacheMode.WA} else 1), \
"Read full misses" "Read full misses"
assert stats["req"]["rd_hits"]["value"] == \ assert stats["req"]["rd_hits"]["value"] == \
(1 if mode in {CacheMode.WT, CacheMode.WB, CacheMode.WO} or (mode == CacheMode.WA and not write_to_empty) else 0), \ (1 if mode in {CacheMode.WT, CacheMode.WB, CacheMode.WO}
or (mode == CacheMode.WA and not write_to_empty) else 0), \
"Read hits" "Read hits"
assert stats["usage"]["occupancy"]["value"] == \ assert stats["usage"]["occupancy"]["value"] == \
(0 if mode == CacheMode.PT else (cls / CacheLineSize.LINE_4KiB)), "Occupancy" (0 if mode == CacheMode.PT else (cls / CacheLineSize.LINE_4KiB)), "Occupancy"
@ -467,4 +487,6 @@ def check_md5_sums(exported_obj: Core, mode: CacheMode):
def get_cache_by_id(ctx, cache_id): def get_cache_by_id(ctx, cache_id):
cache_pointer = c_void_p() cache_pointer = c_void_p()
return OcfLib.getInstance().ocf_mngt_cache_get_by_id(ctx.ctx_handle, cache_id, byref(cache_pointer)) return OcfLib.getInstance().ocf_mngt_cache_get_by_id(ctx.ctx_handle,
cache_id,
byref(cache_pointer))

View File

@ -12,7 +12,6 @@ from pyocf.utils import Size
from pyocf.types.shared import OcfError, CacheLineSize from pyocf.types.shared import OcfError, CacheLineSize
from ctypes import c_uint32 from ctypes import c_uint32
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -51,7 +50,8 @@ def test_fuzzy_start_cache_line_size(pyocf_ctx, c_uint64_randomize, cm):
with pytest.raises(OcfError, match="OCF_ERR_INVALID_CACHE_LINE_SIZE"): with pytest.raises(OcfError, match="OCF_ERR_INVALID_CACHE_LINE_SIZE"):
try_start_cache(cache_mode=cm, cache_line_size=c_uint64_randomize) try_start_cache(cache_mode=cm, cache_line_size=c_uint64_randomize)
else: else:
logger.warning(f"Test skipped for valid cache line size enum value: '{c_uint64_randomize}'. ") logger.warning(
f"Test skipped for valid cache line size enum value: '{c_uint64_randomize}'. ")
@pytest.mark.security @pytest.mark.security
@ -67,8 +67,9 @@ def test_fuzzy_start_name(pyocf_ctx, string_randomize, cm, cls):
""" """
cache_device = Volume(Size.from_MiB(30)) cache_device = Volume(Size.from_MiB(30))
try: try:
cache = Cache.start_on_device(cache_device, name=string_randomize, cache_mode=cm, cache_line_size=cls) cache = Cache.start_on_device(cache_device, name=string_randomize, cache_mode=cm,
except: cache_line_size=cls)
except OcfError:
logger.error(f"Cache did not start properly with correct name value: {string_randomize}") logger.error(f"Cache did not start properly with correct name value: {string_randomize}")
cache.stop() cache.stop()
@ -107,7 +108,8 @@ def test_fuzzy_start_eviction_policy(pyocf_ctx, c_uint32_randomize, cm, cls):
with pytest.raises(OcfError, match="OCF_ERR_INVAL"): with pytest.raises(OcfError, match="OCF_ERR_INVAL"):
try_start_cache(eviction_policy=c_uint32_randomize, cache_mode=cm, cache_line_size=cls) try_start_cache(eviction_policy=c_uint32_randomize, cache_mode=cm, cache_line_size=cls)
else: else:
logger.warning(f"Test skipped for valid eviction policy enum value: '{c_uint32_randomize}'. ") logger.warning(
f"Test skipped for valid eviction policy enum value: '{c_uint32_randomize}'. ")
@pytest.mark.security @pytest.mark.security
@ -125,7 +127,8 @@ def test_fuzzy_start_metadata_layout(pyocf_ctx, c_uint32_randomize, cm, cls):
with pytest.raises(OcfError, match="OCF_ERR_INVAL"): with pytest.raises(OcfError, match="OCF_ERR_INVAL"):
try_start_cache(metadata_layout=c_uint32_randomize, cache_mode=cm, cache_line_size=cls) try_start_cache(metadata_layout=c_uint32_randomize, cache_mode=cm, cache_line_size=cls)
else: else:
logger.warning(f"Test skipped for valid metadata layout enum value: '{c_uint32_randomize}'. ") logger.warning(
f"Test skipped for valid metadata layout enum value: '{c_uint32_randomize}'. ")
@pytest.mark.security @pytest.mark.security
@ -133,7 +136,8 @@ def test_fuzzy_start_metadata_layout(pyocf_ctx, c_uint32_randomize, cm, cls):
@pytest.mark.parametrize('max_wb_queue_size', generate_random_numbers(c_uint32, 10)) @pytest.mark.parametrize('max_wb_queue_size', generate_random_numbers(c_uint32, 10))
def test_fuzzy_start_max_queue_size(pyocf_ctx, max_wb_queue_size, c_uint32_randomize, cls): def test_fuzzy_start_max_queue_size(pyocf_ctx, max_wb_queue_size, c_uint32_randomize, cls):
""" """
Test whether it is impossible to start cache with invalid dependence between max queue size and queue unblock size. Test whether it is impossible to start cache with invalid dependence between max queue size
and queue unblock size.
:param pyocf_ctx: basic pyocf context fixture :param pyocf_ctx: basic pyocf context fixture
:param max_wb_queue_size: max queue size value to start cache with :param max_wb_queue_size: max queue size value to start cache with
:param c_uint32_randomize: queue unblock size value to start cache with :param c_uint32_randomize: queue unblock size value to start cache with
@ -148,4 +152,5 @@ def test_fuzzy_start_max_queue_size(pyocf_ctx, max_wb_queue_size, c_uint32_rando
cache_line_size=cls) cache_line_size=cls)
else: else:
logger.warning(f"Test skipped for valid values: " logger.warning(f"Test skipped for valid values: "
f"'max_queue_size={max_wb_queue_size}, queue_unblock_size={c_uint32_randomize}'.") f"'max_queue_size={max_wb_queue_size}, "
f"queue_unblock_size={c_uint32_randomize}'.")

View File

@ -11,167 +11,169 @@ import os
import sys import sys
import textwrap import textwrap
class TestGenerator(object): class TestGenerator(object):
main_UT_dir = "" main_UT_dir = ""
main_tested_dir = "" main_tested_dir = ""
tested_file_path = "" tested_file_path = ""
tested_function_name = "" tested_function_name = ""
def __init__(self, main_UT_dir, main_tested_dir, file_path, func_name): def __init__(self, main_UT_dir, main_tested_dir, file_path, func_name):
self.set_main_UT_dir(main_UT_dir) self.set_main_UT_dir(main_UT_dir)
self.set_main_tested_dir(main_tested_dir) self.set_main_tested_dir(main_tested_dir)
self.set_tested_file_path(file_path) self.set_tested_file_path(file_path)
self.tested_function_name = func_name self.tested_function_name = func_name
def create_empty_test_file(self): def create_empty_test_file(self):
dst_dir = os.path.dirname(self.get_tested_file_path()[::-1])[::-1] dst_dir = os.path.dirname(self.get_tested_file_path()[::-1])[::-1]
self.create_dir_if_not_exist(self.get_main_UT_dir() + dst_dir) self.create_dir_if_not_exist(self.get_main_UT_dir() + dst_dir)
test_file_name = os.path.basename(self.get_tested_file_path()) test_file_name = os.path.basename(self.get_tested_file_path())
dst_path = self.get_main_UT_dir() + dst_dir + "/" + test_file_name dst_path = self.get_main_UT_dir() + dst_dir + "/" + test_file_name
no_str = "" no_str = ""
no = 0 no = 0
while True: while True:
if not os.path.isfile(dst_path.rsplit(".", 1)[0] + no_str + "." + dst_path.rsplit(".", 1)[1]): if not os.path.isfile("{0}{1}.{2}".format(dst_path.rsplit(".", 1)[0], no_str,
break dst_path.rsplit(".", 1)[1])):
no += 1 break
no_str = str(no) no += 1
no_str = str(no)
dst_path = dst_path.rsplit(".", 1)[0] + no_str + "." + dst_path.rsplit(".", 1)[1] dst_path = dst_path.rsplit(".", 1)[0] + no_str + "." + dst_path.rsplit(".", 1)[1]
buf = self.get_markups() buf = self.get_markups()
buf += "#undef static\n\n" buf += "#undef static\n\n"
buf += "#undef inline\n\n" buf += "#undef inline\n\n"
buf += self.get_UT_includes() buf += self.get_UT_includes()
buf += self.get_includes(self.get_main_tested_dir() + self.get_tested_file_path()) buf += self.get_includes(self.get_main_tested_dir() + self.get_tested_file_path())
buf += self.get_autowrap_file_include(dst_path) buf += self.get_autowrap_file_include(dst_path)
buf += self.get_empty_test_function() buf += self.get_empty_test_function()
buf += self.get_test_main() buf += self.get_test_main()
with open(dst_path, "w") as f: with open(dst_path, "w") as f:
f.writelines(buf) f.writelines(buf)
print(f"{dst_path} generated successfully!") print(f"{dst_path} generated successfully!")
def get_markups(self): def get_markups(self):
ret = "/*\n" ret = "/*\n"
ret += " * <tested_file_path>" + self.get_tested_file_path() + "</tested_file_path>\n" ret += " * <tested_file_path>" + self.get_tested_file_path() + "</tested_file_path>\n"
ret += " * <tested_function>" + self.get_tested_function_name() + "</tested_function>\n" ret += " * <tested_function>" + self.get_tested_function_name() + "</tested_function>\n"
ret += " * <functions_to_leave>\n" ret += " * <functions_to_leave>\n"
ret += " *\tINSERT HERE LIST OF FUNCTIONS YOU WANT TO LEAVE\n" ret += " *\tINSERT HERE LIST OF FUNCTIONS YOU WANT TO LEAVE\n"
ret += " *\tONE FUNCTION PER LINE\n" ret += " *\tONE FUNCTION PER LINE\n"
ret += " * </functions_to_leave>\n" ret += " * </functions_to_leave>\n"
ret += " */\n\n" ret += " */\n\n"
return ret return ret
def create_dir_if_not_exist(self, path): def create_dir_if_not_exist(self, path):
if not os.path.isdir(path): if not os.path.isdir(path):
try: try:
os.makedirs(path) os.makedirs(path)
except Exception: except Exception:
pass pass
return True return True
return None return None
def get_UT_includes(self):
    """Return the #include block required by every cmocka unit test."""
    headers = [
        "<stdarg.h>",
        "<stddef.h>",
        "<setjmp.h>",
        "<cmocka.h>",
        '"print_desc.h"',
    ]
    return "\n" + "\n".join("#include " + h for h in headers) + "\n\n"
def get_autowrap_file_include(self, test_file_path):
    """Return an #include line for the generated wrapper file of *test_file_path*."""
    stem = test_file_path.rsplit(".", 1)[0]
    relative = stem.replace(self.main_UT_dir, "")
    return f"#include \"{relative}_generated_warps.c\"\n\n"
def get_includes(self, abs_path_to_tested_file):
    """Collect every line containing '#include' from the tested source file."""
    pattern = re.compile(r'#include')
    with open(abs_path_to_tested_file, "r") as f:
        matching = [line for line in f if pattern.search(line)]
    return "".join(matching) + "\n"
ret = "static void " + self.get_tested_function_name() + "_test01(void **state)\n"
ret += "{\n"
ret += "\tprint_test_description(\"Put test description here\");\n"
ret += "\tassert_int_equal(1,1);\n"
ret += "}\n\n"
def get_empty_test_function(self): return ret
ret = "static void " + self.get_tested_function_name() + "_test01(void **state)\n"
ret += "{\n"
ret += "\tprint_test_description(\"Put test description here\");\n"
ret += "\tassert_int_equal(1,1);\n"
ret += "}\n\n"
def get_test_main(self):
    """Return a C main() that registers and runs the generated test."""
    function = self.get_tested_function_name()
    path = self.get_tested_file_path()
    return (
        "int main(void)\n"
        "{\n"
        "\tconst struct CMUnitTest tests[] = {\n"
        f"\t\tcmocka_unit_test({function}_test01)\n"
        "\t};\n\n"
        f"\tprint_message(\"Unit test of {path}\");\n\n"
        "\treturn cmocka_run_group_tests(tests, NULL, NULL);\n"
        "}"
    )
def set_tested_file_path(self, path):
    """Resolve *path* against cwd or the tested project dir and store it.

    Exits the process with status 1 when the file exists in neither
    location (preserves the original CLI behaviour).
    """
    tested_dir = self.get_main_tested_dir()
    absolute = os.path.normpath(os.getcwd() + os.sep + path)
    if os.path.isfile(absolute):
        # Path was given relative to the caller's cwd: strip the project prefix.
        self.tested_file_path = absolute.split(tested_dir, 1)[1]
        return
    if os.path.isfile(tested_dir + path):
        # Path is already relative to the tested project directory.
        self.tested_file_path = path
        return
    print(f"{os.path.join(tested_dir, path)}")
    print("Given path not exists!")
    exit(1)
def set_main_UT_dir(self, path):
    """Store the absolute, normalised unit-test directory (trailing separator kept)."""
    here = os.path.dirname(os.path.realpath(__file__))
    self.main_UT_dir = os.path.normpath(os.path.dirname(here + os.sep + path)) + os.sep
def get_main_UT_dir(self): def get_main_tested_dir(self):
return self.main_UT_dir return self.main_tested_dir
def set_main_tested_dir(self, path):
    """Store the absolute, normalised tested-project directory (trailing separator kept)."""
    here = os.path.dirname(os.path.realpath(__file__))
    self.main_tested_dir = os.path.normpath(os.path.dirname(here + os.sep + path)) + os.sep
def get_main_tested_dir(self):
    """Return the stored tested-project directory path."""
    return self.main_tested_dir
def get_tested_file_path(self):
return self.tested_file_path
def get_tested_function_name(self):
    """Return the name of the function the generated test targets."""
    return self.tested_function_name
def __main__():
    """CLI entry point: generate an empty unit-test file for the given source file/function."""
    if len(sys.argv) < 3:
        print("No path to tested file or tested function name given !")
        sys.exit(1)
    file_path, function_name = sys.argv[1], sys.argv[2]
    TestGenerator(
        tests_config.MAIN_DIRECTORY_OF_UNIT_TESTS,
        tests_config.MAIN_DIRECTORY_OF_TESTED_PROJECT,
        file_path,
        function_name,
    ).create_empty_test_file()
if __name__ == "__main__":
    # Run the generator only when executed as a script, never on import.
    __main__()

File diff suppressed because it is too large Load Diff

View File

@ -10,13 +10,15 @@ import os
import sys import sys
import subprocess import subprocess
def run_command(args):
    """Run *args* through the shell and return a CompletedProcess with decoded text output."""
    # shell=True is intentional: callers rely on shell glob expansion
    # (e.g. "cp -r dir/*"), so the argument list is joined into one string.
    result = subprocess.run(
        " ".join(args),
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    result.stdout = result.stdout.decode("ASCII", errors="ignore")
    result.stderr = result.stderr.decode("ASCII", errors="ignore")
    return result
script_path = os.path.dirname(os.path.realpath(__file__)) script_path = os.path.dirname(os.path.realpath(__file__))
main_UT_dir = os.path.join(script_path, tests_config.MAIN_DIRECTORY_OF_UNIT_TESTS) main_UT_dir = os.path.join(script_path, tests_config.MAIN_DIRECTORY_OF_UNIT_TESTS)
@ -29,13 +31,13 @@ if not os.path.isdir(os.path.join(main_UT_dir, "ocf_env", "ocf")):
except Exception: except Exception:
raise Exception("Cannot create ocf_env/ocf directory!") raise Exception("Cannot create ocf_env/ocf directory!")
result = run_command([ "cp", "-r", result = run_command(["cp", "-r",
os.path.join(main_tested_dir, "inc", "*"), os.path.join(main_tested_dir, "inc", "*"),
os.path.join(main_UT_dir, "ocf_env", "ocf") ]) os.path.join(main_UT_dir, "ocf_env", "ocf")])
if result.returncode != 0: if result.returncode != 0:
raise Exception("Preparing sources for testing failed!") raise Exception("Preparing sources for testing failed!")
result = run_command([ os.path.join(script_path, "prepare_sources_for_testing.py") ]) result = run_command([os.path.join(script_path, "prepare_sources_for_testing.py")])
if result.returncode != 0: if result.returncode != 0:
raise Exception("Preparing sources for testing failed!") raise Exception("Preparing sources for testing failed!")
@ -52,7 +54,7 @@ except Exception:
os.chdir(build_dir) os.chdir(build_dir)
cmake_result = run_command([ "cmake", ".." ]) cmake_result = run_command(["cmake", ".."])
print(cmake_result.stdout) print(cmake_result.stdout)
with open(os.path.join(logs_dir, "cmake.output"), "w") as f: with open(os.path.join(logs_dir, "cmake.output"), "w") as f:
@ -64,20 +66,20 @@ if cmake_result.returncode != 0:
f.write("Cmake step failed! More details in cmake.output.") f.write("Cmake step failed! More details in cmake.output.")
sys.exit(1) sys.exit(1)
make_result = run_command([ "make", "-j" ]) make_result = run_command(["make", "-j"])
print(make_result.stdout) print(make_result.stdout)
with open(os.path.join(logs_dir, "make.output"), "w") as f: with open(os.path.join(logs_dir, "make.output"), "w") as f:
f.write(make_result.stdout) f.write(make_result.stdout)
f.write(make_result.stderr) f.write(make_result.stderr)
if make_result.returncode != 0: if make_result.returncode != 0:
with open(os.path.join(logs_dir, "tests.output"), "w") as f: with open(os.path.join(logs_dir, "tests.output"), "w") as f:
f.write("Make step failed! More details in make.output.") f.write("Make step failed! More details in make.output.")
sys.exit(1) sys.exit(1)
test_result = run_command([ "make", "test" ]) test_result = run_command(["make", "test"])
print(test_result.stdout) print(test_result.stdout)
with open(os.path.join(logs_dir , "tests.output"), "w") as f: with open(os.path.join(logs_dir, "tests.output"), "w") as f:
f.write(test_result.stdout) f.write(test_result.stdout)

View File

@ -11,25 +11,34 @@ MAIN_DIRECTORY_OF_TESTED_PROJECT = "../../../"
MAIN_DIRECTORY_OF_UNIT_TESTS = "../tests/" MAIN_DIRECTORY_OF_UNIT_TESTS = "../tests/"
# Paths to all directories, in which tests are stored. All paths should be relative to MAIN_DIRECTORY_OF_UNIT_TESTS # Paths to all directories, in which tests are stored. All paths should be relative to
DIRECTORIES_WITH_TESTS_LIST = ["cleaning/", "metadata/", "mngt/", "concurrency/", "engine/", "eviction/", "utils/"] # MAIN_DIRECTORY_OF_UNIT_TESTS
DIRECTORIES_WITH_TESTS_LIST = ["cleaning/", "metadata/", "mngt/", "concurrency/", "engine/",
"eviction/", "utils/"]
# Paths to all directories containing files with sources. All paths should be relative to MAIN_DIRECTORY_OF_TESTED_PROJECT # Paths to all directories containing files with sources. All paths should be relative to
DIRECTORIES_TO_INCLUDE_FROM_PROJECT_LIST = ["src/", "src/cleaning/", "src/engine/", "src/metadata/", "src/eviction/", "src/mngt/", "src/concurrency/", "src/utils/", "inc/"] # MAIN_DIRECTORY_OF_TESTED_PROJECT
DIRECTORIES_TO_INCLUDE_FROM_PROJECT_LIST = ["src/", "src/cleaning/", "src/engine/", "src/metadata/",
"src/eviction/", "src/mngt/", "src/concurrency/",
"src/utils/", "inc/"]
# Paths to all directories from directory with tests, which should also be included # Paths to all directories from directory with tests, which should also be included
DIRECTORIES_TO_INCLUDE_FROM_UT_LIST = ["ocf_env/"] DIRECTORIES_TO_INCLUDE_FROM_UT_LIST = ["ocf_env/"]
# Paths to include, required by cmake, cmocka, cunit # Paths to include, required by cmake, cmocka, cunit
FRAMEWORK_DIRECTORIES_TO_INCLUDE_LIST = ["${CMOCKA_PUBLIC_INCLUDE_DIRS}" ,"${CMAKE_BINARY_DIR}", "${CMAKE_CURRENT_SOURCE_DIR}"] FRAMEWORK_DIRECTORIES_TO_INCLUDE_LIST = ["${CMOCKA_PUBLIC_INCLUDE_DIRS}", "${CMAKE_BINARY_DIR}",
"${CMAKE_CURRENT_SOURCE_DIR}"]
# Path to directory containing all sources after preprocessing. Should be relative to MAIN_DIRECTORY_OF_UNIT_TESTS # Path to directory containing all sources after preprocessing. Should be relative to
# MAIN_DIRECTORY_OF_UNIT_TESTS
PREPROCESSED_SOURCES_REPOSITORY = "preprocessed_sources_repository/" PREPROCESSED_SOURCES_REPOSITORY = "preprocessed_sources_repository/"
# Path to directory containing all sources after removing unneeded functions and cmake files for tests # Path to directory containing all sources after removing unneeded functions and cmake files for
# tests
SOURCES_TO_TEST_REPOSITORY = "sources_to_test_repository/" SOURCES_TO_TEST_REPOSITORY = "sources_to_test_repository/"
# List of includes. Directories will be recursively copied to given destinations in directory with tests. # List of includes.
# Directories will be recursively copied to given destinations in directory with tests.
# key - destination in dir with tests # key - destination in dir with tests
# value - path in tested project to dir which should be copied # value - path in tested project to dir which should be copied
INCLUDES_TO_COPY_DICT = { 'ocf_env/ocf/' : "inc/" } INCLUDES_TO_COPY_DICT = {'ocf_env/ocf/': "inc/"}