Adapt all python code to PEP8 style standards
Signed-off-by: Kamil Lepek <kamil.lepek94@gmail.com>
parent 1e2b8f1980
commit e52d34c1c8
@@ -8,5 +8,6 @@ pycodestyle:
     max-line-length: 100
     ignore:
         - E402 # module level import not at top of file
+        - W503 # line break after binary operator

 no_blank_comment: True
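The configuration above caps lines at 100 columns and skips E402 and W503. A minimal sketch of running the same check locally through pycodestyle's Python API (not part of this commit; the tests/functional target path is a hypothetical example):

    import pycodestyle

    # Mirror the repository settings: 100-column lines, ignore E402
    # (module-level import not at top of file) and W503 (line break
    # around a binary operator).
    style = pycodestyle.StyleGuide(max_line_length=100, ignore=["E402", "W503"])
    report = style.check_files(["tests/functional"])  # hypothetical path
    print("violations:", report.total_errors)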
@@ -86,6 +86,7 @@ class CacheMode(IntEnum):
     def read_insert(self):
         return self.value not in [CacheMode.PT, CacheMode.WO]

+
 class EvictionPolicy(IntEnum):
     LRU = 0
     DEFAULT = LRU
@@ -306,7 +307,7 @@ class Cache:
         c.start_cache()
         try:
             c.load_cache(device)
-        except:
+        except: # noqa E722
             c.stop()
             raise

@@ -319,7 +320,7 @@ class Cache:
         c.start_cache()
         try:
             c.attach_device(device, force=True)
-        except:
+        except: # noqa E722
             c.stop()
             raise
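Both hunks above silence pycodestyle's E722 ("do not use bare 'except'") with a # noqa comment rather than narrowing the handler, so the stop-and-re-raise cleanup still fires on any failure. A minimal sketch of the narrower alternative that would satisfy E722 without a suppression comment, at the cost of no longer catching BaseException subclasses such as KeyboardInterrupt (hypothetical variables, not this commit's change):

    try:
        c.load_cache(device)
    except Exception:  # E722-clean; a bare 'except:' would also catch SystemExit
        c.stop()
        raise  # re-raise the original exception after cleanup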
@@ -529,13 +530,12 @@ class Cache:
         if c.results["error"]:
             raise OcfError("Couldn't flush cache", c.results["error"])

-
     def get_name(self):
         self.read_lock()

         try:
             return str(self.owner.lib.ocf_cache_get_name(self), encoding="ascii")
-        except:
+        except: # noqa E722
             raise OcfError("Couldn't get cache name")
         finally:
             self.read_unlock()
@@ -56,7 +56,7 @@ class DataOps(Structure):


 class Data:
-    DATA_POISON=0xA5
+    DATA_POISON = 0xA5
     PAGE_SIZE = 4096

     _instances_ = {}

@@ -109,7 +109,7 @@ class Data:
     def from_string(cls, source: str, encoding: str = "ascii"):
         b = bytes(source, encoding)
         # duplicate string to fill space up to sector boundary
-        padding_len = S.from_B(len(b), sector_aligned = True).B - len(b)
+        padding_len = S.from_B(len(b), sector_aligned=True).B - len(b)
         padding = b * (padding_len // len(b) + 1)
         padding = padding[:padding_len]
         b = b + padding
@@ -92,7 +92,7 @@ class Io(Structure):
     def end(self, err):
         try:
             self.callback(err)
-        except:
+        except: # noqa E722
             pass

         self.put()
@@ -36,6 +36,7 @@ def io_queue_run(*, queue: Queue, kick: Condition, stop: Event):
         if stop.is_set() and not OcfLib.getInstance().ocf_queue_pending_io(queue):
             break

+
 class Queue:
     _instances_ = {}

@@ -102,4 +103,3 @@ class Queue:
         self.kick_condition.notify_all()

         self.thread.join()
-
@@ -102,7 +102,7 @@ class SharedOcfObject(Structure):
     def get_instance(cls, ref: int):
         try:
             return cls._instances_[ref]
-        except:
+        except: # noqa E722
             logging.getLogger("pyocf").error(
                 "OcfSharedObject corruption. wanted: {} instances: {}".format(
                     ref, cls._instances_
@@ -74,7 +74,7 @@ class VolumeIoPriv(Structure):


 class Volume(Structure):
-    VOLUME_POISON=0x13
+    VOLUME_POISON = 0x13

     _fields_ = [("_storage", c_void_p)]
     _instances_ = {}

@@ -184,7 +184,7 @@ class Volume(Structure):
         uuid = str(uuid_ptr.contents._data, encoding="ascii")
         try:
             volume = Volume.get_by_uuid(uuid)
-        except:
+        except: # noqa E722 TODO:Investigate whether this really should be so broad
             print("Tried to access unallocated volume {}".format(uuid))
             print("{}".format(Volume._uuid_))
             return -1

@@ -255,7 +255,7 @@ class Volume(Structure):
             memset(dst, 0, discard.contents._bytes)

             discard.contents._end(discard, 0)
-        except:
+        except: # noqa E722
             discard.contents._end(discard, -5)

     def get_stats(self):

@@ -269,8 +269,7 @@ class Volume(Structure):
         self.stats[IoDir(io.contents._dir)] += 1

         io_priv = cast(
-            OcfLib.getInstance().ocf_io_get_priv(io), POINTER(VolumeIoPriv)
-        )
+            OcfLib.getInstance().ocf_io_get_priv(io), POINTER(VolumeIoPriv))
         offset = io_priv.contents._offset

         if io.contents._dir == IoDir.WRITE:

@@ -286,7 +285,7 @@ class Volume(Structure):
             io_priv.contents._offset += io.contents._bytes

             io.contents._end(io, 0)
-        except:
+        except: # noqa E722
             io.contents._end(io, -5)

     def dump(self, offset=0, size=0, ignore=VOLUME_POISON, **kwargs):

@@ -325,10 +324,11 @@ class ErrorDevice(Volume):
         super().reset_stats()
         self.stats["errors"] = {IoDir.WRITE: 0, IoDir.READ: 0}

+
 class TraceDevice(Volume):
     def __init__(self, size, trace_fcn=None, uuid=None):
         super().__init__(size, uuid)
-        self.trace_fcn=trace_fcn
+        self.trace_fcn = trace_fcn

     def submit_io(self, io):
         submit = True
@@ -6,7 +6,8 @@
 from ctypes import string_at


-def print_buffer(buf, length, offset=0, width=16, ignore=0, stop_after_count_ignored=0, print_fcn=print):
+def print_buffer(buf, length, offset=0, width=16, ignore=0,
+                 stop_after_count_ignored=0, print_fcn=print):
     end = int(offset) + int(length)
     offset = int(offset)
     ignored_lines = 0

@@ -15,16 +16,13 @@ def print_buffer(buf, length, offset=0, width=16, ignore=0, stop_after_count_ignored=0, print_fcn=print):
     stop_after_count_ignored = int(stop_after_count_ignored / width)

     for addr in range(offset, end, width):
-        cur_line = buf[addr : min(end, addr + width)]
+        cur_line = buf[addr: min(end, addr + width)]
         byteline = ""
         asciiline = ""
         if not any(x != ignore for x in cur_line):
             if stop_after_count_ignored and ignored_lines > stop_after_count_ignored:
-                print_fcn(
-                    "<{} bytes of '0x{:02X}' encountered, stopping>".format(
-                        stop_after_count_ignored * width, ignore
-                    )
-                )
+                print_fcn("<{} bytes of '0x{:02X}' encountered, stopping>".
+                          format(stop_after_count_ignored * width, ignore))
                 return
             ignored_lines += 1
             continue
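The print_buffer hunks above fix over-long lines (E501) by continuing inside the open parentheses. PEP 8 allows either alignment with the opening delimiter or a hanging indent; a small sketch with hypothetical names:

    # aligned with the opening delimiter, as in print_buffer above
    result = compute(alpha, beta,
                     gamma, delta)

    # hanging indent: no argument left on the opening line
    result = compute(
        alpha, beta, gamma, delta)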
@@ -71,23 +69,23 @@ class Size:
         return self.bytes

     @classmethod
-    def from_B(cls, value, sector_aligned = False):
+    def from_B(cls, value, sector_aligned=False):
         return cls(value, sector_aligned)

     @classmethod
-    def from_KiB(cls, value, sector_aligned = False):
+    def from_KiB(cls, value, sector_aligned=False):
         return cls(value * cls._KiB, sector_aligned)

     @classmethod
-    def from_MiB(cls, value, sector_aligned = False):
+    def from_MiB(cls, value, sector_aligned=False):
         return cls(value * cls._MiB, sector_aligned)

     @classmethod
-    def from_GiB(cls, value, sector_aligned = False):
+    def from_GiB(cls, value, sector_aligned=False):
         return cls(value * cls._GiB, sector_aligned)

     @classmethod
-    def from_TiB(cls, value, sector_aligned = False):
+    def from_TiB(cls, value, sector_aligned=False):
         return cls(value * cls._TiB, sector_aligned)

     @classmethod
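Each from_* constructor above drops the spaces around the keyword default, pycodestyle's E251 (unexpected spaces around keyword / parameter equals). PEP 8 reverses the recommendation when the parameter carries an annotation; a short sketch with hypothetical names:

    def resize(value, sector_aligned=False):     # plain default: no spaces
        ...

    resize(512, sector_aligned=True)             # keyword argument: no spaces

    def pad(value: int, aligned: bool = False):  # annotated default: spaces preferred
        ...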
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: BSD-3-Clause-Clear
 #

-import pytest
 from ctypes import c_int, memmove, cast, c_void_p
 from enum import IntEnum
 from itertools import product

@@ -11,11 +10,12 @@ import random

 from pyocf.types.cache import Cache, CacheMode
 from pyocf.types.core import Core
-from pyocf.types.volume import Volume, ErrorDevice
+from pyocf.types.volume import Volume
 from pyocf.types.data import Data
 from pyocf.types.io import IoDir
 from pyocf.utils import Size
-from pyocf.types.shared import OcfError, OcfCompletion
+from pyocf.types.shared import OcfCompletion

+
 def __io(io, queue, address, size, data, direction):
     io.set_data(data, 0)

@@ -38,25 +38,30 @@ def _io(io, queue, address, size, data, offset, direction):
         memmove(cast(data, c_void_p).value + offset, _data.handle, size)
     return ret

+
 def io_to_core(core, address, size, data, offset, direction):
     return _io(core.new_core_io(), core.cache.get_default_queue(), address, size,
                data, offset, direction)

+
 def io_to_exp_obj(core, address, size, data, offset, direction):
     return _io(core.new_io(), core.cache.get_default_queue(), address, size, data,
               offset, direction)

+
 def sector_to_region(sector, region_start):
     i = 0
     while i < len(region_start) - 1 and sector >= region_start[i + 1]:
         i += 1
     return i

+
 class SectorStatus(IntEnum):
     DIRTY = 0,
     CLEAN = 1,
     INVALID = 2,

+
 I = SectorStatus.INVALID
 D = SectorStatus.DIRTY
 C = SectorStatus.CLEAN
@@ -85,6 +90,8 @@ C = SectorStatus.CLEAN
 # - if clean, exported object sector no @n is filled with 100 + @n
 # - if dirty, exported object sector no @n is filled with 200 + @n
 #
+
+
 def test_wo_read_data_consistency(pyocf_ctx):
     # start sector for each region
     region_start = [0, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17]

@@ -114,11 +121,11 @@ def test_wo_read_data_consistency(pyocf_ctx):

     data = {}
     # memset n-th sector of core data with n
-    data[SectorStatus.INVALID] = bytes([x // SECTOR_SIZE for x in range (WORKSET_SIZE)])
+    data[SectorStatus.INVALID] = bytes([x // SECTOR_SIZE for x in range(WORKSET_SIZE)])
     # memset n-th sector of clean data with n + 100
-    data[SectorStatus.CLEAN] = bytes([100 + x // SECTOR_SIZE for x in range (WORKSET_SIZE)])
+    data[SectorStatus.CLEAN] = bytes([100 + x // SECTOR_SIZE for x in range(WORKSET_SIZE)])
     # memset n-th sector of dirty data with n + 200
-    data[SectorStatus.DIRTY] = bytes([200 + x // SECTOR_SIZE for x in range (WORKSET_SIZE)])
+    data[SectorStatus.DIRTY] = bytes([200 + x // SECTOR_SIZE for x in range(WORKSET_SIZE)])

     result_b = bytes(WORKSET_SIZE)

@@ -137,29 +144,29 @@ def test_wo_read_data_consistency(pyocf_ctx):
         combinations.append(S)
     random.shuffle(combinations)

-    # add fixed test cases at the beginnning
+    # add fixed test cases at the beginning
     combinations = fixed_combinations + combinations

     for S in combinations[:ITRATION_COUNT]:
         # write data to core and invalidate all CL
-        cache.change_cache_mode(cache_mode = CacheMode.PT)
-        io_to_exp_obj(core, WORKSET_OFFSET, len(data[SectorStatus.INVALID]), \
+        cache.change_cache_mode(cache_mode=CacheMode.PT)
+        io_to_exp_obj(core, WORKSET_OFFSET, len(data[SectorStatus.INVALID]),
                       data[SectorStatus.INVALID], 0, IoDir.WRITE)

         # insert clean sectors
-        cache.change_cache_mode(cache_mode = CacheMode.WT)
+        cache.change_cache_mode(cache_mode=CacheMode.WT)
         for sec in range(SECTOR_COUNT):
             region = sector_to_region(sec, region_start)
             if S[region] == SectorStatus.CLEAN:
-                io_to_exp_obj(core, WORKSET_OFFSET + SECTOR_SIZE * sec, SECTOR_SIZE, \
+                io_to_exp_obj(core, WORKSET_OFFSET + SECTOR_SIZE * sec, SECTOR_SIZE,
                               data[SectorStatus.CLEAN], sec * SECTOR_SIZE, IoDir.WRITE)

         # write dirty sectors
-        cache.change_cache_mode(cache_mode = CacheMode.WO)
+        cache.change_cache_mode(cache_mode=CacheMode.WO)
         for sec in range(SECTOR_COUNT):
             region = sector_to_region(sec, region_start)
             if S[region] == SectorStatus.DIRTY:
-                io_to_exp_obj(core, WORKSET_OFFSET + SECTOR_SIZE * sec, SECTOR_SIZE, \
+                io_to_exp_obj(core, WORKSET_OFFSET + SECTOR_SIZE * sec, SECTOR_SIZE,
                               data[SectorStatus.DIRTY], sec * SECTOR_SIZE, IoDir.WRITE)

         for s in start_sec:

@@ -171,10 +178,9 @@ def test_wo_read_data_consistency(pyocf_ctx):
                 START = s * SECTOR_SIZE
                 END = e * SECTOR_SIZE
                 size = (e - s + 1) * SECTOR_SIZE
-                assert(0 == io_to_exp_obj(core, WORKSET_OFFSET + START, size, \
-                    result_b, START, IoDir.READ)), \
-                    "error reading in WO mode: S={}, start={}, end={}".format( \
-                    S, s, e)
+                assert(0 == io_to_exp_obj(core, WORKSET_OFFSET + START, size,
+                    result_b, START, IoDir.READ)),\
+                    "error reading in WO mode: S={}, start={}, end={}".format(S, s, e)

                 # verify read data
                 for sec in range(s, e + 1):

@@ -182,6 +188,4 @@ def test_wo_read_data_consistency(pyocf_ctx):
                     region = sector_to_region(sec, region_start)
                     check_byte = sec * SECTOR_SIZE
                     assert(result_b[check_byte] == data[S[region]][check_byte]), \
-                        "unexpected data in sector {}, S={}, s={}, e={}\n".format( \
-                        sec, S, s, e)
-
+                        "unexpected data in sector {}, S={}, s={}, e={}\n".format(sec, S, s, e)
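The test hunks above also replace trailing-backslash continuations with implicit continuation inside brackets, which PEP 8 prefers since whitespace after a backslash silently breaks the line join. A generic sketch (hypothetical names):

    # before: fragile explicit continuation
    total = first_value + \
        second_value

    # after: implicit continuation inside parentheses
    total = (first_value +
             second_value)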
@@ -113,6 +113,7 @@ def test_start_read_first_and_check_mode(pyocf_ctx, mode: CacheMode, cls: CacheLineSize):
     test_data = Data.from_string("Changed test data")

     io_to_core(core_exported, test_data, Size.from_sector(1).B)
+
     check_stats_write_after_read(core_exported, mode, cls, True)

     logger.info("[STAGE] Read from exported object after write")

@@ -159,7 +160,8 @@ def test_start_params(pyocf_ctx, mode: CacheMode, cls: CacheLineSize, layout: MetadataLayout):
     assert stats["conf"]["eviction_policy"] == EvictionPolicy.DEFAULT, "Eviction policy"
     assert stats["conf"]["cache_id"] == cache_id, "Cache id"
     assert cache.get_name() == name, "Cache name"
-    # TODO: metadata_layout, metadata_volatile, max_queue_size, queue_unblock_size, pt_unaligned_io, use_submit_fast
+    # TODO: metadata_layout, metadata_volatile, max_queue_size,
+    # queue_unblock_size, pt_unaligned_io, use_submit_fast
     # TODO: test in functional tests
@@ -254,8 +256,9 @@ def test_100_start_stop(pyocf_ctx):

 def test_start_stop_incrementally(pyocf_ctx):
     """Starting/stopping multiple caches incrementally.
-    Check whether OCF behaves correctly when few caches at a time are in turns added and removed (#added > #removed)
-    until their number reaches limit, and then proportions are reversed and number of caches gradually falls to 0.
+    Check whether OCF behaves correctly when few caches at a time are
+    in turns added and removed (#added > #removed) until their number reaches limit,
+    and then proportions are reversed and number of caches gradually falls to 0.
     """

     caches = []

@@ -292,7 +295,8 @@ def test_start_stop_incrementally(pyocf_ctx):
                 stats = cache.get_stats()
                 cache_id = stats["conf"]["cache_id"]
                 cache.stop()
-                assert get_cache_by_id(pyocf_ctx, cache_id) != 0, "Try getting cache after stopping it"
+                assert get_cache_by_id(pyocf_ctx, cache_id) !=\
+                    0, "Try getting cache after stopping it"
         add = not add
@@ -306,11 +310,17 @@ def test_start_cache_same_id(pyocf_ctx, mode, cls):
     cache_device1 = Volume(Size.from_MiB(20))
     cache_device2 = Volume(Size.from_MiB(20))
     cache_id = randrange(1, 16385)
-    cache = Cache.start_on_device(cache_device1, cache_mode=mode, cache_line_size=cls, cache_id=cache_id)
+    cache = Cache.start_on_device(cache_device1,
+                                  cache_mode=mode,
+                                  cache_line_size=cls,
+                                  cache_id=cache_id)
     cache.get_stats()

     with pytest.raises(OcfError, match="OCF_ERR_CACHE_EXIST"):
-        cache = Cache.start_on_device(cache_device2, cache_mode=mode, cache_line_size=cls, cache_id=cache_id)
+        cache = Cache.start_on_device(cache_device2,
+                                      cache_mode=mode,
+                                      cache_line_size=cls,
+                                      cache_id=cache_id)
         cache.get_stats()
@@ -418,14 +428,20 @@ def check_stats_write_empty(exported_obj: Core, mode: CacheMode, cls: CacheLineSize):
         "Occupancy"


-def check_stats_write_after_read(exported_obj: Core, mode: CacheMode, cls: CacheLineSize, read_from_empty=False):
+def check_stats_write_after_read(exported_obj: Core,
+                                 mode: CacheMode,
+                                 cls: CacheLineSize,
+                                 read_from_empty=False):
     stats = exported_obj.cache.get_stats()
     assert exported_obj.cache.device.get_stats()[IoDir.WRITE] == \
-        (0 if mode in {CacheMode.WI, CacheMode.PT} else (2 if read_from_empty and mode.lazy_write() else 1)), \
+        (0 if mode in {CacheMode.WI, CacheMode.PT} else
+            (2 if read_from_empty and mode.lazy_write() else 1)), \
         "Writes to cache device"
     assert exported_obj.device.get_stats()[IoDir.WRITE] == (0 if mode.lazy_write() else 1), \
         "Writes to core device"
-    assert stats["req"]["wr_hits"]["value"] == (1 if (mode.read_insert() and mode != CacheMode.WI) or (mode.write_insert() and not read_from_empty) else 0), \
+    assert stats["req"]["wr_hits"]["value"] == \
+        (1 if (mode.read_insert() and mode != CacheMode.WI)
+            or (mode.write_insert() and not read_from_empty) else 0), \
         "Write hits"
     assert stats["usage"]["occupancy"]["value"] == \
         (0 if mode in {CacheMode.WI, CacheMode.PT} else (cls / CacheLineSize.LINE_4KiB)), \

@@ -438,16 +454,20 @@ def check_stats_read_after_write(exported_obj, mode, cls, write_to_empty=False):
         (2 if mode.lazy_write() else (0 if mode == CacheMode.PT else 1)), \
         "Writes to cache device"
     assert exported_obj.cache.device.get_stats()[IoDir.READ] == \
-        (1 if mode in {CacheMode.WT, CacheMode.WB, CacheMode.WO} or (mode == CacheMode.WA and not write_to_empty) else 0), \
+        (1 if mode in {CacheMode.WT, CacheMode.WB, CacheMode.WO}
+            or (mode == CacheMode.WA and not write_to_empty) else 0), \
         "Reads from cache device"
     assert exported_obj.device.get_stats()[IoDir.READ] == \
-        (0 if mode in {CacheMode.WB, CacheMode.WO, CacheMode.WT} or (mode == CacheMode.WA and not write_to_empty) else 1), \
+        (0 if mode in {CacheMode.WB, CacheMode.WO, CacheMode.WT}
+            or (mode == CacheMode.WA and not write_to_empty) else 1), \
         "Reads from core device"
-    assert stats["req"]["rd_full_misses"]["value"] == (1 if mode in {CacheMode.WA, CacheMode.WI} else 0) \
+    assert stats["req"]["rd_full_misses"]["value"] == \
+        (1 if mode in {CacheMode.WA, CacheMode.WI} else 0) \
         + (0 if write_to_empty or mode in {CacheMode.PT, CacheMode.WA} else 1), \
         "Read full misses"
     assert stats["req"]["rd_hits"]["value"] == \
-        (1 if mode in {CacheMode.WT, CacheMode.WB, CacheMode.WO} or (mode == CacheMode.WA and not write_to_empty) else 0), \
+        (1 if mode in {CacheMode.WT, CacheMode.WB, CacheMode.WO}
+            or (mode == CacheMode.WA and not write_to_empty) else 0), \
         "Read hits"
     assert stats["usage"]["occupancy"]["value"] == \
         (0 if mode == CacheMode.PT else (cls / CacheLineSize.LINE_4KiB)), "Occupancy"

@@ -467,4 +487,6 @@ def check_md5_sums(exported_obj: Core, mode: CacheMode):

 def get_cache_by_id(ctx, cache_id):
     cache_pointer = c_void_p()
-    return OcfLib.getInstance().ocf_mngt_cache_get_by_id(ctx.ctx_handle, cache_id, byref(cache_pointer))
+    return OcfLib.getInstance().ocf_mngt_cache_get_by_id(ctx.ctx_handle,
+                                                         cache_id,
+                                                         byref(cache_pointer))
@@ -12,7 +12,6 @@ from pyocf.utils import Size
 from pyocf.types.shared import OcfError, CacheLineSize
 from ctypes import c_uint32

-
 logger = logging.getLogger(__name__)

@@ -51,7 +50,8 @@ def test_fuzzy_start_cache_line_size(pyocf_ctx, c_uint64_randomize, cm):
         with pytest.raises(OcfError, match="OCF_ERR_INVALID_CACHE_LINE_SIZE"):
             try_start_cache(cache_mode=cm, cache_line_size=c_uint64_randomize)
     else:
-        logger.warning(f"Test skipped for valid cache line size enum value: '{c_uint64_randomize}'. ")
+        logger.warning(
+            f"Test skipped for valid cache line size enum value: '{c_uint64_randomize}'. ")


 @pytest.mark.security
@@ -67,8 +67,9 @@ def test_fuzzy_start_name(pyocf_ctx, string_randomize, cm, cls):
     """
     cache_device = Volume(Size.from_MiB(30))
     try:
-        cache = Cache.start_on_device(cache_device, name=string_randomize, cache_mode=cm, cache_line_size=cls)
-    except:
+        cache = Cache.start_on_device(cache_device, name=string_randomize, cache_mode=cm,
+                                      cache_line_size=cls)
+    except OcfError:
         logger.error(f"Cache did not start properly with correct name value: {string_randomize}")
     cache.stop()

@@ -107,7 +108,8 @@ def test_fuzzy_start_eviction_policy(pyocf_ctx, c_uint32_randomize, cm, cls):
         with pytest.raises(OcfError, match="OCF_ERR_INVAL"):
             try_start_cache(eviction_policy=c_uint32_randomize, cache_mode=cm, cache_line_size=cls)
     else:
-        logger.warning(f"Test skipped for valid eviction policy enum value: '{c_uint32_randomize}'. ")
+        logger.warning(
+            f"Test skipped for valid eviction policy enum value: '{c_uint32_randomize}'. ")


 @pytest.mark.security

@@ -125,7 +127,8 @@ def test_fuzzy_start_metadata_layout(pyocf_ctx, c_uint32_randomize, cm, cls):
         with pytest.raises(OcfError, match="OCF_ERR_INVAL"):
             try_start_cache(metadata_layout=c_uint32_randomize, cache_mode=cm, cache_line_size=cls)
     else:
-        logger.warning(f"Test skipped for valid metadata layout enum value: '{c_uint32_randomize}'. ")
+        logger.warning(
+            f"Test skipped for valid metadata layout enum value: '{c_uint32_randomize}'. ")


 @pytest.mark.security

@@ -133,7 +136,8 @@ def test_fuzzy_start_metadata_layout(pyocf_ctx, c_uint32_randomize, cm, cls):
 @pytest.mark.parametrize('max_wb_queue_size', generate_random_numbers(c_uint32, 10))
 def test_fuzzy_start_max_queue_size(pyocf_ctx, max_wb_queue_size, c_uint32_randomize, cls):
     """
-    Test whether it is impossible to start cache with invalid dependence between max queue size and queue unblock size.
+    Test whether it is impossible to start cache with invalid dependence between max queue size
+    and queue unblock size.
     :param pyocf_ctx: basic pyocf context fixture
     :param max_wb_queue_size: max queue size value to start cache with
     :param c_uint32_randomize: queue unblock size value to start cache with

@@ -148,4 +152,5 @@ def test_fuzzy_start_max_queue_size(pyocf_ctx, max_wb_queue_size, c_uint32_randomize, cls):
                             cache_line_size=cls)
     else:
         logger.warning(f"Test skipped for valid values: "
-                       f"'max_queue_size={max_wb_queue_size}, queue_unblock_size={c_uint32_randomize}'.")
+                       f"'max_queue_size={max_wb_queue_size}, "
+                       f"queue_unblock_size={c_uint32_randomize}'.")
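The warnings above are shortened by splitting one long f-string into adjacent literals, which Python concatenates at compile time into a single string. A sketch with hypothetical names:

    message = (f"max_queue_size={queue_size}, "   # adjacent literals are
               f"unblock_size={unblock_size}")    # joined into one string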
@@ -11,6 +11,7 @@ import os
 import sys
 import textwrap

+
 class TestGenerator(object):
     main_UT_dir = ""
     main_tested_dir = ""

@@ -34,7 +35,8 @@ class TestGenerator(object):
         no_str = ""
         no = 0
         while True:
-            if not os.path.isfile(dst_path.rsplit(".", 1)[0] + no_str + "." + dst_path.rsplit(".", 1)[1]):
+            if not os.path.isfile("{0}{1}.{2}".format(dst_path.rsplit(".", 1)[0], no_str,
+                                                      dst_path.rsplit(".", 1)[1])):
                 break
             no += 1
             no_str = str(no)

@@ -75,7 +77,6 @@ class TestGenerator(object):
                 return True
         return None

-
     def get_UT_includes(self):
         ret = '''
            #include <stdarg.h>

@@ -136,7 +137,6 @@ class TestGenerator(object):
             print("Given path not exists!")
             exit(1)

-
     def set_main_UT_dir(self, path):
         p = os.path.dirname(os.path.realpath(__file__)) + os.sep + path
         p = os.path.normpath(os.path.dirname(p)) + os.sep

@@ -159,6 +159,7 @@ class TestGenerator(object):
     def get_tested_function_name(self):
         return self.tested_function_name

+
 def __main__():
     if len(sys.argv) < 3:
         print("No path to tested file or tested function name given !")

@@ -167,11 +168,12 @@ def __main__():
     tested_file_path = sys.argv[1]
     tested_function_name = sys.argv[2]

-    generator = TestGenerator(tests_config.MAIN_DIRECTORY_OF_UNIT_TESTS,\
-        tests_config.MAIN_DIRECTORY_OF_TESTED_PROJECT,\
+    generator = TestGenerator(tests_config.MAIN_DIRECTORY_OF_UNIT_TESTS,
+                              tests_config.MAIN_DIRECTORY_OF_TESTED_PROJECT,
                               tested_file_path, tested_function_name)

     generator.create_empty_test_file()

+
 if __name__ == "__main__":
     __main__()
@@ -9,8 +9,9 @@ import shutil
 import sys
 import re
 import os.path
-from collections import defaultdict
 import subprocess
+import tests_config

+
 def run_command(args, verbose=True):
     result = subprocess.run(" ".join(args), shell=True,

@@ -21,7 +22,7 @@ def run_command(args, verbose=True):
         print(result.stderr)
     return result

-import tests_config
+
 #
 # This script purpose is to remove unused functions definitions
 # It is giving the opportunity to unit test all functions from OCF.
@@ -45,7 +46,7 @@ class UnitTestsSourcesGenerator(object):

     ctags_path = ""

-    test_catalouges_list = []
+    test_catalogues_list = []
     dirs_to_include_list = []

     tests_internal_includes_list = []

@@ -63,14 +64,15 @@ class UnitTestsSourcesGenerator(object):

     def __init__(self):
         self.script_file_abs_path = os.path.realpath(__file__)
-        self.script_dir_abs_path = os.path.normpath(os.path.dirname(self.script_file_abs_path) + os.sep)
+        self.script_dir_abs_path = os.path.normpath(
+            os.path.dirname(self.script_file_abs_path) + os.sep)

         self.set_ctags_path()

         self.set_main_UT_dir()
         self.set_main_tested_dir()

-        self.test_catalouges_list = tests_config.DIRECTORIES_WITH_TESTS_LIST
+        self.test_catalogues_list = tests_config.DIRECTORIES_WITH_TESTS_LIST
         self.set_includes_to_copy_dict(tests_config.INCLUDES_TO_COPY_DICT)
         self.set_dirs_to_include()

@@ -98,13 +100,12 @@ class UnitTestsSourcesGenerator(object):
         gcc_command_template += gcc_flags

         for path in tested_files_list:
-            preprocessing_dst = self.get_preprocessing_repo() +\
-                self.get_relative_path(path, self.get_main_tested_dir())
+            preprocessing_dst = self.get_preprocessing_repo() \
+                + self.get_relative_path(path, self.get_main_tested_dir())
             preprocessing_dst_dir = os.path.dirname(preprocessing_dst)
             self.create_dir_if_not_exist(preprocessing_dst_dir)

-            gcc_command = gcc_command_template +\
-                path + " > " + preprocessing_dst
+            gcc_command = gcc_command_template + path + " > " + preprocessing_dst

             result = run_command([gcc_command])

@@ -133,7 +134,7 @@ class UnitTestsSourcesGenerator(object):

     def get_user_wraps(self, path):
         functions_list = self.get_functions_list(path)
-        functions_list = [re.sub(r'__wrap_([\S]+)\s*[\d]+', r'\1', line) \
+        functions_list = [re.sub(r'__wrap_([\S]+)\s*[\d]+', r'\1', line)
                           for line in functions_list if re.search("__wrap_", line)]

         return functions_list
@@ -187,11 +188,14 @@ class UnitTestsSourcesGenerator(object):

             tested_src = self.get_src_to_test(test_path, preprocessed_tested_path)

-            self.create_dir_if_not_exist(self.get_sources_to_test_repo() + os.path.dirname(test_path))
+            self.create_dir_if_not_exist(
+                self.get_sources_to_test_repo() + os.path.dirname(test_path))

             with open(self.get_sources_to_test_repo() + test_path, "w") as f:
                 f.writelines(tested_src)
-                print(f"Sources for {test_path} saved in {self.get_sources_to_test_repo() + test_path}")
+                print(
+                    f"Sources for {test_path} saved in + \
+                    {self.get_sources_to_test_repo() + test_path}")

             self.prepare_autowraps(test_path, preprocessed_tested_path)

@@ -202,7 +206,7 @@ class UnitTestsSourcesGenerator(object):
         buf += "enable_testing()\n\n"

         buf += "include_directories(\n"
-        dirs_to_inc = self.get_dirs_to_include_list() + self.get_framework_includes()\
+        dirs_to_inc = self.get_dirs_to_include_list() + self.get_framework_includes() \
             + self.get_tests_internal_includes_list()
         for path in dirs_to_inc:
             buf += "\t" + path + "\n"

@@ -221,7 +225,6 @@ class UnitTestsSourcesGenerator(object):
         for path in test_dirs_to_include:
             buf += "\nadd_subdirectory(" + self.get_sources_to_test_repo() + path + ")"

-
         with open(self.get_main_UT_dir() + "CMakeLists.txt", "w") as f:
             f.writelines(buf)

@@ -253,7 +256,8 @@ class UnitTestsSourcesGenerator(object):
         test_file_name = os.path.basename(test_file_path)
         target_name = os.path.splitext(test_file_name)[0]

-        add_executable = "add_executable(" + target_name + " " + test_file_path + " " + tested_file_path + ")\n"
+        add_executable = "add_executable(" + target_name + " " + test_file_path + " " + \
+            tested_file_path + ")\n"

         libraries = "target_link_libraries(" + target_name + " libcmocka.so ocf_env)\n"
@@ -289,21 +293,22 @@ class UnitTestsSourcesGenerator(object):
             f.seek(0, os.SEEK_SET)
             new_line = "include(" + os.path.basename(cmake_name) + ")\n"

-            if not new_line in f.read():
+            if new_line not in f.read():
                 f.write(new_line)

     def get_functions_to_wrap(self, path):
         functions_list = self.get_functions_list(path)
-        functions_list = [re.sub(r'__wrap_([\S]+)\s*[\d]+', r'\1', line) for line in functions_list if re.search("__wrap_", line)]
+        functions_list = [re.sub(r'__wrap_([\S]+)\s*[\d]+', r'\1', line) for line in functions_list
+                          if re.search("__wrap_", line)]

         return functions_list

     def get_functions_to_leave(self, path):
         with open(path) as f:
-            l = f.readlines()
-            buf = ''.join(l)
+            lines = f.readlines()
+            buf = ''.join(lines)

-        tags_pattern = re.compile("<functions_to_leave>[\s\S]*</functions_to_leave>")
+        tags_pattern = re.compile(r"<functions_to_leave>[\s\S]*</functions_to_leave>")

         buf = re.findall(tags_pattern, buf)
         if not len(buf) > 0:

@@ -322,12 +327,13 @@ class UnitTestsSourcesGenerator(object):
         ctags_path = self.get_ctags_path()

         ctags_args = "--c-types=f"
-        if prototypes == True:
+        if prototypes:
             ctags_args += " --c-kinds=+p"
         # find all functions' definitions | put tabs instead of spaces |
         # take only columns with function name and line number | sort in descending order
         result = run_command([ctags_path, "-x", ctags_args, file_path,
-                              "--language-force=c | sed \"s/ \\+/\t/g\" | cut -f 1,3 | sort -nsr -k 2"])
+                              "--language-force=c | sed \"s/ \\+/\t/g\" | cut -f 1,3 | sort -nsr "
+                              "-k 2"])

         # 'output' is string, but it has to be changed to list
         output = list(filter(None, result.stdout.split("\n")))
@@ -361,10 +367,12 @@ class UnitTestsSourcesGenerator(object):
         test_files_list = self.get_files_with_tests_list()

         for f in test_files_list:
-            self.tested_files_paths_list.append(self.get_main_tested_dir() +\
-                self.get_tested_file_path(self.get_main_UT_dir() + f))
+            self.tested_files_paths_list.append(self.get_main_tested_dir()
+                                                + self.get_tested_file_path(
+                                                self.get_main_UT_dir() + f))

-        self.tested_files_paths_list = self.remove_duplicates_from_list(self.tested_files_paths_list)
+        self.tested_files_paths_list = self.remove_duplicates_from_list(
+            self.tested_files_paths_list)

     def get_tested_files_paths_list(self):
         return self.tested_files_paths_list

@@ -381,7 +389,8 @@ class UnitTestsSourcesGenerator(object):
             test_files = self.get_test_files_from_dir(path + os.sep)

             for test_file_name in test_files:
-                test_rel_path = os.path.relpath(path + os.sep + test_file_name, self.get_main_UT_dir())
+                test_rel_path = os.path.relpath(path + os.sep + test_file_name,
+                                                self.get_main_UT_dir())
                 self.test_files_paths_list.append(test_rel_path)

     def are_markups_valid(self, path):

@@ -415,7 +424,7 @@ class UnitTestsSourcesGenerator(object):
             buf = f.readlines()
             buf = ''.join(buf)

-        tags_pattern = re.compile("<tested_file_path>[\s\S]*</tested_file_path>")
+        tags_pattern = re.compile(r"<tested_file_path>[\s\S]*</tested_file_path>")
         buf = re.findall(tags_pattern, buf)

         if not len(buf) > 0:

@@ -436,7 +445,7 @@ class UnitTestsSourcesGenerator(object):
             buf = f.readlines()
             buf = ''.join(buf)

-        tags_pattern = re.compile("<tested_function>[\s\S]*</tested_function>")
+        tags_pattern = re.compile(r"<tested_function>[\s\S]*</tested_function>")
         buf = re.findall(tags_pattern, buf)

         if not len(buf) > 0:

@@ -455,7 +464,8 @@ class UnitTestsSourcesGenerator(object):

     def get_test_files_from_dir(self, path):
         ret = os.listdir(path)
-        ret = [name for name in ret if os.path.isfile(path + os.sep + name) and (name.endswith(".c") or name.endswith(".h"))]
+        ret = [name for name in ret if os.path.isfile(path + os.sep + name)
+               and (name.endswith(".c") or name.endswith(".h"))]
         ret = [name for name in ret if self.are_markups_valid(path + name)]

         return ret
@@ -490,7 +500,7 @@ class UnitTestsSourcesGenerator(object):
             f.writelines(padding)
             f.close()

-    def find_function_end(self,code_lines_list, first_line_of_function_index):
+    def find_function_end(self, code_lines_list, first_line_of_function_index):
         brackets_counter = 0
         current_line_index = first_line_of_function_index

@@ -521,7 +531,6 @@ class UnitTestsSourcesGenerator(object):
         result = run_command(["/usr/bin/nm -u " + out_file + " | cut -f2 -d\'U\'"])
         return set(result.stdout.split())

-
     def remove_function_body(self, code_lines_list, line_id):
         try:
             while "{" not in code_lines_list[line_id]:

@@ -538,7 +547,6 @@ class UnitTestsSourcesGenerator(object):

         del code_lines_list[line_id + 1: last_line_id + 1]

-
     def get_function_wrap(self, code_lines_list, line_id):
         ret = []
         # Line numbering starts with one, list indexing with zero

@@ -563,9 +571,9 @@ class UnitTestsSourcesGenerator(object):
             if "{" in ret[-1] or "{" in ret[-2]:
                 delimter = "{"
             else:
-                delimiter =";"
+                delimiter = ";"
         except IndexError:
-            delimiter =";"
+            delimiter = ";"

         ret[-1] = ret[-1].split(delimiter)[0]
         ret[-1] += "{}"

@@ -607,7 +615,7 @@ class UnitTestsSourcesGenerator(object):
         return self.ctags_path

     def get_tests_catalouges_list(self):
-        return self.test_catalouges_list
+        return self.test_catalogues_list

     def get_relative_path(self, original_path, part_to_remove):
         return original_path.split(part_to_remove, 1)[1]
@@ -616,20 +624,21 @@ class UnitTestsSourcesGenerator(object):
         return self.dirs_to_include_list

     def set_dirs_to_include(self):
-        self.dirs_to_include_list = [self.get_main_tested_dir() + name\
-            for name in tests_config.DIRECTORIES_TO_INCLUDE_FROM_PROJECT_LIST]
+        self.dirs_to_include_list = [self.get_main_tested_dir() + name
+                                     for name in
+                                     tests_config.DIRECTORIES_TO_INCLUDE_FROM_PROJECT_LIST]

     def set_tests_internal_includes_list(self):
-        self.tests_internal_includes_list = [self.get_main_UT_dir() + name\
-            for name in tests_config.DIRECTORIES_TO_INCLUDE_FROM_UT_LIST]
+        self.tests_internal_includes_list = [self.get_main_UT_dir() + name
+                                             for name in
+                                             tests_config.DIRECTORIES_TO_INCLUDE_FROM_UT_LIST]

     def set_preprocessing_repo(self):
-        self.preprocessing_repo = self.get_main_UT_dir() +\
-            tests_config.PREPROCESSED_SOURCES_REPOSITORY
+        self.preprocessing_repo = self.get_main_UT_dir() \
+            + tests_config.PREPROCESSED_SOURCES_REPOSITORY

     def set_sources_to_test_repo(self):
-        self.sources_to_test_repo = self.get_main_UT_dir() +\
-            tests_config.SOURCES_TO_TEST_REPOSITORY
+        self.sources_to_test_repo = self.get_main_UT_dir() + tests_config.SOURCES_TO_TEST_REPOSITORY

     def get_sources_to_test_repo(self):
         return self.sources_to_test_repo

@@ -665,8 +674,10 @@ class UnitTestsSourcesGenerator(object):
         return self.includes_to_copy_dict

     def set_main_UT_dir(self):
-        main_UT_dir = os.path.normpath(os.path.normpath(self.get_script_dir_path()\
-            + os.sep + tests_config.MAIN_DIRECTORY_OF_UNIT_TESTS))
+        main_UT_dir = os.path.normpath(os.path.normpath(self.get_script_dir_path()
+                                                        + os.sep
+                                                        + tests_config.
+                                                        MAIN_DIRECTORY_OF_UNIT_TESTS))
         if not os.path.isdir(main_UT_dir):
             print("Given path to main UT directory is wrong!")
             sys.exit(1)

@@ -674,16 +685,18 @@ class UnitTestsSourcesGenerator(object):
         self.main_UT_dir = main_UT_dir

     def set_main_tested_dir(self):
-        main_tested_dir = os.path.normpath(os.path.normpath(self.get_script_dir_path()\
-            + os.sep + tests_config.MAIN_DIRECTORY_OF_TESTED_PROJECT))
+        main_tested_dir = os.path.normpath(os.path.normpath(self.get_script_dir_path()
+                                                            + os.sep
+                                                            + tests_config.
+                                                            MAIN_DIRECTORY_OF_TESTED_PROJECT))
         if not os.path.isdir(main_tested_dir):
             print("Given path to main tested directory is wrong!")
             sys.exit(1)

         self.main_tested_dir = main_tested_dir

-def __main__():

+def __main__():
     generator = UnitTestsSourcesGenerator()
     generator.copy_includes()
     generator.preprocessing()
@@ -693,5 +706,6 @@ def __main__():

     print("Files for testing generated!")

+
 if __name__ == "__main__":
     __main__()
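Two fixes recur throughout this generator script: regex patterns gain an r prefix so escapes like \s reach the regex engine instead of being treated as deprecated string escapes (W605), and the ambiguous single-letter name l becomes lines (E741). A one-line reminder of the raw-string form:

    import re
    # raw string: backslashes are passed to re.compile untouched (W605-clean)
    tags_pattern = re.compile(r"<tested_function>[\s\S]*</tested_function>")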
@@ -10,6 +10,7 @@ import os
 import sys
 import subprocess

+
 def run_command(args):
     result = subprocess.run(" ".join(args), shell=True,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)

@@ -17,6 +18,7 @@ def run_command(args):
     result.stderr = result.stderr.decode("ASCII", errors='ignore')
     return result

+
 script_path = os.path.dirname(os.path.realpath(__file__))

 main_UT_dir = os.path.join(script_path, tests_config.MAIN_DIRECTORY_OF_UNIT_TESTS)

@@ -29,13 +31,13 @@ if not os.path.isdir(os.path.join(main_UT_dir, "ocf_env", "ocf")):
     except Exception:
         raise Exception("Cannot create ocf_env/ocf directory!")

-result = run_command([ "cp", "-r",
+result = run_command(["cp", "-r",
                       os.path.join(main_tested_dir, "inc", "*"),
-                      os.path.join(main_UT_dir, "ocf_env", "ocf") ])
+                      os.path.join(main_UT_dir, "ocf_env", "ocf")])
 if result.returncode != 0:
     raise Exception("Preparing sources for testing failed!")

-result = run_command([ os.path.join(script_path, "prepare_sources_for_testing.py") ])
+result = run_command([os.path.join(script_path, "prepare_sources_for_testing.py")])
 if result.returncode != 0:
     raise Exception("Preparing sources for testing failed!")

@@ -52,7 +54,7 @@ except Exception:

 os.chdir(build_dir)

-cmake_result = run_command([ "cmake", ".." ])
+cmake_result = run_command(["cmake", ".."])

 print(cmake_result.stdout)
 with open(os.path.join(logs_dir, "cmake.output"), "w") as f:

@@ -64,7 +66,7 @@ if cmake_result.returncode != 0:
         f.write("Cmake step failed! More details in cmake.output.")
     sys.exit(1)

-make_result = run_command([ "make", "-j" ])
+make_result = run_command(["make", "-j"])

 print(make_result.stdout)
 with open(os.path.join(logs_dir, "make.output"), "w") as f:

@@ -76,8 +78,8 @@ if make_result.returncode != 0:
         f.write("Make step failed! More details in make.output.")
     sys.exit(1)

-test_result = run_command([ "make", "test" ])
+test_result = run_command(["make", "test"])

 print(test_result.stdout)
-with open(os.path.join(logs_dir , "tests.output"), "w") as f:
+with open(os.path.join(logs_dir, "tests.output"), "w") as f:
     f.write(test_result.stdout)
@@ -11,25 +11,34 @@ MAIN_DIRECTORY_OF_TESTED_PROJECT = "../../../"

 MAIN_DIRECTORY_OF_UNIT_TESTS = "../tests/"

-# Paths to all directories, in which tests are stored. All paths should be relative to MAIN_DIRECTORY_OF_UNIT_TESTS
-DIRECTORIES_WITH_TESTS_LIST = ["cleaning/", "metadata/", "mngt/", "concurrency/", "engine/", "eviction/", "utils/"]
+# Paths to all directories, in which tests are stored. All paths should be relative to
+# MAIN_DIRECTORY_OF_UNIT_TESTS
+DIRECTORIES_WITH_TESTS_LIST = ["cleaning/", "metadata/", "mngt/", "concurrency/", "engine/",
+                               "eviction/", "utils/"]

-# Paths to all directories containing files with sources. All paths should be relative to MAIN_DIRECTORY_OF_TESTED_PROJECT
-DIRECTORIES_TO_INCLUDE_FROM_PROJECT_LIST = ["src/", "src/cleaning/", "src/engine/", "src/metadata/", "src/eviction/", "src/mngt/", "src/concurrency/", "src/utils/", "inc/"]
+# Paths to all directories containing files with sources. All paths should be relative to
+# MAIN_DIRECTORY_OF_TESTED_PROJECT
+DIRECTORIES_TO_INCLUDE_FROM_PROJECT_LIST = ["src/", "src/cleaning/", "src/engine/", "src/metadata/",
+                                            "src/eviction/", "src/mngt/", "src/concurrency/",
+                                            "src/utils/", "inc/"]

 # Paths to all directories from directory with tests, which should also be included
 DIRECTORIES_TO_INCLUDE_FROM_UT_LIST = ["ocf_env/"]

 # Paths to include, required by cmake, cmocka, cunit
-FRAMEWORK_DIRECTORIES_TO_INCLUDE_LIST = ["${CMOCKA_PUBLIC_INCLUDE_DIRS}" ,"${CMAKE_BINARY_DIR}", "${CMAKE_CURRENT_SOURCE_DIR}"]
+FRAMEWORK_DIRECTORIES_TO_INCLUDE_LIST = ["${CMOCKA_PUBLIC_INCLUDE_DIRS}", "${CMAKE_BINARY_DIR}",
+                                         "${CMAKE_CURRENT_SOURCE_DIR}"]

-# Path to directory containing all sources after preprocessing. Should be relative to MAIN_DIRECTORY_OF_UNIT_TESTS
+# Path to directory containing all sources after preprocessing. Should be relative to
+# MAIN_DIRECTORY_OF_UNIT_TESTS
 PREPROCESSED_SOURCES_REPOSITORY = "preprocessed_sources_repository/"

-# Path to directory containing all sources after removing unneeded functions and cmake files for tests
+# Path to directory containing all sources after removing unneeded functions and cmake files for
+# tests
 SOURCES_TO_TEST_REPOSITORY = "sources_to_test_repository/"

-# List of includes. Directories will be recursively copied to given destinations in directory with tests.
+# List of includes.
+# Directories will be recursively copied to given destinations in directory with tests.
 # key - destination in dir with tests
 # value - path in tested project to dir which should be copied
-INCLUDES_TO_COPY_DICT = { 'ocf_env/ocf/' : "inc/" }
+INCLUDES_TO_COPY_DICT = {'ocf_env/ocf/': "inc/"}