Advancing on UDSFS

Adolfo Gómez García 2021-11-04 12:46:40 +01:00
parent 6792283e44
commit b55df32db0
5 changed files with 221 additions and 38 deletions

View File

@ -29,7 +29,7 @@ from signal import signal, SIGINT, SIG_DFL
from stat import S_IFDIR
from functools import partial
log = logging.getLogger(__name__)
logger = logging.getLogger(__name__)
_system = system()
_machine = machine()
@ -869,7 +869,7 @@ class FUSE:
except OSError as e:
if e.errno > 0:
log.debug(
logger.debug(
"FUSE operation %s raised a %s, returning errno %s.",
func.__name__,
type(e),
@ -877,7 +877,7 @@ class FUSE:
)
return -e.errno
else:
log.error(
logger.error(
"FUSE operation %s raised an OSError with negative "
"errno %s, returning errno.EINVAL.",
func.__name__,
@ -887,7 +887,7 @@ class FUSE:
return -errno.EINVAL
except Exception:
log.error(
logger.error(
"Uncaught exception from FUSE operation %s, "
"returning errno.EINVAL.",
func.__name__,
@ -899,7 +899,7 @@ class FUSE:
if len(args) > 0 and isinstance(args[0], FUSE):
self = args[0]
self.__critical_exception = e
log.critical(
logger.critical(
"Uncaught critical exception from FUSE operation %s, aborting.",
func.__name__,
exc_info=True,
@ -991,17 +991,20 @@ class FUSE:
else:
fh = fip.contents.fh
logger.debug(
'Invoking read operation on %s(%s, %s, %s)', path, size, offset, fh
)
ret = self.operations.read(self._decode_optional_path(path), size, offset, fh)
if not ret:
return 0
retsize = len(ret)
logger.debug('Read operation on %s returned %d bytes', path, retsize)
if retsize > size:
raise RuntimeError(
"read too much data ({} bytes, expected {})".format(
retsize, size
)
"read too much data ({} bytes, expected {})".format(retsize, size)
)
ctypes.memmove(buf, ret, retsize)
@ -1152,11 +1155,15 @@ class FUSE:
def releasedir(self, path: typing.Optional[bytes], fip: typing.Any):
# Ignore raw_fi
return self.operations.releasedir(self._decode_optional_path(path), fip.contents.fh)
return self.operations.releasedir(
self._decode_optional_path(path), fip.contents.fh
)
def fsyncdir(self, path: typing.Optional[bytes], datasync: bool, fip: typing.Any):
# Ignore raw_fi
return self.operations.fsyncdir(self._decode_optional_path(path), datasync, fip.contents.fh)
return self.operations.fsyncdir(
self._decode_optional_path(path), datasync, fip.contents.fh
)
def init(self, conn: typing.Any) -> None:
return self.operations.init('/')
@ -1164,7 +1171,7 @@ class FUSE:
def destroy(self, private_data: typing.Any) -> None:
return self.operations.destroy('/')
def access(self, path:bytes, amode: int) -> None:
def access(self, path: bytes, amode: int) -> None:
return self.operations.access(path.decode(self.encoding), amode)
def create(self, path: bytes, mode: int, fip: typing.Any) -> None:

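Context for the blocks above: the wrapper turns exceptions raised by filesystem callbacks into the negative errno values the C FUSE layer expects. A minimal sketch of that convention (illustrative, not the module's exact code):

import errno

def to_fuse_result(exc: Exception) -> int:
    # An OSError carrying a positive errno (fusepy-style FuseOSError included)
    # maps to -errno; everything else is reported as -EINVAL, mirroring the
    # logger.error / -errno.EINVAL branches above.
    if isinstance(exc, OSError) and exc.errno and exc.errno > 0:
        return -exc.errno
    return -errno.EINVAL

assert to_fuse_result(OSError(errno.ENOENT, 'not found')) == -errno.ENOENT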
View File

@ -11,7 +11,9 @@ from uds import models
from uds.core.util.fuse import FUSE, FuseOSError, Operations
from . import types
from . import events
from . import stats
logger = logging.getLogger(__name__)
@ -19,28 +21,37 @@ logger = logging.getLogger(__name__)
class UDSFS(Operations):
dispatchers: typing.ClassVar[typing.Dict[str, types.UDSFSInterface]] = {
'events': events.EventFS()
'events': events.EventFS(),
'stats': stats.StatsFS(),
}
# Our own stats: the service creation date, plus 2 hard links for the root folder and one per dispatcher
_own_stats = types.StatType(st_mode=(stat.S_IFDIR | 0o755), st_nlink=2 + len(dispatchers))
_own_stats = types.StatType(
st_mode=(stat.S_IFDIR | 0o755), st_nlink=2 + len(dispatchers)
)
def __init__(self):
pass
def _dispatch(self, path: typing.Optional[str], operation: str, *args, **kwargs) -> typing.Any:
def _dispatch(
self, path: typing.Optional[str], operation: str, *args, **kwargs
) -> typing.Any:
try:
if path:
path_parts = path.split('/')
logger.debug('Dispatching %s for %s', operation, path_parts)
if path_parts[1] in self.dispatchers:
return getattr(self.dispatchers[path_parts[1]], operation)(path_parts[2:], *args, **kwargs)
return getattr(self.dispatchers[path_parts[1]], operation)(
path_parts[2:], *args, **kwargs
)
except Exception as e:
logger.error('Error while dispatching %s for %s: %s', operation, path, e)
raise FuseOSError(errno.ENOENT)
def getattr(self, path: typing.Optional[str], fh: typing.Any = None) -> typing.Dict[str, int]:
def getattr(
self, path: typing.Optional[str], fh: typing.Any = None
) -> typing.Dict[str, int]:
# If root folder, return service creation date
if path == '/':
return self._own_stats.as_dict()
@ -65,12 +76,21 @@ class UDSFS(Operations):
return ['.', '..'] + list(self.dispatchers.keys())
return typing.cast(typing.List[str], self._dispatch(path, 'readdir'))
def read(self, path: typing.Optional[str], size: int, offset: int, fh: typing.Any) -> bytes:
def read(
self, path: typing.Optional[str], size: int, offset: int, fh: typing.Any
) -> bytes:
'''
Reads the content of the "virtual" file
'''
return typing.cast(bytes, self._dispatch(path, 'read', size, offset))
def flush(self, path: typing.Optional[str], fh: typing.Any) -> None:
'''
Flushes the content of the "virtual" file
'''
self._dispatch(path, 'flush')
class Command(BaseCommand):
args = "<mod.name=value mod.name=value mod.name=value...>"
help = "Updates configuration values. If mod is omitted, UDS will be used. Omit whitespace between name, =, and value (they must be a single param)"
@ -79,9 +99,11 @@ class Command(BaseCommand):
parser.add_argument(
'mount_point', type=str, help='Mount point for the FUSE filesystem'
)
# parser.add_argument('-d', '--debug', action='store_true', help='Enable debug logging')
parser.add_argument(
'-d', '--debug', action='store_true', help='Enable debug logging'
)
def handle(self, *args, **options):
logger.debug("Handling UDS FS")
fuse = FUSE(UDSFS(), options['mount_point'], foreground=True, allow_other=True)
fuse = FUSE(UDSFS(), options['mount_point'], foreground=True, allow_other=True, debug=options['debug'])
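The handler above now mounts UDSFS in the foreground and forwards the new --debug flag to FUSE. Once mounted, every request is routed by the first path component through the dispatchers mapping; a sketch of that flow (illustrative only, assuming the module above is importable):

fs = UDSFS()
fs.getattr('/')                              # root: S_IFDIR | 0o755, st_nlink = 2 + len(dispatchers)
fs.read('/stats/events.csv', 4096, 0, None)  # dispatched as StatsFS().read(['events.csv'], 4096, 0)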

View File

@ -27,12 +27,29 @@ def pretty_print(event: StatsEvents) -> str:
# Get event description
return f'{dt} - {event_name} {name} - {event.fld1}|{event.fld2}|{event.fld3}|{event.fld4}'
class EventFS(types.UDSFSInterface):
"""
Class to handle events fs in UDS.
"""
_directory_stats: typing.ClassVar[types.StatType] = types.StatType(st_mode=(stat.S_IFDIR | 0o755), st_nlink=1)
_months: typing.ClassVar[typing.List[str]] = ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']
_directory_stats: typing.ClassVar[types.StatType] = types.StatType(
st_mode=(stat.S_IFDIR | 0o755), st_nlink=1
)
_months: typing.ClassVar[typing.List[str]] = [
'01',
'02',
'03',
'04',
'05',
'06',
'07',
'08',
'09',
'10',
'11',
'12',
]
def __init__(self):
pass
@ -40,7 +57,7 @@ class EventFS(types.UDSFSInterface):
def getattr(self, path: typing.List[str]) -> types.StatType:
if len(path) < 1:
return EventFS._directory_stats
years = EventFS.last_years()
if len(path) >= 1 and path[0] not in years:
raise FileNotFoundError('No such file or directory')
@ -49,11 +66,17 @@ class EventFS(types.UDSFSInterface):
if len(path) >= 2 and path[1] in EventFS._months:
if len(path) == 2:
return EventFS._directory_stats
if len(path) == 3 and int(path[2]) in range(1, EventFS.number_of_days(int(path[0]), int(path[1])) + 1):
# TODO: calculate size of file
size = LINELEN * EventFS.get_events(int(path[0]), int(path[1]), int(path[2]), 0).count()
if len(path) == 3 and int(path[2]) in range(
1, EventFS.number_of_days(int(path[0]), int(path[1])) + 1
):
size = (
LINELEN
* EventFS.get_events(
int(path[0]), int(path[1]), int(path[2]), 0
).count()
)
return types.StatType(st_mode=stat.S_IFREG | 0o444, st_size=size)
raise FileNotFoundError('No such file or directory')
def readdir(self, path: typing.List[str]) -> typing.List[str]:
@ -67,9 +90,14 @@ class EventFS(types.UDSFSInterface):
if len(path) == 1:
return ['.', '..'] + EventFS._months
if len(path) == 2 and path[1] in EventFS._months: # Return days of month as indicated on path
if (
len(path) == 2 and path[1] in EventFS._months
): # Return days of month as indicated on path
month = int(path[1])
return ['.', '..'] + ['{:02d}'.format(x) for x in range(1, EventFS.number_of_days(year, month) + 1)]
return ['.', '..'] + [
'{:02d}'.format(x)
for x in range(1, EventFS.number_of_days(year, month) + 1)
]
raise FileNotFoundError('No such file or directory')
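The checks above give EventFS a /<year>/<month>/<day> hierarchy: the last five years, twelve months each, and one regular file per day. A usage sketch under those assumptions (outputs depend on the current date and on the stored events):

fs = EventFS()
fs.readdir(['2021'])                      # ['.', '..', '01', ..., '12']
fs.readdir(['2021', '11'])                # ['.', '..', '01', ..., '30']
fs.getattr(['2021', '11', '04']).st_size  # LINELEN * number of events on that day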
@ -82,18 +110,30 @@ class EventFS(types.UDSFSInterface):
lines = size // LINELEN + 1
# Read lines from get_events
year, month, day = int(path[0]), int(path[1]), int(path[2])
logger.debug('Reading %a lines, skipping %s for day %s/%s/%s', lines, skip, year, month, day)
logger.debug(
'Reading %a lines, skipping %s for day %s/%s/%s',
lines,
skip,
year,
month,
day,
)
events = EventFS.get_events(year, month, day, skip)
# Compose lines, adjusting each line length to LINELEN
theLines = [pretty_print(x).encode('utf-8') for x in events[:lines]]
# Adjust each line length to LINELEN
# Adjust each line length to LINELEN (after encoding to utf-8)
theLines = [x + b' ' * (LINELEN - len(x) - 1) + b'\n' for x in theLines]
# Return lines
return b''.join(theLines)[offset:offset+size]
return b''.join(theLines)[offset : offset + size]
@staticmethod
def last_years() -> typing.List[str]:
return [str(x) for x in range(datetime.datetime.now().year - 4, datetime.datetime.now().year + 1)]
return [
str(x)
for x in range(
datetime.datetime.now().year - 4, datetime.datetime.now().year + 1
)
]
@staticmethod
def number_of_days(year: int, month: int) -> int:
@ -101,11 +141,15 @@ class EventFS(types.UDSFSInterface):
# retrieve Events from a year as a list of events
@staticmethod
def get_events(year: int, month: int, day: int, skip: int = 0) -> QuerySet[StatsEvents]:
def get_events(
year: int, month: int, day: int, skip: int = 0
) -> QuerySet[StatsEvents]:
# Calculate starting and ending stamps as unix timestamps from year, month and day
start = calendar.timegm((year, month, day, 0, 0, 0, 0, 0, 0))
end = calendar.timegm((year, month, day, 23, 59, 59, 0, 0, 0))
logger.debug('Reading stats events from %s to %s, skipping %s first', start, end, skip)
return StatsEvents.objects.filter(stamp__gte=start, stamp__lte=end).order_by('stamp')[skip:]
logger.debug(
'Reading stats events from %s to %s, skipping %s first', start, end, skip
)
return StatsEvents.objects.filter(stamp__gte=start, stamp__lte=end).order_by(
'stamp'
)[skip:]
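A quick worked example of the fixed-width record layout used by read() above: each event is pretty-printed, encoded, and padded so every record occupies exactly LINELEN bytes, which is also how getattr() sizes the per-day files. Self-contained illustration:

LINELEN = 160
record = 'example event line'.encode('utf-8')
padded = record + b' ' * (LINELEN - len(record) - 1) + b'\n'
assert len(padded) == LINELEN
# A read(size=4096, offset=0) therefore needs at most size // LINELEN + 1 = 26
# records, and slicing the joined buffer with [offset:offset + size] trims any
# surplus back off.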

View File

@ -0,0 +1,85 @@
import stat
import calendar
import datetime
import typing
import logging
from django.db.models import QuerySet
from uds.management.commands.udsfs.events import EventFS
from uds.models import StatsEvents
from uds.core.util.stats.events import EVENT_NAMES, getOwner
from . import types
logger = logging.getLogger(__name__)
LINELEN = 160
class StatsFS(types.UDSFSInterface):
"""
Class to handle stats fs in UDS.
"""
_directory_stats: typing.ClassVar[types.StatType] = types.StatType(
st_mode=(stat.S_IFDIR | 0o755), st_nlink=1
)
_dispatchers: typing.Mapping[str, typing.Callable[[int, int], bytes]]
def __init__(self) -> None:
# Initialize _dispatchers
self._dispatchers = {
'events.csv': self._read_events,
'pools.csv': self._read_pools,
}
def readdir(self, path: typing.List[str]) -> typing.List[str]:
# If len(path) == 0, return the list of possible stats files (from _dispatchers)
# else, raise a FileNotFoundError
if len(path) == 0:
return ['.', '..'] + list(self._dispatchers.keys())
raise FileNotFoundError
def getattr(self, path: typing.List[str]) -> types.StatType:
if len(path) < 1:
return StatsFS._directory_stats
# Ensure that the path is valid
if len(path) != 1:
raise FileNotFoundError
# Ensure that the path is a valid stats file
if path[0] not in self._dispatchers:
raise FileNotFoundError
# Calculate the size of the file
size = len(self._dispatchers[path[0]](0, 0))
logger.debug('Size of %s: %s', path[0], size)
return types.StatType(st_mode=(stat.S_IFREG | 0o755), st_nlink=1, st_size=size)
def read(self, path: typing.List[str], size: int, offset: int) -> bytes:
logger.debug('Reading data from %s: offset: %s, size: %s', path, offset, size)
# Ensure that the path is valid
if len(path) != 1:
raise FileNotFoundError
# Ensure that the path is a valid stats file
if path[0] not in self._dispatchers:
raise FileNotFoundError
# Dispatch the read to the dispatcher
data = self._dispatchers[path[0]](size, offset)
logger.debug('Read %s bytes of data', len(data))
return data
# Dispatchers for different stats files
def _read_events(self, size: int, offset: int) -> bytes:
logger.debug('Reading events. offset: %s, size: %s', offset, size)
return b'Events'
def _read_pools(self, size: int, offset: int) -> bytes:
logger.debug('Reading pools. offset: %s, size: %s', offset, size)
return b'Pools'
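With the placeholder dispatchers above, the two virtual files already behave consistently: getattr() sizes each file by calling its dispatcher with (0, 0), and read() returns the same bytes. A sketch of the resulting behaviour (not a test from the repository):

fs = StatsFS()
fs.readdir([])                      # ['.', '..', 'events.csv', 'pools.csv']
fs.getattr(['events.csv']).st_size  # len(b'Events') == 6
fs.read(['events.csv'], 4096, 0)    # b'Events'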

View File

@ -7,12 +7,19 @@ logger = logging.getLogger(__name__)
class StatType(typing.NamedTuple):
st_dev: int = -1
st_ino: int = -1
st_nlink: int = 1
st_mode: int = stat.S_IFREG
st_uid: int = -1
st_gid: int = -1
st_rdev: int = -1
st_size: int = -1
st_blksize: int = -1
st_blocks: int = -1
st_ctime: int = time.time_ns()
st_mtime: int = time.time_ns()
st_atime: int = time.time_ns()
st_nlink: int = 1
def as_dict(self) -> typing.Dict[str, int]:
rst = {
@ -23,8 +30,23 @@ class StatType(typing.NamedTuple):
'st_nlink': self.st_nlink
}
# Append optional fields
if self.st_dev != -1:
rst['st_dev'] = self.st_dev
if self.st_ino != -1:
rst['st_ino'] = self.st_ino
if self.st_uid != -1:
rst['st_uid'] = self.st_uid
if self.st_gid != -1:
rst['st_gid'] = self.st_gid
if self.st_rdev != -1:
rst['st_rdev'] = self.st_rdev
if self.st_size != -1:
rst['st_size'] = self.st_size
if self.st_blksize != -1:
rst['st_blksize'] = self.st_blksize
if self.st_blocks != -1:
rst['st_blocks'] = self.st_blocks
return rst
@ -49,3 +71,6 @@ class UDSFSInterface:
Read a file. Path is the full path to the file, already split.
"""
raise NotImplementedError
def flush(self, path: typing.List[str]) -> None:
return
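Illustrative use of the extended StatType: fields left at their -1 sentinel are skipped by as_dict(), so only meaningful values reach the FUSE getattr reply. A small sketch (exact dict contents depend on the part of as_dict() not shown in this hunk):

import stat
st = StatType(st_mode=stat.S_IFREG | 0o444, st_nlink=1, st_size=320)
st.as_dict()
# st_mode, st_nlink and the timestamps are returned as usual; st_size appears
# only because it was set explicitly, while st_dev, st_ino, st_uid, st_gid,
# st_rdev, st_blksize and st_blocks stay at -1 and are omitted.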