forked from shaba/openuds

Added token alias to secure unmanaged machine token

This commit is contained in:
parent 11f6eec913
commit eeae98ca79
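
The change, in brief: unmanaged machines authenticate against the broker with a token shared by the whole service. To avoid keeping that shared secret on every machine, the broker now answers the first initialization with a per-machine alias token, which the actor stores in place of the shared token and uses from then on. A minimal sketch of the server-side decision; the helper names are hypothetical stand-ins for the ORM queries in the diff below:

    import secrets
    from typing import Callable, Optional, Tuple

    def resolve_unmanaged_token(
        token: str,
        find_service_by_alias: Callable[[str], Optional[object]],  # hypothetical lookup helpers
        find_service_by_token: Callable[[str], object],
        save_alias: Callable[[object, str], None],
    ) -> Tuple[object, str]:
        """Return (service, alias_token) for an unmanaged initialization request."""
        service = find_service_by_alias(token)
        if service is not None:
            return service, token                  # token is already an alias: reuse it
        service = find_service_by_token(token)     # token is the shared service token
        alias = secrets.token_hex(20)              # the commit uses cryptoManager().randomString()
        save_alias(service, alias)                 # persisted so later requests resolve it
        return service, alias
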
@@ -270,6 +270,7 @@ class UDSServerApi(UDSApi):
                 )
                 if r['os']
                 else None,
+                alias_token=r.get('alias_token'),  # Possible alias for unmanaged
             )

     def ready(
@@ -274,7 +274,8 @@ class CommonService:  # pylint: disable=too-many-instance-attributes
             return False

         # Only removes master token for managed machines (will need it on next client execution)
-        master_token = None if self.isManaged() else self._cfg.master_token
+        # For unmanaged, if alias is present, replace master token with it
+        master_token = None if self.isManaged() else (initResult.alias_token or self._cfg.master_token)
         self._cfg = self._cfg._replace(
             master_token=master_token,
             own_token=initResult.own_token,
@@ -284,8 +285,9 @@ class CommonService:  # pylint: disable=too-many-instance-attributes
             )
         )

-        # On first successfull initialization request, master token will dissapear for managed hosts so it will be no more available (not needed anyway)
-        if self.isManaged():
+        # On first successful initialization request, the master token disappears for managed
+        # hosts, so it is no longer available (it is not needed anyway). For unmanaged hosts,
+        # the master token will be replaced with an alias token.
         platform.store.writeConfig(self._cfg)

         # Setup logger now
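
Because the actor's configuration is an immutable NamedTuple, the swap happens through `_replace`. A self-contained illustration of the semantics, with field names trimmed down from the real ActorConfigurationType:

    import typing

    class ActorConfig(typing.NamedTuple):
        master_token: typing.Optional[str] = None
        own_token: typing.Optional[str] = None

    def apply_init_result(
        cfg: ActorConfig, is_managed: bool, alias_token: typing.Optional[str], own_token: str
    ) -> ActorConfig:
        # Managed: drop the master token, it is not needed after initialization.
        # Unmanaged: keep one, preferring the alias minted by the broker.
        master = None if is_managed else (alias_token or cfg.master_token)
        return cfg._replace(master_token=master, own_token=own_token)

    print(apply_init_result(ActorConfig(master_token='shared'), False, 'a1b2c3', 'svc-1'))
    # -> ActorConfig(master_token='a1b2c3', own_token='svc-1')
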
@@ -50,6 +50,7 @@ class InitializationResultType(typing.NamedTuple):
     own_token: typing.Optional[str] = None
     unique_id: typing.Optional[str] = None
     os: typing.Optional[ActorOsConfigurationType] = None
+    alias_token: typing.Optional[str] = None

 class LoginResultInfoType(typing.NamedTuple):
     ip: str
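
Defaulting the new field to None keeps older brokers compatible: if the response carries no alias, nothing changes for the actor. A self-contained check (ActorOsConfigurationType omitted for brevity):

    import typing

    class InitializationResultType(typing.NamedTuple):
        own_token: typing.Optional[str] = None
        unique_id: typing.Optional[str] = None
        alias_token: typing.Optional[str] = None

    r = {'own_token': 'uuid-1', 'unique_id': '00:11:22:33:44:55'}  # response without alias
    result = InitializationResultType(
        own_token=r.get('own_token'),
        unique_id=r.get('unique_id'),
        alias_token=r.get('alias_token'),  # stays None for managed machines
    )
    assert result.alias_token is None
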
@@ -42,12 +42,13 @@ from uds.models import (
 )

 # from uds.core import VERSION
-from uds.core.managers import userServiceManager
+from uds.core.managers import userServiceManager, cryptoManager
 from uds.core import osmanagers
 from uds.core.util import log, certs
 from uds.core.util.state import State
 from uds.core.util.cache import Cache
 from uds.core.util.config import GlobalConfig
+from uds.models.service import ServiceTokenAlias

 from ..handlers import Handler, AccessDenied, RequestError

@@ -241,11 +242,23 @@ class Initialize(ActorV3Action):
         # First, validate token...
         logger.debug('Args: %s, Params: %s', self._args, self._params)
+        service: typing.Optional[Service] = None
+        alias_token: typing.Optional[str] = None
         try:
+            token = self._params['token']
             # First, try to locate a user service providing this token.
             if self._params['type'] == UNMANAGED:
                 # If unmanaged, use Service locator
-                service = Service.objects.get(token=self._params['token'])
+                alias_token = token  # Store token as possible alias
+                # First, try to locate on alias table
+                if ServiceTokenAlias.objects.filter(alias=token).exists():
+                    # Retrieve real service from token alias
+                    service = ServiceTokenAlias.objects.get(alias=token).service
+                # If not found, try to locate on service table
+                if service is None:  # Not an alias token, try to locate on Service table
+                    service = Service.objects.get(token=token)
+                    # And create a new alias for it, and save
+                    alias_token = cryptoManager().randomString()  # fix alias with new token
+                    service.aliases.create(alias=alias_token)

-                # Locate an userService that belongs to this service and which
+                # Build the possible ids and make initial filter to match service
                 idsList = [x['ip'] for x in self._params['id']] + [
@@ -255,7 +268,7 @@ class Initialize(ActorV3Action):
             else:
                 # If not a service-provided token, use actor tokens
                 ActorToken.objects.get(
-                    token=self._params['token']
+                    token=token
                 )  # Not assigned; only the existence check is needed
                 # Build the possible ids and make initial filter to match ANY userservice with provided MAC
                 idsList = [i['mac'] for i in self._params['id'][:5]]
@@ -280,7 +293,7 @@ class Initialize(ActorV3Action):
         except Exception as e:
             logger.info('Unmanaged host request: %s, %s', self._params, e)
             return ActorV3Action.actorResult(
-                {'own_token': None, 'max_idle': None, 'unique_id': None, 'os': None}
+                {'own_token': None, 'max_idle': None, 'unique_id': None, 'os': None, 'alias_token': None}
             )

         # Managed by UDS, get initialization data from osmanager and return it
@@ -296,6 +309,9 @@ class Initialize(ActorV3Action):
                 'own_token': userService.uuid,
                 'unique_id': userService.unique_id,
                 'os': osData,
+                # alias_token carries the new master token (or the same alias, if one was presented)
+                # to allow rotating it on unmanaged machines. Managed machines do not use this field (None).
+                'alias_token': alias_token,
             }
         )
     except (ActorToken.DoesNotExist, Service.DoesNotExist):
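
Put together, an unmanaged actor sees this handshake: the first initialization presents the shared service token and receives a minted alias; every later initialization presents the alias and gets the same alias back. A sketch of that round trip, where post_initialize is a hypothetical wrapper around the REST call above:

    def enroll_unmanaged(post_initialize, shared_token: str) -> str:
        first = post_initialize(shared_token)    # authenticates with the shared service token
        alias = first['alias_token']             # broker minted a per-machine alias
        second = post_initialize(alias)          # subsequent runs authenticate with the alias
        assert second['alias_token'] == alias    # resolved via ServiceTokenAlias, not re-minted
        return alias                             # stored by the actor as its new master_token
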
@@ -5,8 +5,8 @@ import typing
 import logging

 from uds import models
-from uds.core.util.stats.events import EVENT_NAMES
+from uds.core.util.cache import Cache
 from uds.core.util.stats import events, counters

 from . import types

@@ -19,13 +19,14 @@ class StatInterval(typing.NamedTuple):
     end: datetime.datetime

     @property
-    def start_poxix(self) -> int:
+    def start_timestamp(self) -> int:
         return calendar.timegm(self.start.timetuple())

     @property
-    def end_poxix(self) -> int:
+    def end_timestamp(self) -> int:
         return calendar.timegm(self.end.timetuple())

+
 class VirtualFileInfo(typing.NamedTuple):
     name: str
     size: int
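
The renamed properties (the old names misspelled "posix") convert the interval's naive datetimes to POSIX timestamps; calendar.timegm interprets the time tuple as UTC, unlike time.mktime, which assumes local time. A quick check:

    import calendar
    import datetime

    start = datetime.datetime(2021, 11, 11, 13, 16)
    print(calendar.timegm(start.timetuple()))  # 1636636560, i.e. 2021-11-11 13:16:00 UTC
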
@@ -34,6 +35,7 @@ class VirtualFileInfo(typing.NamedTuple):
     # Cache stamp
     stamp: int = -1

+
 # Dispatcher needs an Interval, an extension, the size and the offset
 DispatcherType = typing.Callable[[StatInterval, str, int, int], bytes]

@@ -77,6 +79,7 @@ class StatsFS(types.UDSFSInterface):
         self._dispatchers = {
             'events': (self._read_events, True),
             'pools': (self._read_pools, False),
+            'auths': (self._read_auths, False),
         }

     # Splits the filename and returns a tuple with "dispatcher", "interval", "extension"
@@ -92,7 +95,12 @@ class StatsFS(types.UDSFSInterface):
         except ValueError:
             raise FileNotFoundError

-        logger.debug('Dispatcher: %s, interval: %s, extension: %s', dispatcher, interval, extension)
+        logger.debug(
+            'Dispatcher: %s, interval: %s, extension: %s',
+            dispatcher,
+            interval,
+            extension,
+        )

         if dispatcher not in self._dispatchers:
             raise FileNotFoundError
@@ -108,7 +116,9 @@ class StatsFS(types.UDSFSInterface):

             range = self._interval[interval]
         else:
-            range = (StatsFS._interval['lastmonth'])  # Any value except "today" will do the trick
+            range = StatsFS._interval[
+                'lastmonth'
+            ]  # Any value except "today" will do the trick
             extension = interval

         if extension != 'csv':
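
So the parser accepts two filename shapes: dispatcher.interval.extension for the events file, and dispatcher.extension for the interval-less table dumps, where any range other than "today" behaves identically because the whole table is emitted anyway. A stand-alone sketch of that logic, assuming those two shapes are the only valid ones:

    def split_stats_filename(name: str):
        parts = name.split('.')
        if len(parts) == 3:                # e.g. 'events.today.csv'
            dispatcher, interval, extension = parts
        elif len(parts) == 2:              # e.g. 'pools.csv' (no interval part)
            dispatcher, extension = parts
            interval = 'lastmonth'         # placeholder: anything but 'today' acts the same
        else:
            raise FileNotFoundError(name)
        return dispatcher, interval, extension

    assert split_stats_filename('events.today.csv') == ('events', 'today', 'csv')
    assert split_stats_filename('pools.csv') == ('pools', 'lastmonth', 'csv')
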
@@ -130,14 +140,22 @@ class StatsFS(types.UDSFSInterface):
         # If len(path) == 0, return the list of possible stats files (from _dispatchers)
         # else, raise a FileNotFoundError
         if len(path) == 0:
-            return ['.', '..'] + [
+            return (
+                ['.', '..']
+                + [
                     f'{dispatcher}.{interval}.csv'
-                for dispatcher in filter(lambda x: self._dispatchers[x][1], self._dispatchers)
+                    for dispatcher in filter(
+                        lambda x: self._dispatchers[x][1], self._dispatchers
+                    )
                     for interval in self._interval
-            ] + [
-                f'{dispatcher}.csv'
-                for dispatcher in filter(lambda x: self._dispatchers[x][1] is False, self._dispatchers)
-            ]
+                ]
+                + [
+                    f'{dispatcher}.csv'
+                    for dispatcher in filter(
+                        lambda x: self._dispatchers[x][1] is False, self._dispatchers
+                    )
+                ]
+            )

         raise FileNotFoundError
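
With the auths dispatcher registered above, listing the filesystem root now yields one interval-qualified file per interval for events, plus flat files for the table dumps. A reduced model of the listing logic (the interval names here are assumed; the real set comes from StatsFS._interval):

    dispatchers = {'events': True, 'pools': False, 'auths': False}  # name -> needs interval?
    intervals = ['today', 'lastweek', 'lastmonth']

    listing = (
        ['.', '..']
        + [f'{d}.{i}.csv' for d, needs in dispatchers.items() if needs for i in intervals]
        + [f'{d}.csv' for d, needs in dispatchers.items() if not needs]
    )
    print(listing)
    # ['.', '..', 'events.today.csv', 'events.lastweek.csv', 'events.lastmonth.csv',
    #  'pools.csv', 'auths.csv']
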
@@ -156,31 +174,47 @@ class StatsFS(types.UDSFSInterface):
             cacheTime = 60

         # Check if the file info is cached
-        cached = self._cache.get(path[0])
+        cached = self._cache.get(path[0] + extension)
         if cached is not None:
             logger.debug('Cache hit for %s', path[0])
-            return cached
+            data = cached
+        else:
+            logger.debug('Cache miss for %s', path[0])
+            data = dispatcher(interval, extension, 0, 0)
+            self._cache.put(path[0] + extension, data, cacheTime)

         # Calculate the size of the file
-        size = len(dispatcher(interval, extension, 0, 0))
+        size = len(data)
         logger.debug('Size of %s: %s', path[0], size)

-        data = types.StatType(
+        return types.StatType(
             st_mode=(stat.S_IFREG | 0o755),
             st_nlink=1,
             st_size=size,
-            st_mtime=interval.start_poxix,
+            st_mtime=interval.start_timestamp,
         )

-        # store in cache
-        self._cache.put(path[0], data, cacheTime)
-        return data
-
     def read(self, path: typing.List[str], size: int, offset: int) -> bytes:
         logger.debug('Reading data from %s: offset: %s, size: %s', path, offset, size)

         dispatcher, interval, extension = self.getFilenameComponents(path)

+        # if interval is today, cache time is 10 seconds, else cache time is 60 seconds
+        if interval == StatsFS._interval['today']:
+            cacheTime = 10
+        else:
+            cacheTime = 60
+
+        # Check if the file info is cached
+        cached = self._cache.get(path[0] + extension)
+        if cached is not None:
+            logger.debug('Cache hit for %s', path[0])
+            data = cached
+        else:
+            logger.debug('Cache miss for %s', path[0])
+            data = dispatcher(interval, extension, 0, 0)
+            self._cache.put(path[0] + extension, data, cacheTime)
+
         # Dispatch the read to the dispatcher
         data = dispatcher(interval, extension, size, offset)
-        logger.debug('Readed %s data length', len(data))
+        logger.debug('Read %s bytes of data', len(data))
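
Both getattr and read now share the same cache-aside pattern, with the extension folded into the key so two differently rendered dumps of one dispatcher cannot collide. The pattern in isolation, where cache stands for any object with the get/put(key, value, seconds) interface of uds.core.util.cache.Cache:

    def cached_fetch(cache, key: str, ttl_seconds: int, produce):
        """Return cached bytes for key, producing and storing them on a miss."""
        data = cache.get(key)
        if data is None:
            data = produce()                    # e.g. dispatcher(interval, extension, 0, 0)
            cache.put(key, data, ttl_seconds)
        return data
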
@@ -201,7 +235,7 @@ class StatsFS(types.UDSFSInterface):
         virtualFile = models.StatsEvents.getCSVHeader().encode() + b'\n'
         # stamp is unix timestamp
         for record in models.StatsEvents.objects.filter(
-            stamp__gte=interval.start_poxix, stamp__lte=interval.end_poxix
+            stamp__gte=interval.start_timestamp, stamp__lte=interval.end_timestamp
         ):
             virtualFile += record.toCsv().encode() + b'\n'

@@ -210,10 +244,33 @@ class StatsFS(types.UDSFSInterface):
     def _read_pools(
         self, interval: StatInterval, extension: str, size: int, offset: int
     ) -> bytes:
-        logger.debug('Reading pools. Interval=%s, extension=%s, offset: %s, size: %s', interval, extension, offset, size)
+        logger.debug(
+            'Reading pools. Interval=%s, extension=%s, offset: %s, size: %s',
+            interval,
+            extension,
+            offset,
+            size,
+        )
         # Compose the csv file from what we know of service pools
         virtualFile = models.ServicePool.getCSVHeader().encode() + b'\n'
         # First, get the list of service pools
         for pool in models.ServicePool.objects.all().order_by('name'):
             virtualFile += pool.toCsv().encode() + b'\n'
         return virtualFile

+    def _read_auths(
+        self, interval: StatInterval, extension: str, size: int, offset: int
+    ) -> bytes:
+        logger.debug(
+            'Reading auths. Interval=%s, extension=%s, offset: %s, size: %s',
+            interval,
+            extension,
+            offset,
+            size,
+        )
+        # Compose the csv file from what we know of authenticators
+        virtualFile = models.Authenticator.getCSVHeader().encode() + b'\n'
+        # First, get the list of authenticators
+        for auth in models.Authenticator.objects.all().order_by('name'):
+            virtualFile += auth.toCsv().encode() + b'\n'
+        return virtualFile
server/src/uds/migrations/0044_servicetokenalias.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+# Generated by Django 3.2.8 on 2021-11-11 13:16
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('uds', '0043_clean_unused_config'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='ServiceTokenAlias',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('alias', models.CharField(max_length=128)),
+                ('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='aliases', to='uds.service')),
+            ],
+        ),
+    ]
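
This is the standard auto-generated Django migration for the new model below. The related_name='aliases' on the foreign key is what makes the service.aliases.create(...) call in the Initialize handler work, and the alias column carries no unique constraint, so uniqueness rests on the randomness of cryptoManager().randomString().
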
@@ -43,7 +43,7 @@ from .permissions import Permissions

 # Services
 from .provider import Provider
-from .service import Service
+from .service import Service, ServiceTokenAlias

 # Os managers
 from .os_manager import OSManager
@@ -245,6 +245,31 @@ class Authenticator(ManagedObjectModel, TaggingMixin):
         # Clears related permissions
         clean(toDelete)

+
+    # Returns the CSV header
+    @staticmethod
+    def getCSVHeader(sep: str = ',') -> str:
+        return sep.join(
+            [
+                'name',
+                'type',
+                'users',
+                'groups',
+            ]
+        )
+
+    # Returns this record as a csv line using the given separator (default: ',')
+    def toCsv(self, sep: str = ',') -> str:
+        return sep.join(
+            [
+                self.name,
+                self.data_type,
+                str(self.users.count()),
+                str(self.groups.count()),
+            ]
+        )
+
     def __str__(self):
         return u"{0} of type {1} (id:{2})".format(self.name, self.data_type, self.id)
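
These helpers mirror getCSVHeader/toCsv on the other exported models, so the StatsFS dispatchers can treat them uniformly. Worth noting: values are joined without any CSV quoting, so a name containing the separator would shift columns. The intended shape, with made-up values:

    print(','.join(['name', 'type', 'users', 'groups']))        # Authenticator.getCSVHeader()
    print(','.join(['Internal', 'InternalDBAuth', '42', '7']))  # shape of auth.toCsv()
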
@@ -54,6 +54,20 @@ if typing.TYPE_CHECKING:
 logger = logging.getLogger(__name__)


+class ServiceTokenAlias(models.Model):
+    """
+    This model stores the aliases for a service token.
+    """
+
+    service = models.ForeignKey(
+        'Service', on_delete=models.CASCADE, related_name='aliases'
+    )
+    alias = models.CharField(max_length=128)
+
+    def __str__(self):
+        return self.alias
+
+
 class Service(ManagedObjectModel, TaggingMixin):  # type: ignore
     """
     A Service represents a specified type of service offered to final users,
@@ -79,6 +93,8 @@ class Service(ManagedObjectModel, TaggingMixin):  # type: ignore
     # "fake" declarations for type checking
     objects: 'models.BaseManager[Service]'
     deployedServices: 'models.QuerySet[ServicePool]'
+    aliases: 'models.QuerySet[ServiceTokenAlias]'
+

     class Meta(ManagedObjectModel.Meta):
         """
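
The related_name gives the reverse accessor used throughout the commit. A sketch of the resulting ORM surface; this assumes a configured Django context and an existing Service instance named service:

    alias = cryptoManager().randomString()                       # as done in Initialize
    service.aliases.create(alias=alias)                          # reverse accessor from related_name
    assert ServiceTokenAlias.objects.filter(alias=alias).exists()
    owner = ServiceTokenAlias.objects.get(alias=alias).service   # back to the owning Service
    assert owner == service
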
@@ -78,6 +78,7 @@ class AuthenticatorsStats(StatsReportAuto):
                     typing.cast('models.Authenticator', a),
                     counters.CT_AUTH_SERVICES,
                     since=since,
+                    to=to,
                     interval=interval,
                     limit=MAX_ELEMENTS,
                     use_max=True,
@@ -88,6 +89,7 @@ class AuthenticatorsStats(StatsReportAuto):
                     typing.cast('models.Authenticator', a),
                     counters.CT_AUTH_USERS_WITH_SERVICES,
                     since=since,
+                    to=to,
                     interval=interval,
                     limit=MAX_ELEMENTS,
                     use_max=True,
@@ -97,6 +99,7 @@ class AuthenticatorsStats(StatsReportAuto):
                     typing.cast('models.Authenticator', a),
                     counters.CT_AUTH_USERS,
                     since=since,
+                    to=to,
                     interval=interval,
                     limit=MAX_ELEMENTS,
                     use_max=True,