Mirror of https://github.com/dkmstr/openuds.git (synced 2024-12-22 13:34:04 +03:00)

Commit 975dd80c5d (parent 6f85a42673): Some more fixes
@@ -279,14 +279,14 @@ class Users(DetailHandler):
             user.delete()
         except Exception as e:
-            logger.exception('Removing user')
+            logger.error('Error on user removal of %s.%s: %s', parent.name, item, e)
             raise self.invalid_item_response() from e

-    def servicesPools(self, parent: 'Model', item: str) -> typing.Any:
+    def servicesPools(self, parent: 'Model', item: str) -> list[dict[str, typing.Any]]:
         parent = ensure.is_instance(parent, Authenticator)
         uuid = process_uuid(item)
         user = parent.users.get(uuid=process_uuid(uuid))
-        res = []
+        res: list[dict[str, typing.Any]] = []
         groups = list(user.get_groups())
         for i in get_service_pools_for_groups(groups):
             res.append(

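Note: the removal handler now reports failures through logger.error() with lazy '%s' arguments instead of logger.exception(), so the log line carries the authenticator name, the item and the exception text without a full traceback. A minimal standalone sketch of that pattern (the logger name and sample values are illustrative, not taken from the commit):

    import logging

    logger = logging.getLogger(__name__)

    def remove_user(parent_name: str, item: str) -> None:
        try:
            # Stand-in for user.delete() failing in the real handler.
            raise RuntimeError('backend rejected the operation')
        except Exception as e:
            # Arguments are only interpolated if the record is actually emitted.
            logger.error('Error on user removal of %s.%s: %s', parent_name, item, e)
            raise
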
@@ -303,11 +303,11 @@ class Users(DetailHandler):
         return res

-    def userServices(self, parent: 'Authenticator', item: str) -> typing.Any:
+    def userServices(self, parent: 'Authenticator', item: str) -> list[dict[str, typing.Any]]:
         parent = ensure.is_instance(parent, Authenticator)
         uuid = process_uuid(item)
         user = parent.users.get(uuid=process_uuid(uuid))
-        res = []
+        res: list[dict[str, typing.Any]] = []
         for i in user.userServices.all():
             if i.state == State.USABLE:
                 v = AssignedService.item_as_dict(i)

@@ -336,10 +336,10 @@ class Groups(DetailHandler):
             q = parent.groups.all().order_by('name')
         else:
             q = parent.groups.filter(uuid=process_uuid(item))
-        res = []
+        res: list[dict[str, typing.Any]] = []
         i = None
         for i in q:
-            val = {
+            val: dict[str, typing.Any] = {
                 'id': i.uuid,
                 'name': i.name,
                 'comments': i.comments,

@@ -360,7 +360,7 @@ class Groups(DetailHandler):
                 result['pools'] = [v.uuid for v in get_service_pools_for_groups([i])]
             return result
         except Exception as e:
-            logger.exception('REST groups')
+            logger.error('Group item not found: %s.%s: %s', parent.name, item, e)
             raise self.invalid_item_response() from e

     def get_title(self, parent: 'Model') -> str:

@@ -434,31 +434,30 @@ class Groups(DetailHandler):
         fields = self.fields_from_params(valid_fields)
         is_pattern = fields.get('name', '').find('pat:') == 0
         auth = parent.get_instance()
+        to_save: dict[str, typing.Any] = {}
         if not item:  # Create new
             if not is_meta and not is_pattern:
                 auth.create_group(
                     fields
                 )  # this throws an exception if there is an error (for example, this auth can't create groups)
-            toSave = {}
             for k in valid_fields:
-                toSave[k] = fields[k]
-            toSave['comments'] = fields['comments'][:255]
-            toSave['is_meta'] = is_meta
-            toSave['meta_if_any'] = meta_if_any
-            group = parent.groups.create(**toSave)
+                to_save[k] = fields[k]
+            to_save['comments'] = fields['comments'][:255]
+            to_save['is_meta'] = is_meta
+            to_save['meta_if_any'] = meta_if_any
+            group = parent.groups.create(**to_save)
         else:
             if not is_meta and not is_pattern:
                 auth.modify_group(fields)
-            toSave = {}
             for k in valid_fields:
-                toSave[k] = fields[k]
-            del toSave['name']  # Name can't be changed
-            toSave['comments'] = fields['comments'][:255]
-            toSave['meta_if_any'] = meta_if_any
-            toSave['skip_mfa'] = fields['skip_mfa']
+                to_save[k] = fields[k]
+            del to_save['name']  # Name can't be changed
+            to_save['comments'] = fields['comments'][:255]
+            to_save['meta_if_any'] = meta_if_any
+            to_save['skip_mfa'] = fields['skip_mfa']

             group = parent.groups.get(uuid=process_uuid(item))
-            group.__dict__.update(toSave)
+            group.__dict__.update(to_save)

         if is_meta:
             # Do not allow to add meta groups to meta groups

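Note: the camelCase toSave dict is replaced by a single snake_case to_save, declared once with an explicit type before both the create and update branches. A reduced sketch of the resulting shape (a hypothetical helper, not a function in the codebase; field names mirror the hunk):

    import typing

    def build_group_payload(
        fields: dict[str, typing.Any],
        valid_fields: list[str],
        is_meta: bool,
        meta_if_any: bool,
    ) -> dict[str, typing.Any]:
        # One annotated dict, filled the same way by every branch.
        to_save: dict[str, typing.Any] = {}
        for k in valid_fields:
            to_save[k] = fields[k]
        to_save['comments'] = fields.get('comments', '')[:255]  # same truncation as the handler
        to_save['is_meta'] = is_meta
        to_save['meta_if_any'] = meta_if_any
        return to_save
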
@@ -97,7 +97,7 @@ class Publication(Environmentable, Serializable):
         environment: 'Environment',
         *,
         service: 'services.Service',
-        os_manager: typing.Optional['osmanagers.OSManager'] = None,
+        osmanager: typing.Optional['osmanagers.OSManager'] = None,
         revision: int = -1,
         servicepool_name: str = 'Unknown',
         uuid: str = '',

@@ -111,7 +111,7 @@ class Publication(Environmentable, Serializable):
         Environmentable.__init__(self, environment)
         Serializable.__init__(self)
         self._service = service
-        self._osmanager = os_manager
+        self._osmanager = osmanager
         self._revision = revision
         self._servicepool_name = servicepool_name
         self._uuid = uuid

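Note: because the parameters after the bare * are keyword-only, renaming os_manager to osmanager changes the public constructor signature, and every caller has to use the new keyword. A heavily reduced sketch of the effect (illustrative class, not the project's Publication):

    import typing

    class Publication:
        def __init__(self, *, service: str, osmanager: typing.Optional[str] = None) -> None:
            # Keyword-only: these arguments can only be passed by name.
            self._service = service
            self._osmanager = osmanager

    # Publication(service='svc', os_manager=None)   # would now raise TypeError
    pub = Publication(service='svc', osmanager=None)  # new spelling
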
@@ -35,6 +35,7 @@ import codecs
 import copy
 import datetime
 import inspect
+import itertools
 import logging
 from os import read
 import pickle  # nosec: safe usage

@@ -43,6 +44,7 @@ import time
 import typing
 import collections.abc
 import abc
+from django.conf import settings

 from django.utils.translation import gettext

@@ -1491,10 +1493,10 @@ class UserInterface(metaclass=UserInterfaceType):
             if fld_name in values:
                 fld.value = values[fld_name]
             else:
-                caller = inspect.stack()[1]
-                logger.warning(
-                    'Field %s not found (invoked from %s:%s)', fld_name, caller.filename, caller.lineno
-                )
+                logger.warning('Field %s.%s not found in values data, ', self.__class__.__name__, fld_name)
+                if settings.DEBUG:
+                    for caller in itertools.islice(inspect.stack(), 1, 8):
+                        logger.warning(' %s:%s:%s', caller.filename, caller.lineno, caller.function)

     def init_gui(self) -> None:
         """

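Note: instead of logging a single caller frame, the missing-field warning now emits one message and, only when settings.DEBUG is set, up to seven caller frames via itertools.islice(inspect.stack(), 1, 8). A standalone sketch without Django (the DEBUG flag is emulated as a plain boolean; message text is illustrative):

    import inspect
    import itertools
    import logging

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger(__name__)
    DEBUG = True  # stand-in for django.conf.settings.DEBUG

    def warn_missing(field_name: str) -> None:
        logger.warning('Field %s not found in values data', field_name)
        if DEBUG:
            # Skip frame 0 (this function) and log at most the next 7 callers.
            for caller in itertools.islice(inspect.stack(), 1, 8):
                logger.warning('  %s:%s:%s', caller.filename, caller.lineno, caller.function)

    warn_missing('missing_field')
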
@@ -1612,9 +1614,7 @@ class UserInterface(metaclass=UserInterfaceType):
             return True

         # For future use, right now we only have one version
-        _version = values[
-            len(SERIALIZATION_HEADER) : len(SERIALIZATION_HEADER) + len(SERIALIZATION_VERSION)
-        ]
+        _version = values[len(SERIALIZATION_HEADER) : len(SERIALIZATION_HEADER) + len(SERIALIZATION_VERSION)]

         values = values[len(SERIALIZATION_HEADER) + len(SERIALIZATION_VERSION) :]

@@ -64,12 +64,12 @@ class Group(UUIDModel):
     name = models.CharField(max_length=128, db_index=True)
     state = models.CharField(max_length=1, default=State.ACTIVE, db_index=True)
     comments = models.CharField(max_length=256, default='')
-    users = models.ManyToManyField(User, related_name='groups')
+    users: 'models.ManyToManyField[User, Group]' = models.ManyToManyField(User, related_name='groups')
     is_meta = models.BooleanField(default=False, db_index=True)
     # meta_if_any means that if an user belongs to ANY of the groups, it will be considered as belonging to this group
     # if it is false, the user must belong to ALL of the groups to be considered as belonging to this group
     meta_if_any = models.BooleanField(default=False)
-    groups = models.ManyToManyField('self', symmetrical=False)
+    groups: 'models.ManyToManyField[Group, Group]' = models.ManyToManyField('self', symmetrical=False)
     created = models.DateTimeField(default=sql_datetime, blank=True)
     skip_mfa = models.CharField(max_length=1, default=State.INACTIVE, db_index=True)

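Note: the subscripted ManyToManyField annotations exist only for the type checker (the generic parameters come from type stubs such as django-stubs, not from Django at runtime), which is presumably why they are written as quoted strings. A small standalone sketch of why quoting matters: a string annotation is never evaluated at runtime, so subscripting a non-generic class inside quotes cannot fail (the class names here are made up for illustration):

    class NotGeneric:
        pass

    class Holder:
        # At runtime this annotation is just a string; it is only evaluated by
        # the type checker, so subscripting NotGeneric here cannot raise.
        field: 'NotGeneric[int]'

    print(Holder.__annotations__)  # {'field': 'NotGeneric[int]'}
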
@@ -85,7 +85,7 @@ class Group(UUIDModel):
         """
         return self.deployedServices

-    class Meta:  # pylint: disable=too-few-public-methods
+    class Meta:  # pyright: ignore
         """
         Meta class to declare default order and unique multiple field index
         """

|
|||||||
publication_manager().cancel(self)
|
publication_manager().cancel(self)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def pre_delete(sender, **kwargs) -> None: # pylint: disable=unused-argument
|
def pre_delete(sender: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=unused-argument
|
||||||
"""
|
"""
|
||||||
Used to invoke the Service class "Destroy" before deleting it from database.
|
Used to invoke the Service class "Destroy" before deleting it from database.
|
||||||
|
|
||||||
|
@@ -56,6 +56,7 @@ if typing.TYPE_CHECKING:
 logger = logging.getLogger(__name__)


+
 class ProxmoxServiceFixed(FixedService):  # pylint: disable=too-many-public-methods
     """
     Proxmox fixed machines service.

@@ -112,6 +112,11 @@ class TestUserService(services.UserService):
         self.data.count = 3
         return types.states.State.RUNNING

+    def deploy_for_cache(self, level: int) -> types.states.State:
+        logger.info('Deploying for cache %s %s', level, self.data)
+        self.data.count = 3
+        return types.states.State.RUNNING
+
     def check_state(self) -> types.states.State:
         logger.info('Checking state of deployment %s', self.data)
         if self.data.count <= 0:

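Note: the test UserService gains a deploy_for_cache() alongside its existing deploy method, following the same pattern: arm a small countdown, report RUNNING, and let check_state() drive the deployment to completion. A reduced, self-contained stand-in for that state machine (the class, data holder and state strings are hypothetical simplifications, not the uds types):

    import dataclasses

    @dataclasses.dataclass
    class _Data:
        count: int = 0

    class FakeUserService:
        """Each deploy arms a countdown that check_state() consumes."""

        def __init__(self) -> None:
            self.data = _Data()

        def deploy_for_cache(self, level: int) -> str:
            self.data.count = 3
            return 'running'

        def check_state(self) -> str:
            self.data.count -= 1
            return 'finished' if self.data.count <= 0 else 'running'

    svc = FakeUserService()
    svc.deploy_for_cache(level=1)
    print([svc.check_state() for _ in range(3)])  # ['running', 'running', 'finished']
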
@@ -41,7 +41,6 @@ import uuid

 from uds.core import types, environment
 from uds.core.ui.user_interface import gui
-from uds.services.OpenNebula.on import vm

 from ...utils.test import UDSTestCase
 from ...utils.autospec import autospec, AutoSpecMethodInfo

@@ -317,11 +316,11 @@ CLIENT_METHODS_INFO: typing.Final[list[AutoSpecMethodInfo]] = [
     # list_machines
     AutoSpecMethodInfo('list_machines', return_value=VMS_INFO),
     # get_machine_pool_info
-    AutoSpecMethodInfo('get_machine_pool_info', method=lambda vmid, poolid, **kwargs: VMS_INFO[vmid - 1]),
+    AutoSpecMethodInfo('get_machine_pool_info', method=lambda vmid, poolid, **kwargs: VMS_INFO[vmid - 1]),  # pyright: ignore
     # get_machine_info
-    AutoSpecMethodInfo('get_machine_info', method=lambda vmid, *args, **kwargs: VMS_INFO[vmid - 1]),
+    AutoSpecMethodInfo('get_machine_info', method=lambda vmid, *args, **kwargs: VMS_INFO[vmid - 1]),  # pyright: ignore
     # get_machine_configuration
-    AutoSpecMethodInfo('get_machine_configuration', method=lambda vmid, **kwargs: VMS_CONFIGURATION[vmid - 1]),
+    AutoSpecMethodInfo('get_machine_configuration', method=lambda vmid, **kwargs: VMS_CONFIGURATION[vmid - 1]),  # pyright: ignore
     # set_machine_ha return None
     # start_machine
     AutoSpecMethodInfo('start_machine', return_value=UPID),

@@ -340,24 +339,24 @@ CLIENT_METHODS_INFO: typing.Final[list[AutoSpecMethodInfo]] = [
     # get_storage
     AutoSpecMethodInfo(
         'get_storage',
-        method=lambda storage, node, **kwargs: next(filter(lambda s: s.storage == storage, STORAGES)),
+        method=lambda storage, node, **kwargs: next(filter(lambda s: s.storage == storage, STORAGES)),  # pyright: ignore
     ),
     # list_storages
     AutoSpecMethodInfo(
         'list_storages',
-        method=lambda node, **kwargs: (
-            (list(filter(lambda s: s.node == node, STORAGES))) if node is not None else STORAGES
+        method=lambda node, **kwargs: (  # pyright: ignore
+            (list(filter(lambda s: s.node == node, STORAGES))) if node is not None else STORAGES  # pyright: ignore
         ),
     ),
     # get_node_stats
     AutoSpecMethodInfo(
-        'get_node_stats', method=lambda node, **kwargs: next(filter(lambda n: n.name == node, NODE_STATS))
+        'get_node_stats', method=lambda node, **kwargs: next(filter(lambda n: n.name == node, NODE_STATS))  # pyright: ignore
     ),
     # list_pools
     AutoSpecMethodInfo('list_pools', return_value=POOLS),
     # get_pool_info
     AutoSpecMethodInfo(
-        'get_pool_info', method=lambda poolid, **kwargs: next(filter(lambda p: p.poolid == poolid, POOLS))
+        'get_pool_info', method=lambda poolid, **kwargs: next(filter(lambda p: p.poolid == poolid, POOLS))  # pyright: ignore
     ),
     # get_console_connection
     AutoSpecMethodInfo('get_console_connection', return_value=CONSOLE_CONNECTION_INFO),

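Note: the lambdas passed as method= have untyped parameters, which strict pyright reports; the trailing "# pyright: ignore" comments silence the diagnostic on those lines only, leaving the rest of the file checked. A small self-contained sketch of the shape (MethodStub and the sample data are simplified stand-ins, not the project's AutoSpecMethodInfo):

    import dataclasses
    import typing

    @dataclasses.dataclass
    class MethodStub:  # simplified stand-in for AutoSpecMethodInfo
        name: str
        method: 'typing.Callable[..., typing.Any] | None' = None

    RECORDS = [{'id': 1}, {'id': 2}]

    STUBS = [
        # The lambda parameters are untyped; a trailing "# pyright: ignore"
        # suppresses the strict-mode diagnostic for this single line.
        MethodStub('get_record', method=lambda rid, **kwargs: RECORDS[rid - 1]),  # pyright: ignore
    ]

    print(STUBS[0].method(2))  # {'id': 2}
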
@@ -390,9 +389,11 @@ SERVICE_LINKED_VALUES_DICT: typing.Final[gui.ValuesDictType] = {
 }

 SERVICE_FIXED_VALUES_DICT: typing.Final[gui.ValuesDictType] = {
+    'token': '',
     'pool': POOLS[0].poolid,
     'machines': [str(VMS_INFO[2].vmid), str(VMS_INFO[3].vmid), str(VMS_INFO[4].vmid)],
     'use_snapshots': True,
+    'prov_uuid': '',
 }


@@ -64,7 +64,7 @@ class TestProxmovFixedService(UDSTransactionTestCase):
             api.test.assert_called_with()

     def test_service_methods_1(self) -> None:
-        with fixtures.patch_provider_api() as api:
+        with fixtures.patch_provider_api():
             service = fixtures.create_service_fixed()

             self.assertEqual(service.get_machine_info(2).name, fixtures.VMS_INFO[1].name)

@@ -72,7 +72,7 @@ class TestProxmovFixedService(UDSTransactionTestCase):
             # is_available is already tested, so we will skip it

             # Enumerate assignables
-            locate_vm = lambda vmid: next(
+            locate_vm: typing.Callable[[str], typing.Any] = lambda vmid: next(
                 (x for x in fixtures.VMS_INFO if x.vmid == int(vmid)), fixtures.VMS_INFO[0]
             )

|
|||||||
self.assertEqual(service._get_assigned_machines(), set([vmid, vmid2]))
|
self.assertEqual(service._get_assigned_machines(), set([vmid, vmid2]))
|
||||||
|
|
||||||
def test_service_methods_2(self) -> None:
|
def test_service_methods_2(self) -> None:
|
||||||
with fixtures.patch_provider_api() as api:
|
with fixtures.patch_provider_api():
|
||||||
service = fixtures.create_service_fixed()
|
service = fixtures.create_service_fixed()
|
||||||
|
|
||||||
# Get machine name
|
# Get machine name
|
||||||
|
@@ -37,10 +37,10 @@ from unittest import mock
 class AutoSpecMethodInfo:
     name: str
     return_value: typing.Any = None
-    method: 'typing.Callable|None' = None
+    method: 'typing.Callable[..., typing.Any]|None' = None


-def autospec(cls: type, metods_info: collections.abc.Iterable, **kwargs: typing.Any) -> mock.Mock:
+def autospec(cls: type, metods_info: collections.abc.Iterable[AutoSpecMethodInfo], **kwargs: typing.Any) -> mock.Mock:
     """
     This is a helper function that will create a mock object with the same methods as the class passed as parameter.
     This is useful for testing purposes, where you want to mock a class and still have the same methods available.

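Note: tightening metods_info to Iterable[AutoSpecMethodInfo] and method to Callable[..., Any]|None documents exactly what the helper expects. The project's implementation is not shown in this diff; the following is only a rough sketch, under the assumption that such a helper wires each described method of an autospec'd mock to either a side_effect callable or a fixed return value:

    import collections.abc
    import dataclasses
    import typing
    from unittest import mock

    @dataclasses.dataclass
    class AutoSpecMethodInfo:
        name: str
        return_value: typing.Any = None
        method: 'typing.Callable[..., typing.Any]|None' = None

    def autospec(cls: type, methods_info: collections.abc.Iterable[AutoSpecMethodInfo], **kwargs: typing.Any) -> mock.Mock:
        # Rough sketch: create a spec'd mock and attach each described method.
        obj = mock.create_autospec(cls, instance=True, **kwargs)
        for info in methods_info:
            target = getattr(obj, info.name)
            if info.method is not None:
                target.side_effect = info.method
            else:
                target.return_value = info.return_value
        return obj

    class Api:
        def ping(self) -> str: ...

    api = autospec(Api, [AutoSpecMethodInfo('ping', return_value='pong')])
    print(api.ping())  # 'pong'
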