Mirror of https://github.com/dkmstr/openuds.git (synced 2025-10-11 03:33:46 +03:00)
Compare commits: 98 commits, release-v4 ... v4.0
Commits (SHA1):
844060addb, cd60b398a9, 979f992b6d, 83689dddaa, 037b4abad1, 839e4c6b1d, 2fd157e463,
7e51c1fd93, b6af59cc44, 50072e948e, c5e0d0721f, afbd4c5355, e4377b83e4, bf97c6f2dc,
6763de2bab, b4ca743d7c, 20f7ae7fcd, 8aac4f9aa5, f494c706fc, 76b488dc1d, 826cc7aed8,
4da15d66fe, 79495fc3b1, f438a9241e, e37b345aff, ce1330066f, 20e86cd8c7, dc52e37abc,
69fae6a1a6, 7c14923afe, 9e66583b4e, 34676c817f, d17224c9cb, b57b00f3fc, f82041da1e,
03a837f865, 473dc2577f, 49dfaf3709, f5afb79a2b, bd26fb38d9, 95f0b0ab26, 28433fc33e,
fc4e7414df, e61cb1f855, 689214cf84, d268478767, 4a5ad5dc09, 7365ee8cc6, 5a93aa15e8,
1fddc17b75, ca540d7725, fe11b485ed, 12394c873c, 5904a9c9c5, 6ecefbabe8, 74908f186c,
102b36d090, 52f8a91f75, ad9a0c3f11, c5299378cf, 6c315ef8a8, 6f192a8f7c, 890f00b111,
3e67e3ba30, e5ea6d6a33, 3009ae601a, ed2e3c557d, 7ca96e4262, 46546a837e, f1ffe0cb9c,
7ce0fb0f22, a90d2719a3, e8d7da650c, 6f4d14bdf2, 9f303b8cfa, 8db36ac964, 921abfe0bb,
15b13b00ae, 763ca4bd07, faf852cdaa, 35e4422e0e, 288757c984, 9b0ea5f893, af2178d338,
3ec748eba6, 429700d4c1, 91184a24b0, 8ae073bc27, d4b90c387b, bf1b4a3d31, ed99412e71,
d387366d15, f781a8001c, 1be33751d9, b2baf61d0e, 6d1e999b1f, 076a300751, 43406e10f6
Submodule actor updated: a6ae57a964...04ce3fc2d1
@@ -14,6 +14,7 @@ BASE_DIR = '/'.join(
 )  # If used 'relpath' instead of abspath, returns path of "enterprise" instead of "openuds"

 DEBUG = True
+PROFILING = False

 # USE_X_FORWARDED_HOST = True
 SECURE_PROXY_SSL_HEADER = (
@@ -36,6 +37,14 @@ DATABASES = {
         'HOST': 'localhost',  # Set to empty string for localhost. Not used with sqlite3.
         'PORT': '3306',  # Set to empty string for default. Not used with sqlite3.
         # 'CONN_MAX_AGE': 600, # Enable DB Pooling, 10 minutes max connection duration
     },
+    'persistent': {
+        'ENGINE': 'django.db.backends.sqlite3',  # Persistent DB, used for persistent data
+        'NAME': os.path.join(BASE_DIR, 'persistent.sqlite3'),  # Path to persistent DB file
+        'OPTIONS': {
+            'timeout': 20,  # Timeout for sqlite3 connections
+            'transaction_mode': 'IMMEDIATE',  # Use immediate transaction mode for better concurrency
+        },
+    }
 }
@@ -34,6 +34,7 @@ import typing

 from django.utils.translation import gettext
 from django.utils.translation import gettext_lazy as _
+from django.db import transaction

 from uds import models
 from uds.core import consts, types, ui
@@ -69,6 +70,7 @@ class ServersTokens(ModelHandler):
         {'os': {'title': _('OS')}},
         {'username': {'title': _('Issued by')}},
         {'stamp': {'title': _('Date'), 'type': 'datetime'}},
+        {'mac': {'title': _('MAC Address')}},
     ]

     def item_as_dict(self, item: 'Model') -> dict[str, typing.Any]:
@@ -163,7 +165,10 @@ class ServersServers(DetailHandler):
             {'mac': {'title': _('Mac')}},
         ]
         if not parent.is_managed()
-        else [{'listen_port': {'title': _('Port')}}]
+        else [
+            {'mac': {'title': _('Mac')}},
+            {'listen_port': {'title': _('Port')}},
+        ]
     )
     + [
         {
@@ -301,9 +306,14 @@ class ServersServers(DetailHandler):
                 raise self.invalid_item_response() from None

         else:
+            # Remove current server and add the new one in a single transaction
             try:
-                server = models.Server.objects.get(uuid=process_uuid(item))
-                parent.servers.add(server)
+                with transaction.atomic():
+                    current_server = models.Server.objects.get(uuid=process_uuid(item))
+                    new_server = models.Server.objects.get(uuid=process_uuid(self._params['server']))
+                    parent.servers.remove(current_server)
+                    parent.servers.add(new_server)
+                    item = new_server.uuid
             except Exception:
                 raise self.invalid_item_response() from None
         return {'id': item}
@@ -328,7 +338,7 @@ class ServersServers(DetailHandler):
         :param item:
         """
         item = models.Server.objects.get(uuid=process_uuid(id))
-        self.ensure_has_access(item, types.permissions.PermissionType.MANAGEMENT)
+        self.ensure_has_access(parent, types.permissions.PermissionType.MANAGEMENT)
         item.maintenance_mode = not item.maintenance_mode
         item.save()
         return 'ok'
@@ -525,7 +535,8 @@ class ServersGroups(ModelHandler):
                 'hostname': s[1].hostname,
                 'mac': s[1].mac if s[1].mac != consts.MAC_UNKNOWN else '',
                 'ip': s[1].ip,
-                'load': s[0].load() if s[0] else 0,
+                'load': s[0].load(weights=item.weights) if s[0] else 0,
+                'weights': item.weights.as_dict(),
             },
         }
         for s in ServerManager.manager().get_server_stats(item.servers.all())
@@ -236,7 +236,7 @@ class AssignedService(DetailHandler):
         if not item:
             raise self.invalid_item_response('Only modify is allowed')
         fields = self.fields_from_params(['auth_id:_', 'user_id:_', 'ip:_'])

         userservice = parent.userServices.get(uuid=process_uuid(item))
         if 'user_id' in fields and 'auth_id' in fields:
             user = models.User.objects.get(uuid=process_uuid(fields['user_id']))
@@ -307,17 +307,11 @@ class CachedService(AssignedService):
         {'creation_date': {'title': _('Creation date'), 'type': 'datetime'}},
         {'revision': {'title': _('Revision')}},
         {'unique_id': {'title': 'Unique ID'}},
+        {'ip': {'title': _('IP')}},
         {'friendly_name': {'title': _('Friendly name')}},
+        {'state': {'title': _('State'), 'type': 'dict', 'dict': State.literals_dict()}},
     ] + (
         [
-            {
-                'state': {
-                    'title': _('State'),
-                    'type': 'dict',
-                    'dict': State.literals_dict(),
-                }
-            },
-            {'ip': {'title': _('IP')}},
             {'cache_level': {'title': _('Cache level')}},
             {'actor_version': {'title': _('Actor version')}},
         ]
@@ -197,7 +197,7 @@ class RadiusClient:
                 if i.startswith(groupclass_prefix)
             ]
         else:
-            logger.info('No "Class (25)" attribute found')
+            logger.info('No "Class (25)" attribute found: %s', reply)
             return ([], '', b'')

         # ...and mfa code
@@ -657,7 +657,10 @@ class SAMLAuthenticator(auths.Authenticator):
             raise exceptions.auth.AuthenticatorException(gettext('Error processing SAML response: ') + str(e))
         errors = typing.cast(list[str], auth.get_errors())
         if errors:
-            raise exceptions.auth.AuthenticatorException('SAML response error: ' + str(errors))
+            logger.debug('Errors processing SAML response: %s (%s)', errors, auth.get_last_error_reason())  # pyright: ignore reportUnknownVariableType
+            logger.debug('post_data: %s', req['post_data'])
+            logger.info('Response XML: %s', auth.get_last_response_xml())  # pyright: ignore reportUnknownVariableType
+            raise exceptions.auth.AuthenticatorException(f'SAML response error: {errors} ({auth.get_last_error_reason()})')

         if not auth.is_authenticated():
             raise exceptions.auth.AuthenticatorException(gettext('SAML response not authenticated'))
@@ -80,13 +80,20 @@ def uds_cookie(
             cookie,
             samesite='Lax',
             httponly=config.GlobalConfig.ENHANCED_SECURITY.as_bool(),
             secure=True if config.GlobalConfig.ENHANCED_SECURITY.as_bool() else False,
         )
         request.COOKIES['uds'] = cookie
     else:
         cookie = request.COOKIES['uds'][: consts.auth.UDS_COOKIE_LENGTH]

     if response and force:
-        response.set_cookie('uds', cookie)
+        response.set_cookie(
+            'uds',
+            cookie,
+            samesite='Lax',
+            httponly=config.GlobalConfig.ENHANCED_SECURITY.as_bool(),
+            secure=True if config.GlobalConfig.ENHANCED_SECURITY.as_bool() else False,
+        )

     return cookie

@@ -117,7 +124,7 @@ def root_user() -> models.User:

 # Decorator to make easier protect pages that needs to be logged in
 def weblogin_required(
-    admin: typing.Union[bool, typing.Literal['admin']] = False
+    admin: typing.Union[bool, typing.Literal['admin']] = False,
 ) -> collections.abc.Callable[
     [collections.abc.Callable[..., HttpResponse]], collections.abc.Callable[..., HttpResponse]
 ]:
@@ -138,7 +145,7 @@ def weblogin_required(
     """

     def decorator(
-        view_func: collections.abc.Callable[..., HttpResponse]
+        view_func: collections.abc.Callable[..., HttpResponse],
     ) -> collections.abc.Callable[..., HttpResponse]:
         @wraps(view_func)
         def _wrapped_view(
@@ -173,7 +180,7 @@ def is_trusted_ip_forwarder(ip: str) -> bool:

 # Decorator to protect pages that needs to be accessed from "trusted sites"
 def needs_trusted_source(
-    view_func: collections.abc.Callable[..., HttpResponse]
+    view_func: collections.abc.Callable[..., HttpResponse],
 ) -> collections.abc.Callable[..., HttpResponse]:
     """
     Decorator to set protection to access page
@@ -74,3 +74,5 @@ UNLIMITED: typing.Final[int] = -1

 # Constant marking no more names available
 NO_MORE_NAMES: typing.Final[str] = 'NO-NAME-ERROR'
+# For convenience, same as MAC_UNKNOWN, but different meaning
+NO_MORE_MACS: typing.Final[str] = MAC_UNKNOWN
@@ -215,6 +215,9 @@ class Environment:
     def __exit__(self, exc_type: typing.Any, exc_value: typing.Any, traceback: typing.Any) -> None:
         if self._key == TEST_ENV or (self._key.startswith('#_#') and self._key.endswith('#^#')):
             self.clean_related_data()

+    def __str__(self) -> str:
+        return f'Environment: {self._key}'
+

 class Environmentable:
@@ -108,23 +108,27 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):

     def execute_delayed_task(self) -> None:
         now = sql_now()
-        filt = Q(execution_time__lt=now) | Q(insert_date__gt=now + timedelta(seconds=30))
+        filt = Q(execution_time__lt=now) | Q(insert_date__gt=now + timedelta(seconds=3))
         # If next execution is before now or last execution is in the future (clock changed on this server, we take that task as executable)
         try:
             with transaction.atomic():  # Encloses
-                # Throws exception if no delayed task is avilable
-                task: DBDelayedTask = (
-                    DBDelayedTask.objects.select_for_update()
-                    .filter(filt)
-                    .order_by('execution_time')[0]
-                )  # @UndefinedVariable
-                if task.insert_date > now + timedelta(seconds=30):
-                    logger.warning('Executed %s due to insert_date being in the future!', task.type)
+                task: DBDelayedTask|None = DBDelayedTask.objects.select_for_update().filter(filt).order_by('execution_time').first()
+                if not task:
+                    return
+
+                logger.debug('Obtained delayed task %s for execution', task)
+
+                if task.insert_date > now + timedelta(seconds=3):
+                    logger.warning(
+                        'Executed %s due to insert_date being in the future!, insert_date: %s, now: %s',
+                        task.type,
+                        task.insert_date,
+                        now,
+                    )
                 task_instance_dump = base64.b64decode(task.instance.encode())
                 task.delete()
             task_instance = pickle.loads(task_instance_dump)  # nosec: controlled pickle
         except IndexError:
             return  # No problem, there is no waiting delayed task
         except OperationalError:
             logger.info('Retrying delayed task')
             return
@@ -139,6 +143,8 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):
             # Re-create environment data
             task_instance.env = Environment.type_environment(task_instance.__class__)
             DelayedTaskThread(task_instance).start()
+        else:
+            logger.error('Could not load delayed task instance from %s <%s>', task, task_instance_dump)

     def _insert(self, instance: DelayedTask, delay: int, tag: str) -> None:
         now = sql_now()
@@ -160,14 +166,16 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):
             exec_time,
         )

-        DBDelayedTask.objects.create(
+        created = DBDelayedTask.objects.create(
             type=type_name,
-            instance=instance_dump,  # @UndefinedVariable
+            instance=instance_dump,
             insert_date=now,
             execution_delay=delay,
             execution_time=exec_time,
             tag=tag,
         )

+        logger.debug('Delayed task %s inserted with', created)

     def insert(self, instance: DelayedTask, delay: int, tag: str = '') -> bool:
         retries = 3
@@ -175,6 +183,7 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):
             retries -= 1
             try:
                 self._insert(instance, delay, tag)
+                logger.debug('Delayed task %s inserted with tag %s', instance, tag)
                 break
             except Exception as e:
                 logger.info('Exception inserting a delayed task %s: %s', e.__class__, e)
@@ -212,7 +221,6 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):
         logger.debug("At loop")
         while DelayedTaskRunner._keep_running:
             try:
-                time.sleep(self.granularity)
                 self.execute_delayed_task()
             except Exception as e:
                 logger.error('Unexpected exception at run loop %s: %s', e.__class__, e)
@@ -220,4 +228,5 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):
                 connections['default'].close()
             except Exception:
                 logger.exception('Exception clossing connection at delayed task')
+            time.sleep(self.granularity)
         logger.info('Exiting DelayedTask Runner because stop has been requested')
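The hunks above also tighten the clock-skew tolerance from 30 to 3 seconds: a delayed task is claimable either when it is due, or when its insert_date lies in the future, which indicates the wall clock moved. A minimal, runnable sketch of that selection rule in plain Python (illustrative only, not the project's actual ORM query):

# Sketch of the claimability rule used by execute_delayed_task() above, with plain datetimes.
import datetime

SKEW = datetime.timedelta(seconds=3)  # same skew window the diff introduces

def is_claimable(execution_time: datetime.datetime, insert_date: datetime.datetime,
                 now: datetime.datetime) -> bool:
    # Claim the task if it is due, or if its insert_date is "in the future" beyond the skew
    # window (meaning the server clock changed and the task should be taken anyway).
    return execution_time < now or insert_date > now + SKEW

now = datetime.datetime(2025, 1, 1, 12, 0, 0)
print(is_claimable(now + datetime.timedelta(seconds=10), now - datetime.timedelta(seconds=5), now))  # False: not due yet
print(is_claimable(now - datetime.timedelta(seconds=1), now - datetime.timedelta(seconds=60), now))  # True: due
print(is_claimable(now + datetime.timedelta(hours=1), now + datetime.timedelta(minutes=5), now))     # True: clock moved backwards

The real query expresses the same rule with Django Q objects and claims a single row with select_for_update() inside transaction.atomic(), as the diff shows.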
@@ -162,10 +162,12 @@ class Scheduler:
             .filter(fltr)
             .order_by('next_execution')[0]
         )
-        if job.last_execution > now:
+        if job.last_execution > now + timedelta(seconds=3):  # Give some skew
             logger.warning(
-                'EXecuted %s due to last_execution being in the future!',
+                'Executed %s due to last_execution being in the future!: %s > %s + 3',
                 job.name,
+                job.last_execution,
+                now,
             )
         job.state = State.RUNNING
         job.owner_server = self._hostname
@@ -51,7 +51,7 @@ class NotificationsManager(metaclass=singleton.Singleton):

     _initialized: bool = False

-    def _ensure_local_db_exists(self) -> bool:
+    def ensure_local_db_exists(self) -> bool:
         if not apps.ready:
             return False

@@ -85,7 +85,7 @@ class NotificationsManager(metaclass=singleton.Singleton):
         from uds.models.notifications import Notification  # pylint: disable=import-outside-toplevel

         # Due to use of local db, we must ensure that it exists (and cannot do it on ready)
-        if self._ensure_local_db_exists() is False:
+        if self.ensure_local_db_exists() is False:
             return  # Not initialized apps yet, so we cannot do anything

         # logger.debug(
@@ -117,7 +117,8 @@ class PublicationLauncher(DelayedTask):
             ):  # If not preparing (may has been canceled by user) just return
                 return
             servicepool_publication.state = State.PREPARING
-            servicepool_publication.save()
+            servicepool_publication.state_date = now
+            servicepool_publication.save(update_fields=['state', 'state_date'])
             pi = servicepool_publication.get_instance()
             state = pi.publish()
             servicepool: ServicePool = servicepool_publication.deployed_service
@@ -119,9 +119,9 @@ class ServerManager(metaclass=singleton.Singleton):
         Returns:
             An iterator of servers with activity in the last last_activity_delta time
         """

         op = operator.gt if with_activity else operator.le

         activity_limit = model_utils.sql_now() - last_activity_delta
         # Get all servers with activity in the last 10 minutes
         for server in server_group.servers.filter(maintenance_mode=False):
@@ -181,7 +181,7 @@ class ServerManager(metaclass=singleton.Singleton):
         weight_threshold_f = weight_threshold / 100

         def _real_weight(stats: 'types.servers.ServerStats') -> float:
-            stats_weight = stats.load()
+            stats_weight = stats.load(weights=server_group.weights)

             if weight_threshold == 0:
                 return stats_weight
@@ -545,7 +545,12 @@ class ServerManager(metaclass=singleton.Singleton):
         # Get the stats for all servers, but in parallel
         server_stats = self.get_server_stats(fltrs)
         # Sort by load, lower first (lower is better)
-        return [s[1] for s in sorted(server_stats, key=lambda x: x[0].load() if x[0] else 999999999)]
+        return [
+            s[1]
+            for s in sorted(
+                server_stats, key=lambda x: x[0].load(weights=server_group.weights) if x[0] else 999999999
+            )
+        ]

     def perform_maintenance(self, server_group: 'models.ServerGroup') -> None:
         """Realizes maintenance on server group
@@ -50,13 +50,18 @@ def process_log(server: 'models.Server', data: dict[str, typing.Any]) -> typing.
     try:
         userservice = models.UserService.objects.get(uuid=data['userservice_uuid'])
         log.log(
-            userservice, types.log.LogLevel.from_str(data['level']), data['message'], source=types.log.LogSource.SERVER
+            userservice,
+            types.log.LogLevel.from_str(data['level']),
+            data['message'],
+            source=types.log.LogSource.SERVER,
         )
         return rest_result(consts.OK)
     except models.UserService.DoesNotExist:
         pass  # If not found, log on server

-    log.log(server, types.log.LogLevel.from_str(data['level']), data['message'], source=types.log.LogSource.SERVER)
+    log.log(
+        server, types.log.LogLevel.from_str(data['level']), data['message'], source=types.log.LogSource.SERVER
+    )

     return rest_result(consts.OK)

@@ -196,5 +201,5 @@ def process(server: 'models.Server', data: dict[str, typing.Any]) -> typing.Any:
     try:
         return fnc(server, data)
     except Exception as e:
-        logger.error('Exception processing event %s: %s', data, e)
+        logger.exception('Exception processing event %s: %s', data, e)
         return rest_result('error', error=str(e))
@@ -219,18 +219,33 @@ class ServerApiRequester:
         logger.debug(
             'Notifying preconnect of service %s to server %s: %s', userservice.uuid, self.server.host, info
         )

+        # Fix username to contain the domain if needed
+        username = info.username
+        if info.domain:
+            if '@' in username:
+                username = username.split('@')[0]
+            if '.' in info.domain:  # FQDN domain
+                username = f'{username}@{info.domain}'
+            else:  # NetBIOS domain
+                username = f'{info.domain}\\{username}'
+
+        connect_data = types.connections.PreconnectRequest(
+            user=username,  # The username that will be used to login
+            protocol=info.protocol,
+            ip=src.ip,
+            hostname=src.hostname,
+            udsuser=userservice.user.name + '@' + userservice.user.manager.name if userservice.user else '',
+            udsuser_uuid=userservice.user.uuid if userservice.user else '',
+            userservice_uuid=userservice.uuid,
+            service_type=info.service_type,
+        )
+
+        logger.debug('Preconnect data to send: %s', connect_data)
+
         self.post(
             'preconnect',
-            types.connections.PreconnectRequest(
-                user=info.username,
-                protocol=info.protocol,
-                ip=src.ip,
-                hostname=src.hostname,
-                udsuser=userservice.user.name + '@' + userservice.user.manager.name if userservice.user else '',
-                udsuser_uuid=userservice.user.uuid if userservice.user else '',
-                userservice_uuid=userservice.uuid,
-                service_type=info.service_type,
-            ).as_dict(),
+            connect_data.as_dict(),
         )
         return True
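The preconnect hunk above derives the login name from info.username and info.domain: UPN form (user@fqdn) when the domain contains a dot, DOMAIN\user otherwise. A standalone, runnable sketch of the same normalization rule, using a hypothetical helper name:

def normalize_username(username: str, domain: str = '') -> str:
    # Mirrors the logic in the hunk above: strip any domain already in the name,
    # then append the FQDN as UPN or prefix the NetBIOS domain.
    if not domain:
        return username
    if '@' in username:
        username = username.split('@')[0]
    if '.' in domain:               # FQDN domain -> UPN form
        return f'{username}@{domain}'
    return f'{domain}\\{username}'  # NetBIOS domain

print(normalize_username('alice', 'example.org'))     # alice@example.org
print(normalize_username('alice@old.local', 'CORP'))  # CORP\alice
print(normalize_username('bob'))                      # bob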
@@ -362,7 +362,7 @@ class StatsManager(metaclass=singleton.Singleton):
             return StatsEvents.objects.filter(id__gt=starting_id).order_by('-id')[:number]
         return StatsEvents.objects.order_by('-id')[:number]

-    def perform_events_maintenancecleanupEvents(self) -> None:
+    def perform_events_maintenancecleanup_events(self) -> None:
        """
        Removes all events previous to configured max keep time for stat information from database.
        """
@@ -234,7 +234,19 @@ class UserServiceManager(metaclass=singleton.Singleton):
         assigned = self._create_assigned_user_service_at_db_from_pool(service_pool, user)

         assigned_instance = assigned.get_instance()
-        state = assigned_instance.deploy_for_user(user)
+        try:
+            state = assigned_instance.deploy_for_user(user)
+        except MaxServicesReachedError:
+            # If we reach this point, it means that the service has reached its maximum number of user services
+            operations_logger.error(
+                'Cannot create assigned service for user %s on pool %s: Maximum number of user services reached',
+                user.pretty_name,
+                service_pool.name,
+            )
+            # Remove existing assigned service
+            if assigned.id:
+                assigned.delete()
+            raise MaxServicesReachedError()

         UserServiceOpChecker.make_unique(assigned, state)

@@ -477,7 +489,13 @@ class UserServiceManager(metaclass=singleton.Singleton):
         operations_logger.info('Removing userservice %a', userservice.name)
         if userservice.is_usable() is False and State.from_str(userservice.state).is_removable() is False:
             if not forced:
-                raise OperationException(_('Can\'t remove a non active element') + ': ' + userservice.name + ', ' + userservice.state)
+                raise OperationException(
+                    _('Can\'t remove a non active element')
+                    + ': '
+                    + userservice.name
+                    + ', '
+                    + userservice.state
+                )
         userservice.set_state(State.REMOVING)
         logger.debug("***** The state now is %s *****", State.from_str(userservice.state).localized)
         userservice.set_in_use(False)  # For accounting, ensure that it is not in use right now
@@ -760,6 +778,11 @@ class UserServiceManager(metaclass=singleton.Singleton):
             logger.warning('Could not check readyness of %s: %s', user_service, e)
             return False

+        if state == types.states.TaskState.ERROR:
+            user_service.update_data(userservice_instance)
+            user_service.set_state(State.ERROR)
+            raise InvalidServiceException('Service missing or in error state')
+
         logger.debug('State: %s', state)

         if state == types.states.TaskState.FINISHED:
@@ -33,6 +33,7 @@ import time
|
||||
import logging
|
||||
import typing
|
||||
|
||||
from uds.core.managers.notifications import NotificationsManager
|
||||
from uds.core.managers.task import BaseThread
|
||||
|
||||
from uds.models import Notifier, Notification
|
||||
@@ -43,13 +44,12 @@ from .config import DO_NOT_REPEAT
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Note that this thread will be running on the scheduler process
|
||||
class MessageProcessorThread(BaseThread):
|
||||
_keep_running: bool = True
|
||||
|
||||
_cached_providers: typing.Optional[
|
||||
list[tuple[int, NotificationProviderModule]]
|
||||
]
|
||||
_cached_providers: typing.Optional[list[tuple[int, NotificationProviderModule]]]
|
||||
_cached_stamp: float
|
||||
|
||||
def __init__(self) -> None:
|
||||
@@ -73,12 +73,14 @@ class MessageProcessorThread(BaseThread):
|
||||
return self._cached_providers
|
||||
|
||||
def run(self) -> None:
|
||||
while NotificationsManager.manager().ensure_local_db_exists() is False:
|
||||
logger.info('Waiting for local notifications database to be ready...')
|
||||
time.sleep(1)
|
||||
|
||||
while self._keep_running:
|
||||
# Locate all notifications from "persistent" and try to process them
|
||||
# If no notification can be fully resolved, it will be kept in the database
|
||||
not_before = sql_now() - datetime.timedelta(
|
||||
seconds=DO_NOT_REPEAT.as_int()
|
||||
)
|
||||
not_before = sql_now() - datetime.timedelta(seconds=DO_NOT_REPEAT.as_int())
|
||||
for n in Notification.get_persistent_queryset().all():
|
||||
# If there are any other notification simmilar to this on default db, skip it
|
||||
# Simmilar means that group, identificator and message are already been logged less than DO_NOT_REPEAT seconds ago
|
||||
@@ -119,7 +121,7 @@ class MessageProcessorThread(BaseThread):
|
||||
# logger.warning(
|
||||
# 'Could not save notification %s to main DB, trying notificators',
|
||||
# n,
|
||||
#)
|
||||
# )
|
||||
|
||||
if notify:
|
||||
for p in (i[1] for i in self.providers if i[0] >= n.level):
|
||||
|
@@ -291,9 +291,13 @@ class DynamicPublication(services.Publication, autoserializable.AutoSerializable
         if op == Operation.ERROR:
             return self._error('Machine is already in error state!')

+        destroy_operations = [
+            types.services.Operation.DESTROY_VALIDATOR
+        ] + self._destroy_queue  # copy is not needed due to list concatenation
+
         # If a "paused" state, reset queue to destroy
         if op == Operation.FINISH:
-            self._queue = self._destroy_queue.copy()
+            self._queue = destroy_operations
             return self._execute_queue()

         # If must wait until finish, flag for destroy and wait
@@ -301,9 +305,7 @@ class DynamicPublication(services.Publication, autoserializable.AutoSerializable
             self._is_flagged_for_destroy = True
         else:
             # If other operation, wait for finish before destroying
-            self._queue = [
-                op
-            ] + self._destroy_queue  # Copy not needed, will be copied anyway due to list concatenation
+            self._queue = [op] + destroy_operations  # Add destroy operations to the queue
             # Do not execute anything.here, just continue normally
         return types.states.TaskState.RUNNING
@@ -178,14 +178,25 @@ class DynamicService(services.Service, abc.ABC):  # pylint: disable=too-many-pub
         caller_instance: typing.Optional['DynamicUserService | DynamicPublication'],
         vmid: str,
-        force_new: bool = False,
+        *,
+        for_unique_id: bool = False,
     ) -> str:
         """
         Returns the mac of the machine
         If cannot be obtained, MUST raise an exception
+        Args:
+            caller_instance: The instance of the caller
+            vmid: The vmid of the machine
+            for_unique_id: Whether to force a new mac address

         Note:
-            vmid can be '' or force_new can be True, in this case, a new mac must be generated
-            If the service does not support this, it can raise an exception
+            vmid can be '', or for_unique_id can be True. Is up tu the service to treat this situation
+            Why is this?
+            Because we need to give the oportunity to discern if the call to the get_mac is for a new unique_id
+            for de userservice, or it is to force to generate one.
+            For example:
+            some_userservice ---> get_unique_id --> get_mac('xxxxxxx', for_unique_id=True) --> '' (because no mac until the end of the process...)
+            some_userservice ---> on termination --> get_mac('', for_unique_id=False) --> 'the_mac' (because at the end, the mac will be available)

         """
         ...
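The reworked get_mac() docstring above distinguishes calls made while computing a provisional unique id (for_unique_id=True, when the machine may not exist yet) from calls that expect the final MAC. A hypothetical subclass sketch, only to illustrate one way a concrete service might honor the flag; the class and helper names are invented, and the docstring explicitly leaves the exact semantics to each service:

# Illustrative only; not an actual OpenUDS service implementation.
class ExampleDynamicService:  # would derive from DynamicService in real code
    def get_mac(self, caller_instance, vmid: str, *, for_unique_id: bool = False) -> str:
        if not vmid:
            # No machine yet: when only a provisional unique id is requested,
            # return '' so the caller can fall back, instead of failing outright.
            if for_unique_id:
                return ''
            raise ValueError('Cannot obtain MAC: no vmid available')
        return self._lookup_mac_in_hypervisor(vmid)  # hypothetical helper

    def _lookup_mac_in_hypervisor(self, vmid: str) -> str:
        return '02:00:00:00:00:01'  # placeholder value for the sketch

svc = ExampleDynamicService()
print(svc.get_mac(None, '', for_unique_id=True))  # '' -> caller derives unique id elsewhere
print(svc.get_mac(None, 'vm-1234'))               # real lookup once the machine exists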
@@ -252,9 +252,14 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
op = self._current_op()
|
||||
|
||||
if op == types.services.Operation.ERROR:
|
||||
return types.states.TaskState.ERROR
|
||||
return types.states.TaskState.ERROR # Error is returned as soon as we find it
|
||||
|
||||
# We also check here the finish operation, because some other methods
|
||||
# as set_ready, expects this to return FINISHED. So keeping this here
|
||||
# is a good idea, because no check is needed if already in FINISH state
|
||||
if op == types.services.Operation.FINISH:
|
||||
if def_op := self._check_deferred_operations(): # Check if we have deferred operations to execute
|
||||
return def_op # If we have deferred operations, return their state
|
||||
return types.states.TaskState.FINISHED
|
||||
|
||||
try:
|
||||
@@ -278,6 +283,21 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
logger.exception('Unexpected DynamicUserService exception: %s', e)
|
||||
return self.error(e)
|
||||
|
||||
def _check_deferred_operations(self) -> typing.Optional[types.states.TaskState]:
|
||||
"""
|
||||
Checks if we have deferred operations to execute.
|
||||
Deferred operations are operations that are not executed immediately, but are stored in the queue
|
||||
to be executed later.
|
||||
"""
|
||||
# If has a deferred destroy, do it now
|
||||
if self.wait_until_finish_to_destroy and self._is_flagged_for_destroy:
|
||||
# Simply ensures nothing is left on queue and returns FINISHED
|
||||
logger.debug('Destroying service after finish')
|
||||
self._set_queue([types.services.Operation.FINISH])
|
||||
return self.destroy()
|
||||
|
||||
return None
|
||||
|
||||
@typing.final
|
||||
def retry_later(self) -> types.states.TaskState:
|
||||
"""
|
||||
@@ -342,7 +362,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
# Note that get_mac is used for creating a new mac, returning the one of the vm or whatever
|
||||
# This is responsibility of the service, not of the user service
|
||||
if not self._mac:
|
||||
self._mac = self.service().get_mac(self, self._vmid, force_new=True) or ''
|
||||
self._mac = self.service().get_mac(self, self._vmid, for_unique_id=True) or ''
|
||||
return self._mac
|
||||
|
||||
@typing.final
|
||||
@@ -354,7 +374,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
self._ip = self.service().get_ip(self, self._vmid)
|
||||
except Exception:
|
||||
logger.warning(
|
||||
'Error obtaining IP for %s: %s', self.__class__.__name__, self._vmid, exc_info=True
|
||||
'Error obtaining IP for %s: %s', self.__class__.__name__, self._vmid # , exc_info=True
|
||||
)
|
||||
return self._ip
|
||||
|
||||
@@ -413,6 +433,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
)
|
||||
except Exception as e:
|
||||
return self.error(f'Error on set_ready: {e}')
|
||||
|
||||
return self._execute_queue()
|
||||
|
||||
def reset(self) -> types.states.TaskState:
|
||||
@@ -438,12 +459,8 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
return types.states.TaskState.ERROR
|
||||
|
||||
if op == types.services.Operation.FINISH:
|
||||
# If has a deferred destroy, do it now
|
||||
if self.wait_until_finish_to_destroy and self._is_flagged_for_destroy:
|
||||
self._is_flagged_for_destroy = False
|
||||
# Simply ensures nothing is left on queue and returns FINISHED
|
||||
self._set_queue([types.services.Operation.FINISH])
|
||||
return self.destroy()
|
||||
if def_op := self._check_deferred_operations(): # Check if we have deferred operations to execute
|
||||
return def_op # If we have deferred operations, return their state
|
||||
return types.states.TaskState.FINISHED
|
||||
|
||||
if op != types.services.Operation.WAIT:
|
||||
@@ -499,6 +516,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
|
||||
# If a "paused" state, reset queue to destroy
|
||||
if op in (types.services.Operation.FINISH, types.services.Operation.WAIT):
|
||||
logger.debug('Destroying service with these operations: %s', destroy_operations)
|
||||
self._set_queue(destroy_operations)
|
||||
return self._execute_queue()
|
||||
|
||||
@@ -548,6 +566,14 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
If you override this method, you should take care yourself of removing duplicated machines
|
||||
(maybe only calling "super().op_initialize()" method)
|
||||
"""
|
||||
# By default, should return a VALID username and unique_id
|
||||
# Note that valid is anything different from consts.NO_MORE_NAMES or consts.NO_MORE_MACS
|
||||
if self.get_name() == consts.NO_MORE_NAMES:
|
||||
self.error('No more names available') # Will mark as error and check will note it
|
||||
return
|
||||
if self.get_unique_id() == consts.NO_MORE_MACS:
|
||||
self.error('No more MACs available') # Will mark as error and check will note it
|
||||
return
|
||||
self.remove_duplicates()
|
||||
|
||||
@abc.abstractmethod
|
||||
@@ -561,7 +587,9 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
"""
|
||||
This method is called when the service creation is completed
|
||||
"""
|
||||
pass
|
||||
# By default, get the MAC address if not set already by get_unique_id at start
|
||||
if self._mac == '' and self._vmid != '':
|
||||
self._mac = self.service().get_mac(self, self._vmid)
|
||||
|
||||
@must_have_vmid
|
||||
def op_start(self) -> None:
|
||||
@@ -858,8 +886,16 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
|
||||
def _op2str(op: types.services.Operation) -> str:
|
||||
return op.name
|
||||
|
||||
def _debug(self, txt: str) -> str:
|
||||
return f'Queue at {txt} for {self._name}: {", ".join([DynamicUserService._op2str(op) for op in self._queue])}, mac:{self._mac}, vmid:{self._vmid}'
|
||||
def _debug(self, txt: str) -> None:
|
||||
# f'Queue at {txt} for {self._name}: {", ".join([DynamicUserService._op2str(op) for op in self._queue])}, mac:{self._mac}, vmid:{self._vmid}'
|
||||
logger.debug(
|
||||
'Queue at %s for %s: %s, mac:%s, vmid:%s',
|
||||
txt,
|
||||
self._name,
|
||||
', '.join([DynamicUserService._op2str(op) for op in self._queue]),
|
||||
self._mac,
|
||||
self._vmid,
|
||||
)
|
||||
|
||||
|
||||
# This is a map of operations to methods
|
||||
|
@@ -50,6 +50,7 @@ from . import (
     ui,
     core,
     log,
+    net,
 )

 # Log is not imported here, as it is a special case with lots of dependencies
server/src/uds/core/types/net.py (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2025 Virtual Cable S.L.U.
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
# * Neither the name of Virtual Cable S.L.U. nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software
|
||||
# without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""
|
||||
Author: Adolfo Gómez, dkmaster at dkmon dot com
|
||||
"""
|
||||
import dataclasses
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Iface:
|
||||
name: str
|
||||
mac: str
|
||||
ip: str
|
@@ -137,6 +137,38 @@ class ServerDiskInfo:
         }


+@dataclasses.dataclass
+class ServerStatsWeights:
+    cpu: float = 0.3
+    memory: float = 0.6
+    users: float = 0.1
+    max_users: int = 100  # Max users to consider in load calculation
+
+    def normalize(self) -> 'ServerStatsWeights':
+        total = self.cpu + self.memory + self.users
+        self.cpu /= total
+        self.memory /= total
+        self.users /= total
+        return self
+
+    def as_dict(self) -> dict[str, float]:
+        return {
+            'cpu': self.cpu,
+            'memory': self.memory,
+            'users': self.users,
+            'max_users': self.max_users,
+        }
+
+    @staticmethod
+    def from_dict(data: dict[str, float]) -> 'ServerStatsWeights':
+        return ServerStatsWeights(
+            data.get('cpu', 0.3),
+            data.get('memory', 0.6),
+            data.get('users', 0.1),
+            int(data.get('max_users', 100)),
+        ).normalize()
+
+
 @dataclasses.dataclass
 class ServerStats:
     memused: int = 0  # In bytes
@@ -165,21 +197,23 @@ class ServerStats:

         return self.stamp > sql_stamp() - consts.cache.DEFAULT_CACHE_TIMEOUT

-    def load(self, min_memory: int = 0) -> float:
+    def load(self, *, min_memory: int = 0, weights: ServerStatsWeights | None = None) -> float:
         # Loads are calculated as:
         # 30% cpu usage
         # 60% memory usage
-        # 10% current users, with a max of 1000 users
+        # 10% current users, with a max of 100 users
         # Loads are normalized to 0-1
         # Lower weight is better

+        weights = (weights or ServerStatsWeights()).normalize()
+
         if self.memtotal - self.memused < min_memory:
             return 1000000000  # At the end of the list

         w = (
-            0.3 * self.cpuused
-            + 0.6 * (self.memused / (self.memtotal or 1))
-            + 0.1 * (min(1.0, self.current_users / 100.0))
+            weights.cpu * self.cpuused
+            + weights.memory * (self.memused / (self.memtotal or 1))
+            + weights.users * (min(1.0, self.current_users / weights.max_users))
         )

         return min(max(0.0, w), 1.0)
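The new ServerStatsWeights dataclass and the reworked load() above produce a 0-1 score from CPU, memory and user count, with the weights normalized so they sum to 1. A small self-contained re-implementation of the same formula with made-up numbers (not an import of the real classes):

from dataclasses import dataclass

@dataclass
class Weights:  # mirrors the ServerStatsWeights defaults from the diff
    cpu: float = 0.3
    memory: float = 0.6
    users: float = 0.1
    max_users: int = 100

    def normalize(self) -> 'Weights':
        total = self.cpu + self.memory + self.users
        self.cpu, self.memory, self.users = self.cpu / total, self.memory / total, self.users / total
        return self

def load(cpuused: float, memused: int, memtotal: int, current_users: int, w: Weights) -> float:
    # Weighted sum of CPU ratio, memory ratio and (capped) user ratio, clamped to 0..1.
    w = w.normalize()
    score = (
        w.cpu * cpuused
        + w.memory * (memused / (memtotal or 1))
        + w.users * min(1.0, current_users / w.max_users)
    )
    return min(max(0.0, score), 1.0)

# 50% CPU, 8 of 16 GiB used, 20 users -> 0.3*0.5 + 0.6*0.5 + 0.1*0.2 = 0.47
print(load(0.5, 8, 16, 20, Weights()))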
@@ -586,7 +586,7 @@ class gui:
                     validators.validate_hostname(self.value)
                 case types.ui.FieldPatternType.HOST:
                     try:
-                        validators.validate_hostname(self.value, domain_allowed=True)
+                        validators.validate_hostname(self.value, allow_domain=True)
                     except exceptions.ui.ValidationError:
                         validators.validate_ip(self.value)
                 case types.ui.FieldPatternType.PATH:
server/src/uds/core/util/cluster.py (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
import datetime
|
||||
import logging
|
||||
import socket
|
||||
import typing
|
||||
|
||||
from django.db import transaction, OperationalError
|
||||
|
||||
from uds import models
|
||||
from uds.core.util.iface import get_first_iface
|
||||
from uds.core.util.model import sql_now, get_my_ip_from_db
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UDSClusterNode(typing.NamedTuple):
|
||||
"""
|
||||
Represents a node in the cluster with its hostname and last seen date.
|
||||
"""
|
||||
|
||||
hostname: str
|
||||
ip: str
|
||||
last_seen: datetime.datetime
|
||||
mac: str = '00:00:00:00:00:00'
|
||||
|
||||
def as_dict(self) -> dict[str, str]:
|
||||
"""
|
||||
Returns a dictionary representation of the UDSClusterNode.
|
||||
"""
|
||||
return {
|
||||
'hostname': self.hostname,
|
||||
'ip': self.ip,
|
||||
'last_seen': self.last_seen.isoformat(),
|
||||
'mac': self.mac,
|
||||
}
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f'{self.hostname} ({self.ip}) - Last seen: {self.last_seen.isoformat()} - MAC: {self.mac}'
|
||||
|
||||
|
||||
def store_cluster_info() -> None:
|
||||
"""
|
||||
Stores the current hostname in the database, ensuring that it is unique.
|
||||
This is used to identify the current node in a cluster.
|
||||
"""
|
||||
iface = get_first_iface()
|
||||
ip = iface.ip if iface else get_my_ip_from_db()
|
||||
mac = iface.mac if iface else '00:00:00:00:00:00'
|
||||
|
||||
try:
|
||||
hostname = socket.getfqdn() + '|' + ip
|
||||
date = sql_now().isoformat()
|
||||
with transaction.atomic():
|
||||
current_host_property = (
|
||||
models.Properties.objects.select_for_update()
|
||||
.filter(owner_id='cluster', owner_type='cluster', key=hostname)
|
||||
.first()
|
||||
)
|
||||
if current_host_property:
|
||||
# Update existing property
|
||||
current_host_property.value = {'last_seen': date, 'mac': mac}
|
||||
current_host_property.save()
|
||||
else:
|
||||
# Create new property
|
||||
models.Properties.objects.create(
|
||||
owner_id='cluster', owner_type='cluster', key=hostname, value={'last_seen': date}
|
||||
)
|
||||
|
||||
except OperationalError as e:
|
||||
# If we cannot connect to the database, we log the error
|
||||
logger.error("Could not store cluster hostname: %s", e)
|
||||
|
||||
|
||||
def enumerate_cluster_nodes() -> list[UDSClusterNode]:
|
||||
"""
|
||||
Enumerates all nodes in the cluster by fetching properties with owner_type 'cluster'.
|
||||
Returns a list of hostnames.
|
||||
"""
|
||||
try:
|
||||
properties = models.Properties.objects.filter(owner_type='cluster')
|
||||
return [
|
||||
UDSClusterNode(
|
||||
hostname=prop.key.split('|')[0],
|
||||
ip=prop.key.split('|')[1],
|
||||
last_seen=datetime.datetime.fromisoformat(prop.value['last_seen']),
|
||||
mac=prop.value.get('mac', '00:00:00:00:00:00'),
|
||||
)
|
||||
for prop in properties
|
||||
if 'last_seen' in prop.value and '|' in prop.key
|
||||
]
|
||||
except OperationalError as e:
|
||||
# If we cannot connect to the database, we log the error and return an empty list
|
||||
logger.error("Could not enumerate cluster nodes: %s", e)
|
||||
return []
|
@@ -47,6 +47,7 @@ logger = logging.getLogger(__name__)
|
||||
P = typing.ParamSpec('P')
|
||||
R = typing.TypeVar('R')
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class CacheInfo:
|
||||
"""
|
||||
@@ -150,17 +151,18 @@ HasConnect = typing.TypeVar('HasConnect', bound=_HasConnect)
|
||||
|
||||
|
||||
def ensure_connected(
|
||||
func: collections.abc.Callable[typing.Concatenate[HasConnect, P], R]
|
||||
func: collections.abc.Callable[typing.Concatenate[HasConnect, P], R],
|
||||
) -> collections.abc.Callable[typing.Concatenate[HasConnect, P], R]:
|
||||
"""This decorator calls "connect" method of the class of the wrapped object"""
|
||||
|
||||
@functools.wraps(func)
|
||||
def new_func(obj: HasConnect, /, *args: P.args, **kwargs: P.kwargs) -> R:
|
||||
def connect_and_execute(obj: HasConnect, /, *args: P.args, **kwargs: P.kwargs) -> R:
|
||||
# self = typing.cast(_HasConnect, args[0])
|
||||
obj.connect()
|
||||
return func(obj, *args, **kwargs)
|
||||
|
||||
return new_func
|
||||
return connect_and_execute
|
||||
|
||||
|
||||
# To be used in a future, for type checking only
|
||||
# currently the problem is that the signature of a function is diferent
|
||||
@@ -175,6 +177,7 @@ def ensure_connected(
|
||||
# Now, we could use this by creating two decorators, one for the class methods and one for the functions
|
||||
# But the inheritance problem will still be there, so we will keep the current implementation
|
||||
|
||||
|
||||
# Decorator for caching
|
||||
# This decorator will cache the result of the function for a given time, and given parameters
|
||||
def cached(
|
||||
@@ -244,7 +247,7 @@ def cached(
|
||||
except Exception:
|
||||
logger.debug('Function %s is not inspectable, no caching possible', fnc.__name__)
|
||||
# Not inspectable, no caching possible, return original function
|
||||
|
||||
|
||||
# Ensure compat with methods of cached functions
|
||||
setattr(fnc, 'cache_info', cache_info)
|
||||
setattr(fnc, 'cache_clear', cache_clear)
|
||||
@@ -370,28 +373,30 @@ def blocker(
|
||||
try:
|
||||
return f(*args, **kwargs)
|
||||
except uds.core.exceptions.rest.BlockAccess:
|
||||
raise exceptions.rest.AccessDenied
|
||||
raise exceptions.rest.AccessDenied()
|
||||
|
||||
request: typing.Optional[typing.Any] = getattr(args[0], request_attr or '_request', None)
|
||||
request: typing.Any = getattr(args[0], request_attr or '_request', None)
|
||||
|
||||
# No request object, so we can't block
|
||||
if request is None or not isinstance(request, types.requests.ExtendedHttpRequest):
|
||||
if request is None or getattr(request, 'ip', None) is None:
|
||||
logger.debug('No request object, so we can\'t block: (value is %s)', request)
|
||||
return f(*args, **kwargs)
|
||||
|
||||
request = typing.cast(types.requests.ExtendedHttpRequest, request)
|
||||
|
||||
ip = request.ip
|
||||
|
||||
# if ip is blocked, raise exception
|
||||
failures_count: int = mycache.get(ip, 0)
|
||||
if failures_count >= max_failures:
|
||||
raise exceptions.rest.AccessDenied
|
||||
raise exceptions.rest.AccessDenied()
|
||||
|
||||
try:
|
||||
result = f(*args, **kwargs)
|
||||
except uds.core.exceptions.rest.BlockAccess:
|
||||
# Increment
|
||||
mycache.put(ip, failures_count + 1, GlobalConfig.LOGIN_BLOCK.as_int())
|
||||
raise exceptions.rest.AccessDenied
|
||||
raise exceptions.rest.AccessDenied()
|
||||
# Any other exception will be raised
|
||||
except Exception:
|
||||
raise
|
||||
@@ -441,3 +446,40 @@ def profiler(
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def retry_on_exception(
|
||||
retries: int,
|
||||
*,
|
||||
wait_seconds: float = 2,
|
||||
retryable_exceptions: typing.Optional[typing.List[typing.Type[Exception]]] = None,
|
||||
do_log: bool = False,
|
||||
) -> collections.abc.Callable[[collections.abc.Callable[P, R]], collections.abc.Callable[P, R]]:
|
||||
to_retry = retryable_exceptions or [Exception]
|
||||
|
||||
def decorator(fnc: collections.abc.Callable[P, R]) -> collections.abc.Callable[P, R]:
|
||||
@functools.wraps(fnc)
|
||||
def wrapper(*args: typing.Any, **kwargs: typing.Any) -> R:
|
||||
for i in range(retries):
|
||||
try:
|
||||
return fnc(*args, **kwargs)
|
||||
except Exception as e:
|
||||
if do_log:
|
||||
logger.error('Exception raised in function %s: %s', fnc.__name__, e)
|
||||
|
||||
if not any(isinstance(e, exception_type) for exception_type in to_retry):
|
||||
raise e
|
||||
|
||||
# if this is the last retry, raise the exception
|
||||
if i == retries - 1:
|
||||
raise e
|
||||
|
||||
time.sleep(wait_seconds * (2 ** min(i, 4))) # Exponential backoff until 16x
|
||||
|
||||
# retries == 0 allowed, but only use it for testing purposes
|
||||
# because it's a nonsensical decorator otherwise
|
||||
return fnc(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
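The new retry_on_exception decorator above retries a callable up to `retries` times with exponential backoff (capped at 16x wait_seconds) and immediately re-raises exception types not listed in retryable_exceptions. A hedged usage sketch with a toy flaky function; the decorated function is invented, and the import path is assumed rather than shown in this diff:

# Illustrative usage only.
import random
from uds.core.util.decorators import retry_on_exception  # import path assumed

@retry_on_exception(3, wait_seconds=0.1, retryable_exceptions=[ConnectionError], do_log=True)
def flaky_ping() -> str:
    # Roughly one call in three fails with a retryable error in this toy example.
    if random.random() < 0.33:
        raise ConnectionError('transient network hiccup')
    return 'pong'

print(flaky_ping())  # retried up to 3 times; a ValueError would propagate immediately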
server/src/uds/core/util/iface.py (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2014-2023 Virtual Cable S.L.
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
# * Neither the name of Virtual Cable S.L. nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software
|
||||
# without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
'''
|
||||
@author: Adolfo Gómez, dkmaster at dkmon dot com
|
||||
'''
|
||||
import platform
|
||||
import socket
|
||||
import fcntl
|
||||
import struct
|
||||
import array
|
||||
import typing
|
||||
|
||||
from uds.core import types
|
||||
|
||||
|
||||
def list_ifaces() -> typing.Iterator[types.net.Iface]:
|
||||
def _get_iface_mac_addr(ifname: str) -> typing.Optional[str]:
|
||||
'''
|
||||
Returns the mac address of an interface
|
||||
Mac is returned as unicode utf-8 encoded
|
||||
'''
|
||||
ifnameBytes = ifname.encode('utf-8')
|
||||
try:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
info = bytearray(fcntl.ioctl(s.fileno(), 0x8927, struct.pack(str('256s'), ifnameBytes[:15])))
|
||||
return str(''.join(['%02x:' % char for char in info[18:24]])[:-1]).upper()
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _get_iface_ip_addr(ifname: str) -> typing.Optional[str]:
|
||||
'''
|
||||
Returns the ip address of an interface
|
||||
Ip is returned as unicode utf-8 encoded
|
||||
'''
|
||||
ifnameBytes = ifname.encode('utf-8')
|
||||
try:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
return str(
|
||||
socket.inet_ntoa(
|
||||
fcntl.ioctl(
|
||||
s.fileno(),
|
||||
0x8915, # SIOCGIFADDR
|
||||
struct.pack(str('256s'), ifnameBytes[:15]),
|
||||
)[20:24]
|
||||
)
|
||||
)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _list_ifaces() -> list[str]:
|
||||
'''
|
||||
Returns a list of interfaces names coded in utf-8
|
||||
'''
|
||||
max_possible = 128 # arbitrary. raise if needed.
|
||||
space = max_possible * 16
|
||||
if platform.architecture()[0] == '32bit':
|
||||
offset, length = 32, 32
|
||||
elif platform.architecture()[0] == '64bit':
|
||||
offset, length = 16, 40
|
||||
else:
|
||||
raise OSError('Unknown arquitecture {0}'.format(platform.architecture()[0]))
|
||||
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
names = array.array(str('B'), b'\0' * space)
|
||||
outbytes = struct.unpack(
|
||||
'iL',
|
||||
fcntl.ioctl(
|
||||
s.fileno(),
|
||||
0x8912, # SIOCGIFCONF
|
||||
struct.pack('iL', space, names.buffer_info()[0]),
|
||||
),
|
||||
)[0]
|
||||
namestr = names.tobytes()
|
||||
# return namestr, outbytes
|
||||
return [namestr[i : i + offset].split(b'\0', 1)[0].decode('utf-8') for i in range(0, outbytes, length)]
|
||||
|
||||
for ifname in _list_ifaces():
|
||||
ip, mac = _get_iface_ip_addr(ifname), _get_iface_mac_addr(ifname)
|
||||
if (
|
||||
mac != '00:00:00:00:00:00' and mac and ip and ip.startswith('169.254') is False
|
||||
): # Skips local interfaces & interfaces with no dhcp IPs
|
||||
yield types.net.Iface(name=ifname, mac=mac, ip=ip)
|
||||
|
||||
def get_first_iface() -> typing.Optional[types.net.Iface]:
|
||||
"""
|
||||
Returns the first interface found, or None if no interface is found.
|
||||
"""
|
||||
try:
|
||||
return next(list_ifaces())
|
||||
except StopIteration:
|
||||
return None
|
@@ -75,7 +75,9 @@ class TimeTrack:
|
||||
if connection.vendor in ('mysql', 'microsoft', 'postgresql'):
|
||||
cursor = connection.cursor()
|
||||
sentence = (
|
||||
'SELECT CURRENT_TIMESTAMP(4)' if connection.vendor in ('mysql', 'postgresql') else 'SELECT CURRENT_TIMESTAMP'
|
||||
'SELECT CURRENT_TIMESTAMP(4)'
|
||||
if connection.vendor in ('mysql', 'postgresql')
|
||||
else 'SELECT CURRENT_TIMESTAMP'
|
||||
)
|
||||
cursor.execute(sentence)
|
||||
date = (cursor.fetchone() or [datetime.datetime.now()])[0]
|
||||
@@ -94,12 +96,15 @@ class TimeTrack:
|
||||
# If in last_check is in the future, or more than CACHE_TIME_TIMEOUT seconds ago, we need to refresh
|
||||
# Future is possible if we have a clock update, or a big drift
|
||||
if diff > datetime.timedelta(seconds=CACHE_TIME_TIMEOUT) or diff < datetime.timedelta(seconds=0):
|
||||
TimeTrack.last_check = now
|
||||
TimeTrack.misses += 1
|
||||
TimeTrack.cached_time = TimeTrack._fetch_sql_datetime()
|
||||
TimeTrack.last_check = now
|
||||
else:
|
||||
TimeTrack.hits += 1
|
||||
return TimeTrack.cached_time + (now - TimeTrack.last_check)
|
||||
the_time = TimeTrack.cached_time + (now - TimeTrack.last_check)
|
||||
# Keep only cent of second precision
|
||||
the_time = the_time.replace(microsecond=int(the_time.microsecond / 10000) * 10000)
|
||||
return the_time
|
||||
|
||||
|
||||
def sql_now() -> datetime.datetime:
|
||||
@@ -131,10 +136,43 @@ def generate_uuid(obj: typing.Any = None) -> str:
|
||||
"""
|
||||
Generates a ramdom uuid for models default
|
||||
"""
|
||||
return CryptoManager().uuid(obj=obj).lower()
|
||||
return CryptoManager.manager().uuid(obj=obj).lower()
|
||||
|
||||
|
||||
def process_uuid(uuid: str) -> str:
|
||||
if isinstance(uuid, bytes):
|
||||
uuid = uuid.decode('utf8')
|
||||
return uuid.lower()
|
||||
|
||||
|
||||
def get_my_ip_from_db() -> str:
|
||||
"""
|
||||
Gets, from the database, the IP of the current server.
|
||||
"""
|
||||
# Mysql query:
|
||||
# SELECT host FROM information_schema.processlist WHERE ID = CONNECTION_ID();
|
||||
# Postgres query: SELECT client_addr FROM pg_stat_activity WHERE pid = pg_backend_pid();
|
||||
# sql server: SELECT client_net_address FROM sys.dm_exec_connections WHERE session_id = @@SPID;
|
||||
|
||||
try:
|
||||
match connection.vendor:
|
||||
case 'mysql':
|
||||
query = 'SELECT host FROM information_schema.processlist WHERE ID = CONNECTION_ID();'
|
||||
case 'postgresql':
|
||||
query = 'SELECT client_addr FROM pg_stat_activity WHERE pid = pg_backend_pid();'
|
||||
case 'microsoft':
|
||||
query = 'SELECT client_net_address FROM sys.dm_exec_connections WHERE session_id = @@SPID;'
|
||||
case _:
|
||||
return '0.0.0.0' # If not known, return a default IP
|
||||
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(query)
|
||||
result = cursor.fetchone()
|
||||
if result:
|
||||
result = result[0] if isinstance(result[0], str) else result[0].decode('utf8')
|
||||
return result.split(':')[0]
|
||||
|
||||
except Exception as e:
|
||||
logger.error('Error getting my IP: %s', e)
|
||||
|
||||
return '0.0.0.0'
|
||||
|
@@ -76,8 +76,8 @@ def get_urlpatterns_from_modules() -> list[typing.Any]:
                 # Append patters from mod
                 for up in urlpatterns:
                     patterns.append(up)
-            except Exception:
-                logger.error('No patterns found in %s', module_fullname)
+            except Exception as e:
+                logger.error('No patterns found in %s (%s)', module_fullname, e)
     except Exception:
         logger.exception('Processing dispatchers loading')
@@ -165,7 +165,7 @@ def check_certificate_matches_private_key(*, cert: str, key: str) -> bool:
         return False


-def secure_requests_session(*, verify: 'str|bool' = True) -> 'requests.Session':
+def secure_requests_session(*, verify: 'str|bool' = True, proxies: 'dict[str, str]|None' = None) -> 'requests.Session':
     '''
     Generates a requests.Session object with a custom adapter that uses a custom SSLContext.
     This is intended to be used for requests that need to be secure, but not necessarily verified.
@@ -221,6 +221,9 @@ def secure_requests_session(*, verify: 'str|bool' = True) -> 'requests.Session':

     session = requests.Session()
     session.mount("https://", UDSHTTPAdapter())

+    if proxies is not None:
+        session.proxies = proxies
+
     # Add user agent header to session
     session.headers.update({"User-Agent": consts.system.USER_AGENT})
@@ -111,6 +111,7 @@ class UniqueGenerator:
                 seq = range_start

             if seq > range_end:
+                logger.error('No more ids available in range %s - %s', range_start, range_end)
                 return -1  # No ids free in range

             # May ocurr on some circustance that a concurrency access gives same item twice, in this case, we
@@ -33,6 +33,8 @@ Author: Adolfo Gómez, dkmaster at dkmon dot com
|
||||
import logging
|
||||
import re
|
||||
|
||||
from uds.core import consts
|
||||
|
||||
from .unique_id_generator import UniqueGenerator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -48,8 +50,9 @@ class UniqueMacGenerator(UniqueGenerator):
|
||||
return int(mac.replace(':', ''), 16)
|
||||
|
||||
def _to_mac_addr(self, seq: int) -> str:
|
||||
if seq == -1: # No mor macs available
|
||||
return '00:00:00:00:00:00'
|
||||
if seq == -1: # No more macs available
|
||||
logger.error('No more MAC addresses available')
|
||||
return consts.NO_MORE_MACS
|
||||
return re.sub(r"(..)", r"\1:", f'{seq:012X}')[:-1]
|
||||
|
||||
# Mac Generator rewrites the signature of parent class, so we need to redefine it here
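
The generator maps an integer sequence number to a colon-separated MAC string; the re.sub inserts a colon after every two hex digits of the zero-padded value. A standalone illustration with an arbitrary sequence number:

import re

def to_mac(seq: int) -> str:
    # 12 uppercase hex digits, then 'AA:BB:CC:...' (same regex as in the diff above)
    return re.sub(r"(..)", r"\1:", f'{seq:012X}')[:-1]

print(to_mac(0x0AB1C2D3E4F5))  # -> 0A:B1:C2:D3:E4:F5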
@@ -93,15 +93,32 @@ def validate_numeric(


def validate_hostname(
    hostname: str, max_length: int = 64, domain_allowed: bool = False, field_name: typing.Optional[str] = None
    hostname: str, max_length: int = 64, allow_domain: bool = False, field_name: typing.Optional[str] = None
) -> str:
    """
    Validates that a hostname is valid

    Args:
        hostname (str): Hostname to validate
        max_length (int, optional): Maximum length of the hostname. Defaults to 64.
        domain_allowed (bool, optional): If True, allows domains in the hostname. Defaults to False.
        field_name (typing.Optional[str], optional): If present, the name of the field for "Raising" exceptions.
            If not present, the exception will be raised with the message "Invalid hostname". Defaults to None.

    Returns:
        str: The validated hostname

    Raises:
        exceptions.ValidationException: If value is not valid
    """
    hostname = hostname.strip()
    field_name = f' (On field {field_name})' if field_name else ''
    if len(hostname) > max_length:
        raise exceptions.ui.ValidationError(
            _('{} is not a valid hostname: maximum host name length exceeded.').format(hostname + field_name)
        )

    if not domain_allowed:
    if not allow_domain:
        if '.' in hostname:
            raise exceptions.ui.ValidationError(
                _('{} is not a valid hostname: (domains not allowed)').format(hostname + field_name)
@@ -118,7 +135,7 @@ def validate_hostname(


def validate_fqdn(fqdn: str, max_length: int = 255, field_name: typing.Optional[str] = None) -> str:
    return validate_hostname(fqdn, max_length, domain_allowed=True, field_name=field_name)
    return validate_hostname(fqdn, max_length, allow_domain=True, field_name=field_name)


def validate_url(url: str, max_length: int = 1024, field_name: typing.Optional[str] = None) -> str:
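
After the rename, allow_domain controls whether dotted names are accepted, and validate_fqdn simply forwards with allow_domain=True. A short usage sketch; the import path uds.core.util.validators is assumed rather than stated in this diff:

from uds.core.util import validators  # import path assumed

print(validators.validate_hostname('server01', max_length=64, allow_domain=False))  # 'server01'
print(validators.validate_fqdn('server01.example.org'))                             # 'server01.example.org'
# validators.validate_hostname('server01.example.org', allow_domain=False) would raise
# a validation error, because dots are rejected when allow_domain is False.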
File diff suppressed because it is too large
@@ -25,6 +25,8 @@
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Abdel Baaddi <abaaddi@virtualcable.es>, 2016-2018
|
||||
# Víctor Alonso <valonso@virtualcable.net>, 2020
|
||||
# Víctor Alonso <valonso@virtualcable.net>, 2020
|
||||
@@ -33,7 +35,7 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenUDS\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
|
||||
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
|
||||
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
|
||||
"Last-Translator: Víctor Alonso <valonso@virtualcable.net>, 2020\n"
|
||||
"Language-Team: Arabic (http://app.transifex.com/openuds/openuds/language/"
|
||||
@@ -159,7 +161,6 @@ msgstr "الرئيسية"
|
||||
#: static/admin/translations-fakejs.js:210
|
||||
#: static/admin/translations-fakejs.js:218
|
||||
#: static/admin/translations-fakejs.js:226
|
||||
#: static/admin/translations-fakejs.js:228
|
||||
#: static/admin/translations-fakejs.js:230
|
||||
#: static/admin/translations-fakejs.js:258
|
||||
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
|
||||
@@ -798,12 +799,10 @@ msgid "If selected, will initiate the publication inmediatly after creation"
|
||||
msgstr "في حالة ألإختيار سيتم الشروع في النشر مباشرة بعد الإنشاء "
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
|
||||
#| msgid "Service Pools"
|
||||
msgid "Service Pool is locked"
|
||||
msgstr "تم قفل مجموعة الخدمات"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
|
||||
#| msgid "Service is in maintenance and cannot be executed"
|
||||
msgid "This service pool is locked and cannot be edited"
|
||||
msgstr "تم قفل مجموعة الخدمات هذه ولا يمكن تحريرها"
|
||||
|
||||
@@ -901,7 +900,6 @@ msgid "Delete cached service"
|
||||
msgstr "حذف الخدمة المخبأة"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
|
||||
#| msgid "Service pools"
|
||||
msgid "Service pool is locked"
|
||||
msgstr "تم قفل مجموعة الخدمة"
|
||||
|
||||
@@ -1168,9 +1166,13 @@ msgstr "حذف استخدام الحساب"
|
||||
msgid "Image is too big (max. upload size is 256Kb)"
|
||||
msgstr "الصورة كبيرة جداً (بحد أقصى لحجم التحميل هو 256 كيلو بايت)"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
|
||||
msgid "Error on upload"
|
||||
msgstr "خطأ في التحميل"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
|
||||
msgid "Invalid image type (only supports JPEG, PNG and GIF"
|
||||
msgstr "نوع الصورة غير صالح (يدعم فقط JPEG و PNG و GIF"
|
||||
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
|
||||
msgstr "نوع الصورة غير صالح. الأنواع المدعومة هي: JPEG، PNG، وGIF"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
|
||||
msgid "Please, provide a name and a image"
|
||||
|
File diff suppressed because it is too large
@@ -25,15 +25,18 @@
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Adolfo Gómez <dkmaster@dkmon.com>, 2017,2020
|
||||
# albert clar <albert.clar@uib.cat>, 2017-2018,2020-2021
|
||||
# Andrés Schumann <aschumann@virtualcable.es>, 2025
|
||||
# Javier <jgonzalez@virtualcable.es>, 2025
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenUDS\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
|
||||
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
|
||||
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
|
||||
"Last-Translator: Javier <jgonzalez@virtualcable.es>, 2025\n"
|
||||
"Language-Team: Catalan (http://app.transifex.com/openuds/openuds/language/"
|
||||
@@ -158,7 +161,6 @@ msgstr "Principal"
|
||||
#: static/admin/translations-fakejs.js:210
|
||||
#: static/admin/translations-fakejs.js:218
|
||||
#: static/admin/translations-fakejs.js:226
|
||||
#: static/admin/translations-fakejs.js:228
|
||||
#: static/admin/translations-fakejs.js:230
|
||||
#: static/admin/translations-fakejs.js:258
|
||||
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
|
||||
@@ -545,7 +547,7 @@ msgid ""
|
||||
"are optional. Separator can be configured."
|
||||
msgstr ""
|
||||
"El format del fitxer ha de ser \"hostname,ip,mac,...\". Tots els camps "
|
||||
"excepte el nom d'amfitrió són opcionals. El separador es pot configurar."
|
||||
"excepte el nom d'amfitrió són opcionals. El separador es pot configurar."
|
||||
|
||||
#: static/admin/main.js:17 static/admin/translations-fakejs.js:181
|
||||
msgid "Remove server from server group"
|
||||
@@ -706,7 +708,7 @@ msgstr "Netejar dades relacionades (mfa, ...)?"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:154
|
||||
msgid "Related data cleaned"
|
||||
msgstr "S'han netejat les dades relacionades"
|
||||
msgstr "S'han netejat les dades relacionades"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:156
|
||||
msgid "Client logging"
|
||||
@@ -714,7 +716,7 @@ msgstr "Registre de clients"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:157
|
||||
msgid "Enable client logging for user?"
|
||||
msgstr "Habilitar el registre del client per a l'usuari?"
|
||||
msgstr "Habilitar el registre del client per a l'usuari?"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:158
|
||||
msgid "Client logging enabled"
|
||||
@@ -798,12 +800,10 @@ msgstr ""
|
||||
"Si està seleccionat, s'iniciarà la publicació just després de la creació"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
|
||||
#| msgid "Service Pools"
|
||||
msgid "Service Pool is locked"
|
||||
msgstr "La piscina de serveis està bloquejada"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
|
||||
#| msgid "Service is in maintenance and cannot be executed"
|
||||
msgid "This service pool is locked and cannot be edited"
|
||||
msgstr "Aquest grup de serveis està bloquejat i no es pot editar"
|
||||
|
||||
@@ -901,7 +901,6 @@ msgid "Delete cached service"
|
||||
msgstr "Suprimir servei en memòria cau"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
|
||||
#| msgid "Service pools"
|
||||
msgid "Service pool is locked"
|
||||
msgstr "La piscina de servei està tancada"
|
||||
|
||||
@@ -1168,9 +1167,14 @@ msgstr "Suprimir ús del compte"
|
||||
msgid "Image is too big (max. upload size is 256Kb)"
|
||||
msgstr "Imatge massa gran (la mida màxima és de 256Kb)"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
|
||||
msgid "Error on upload"
|
||||
msgstr "Error en la càrrega"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
|
||||
msgid "Invalid image type (only supports JPEG, PNG and GIF"
|
||||
msgstr "Tipus d'imatge no suportat (només es suporta JPEG, PNG i GIF"
|
||||
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
|
||||
msgstr ""
|
||||
"Tipus d'imatge no vàlid. Els tipus compatibles són: JPEG, PNG i GIF"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
|
||||
msgid "Please, provide a name and a image"
|
||||
@@ -1711,7 +1715,7 @@ msgstr "Nou permís ..."
|
||||
|
||||
#: static/admin/translations-fakejs.js:542
|
||||
msgid "CVS Import options for"
|
||||
msgstr "Opcions d'importació de CVS per"
|
||||
msgstr "Opcions d'importació de CVS per"
|
||||
|
||||
#: static/admin/translations-fakejs.js:543
|
||||
msgid "Header"
|
||||
@@ -1884,7 +1888,7 @@ msgid ""
|
||||
"here:"
|
||||
msgstr ""
|
||||
"Sembla que no teniu instal·lat el client UDS. Si us plau, instal·leu-lo des "
|
||||
"d'aquí:"
|
||||
"d'aquí:"
|
||||
|
||||
#: static/modern/main.js:8 static/modern/translations-fakejs.js:36
|
||||
msgid "UDS Client Download"
|
||||
@@ -1897,7 +1901,7 @@ msgstr "Llançament del client UDS, gairebé fet."
|
||||
#: static/modern/main.js:8
|
||||
msgid "Error communicating with your service. Please, retry again."
|
||||
msgstr ""
|
||||
"S'ha produït un error en comunicar-se amb el vostre servei. Si us plau, "
|
||||
"S'ha produït un error en comunicar-se amb el vostre servei. Si us plau, "
|
||||
"torna-ho a provar."
|
||||
|
||||
#: static/modern/main.js:8 static/modern/translations-fakejs.js:39
|
||||
@@ -1988,7 +1992,7 @@ msgstr ""
|
||||
|
||||
#: static/modern/translations-fakejs.js:24
|
||||
msgid "If you do not agree, please"
|
||||
msgstr "Si no esteu d'acord, si us plau"
|
||||
msgstr "Si no esteu d'acord, si us plau"
|
||||
|
||||
#: static/modern/translations-fakejs.js:25
|
||||
msgid "leave this site"
|
||||
@@ -2025,11 +2029,11 @@ msgstr "Si creieu que hem oblidat algun component, si us plau feu-nos-ho saber"
|
||||
|
||||
#: static/modern/translations-fakejs.js:45
|
||||
msgid "Login Verification"
|
||||
msgstr "Verificació d'inici de sessió"
|
||||
msgstr "Verificació d'inici de sessió"
|
||||
|
||||
#: static/modern/translations-fakejs.js:46
|
||||
msgid "Remember me for"
|
||||
msgstr "Recorda'm per"
|
||||
msgstr "Recorda'm per"
|
||||
|
||||
#: static/modern/translations-fakejs.js:47
|
||||
msgid "Submit"
|
||||
@@ -2138,4 +2142,4 @@ msgstr "Accions"
|
||||
|
||||
#: static/modern/translations-fakejs.js:88
|
||||
msgid "Please, enter access credentials"
|
||||
msgstr "Si us plau, introduïu les credencials d'accés"
|
||||
msgstr "Si us plau, introduïu les credencials d'accés"
|
||||
|
File diff suppressed because it is too large
@@ -25,6 +25,8 @@
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Adolfo Gómez <dkmaster at dkmon dot com>, 2012
|
||||
# Javier <jgonzalez@virtualcable.es>, 2024-2025
|
||||
# Thorsten Latka <thorsten.latka@aktobis.de>, 2020
|
||||
@@ -35,9 +37,9 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenUDS\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
|
||||
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
|
||||
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
|
||||
"Last-Translator: Javier <jgonzalez@virtualcable.es>, 2024-2025\n"
|
||||
"Last-Translator: Víctor Alonso <valonso@virtualcable.net>, 2020,2022\n"
|
||||
"Language-Team: German (http://app.transifex.com/openuds/openuds/language/"
|
||||
"de/)\n"
|
||||
"Language: de\n"
|
||||
@@ -160,7 +162,6 @@ msgstr "Main"
|
||||
#: static/admin/translations-fakejs.js:210
|
||||
#: static/admin/translations-fakejs.js:218
|
||||
#: static/admin/translations-fakejs.js:226
|
||||
#: static/admin/translations-fakejs.js:228
|
||||
#: static/admin/translations-fakejs.js:230
|
||||
#: static/admin/translations-fakejs.js:258
|
||||
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
|
||||
@@ -801,12 +802,10 @@ msgstr ""
|
||||
"gestartet"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
|
||||
#| msgid "Service Pools"
|
||||
msgid "Service Pool is locked"
|
||||
msgstr "Der Servicepool ist gesperrt"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
|
||||
#| msgid "Service is in maintenance and cannot be executed"
|
||||
msgid "This service pool is locked and cannot be edited"
|
||||
msgstr "Dieser Servicepool ist gesperrt und kann nicht bearbeitet werden"
|
||||
|
||||
@@ -904,7 +903,6 @@ msgid "Delete cached service"
|
||||
msgstr "Löschen Sie den zwischengespeicherten Dienst"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
|
||||
#| msgid "Service pools"
|
||||
msgid "Service pool is locked"
|
||||
msgstr "Der Servicepool ist gesperrt"
|
||||
|
||||
@@ -1172,9 +1170,13 @@ msgstr "Benutzerkonto löschen"
|
||||
msgid "Image is too big (max. upload size is 256Kb)"
|
||||
msgstr "Bild ist zu groß (max. Upload-Größe beträgt 256Kb)"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
|
||||
msgid "Error on upload"
|
||||
msgstr "Fehler beim Hochladen"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
|
||||
msgid "Invalid image type (only supports JPEG, PNG and GIF"
|
||||
msgstr "Ungültiger Bildtyp (unterstützt nur JPEG, PNG und GIF"
|
||||
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
|
||||
msgstr "Ungültiger Bildtyp. Unterstützte Typen sind: JPEG, PNG und GIF"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
|
||||
msgid "Please, provide a name and a image"
|
||||
|
File diff suppressed because it is too large
@@ -25,6 +25,8 @@
# Translators:
# Translators:
# Translators:
# Translators:
# Translators:
# , 2014
# blafuente <blafuente@virtualcable.es>, 2014
# Javier <jgonzalez@virtualcable.es>, 2014
@@ -33,7 +35,7 @@ msgid ""
msgstr ""
"Project-Id-Version: OpenUDS\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
"Last-Translator: Javier <jgonzalez@virtualcable.es>, 2014\n"
"Language-Team: English (http://app.transifex.com/openuds/openuds/language/"
@@ -158,7 +160,6 @@ msgstr "Main"
#: static/admin/translations-fakejs.js:210
#: static/admin/translations-fakejs.js:218
#: static/admin/translations-fakejs.js:226
#: static/admin/translations-fakejs.js:228
#: static/admin/translations-fakejs.js:230
#: static/admin/translations-fakejs.js:258
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
@@ -797,12 +798,10 @@ msgid "If selected, will initiate the publication inmediatly after creation"
msgstr "If selected, will initiate the publication inmediatly after creation"

#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
#| msgid "Service Pools"
msgid "Service Pool is locked"
msgstr "Service Pool is locked"

#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
#| msgid "Service is in maintenance and cannot be executed"
msgid "This service pool is locked and cannot be edited"
msgstr "This service pool is locked and cannot be edited"

@@ -900,7 +899,6 @@ msgid "Delete cached service"
msgstr "Delete cached service"

#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
#| msgid "Service pools"
msgid "Service pool is locked"
msgstr "Service pool is locked"

@@ -1167,9 +1165,13 @@ msgstr "Delete account usage"
msgid "Image is too big (max. upload size is 256Kb)"
msgstr "Image is too big (max. upload size is 256Kb)"

#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
msgid "Error on upload"
msgstr "Error on upload"

#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
msgid "Invalid image type (only supports JPEG, PNG and GIF"
msgstr "Invalid image type (only supports JPEG, PNG and GIF"
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
msgstr "Invalid image type. Supported types are: JPEG, PNG and GIF"

#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
msgid "Please, provide a name and a image"
File diff suppressed because it is too large
@@ -25,6 +25,8 @@
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# , 2014
|
||||
# Adolfo Gómez <dkmaster@dkmon.com>, 2014-2015,2017-2020
|
||||
# Adolfo Gómez <dkmaster@dkmon.com>, 2019
|
||||
@@ -43,9 +45,9 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenUDS\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
|
||||
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
|
||||
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
|
||||
"Last-Translator: Javier <jgonzalez@virtualcable.es>, 2014,2020,2024-2025\n"
|
||||
"Last-Translator: Víctor Alonso <valonso@virtualcable.net>, 2020\n"
|
||||
"Language-Team: Spanish (http://app.transifex.com/openuds/openuds/language/"
|
||||
"es/)\n"
|
||||
"Language: es\n"
|
||||
@@ -169,7 +171,6 @@ msgstr "Principal"
|
||||
#: static/admin/translations-fakejs.js:210
|
||||
#: static/admin/translations-fakejs.js:218
|
||||
#: static/admin/translations-fakejs.js:226
|
||||
#: static/admin/translations-fakejs.js:228
|
||||
#: static/admin/translations-fakejs.js:230
|
||||
#: static/admin/translations-fakejs.js:258
|
||||
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
|
||||
@@ -811,12 +812,10 @@ msgstr ""
|
||||
"creación"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
|
||||
#| msgid "Service Pools"
|
||||
msgid "Service Pool is locked"
|
||||
msgstr "El grupo de servicios está bloqueado"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
|
||||
#| msgid "Service is in maintenance and cannot be executed"
|
||||
msgid "This service pool is locked and cannot be edited"
|
||||
msgstr "Este grupo de servicios está bloqueado y no se puede editar"
|
||||
|
||||
@@ -914,7 +913,6 @@ msgid "Delete cached service"
|
||||
msgstr "Eliminar servicio en caché"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
|
||||
#| msgid "Service pools"
|
||||
msgid "Service pool is locked"
|
||||
msgstr "El grupo de servicios está bloqueado"
|
||||
|
||||
@@ -1181,9 +1179,13 @@ msgstr "Eliminar uso de la cuenta"
|
||||
msgid "Image is too big (max. upload size is 256Kb)"
|
||||
msgstr "La imagen es demasiado grande (el tamaño máximo es de 256Kb)"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
|
||||
msgid "Error on upload"
|
||||
msgstr "Error al subir"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
|
||||
msgid "Invalid image type (only supports JPEG, PNG and GIF"
|
||||
msgstr "Tipo de imagen no válido (solo soporta JPEG, PNG y GIF)"
|
||||
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
|
||||
msgstr "Tipo de imagen no válido. Los tipos admitidos son: JPEG, PNG y GIF."
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
|
||||
msgid "Please, provide a name and a image"
|
||||
|
File diff suppressed because it is too large
@@ -25,6 +25,8 @@
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Adolfo Gómez <dkmaster@dkmon.com>, 2015
|
||||
# Javier <jgonzalez@virtualcable.es>, 2025
|
||||
# Lanmedia Comunicaciones <jetxaniz@lanmedia.es>, 2016-2017,2020
|
||||
@@ -37,9 +39,9 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenUDS\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
|
||||
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
|
||||
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
|
||||
"Last-Translator: Javier <jgonzalez@virtualcable.es>, 2025\n"
|
||||
"Last-Translator: Víctor Alonso <valonso@virtualcable.net>, 2020\n"
|
||||
"Language-Team: Basque (http://app.transifex.com/openuds/openuds/language/"
|
||||
"eu/)\n"
|
||||
"Language: eu\n"
|
||||
@@ -162,7 +164,6 @@ msgstr "Nagusia"
|
||||
#: static/admin/translations-fakejs.js:210
|
||||
#: static/admin/translations-fakejs.js:218
|
||||
#: static/admin/translations-fakejs.js:226
|
||||
#: static/admin/translations-fakejs.js:228
|
||||
#: static/admin/translations-fakejs.js:230
|
||||
#: static/admin/translations-fakejs.js:258
|
||||
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
|
||||
@@ -802,12 +803,10 @@ msgid "If selected, will initiate the publication inmediatly after creation"
|
||||
msgstr "Hautatuta badago, sortu ostean argitaratuko da."
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
|
||||
#| msgid "Service Pools"
|
||||
msgid "Service Pool is locked"
|
||||
msgstr "Zerbitzu-igerilekua blokeatuta dago"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
|
||||
#| msgid "Service is in maintenance and cannot be executed"
|
||||
msgid "This service pool is locked and cannot be edited"
|
||||
msgstr "Zerbitzu-talde hau blokeatuta dago eta ezin da editatu"
|
||||
|
||||
@@ -905,7 +904,6 @@ msgid "Delete cached service"
|
||||
msgstr "Ezabatu cachean dagoen zerbitzua"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
|
||||
#| msgid "Service pools"
|
||||
msgid "Service pool is locked"
|
||||
msgstr "Zerbitzu igerilekua blokeatuta dago"
|
||||
|
||||
@@ -1172,9 +1170,13 @@ msgstr "Ezabatu kontuaren erabilera"
|
||||
msgid "Image is too big (max. upload size is 256Kb)"
|
||||
msgstr "Irudia handiegia da (gehienezko tamaina 256Kb)"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
|
||||
msgid "Error on upload"
|
||||
msgstr "Errorea igotzean"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
|
||||
msgid "Invalid image type (only supports JPEG, PNG and GIF"
|
||||
msgstr "Irudi mota okerra (JPEG, PNG eta GIF bakarrik onartzen dira)"
|
||||
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
|
||||
msgstr "Irudi mota baliogabea. Onartutako motak hauek dira: JPEG, PNG eta GIF"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
|
||||
msgid "Please, provide a name and a image"
|
||||
|
File diff suppressed because it is too large
@@ -25,9 +25,11 @@
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Abdel Baaddi <abaaddi@virtualcable.es>, 2016,2018
|
||||
# Adolfo Gómez <dkmaster at dkmon dot com>, 2012
|
||||
# Andrés Schumann <aschumann@virtualcable.es>, 2023
|
||||
# Andrés Schumann <aschumann@virtualcable.es>, 2023,2025
|
||||
# Javier Gomez <jgomez@virtualcable.es>, 2023-2024
|
||||
# Javier <jgonzalez@virtualcable.es>, 2025
|
||||
# Víctor Alonso <valonso@virtualcable.net>, 2020
|
||||
@@ -37,9 +39,9 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenUDS\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
|
||||
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
|
||||
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
|
||||
"Last-Translator: Javier <jgonzalez@virtualcable.es>, 2025\n"
|
||||
"Last-Translator: Víctor Alonso <valonso@virtualcable.net>, 2020\n"
|
||||
"Language-Team: French (http://app.transifex.com/openuds/openuds/language/"
|
||||
"fr/)\n"
|
||||
"Language: fr\n"
|
||||
@@ -163,7 +165,6 @@ msgstr "Principal"
|
||||
#: static/admin/translations-fakejs.js:210
|
||||
#: static/admin/translations-fakejs.js:218
|
||||
#: static/admin/translations-fakejs.js:226
|
||||
#: static/admin/translations-fakejs.js:228
|
||||
#: static/admin/translations-fakejs.js:230
|
||||
#: static/admin/translations-fakejs.js:258
|
||||
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
|
||||
@@ -290,11 +291,11 @@ msgstr "non"
|
||||
|
||||
#: static/admin/main.js:10 static/admin/translations-fakejs.js:250
|
||||
msgid "Error saving: "
|
||||
msgstr "Erreur d'enregistrement:"
|
||||
msgstr "Erreur d'enregistrement:"
|
||||
|
||||
#: static/admin/main.js:10 static/admin/translations-fakejs.js:251
|
||||
msgid "Error saving element"
|
||||
msgstr "Erreur lors de l'enregistrement de l'élément"
|
||||
msgstr "Erreur lors de l'enregistrement de l'élément"
|
||||
|
||||
#: static/admin/main.js:10 static/admin/translations-fakejs.js:252
|
||||
msgid "Error handling your request"
|
||||
@@ -310,7 +311,7 @@ msgstr "Services assignés"
|
||||
|
||||
#: static/admin/main.js:10 static/admin/translations-fakejs.js:4
|
||||
msgid "Services in use"
|
||||
msgstr "Services en cours d'utilisation"
|
||||
msgstr "Services en cours d'utilisation"
|
||||
|
||||
#: static/admin/main.js:13 static/admin/translations-fakejs.js:276
|
||||
#: static/admin/translations-fakejs.js:521
|
||||
@@ -369,7 +370,7 @@ msgstr "Retirer"
|
||||
|
||||
#: static/admin/main.js:13 static/admin/translations-fakejs.js:267
|
||||
msgid "Confirm revokation of permission"
|
||||
msgstr "Confirmer la révocation de l'autorisation"
|
||||
msgstr "Confirmer la révocation de l'autorisation"
|
||||
|
||||
#: static/admin/main.js:14 static/admin/main.js:16 static/admin/main.js:18
|
||||
#: static/admin/translations-fakejs.js:171
|
||||
@@ -549,9 +550,9 @@ msgid ""
|
||||
"Format of file must be \"hostname,ip,mac,...\". All fields except hostname "
|
||||
"are optional. Separator can be configured."
|
||||
msgstr ""
|
||||
"Le format du fichier doit être « nom d'hôte, IP, MAC, ... ». Tous les "
|
||||
"champs, à l'exception du nom d'hôte, sont facultatifs. Le séparateur "
|
||||
"peut être configuré."
|
||||
"Le format du fichier doit être « nom d'hôte, IP, MAC, ... ». Tous les "
|
||||
"champs, à l'exception du nom d'hôte, sont facultatifs. Le séparateur peut "
|
||||
"être configuré."
|
||||
|
||||
#: static/admin/main.js:17 static/admin/translations-fakejs.js:181
|
||||
msgid "Remove server from server group"
|
||||
@@ -567,7 +568,7 @@ msgstr "Edit Authenticator"
|
||||
|
||||
#: static/admin/main.js:17 static/admin/translations-fakejs.js:115
|
||||
msgid "Delete Authenticator"
|
||||
msgstr "Supprimer l'authentificateur"
|
||||
msgstr "Supprimer l'authentificateur"
|
||||
|
||||
#: static/admin/main.js:17 static/admin/translations-fakejs.js:163
|
||||
msgid "New MFA"
|
||||
@@ -575,11 +576,11 @@ msgstr "Nouveau MFA"
|
||||
|
||||
#: static/admin/main.js:17 static/admin/translations-fakejs.js:164
|
||||
msgid "Edit MFA"
|
||||
msgstr "Modifier l'AMF"
|
||||
msgstr "Modifier l'AMF"
|
||||
|
||||
#: static/admin/main.js:17 static/admin/translations-fakejs.js:165
|
||||
msgid "Delete MFA"
|
||||
msgstr "Supprimer l'AMF"
|
||||
msgstr "Supprimer l'AMF"
|
||||
|
||||
#: static/admin/main.js:17 static/admin/translations-fakejs.js:160
|
||||
#: static/admin/translations-fakejs.js:482
|
||||
@@ -720,7 +721,7 @@ msgstr "Journalisation du client"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:157
|
||||
msgid "Enable client logging for user?"
|
||||
msgstr "Activer la journalisation client pour l'utilisateur ?"
|
||||
msgstr "Activer la journalisation client pour l'utilisateur ?"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:158
|
||||
msgid "Client logging enabled"
|
||||
@@ -803,12 +804,10 @@ msgid "If selected, will initiate the publication inmediatly after creation"
|
||||
msgstr "Si sélectionné, lancera la publication immédiatement après création"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
|
||||
#| msgid "Service Pools"
|
||||
msgid "Service Pool is locked"
|
||||
msgstr "Le pool de services est verrouillé"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
|
||||
#| msgid "Service is in maintenance and cannot be executed"
|
||||
msgid "This service pool is locked and cannot be edited"
|
||||
msgstr "Ce pool de services est verrouillé et ne peut pas être modifié"
|
||||
|
||||
@@ -823,15 +822,15 @@ msgstr "Supprimer le pool de services"
|
||||
#: static/admin/main.js:18 static/admin/main.js:23
|
||||
#: static/admin/translations-fakejs.js:6 static/admin/translations-fakejs.js:8
|
||||
msgid "Please, select a valid user"
|
||||
msgstr "S'il vous plaît, sélectionnez un utilisateur valide"
|
||||
msgstr "S'il vous plaît, sélectionnez un utilisateur valide"
|
||||
|
||||
#: static/admin/main.js:19 static/admin/translations-fakejs.js:38
|
||||
msgid "Please, select a valid group"
|
||||
msgstr "S'il vous plaît, sélectionnez un groupe valide"
|
||||
msgstr "S'il vous plaît, sélectionnez un groupe valide"
|
||||
|
||||
#: static/admin/main.js:19 static/admin/translations-fakejs.js:33
|
||||
msgid "Please, select a valid transport"
|
||||
msgstr "S'il vous plaît, sélectionnez un transport valide"
|
||||
msgstr "S'il vous plaît, sélectionnez un transport valide"
|
||||
|
||||
#: static/admin/main.js:23 static/admin/translations-fakejs.js:34
|
||||
msgid "Assigned"
|
||||
@@ -906,14 +905,13 @@ msgid "Delete cached service"
|
||||
msgstr "Supprimer le service en cache"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
|
||||
#| msgid "Service pools"
|
||||
msgid "Service pool is locked"
|
||||
msgstr "Le pool de services est verrouillé"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:19
|
||||
msgid "Service pool is locked, no changes allowed"
|
||||
msgstr ""
|
||||
"Le pool de services est verrouillé, aucune modification n'est autorisée"
|
||||
"Le pool de services est verrouillé, aucune modification n'est autorisée"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:20
|
||||
#: static/admin/translations-fakejs.js:64
|
||||
@@ -938,7 +936,7 @@ msgstr "Publication annulée"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:26
|
||||
msgid "Delete scheduled action"
|
||||
msgstr "Supprimer l'action programmée"
|
||||
msgstr "Supprimer l'action programmée"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:27
|
||||
msgid "Execute scheduled action"
|
||||
@@ -946,7 +944,7 @@ msgstr "Exécuter une action planifiée"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:28
|
||||
msgid "Execute scheduled action right now?"
|
||||
msgstr "Exécuter l'action prévue maintenant?"
|
||||
msgstr "Exécuter l'action prévue maintenant?"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:29
|
||||
msgid "Scheduled action executed"
|
||||
@@ -955,7 +953,7 @@ msgstr "Action programmée exécutée"
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:31
|
||||
#: static/admin/translations-fakejs.js:65
|
||||
msgid "Delete calendar access rule"
|
||||
msgstr "Supprimer la règle d'accès au calendrier"
|
||||
msgstr "Supprimer la règle d'accès au calendrier"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:66
|
||||
msgid "New meta pool"
|
||||
@@ -1112,7 +1110,7 @@ msgstr "De"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:108
|
||||
msgid "until"
|
||||
msgstr "jusqu'à ce que"
|
||||
msgstr "jusqu'à ce que"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:109
|
||||
msgid "onwards"
|
||||
@@ -1132,7 +1130,7 @@ msgstr "avec aucune durée"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:72
|
||||
msgid "Delete calendar rule"
|
||||
msgstr "Supprimer la règle d'agenda"
|
||||
msgstr "Supprimer la règle d'agenda"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:51
|
||||
msgid "Set time mark"
|
||||
@@ -1168,17 +1166,19 @@ msgstr "Pas de marque de temps"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:50
|
||||
msgid "Delete account usage"
|
||||
msgstr "Supprimer l'utilisation du compte"
|
||||
msgstr "Supprimer l'utilisation du compte"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:227
|
||||
msgid "Image is too big (max. upload size is 256Kb)"
|
||||
msgstr "L'image est trop grande (taille de téléchargement max. est de 256Kb)"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
|
||||
msgid "Error on upload"
|
||||
msgstr "Erreur lors du téléchargement"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
|
||||
msgid "Invalid image type (only supports JPEG, PNG and GIF"
|
||||
msgstr ""
|
||||
"Type d'image non valide (supporte uniquement les formats JPEG, PNG et "
|
||||
"GIF)"
|
||||
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
|
||||
msgstr "Type d'image non valide. Types pris en charge : JPEG, PNG et GIF."
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
|
||||
msgid "Please, provide a name and a image"
|
||||
@@ -1186,7 +1186,7 @@ msgstr "Merci de donner un nom et une image"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:225
|
||||
msgid "Delete image"
|
||||
msgstr "Supprimer l'image"
|
||||
msgstr "Supprimer l'image"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:234
|
||||
#: static/admin/translations-fakejs.js:235
|
||||
@@ -1212,8 +1212,7 @@ msgstr "Modifier le notificateur"
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:241
|
||||
#: static/admin/translations-fakejs.js:244
|
||||
msgid "Delete actor token - USE WITH EXTREME CAUTION!!!"
|
||||
msgstr ""
|
||||
"Supprimer le jeton d'acteur - UTILISER AVEC UNE ATTENTION EXTRÊME !!!"
|
||||
msgstr "Supprimer le jeton d'acteur - UTILISER AVEC UNE ATTENTION EXTRÊME !!!"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:242
|
||||
msgid "Configuration saved"
|
||||
@@ -1243,7 +1242,7 @@ msgstr "Sélectionner"
|
||||
|
||||
#: static/admin/main.js:29 static/admin/translations-fakejs.js:275
|
||||
msgid ", (%i more items)"
|
||||
msgstr ", (% i plus d'articles)"
|
||||
msgstr ", (% i plus d'articles)"
|
||||
|
||||
#: static/admin/translations-fakejs.js:333
|
||||
#: static/modern/translations-fakejs.js:68
|
||||
@@ -1294,7 +1293,7 @@ msgstr "Tableau des services attribués"
|
||||
|
||||
#: static/admin/translations-fakejs.js:346
|
||||
msgid "In use services chart"
|
||||
msgstr "Tableau des services en cours d'utilisation"
|
||||
msgstr "Tableau des services en cours d'utilisation"
|
||||
|
||||
#: static/admin/translations-fakejs.js:347
|
||||
msgid "UDS Administration"
|
||||
@@ -1313,7 +1312,7 @@ msgid ""
|
||||
"In order to increase your access privileges, please contact your local UDS "
|
||||
"administrator."
|
||||
msgstr ""
|
||||
"Pour augmenter vos privilèges d'accès, veuillez contacter votre "
|
||||
"Pour augmenter vos privilèges d'accès, veuillez contacter votre "
|
||||
"administrateur UDS local."
|
||||
|
||||
#: static/admin/translations-fakejs.js:351
|
||||
@@ -1347,7 +1346,7 @@ msgstr "Changer le propriétaire du service attribué"
|
||||
#: static/admin/translations-fakejs.js:552
|
||||
#: static/admin/translations-fakejs.js:556
|
||||
msgid "Ok"
|
||||
msgstr "D'accord"
|
||||
msgstr "D'accord"
|
||||
|
||||
#: static/admin/translations-fakejs.js:357
|
||||
msgid "Changelog of"
|
||||
@@ -1355,7 +1354,7 @@ msgstr "Changelog de"
|
||||
|
||||
#: static/admin/translations-fakejs.js:359
|
||||
msgid "Assign service to user manually"
|
||||
msgstr "Attribuer manuellement le service à l'utilisateur"
|
||||
msgstr "Attribuer manuellement le service à l'utilisateur"
|
||||
|
||||
#: static/admin/translations-fakejs.js:360
|
||||
msgid "Service"
|
||||
@@ -1409,7 +1408,7 @@ msgstr "Actions programmées"
|
||||
#: static/admin/translations-fakejs.js:377
|
||||
#: static/admin/translations-fakejs.js:415
|
||||
msgid "Access calendars"
|
||||
msgstr "Calendriers d'accès"
|
||||
msgstr "Calendriers d'accès"
|
||||
|
||||
#: static/admin/translations-fakejs.js:378
|
||||
msgid "Charts"
|
||||
@@ -1417,11 +1416,11 @@ msgstr "Graphiques"
|
||||
|
||||
#: static/admin/translations-fakejs.js:380
|
||||
msgid "New access rule for"
|
||||
msgstr "Nouvelle règle d'accès pour"
|
||||
msgstr "Nouvelle règle d'accès pour"
|
||||
|
||||
#: static/admin/translations-fakejs.js:381
|
||||
msgid "Edit access rule for"
|
||||
msgstr "Modifier la règle d'accès pour"
|
||||
msgstr "Modifier la règle d'accès pour"
|
||||
|
||||
#: static/admin/translations-fakejs.js:382
|
||||
msgid "Default fallback access for"
|
||||
@@ -1444,7 +1443,7 @@ msgstr "action"
|
||||
|
||||
#: static/admin/translations-fakejs.js:388
|
||||
msgid "Edit action for"
|
||||
msgstr "Modifier l'action pour"
|
||||
msgstr "Modifier l'action pour"
|
||||
|
||||
#: static/admin/translations-fakejs.js:389
|
||||
msgid "New action for"
|
||||
@@ -1452,11 +1451,11 @@ msgstr "Nouvelle action pour"
|
||||
|
||||
#: static/admin/translations-fakejs.js:391
|
||||
msgid "Events offset (minutes)"
|
||||
msgstr "Décalage d'événements (minutes)"
|
||||
msgstr "Décalage d'événements (minutes)"
|
||||
|
||||
#: static/admin/translations-fakejs.js:392
|
||||
msgid "At the beginning of the interval?"
|
||||
msgstr "Au début de l'intervalle?"
|
||||
msgstr "Au début de l'intervalle?"
|
||||
|
||||
#: static/admin/translations-fakejs.js:399
|
||||
msgid "Logs of"
|
||||
@@ -1524,7 +1523,7 @@ msgstr "Date de début"
|
||||
|
||||
#: static/admin/translations-fakejs.js:434
|
||||
msgid "Repeat until date"
|
||||
msgstr "Répéter jusqu'à la date"
|
||||
msgstr "Répéter jusqu'à la date"
|
||||
|
||||
#: static/admin/translations-fakejs.js:435
|
||||
msgid "Frequency"
|
||||
@@ -1582,7 +1581,7 @@ msgstr "Nom du groupe méta"
|
||||
|
||||
#: static/admin/translations-fakejs.js:478
|
||||
msgid "Skip MFA"
|
||||
msgstr "Ignorer l'AMF"
|
||||
msgstr "Ignorer l'AMF"
|
||||
|
||||
#: static/admin/translations-fakejs.js:481
|
||||
msgid "Service Pools"
|
||||
@@ -1590,7 +1589,7 @@ msgstr "Pools de service"
|
||||
|
||||
#: static/admin/translations-fakejs.js:483
|
||||
msgid "Any group"
|
||||
msgstr "N'importe quel groupe"
|
||||
msgstr "N'importe quel groupe"
|
||||
|
||||
#: static/admin/translations-fakejs.js:484
|
||||
msgid "All groups"
|
||||
@@ -1635,7 +1634,7 @@ msgstr "Modifier pour"
|
||||
|
||||
#: static/admin/translations-fakejs.js:506
|
||||
msgid "Image name"
|
||||
msgstr "Nom de l'image"
|
||||
msgstr "Nom de l'image"
|
||||
|
||||
#: static/admin/translations-fakejs.js:507
|
||||
msgid "Image (click to change)"
|
||||
@@ -1647,7 +1646,7 @@ msgstr "Pour des résultats optimaux, utilisez des images « au carré »."
|
||||
|
||||
#: static/admin/translations-fakejs.js:509
|
||||
msgid "The image will be resized on upload to"
|
||||
msgstr "L'image sera redimensionnée lors du téléchargement sur"
|
||||
msgstr "L'image sera redimensionnée lors du téléchargement sur"
|
||||
|
||||
#: static/admin/translations-fakejs.js:512
|
||||
msgid "UDS Configuration"
|
||||
@@ -1721,7 +1720,7 @@ msgstr "Nouvelle permission ..."
|
||||
|
||||
#: static/admin/translations-fakejs.js:542
|
||||
msgid "CVS Import options for"
|
||||
msgstr "Options d'importation CVS pour"
|
||||
msgstr "Options d'importation CVS pour"
|
||||
|
||||
#: static/admin/translations-fakejs.js:543
|
||||
msgid "Header"
|
||||
@@ -1729,11 +1728,11 @@ msgstr "Entête"
|
||||
|
||||
#: static/admin/translations-fakejs.js:544
|
||||
msgid "CSV contains header line"
|
||||
msgstr "CSV contient une ligne d'en-tête"
|
||||
msgstr "CSV contient une ligne d'en-tête"
|
||||
|
||||
#: static/admin/translations-fakejs.js:545
|
||||
msgid "CSV DOES NOT contains header line"
|
||||
msgstr "CSV NE contient PAS de ligne d'en-tête"
|
||||
msgstr "CSV NE contient PAS de ligne d'en-tête"
|
||||
|
||||
#: static/admin/translations-fakejs.js:546
|
||||
msgid "Separator"
|
||||
@@ -1753,7 +1752,7 @@ msgstr "Utiliser un tuyau"
|
||||
|
||||
#: static/admin/translations-fakejs.js:550
|
||||
msgid "Use tab"
|
||||
msgstr "Utiliser l'onglet"
|
||||
msgstr "Utiliser l'onglet"
|
||||
|
||||
#: static/admin/translations-fakejs.js:551
|
||||
msgid "File"
|
||||
@@ -1876,8 +1875,8 @@ msgid ""
|
||||
"Remember that you will need the UDS client on your platform to access the "
|
||||
"service."
|
||||
msgstr ""
|
||||
"N'oubliez pas que vous aurez besoin du client UDS sur votre plateforme "
|
||||
"pour accéder au service."
|
||||
"N'oubliez pas que vous aurez besoin du client UDS sur votre plateforme pour "
|
||||
"accéder au service."
|
||||
|
||||
#: static/modern/main.js:8 static/modern/translations-fakejs.js:34
|
||||
#: static/modern/translations-fakejs.js:37
|
||||
@@ -1893,8 +1892,8 @@ msgid ""
|
||||
"It seems that you don't have UDS Client installed. Please, install it from "
|
||||
"here:"
|
||||
msgstr ""
|
||||
"Il semble que vous n'ayez pas installé le client UDS. Veuillez "
|
||||
"l'installer à partir d'ici :"
|
||||
"Il semble que vous n'ayez pas installé le client UDS. Veuillez l'installer à "
|
||||
"partir d'ici :"
|
||||
|
||||
#: static/modern/main.js:8 static/modern/translations-fakejs.js:36
|
||||
msgid "UDS Client Download"
|
||||
@@ -1932,8 +1931,8 @@ msgstr "Le service est en maintenance"
|
||||
#: static/modern/translations-fakejs.js:9
|
||||
msgid "This service is currently not accessible due to schedule restrictions."
|
||||
msgstr ""
|
||||
"Ce service n'est actuellement pas accessible en raison de contraintes "
|
||||
"d'horaire."
|
||||
"Ce service n'est actuellement pas accessible en raison de contraintes "
|
||||
"d'horaire."
|
||||
|
||||
#: static/modern/translations-fakejs.js:10
|
||||
msgid "Launcher"
|
||||
@@ -1946,8 +1945,8 @@ msgstr "Le service est en maintenance et ne peut pas être lancé"
|
||||
#: static/modern/translations-fakejs.js:12
|
||||
msgid "This service is currently not accesible due to schedule restrictions."
|
||||
msgstr ""
|
||||
"Ce service n'est actuellement pas accessible en raison de contraintes "
|
||||
"d'horaire."
|
||||
"Ce service n'est actuellement pas accessible en raison de contraintes "
|
||||
"d'horaire."
|
||||
|
||||
#: static/modern/translations-fakejs.js:13
|
||||
msgid "Service message"
|
||||
@@ -1980,7 +1979,7 @@ msgstr "Êtes-vous sûr?"
|
||||
#: static/modern/translations-fakejs.js:20
|
||||
#: static/modern/translations-fakejs.js:52
|
||||
msgid "Username"
|
||||
msgstr "Nom d'utilisateur"
|
||||
msgstr "Nom d'utilisateur"
|
||||
|
||||
#: static/modern/translations-fakejs.js:21
|
||||
#: static/modern/translations-fakejs.js:53
|
||||
@@ -1999,7 +1998,7 @@ msgstr ""
|
||||
|
||||
#: static/modern/translations-fakejs.js:24
|
||||
msgid "If you do not agree, please"
|
||||
msgstr "Si vous n'êtes pas d'accord, veuillez"
|
||||
msgstr "Si vous n'êtes pas d'accord, veuillez"
|
||||
|
||||
#: static/modern/translations-fakejs.js:25
|
||||
msgid "leave this site"
|
||||
@@ -2008,7 +2007,7 @@ msgstr "quitter ce site"
|
||||
#: static/modern/translations-fakejs.js:26
|
||||
#: static/modern/translations-fakejs.js:28
|
||||
msgid "I Accept"
|
||||
msgstr "J'accepte"
|
||||
msgstr "J'accepte"
|
||||
|
||||
#: static/modern/translations-fakejs.js:27
|
||||
msgid "Refuse and leave"
|
||||
@@ -2024,7 +2023,7 @@ msgstr "Politique de cookies"
|
||||
|
||||
#: static/modern/translations-fakejs.js:42
|
||||
msgid "You can access UDS Open Source code at"
|
||||
msgstr "Vous pouvez accéder au code UDS Open Source à l'adresse"
|
||||
msgstr "Vous pouvez accéder au code UDS Open Source à l'adresse"
|
||||
|
||||
#: static/modern/translations-fakejs.js:43
|
||||
msgid "UDS has been developed using these components:"
|
||||
@@ -2065,11 +2064,11 @@ msgstr "Téléchargements"
|
||||
|
||||
#: static/modern/translations-fakejs.js:51
|
||||
msgid "Always download the UDS actor matching your platform"
|
||||
msgstr "Téléchargez toujours l'acteur UDS correspondant à votre plateforme"
|
||||
msgstr "Téléchargez toujours l'acteur UDS correspondant à votre plateforme"
|
||||
|
||||
#: static/modern/translations-fakejs.js:55
|
||||
msgid "Login"
|
||||
msgstr "S'identifier"
|
||||
msgstr "S'identifier"
|
||||
|
||||
#: static/modern/translations-fakejs.js:56
|
||||
msgid "An error has occurred"
|
||||
@@ -2093,19 +2092,19 @@ msgstr "Veuillez noter que le rechargement de cette page ne fonctionnera pas."
|
||||
|
||||
#: static/modern/translations-fakejs.js:61
|
||||
msgid "To relaunch service, you will have to do it from origin."
|
||||
msgstr "Pour relancer le service, vous devrez le faire depuis l'origine."
|
||||
msgstr "Pour relancer le service, vous devrez le faire depuis l'origine."
|
||||
|
||||
#: static/modern/translations-fakejs.js:62
|
||||
msgid ""
|
||||
"If the service does not launchs automatically, probably you dont have the "
|
||||
"UDS Client installed"
|
||||
msgstr ""
|
||||
"Si le service ne se lance pas automatiquement, vous n'avez probablement "
|
||||
"pas installé le client UDS"
|
||||
"Si le service ne se lance pas automatiquement, vous n'avez probablement pas "
|
||||
"installé le client UDS"
|
||||
|
||||
#: static/modern/translations-fakejs.js:63
|
||||
msgid "You can obtain it from the"
|
||||
msgstr "Vous pouvez l'obtenir auprès du"
|
||||
msgstr "Vous pouvez l'obtenir auprès du"
|
||||
|
||||
#: static/modern/translations-fakejs.js:64
|
||||
msgid "UDS Client download page"
|
||||
@@ -2151,4 +2150,4 @@ msgstr "actes"
|
||||
|
||||
#: static/modern/translations-fakejs.js:88
|
||||
msgid "Please, enter access credentials"
|
||||
msgstr "Veuillez entrer les identifiants d'accès"
|
||||
msgstr "Veuillez entrer les identifiants d'accès"
|
||||
|
File diff suppressed because it is too large
@@ -25,6 +25,9 @@
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Andrés Schumann <aschumann@virtualcable.es>, 2025
|
||||
# Javier <jgonzalez@virtualcable.es>, 2020,2025
|
||||
# Massimo Vignone, 2020
|
||||
# mvignone, 2020-2021,2024
|
||||
@@ -36,9 +39,9 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenUDS\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
|
||||
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
|
||||
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
|
||||
"Last-Translator: Javier <jgonzalez@virtualcable.es>, 2020,2025\n"
|
||||
"Last-Translator: Víctor Alonso <valonso@virtualcable.net>, 2020\n"
|
||||
"Language-Team: Italian (http://app.transifex.com/openuds/openuds/language/"
|
||||
"it/)\n"
|
||||
"Language: it\n"
|
||||
@@ -162,7 +165,6 @@ msgstr "Principale"
|
||||
#: static/admin/translations-fakejs.js:210
|
||||
#: static/admin/translations-fakejs.js:218
|
||||
#: static/admin/translations-fakejs.js:226
|
||||
#: static/admin/translations-fakejs.js:228
|
||||
#: static/admin/translations-fakejs.js:230
|
||||
#: static/admin/translations-fakejs.js:258
|
||||
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
|
||||
@@ -577,7 +579,7 @@ msgstr "Modifica MFA"
|
||||
|
||||
#: static/admin/main.js:17 static/admin/translations-fakejs.js:165
|
||||
msgid "Delete MFA"
|
||||
msgstr "Elimina l'MFA"
|
||||
msgstr "Elimina l'MFA"
|
||||
|
||||
#: static/admin/main.js:17 static/admin/translations-fakejs.js:160
|
||||
#: static/admin/translations-fakejs.js:482
|
||||
@@ -718,7 +720,7 @@ msgstr "Registrazione del cliente"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:157
|
||||
msgid "Enable client logging for user?"
|
||||
msgstr "Abilitare la registrazione client per l'utente?"
|
||||
msgstr "Abilitare la registrazione client per l'utente?"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:158
|
||||
msgid "Client logging enabled"
|
||||
@@ -802,12 +804,10 @@ msgstr ""
|
||||
"Se selezionato, avvierà la pubblicazione immediatamente dopo la creazione"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
|
||||
#| msgid "Service Pools"
|
||||
msgid "Service Pool is locked"
|
||||
msgstr "Il pool di servizi è bloccato"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
|
||||
#| msgid "Service is in maintenance and cannot be executed"
|
||||
msgid "This service pool is locked and cannot be edited"
|
||||
msgstr "Questo pool di servizi è bloccato e non può essere modificato"
|
||||
|
||||
@@ -905,7 +905,6 @@ msgid "Delete cached service"
|
||||
msgstr "Elimina il servizio in cache"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
|
||||
#| msgid "Service pools"
|
||||
msgid "Service pool is locked"
|
||||
msgstr "Il pool di servizi è bloccato"
|
||||
|
||||
@@ -1173,9 +1172,13 @@ msgid "Image is too big (max. upload size is 256Kb)"
|
||||
msgstr ""
|
||||
"L'immagine è troppo grande (la dimensione massima di caricamento è 256 Kb)"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
|
||||
msgid "Error on upload"
|
||||
msgstr "Errore durante il caricamento"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
|
||||
msgid "Invalid image type (only supports JPEG, PNG and GIF"
|
||||
msgstr "Tipo di immagine non valido (supporta solo JPEG, PNG e GIF)"
|
||||
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
|
||||
msgstr "Tipo di immagine non valido. I tipi supportati sono: JPEG, PNG e GIF."
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
|
||||
msgid "Please, provide a name and a image"
|
||||
@@ -1904,7 +1907,7 @@ msgstr "Errore di comunicazione con il tuo servizio. Riprova di nuovo."
|
||||
|
||||
#: static/modern/main.js:8 static/modern/translations-fakejs.js:39
|
||||
msgid "Your session has expired. Please, login again"
|
||||
msgstr "La tua sessione è scaduta. Per favore esegui l'accesso di nuovo"
|
||||
msgstr "La tua sessione è scaduta. Per favore esegui l'accesso di nuovo"
|
||||
|
||||
#: static/modern/translations-fakejs.js:3
|
||||
#: static/modern/translations-fakejs.js:4
|
||||
@@ -1992,7 +1995,7 @@ msgstr ""
|
||||
|
||||
#: static/modern/translations-fakejs.js:24
|
||||
msgid "If you do not agree, please"
|
||||
msgstr "Se non sei d'accordo, per favore"
|
||||
msgstr "Se non sei d'accordo, per favore"
|
||||
|
||||
#: static/modern/translations-fakejs.js:25
|
||||
msgid "leave this site"
|
||||
@@ -2057,7 +2060,7 @@ msgstr "Download"
|
||||
|
||||
#: static/modern/translations-fakejs.js:51
|
||||
msgid "Always download the UDS actor matching your platform"
|
||||
msgstr "Scarica sempre l'UDS Actor corrispondente alla tua piattaforma"
|
||||
msgstr "Scarica sempre l'UDS Actor corrispondente alla tua piattaforma"
|
||||
|
||||
#: static/modern/translations-fakejs.js:55
|
||||
msgid "Login"
|
||||
|
File diff suppressed because it is too large
@@ -25,6 +25,8 @@
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Translators:
|
||||
# Javier Gomez <jgomez@virtualcable.es>, 2023
|
||||
# Javier <jgonzalez@virtualcable.es>, 2025
|
||||
# Víctor Alonso <valonso@virtualcable.net>, 2020
|
||||
@@ -34,9 +36,9 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenUDS\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
|
||||
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
|
||||
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
|
||||
"Last-Translator: Javier <jgonzalez@virtualcable.es>, 2025\n"
|
||||
"Last-Translator: Víctor Alonso <valonso@virtualcable.net>, 2020\n"
|
||||
"Language-Team: Portuguese (http://app.transifex.com/openuds/openuds/language/"
|
||||
"pt/)\n"
|
||||
"Language: pt\n"
|
||||
@@ -160,7 +162,6 @@ msgstr "Principal"
|
||||
#: static/admin/translations-fakejs.js:210
|
||||
#: static/admin/translations-fakejs.js:218
|
||||
#: static/admin/translations-fakejs.js:226
|
||||
#: static/admin/translations-fakejs.js:228
|
||||
#: static/admin/translations-fakejs.js:230
|
||||
#: static/admin/translations-fakejs.js:258
|
||||
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
|
||||
@@ -799,12 +800,10 @@ msgid "If selected, will initiate the publication inmediatly after creation"
|
||||
msgstr "Se selecionado, iniciará a publicação imediatamente após a criação"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
|
||||
#| msgid "Service Pools"
|
||||
msgid "Service Pool is locked"
|
||||
msgstr "O pool de serviços está bloqueado"
|
||||
|
||||
#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
|
||||
#| msgid "Service is in maintenance and cannot be executed"
|
||||
msgid "This service pool is locked and cannot be edited"
|
||||
msgstr "Este pool de serviços está bloqueado e não pode ser editado"
|
||||
|
||||
@@ -902,7 +901,6 @@ msgid "Delete cached service"
|
||||
msgstr "Excluir serviço em cache"
|
||||
|
||||
#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
|
||||
#| msgid "Service pools"
|
||||
msgid "Service pool is locked"
|
||||
msgstr "O pool de serviços está bloqueado"
|
||||
|
||||
@@ -1169,9 +1167,13 @@ msgstr "Excluir o uso da conta"
|
||||
msgid "Image is too big (max. upload size is 256Kb)"
|
||||
msgstr "A imagem é muito grande (o tamanho máximo de upload é de 256 KB)"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
|
||||
msgid "Error on upload"
|
||||
msgstr "Erro no upload"
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
|
||||
msgid "Invalid image type (only supports JPEG, PNG and GIF"
|
||||
msgstr "Tipo de imagem inválido (suporta apenas JPEG, PNG e GIF"
|
||||
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
|
||||
msgstr "Tipo de imagem inválido. Os tipos suportados são: JPEG, PNG e GIF."
|
||||
|
||||
#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
|
||||
msgid "Please, provide a name and a image"
|
||||
|
File diff suppressed because it is too large
@@ -25,6 +25,8 @@
# Translators:
# Translators:
# Translators:
# Translators:
# Translators:
# Adolfo Gómez <dkmaster@dkmon.com>, 2022
# Alexander <ag@kub.tel>, 2020
# Alexey Shabalin <a.shabalin@gmail.com>, 2019
@@ -40,7 +42,7 @@ msgid ""
msgstr ""
"Project-Id-Version: OpenUDS\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
"Last-Translator: Александр Бурматов, 2022\n"
"Language-Team: Russian (http://app.transifex.com/openuds/openuds/language/"
@@ -167,7 +169,6 @@ msgstr "Основной"
#: static/admin/translations-fakejs.js:210
#: static/admin/translations-fakejs.js:218
#: static/admin/translations-fakejs.js:226
#: static/admin/translations-fakejs.js:228
#: static/admin/translations-fakejs.js:230
#: static/admin/translations-fakejs.js:258
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
@@ -806,12 +807,10 @@ msgid "If selected, will initiate the publication inmediatly after creation"
msgstr "Если включено, инициирует публикацию сразу после создания"

#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
#| msgid "Service Pools"
msgid "Service Pool is locked"
msgstr "Пул услуг заблокирован"

#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
#| msgid "Service is in maintenance and cannot be executed"
msgid "This service pool is locked and cannot be edited"
msgstr "Этот пул услуг заблокирован и не может быть отредактирован."

@@ -909,7 +908,6 @@ msgid "Delete cached service"
msgstr "Удалить кэшированную услугу"

#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
#| msgid "Service pools"
msgid "Service pool is locked"
msgstr "Пул услуг заблокирован"

@@ -1176,9 +1174,13 @@ msgstr "Удалить использование аккаунта"
msgid "Image is too big (max. upload size is 256Kb)"
msgstr "Изображение велико (макс. размер 256КБ)"

#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
msgid "Error on upload"
msgstr "Ошибка при загрузке"

#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
msgid "Invalid image type (only supports JPEG, PNG and GIF"
msgstr "Неверный тип изображения (поддерживаются только JPEG, PNG и GIF)"
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
msgstr "Недопустимый тип изображения. Поддерживаемые типы: JPEG, PNG и GIF"

#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
msgid "Please, provide a name and a image"

File diff suppressed because it is too large
@@ -25,12 +25,14 @@
# Translators:
# Translators:
# Translators:
# Translators:
# Translators:
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: OpenUDS\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-03-12 19:02+0100\n"
"POT-Creation-Date: 2025-06-27 15:48+0200\n"
"PO-Revision-Date: 2014-03-26 02:16+0000\n"
"Last-Translator: Javier <jgonzalez@virtualcable.es>, 2014\n"
"Language-Team: Chinese (http://app.transifex.com/openuds/openuds/language/"
@@ -155,7 +157,6 @@ msgstr "主要"
#: static/admin/translations-fakejs.js:210
#: static/admin/translations-fakejs.js:218
#: static/admin/translations-fakejs.js:226
#: static/admin/translations-fakejs.js:228
#: static/admin/translations-fakejs.js:230
#: static/admin/translations-fakejs.js:258
#: static/admin/translations-fakejs.js:283 static/modern/main.js:8
@@ -794,12 +795,10 @@ msgid "If selected, will initiate the publication inmediatly after creation"
msgstr "如果选中,将在创建后立即启动发布"

#: static/admin/main.js:18 static/admin/translations-fakejs.js:43
#| msgid "Service Pools"
msgid "Service Pool is locked"
msgstr "服务池已锁定"

#: static/admin/main.js:18 static/admin/translations-fakejs.js:44
#| msgid "Service is in maintenance and cannot be executed"
msgid "This service pool is locked and cannot be edited"
msgstr "此服务池已被锁定,无法编辑"

@@ -897,7 +896,6 @@ msgid "Delete cached service"
msgstr "删除缓存的服务"

#: static/admin/main.js:26 static/admin/translations-fakejs.js:18
#| msgid "Service pools"
msgid "Service pool is locked"
msgstr "服务池已锁定"

@@ -1164,9 +1162,13 @@ msgstr "删除帐户使用情况"
msgid "Image is too big (max. upload size is 256Kb)"
msgstr "图像太大(最大上传大小为256Kb)"

#: static/admin/main.js:28 static/admin/translations-fakejs.js:228
msgid "Error on upload"
msgstr "上传错误"

#: static/admin/main.js:28 static/admin/translations-fakejs.js:229
msgid "Invalid image type (only supports JPEG, PNG and GIF"
msgstr "图像类型无效(仅支持JPEG,PNG和GIF"
msgid "Invalid image type. Supported types are: JPEG, PNG and GIF"
msgstr "图片类型无效。支持的类型为:JPEG、PNG 和 GIF"

#: static/admin/main.js:28 static/admin/translations-fakejs.js:231
msgid "Please, provide a name and a image"
server/src/uds/management/commands/maintenance.py (new file, 93 lines)
@@ -0,0 +1,93 @@
# -*- coding: utf-8 -*-

#
# Copyright (c) 2012-2019 Virtual Cable S.L.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
#    * Redistributions of source code must retain the above copyright notice,
#      this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright notice,
#      this list of conditions and the following disclaimer in the documentation
#      and/or other materials provided with the distribution.
#    * Neither the name of Virtual Cable S.L.U. nor the names of its contributors
#      may be used to endorse or promote products derived from this software
#      without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""
Author: Adolfo Gómez, dkmaster at dkmon dot com
"""
import logging
import typing

from django.core.management.base import BaseCommand
from uds import models
from uds.core.util import unique_mac_generator

logger = logging.getLogger(__name__)

MIN_VERBOSITY: typing.Final[int] = 1  # Minimum verbosity to print freed macs


class Command(BaseCommand):
    help = "Execute maintenance tasks for UDS broker"

    def clean_unused_service_macs(self, service: models.Service) -> int:
        # Get all userservices from this service, extract their "unique_id" (the mac)
        # And store it in a set for later use
        self.stdout.write(f'Cleaning unused macs for service {service.name} (id: {service.id})\n')

        def mac_to_int(mac: str) -> int:
            try:
                return int(mac.replace(':', ''), 16)
            except Exception:
                return -1

        mac_gen = unique_mac_generator.UniqueMacGenerator(f't-service-{service.id}')

        used_macs = {
            mac_to_int(us.unique_id) for us in models.UserService.objects.filter(deployed_service__service=service)
        }

        counter = 0
        for seq in (
            models.UniqueId.objects.filter(basename='\tmac', assigned=True, owner=f't-service-{service.id}')
            .exclude(seq__in=used_macs)
            .values_list('seq', flat=True)
        ):
            counter += 1
            self.stdout.write(f'Freeing mac {mac_gen._to_mac_addr(seq)} for service {service.name}\n')
            mac_gen.free(mac_gen._to_mac_addr(seq))

        self.stdout.write(f'Freed {counter} macs for service {service.name}\n')
        logger.info('Freed %d macs for service %s', counter, service.name)

        return counter

    def handle(self, *args: typing.Any, **options: typing.Any) -> None:
        logger.debug('Maintenance called with args: %s, options: %s', args, options)

        counter = 0
        for service in models.Service.objects.all():
            try:
                counter += self.clean_unused_service_macs(service)
            except Exception as e:
                logger.error('Error doing maintenance for service %s: %s', service.name, e)
                self.stdout.write(f'Error doing maintenance for service {service.name}: {e}\n')

        logger.info('Maintenance finished, total freed macs: %d', counter)
        self.stdout.write(f'Total freed macs: {counter}\n')
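A minimal sketch of invoking this new management command programmatically; the settings module name below is illustrative only (not taken from this diff), and any normal `manage.py maintenance` invocation would work the same way:

import os
import django
from django.core.management import call_command

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'server.settings')  # assumed module name, adjust to your deployment
django.setup()

# Runs Command.handle(), which walks every Service and frees MAC sequences no longer used by any user service
call_command('maintenance', verbosity=1)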
@@ -42,6 +42,7 @@ from django.core.management.base import BaseCommand  # , CommandError
from django.conf import settings

from uds.core.managers import task_manager
from uds.core.util import cluster
from uds.core.util.config import GlobalConfig

logger = logging.getLogger(__name__)
@@ -154,6 +155,10 @@ class Command(BaseCommand):

if start:
logger.info('Starting task manager.')

# Store cluster hostname intially.
# will be updated by system_info worker every hour
cluster.store_cluster_info()

if not foreground:
become_daemon(
@@ -40,7 +40,7 @@ import yaml

from django.core.management.base import BaseCommand

from uds.core.util import log, model, config
from uds.core.util import cluster, log, model, config
from uds import models
from uds.core import types

@@ -57,6 +57,7 @@ CONSIDERED_OLD: typing.Final[datetime.timedelta] = datetime.timedelta(days=365)
def get_serialized_from_managed_object(
mod: 'models.ManagedObjectModel',
removable_fields: typing.Optional[list[str]] = None,
callback: typing.Optional[typing.Callable[[models.ManagedObjectModel, dict[str, typing.Any]], None]] = None,
) -> collections.abc.Mapping[str, typing.Any]:
try:
obj: 'Module' = mod.get_instance()
@@ -82,6 +83,10 @@
values['type_name'] = str(obj.type_name)
values['comments'] = mod.comments

# May alter values with callback
if callback:
callback(mod, values)

return values
except Exception:
return {}
@@ -91,12 +96,13 @@
mod: 'dbmodels.Model',
removable_fields: typing.Optional[list[str]] = None,
password_fields: typing.Optional[list[str]] = None,
exclude_uuid: bool = True,
) -> collections.abc.Mapping[str, typing.Any]:
removable_fields = removable_fields or []
password_fields = password_fields or []
try:
values = mod._meta.managers[0].filter(pk=mod.pk).values()[0]
for i in ['uuid', 'id'] + removable_fields:
for i in (['uuid', 'id'] if exclude_uuid else []) + removable_fields:
if i in values:
del values[i]

@@ -124,7 +130,7 @@
'--max-items',
action='store',
dest='maxitems',
default=400,
default=200,
help='Maximum elements exported for groups and user services',
)

@@ -160,7 +166,15 @@
fltr = servicepool.userServices.all()
if not options['alluserservices']:
fltr = fltr.filter(state=types.states.State.ERROR)
for item in fltr[:max_items]:  # at most max_items items
fltr_list = list(fltr)[:max_items]
if len(fltr_list) < max_items:
# Append rest of userservices, if there is space
fltr_list += list(
servicepool.userServices.exclude(
pk__in=[u.pk for u in fltr_list]
)[: max_items - len(fltr_list)]
)
for item in fltr_list[:max_items]:  # at most max_items items
logs = [
f'{l["date"]}: {types.log.LogLevel.from_int(l["level"])} [{l["source"]}] - {l["message"]}'
for l in log.get_logs(item)
@@ -235,8 +249,22 @@
'_': get_serialized_from_managed_object(provider),
'services': services,
}

tree[counter('PROVIDERS')] = providers

# Get server groups
server_groups: dict[str, typing.Any] = {}
for server_group in models.ServerGroup.objects.all():
servers: dict[str, typing.Any] = {}
for server in server_group.servers.all()[:max_items]:  # at most max_items items
servers[server.hostname] = get_serialized_from_model(server, exclude_uuid=False)
server_groups[server_group.name] = {
'_': get_serialized_from_model(server_group, exclude_uuid=False),
'servers': servers,
}

tree[counter('SERVICES')] = {
'providers': providers,
'server_groups': server_groups
}

# authenticators
authenticators: dict[str, typing.Any] = {}
@@ -259,11 +287,27 @@
tree[counter('AUTHENTICATORS')] = authenticators

# transports
def trans_callback(mod: models.ManagedObjectModel, values: dict[str, typing.Any]) -> None:
# Add transport type
if 'tunnel' in values:
tunnel = models.Server.objects.filter(
type=types.servers.ServerType.TUNNEL, uuid=values['tunnel']
).first()
if tunnel:
values['tunnel'] = get_serialized_from_model(tunnel, exclude_uuid=False)
elif values['tunnel']:
values['tunnel'] += ' (not found)'

transports: dict[str, typing.Any] = {}
for transport in models.Transport.objects.all():
transports[transport.name] = get_serialized_from_managed_object(transport)
transports[transport.name] = get_serialized_from_managed_object(
transport, callback=trans_callback
)

tree[counter('TRANSPORTS')] = transports
# Tunnel servers
tunnels: dict[str, typing.Any] = {}
for tunnel in models.Server.objects.filter(type=types.servers.ServerType.TUNNEL):
tunnels[tunnel.hostname] = get_serialized_from_model(tunnel, exclude_uuid=False)

# Networks
networks: dict[str, typing.Any] = {}
@@ -273,7 +317,11 @@
'transports': [t.name for t in network.transports.all()],
}

tree[counter('NETWORKS')] = networks
tree[counter('CONNECTIVITY')] = {
'transports': transports,
'tunnels': tunnels,
'networks': networks,
}

# os managers
osmanagers: dict[str, typing.Any] = {}
@@ -354,6 +402,32 @@

tree[counter('CONFIG')] = cfg

# Last 7 days of logs or 500 entries, whichever is less
logs = [
get_serialized_from_model(log_entry)
for log_entry in models.Log.objects.filter(
created__gt=now - datetime.timedelta(days=7)
).order_by('-created')[:500]
]
# Cluster nodes
cluster_nodes: list[dict[str, str]] = [node.as_dict() for node in cluster.enumerate_cluster_nodes()]
# Scheduled jobs
scheduled_jobs: list[dict[str, typing.Any]] = [
{i.name: get_serialized_from_model(i)} for i in models.Scheduler.objects.all()
]
delayed_tasks: list[dict[str, typing.Any]] = [
{task.insert_date.strftime('%Y-%m-%d %H:%M:%S'): get_serialized_from_model(task)}
for task in models.DelayedTask.objects.all()
]

# system
tree[counter('SYSTEM')] = {
'logs': logs,
'cluster_nodes': cluster_nodes,
'scheduled_jobs': scheduled_jobs,
'delayed_tasks': delayed_tasks,
}

self.stdout.write(yaml.safe_dump(tree, default_flow_style=False))

except Exception as e:
@@ -81,7 +81,7 @@ class RadiusOTP(mfas.MFA):
tooltip=_('Radius authentication port (usually 1812)'),
required=True,
)
secret = gui.TextField(
secret = gui.PasswordField(
length=64,
label=_('Secret'),
order=3,
@@ -113,8 +113,8 @@ def migrate(
logger.error('Server %s on %s not found on DNS', server, record.name)

registered_server_group = ServerGroup.objects.create(
name=f'{server_group_prefix} for {record.name}',
comments='Migrated from {}'.format(record.name),
name=f'{server_group_prefix} for {record.name}'[:64],
comments='Migrated from {}'.format(record.name)[:255],
type=types.servers.ServerType.UNMANAGED,
subtype=subtype,
)
@@ -109,7 +109,7 @@ class IPMachinesService(services.Service):
self.ipList.value = [_as_identifier(i) for i in _ips]

if values[0] != b'v1':
self._token = values[1].decode()
self.token.value = values[1].decode()
if values[0] in (b'v3', b'v4', b'v5', b'v6', b'v7'):
self.port.value = int(values[2].decode())
if values[0] in (b'v4', b'v5', b'v6', b'v7'):
@@ -104,7 +104,7 @@ class Authenticator(ManagedObjectModel, TaggingMixin):
"""
if self._cached_instance and values is None:
return typing.cast(auths.Authenticator, self._cached_instance)

if not self.id:
# Return a fake authenticator
return auths.Authenticator(
@@ -115,9 +115,9 @@ class Authenticator(ManagedObjectModel, TaggingMixin):
env = self.get_environment()
auth = auth_type(env, values, uuid=self.uuid)
self.deserialize(auth, values)

self._cached_instance = auth

return auth

def get_type(self) -> type[auths.Authenticator]:
@@ -133,7 +133,7 @@ class Authenticator(ManagedObjectModel, TaggingMixin):
"""
# If type is not registered (should be, but maybe a database inconsistence), consider this a "base empty auth"
return auths.factory().lookup(self.data_type) or auths.Authenticator

def type_is_valid(self) -> bool:
"""
Returns if the type of this authenticator exists
@@ -255,34 +255,36 @@ class Authenticator(ManagedObjectModel, TaggingMixin):
return Authenticator.objects.all().order_by('priority')

@staticmethod
def get_by_tag(*tag: typing.Optional[str]) -> collections.abc.Iterable['Authenticator']:
def get_by_tag(
tag: str | None,
hostname: str | None = None,
) -> collections.abc.Iterable['Authenticator']:
"""
Gets authenticator by tag name.
Special tag name "disabled" is used to exclude customAuth
"""
# pylint: disable=import-outside-toplevel
from uds.core.util.config import GlobalConfig
# Filter out None tags
tags = list(filter(lambda x: x is not None, tag))
available_auths = Authenticator.objects.exclude(state=consts.auth.DISABLED).order_by('priority', 'name')
authenticators = available_auths
if tag != 'disabled':
if tag:
hostname = None

if tags:
# authenticators = list(auths.filter(small_name__in=[auth_host, tag]))
authenticators = (
Authenticator.objects.exclude(state=consts.auth.DISABLED)
.filter(small_name__in=tags)
.order_by('priority', 'name')
q = (models.Q(small_name__iexact=tag) if tag else models.Q()) | (
models.Q(small_name__iexact=hostname) if hostname else models.Q()
)
if not authenticators.exists():
authenticators = Authenticator.objects.all().order_by('priority', 'name')
else:
authenticators = Authenticator.objects.all().order_by('priority', 'name')

authenticators = available_auths.filter(q)

if not tag and hostname and not authenticators.exists():
# If no tag specified, we return all authenticators with the hostname
authenticators = available_auths.all()
if authenticators.exists():
# If hostname is specified, we filter by hostname
authenticators = authenticators.filter(small_name__iexact=authenticators[0].small_name)

for auth in authenticators:
if auth.get_type() and (not auth.get_type().is_custom() or 'disabled' not in tag):
if auth.get_type() and (not auth.get_type().is_custom() or tag != 'disabled'):
yield auth
# If disallow global login (that is, do not allow to select the auth), we break here
if GlobalConfig.DISALLOW_GLOBAL_LOGIN.as_bool(force=False) is True:
break  # Only one auth for global login (if disallowed)

@staticmethod
def pre_delete(sender: typing.Any, **kwargs: typing.Any) -> None:  # pylint: disable=unused-argument
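A short usage sketch of the reworked lookup above; the tag and hostname values are hypothetical and only illustrate the new keyword-style call that replaces the old variadic *tag form:

from uds import models

# With a tag: only authenticators whose small_name matches it are considered
# (hostname is cleared when a tag is present, per the code above)
for auth in models.Authenticator.get_by_tag('corp'):
    print(auth.name)

# Without a tag: the request hostname (if any) is used as the lookup key instead
for auth in models.Authenticator.get_by_tag(None, hostname='vdi.example.com'):
    print(auth.name)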
@@ -69,4 +69,5 @@ class DelayedTask(models.Model):
app_label = 'uds'

def __str__(self) -> str:
return f'Run Queue task {self.type} owned by {self.execution_time},inserted at {self.insert_date} and with {self.execution_delay} seconds delay'
# return f'Run Queue task {self.type} w {self.execution_time},inserted at {self.insert_date} and with {self.execution_delay} seconds delay'
return f'Delayed Task {self.type} ({self.tag}) at {self.execution_time} (inserted at {self.insert_date}, delay {self.execution_delay})'
@@ -47,7 +47,7 @@ class Properties(models.Model):
owner_id = models.CharField(max_length=128, db_index=True)
owner_type = models.CharField(max_length=64, db_index=True)
key = models.CharField(max_length=64, db_index=True)
value: typing.Any = models.JSONField(default=dict)
value = typing.cast(typing.Any, models.JSONField(default=dict))

class Meta:  # pyright: ignore
"""
@@ -109,6 +109,25 @@ class ServerGroup(UUIDModel, TaggingMixin, properties.PropertiesMixin):
"""Sets the server type of this server"""
self.type = value

@property
def weights(self) -> types.servers.ServerStatsWeights:
"""Returns the server stats weights for this server group"""
weights_dict = self.properties.get('weights', None)
if weights_dict:
return types.servers.ServerStatsWeights.from_dict(weights_dict)
return types.servers.ServerStatsWeights()

@weights.setter
def weights(self, value: types.servers.ServerStatsWeights) -> None:
"""Sets the server stats weights for this server group"""
self.properties['weights'] = value.as_dict()

@weights.deleter
def weights(self) -> None:
"""Deletes the server stats weights for this server group"""
if 'weights' in self.properties:
del self.properties['weights']

def is_managed(self) -> bool:
"""Returns if this server group is managed or not"""
return self.server_type != types.servers.ServerType.UNMANAGED
@@ -149,7 +168,6 @@ class ServerGroup(UUIDModel, TaggingMixin, properties.PropertiesMixin):
return None


def _create_token() -> str:
return secrets.token_urlsafe(36)

@@ -250,7 +268,7 @@ class Server(UUIDModel, TaggingMixin, properties.PropertiesMixin):
def server_type(self, value: types.servers.ServerType) -> None:
"""Sets the server type of this server"""
self.type = value

def is_managed(self) -> bool:
"""Returns if this server is managed or not"""
return self.server_type != types.servers.ServerType.UNMANAGED
@@ -296,15 +314,15 @@ class Server(UUIDModel, TaggingMixin, properties.PropertiesMixin):

def lock(self, duration: typing.Optional[datetime.timedelta]) -> None:
"""Locks this server for a duration

Args:
duration: Duration to lock the server. If None, it will be unlocked

Note:
If duration is None, the server will be unlocked
The lock time will be calculated from current time on sql server
"""

if duration is None:
self.locked_until = None
else:
@@ -245,7 +245,7 @@ class ServicePool(UUIDModel, TaggingMixin):
@property
def owned_by_meta(self) -> bool:
return self.memberOfMeta.count() > 0

@property
def uses_cache(self) -> bool:
return self.cache_l1_srvs > 0 or self.cache_l2_srvs > 0 or self.initial_srvs > 0
@@ -256,7 +256,7 @@ class ServicePool(UUIDModel, TaggingMixin):
if self.short_name and str(self.short_name).strip():
return str(self.short_name.strip())
return str(self.name)

def can_create_userservices(self) -> bool:
"""
If the service pool is in a state that allows to create user services
@@ -291,13 +291,12 @@ class ServicePool(UUIDModel, TaggingMixin):
return True

return False

def is_locked(self) -> bool:
"""
Returns true if the service pool is locked
"""
return self.state == types.states.State.LOCKED

def remaining_restraint_time(self) -> int:
from uds.core.util.config import GlobalConfig
@@ -327,7 +326,7 @@ class ServicePool(UUIDModel, TaggingMixin):

def is_usable(self) -> bool:
return (
self.state == types.states.State.ACTIVE
self.state in (types.states.State.ACTIVE, types.states.State.LOCKED)
and not self.is_in_maintenance()
and not self.is_restrained()
)
@@ -487,7 +486,7 @@ class ServicePool(UUIDModel, TaggingMixin):
Args:
active_publication: Active publication used as "current" publication to make checks
skip_assigned: If true, assigned services will not be marked as removable

"""
now = sql_now()
non_active_publication: 'ServicePoolPublication'
@@ -500,9 +499,9 @@ class ServicePool(UUIDModel, TaggingMixin):
for userservice in non_active_publication.userServices.filter(state=types.states.State.PREPARING):
userservice.cancel()
with transaction.atomic():
non_active_publication.userServices.exclude(cache_level=0).filter(state=types.states.State.USABLE).update(
state=types.states.State.REMOVABLE, state_date=now
)
non_active_publication.userServices.exclude(cache_level=0).filter(
state=types.states.State.USABLE
).update(state=types.states.State.REMOVABLE, state_date=now)
if not skip_assigned:
non_active_publication.userServices.filter(
cache_level=0, state=types.states.State.USABLE, in_use=False
@@ -572,7 +571,9 @@ class ServicePool(UUIDModel, TaggingMixin):
"""
from uds.core import services  # pylint: disable=import-outside-toplevel

services_not_needing_publication = [t.mod_type() for t in services.factory().services_not_needing_publication()]
services_not_needing_publication = [
t.mod_type() for t in services.factory().services_not_needing_publication()
]
# Get services that HAS publications
query = (
ServicePool.objects.filter(
@@ -37,6 +37,8 @@ import logging

from django.db import models

from uds.core.util.model import sql_stamp_seconds

from .stats_counters import StatsCounters

logger = logging.getLogger(__name__)
@@ -123,7 +125,7 @@ class StatsCountersAccum(models.Model):
) -> int:
"""Adjusts a timestamp to the given interval"""
if value == -1:
value = int(datetime.datetime.now().timestamp())
value = sql_stamp_seconds()
return value - (value % interval_type.seconds())

@staticmethod
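The adjustment above simply snaps the stamp down to the start of its interval; a small worked example with arbitrary values:

# Hedged illustration of the rounding done by the timestamp-adjustment helper above.
# 7384 seconds with a 3600-second (hourly) interval snaps to 7200.
value = 7384
interval_seconds = 3600
print(value - (value % interval_seconds))  # -> 7200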
@@ -261,7 +261,7 @@ class OpenStackLiveService(DynamicService):
caller_instance: typing.Optional['DynamicUserService | DynamicPublication'],
vmid: str,
*,
force_new: bool = False,
for_unique_id: bool = False,
) -> str:
net_info = self.api.get_server_info(vmid).validated().addresses
return '' if not net_info else net_info[0].mac
@@ -30,11 +30,10 @@
"""
Author: Adolfo Gómez, dkmaster at dkmon dot com
"""
import hashlib
import logging
import typing

from uds.core import services, types
from uds.core import consts, services, types
from uds.core.util import autoserializable

# Not imported at runtime, just for type checking
@@ -74,7 +73,7 @@ class IPMachinesUserService(services.UserService, autoserializable.AutoSerializa

def get_unique_id(self) -> str:
# Generate a 16 chars string mixing up all _vmid chars
return hashlib.shake_128(self._vmid.encode('utf8')).hexdigest(8)
return self._mac if self._mac and self._mac != consts.MAC_UNKNOWN else self._ip

def set_ready(self) -> types.states.TaskState:
self.service().wakeup(self._ip, self._mac)
@@ -141,6 +141,9 @@ class IPMachinesService(services.Service):

services_type_provided = types.services.ServiceType.VDI

def enumerate_servers(self) -> typing.Iterable['models.Server']:
return fields.get_server_group_from_field(self.server_group).servers.filter(maintenance_mode=False)

def get_token(self) -> typing.Optional[str]:
return self.token.as_str() or None

@@ -153,7 +156,7 @@ class IPMachinesService(services.Service):
now = sql_now()
return [
gui.choice_item(server.uuid, f'{server.host}|{server.mac}')
for server in fields.get_server_group_from_field(self.server_group).servers.all()
for server in self.enumerate_servers()
if server.locked_until is None or server.locked_until < now
]

@@ -175,14 +178,15 @@ class IPMachinesService(services.Service):
'''
Returns an unassigned machine
'''
list_of_servers = list(fields.get_server_group_from_field(self.server_group).servers.all())
# Get all servers in the group, not in maintenance mode
list_of_servers = list(self.enumerate_servers())
if self.randomize_host.as_bool() is True:
random.shuffle(list_of_servers)  # Reorder the list randomly if required

for server in list_of_servers:
# If not locked or lock expired
if server.locked_until is None or server.locked_until < sql_now():
# if port check enabled, check
# if port check enabled, check
if self.port.value != 0:
if not net.test_connectivity(server.host, self.port.value):
server.lock(datetime.timedelta(minutes=self.ignore_minutes_on_failure.value))
@@ -461,6 +461,10 @@ class ProxmoxClient:

@cached('hagrps', consts.CACHE_DURATION, key_helper=caching_key_helper)
def list_ha_groups(self, **kwargs: typing.Any) -> list[str]:
version = self.get_version()
# Version 9 does not have the security groups
if version[0] >= '9':
return []
return [g['group'] for g in self.do_get('cluster/ha/groups')['data']]

def enable_vm_ha(self, vmid: int, started: bool = False, group: typing.Optional[str] = None) -> None:
@@ -484,6 +488,10 @@ class ProxmoxClient:
+ ([('group', group)] if group else []),  # Append ha group if present
)

@cached('ha_resources', consts.CACHE_DURATION, key_helper=caching_key_helper)
def list_ha_resources(self, **kwargs: typing.Any) -> list[str]:
return [r['sid'] for r in self.do_get('cluster/ha/resources')['data']]

def disable_vm_ha(self, vmid: int) -> None:
try:
self.do_delete(f'cluster/ha/resources/vm%3A{vmid}')
@@ -58,7 +58,7 @@ class Node:
id: str

@staticmethod
def from_dict(dictionary: collections.abc.MutableMapping[str, typing.Any]) -> 'Node':
def from_dict(dictionary: dict[str, typing.Any]) -> 'Node':
return Node(
name=dictionary.get('name', ''),
online=dictionary.get('online', False),
@@ -277,10 +277,12 @@ class ProxmoxServiceLinked(DynamicService):
caller_instance: typing.Optional['DynamicUserService | DynamicPublication'],
vmid: str,
*,
force_new: bool = False,
for_unique_id: bool = False,
) -> str:
# If vmid is empty, we are requesting a new mac
if not vmid or force_new:
if not vmid or for_unique_id:
if isinstance(caller_instance, DynamicUserService):
return caller_instance.mac_generator().get(self.get_macs_range())
return self.mac_generator().get(self.get_macs_range())
return self.provider().api.get_vm_config(int(vmid)).networks[0].macaddr.lower()

@@ -61,7 +61,9 @@ class XenPublication(DynamicPublication, autoserializable.AutoSerializable):
"""
if not data.startswith(b'v'):
return super().unmarshal(data)

logger.debug('Upgrading XenPublication from old format: %s', data)

# logger.debug('Data: {0}'.format(data))
vals = data.decode('utf8').split('\t')
if vals[0] == 'v1':
@@ -94,6 +96,7 @@ class XenPublication(DynamicPublication, autoserializable.AutoSerializable):
)

self._task = self.service().start_deploy_of_template(self._name, comments)
logger.debug('Task created: %s', self._task)

def op_create_checker(self) -> types.states.TaskState:
"""
@@ -101,11 +104,14 @@ class XenPublication(DynamicPublication, autoserializable.AutoSerializable):
"""
with self.service().provider().get_connection() as api:
task_info = api.get_task_info(self._task)
logger.debug('Task info: %s', task_info)
if task_info.is_success():
logger.debug('Task finished successfully: %s', task_info.result)
self._vmid = task_info.result
self.service().convert_to_template(self._vmid)
return types.states.TaskState.FINISHED
elif task_info.is_failure():
logger.warning('Task failed: %s', task_info.result)
return self._error(task_info.result)

return types.states.TaskState.RUNNING
@@ -118,4 +124,5 @@ class XenPublication(DynamicPublication, autoserializable.AutoSerializable):
"""
Returns the template id associated with the publication
"""
logger.debug('Getting template id for publication: %s', self._vmid)
return self._vmid
@@ -37,6 +37,7 @@ import typing
from django.utils.translation import gettext_noop as _
from uds.core import exceptions, types
from uds.core.services.generics.dynamic.service import DynamicService
from uds.core.services.generics.dynamic.userservice import DynamicUserService
from uds.core.util import validators
from uds.core.ui import gui

@@ -49,7 +50,6 @@ from .xen import exceptions as xen_exceptions
if typing.TYPE_CHECKING:
from .provider import XenProvider
from uds.core.services.generics.dynamic.publication import DynamicPublication
from uds.core.services.generics.dynamic.userservice import DynamicUserService

logger = logging.getLogger(__name__)

@@ -324,10 +324,16 @@ class XenLinkedService(DynamicService):  # pylint: disable=too-many-public-metho
caller_instance: typing.Optional['DynamicUserService | DynamicPublication'],
vmid: str,
*,
force_new: bool = False,
for_unique_id: bool = False,
) -> str:
return self.mac_generator().get(self.provider().get_macs_range())
if not vmid or for_unique_id:
if isinstance(caller_instance, DynamicUserService):
return caller_instance.mac_generator().get(self.provider().get_macs_range())
return self.mac_generator().get(self.provider().get_macs_range())

with self.provider().get_connection() as api:
return api.get_first_mac(vmid)

def is_running(
self, caller_instance: typing.Optional['DynamicUserService | DynamicPublication'], vmid: str
) -> bool:
@@ -77,7 +77,7 @@ class TimeoutTransport(xmlrpc.client.Transport):


class XenClient:  # pylint: disable=too-many-public-methods
_originalHost: str
_original_host: str
_host: str
_host_backup: str
_port: str
@@ -104,7 +104,7 @@ class XenClient:  # pylint: disable=too-many-public-methods
verify_ssl: bool = False,
timeout: int = 10,
):
self._originalHost = self._host = host
self._original_host = self._host = host
self._host_backup = host_backup or ''
self._port = str(port)
self._use_ssl = bool(use_ssl)
@@ -198,11 +198,9 @@ class XenClient:  # pylint: disable=too-many-public-methods
context = security.create_client_sslcontext(verify=self._verify_ssl)
transport = SafeTimeoutTransport(context=context)
transport.set_timeout(self._timeout)
logger.debug('Transport: %s', transport)
else:
transport = TimeoutTransport()
transport.set_timeout(self._timeout)
logger.debug('Transport: %s', transport)

self._session = XenAPI.Session(self._url, transport=transport)
self._session.xenapi.login_with_password(
@@ -211,6 +209,11 @@ class XenClient:  # pylint: disable=too-many-public-methods
self._logged_in = True
self._api_version = self._session.API_version
self._pool_name = self.get_pool_name(force=True)
logger.debug('Connected to XenServer %s, API version %s, pool name: %s',
self._host,
self._api_version,
self._pool_name,
)
except (
XenAPI.Failure
) as e:  # XenAPI.Failure: ['HOST_IS_SLAVE', '172.27.0.29'] indicates that this host is an slave of 172.27.0.29, connect to it...
@@ -257,7 +260,9 @@ class XenClient:  # pylint: disable=too-many-public-methods
with exceptions.translator():
task_info = xen_types.TaskInfo.from_dict(self.task.get_record(task_opaque_ref), task_opaque_ref)
except exceptions.XenNotFoundError:
task_info = xen_types.TaskInfo.unknown_task(task_opaque_ref)
logger.warning('Task %s not found, returning unknown task info', task_opaque_ref)
# task_info = xen_types.TaskInfo.unknown_task(task_opaque_ref)
raise  # Re-raise the exception to handle it outside

return task_info

@@ -269,7 +274,8 @@ class XenClient:  # pylint: disable=too-many-public-methods
sr = xen_types.StorageInfo.from_dict(sr_raw, sr_id)
if sr.is_usable():
return_list.append(sr)

logger.debug('Srs: %s', return_list)
return return_list

@cached(prefix='xen_sr', timeout=consts.cache.SHORT_CACHE_TIMEOUT, key_helper=cache_key_helper)
@@ -286,6 +292,7 @@ class XenClient:  # pylint: disable=too-many-public-methods
if netinfo.is_host_internal_management_network is False:
return_list.append(netinfo)

logger.debug('Networks: %s', return_list)
return return_list

@cached(prefix='xen_net', timeout=consts.cache.SHORT_CACHE_TIMEOUT, key_helper=cache_key_helper)
@@ -298,16 +305,13 @@ class XenClient:  # pylint: disable=too-many-public-methods
def list_vms(self) -> list[xen_types.VMInfo]:
return_list: list[xen_types.VMInfo] = []

try:
for vm_id, vm_raw in typing.cast(dict[str, typing.Any], self.VM.get_all_records()).items():
vm = xen_types.VMInfo.from_dict(vm_raw, vm_id)
if vm.is_usable():
return_list.append(vm)
return return_list
except XenAPI.Failure as e:
raise exceptions.XenFailure(typing.cast(typing.Any, e.details))
except Exception as e:
raise exceptions.XenException(str(e))
for vm_id, vm_raw in typing.cast(dict[str, typing.Any], self.VM.get_all_records()).items():
vm = xen_types.VMInfo.from_dict(vm_raw, vm_id)
if vm.is_usable():
return_list.append(vm)

logger.debug('VMs: %s', return_list)
return return_list

@cached(prefix='xen_vm', timeout=consts.cache.SHORT_CACHE_TIMEOUT, key_helper=cache_key_helper)
@exceptions.catched
@@ -440,22 +444,19 @@ class XenClient:  # pylint: disable=too-many-public-methods
operations = self.VM.get_allowed_operations(vm_opaque_ref)
logger.debug('Allowed operations: %s', operations)

try:
if target_sr:
if 'copy' not in operations:
raise exceptions.XenFatalError(
'Copy is not supported for this machine (maybe it\'s powered on?)'
)
task = self.Async.VM.copy(vm_opaque_ref, target_name, target_sr)
else:
if 'clone' not in operations:
raise exceptions.XenFatalError(
'Clone is not supported for this machine (maybe it\'s powered on?)'
)
task = self.Async.VM.clone(vm_opaque_ref, target_name)
return task
except XenAPI.Failure as e:
raise exceptions.XenFailure(typing.cast(typing.Any, e.details))
if target_sr:
if 'copy' not in operations:
raise exceptions.XenFatalError(
'Copy is not supported for this machine (maybe it\'s powered on?)'
)
task = self.Async.VM.copy(vm_opaque_ref, target_name, target_sr)
else:
if 'clone' not in operations:
raise exceptions.XenFatalError(
'Clone is not supported for this machine (maybe it\'s powered on?)'
)
task = self.Async.VM.clone(vm_opaque_ref, target_name)
return task

@exceptions.catched
def delete_vm(self, vm_opaque_ref: str) -> None:
@@ -536,12 +537,14 @@ class XenClient:  # pylint: disable=too-many-public-methods
Returns:
A list of 'folders' (organizations, str) in the XenServer
"""
folders: set[str] = set('/')  # Add root folder for machines without folder
folders: set[str]|list[str] = set('/')  # Add root folder for machines without folder
for vm in self.list_vms():
if vm.folder:
folders.add(vm.folder)

return sorted(folders)
folders = sorted(folders)
logger.debug('Folders: %s', folders)
return folders

@exceptions.catched
def list_vms_in_folder(self, folder: str) -> list[xen_types.VMInfo]:
@@ -559,6 +562,8 @@ class XenClient:  # pylint: disable=too-many-public-methods
for vm in self.list_vms():
if vm.folder.upper() == folder:
result_list.append(vm)

logger.debug('VMs in folder %s: %s', folder, result_list)
return result_list

@exceptions.catched
@@ -594,6 +599,8 @@ class XenClient:  # pylint: disable=too-many-public-methods
if not vifs:
return ''
vif = self.VIF.get_record(vifs[0])

logger.info('MAC: %s', vif['MAC'])
return vif['MAC']

@exceptions.catched
@@ -368,8 +368,6 @@ class VMInfo:

other_config = typing.cast(dict[str, str], data.get('other_config', {}))

logger.debug('data: %s', data)

return VMInfo(
opaque_ref=opaque_ref,
uuid=data['uuid'],

File diff suppressed because one or more lines are too long
@@ -225,8 +225,8 @@ gettext("Delete servers token - USE WITH EXTREME CAUTION!!!");
gettext("Delete image");
gettext("Error");
gettext("Image is too big (max. upload size is 256Kb)");
gettext("Error");
gettext("Invalid image type (only supports JPEG, PNG and GIF");
gettext("Error on upload");
gettext("Invalid image type. Supported types are: JPEG, PNG and GIF");
gettext("Error");
gettext("Please, provide a name and a image");
gettext("Successfully saved");
@@ -102,6 +102,6 @@
</svg>
</div>
</uds-root>
<link rel="modulepreload" href="/uds/res/admin/chunk-2F3F2YC2.js?stamp=1744125188" integrity="sha384-VVOra5xy5Xg9fYkBmK9MLhX7vif/MexRAaLIDBsQ4ZlkF31s/U6uWWrj+LAnvX/q"><script src="/uds/res/admin/polyfills.js?stamp=1744125188" type="module" crossorigin="anonymous" integrity="sha384-TVRkn44wOGJBeCKWJBHWLvXubZ+Julj/yA0OoEFa3LgJHVHaPeeATX6NcjuNgsIA"></script><script src="/uds/res/admin/main.js?stamp=1744125188" type="module" crossorigin="anonymous" integrity="sha384-a/msVaW+uwy7v/7wvgIPvj6xxA9qULgtFz6r+0wbGFc3x6tMc8cRBcPjW4OKt8IF"></script></body>
<link rel="modulepreload" href="/uds/res/admin/chunk-2F3F2YC2.js?stamp=1758035900" integrity="sha384-VVOra5xy5Xg9fYkBmK9MLhX7vif/MexRAaLIDBsQ4ZlkF31s/U6uWWrj+LAnvX/q"><script src="/uds/res/admin/polyfills.js?stamp=1758035900" type="module" crossorigin="anonymous" integrity="sha384-TVRkn44wOGJBeCKWJBHWLvXubZ+Julj/yA0OoEFa3LgJHVHaPeeATX6NcjuNgsIA"></script><script src="/uds/res/admin/main.js?stamp=1758035900" type="module" crossorigin="anonymous" integrity="sha384-3lZTkxMTxP/KgMLbdC/mjFUbq3YXJmMoJbgPH6az7gjQiWFJvikEtKXNxd9gQYI7"></script></body>

</html>
@@ -361,7 +361,7 @@ class HTML5RDPTransport(transports.Transport):
if self.forced_username.value:
username = self.forced_username.value

proc = username.split('@')
proc = username.split('@', 1)
if len(proc) > 1:
domain = proc[1]
else:
@@ -369,11 +369,12 @@ class HTML5RDPTransport(transports.Transport):
username = proc[0]

for_azure = False
if self.forced_domain.value != '':
if self.forced_domain.value.lower() == 'azuread':
forced_domain = self.forced_domain.value.strip().lower()
if forced_domain:
if forced_domain == 'azuread':
for_azure = True
else:
domain = self.forced_domain.value
domain = forced_domain

if self.force_empty_creds.as_bool():
username, password, domain = '', '', ''
@@ -424,7 +425,7 @@ class HTML5RDPTransport(transports.Transport):
return 'true' if txt else 'false'

# Build params dict
params = {
params: dict[str, typing.Any] = {
'protocol': 'rdp',
'hostname': ip,
'port': self.rdp_port.as_int(),
@@ -98,6 +98,9 @@ class RDPTransport(BaseRDPTransport):
mac_custom_parameters = BaseRDPTransport.mac_custom_parameters
wnd_custom_parameters = BaseRDPTransport.wnd_custom_parameters

lnx_use_rdp_file = BaseRDPTransport.lnx_use_rdp_file
mac_use_rdp_file = BaseRDPTransport.mac_use_rdp_file

def get_transport_script(  # pylint: disable=too-many-locals
self,
userservice: 'models.UserService',
@@ -165,20 +168,27 @@ class RDPTransport(BaseRDPTransport):
}
)
elif os.os == types.os.KnownOS.LINUX:
r.custom_parameters = self.lnx_custom_parameters.value
if self.lnx_use_rdp_file.as_bool():
r.custom_parameters = self.wnd_custom_parameters.value
else:
r.custom_parameters = self.lnx_custom_parameters.value
sp.update(
{
'as_new_xfreerdp_params': r.as_new_xfreerdp_params,
'address': r.address,
'as_file': r.as_file if self.lnx_use_rdp_file.as_bool() else '',
}
)
elif os.os == types.os.KnownOS.MAC_OS:
r.custom_parameters = self.mac_custom_parameters.value
if self.mac_use_rdp_file.as_bool():
r.custom_parameters = self.wnd_custom_parameters.value
else:
r.custom_parameters = self.mac_custom_parameters.value
sp.update(
{
'as_new_xfreerdp_params': r.as_new_xfreerdp_params,
'as_rdp_url': r.as_rdp_url if self.mac_allow_msrdc.as_bool() else '',
'as_file': r.as_file if self.mac_allow_msrdc.as_bool() else '',
'as_file': r.as_file if self.mac_use_rdp_file.as_bool() else '',
'address': r.address,
}
)
@@ -297,6 +297,14 @@ class BaseRDPTransport(transports.Transport):
tab='Linux Client',
old_field_name='alsa',
)
lnx_use_rdp_file = gui.CheckBoxField(
label=_('Use RDP file for connections'),
order=42,
tooltip=_('If marked, an RDP file will be used for connections with Thincast or xfreerdp on Linux.'),
tab='Linux Client',
default=True,
old_field_name='lnx_thincastRdpFile',
)
lnx_printer_string = gui.TextField(
label=_('Printer string'),
order=43,
@@ -333,9 +341,18 @@ class BaseRDPTransport(transports.Transport):
old_field_name='allowMacMSRDC',
)

mac_use_rdp_file = gui.CheckBoxField(
label=_('Use RDP file for connections'),
order=51,
tooltip=_('If marked, an RDP file will be used for connections with Thincast or xfreerdp on Mac OS X.'),
tab='Mac OS X',
default=True,
old_field_name='mac_thincastRdpFile',
)

mac_custom_parameters = gui.TextField(
label=_('Custom parameters'),
order=51,
order=52,
tooltip=_(
'If not empty, extra parameter to include for Mac OS X Freerdp Client (for example /usb:id,dev:054c:0268, or aything compatible with your xfreerdp client)'
),
@@ -386,46 +403,40 @@ class BaseRDPTransport(transports.Transport):
if self.forced_username.value:
username = self.forced_username.value

proc = username.split('@')
domain: str = ''
proc = username.split('@', 1)
if len(proc) > 1:
domain = proc[1]
else:
domain = ''  # Default domain is empty
username = proc[0]

if self.forced_password.value:
password = self.forced_password.value

azure_ad = False
if self.forced_domain.value != '':
if self.forced_domain.value.lower() == 'azuread':
azure_ad = True
for_azure = False
forced_domain = self.forced_domain.value.strip().lower()
if forced_domain:  # If has forced domain
if forced_domain == 'azuread':
for_azure = True
else:
domain = self.forced_domain.value
domain = forced_domain

if self.force_empty_creds.as_bool():
username, password, domain = '', '', ''

if self.force_no_domain.as_bool():
domain = ''

if domain:  # If has domain
if '.' in domain:  # Dotter domain form
username = username + '@' + domain
domain = ''
else:  # In case of a NETBIOS domain (not recomended), join it so process_user_password can deal with it
username = domain + '\\' + username
domain = ''
if '.' in domain:  # Dotter domain form
username = username + '@' + domain
domain = ''

if for_azure:
username = 'AzureAD\\' + username  # AzureAD domain form

# Fix username/password acording to os manager
username, password = userservice.process_user_password(username, password)

# Recover domain name if needed
if '\\' in username:
domain, username = username.split('\\')

# If AzureAD, include it on username
if azure_ad:
username = 'AzureAD\\' + username

return types.connections.ConnectionData(
protocol=self.protocol,
username=username,
@@ -172,20 +172,17 @@ class RDPFile:

# RDP Security is A MUST if no username nor password is provided
# NLA requires USERNAME&PASSWORD previously
force_rdp_security = False
if self.username != '':
params.append('/u:{}'.format(self.username))
else:
force_rdp_security = True

if self.password:
params.append('/p:{}'.format(self.password))
else:
force_rdp_security = True

if self.domain != '':
params.append('/d:{}'.format(self.domain))

if force_rdp_security:
params.append('/sec:rdp')
if (self.username == '' and self.password == '') and not '/sec' in params:
params.append('/sec:tls')  # Use TLS security if no credentials are provided

if self.connection_bar and '/floatbar' not in params:
params.append('/floatbar:sticky:off')
@@ -298,8 +295,8 @@ class RDPFile:
# Camera?
# res += 'camerastoredirect:s:*\n'

# If target is windows, add customParameters
if self.target == types.os.KnownOS.WINDOWS:
# If target is windows or linux or macOS, add customParameters
if self.target == types.os.KnownOS.WINDOWS or self.target == types.os.KnownOS.LINUX or self.target == types.os.KnownOS.MAC_OS:
if self.custom_parameters and self.custom_parameters.strip() != '':
res += self.custom_parameters.strip() + '\n'

@@ -114,6 +114,9 @@ class TRDPTransport(BaseRDPTransport):
lnx_custom_parameters = BaseRDPTransport.lnx_custom_parameters
mac_custom_parameters = BaseRDPTransport.mac_custom_parameters
wnd_custom_parameters = BaseRDPTransport.wnd_custom_parameters

lnx_use_rdp_file = BaseRDPTransport.lnx_use_rdp_file
mac_use_rdp_file = BaseRDPTransport.mac_use_rdp_file
# optimizeTeams = BaseRDPTransport.optimizeTeams

def initialize(self, values: 'types.core.ValuesType') -> None:
@@ -201,18 +204,25 @@ class TRDPTransport(BaseRDPTransport):
}
)
elif os.os == types.os.KnownOS.LINUX:
r.custom_parameters = self.lnx_custom_parameters.value
if self.lnx_use_rdp_file.as_bool():
r.custom_parameters = self.wnd_custom_parameters.value
else:
r.custom_parameters = self.lnx_custom_parameters.value
sp.update(
{
'as_new_xfreerdp_params': r.as_new_xfreerdp_params,
'as_file': r.as_file if self.lnx_use_rdp_file.as_bool() else '',
}
)
elif os.os == types.os.KnownOS.MAC_OS:
r.custom_parameters = self.mac_custom_parameters.value
if self.mac_use_rdp_file.as_bool():
r.custom_parameters = self.wnd_custom_parameters.value
else:
r.custom_parameters = self.mac_custom_parameters.value
sp.update(
{
'as_new_xfreerdp_params': r.as_new_xfreerdp_params,
'as_file': r.as_file if self.mac_allow_msrdc.as_bool() else '',
'as_file': r.as_file if self.mac_use_rdp_file.as_bool() else '',
'as_rdp_url': r.as_rdp_url if self.mac_allow_msrdc.as_bool() else '',
}
)
@@ -1,4 +1,12 @@
|
||||
import typing
|
||||
import logging
|
||||
import subprocess
|
||||
import os.path
|
||||
import shutil
|
||||
import os
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# On older client versions, need importing globally to allow inner functions to work
|
||||
import subprocess # type: ignore
|
||||
@@ -14,38 +22,105 @@ if 'sp' not in globals():
|
||||
globals()['sp'] = sp # type: ignore # pylint: disable=undefined-variable
|
||||
|
||||
|
||||
def exec_udsrdp(udsrdp: str) -> None:
|
||||
import subprocess
|
||||
import os.path
|
||||
def _prepare_rdp_file(theFile: str, extension: str = '.rdp') -> str:
|
||||
"""Save RDP file to user's home directory with the given extension and return its path."""
|
||||
filename = tools.saveTempFile(theFile)
|
||||
home_dir = os.path.expanduser("~")
|
||||
base_name = os.path.basename(filename)
|
||||
dest_filename = os.path.join(home_dir, base_name + extension)
|
||||
temp_rdp_filename = filename + extension
|
||||
logger.debug(f'Renaming temp file {filename} to {temp_rdp_filename}')
|
||||
os.rename(filename, temp_rdp_filename)
|
||||
logger.debug(f'Moving temp file {temp_rdp_filename} to {dest_filename}')
|
||||
shutil.move(temp_rdp_filename, dest_filename)
|
||||
logger.debug(f'RDP file content (forced): {theFile}')
|
||||
return dest_filename
|
||||
|
||||
params: typing.List[str] = [os.path.expandvars(i) for i in [udsrdp] + sp['as_new_xfreerdp_params'] + ['/v:{}'.format(sp['address'])]] # type: ignore
|
||||
def _exec_client_with_params(executable: str, params: typing.List[str], unlink_file: typing.Optional[str] = None) -> None:
|
||||
logger.info(f'Executing {executable} with params: {params}')
|
||||
tools.addTaskToWait(subprocess.Popen(params))
|
||||
if unlink_file:
|
||||
tools.addFileToUnlink(unlink_file)
|
||||
|
||||
def exec_udsrdp(udsrdp: str) -> None:
|
||||
params = [os.path.expandvars(i) for i in [udsrdp] + sp['as_new_xfreerdp_params'] + [f'/v:{sp["address"]}']] # type: ignore
|
||||
_exec_client_with_params(udsrdp, params)
|
||||
|
||||
def exec_new_xfreerdp(xfreerdp: str) -> None:
|
||||
import subprocess # @Reimport
|
||||
import os.path
|
||||
if sp.get('as_file', ''): # type: ignore
|
||||
dest_filename = _prepare_rdp_file(sp['as_file'], '.uds.rdp') # type: ignore
|
||||
params = [xfreerdp, dest_filename, f'/p:{sp.get("password", "")}'] # type: ignore
|
||||
_exec_client_with_params(xfreerdp, params, unlink_file=dest_filename)
|
||||
else:
|
||||
params = [os.path.expandvars(i) for i in [xfreerdp] + sp['as_new_xfreerdp_params'] + [f'/v:{sp["address"]}']] # type: ignore
|
||||
_exec_client_with_params(xfreerdp, params)
|
||||
|
||||
params: typing.List[str] = [os.path.expandvars(i) for i in [xfreerdp] + sp['as_new_xfreerdp_params'] + ['/v:{}'.format(sp['address'])]] # type: ignore
|
||||
tools.addTaskToWait(subprocess.Popen(params))
|
||||
def exec_thincast(thincast: str) -> None:
|
||||
if sp.get('as_file', ''): # type: ignore
|
||||
dest_filename = _prepare_rdp_file(sp['as_file'], '.rdp') # type: ignore
|
||||
params = [thincast, dest_filename, f'/p:{sp.get("password", "")}'] # type: ignore
|
||||
_exec_client_with_params(thincast, params, unlink_file=dest_filename)
|
||||
else:
|
||||
params = [os.path.expandvars(i) for i in [thincast] + sp['as_new_xfreerdp_params'] + [f'/v:{sp["address"]}']] # type: ignore
|
||||
_exec_client_with_params(thincast, params)
|
||||
|
||||
# Typical Thincast Routes on Linux
|
||||
thincast_list = [
|
||||
'/usr/bin/thincast-remote-desktop-client',
|
||||
'/usr/bin/thincast',
|
||||
'/opt/thincast/thincast-remote-desktop-client',
|
||||
'/opt/thincast/thincast',
|
||||
'/snap/bin/thincast-remote-desktop-client',
|
||||
'/snap/bin/thincast',
|
||||
'/snap/bin/thincast-client'
|
||||
]
|
||||
|
||||
# Try to locate a xfreerdp and udsrdp. udsrdp will be used if found.
|
||||
xfreerdp: typing.Optional[str] = tools.findApp('xfreerdp3') or tools.findApp('xfreerdp') or tools.findApp('xfreerdp2')
|
||||
udsrdp: typing.Optional[str] = tools.findApp('udsrdp')
|
||||
fnc, app = None, None
|
||||
# Search Thincast first
|
||||
executable = None
|
||||
kind = ''
|
||||
for thincast in thincast_list:
|
||||
if os.path.isfile(thincast) and os.access(thincast, os.X_OK):
|
||||
executable = thincast
|
||||
kind = 'thincast'
|
||||
break
|
||||
|
||||
if xfreerdp:
|
||||
fnc, app = exec_new_xfreerdp, xfreerdp
|
||||
# If Thincast is not found, fall back to UDSRDP and XFREERDP
|
||||
if not executable:
|
||||
udsrdp: typing.Optional[str] = tools.findApp('udsrdp')
|
||||
xfreerdp: typing.Optional[str] = tools.findApp('xfreerdp3') or tools.findApp('xfreerdp') or tools.findApp('xfreerdp2')
|
||||
if udsrdp:
|
||||
executable = udsrdp
|
||||
kind = 'udsrdp'
|
||||
elif xfreerdp:
|
||||
executable = xfreerdp
|
||||
kind = 'xfreerdp'
|
||||
|
||||
if udsrdp is not None:
|
||||
fnc, app = exec_udsrdp, udsrdp
|
||||
|
||||
if app is None or fnc is None:
|
||||
if not executable:
|
||||
raise Exception(
|
||||
'''<p>You need to have xfreerdp (>= 2.0) installed on your systeam, and have it your PATH in order to connect to this UDS service.</p>
|
||||
'''<p>You need to have Thincast Remote Desktop Client or xfreerdp (>= 2.0) installed on your system, and have it in your PATH in order to connect to this UDS service.</p>
|
||||
<p>Please, install the proper package for your system.</p>
|
||||
<ul>
|
||||
<li>Thincast: <a href="https://thincast.com/en/products/client">Download</a></li>
|
||||
<li>xfreerdp: <a href="https://github.com/FreeRDP/FreeRDP">Download</a></li>
|
||||
</ul>
|
||||
'''
|
||||
)
|
||||
else:
|
||||
logger.debug(f'RDP client found: {executable} of kind {kind}')
|
||||
|
||||
fnc(app)
|
||||
# Execute the client found
|
||||
if kind == 'thincast':
|
||||
if isinstance(executable, str):
|
||||
exec_thincast(executable)
|
||||
else:
|
||||
raise TypeError("Executable must be a string for exec_thincast")
|
||||
elif kind == 'udsrdp':
|
||||
if isinstance(executable, str):
|
||||
exec_udsrdp(executable)
|
||||
else:
|
||||
raise TypeError("Executable must be a string for exec_udsrdp")
|
||||
elif kind == 'xfreerdp':
|
||||
if isinstance(executable, str):
|
||||
exec_new_xfreerdp(executable)
|
||||
else:
|
||||
raise TypeError("Executable must be a string for exec_new_xfreerdp")
|
||||
|
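The Linux client script above first probes a fixed list of Thincast install paths, then falls back to udsrdp and finally to any xfreerdp variant, and dispatches to the matching exec_* helper. A compact, hypothetical sketch of that discovery order using only the standard library (find_rdp_client is an illustrative name; the real script uses tools.findApp and the thincast_list shown above):

import os
import shutil
from typing import Optional

THINCAST_PATHS = ['/usr/bin/thincast', '/opt/thincast/thincast', '/snap/bin/thincast']

def find_rdp_client() -> Optional[tuple[str, str]]:
    # Prefer Thincast when present, then udsrdp, then any xfreerdp variant
    for path in THINCAST_PATHS:
        if os.path.isfile(path) and os.access(path, os.X_OK):
            return 'thincast', path
    for name in ('udsrdp', 'xfreerdp3', 'xfreerdp', 'xfreerdp2'):
        found = shutil.which(name)
        if found:
            return ('udsrdp' if name == 'udsrdp' else 'xfreerdp'), found
    return None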
@@ -1 +1 @@
|
||||
fGtGXYFIwNgr7B2h23tZSTRTZZzuUjrRqphmqgpRAS+hQ3FKqZJIoNIO7qxHh2ibA9BUyMHN21mjQvtVvAnv7ic0HfYPfJQPGro/yAJooMIuZPvqZS6e1hOBdd50Z3FKuqHMyHvQZhMu9tdoE06gyArwcSE++PZoT8dptOhwm5ogSCf2yfPA+bPxm9ACC3OmHTvKjZExFlnWLec/idASdGBxWnqHoWrXpBR3N8V/CMS4/QZZ3I+e+hJ8I2Sz2hINH0X2TIVVr2CTe3j4TzkxkCDAC5JrRmgj35vkiaOHKpW6drRopLiOxE2DC4mshL0wwUw0wHExeP2W03tobSZpK8bRmNBe8s7bjUlQ7df2V0dB8W/G10ez3rIJnUGzeXhOjUy/f3T0KFP0wAJzQo1LNjTFfc3XEZq+IyqyIHjxDiN9yG/rsZP8vAZHc/kNCwNtuVjRE1K1hmUckVa5RD1TtATY8c5h2JkIL2pcPgFJvJh9s2CbK2VCfF2U3VUJu0icDqRREqY542/y84aZW7Pz+kIicBI6blwpCA6DxgCJ3fs/pgQYqueF+cJ7UteBEUaDOyvxvttYr02xio/izs7vRJsL0Gpve5WUHIl1+9QTuoDRi8w6l92AtrnNPN9QooOuS6VbrTW9up0nkHkUz3zm7QuXulennCFgO8je81FhxOk=
|
||||
Y7IpJNU/ne7JUqmOqNPu5gPlCWRDjK6nYKK69K6yxjEu7PT6g1ZkGlrNS2QeQY9bvOuxTlWF8nF+inbQTUQOte6rIX8kSZUJFcOKbxxcWDJiJvPArE5iKCJPpPPHFFH97MVVDYuYT4C5XvR7bDQNVlmxB6oUGvO/71mC4D3gxau1FA24fISRio3aCUVWFjEBlt55PyVhg9qn4r+jBP4bwBVW1NmmjONsIrf6+pvI3gIspYIwzBg9Nflv67aKY7tLdylkcQ7lHx2pY20onaDuT+WGRlQdqc4K0kAzZyTO9FEe/SGe0Nsi0BPMBK1LjFtNJ0KQABLWoTZWSXgloY/2JQvoOC1Z57K4g9R8Nq2BDNpCWADz2nP713zt30dK4Emg6Uh3lkew4zF98atY9Mb5bdNmzQp3dCv9xGAGPm66eIqQUEKDt7zone6j6r81c8jPbl7m/9f/iul8d3a4bPEv5DMHTL23YdFSc6AHDp32ngaB4qLxgtDCvQhnkrayqiYHOlYnS1A/TBS7c2K/Od0nZvuxZKUuDiC7RlSxEAo/EdmWSbeA01cYeh7v0lkdkQaOMGI45DPjLaCwuhgb85cbHcFEDo1CpZSYhyhqApx15/0Jt/CIuFM3FG0cf4B6/6T7Ll7IqGCeTl6nuF2N3ux1ND7UAMBpqjWAlg9/Y7T+aq4=
|
@@ -1,4 +1,11 @@
|
||||
import typing
|
||||
import shutil
|
||||
import os
|
||||
import logging
|
||||
import subprocess
|
||||
import os.path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# On older client versions, need importing globally to allow inner functions to work
|
||||
import subprocess # type: ignore
|
||||
@@ -20,40 +27,75 @@ if 'sp' not in globals():
|
||||
# Inject local passed sp into globals for inner functions if not already there
|
||||
globals()['sp'] = sp # type: ignore # pylint: disable=undefined-variable
|
||||
|
||||
def _prepare_rdp_file(theFile: str, port: int, extension: str = '.rdp') -> str:
|
||||
"""Save RDP file to user's home directory with the given extension and return its path."""
|
||||
# Replace the address in the RDP file with 127.0.0.1:{port}
|
||||
# Replace any line starting with "full address:s:" with the desired value
|
||||
theFile = theFile.format(
|
||||
address='127.0.0.1:{}'.format(port)
|
||||
)
|
||||
logger.info(f'Preparing RDP file with address 127.0.0.1:{port}')
|
||||
logger.debug(f'RDP file content (forced): {theFile}')
|
||||
filename = tools.saveTempFile(theFile)
|
||||
home_dir = os.path.expanduser("~")
|
||||
base_name = os.path.basename(filename)
|
||||
dest_filename = os.path.join(home_dir, base_name + extension)
|
||||
temp_rdp_filename = filename + extension
|
||||
logger.debug(f'Renaming temp file {filename} to {temp_rdp_filename}')
|
||||
os.rename(filename, temp_rdp_filename)
|
||||
logger.debug(f'Moving temp file {temp_rdp_filename} to {dest_filename}')
|
||||
shutil.move(temp_rdp_filename, dest_filename)
|
||||
logger.debug(f'RDP file content (forced): {theFile}')
|
||||
return dest_filename
|
||||
|
||||
def _exec_client_with_params(executable: str, params: typing.List[str], unlink_file: typing.Optional[str] = None) -> None:
|
||||
logger.info(f'Executing {executable} with params: {params}')
|
||||
tools.addTaskToWait(subprocess.Popen(params))
|
||||
if unlink_file:
|
||||
tools.addFileToUnlink(unlink_file)
|
||||
|
||||
def exec_udsrdp(udsrdp: str, port: int) -> None:
|
||||
import subprocess # @Reimport
|
||||
import os.path
|
||||
|
||||
params: typing.List[str] = [os.path.expandvars(i) for i in [udsrdp] + sp['as_new_xfreerdp_params'] + ['/v:127.0.0.1:{}'.format(port)]] # type: ignore
|
||||
tools.addTaskToWait(subprocess.Popen(params))
|
||||
|
||||
logger.debug('UDSRDP client will use command line parameters')
|
||||
params: typing.List[str] = [os.path.expandvars(i) for i in [app] + sp['as_new_xfreerdp_params'] + [f'/v:127.0.0.1:{port}']] # type: ignore
|
||||
_exec_client_with_params(udsrdp, params)
|
||||
|
||||
def exec_new_xfreerdp(xfreerdp: str, port: int) -> None:
|
||||
import subprocess # @Reimport
|
||||
import os.path
|
||||
if sp.get('as_file', ''): # type: ignore
|
||||
logger.debug('XFREERDP client will use RDP file')
|
||||
dest_filename = _prepare_rdp_file(sp['as_file'], port, '.rdp') # type: ignore
|
||||
params = [xfreerdp, dest_filename, f'/p:{sp.get("password", "")}'] # type: ignore
|
||||
_exec_client_with_params(xfreerdp, params, unlink_file=dest_filename)
|
||||
else:
|
||||
logger.debug('XFREERDP client will use command line parameters')
|
||||
params: typing.List[str] = [os.path.expandvars(i) for i in [app] + sp['as_new_xfreerdp_params'] + [f'/v:127.0.0.1:{port}']] # type: ignore
|
||||
_exec_client_with_params(xfreerdp, params)
|
||||
|
||||
params: typing.List[str] = [os.path.expandvars(i) for i in [xfreerdp] + sp['as_new_xfreerdp_params'] + ['/v:127.0.0.1:{}'.format(port)]] # type: ignore
|
||||
tools.addTaskToWait(subprocess.Popen(params))
|
||||
def exec_thincast(thincast: str, port: int) -> None:
|
||||
if sp.get('as_file', ''): # type: ignore
|
||||
logger.debug('Thincast client will use RDP file')
|
||||
dest_filename = _prepare_rdp_file(sp['as_file'], port, '.rdp') # type: ignore
|
||||
params = [thincast, dest_filename, f'/p:{sp.get("password", "")}'] # type: ignore
|
||||
_exec_client_with_params(thincast, params, unlink_file=dest_filename)
|
||||
else:
|
||||
logger.debug('Thincast client will use command line parameters')
|
||||
params: typing.List[str] = [os.path.expandvars(i) for i in [app] + sp['as_new_xfreerdp_params'] + [f'/v:127.0.0.1:{port}']] # type: ignore
|
||||
_exec_client_with_params(thincast, params)
|
||||
|
||||
|
||||
# Try to locate a xfreerdp and udsrdp. udsrdp will be used if found.
|
||||
xfreerdp: typing.Optional[str] = tools.findApp('xfreerdp3') or tools.findApp('xfreerdp') or tools.findApp('xfreerdp2')
|
||||
udsrdp = tools.findApp('udsrdp')
|
||||
fnc, app = None, None
|
||||
|
||||
if xfreerdp:
|
||||
fnc, app = exec_new_xfreerdp, xfreerdp
|
||||
|
||||
if udsrdp:
|
||||
fnc, app = exec_udsrdp, udsrdp
|
||||
|
||||
if app is None or fnc is None:
|
||||
raise Exception(
|
||||
'''<p>You need to have xfreerdp (>= 2.0) installed on your systeam, and have it your PATH in order to connect to this UDS service.</p>
|
||||
<p>Please, install the proper package for your system.</p>
|
||||
'''
|
||||
)
|
||||
# Add Thincast support
|
||||
thincast_list = [
|
||||
'/usr/bin/thincast-remote-desktop-client',
|
||||
'/usr/bin/thincast',
|
||||
'/opt/thincast/thincast-remote-desktop-client',
|
||||
'/opt/thincast/thincast',
|
||||
'/snap/bin/thincast-remote-desktop-client',
|
||||
'/snap/bin/thincast',
|
||||
'/snap/bin/thincast-client'
|
||||
]
|
||||
thincast_executable = None
|
||||
for thincast in thincast_list:
|
||||
if os.path.isfile(thincast) and os.access(thincast, os.X_OK):
|
||||
thincast_executable = thincast
|
||||
break
|
||||
|
||||
# Open tunnel and connect
|
||||
fs = forward(remote=(sp['tunHost'], int(sp['tunPort'])), ticket=sp['ticket'], timeout=sp['tunWait'], check_certificate=sp['tunChk']) # type: ignore
|
||||
@@ -64,4 +106,29 @@ if fs.check() is False:
|
||||
'<p>Could not connect to tunnel server.</p><p>Please, check your network settings.</p>'
|
||||
)
|
||||
|
||||
fnc(app, fs.server_address[1])
|
||||
# If thincast exists, use it. If not, continue with UDSRDP/XFREERDP as before
|
||||
if thincast_executable:
|
||||
logger.debug('Thincast client found, using it')
|
||||
#logging.debug(f'RDP file params: {sp.get("as_file", "")}')
|
||||
fnc, app = exec_thincast, thincast_executable
|
||||
else:
|
||||
xfreerdp: typing.Optional[str] = tools.findApp('xfreerdp3') or tools.findApp('xfreerdp') or tools.findApp('xfreerdp2')
|
||||
udsrdp = tools.findApp('udsrdp')
|
||||
fnc, app = None, None
|
||||
if xfreerdp:
|
||||
fnc, app = exec_new_xfreerdp, xfreerdp
|
||||
if udsrdp:
|
||||
fnc, app = exec_udsrdp, udsrdp
|
||||
if app is None or fnc is None:
|
||||
raise Exception(
|
||||
'''<p>You need to have Thincast Remote Desktop Client or xfreerdp (>= 2.0) installed on your system, and have it in your PATH in order to connect to this UDS service.</p>
|
||||
<p>Please install the right package for your system.</p>
|
||||
<ul>
|
||||
<li>Thincast: <a href="https://thincast.com/en/products/client">Download</a></li>
|
||||
<li>xfreerdp: <a href="https://github.com/FreeRDP/FreeRDP">Download</a></li>
|
||||
</ul>
|
||||
'''
|
||||
)
|
||||
|
||||
if fnc is not None and app is not None:
|
||||
fnc(app, fs.server_address[1])
|
||||
|
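In the tunneled variant above, the .rdp template is rewritten so its {address} placeholder points at the local end of the tunnel before the client launches. A minimal sketch of that substitution (the template content is invented for illustration; the real file comes from sp['as_file'] and the port from the forward() helper):

RDP_TEMPLATE = 'full address:s:{address}\nscreen mode id:i:2\n'  # illustrative content only

def render_for_tunnel(template: str, local_port: int) -> str:
    # The tunnel listens on localhost, so the client must connect to 127.0.0.1:<port>
    return template.format(address='127.0.0.1:{}'.format(local_port))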
@@ -1 +1 @@
|
||||
Tqjv2NoZjghIsF/sAHDgawag9vu/A+YEmr59t6HzER8TB13/UqZnCPJpyJrOL643h+ssN51HEoBjj3fAYBlleNjmr1Nkle9/VaFWNhuruRQDMUp41GoGzICkF4dNMWhdEgpZ80xGxcv+0AZzmo4eCZsIAZjMNDYJKq7N/M77I2kl++K79VU9OB/npjArVKUBiROvS5Y6E9dOXhUAFgpO5zKLPPLuFzdLyNnGpIOmu3ei5bG8arVq8S1nF4aBjVXgSP9bZ15VKHgT5vBkOIK0TIKKS+qfP7Hb1+QqxLeV5BKeVn4jLYBj+TZmqtUmIJQG226PdzsFTnjpPH33twdhTFFb8aCyoNma/U+xOOJZNSqouUPQV7KvHQuzYqrLB9Zg1rA9O2cz/sp4seePfrit8dw+H8Wq4dAlvgB05/zfoVMhttjuYuI46C69XrK5M8SFsnyD7faFRwNqZtdeJt7XwYtg1/TUMH6JM4UxTuL49aXjJQ3aOrNA9r6ukNe7lPMEbGBx4mtlGqlg+2ZCbqG1HZRITnUfuHy5GklU2scBGoEgoz6YeguGAgNbiwLohrWYAA1IGZEhSJc7Fx+nn5IRuYJdCof2dO/o0ntGi4pfVDL91sqmafEn/f90A5lBsoFotlbSG67tI7CcWs+QR0P0T6KspBv/TIFSNu/Igs1c0hY=
|
||||
n3a+b8OD5i8IAbP/fK2c72A1UzRScr7kH2BeY4zSaVJoJf+Xn4+F6Osm0emtS+1muStwHvIE9Hqnxdg8lbYHIrjRAGvFazs5xcExIvYU1rVA/gmBjaViqUl6SQc+3zWa7xXwOVrwQdoXF9WGYOAH8DAPT/aiKgeaCoKlCFNyrxlMImxWPlWRvCKrZQlWxvB6rKFC0apO+xxi4FU3xXIomWBufnLuDppYL+xwVzMj4YNWec3rdV6RT8cpBQzfkNec7rgHBOB+Py2BnY54TtMqlECpXZlEYy4NVORI0rwZ+aK8dvpGvRS4nWQnq3edRkWTzDZAo+Kkp17/fmCTEGucQ/TC4Y2qBoB35kxQmv1aWhWWyJ/BdpHI6lNJR884lK/yup3I+vQbf7ZJzu6wT98YHk0x+rsPt97FPxJsVdsxWG4IcaVgheuIjlYPNShMDoy/slOyEz/k0j7jWAhRu0WWMHjI/ozighX+jHmNeIG9EPim0sKZ/04okSXxhwwC1UzwcevUdrI2eUyaUEbq9Ua3SMCuQNmDVKFlJJ6PUucbWvgKsdsSMbW1L4syA41yTPk9dcYHlu32k8oaZ1o3QIDw7Do48pygwnWSJM9XlynFcRcYmOikRIrINJlJmKH6oY+ez9nva7cAV6rFel+jk7bgSZUrKMfdsnsLh1PSabnLJ/s=
|
@@ -2,6 +2,9 @@ import typing
|
||||
import shutil
|
||||
import os
|
||||
import os.path
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# On older client versions, need importing globally to allow inner functions to work
|
||||
import subprocess # type: ignore
|
||||
@@ -43,7 +46,7 @@ msrdc_list = [
|
||||
]
|
||||
|
||||
thincast_list = [
|
||||
'/Applications/Thincast Remote Desktop Client.app/Contents/MacOS/Thincast Remote Desktop Client',
|
||||
'/Applications/Thincast Remote Desktop Client.app',
|
||||
]
|
||||
|
||||
xfreerdp_list = [
|
||||
@@ -58,21 +61,28 @@ executable = None
|
||||
kind = ''
|
||||
|
||||
# Check first thincast (better option right now, prefer it)
|
||||
logger.debug('Searching for Thincast in: %s', thincast_list)
|
||||
for thincast in thincast_list:
|
||||
if os.path.isfile(thincast):
|
||||
if os.path.isdir(thincast):
|
||||
logger.debug('Thincast found: %s', thincast)
|
||||
executable = thincast
|
||||
kind = 'thincast'
|
||||
break
|
||||
|
||||
if not executable:
|
||||
logger.debug('Searching for xfreerdp in: %s', xfreerdp_list)
|
||||
found_xfreerdp = False
|
||||
for xfreerdp_executable in xfreerdp_list:
|
||||
xfreerdp: str = tools.findApp(xfreerdp_executable)
|
||||
if xfreerdp and os.path.isfile(xfreerdp):
|
||||
executable = xfreerdp
|
||||
xfreerdp = tools.findApp(xfreerdp_executable) # type: ignore
|
||||
logger.debug('tools.findApp(%s) result: %s', xfreerdp_executable, xfreerdp) # type: ignore
|
||||
if xfreerdp and os.path.isfile(xfreerdp): # type: ignore
|
||||
logger.debug('xfreerdp found: %s', xfreerdp) # type: ignore
|
||||
executable = xfreerdp # type: ignore
|
||||
# Ensure that the kind is 'xfreerdp' and not 'xfreerdp3' or 'xfreerdp2'
|
||||
kind = xfreerdp_executable.rstrip('3').rstrip('2')
|
||||
break
|
||||
else:
|
||||
if not found_xfreerdp:
|
||||
logger.debug('Searching for MSRDC in: %s', msrdc_list)
|
||||
for msrdc in msrdc_list:
|
||||
if os.path.isdir(msrdc) and sp['as_file']: # type: ignore
|
||||
executable = msrdc
|
||||
@@ -80,6 +90,7 @@ if not executable:
|
||||
break
|
||||
|
||||
if not executable:
|
||||
logger.debug('No compatible executable found (Thincast, xfreerdp, MSRDC)')
|
||||
msrd = msrd_li = ''
|
||||
if sp['as_rdp_url']: # type: ignore
|
||||
msrd = ', Microsoft Remote Desktop'
|
||||
@@ -110,26 +121,90 @@ if not executable:
|
||||
'''
|
||||
)
|
||||
|
||||
logger.debug('Using %s client of kind %s', executable, kind) # type: ignore
|
||||
|
||||
if kind == 'msrdc':
|
||||
theFile = sp['as_file'] # type: ignore
|
||||
filename = tools.saveTempFile(theFile)
|
||||
filename = tools.saveTempFile(theFile) # type: ignore
|
||||
# Rename as .rdp, so open recognizes it
|
||||
shutil.move(filename, filename + '.rdp')
|
||||
shutil.move(filename, filename + '.rdp') # type: ignore
|
||||
|
||||
# tools.addTaskToWait(subprocess.Popen(['open', filename + '.rdp']))
|
||||
# Force MSRDP to be used with -a (thanks to Dani Torregrosa @danitorregrosa (https://github.com/danitorregrosa) )
|
||||
tools.addTaskToWait(
|
||||
# Force MSRDP to be used with -a (thanks to Dani Torregrosa @danitorregrosa (https://github.com/danitorregrosa))
|
||||
tools.addTaskToWait( # type: ignore
|
||||
subprocess.Popen(
|
||||
[
|
||||
'open',
|
||||
'-a',
|
||||
executable,
|
||||
filename + '.rdp',
|
||||
]
|
||||
] # type: ignore
|
||||
)
|
||||
)
|
||||
tools.addFileToUnlink(filename + '.rdp')
|
||||
else: # thincast, udsrdp, freerdp
|
||||
tools.addFileToUnlink(filename + '.rdp') # type: ignore
|
||||
|
||||
|
||||
if kind == 'thincast':
|
||||
if sp['as_file']: # type: ignore
|
||||
logger.debug('Opening Thincast with RDP file %s', sp['as_file']) # type: ignore
|
||||
theFile = sp['as_file'] # type: ignore
|
||||
filename = tools.saveTempFile(theFile) # type: ignore
|
||||
|
||||
# # add to file the encrypted password for RDP
|
||||
# import win32crypt
|
||||
# import binascii
|
||||
|
||||
# def encrypt_password_rdp(plain_text_password):
|
||||
# # Convert password to UTF-16-LE (Unicode string used by RDP)
|
||||
# data = plain_text_password.encode('utf-16-le')
|
||||
# # Encrypt with DPAPI (CryptProtectData)
|
||||
# encrypted_data = win32crypt.CryptProtectData(data, None, None, None, None, 0)
|
||||
# # Convert bytes to hexadecimal for RDP
|
||||
# encrypted_hex = binascii.hexlify(encrypted_data).decode('ascii')
|
||||
# return encrypted_hex
|
||||
|
||||
# filename_handle = open(filename, 'a') # type: ignore
|
||||
# if sp.get('password', ''): # type: ignore
|
||||
# encrypted_password = encrypt_password_rdp(sp["password"])
|
||||
# filename_handle.write(f'password 51:b:{encrypted_password}\n') # type: ignore
|
||||
# filename_handle.close()
|
||||
|
||||
# add to file the password without encryption (Thincast will encrypt it)
|
||||
filename_handle = open(filename, 'a') # type: ignore
|
||||
if sp.get('password', ''): # type: ignore
|
||||
filename_handle.write(f'password 51:b:{sp["password"]}\n') # type: ignore
|
||||
filename_handle.close()
|
||||
|
||||
# Rename as .rdp, so open recognizes it
|
||||
shutil.move(filename, filename + '.rdp') # type: ignore
|
||||
params = [ # type: ignore
|
||||
'open',
|
||||
'-a',
|
||||
executable,
|
||||
filename + '.rdp', # type: ignore
|
||||
]
|
||||
logger.debug('Opening Thincast with RDP file with params: %s', ' '.join(params)) # type: ignore
|
||||
tools.addTaskToWait( # type: ignore
|
||||
subprocess.Popen(params) # type: ignore
|
||||
)
|
||||
tools.addFileToUnlink(filename + '.rdp') # type: ignore
|
||||
else:
|
||||
logger.debug('Opening Thincast with xfreerdp parameters')
|
||||
# Fix resolution...
|
||||
try:
|
||||
xfparms = fix_resolution()
|
||||
except Exception as e:
|
||||
xfparms = list(map(lambda x: x.replace('#WIDTH#', '1400').replace('#HEIGHT#', '800'), sp['as_new_xfreerdp_params'])) # type: ignore
|
||||
|
||||
params = [ # type: ignore
|
||||
'open',
|
||||
'-a',
|
||||
executable,
|
||||
'--args',
|
||||
] + [os.path.expandvars(i) for i in xfparms + ['/v:{}'.format(sp['address'])]] # type: ignore
|
||||
#logger.debug('Executing: %s', ' '.join(params))
|
||||
subprocess.Popen(params) # type: ignore
|
||||
else: # for now, both xfreerdp or udsrdp
|
||||
# Fix resolution...
|
||||
try:
|
||||
xfparms = fix_resolution()
|
||||
@@ -137,4 +212,5 @@ else: # thincast, udsrdp, freerdp
|
||||
xfparms = list(map(lambda x: x.replace('#WIDTH#', '1400').replace('#HEIGHT#', '800'), sp['as_new_xfreerdp_params'])) # type: ignore
|
||||
|
||||
params = [os.path.expandvars(i) for i in [executable] + xfparms + ['/v:{}'.format(sp['address'])]] # type: ignore
|
||||
subprocess.Popen(params)
|
||||
logger.debug('Executing: %s', ' '.join(params)) # type: ignore
|
||||
subprocess.Popen(params) # type: ignore
|
||||
|
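The macOS branch above relies on the system open launcher in two ways: with an .rdp file it hands the file straight to the client bundle, and without one it forwards FreeRDP-style arguments after --args. A simplified sketch of those two call shapes; the bundle path and function names are assumptions for illustration, not taken verbatim from the script:

import subprocess

APP = '/Applications/Thincast Remote Desktop Client.app'  # assumed client bundle path

def launch_with_rdp_file(rdp_path: str) -> None:
    # 'open -a <app> <file.rdp>' lets macOS hand the file to the chosen client
    subprocess.Popen(['open', '-a', APP, rdp_path])

def launch_with_params(freerdp_params: list[str], address: str) -> None:
    # Everything after '--args' is passed to the application itself
    subprocess.Popen(['open', '-a', APP, '--args'] + freerdp_params + ['/v:{}'.format(address)])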
@@ -1 +1 @@
|
||||
kIL9OXr/AnhhqE+5ln8YpmUhgCDxK1r7yuHjb5j+n1VJOLHtJwH0Rm5Dh5SrhNkCFo2suHTVb0f1nbkQ4mkzlszBNaCSvLkwxpTIZrO8P9aOpEJDmq5aGRHNHLPNzmhBzUfhK+ILGlsCElJzP/19kNxifoanH9xyZkybHAuxNy4QjRt8dkIZkxj0qxsRFRAt8R2yZARdjUkfufiztNAdTuHLVUG+JF5OnTQDJi6vIqOrbPiAIn/vweHuL4zFH4UJXHipvgXJiO9nq4ZvXCAy9+ASxEDG5ql/iwOFoQyGekXK5XtkSbT9F+fDejVoxKP/qp4lIrEHQ/Y9WMJnpGBt2ko7FeyDP8msx45svLQYBNeqxGqIUi0yzfzktglLfexInisHBtz0lb7Uuz/mdPscUoKU+j/5/ZgFS0yFD+NelFmG1q52X7ndkXUXaLb3tPtNDd1ZdgsyEkTiVuo3jW+kT1SWlAeK+YEyZinC8/Df7PLCX7fOl+WPm8MAnncfmbHO654HHgYiqDh+I93IqqReytepxWgK+bwxdGz/z0BPzLXSCy2im6UjEdNFSFRBEywXAdMTxgC2YD2XOl7SHAtcPT2kpgJtlxT1V7+wwv2tyzWf0bL0GA0EIDEAP6cga+l4wJGSIDnYnxiJHaF6aF3XYL8yh4ghHTD7IAJFpU6vWjM=
|
||||
rP6Eur9PlTONUNLjIRAVL/CdtT7ATNYC8l0AzvU57tqyFDFa/C8nNyq3Aaepf+SSYaYzxg9TnWUge8jpcnM20ERV6H2IA2aN3Hrg0+q76OPNlH1UmygyT1+UxxccPemnGAcVVnBXOHwONHvpE8FqdOFZn6P2CWWojOLUMB2yj/kO0l+bZDmDRlihg5sIpSd4Wkt4ezyz9j7Cjsz6JuFDQjVdaIDEFeGcqfEJIDKlpIY6GJgJYbGMx0C0uayNtQlFO653EcS7mnXhlIQwGg4YJl3fjKksjDWL2H65MsddRvZubIIrBU6jQnIj2W+gl1/xT8mRom48SogBJWzjzjT/X7sN6QRfvKCMfLwhfqHw7p0MYVV1Tcpjzn1sFMyrR4zPXGaH80+2hn9yf2HGVb6QVmir0x0VKRy0eQAEqYtb3TeMU0lmXShkuSogiOfdpqd65NKpboUuv/cVttpa8qzZhroBQXyufSEi1gmVTc6tp2PeQIXFZLrL6SOP263HXOWPirmIuLri8k3qK2L4BiuD7ZTiwursqCytoFjBCVpWPhnI3c6Q81WpzESBs6E8Kyvanr/jMX4T95i9m/kZBdLELLA7uj2dTaxsdUHJrs1fO7/hMGdgxdmWzXwXJX9VzJ+ZyF69KP0w4oZd+bazxFK0aaHqttxS2ZjATJ5rlOARtzs=
|
@@ -3,6 +3,9 @@ import typing
|
||||
import shutil
|
||||
import os
|
||||
import os.path
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# On older client versions, need importing globally to allow inner functions to work
|
||||
import subprocess # type: ignore
|
||||
@@ -50,7 +53,7 @@ msrdc_list = [
|
||||
]
|
||||
|
||||
thincast_list = [
|
||||
'/Applications/Thincast Remote Desktop Client.app/Contents/MacOS/Thincast Remote Desktop Client',
|
||||
'/Applications/Thincast Remote Desktop Client.app',
|
||||
]
|
||||
|
||||
xfreerdp_list = [
|
||||
@@ -65,25 +68,31 @@ executable = None
|
||||
kind = ''
|
||||
|
||||
# Check first thincast (better option right now, prefer it)
|
||||
logger.debug('Searching for Thincast in: %s', thincast_list)
|
||||
for thincast in thincast_list:
|
||||
if os.path.isfile(thincast):
|
||||
if os.path.isdir(thincast):
|
||||
executable = thincast
|
||||
kind = 'thincast'
|
||||
logger.debug('Found Thincast client at %s', thincast)
|
||||
break
|
||||
|
||||
if not executable:
|
||||
logger.debug('Searching for xfreerdp in: %s', xfreerdp_list)
|
||||
for xfreerdp_executable in xfreerdp_list:
|
||||
xfreerdp: str = tools.findApp(xfreerdp_executable)
|
||||
if xfreerdp and os.path.isfile(xfreerdp):
|
||||
executable = xfreerdp
|
||||
xfreerdp: str = tools.findApp(xfreerdp_executable) # type: ignore
|
||||
if xfreerdp and os.path.isfile(xfreerdp): # type: ignore
|
||||
executable = xfreerdp # type: ignore
|
||||
# Ensure that the kind is 'xfreerdp' and not 'xfreerdp3' or 'xfreerdp2'
|
||||
kind = xfreerdp_executable.rstrip('3').rstrip('2')
|
||||
logger.debug('Found xfreerdp client: %s (kind: %s)', xfreerdp, kind) # type: ignore
|
||||
break
|
||||
else:
|
||||
logger.debug('Searching for Microsoft Remote Desktop in: %s', msrdc_list)
|
||||
for msrdc in msrdc_list:
|
||||
if os.path.isdir(msrdc) and sp['as_file']: # type: ignore
|
||||
executable = msrdc
|
||||
kind = 'msrdc'
|
||||
logger.debug('Found Microsoft Remote Desktop client at %s', msrdc)
|
||||
break
|
||||
|
||||
if not executable:
|
||||
@@ -91,6 +100,7 @@ if not executable:
|
||||
if sp['as_rdp_url']: # type: ignore
|
||||
msrd = ', Microsoft Remote Desktop'
|
||||
msrd_li = '<li><p><b>{}</b> from Apple Store</p></li>'.format(msrd)
|
||||
logger.debug('as_rdp_url is set, will suggest Microsoft Remote Desktop')
|
||||
|
||||
raise Exception(
|
||||
f'''<p><b>xfreerdp{msrd} or thincast client not found</b></p>
|
||||
@@ -119,38 +129,90 @@ if not executable:
|
||||
|
||||
# Open tunnel
|
||||
fs = forward(remote=(sp['tunHost'], int(sp['tunPort'])), ticket=sp['ticket'], timeout=sp['tunWait'], check_certificate=sp['tunChk']) # type: ignore
|
||||
address = '127.0.0.1:{}'.format(fs.server_address[1])
|
||||
address = '127.0.0.1:{}'.format(fs.server_address[1]) # type: ignore
|
||||
|
||||
# Check that tunnel works..
|
||||
if fs.check() is False:
|
||||
if fs.check() is False: # type: ignore
|
||||
logger.debug('Tunnel check failed, could not connect to tunnel server')
|
||||
raise Exception('<p>Could not connect to tunnel server.</p><p>Please, check your network settings.</p>')
|
||||
else:
|
||||
logger.debug('Tunnel check succeeded, connection to tunnel server established')
|
||||
|
||||
logger.debug('Using %s client of kind %s', executable, kind) # type: ignore
|
||||
|
||||
if kind == 'msrdc':
|
||||
theFile = sp['as_file'].format(address=address) # type: ignore
|
||||
|
||||
filename = tools.saveTempFile(theFile)
|
||||
filename = tools.saveTempFile(theFile) # type: ignore
|
||||
# Rename as .rdp, so open recognizes it
|
||||
shutil.move(filename, filename + '.rdp')
|
||||
shutil.move(filename, filename + '.rdp') # type: ignore
|
||||
|
||||
# tools.addTaskToWait(subprocess.Popen(['open', filename + '.rdp']))
|
||||
# Force MSRDP to be used with -a (thanks to Dani Torregrosa @danitorregrosa (https://github.com/danitorregrosa) )
|
||||
tools.addTaskToWait(
|
||||
tools.addTaskToWait( # type: ignore
|
||||
subprocess.Popen(
|
||||
[
|
||||
'open',
|
||||
'-a',
|
||||
executable,
|
||||
filename + '.rdp',
|
||||
]
|
||||
)
|
||||
] # type: ignore
|
||||
)
|
||||
)
|
||||
tools.addFileToUnlink(filename + '.rdp')
|
||||
else: # freerdp, thincast or udsrdp
|
||||
tools.addFileToUnlink(filename + '.rdp') # type: ignore
|
||||
|
||||
if kind == 'thincast':
|
||||
if sp['as_file']: # type: ignore
|
||||
logger.debug('Opening Thincast with RDP file %s', sp['as_file']) # type: ignore
|
||||
theFile = sp['as_file'] # type: ignore
|
||||
theFile = theFile.format( # type: ignore
|
||||
address='{}'.format(address)
|
||||
)
|
||||
filename = tools.saveTempFile(theFile) # type: ignore
|
||||
|
||||
# filename_handle = open(filename, 'a') # type: ignore
|
||||
# if sp.get('password', ''): # type: ignore
|
||||
# filename_handle.write(f'password 51:b:{sp["password"]}\n') # type: ignore
|
||||
# filename_handle.close()
|
||||
|
||||
# Rename as .rdp, so open recognizes it
|
||||
shutil.move(filename, filename + '.rdp') # type: ignore
|
||||
# show filename content in log for debug
|
||||
with open(filename + '.rdp', 'r') as f: # type: ignore
|
||||
logger.debug('RDP file content:\n%s', f.read()) # type: ignore
|
||||
params = [ # type: ignore
|
||||
'open',
|
||||
'-a',
|
||||
executable,
|
||||
filename + '.rdp', # type: ignore
|
||||
]
|
||||
logger.debug('Opening Thincast with RDP file with params: %s', ' '.join(params)) # type: ignore
|
||||
tools.addTaskToWait( # type: ignore
|
||||
subprocess.Popen(params) # type: ignore
|
||||
)
|
||||
tools.addFileToUnlink(filename + '.rdp') # type: ignore
|
||||
else:
|
||||
logger.debug('Opening Thincast with xfreerdp parameters')
|
||||
# Fix resolution...
|
||||
try:
|
||||
xfparms = fix_resolution()
|
||||
except Exception as e:
|
||||
xfparms = list(map(lambda x: x.replace('#WIDTH#', '1400').replace('#HEIGHT#', '800'), sp['as_new_xfreerdp_params'])) # type: ignore
|
||||
|
||||
params = [ # type: ignore
|
||||
'open',
|
||||
'-a',
|
||||
executable,
|
||||
'--args',
|
||||
] + [os.path.expandvars(i) for i in xfparms + ['/v:{}'.format(address)]] # type: ignore
|
||||
#logger.debug('Executing: %s', ' '.join(params))
|
||||
subprocess.Popen(params) # type: ignore
|
||||
else: # freerdp or udsrdp
|
||||
# Fix resolution...
|
||||
try:
|
||||
xfparms = fix_resolution()
|
||||
except Exception as e:
|
||||
xfparms = list(map(lambda x: x.replace('#WIDTH#', '1400').replace('#HEIGHT#', '800'), sp['as_new_xfreerdp_params'])) # type: ignore
|
||||
|
||||
params = [os.path.expandvars(i) for i in [executable] + xfparms + ['/v:{}'.format(address)]]
|
||||
subprocess.Popen(params)
|
||||
params = [os.path.expandvars(i) for i in [executable] + xfparms + ['/v:{}'.format(address)]] # type: ignore
|
||||
subprocess.Popen(params) # type: ignore
|
||||
|
@@ -1 +1 @@
|
||||
fLDRyAJrjER+znvjbhAuUa+XJ0itaLRspgQzk5AKMz/YpNZgdaHBtOw25XUsYrszsjSq6AIPB2VnT45Bhg7/GB8CRpTuCkpeoDpUA2rNR24DW7i5urmhjfPjtuMmSOBKPdXkiYPIhavFeD+kRWrrm6X1lolTnGz+pie2IXxsEhsot4gbg6eOv1ieloRHnTptO2qRr99I35BmSTxCKnyhrV6AWJBb6lHhuT04fhk5+X/ZA4kQewaI0ncqBdsqZoxcdSDgiOllF8kUsDgBL91wpYAB9s5eQpnI5VUPY/7Gzd47guRNCf24EHswd1lJrGAGta1p/e3hvtU0whdaaAiuhG/U6zHhpDpE5SPV66MuPfo1kKyIeNTMTC50Dkahu+j19cVbubdSJIUj42+nXKHDy5Bojotd/IYYSSR68K0eblpHDId7YxXpYmKY2QyA2cWDUrGD4glRWinKR/Hoi1oNuuBItMQUcDCPxxPWks4OsYqwpbAAkdFYRXzUR18TKndtwmHKox5AfXuQkuHpICRRnEMzD5pfNerUtEk3Zy0BHZYtoD7BQu2OEh9e3jC5bRHaQvf/sGBGMWfKchBwtPiBKlji6nPw6pqLPW5npD3GHYkcxI8tgzlTzIZS6YkOXDI8lHMYV5lvHxOPYFrAecfNpK8pj2uAeGoEFvBRmZZG79g=
|
||||
NKfVtsTQWErfjpdrXd7qGLp6c0ScKMcbj4o5QTDAfQfDEuuj1/Qcg9gx+1NCyIF0hxy9ZIKbvwaYBYu9rARxz3XYsidxpgsnhZPyshdPN236M+zRo9SBFY3Ug0aNBZSewSZ6MSfrCZkMUW0NJOOpGu41KQNUVE5+DciC618rMoD0V//zJhz4SFy7dscjLg8cm69KNS6jC1trJkX7Ep19TF5DG6s0P9lQGaMLSj2UYTsF5gaZZY1jwZcCSw0QGrXon/mxR4i/t3BiARekyUB/ygM5DfzG5BMnQIMtLPNZu89vdJ+vAU5bUq1MrQjpAI3dzhdJwiUWTSVbQ3rHAoQHyBL4yE8bvuLIB0sUD5Jqd3EkTlVBJ0mmo1dHJZj0dzbXd5cHDlK+0Ms+vDSm/Qw8rqKatSBZlavcdQ+ScIEHlj0bEf6teKD3VsMUKFDxpdIBivkSz0KIZW+HXQiiFprdeN5tnAVj8ItlxFZlItNw3Zz6CrNqHpfSdyW0uzmuN3mD1iyQhkW6+JzWC+G9hmMY9X7eFv/xzwXQHUiB8rrt5QK7PJHlK1uRC2MFQRg7s83bYeZBEhAynuyjwLAT6mo+RsINgqF3hkNUHfh7E4IvK09ynl5Dnv3ypAfYKGvlWbpH8Moo+Pp45r+8sQPa83br9+y52I8JxDxqkdHLrzmXqc4=
|
@@ -27,7 +27,7 @@ theFile = sp['xf'].format(export=home, keyFile=keyFile.replace('\\', '/'), ip='1
|
||||
filename = tools.saveTempFile(theFile)
|
||||
|
||||
x2goPath = os.environ['PROGRAMFILES(X86)'] + '\\x2goclient'
|
||||
executable = tools.findApp('x2goclient.exe', [x2goPath])
|
||||
executable = tools.findApp('x2goclient.exe', x2goPath)
|
||||
if executable is None:
|
||||
raise Exception(
|
||||
'''<p>You must have installed latest X2GO Client in default program file folder in order to connect to this UDS service.</p>
|
||||
|
@@ -1 +1 @@
|
||||
NzheeXAbv2kVktgPeHMrFR+fkSjoz4NcPHrfrMXKP4BGS8oeBO8CBJw0OFYqXIRKCsBPi6yvd961Ofg2UFi6brr4dCut6PfXddKEJIEbanwTUr3yx6882TOw95h3ibLkIP9Fi/6VmId8X3QnPqdmN8XgU2eB7ONrvFzu9aCcMuKj2rKLXlesJ1t/ArxRn80xrCKp/27jDX1aitcTHzo9VsM9bjBNodHp6V+/hHiw+Ht7qI9MZ1a2jQxA63CR5uqsAJ5B4qcp/fn4TBXgPr2NxL1StT19v3e36YuCKhSUVS8b3ggDrGU982m5rfcbn9Gz3yaaYNcHmf2600AdMFGsjiBmiv65Tjc1utC81rsmbsomsSBj48IZ0ZoYXgZkSHDYzRU7xVA2QAYpLI1IM8tputACN1L5vcNQXl6SqrJc3+3PidOmUyxrQxjNDc6wlfsTOH0UhgT8FPxJu9NFPU/eYq3w3hPsvowg4MewjA4X8NBtqpus76Jyux2e8ym3WJHvlAff04VrwEeidaci591xbuu62ezTfACXtSOK5mOAgUWhCAHWE3/U32ibtsw5SER4u5ywk2nVGlvFY3+5x6RxoGN4FhsYwfumSHO54caonHyBVkkTK1FRbYJ6zNXMf37491YOrdqAFX7R205d4qcJkqLldo/DaYO10CSSiZ323x4=
|
||||
TpOuQbtplQOuMXpTtxaAQJwZJtoVTMtWruV9d8+d6HJnSd7Y7iLMWkqerP3+rQBOqT67YAI8fq8ZZsYSDNKCoYQwJXqp8d2S8K2LyW7ClnYZXlFzVvjW0+wgTVV4E3f5Y7xT5TGlQ7/mO/KPgEloTxC8KD/DOTRc6MBusSgc/nd1ssE946qznCvihTsWuM4KGTfhQ4q9BVIVaxcFRszmhPqjVT0iY8sF3Aqghx4hvbgZKglqF9o/1JemzffLn/pxyclIePFJTPZlIzecfYcQ7Gsa+BpLbryRr6LUmuZLhBZYTnRP0dCrwMjfnitVSspwo0MJX4bDa1rQNPsQZLylrzNqbjA/aCf60qLit58KA2i7Vb/BfE3lPtoXKKILDEzOFqZObfRDDNQpiP8xCyJVXb5f3oVYYtRbuk/Ld1pNtsmTsEw69kQ30pW5Mqw+Qy5/6Nf75dAm1/9/FJga1uVBhlgPG0qgSf96M1s8FlSUrC5gn9neJB3ThOBpJkDRqvER192dwVlwcSbGLzgEGdZLFZq4cskfdsJHdWQ5aTcbnEBPrWRbvKbM1c2iuXXkLiKAAcY1rFmPWZscWbj8HqjFHwDd4bUjKzUA2GZt0LbhLG0ujQAK49Xdx7rHR15RL9hFFRwH4gUKQj/zuHCNkNXcW4bMmL1ZYWQXVW/X8Tv+LtQ=
|
@@ -58,9 +58,9 @@ register = template.Library()
|
||||
|
||||
def uds_js(request: 'ExtendedHttpRequest') -> str:
|
||||
auth_host = (
|
||||
request.META.get('SERVER_NAME') or request.META.get('HTTP_HOST') or 'auth_host'
|
||||
request.META.get('HTTP_HOST') or request.META.get('SERVER_NAME') or 'auth_host'
|
||||
) # Last one is a placeholder in case we can't locate host name
|
||||
|
||||
|
||||
role: str = 'user'
|
||||
user: typing.Optional['User'] = request.user if request.authorized else None
|
||||
|
||||
@@ -79,8 +79,10 @@ def uds_js(request: 'ExtendedHttpRequest') -> str:
|
||||
    # Filter out non accesible authenticators (using origin)
    authenticators = [
        a
        for a in Authenticator.get_by_tag(tag, auth_host)
        if a.get_instance().is_ip_allowed(request) and (tag != 'disabled' or not a.get_type().is_custom())
        for a in Authenticator.get_by_tag(
            tag, auth_host if GlobalConfig.DISALLOW_GLOBAL_LOGIN.as_bool(True) else None
        )
        if a.get_instance().is_ip_allowed(request)
    ]

    logger.debug('Authenticators PRE: %s', authenticators)
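The rewritten comprehension above restricts the lookup to the requesting host only when DISALLOW_GLOBAL_LOGIN is enabled; otherwise the tag alone decides which authenticators are offered. A hedged sketch of that rule in isolation (the stand-in function below only mirrors the call shape; it is not the template tag's code):

def visible_authenticators(tag, auth_host, disallow_global_login, get_by_tag, request):
    # Pass the host filter only when global login is disallowed
    host_filter = auth_host if disallow_global_login else None
    return [a for a in get_by_tag(tag, host_filter) if a.get_instance().is_ip_allowed(request)]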
@@ -102,25 +104,23 @@ def uds_js(request: 'ExtendedHttpRequest') -> str:
|
||||
            'priority': auth.priority,
            'is_custom': auth_type.is_custom(),
        }

    def _is_auth_visible(auth: Authenticator) -> bool:
        """
        Check if the authenticator is visible for the current request.
        This is used to filter out authenticators that are not allowed
        for the current user or request.
        """
        return auth.type_is_valid() and (auth.state == consts.auth.VISIBLE or (auth.state == consts.auth.HIDDEN and use_hidden))
        return auth.type_is_valid() and (
            auth.state == consts.auth.VISIBLE or (auth.state == consts.auth.HIDDEN and use_hidden)
        )

    config: dict[str, typing.Any] = {
        'version': consts.system.VERSION,
        'version_stamp': consts.system.VERSION_STAMP,
        'language': get_language(),
        'available_languages': [{'id': k, 'name': gettext(v)} for k, v in settings.LANGUAGES],
        'authenticators': [
            _get_auth_info(auth)
            for auth in authenticators
            if _is_auth_visible(auth)
        ],
        'authenticators': [_get_auth_info(auth) for auth in authenticators if _is_auth_visible(auth)],
        'mfa': request.session.get('mfa', None),
        'tag': tag,
        'os': request.os.os.name,
@@ -42,6 +42,7 @@ from uds.core.auths.auth import get_webpassword
|
||||
from uds.core.managers.crypto import CryptoManager
from uds.core.managers.userservice import UserServiceManager
from uds.core.exceptions.services import (
    InvalidServiceException,
    MaxServicesReachedError,
    ServiceAccessDeniedByCalendar,
    ServiceNotReadyError,
@@ -466,6 +467,9 @@ def enable_service(
    except ServiceAccessDeniedByCalendar:
        logger.info('Access tried to a calendar limited access pool "%s"', service_id)
        error = types.errors.Error.SERVICE_CALENDAR_DENIED.message
    except InvalidServiceException as e:
        logger.warning('Invalid service: %s', e)
        error = types.errors.Error.INVALID_SERVICE.message
    except Exception as e:
        logger.exception('Error')
        error = str(e)
@@ -46,10 +46,10 @@ MAX_REMOVING_TIME = 3600 * 24 * 1 # 2 days, in seconds
|
||||
|
||||
|
||||
class DeployedServiceInfoItemsCleaner(Job):
    frecuency = 3607
    frecuency_cfg = (
        GlobalConfig.CLEANUP_CHECK
    )  # Request run cache "info" cleaner every configured seconds. If config value is changed, it will be used at next reload
    frecuency = 600
    # frecuency_cfg = (
    #     GlobalConfig.CLEANUP_CHECK
    # )  # Request run cache "info" cleaner every configured seconds. If config value is changed, it will be used at next reload
    friendly_name = 'Deployed Service Info Cleaner'
|
||||
|
||||
def run(self) -> None:
|
||||
|