mirror of https://github.com/dkmstr/openuds.git synced 2024-12-22 13:34:04 +03:00

upgrading and linting tunnel

This commit is contained in:
Adolfo Gómez García 2023-05-16 01:06:04 +02:00
parent 004ea3e7df
commit 24c9a18ea8
No known key found for this signature in database
GPG Key ID: DD1ABF20724CDA23
8 changed files with 700 additions and 110 deletions

tunnel-server/.pylintrc Normal file
View File

@ -0,0 +1,632 @@
[MAIN]
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
# in a server-like mode.
clear-cache-post-run=no
# Load and enable all available extensions. Use --list-extensions to see a list
# of all available extensions.
enable-all-extensions=yes
# In error mode, messages with a category besides ERROR or FATAL are
# suppressed, and no reports are done by default. Error mode is compatible with
# disabling specific errors.
#errors-only=
# Always return a 0 (non-error) status code, even if lint errors are found.
# This is primarily useful in continuous integration scripts.
#exit-zero=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loaded into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=lxml.etree
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loaded into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
extension-pkg-whitelist=
# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
fail-on=
# Specify a score threshold under which the program will exit with error.
fail-under=10
# Interpret the stdin as a python script, whose filename needs to be passed as
# the module_or_package argument.
#from-stdin=
# Files or directories to be skipped. They should be base names, not paths.
ignore=CVS
# Add files or directories matching the regular expressions patterns to the
# ignore-list. The regex matches against paths and can be in Posix or Windows
# format. Because '\\' represents the directory delimiter on Windows systems,
# it can't be used as an escape character.
ignore-paths=
# Files or directories matching the regular expression patterns are skipped.
# The regex matches against base names, not paths. The default value ignores
# Emacs file locks
ignore-patterns=^\.#
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use, and will cap the count on Windows to
# avoid hangs.
jobs=1
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Minimum Python version to use for version dependent checks. Will default to
# the version used to run pylint.
py-version=3.11
# Discover python modules and packages in the file system subtree.
recursive=no
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# In verbose mode, extra non-checker-related info will be displayed.
#verbose=
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style. If left empty, argument names will be checked with the set
# naming style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style. If left empty, attribute names will be checked with the set naming
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style. If left empty, class attribute names will be checked
# with the set naming style.
#class-attribute-rgx=
# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style. If left empty, class constant names will be checked with
# the set naming style.
#class-const-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style. If left empty, class names will be checked with the set naming style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style. If left empty, constant names will be checked with the set naming
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=camelCase
# Regular expression matching correct function names. Overrides function-
# naming-style. If left empty, function names will be checked with the set
# naming style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
k,
ex,
Run,
_
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style. If left empty, inline iteration names will be checked
# with the set naming style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=camelCase
# Regular expression matching correct method names. Overrides method-naming-
# style. If left empty, method names will be checked with the set naming style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=camelCase
# Regular expression matching correct module names. Overrides module-naming-
# style. If left empty, module names will be checked with the set naming style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken into consideration only for invalid-name.
property-classes=abc.abstractproperty
# Regular expression matching correct type variable names. If left empty, type
# variable names will be checked with the set naming style.
#typevar-rgx=
# Naming style matching correct variable names.
variable-naming-style=camelCase
# Regular expression matching correct variable names. Overrides variable-
# naming-style. If left empty, variable names will be checked with the set
# naming style.
#variable-rgx=
[CLASSES]
# Warn about protected attribute access inside special methods
check-protected-access-in-special-methods=no
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
__post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[DESIGN]
# List of regular expressions of class ancestor names to ignore when counting
# public methods (see R0903)
exclude-too-few-public-methods=
# List of qualified class names to ignore when counting class parents (see
# R0901)
ignored-parents=
# Maximum number of arguments for function / method.
max-args=10
# Maximum number of attributes for a class (see R0902).
max-attributes=12
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=6
# Maximum number of branches for function / method body.
max-branches=24
# Maximum number of locals for function / method body.
max-locals=24
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=32
# Maximum number of return / yield for function / method body.
max-returns=9
# Maximum number of statements in function / method body.
max-statements=96
# Minimum number of public methods for a class (see R0903).
min-public-methods=1
[EXCEPTIONS]
# Exceptions that will emit a warning when caught.
overgeneral-exceptions=builtins.BaseException,builtins.Exception
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=1000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow explicit reexports by alias from a package __init__.
allow-reexport-from-package=no
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=
# Output a graph (.gv or any supported image format) of external dependencies
# to the given file (report RP0402 must not be disabled).
ext-import-graph=
# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be
# disabled).
import-graph=
# Output a graph (.gv or any supported image format) of internal dependencies
# to the given file (report RP0402 must not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
# UNDEFINED.
confidence=HIGH,
CONTROL_FLOW,
INFERENCE,
INFERENCE_FAILURE,
UNDEFINED
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=raw-checker-failed,
bad-inline-option,
locally-disabled,
file-ignored,
suppressed-message,
useless-suppression,
deprecated-pragma,
use-symbolic-message-instead,
R0022,
invalid-name,
broad-exception-caught,
broad-exception-raised,
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
# multiple times (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[METHOD_ARGS]
# List of qualified names (i.e., library.method) which require a timeout
# parameter e.g. 'requests.api.get,requests.api.post'
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
[MISCELLANEOUS]
# List of note tags to take into consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
# Regular expression of note tags to take in consideration.
notes-rgx=
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=8
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
# 'convention', and 'info' which contain the number of messages in each
# category, as well as 'statement' which is the total number of statements
# analyzed. This score is used by the global evaluation report (RP0004).
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
#output-format=
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[SIMILARITIES]
# Comments are removed from the similarity computation
ignore-comments=yes
# Docstrings are removed from the similarity computation
ignore-docstrings=yes
# Imports are removed from the similarity computation
ignore-imports=yes
# Signatures are removed from the similarity computation
ignore-signatures=yes
# Minimum lines number of a similarity.
min-similarity-lines=4
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the 'python-enchant' package.
spelling-dict=
# List of comma separated words that should be considered directives if they
# appear at the beginning of a comment and should not be checked.
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=.*.objects,
.*.DoesNotExist.*,
.+service,
.+osmanager,
ldap\..+,
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of symbolic message names to ignore for Mixin members.
ignored-checks-for-mixins=no-member,
not-async-context-manager,
not-context-manager,
attribute-defined-outside-init
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken into consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# Regex pattern to define which classes are considered mixins.
mixin-class-rgx=.*[Mm]ixin
# List of decorators that change the signature of a decorated function.
signature-mutators=
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of names allowed to shadow builtins
allowed-redefined-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
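For quick reference, a minimal sketch (illustrative names only, not part of this commit) of code that matches the naming styles configured above: camelCase functions, methods and variables, snake_case arguments and attributes, PascalCase classes and UPPER_CASE constants.

import typing

MAX_RETRIES: typing.Final[int] = 3  # const-naming-style=UPPER_CASE

class TicketChecker:  # class-naming-style=PascalCase
    def __init__(self, ticket_length: int) -> None:  # snake_case argument
        self.ticket_length = ticket_length  # snake_case attribute

    def isValid(self, raw_ticket: str) -> bool:  # method-naming-style=camelCase
        return len(raw_ticket) == self.ticket_length

def checkAll(tickets: typing.Iterable[str]) -> bool:  # function-naming-style=camelCase
    checker = TicketChecker(8)  # variable-naming-style=camelCase; length value is made up
    return all(checker.isValid(t) for t in tickets)

Note that invalid-name is in the disable list above, so these styles are documented here but not enforced by default.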

View File

@ -50,7 +50,7 @@ class ConfigurationType(typing.NamedTuple):
listen_address: str
listen_port: int
ipv6: bool
workers: int
@ -77,26 +77,21 @@ class ConfigurationType(typing.NamedTuple):
def __str__(self) -> str:
return 'Configuration: \n' + '\n'.join(
f'{k}={v}'
for k, v in self._asdict().items()
for k, v in self._asdict().items() # pylint: disable=no-member # python >=3.8 has _asdict
)
def read_config_file(
cfg_file: typing.Optional[typing.Union[typing.TextIO, str]] = None
) -> str:
def read_config_file(cfg_file: typing.Optional[typing.Union[typing.TextIO, str]] = None) -> str:
if cfg_file is None:
cfg_file = CONFIGFILE
if isinstance(cfg_file, str):
with open(cfg_file, 'r') as f:
with open(cfg_file, 'r', encoding='utf-8') as f:
return '[uds]\n' + f.read()
# path is in fact a file-like object
return '[uds]\n' + cfg_file.read()
def read(
cfg_file: typing.Optional[typing.Union[typing.TextIO, str]] = None
) -> ConfigurationType:
def read(cfg_file: typing.Optional[typing.Union[typing.TextIO, str]] = None) -> ConfigurationType:
config_str = read_config_file(cfg_file)
cfg = configparser.ConfigParser()
@ -150,8 +145,8 @@ def read(
except ValueError as e:
raise Exception(
f'Mandatory configuration file in incorrect format: {e.args[0]}. Please, revise {CONFIGFILE}'
)
) from None
except KeyError as e:
raise Exception(
f'Mandatory configuration parameter not found: {e.args[0]}. Please, revise {CONFIGFILE}'
)
) from None
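As context for the change above, a minimal self-contained sketch of what read_config_file() does with a file-like object: it prepends an implicit '[uds]' section header so configparser accepts a plain key=value file. The sample keys and values below are made up.

import configparser
import io

cfg_file = io.StringIO('listen_address = 0.0.0.0\nlisten_port = 7777\n')  # hypothetical config
config_str = '[uds]\n' + cfg_file.read()

cfg = configparser.ConfigParser()
cfg.read_string(config_str)
print(cfg['uds']['listen_port'])  # -> '7777'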

View File

@ -28,6 +28,7 @@
'''
Author: Adolfo Gómez, dkmaster at dkmon dot com
'''
import re
import typing
DEBUG = True
@ -71,3 +72,6 @@ RESPONSE_OK: typing.Final[bytes] = b'OK'
# Backlog for listen socket
BACKLOG = 1024
# Regular expression for parsing ticket
TICKET_REGEX = re.compile(f'^[a-zA-Z0-9]{{{TICKET_LENGTH}}}$')
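A small sketch of how TICKET_REGEX behaves; TICKET_LENGTH is assumed to be 48 here purely for illustration (its real value is defined elsewhere in consts.py).

import re

TICKET_LENGTH = 48  # assumed value, for illustration only
TICKET_REGEX = re.compile(f'^[a-zA-Z0-9]{{{TICKET_LENGTH}}}$')

assert TICKET_REGEX.match('a' * TICKET_LENGTH) is not None          # valid ticket
assert TICKET_REGEX.match('a' * (TICKET_LENGTH - 1)) is None        # wrong length
assert TICKET_REGEX.match('!' + 'a' * (TICKET_LENGTH - 1)) is None  # invalid character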

View File

@ -72,7 +72,7 @@ class Processes:
self.cfg = cfg
self.ns = ns
for i in range(cfg.workers):
for _ in range(cfg.workers):
self.add_child_pid()
def add_child_pid(self):
@ -157,7 +157,7 @@ class Processes:
) -> None:
if cfg.use_uvloop:
try:
import uvloop
import uvloop # pylint: disable=import-outside-toplevel
if sys.version_info >= (3, 11):
with asyncio.Runner(loop_factory=uvloop.new_event_loop) as runner:
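A standalone sketch of the Python >= 3.11 branch shown above: running a coroutine on a uvloop event loop through asyncio.Runner. It assumes uvloop is installed; the coroutine is just a placeholder.

import asyncio
import sys

async def main() -> None:
    await asyncio.sleep(0)  # placeholder coroutine

if sys.version_info >= (3, 11):
    import uvloop  # pylint: disable=import-outside-toplevel

    with asyncio.Runner(loop_factory=uvloop.new_event_loop) as runner:
        runner.run(main())
else:
    asyncio.run(main())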

View File

@ -36,8 +36,6 @@ import typing
import io
import asyncio
import ssl
import logging
import typing
from . import config
@ -51,6 +49,7 @@ INTERVAL = 2 # Interval in seconds between stats update
logger = logging.getLogger(__name__)
class StatsSingleCounter:
def __init__(self, parent: 'StatsManager', for_receiving=True) -> None:
if for_receiving:
@ -69,9 +68,9 @@ class StatsManager:
sent: int
last_recv: int
recv: int
last: float
start_time: float # timestamp
end_time: float
last: float # timestamp, from time.monotonic()
start_time: float # timestamp, from time.monotonic()
end_time: float # timestamp, from time.monotonic()
def __init__(self, ns: 'Namespace'):
self.ns = ns
@ -85,7 +84,6 @@ class StatsManager:
def current_time(self) -> float:
return time.monotonic()
def update(self, force: bool = False):
now = time.monotonic()
if force or now - self.last > INTERVAL:
@ -126,6 +124,7 @@ class StatsManager:
self.decrement_connections()
self.end_time = time.monotonic()
# Stats collector thread
class GlobalStats:
manager: 'SyncManager'
@ -151,6 +150,7 @@ class GlobalStats:
def get_stats(ns: 'Namespace') -> typing.Iterable[str]:
yield ';'.join([str(ns.current), str(ns.total), str(ns.sent), str(ns.recv)])
# Stats processor, invoked from command line
async def getServerStats(detailed: bool = False) -> None:
cfg = config.read()
@ -174,8 +174,8 @@ async def getServerStats(detailed: bool = False) -> None:
tmpdata = io.BytesIO()
cmd = consts.COMMAND_STAT if detailed else consts.COMMAND_INFO
writer.write(cmd + cfg.secret.encode())
await writer.drain()
while True:
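For reference, a tiny sketch of consuming the line produced by get_stats() above, which joins the current, total, sent and received counters with ';'. The sample line is made up.

line = '3;120;987654;123456'  # hypothetical output of get_stats()
current, total, sent, recv = (int(v) for v in line.split(';'))
print(f'{current} active / {total} total, {sent} bytes sent, {recv} bytes received')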

View File

@ -78,9 +78,7 @@ class TunnelProtocol(asyncio.Protocol):
# If there is a timeout task running
timeout_task: typing.Optional[asyncio.Task] = None
def __init__(
self, owner: 'proxy.Proxy'
) -> None:
def __init__(self, owner: 'proxy.Proxy') -> None:
# If no other side is given, we are the server part
super().__init__()
# transport is undefined until connection_made is called
@ -91,7 +89,7 @@ class TunnelProtocol(asyncio.Protocol):
self.destination = ('', 0)
self.tls_version = ''
self.tls_cipher = ''
# If other_side is given, we are the client part (that is, the tunnel from us to remote machine)
# In this case, only do_proxy is used
self.client = None
@ -124,9 +122,7 @@ class TunnelProtocol(asyncio.Protocol):
async def open_client() -> None:
try:
result = await TunnelProtocol.get_ticket_from_uds(
self.owner.cfg, ticket, self.source
)
result = await TunnelProtocol.get_ticket_from_uds(self.owner.cfg, ticket, self.source)
except Exception as e:
logger.error('ERROR %s', e.args[0] if e.args else e)
self.transport.write(consts.RESPONSE_ERROR_TICKET)
@ -146,8 +142,7 @@ class TunnelProtocol(asyncio.Protocol):
try:
family = (
socket.AF_INET6
if ':' in self.destination[0]
or (self.owner.cfg.ipv6 and not '.' in self.destination[0])
if ':' in self.destination[0] or (self.owner.cfg.ipv6 and '.' not in self.destination[0])
else socket.AF_INET
)
(_, self.client) = await loop.create_connection(
@ -161,7 +156,7 @@ class TunnelProtocol(asyncio.Protocol):
self.transport.resume_reading()
# send OK to client
self.transport.write(b'OK')
self.stats_manager.increment_connections()  # Increment connections counters
except Exception as e:
logger.error('Error opening connection: %s', e)
self.close_connection()
@ -171,7 +166,7 @@ class TunnelProtocol(asyncio.Protocol):
# From now, proxy connection
self.runner = self.do_proxy
def process_stats(self, full: bool) -> None:
def process_stats(self, full: bool) -> None: # pylint: disable=unused-argument
# if password is not already received, wait for it
if len(self.cmd) < consts.PASSWORD_LENGTH + consts.COMMAND_LENGTH:
return
@ -246,22 +241,22 @@ class TunnelProtocol(asyncio.Protocol):
try:
if command == consts.COMMAND_OPEN:
self.process_open()
elif command == consts.COMMAND_TEST:
return
if command == consts.COMMAND_TEST:
self.clean_timeout() # Stop timeout
logger.info('COMMAND: TEST')
self.transport.write(consts.RESPONSE_OK)
self.close_connection()
return
elif command in (consts.COMMAND_STAT, consts.COMMAND_INFO):
if command in (consts.COMMAND_STAT, consts.COMMAND_INFO):
# This is a stats request
try:
self.process_stats(full=command == consts.COMMAND_STAT)
except Exception as e:
logger.error('ERROR processing stats: %s', e.args[0] if e.args else e)
self.close_connection()
return
else:
raise Exception('Invalid command')
except Exception:
logger.error('ERROR from %s', self.pretty_source())
self.transport.write(consts.RESPONSE_ERROR_COMMAND)
@ -298,9 +293,7 @@ class TunnelProtocol(asyncio.Protocol):
)
# Notify end to uds, using a task because we are not an async function
asyncio.get_event_loop().create_task(
TunnelProtocol.notify_end_to_uds(
self.owner.cfg, self.notify_ticket, self.stats_manager
)
TunnelProtocol.notify_end_to_uds(self.owner.cfg, self.notify_ticket, self.stats_manager)
)
self.notify_ticket = b'' # Clean up so no more notifications
else:
@ -350,7 +343,6 @@ class TunnelProtocol(asyncio.Protocol):
def pretty_destination(self) -> str:
return TunnelProtocol.pretty_address(self.destination)
@staticmethod
async def _read_from_uds(
cfg: config.ConfigurationType,
@ -359,13 +351,9 @@ class TunnelProtocol(asyncio.Protocol):
queryParams: typing.Optional[typing.Mapping[str, str]] = None,
) -> typing.MutableMapping[str, typing.Any]:
try:
url = (
cfg.uds_server + '/' + ticket.decode() + '/' + msg + '/' + cfg.uds_token
)
url = cfg.uds_server + '/' + ticket.decode() + '/' + msg + '/' + cfg.uds_token
if queryParams:
url += '?' + '&'.join(
[f'{key}={value}' for key, value in queryParams.items()]
)
url += '?' + '&'.join([f'{key}={value}' for key, value in queryParams.items()])
# Set options
options: typing.Dict[str, typing.Any] = {'timeout': cfg.uds_timeout}
if cfg.uds_verify_ssl is False:
@ -378,24 +366,15 @@ class TunnelProtocol(asyncio.Protocol):
raise Exception(await r.text())
return await r.json()
except Exception as e:
raise Exception(f'TICKET COMMS ERROR: {ticket.decode()} {msg} {e!s}')
raise Exception(f'TICKET COMMS ERROR: {ticket.decode()} {msg} {e!s}') from e
@staticmethod
async def get_ticket_from_uds(
cfg: config.ConfigurationType, ticket: bytes, address: typing.Tuple[str, int]
) -> typing.MutableMapping[str, typing.Any]:
# Sanity checks
if len(ticket) != consts.TICKET_LENGTH:
raise ValueError(f'TICKET INVALID (len={len(ticket)})')
for n, i in enumerate(ticket.decode(errors='ignore')):
if (
(i >= 'a' and i <= 'z')
or (i >= '0' and i <= '9')
or (i >= 'A' and i <= 'Z')
):
continue # Correctus
raise ValueError(f'TICKET INVALID (char {i} at pos {n})')
# Check ticket using re
if consts.TICKET_REGEX.match(ticket.decode(errors='replace')) is None:
raise ValueError(f'TICKET INVALID ({ticket.decode(errors="replace")})')
return await TunnelProtocol._read_from_uds(cfg, ticket, address[0])

View File

@ -1,10 +1,3 @@
"""
Copyright (c) 2023 Adolfo Gómez García <dkmaster@dkmon.com>
This software is released under the MIT License.
https://opensource.org/licenses/MIT
"""
# -*- coding: utf-8 -*-
#
# Copyright (c) 2022 Virtual Cable S.L.U.
@ -39,8 +32,6 @@ import asyncio
import typing
import logging
from . import consts, config
logger = logging.getLogger(__name__)
if typing.TYPE_CHECKING:
@ -71,7 +62,7 @@ class TunnelClientProtocol(asyncio.Protocol):
self.transport = typing.cast('asyncio.transports.Transport', transport)
def connection_lost(self, exc: typing.Optional[Exception]) -> None:
# Ensure close other side if not server_side
# Ensure close other side
try:
self.receiver.close_connection()
except Exception:

View File

@ -37,14 +37,16 @@ import argparse
import signal
import ssl
import socket
import logging
from concurrent.futures import ThreadPoolExecutor
# event for stop notification
import threading
import threading # event for stop notification
import typing
import logging
from logging.handlers import RotatingFileHandler
from concurrent.futures import ThreadPoolExecutor
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass # no uvloop support
@ -73,8 +75,6 @@ def stop_signal(signum: int, frame: typing.Any) -> None:
def setup_log(cfg: config.ConfigurationType) -> None:
from logging.handlers import RotatingFileHandler
# Update logging if needed
if cfg.logfile:
fileh = RotatingFileHandler(
@ -96,9 +96,7 @@ def setup_log(cfg: config.ConfigurationType) -> None:
log.setLevel(cfg.loglevel)
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(cfg.loglevel)
formatter = logging.Formatter(
'%(levelname)s - %(message)s'
) # Basic log format, nice for syslog
formatter = logging.Formatter('%(levelname)s - %(message)s') # Basic log format, nice for syslog
handler.setFormatter(formatter)
log.addHandler(handler)
@ -107,10 +105,7 @@ def setup_log(cfg: config.ConfigurationType) -> None:
logger.debug('Configuration: %s', cfg)
async def tunnel_proc_async(
pipe: 'Connection', cfg: config.ConfigurationType, ns: 'Namespace'
) -> None:
async def tunnel_proc_async(pipe: 'Connection', cfg: config.ConfigurationType, ns: 'Namespace') -> None:
loop = asyncio.get_running_loop()
tasks: typing.List[asyncio.Task] = []
@ -123,9 +118,7 @@ async def tunnel_proc_async(
try:
while True:
# Clear back event, for next data
msg: typing.Optional[
typing.Tuple[socket.socket, typing.Tuple[str, int]]
] = pipe.recv()
msg: typing.Optional[typing.Tuple[socket.socket, typing.Tuple[str, int]]] = pipe.recv()
if msg:
return msg
except EOFError:
@ -147,13 +140,15 @@ async def tunnel_proc_async(
args['keyfile'] = cfg.ssl_certificate_key
if cfg.ssl_password:
args['password'] = cfg.ssl_password
context.load_cert_chain(**args)
# Set min version from string (1.2 or 1.3) as ssl.TLSVersion.TLSv1_2 or ssl.TLSVersion.TLSv1_3
if cfg.ssl_min_tls_version in ('1.2', '1.3'):
try:
context.minimum_version = getattr(ssl.TLSVersion, f'TLSv1_{cfg.ssl_min_tls_version.split(".")[1]}')
context.minimum_version = getattr(
ssl.TLSVersion, f'TLSv1_{cfg.ssl_min_tls_version.split(".")[1]}'
)
except Exception as e:
logger.exception('Setting min tls version failed: %s. Using defaults', e)
context.minimum_version = ssl.TLSVersion.TLSv1_2
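The min-TLS mapping used above can be exercised in isolation. A rough sketch follows; the helper name is made up and not part of the commit. It turns the config string '1.2' or '1.3' into an ssl.TLSVersion and falls back to TLSv1_2 for anything else.

import ssl

def minTlsVersion(value: str) -> ssl.TLSVersion:  # hypothetical helper
    if value in ('1.2', '1.3'):
        try:
            return getattr(ssl.TLSVersion, f'TLSv1_{value.split(".")[1]}')
        except Exception:
            pass
    return ssl.TLSVersion.TLSv1_2

print(minTlsVersion('1.3'))    # TLSVersion.TLSv1_3
print(minTlsVersion('bogus'))  # TLSVersion.TLSv1_2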
@ -178,25 +173,26 @@ async def tunnel_proc_async(
(sock, address) = await loop.run_in_executor(None, get_socket)
if not sock:
break # No more sockets, exit
logger.debug(f'CONNECTION from {address!r} (pid: {os.getpid()})')
logger.debug('CONNECTION from %s (pid: %s)', address, os.getpid())
# Because the proxy contains an "event" to stop, we need to create a new one for each connection
add_autoremovable_task(asyncio.create_task(proxy.Proxy(cfg, ns)(sock, context)))
except asyncio.CancelledError:
raise
except asyncio.CancelledError: # pylint: disable=try-except-raise
raise # Stop, but avoid generic exception
except Exception:
logger.error('NEGOTIATION ERROR from %s', address[0] if address else 'unknown')
except asyncio.CancelledError:
pass # Stop
# create task for server
add_autoremovable_task(asyncio.create_task(run_server()))
try:
while tasks and not do_stop.is_set():
to_wait = tasks[:] # Get a copy of the list
# Wait for "to_wait" tasks to finish, stop every 2 seconds to check if we need to stop
done, _ = await asyncio.wait(to_wait, return_when=asyncio.FIRST_COMPLETED, timeout=2)
# done, _ =
await asyncio.wait(to_wait, return_when=asyncio.FIRST_COMPLETED, timeout=2)
except asyncio.CancelledError:
logger.info('Task cancelled')
do_stop.set() # ensure we stop
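The wait loop above polls in 2-second slices so the stop flag can be re-checked without blocking forever. A self-contained sketch of the same pattern follows; here finished tasks are pruned inline, whereas the commit relies on add_autoremovable_task callbacks to drop them from the list.

import asyncio
import threading

async def waitForTasks(tasks: 'list[asyncio.Task]', do_stop: threading.Event) -> None:  # hypothetical helper
    while tasks and not do_stop.is_set():
        # Wait at most 2 seconds, then loop again to honour the stop flag
        done, _ = await asyncio.wait(tasks[:], return_when=asyncio.FIRST_COMPLETED, timeout=2)
        for task in done:
            tasks.remove(task)

async def demo() -> None:
    stop = threading.Event()
    tasks = [asyncio.create_task(asyncio.sleep(0.1)) for _ in range(3)]
    await waitForTasks(tasks, stop)

asyncio.run(demo())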
@ -209,7 +205,7 @@ async def tunnel_proc_async(
task.cancel()
except asyncio.CancelledError:
pass # Ignore, we are stopping
# for task in tasks:
# task.cancel()
@ -218,16 +214,15 @@ async def tunnel_proc_async(
logger.info('PROCESS %s stopped', os.getpid())
def process_connection(
client: socket.socket, addr: typing.Tuple[str, str], conn: 'Connection'
) -> None:
def process_connection(client: socket.socket, addr: typing.Tuple[str, str], conn: 'Connection') -> None:
data: bytes = b''
try:
# First, ensure handshake (simple handshake) and command
data = client.recv(len(consts.HANDSHAKE_V1))
if data != consts.HANDSHAKE_V1:
raise Exception('Invalid data from {}: {}'.format(addr[0], data.hex())) # Invalid handshake
raise Exception(f'Invalid data from {addr[0]}: {data.hex()}') # Invalid handshake
conn.send((client, addr))
del client # Ensure socket is controlled on child process
except Exception as e:
@ -241,9 +236,7 @@ def tunnel_main(args: 'argparse.Namespace') -> None:
# Try to bind to port as running user
# Wait for socket incoming connections and spread them
socket.setdefaulttimeout(
3.0
) # So we can check for stop from time to time and not block forever
socket.setdefaulttimeout(3.0) # So we can check for stop from time to time and not block forever
af_inet = socket.AF_INET6 if args.ipv6 or cfg.ipv6 or ':' in cfg.listen_address else socket.AF_INET
sock = socket.socket(af_inet, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
@ -267,15 +260,13 @@ def tunnel_main(args: 'argparse.Namespace') -> None:
setup_log(cfg)
logger.info(
'Starting tunnel server on %s:%s', cfg.listen_address, cfg.listen_port
)
logger.info('Starting tunnel server on %s:%s', cfg.listen_address, cfg.listen_port)
if setproctitle:
setproctitle.setproctitle(f'UDSTunnel {cfg.listen_address}:{cfg.listen_port}')
# Create pid file
if cfg.pidfile:
with open(cfg.pidfile, mode='w') as f:
with open(cfg.pidfile, mode='w', encoding='utf-8') as f:
f.write(str(os.getpid()))
except Exception as e:
@ -288,7 +279,7 @@ def tunnel_main(args: 'argparse.Namespace') -> None:
signal.signal(signal.SIGINT, stop_signal)
signal.signal(signal.SIGTERM, stop_signal)
except Exception as e:
# Signal not available on threads, and we use threads on tests,
# so we will ignore this because on tests signals are not important
logger.warning('Signal not available: %s', e)
@ -335,9 +326,7 @@ def tunnel_main(args: 'argparse.Namespace') -> None:
def main() -> None:
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument(
'-t', '--tunnel', help='Starts the tunnel server', action='store_true'
)
group.add_argument('-t', '--tunnel', help='Starts the tunnel server', action='store_true')
# group.add_argument('-r', '--rdp', help='RDP Tunnel for traffic accounting')
group.add_argument(
'-s',