Mirror of https://github.com/altlinux/gpupdate.git (synced 2025-10-17 03:33:18 +03:00)
Compare commits
226 Commits: 0.9.4-alt1 ... ntp_applie

Commit SHA1 list:
ca7fb05fea
249d3a6caa
7b6cb64d58
da71aaf0dd
d35dd5433d
cb6bc1f280
3d79315470
077d67c417
77b6ffb81a
e4a41e9d07
0460f64b47
477a99c703
385e9ae02f
18a7426863
3f2176659a
72e756c778
bb340112d5
fe4a5fa78c
88efbfe3e3
edbdaccb71
b9c2b91add
f289584044
88773f4e99
cd71ac4c81
e08546ad2f
4c59c4ba7c
6e1898ca27
3cab21e9c3
bc1676dc71
5a60253dac
e14043174a
bfc05fee36
762fc4e525
66008b8a37
ffc3bc46c1
11abedd7b6
a01609afc3
f1a415bdae
5d1cf84304
3c3147c2fc
e62739a43b
727d7e073f
a13373cf92
1c0678957c
bdf9300be4
19acaad7e1
6b1aa004c4
d3740a106c
9be2604be9
a35e578cf4
52eaea95c6
8f65f79c6c
e50c5d7883
549315fe48
91824acdab
cd25431bb8
10b9fa0ff1
44585adddd
d3213b4d0b
80e9dba4c4
1ed5c0f043
f801c09737
fd17b19f33
f1e22e0cc5
efc9dac26e
e085c10bb3
5b08fcd917
408bccb76d
3f32d3bbda
fa707104b7
69ac2abf8b
3a8af98231
41242561e1
97e5418666
5015da40b7
cdfc39540f
95af821475
b63fe63784
889bf5124a
2da7758621
cb720084fa
baba56465c
020e5f3128
f07f752211
31bcb2cd2a
cc80d8c74a
931aaf9300
7ade31de8a
86d02146e2
cf979596b3
9a74efefde
73404ceced
23be105462
03977710a4
d76c0a9a00
a01d5253dc
403432ecd2
47a3c6b39c
51c218eb7a
1513eab336
4701847d1b
1486084594
70bc4faea3
6283d72ccc
c795a8323e
3187b9f0f1
0e159d34d0
4327f0b17b
d1169eaeef
c00e2d7f09
fdcbda576b
43161e61bc
31ba4ad214
877ce7b2aa
856eecf708
b869573f31
f01bf08a95
5ae9031cda
1aed44454c
474378d17d
1e8a6c61c6
925947765d
8f8b7045b7
d6c438f277
326dc0600b
5dd3ca17e8
fc650b125a
dc9479fbbe
c71356211f
7ab98ffa6e
3519be7bc6
7926137e84
d5ecd040df
be0603e809
3ff6f053ea
1b95a20cad
818f5919fe
8765ef862b
66ebe87592
bced76ac4d
4ddea369c5
6ac15e6be2
487483fb6f
20e4a77ff7
22cff21d3a
b69bc56e38
fbf192a984
7df737be29
d321264866
1991f143be
08b5b2262c
b1b08f2ab0
382fa292bd
ca346cc115
c8727b0215
be2aa6889f
edd2a5e7c4
0165167881
b2c7144a0d
2f32c71902
d871e7d717
db31db0143
ab74c4e878
75768fdb48
72ad8dd9c4
0f3b0cc265
b253ce7140
df37fd051e
776281c0b3
c5cc32688f
8183fe4f22
590464f230
f49a7c7671
0d2ee48434
f8c8f89327
99cdb4a043
dff638bc57
693a1d3a08
653d8c5f19
382c425b97
1f48a203ff
0a93d16e04
d392a01046
5a39275d1f
90699f8fc1
f22fc38972
11a4893e90
20c651746c
0e9334f3e4
88887f7111
c7bafc4d21
d00e99e5d4
a45483c550
e7548bcbc8
cab3811627
382a3e2bd2
9571f46e73
57dda04216
431b18e177
17d35b8f4d
5d34a51e07
d26eaca24f
9357d5006f
692a950d4a
12ee1d7a8b
87c5e1e75f
7f7064ddd6
9eb81ea32f
78ff997987
56aa8078c4
94d039653a
e6f19a2116
86c240b9df
dae3cf2c6c
4fe7d0a73e
54d0c7c2cb
954a5598fb
ba4eb4bf28
aa10d5bbf9
f3062668fa
046079d4c9
414a827eb8
8ce322d552
84d5122319
436eeb3760
4b9ef4335a
19  dist/gpupdate-group-users  (vendored, new executable file)
@@ -0,0 +1,19 @@
#!/bin/sh

. /etc/control.d/functions

CONFIG=/etc/pam.d/system-policy-gpupdate

new_subst disabled \
    '^[[:space:]]*session[[:space:]]+\[.*default=1.*\][[:space:]]+pam_succeed_if.so user ingroup users.*' \
    's,^\([[:space:]]*session[[:space:]]\+\[.*\)default=[[:alnum:]]\+\(.*pam_succeed_if.so user ingroup users.*\)$,\1default=1\2,'
new_subst enabled \
    '^[[:space:]]*session[[:space:]]+\[.*default=ignore.*\][[:space:]]+pam_succeed_if.so user ingroup users.*' \
    's,^\([[:space:]]*session[[:space:]]\+\[.*\)default=[[:alnum:]]\+\(.*pam_succeed_if.so user ingroup users.*\)$,\1default=ignore\2,'

new_help disabled "Disable group policy applying for users in 'users' group only"
new_help enabled "Enable group policy applying for users in 'users' group only"

new_summary "Group policy applying for users in 'users' group only"

control_subst "$CONFIG" "$*"
19  dist/gpupdate-localusers  (vendored, new executable file)
@@ -0,0 +1,19 @@
#!/bin/sh

. /etc/control.d/functions

CONFIG=/etc/pam.d/system-policy-gpupdate

new_subst disabled \
    '^[[:space:]]*session[[:space:]]+\[.*success=2.*\][[:space:]]+pam_localuser.so' \
    's,^\([[:space:]]*session[[:space:]]\+\[.*\)success=[[:alnum:]]\+\(.*pam_localuser.so.*\)$,\1success=2\2,'
new_subst enabled \
    '^[[:space:]]*session[[:space:]]+\[.*success=1.*\][[:space:]]+pam_localuser.so' \
    's,^\([[:space:]]*session[[:space:]]\+\[.*\)success=[[:alnum:]]\+\(.*pam_localuser.so.*\)$,\1success=1\2,'

new_help disabled 'Disable group policy applying for local users'
new_help enabled 'Enable group policy applying for local users'

new_summary 'Group policy applying for local users'

control_subst "$CONFIG" "$*"
4  dist/gpupdate-remote-policy  (vendored, new file)
@@ -0,0 +1,4 @@
#%PAM-1.0
#auth		optional	pam_mount.so
session		required	pam_mkhomedir.so silent
#session	optional	pam_mount.so
12  dist/gpupdate-scripts-run-user.service  (vendored, new file)
@@ -0,0 +1,12 @@
[Unit]
Description=Run Group Policy scripts for a user
After=gpupdate-user.service

[Service]
Type=oneshot
RemainAfterExit=true
ExecStart=/usr/libexec/gpupdate/scripts_runner --mode USER --action LOGON --user %u
ExecStop=/usr/libexec/gpupdate/scripts_runner --mode USER --action LOGOFF --user %u

[Install]
WantedBy=default.target
15  dist/gpupdate-scripts-run.service  (vendored, new file)
@@ -0,0 +1,15 @@
[Unit]
Description=Running Group Policy Scripts
After=gpupdate.service

[Service]
Environment=PATH=/bin:/sbin:/usr/bin:/usr/sbin
UnsetEnvironment=LANG LANGUAGE LC_CTYPE LC_NUMERIC LC_TIME LC_COLLATE LC_MONETARY LC_MESSAGES LC_PAPER LC_NAME LC_ADDRESS LC_TELEPHONE LC_MEASUREMENT LC_IDENTIFICATION
Type=oneshot
RemainAfterExit=true
ExecStart=/usr/libexec/gpupdate/scripts_runner --mode MACHINE --action STARTUP
ExecStop=/usr/libexec/gpupdate/scripts_runner --mode MACHINE --action SHUTDOWN
StandardOutput=journal

[Install]
WantedBy=multi-user.target
19  dist/gpupdate-system-uids  (vendored, new executable file)
@@ -0,0 +1,19 @@
#!/bin/sh

. /etc/control.d/functions

CONFIG=/etc/pam.d/system-policy-gpupdate

new_subst disabled \
    '^[[:space:]]*session[[:space:]]+\[.*default=1.*\][[:space:]]+pam_succeed_if.so uid >= 500.*' \
    's,^\([[:space:]]*session[[:space:]]\+\[.*\)default=[[:alnum:]]\+\(.*pam_succeed_if.so uid >= 500.*\)$,\1default=1\2,'
new_subst enabled \
    '^[[:space:]]*session[[:space:]]+\[.*default=ignore.*\][[:space:]]+pam_succeed_if.so uid >= 500.*' \
    's,^\([[:space:]]*session[[:space:]]\+\[.*\)default=[[:alnum:]]\+\(.*pam_succeed_if.so uid >= 500.*\)$,\1default=ignore\2,'

new_help disabled "Disable group policy applying for users with not system uids only"
new_help enabled "Enable group policy applying for users with not system uids only"

new_summary "Group policy applying for users with not system uids (greater or equal 500) only"

control_subst "$CONFIG" "$*"
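These three control files hook the PAM policy toggles into ALT's control facility, the same facility the gpoa control applier (shown further down in this diff) drives through subprocess. Below is a minimal, hedged sketch of toggling such a control from Python, assuming /usr/sbin/control behaves the way the applier code expects: 'list' prints the allowed values, and passing one of those values sets it.

    import subprocess

    def set_control(name, value):
        # Ask the control facility which values are allowed
        # (mirrors control._query_control_values in this diff).
        out = subprocess.run(['/usr/sbin/control', name, 'list'],
                             capture_output=True, text=True, check=True)
        allowed = out.stdout.split()
        if value not in allowed:
            raise ValueError('{} is not a valid value for {}'.format(value, name))
        # Apply the new value, e.g. set_control('gpupdate-system-uids', 'enabled').
        subprocess.run(['/usr/sbin/control', name, value], check=True)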
11  dist/gpupdate-user.service  (vendored)
@@ -4,13 +4,10 @@ Description=gpupdate in userspace

# gpupdate on Windows runs once per hour
[Service]
Environment="PATH=/bin:/sbin:/usr/bin:/usr/sbin"
Type=simple
RestartSec=3600
TimeoutSec=3000
Restart=always
ExecStart=/usr/sbin/gpoa
Environment=PATH=/bin:/sbin:/usr/bin:/usr/sbin
UnsetEnvironment=LANG LANGUAGE LC_CTYPE LC_NUMERIC LC_TIME LC_COLLATE LC_MONETARY LC_MESSAGES LC_PAPER LC_NAME LC_ADDRESS LC_TELEPHONE LC_MEASUREMENT LC_IDENTIFICATION
Type=oneshot
ExecStart=/usr/bin/gpupdate --target USER

[Install]
WantedBy=default.target
9  dist/gpupdate-user.timer  (vendored, new file)
@@ -0,0 +1,9 @@
[Unit]
Description=Run gpupdate-user every hour

[Timer]
OnStartupSec=1
OnUnitActiveSec=60min

[Install]
WantedBy=timers.target
8  dist/gpupdate.service  (vendored)
@@ -3,11 +3,9 @@ Description=Group policy update for machine
After=syslog.target network-online.target sssd.service

[Service]
Environment="PATH=/bin:/sbin:/usr/bin:/usr/sbin"
Type=simple
RestartSec=3600
TimeoutSec=3000
Restart=always
Environment=PATH=/bin:/sbin:/usr/bin:/usr/sbin
UnsetEnvironment=LANG LANGUAGE LC_CTYPE LC_NUMERIC LC_TIME LC_COLLATE LC_MONETARY LC_MESSAGES LC_PAPER LC_NAME LC_ADDRESS LC_TELEPHONE LC_MEASUREMENT LC_IDENTIFICATION
Type=oneshot
ExecStart=/usr/bin/gpupdate
StandardOutput=journal
9  dist/gpupdate.timer  (vendored, new file)
@@ -0,0 +1,9 @@
[Unit]
Description=Run gpupdate every hour

[Timer]
OnStartupSec=1
OnUnitActiveSec=60min

[Install]
WantedBy=timers.target
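Both timers fire one second after startup and then every 60 minutes, replacing the old Restart/RestartSec loop in the oneshot service units. A small sketch of enabling and starting such a timer over the systemd D-Bus API, in the same style as the systemd_unit applier later in this diff (the unit name is shown for illustration):

    import dbus

    bus = dbus.SystemBus()
    systemd = bus.get_object('org.freedesktop.systemd1', '/org/freedesktop/systemd1')
    manager = dbus.Interface(systemd, 'org.freedesktop.systemd1.Manager')
    # Enable persistently (runtime=False, force=True) and start the timer right away.
    manager.EnableUnitFiles(['gpupdate.timer'], dbus.Boolean(False), dbus.Boolean(True))
    manager.StartUnit('gpupdate.timer', 'replace')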
10  dist/system-policy-gpupdate  (vendored)
@@ -1,4 +1,12 @@
#%PAM-1.0
session		required	pam_mkhomedir.so silent
session		[success=2 perm_denied=ignore default=die]	pam_localuser.so
session		substack	gpupdate-remote-policy
session		[default=1]	pam_permit.so
session		[default=6]	pam_permit.so
session		[success=1 default=ignore]	pam_succeed_if.so user ingroup users quiet
session		[default=4]	pam_permit.so
session		[success=1 default=ignore]	pam_succeed_if.so uid >= 500 quiet
session		[default=2]	pam_permit.so
-session	required	pam_oddjob_gpupdate.so
session		optional	pam_env.so user_readenv=1 conffile=/etc/gpupdate/environment user_envfile=.gpupdate_environment
session		required	pam_permit.so
@@ -25,7 +25,7 @@ from gpt.gpt import gpt, get_local_gpt
from util.util import (
    get_machine_name
)
from util.windows import get_sid
from util.sid import get_sid
import util.preg
from util.logging import slogm

@@ -52,5 +52,6 @@ class nodomain_backend(applier_backend):
        self.storage.wipe_hklm()
        self.storage.wipe_user(self.storage.get_info('machine_sid'))
        local_policy = get_local_gpt(self.sid)
        local_policy.merge()
        local_policy.merge_machine()
        local_policy.merge_user()
@@ -18,7 +18,7 @@
import os
# Facility to determine GPTs for user
from samba.gpclass import check_safe_path, check_refresh_gpo_list
from samba.gpclass import check_safe_path

from .applier_backend import applier_backend
from storage import cache_factory, registry_factory
@@ -31,11 +31,12 @@ from util.kerberos import (
    machine_kinit
    , machine_kdestroy
)
from util.windows import get_sid
from util.sid import get_sid
import util.preg
from util.logging import log

class samba_backend(applier_backend):
    __user_policy_mode_key = 'Software\\Policies\\Microsoft\\Windows\\System\\UserPolicyMode'

    def __init__(self, sambacreds, username, domain, is_machine):
        self.cache_path = '/var/cache/gpupdate/creds/krb5cc_{}'.format(os.getpid())
@@ -71,6 +72,22 @@ class samba_backend(applier_backend):
        if self.__kinit_successful:
            machine_kdestroy()

    def get_policy_mode(self):
        '''
        Get UserPolicyMode parameter value in order to determine if it
        is possible to work with user's part of GPT. This value is
        checked only if working for user's SID.
        '''
        upm = self.storage.get_hklm_entry(self.__user_policy_mode_key)
        if upm and upm.data:
            upm = int(upm.data)
            if upm < 0 or upm > 2:
                upm = 0
        else:
            upm = 0

        return upm

    def retrieve_and_store(self):
        '''
        Retrieve settings and strore it in a database
@@ -82,19 +99,21 @@ class samba_backend(applier_backend):
        except Exception as exc:
            log('F2')
            raise exc
        self.storage.wipe_hklm()
        self.storage.wipe_user(self.storage.get_info('machine_sid'))
        for gptobj in machine_gpts:
            try:
                gptobj.merge()
            except Exception as exc:
                logdata = dict()
                logdata['msg'] = str(exc)
                log('E26', logdata)

        if self._is_machine_username:
            self.storage.wipe_hklm()
            self.storage.wipe_user(self.storage.get_info('machine_sid'))
            for gptobj in machine_gpts:
                try:
                    gptobj.merge_machine()
                except Exception as exc:
                    logdata = dict()
                    logdata['msg'] = str(exc)
                    log('E26', logdata)

        # Load user GPT values in case user's name specified
        # This is a buggy implementation and should be tested more
        if not self._is_machine_username:
        else:
            user_gpts = list()
            try:
                user_gpts = self._get_gpts(self.username, self.sid)
@@ -102,13 +121,30 @@
                log('F3')
                raise exc
            self.storage.wipe_user(self.sid)
            for gptobj in user_gpts:
                try:
                    gptobj.merge()
                except Exception as exc:
                    logdata = dict()
                    logdata['msg'] = str(exc)
                    log('E27', logdata)

            # Merge user settings if UserPolicyMode set accordingly
            # and user settings (for HKCU) are exist.
            policy_mode = self.get_policy_mode()
            logdata = dict({'mode': upm2str(policy_mode), 'sid': self.sid})
            log('D152', logdata)

            if policy_mode < 2:
                for gptobj in user_gpts:
                    try:
                        gptobj.merge_user()
                    except Exception as exc:
                        logdata = dict()
                        logdata['msg'] = str(exc)
                        log('E27', logdata)

            if policy_mode > 0:
                for gptobj in machine_gpts:
                    try:
                        gptobj.merge_user()
                    except Exception as exc:
                        logdata = dict()
                        logdata['msg'] = str(exc)
                        log('E63', logdata)

    def _check_sysvol_present(self, gpo):
        '''
@@ -145,3 +181,16 @@ class samba_backend(applier_backend):

        return gpts

def upm2str(upm_num):
    '''
    Translate UserPolicyMode to string.
    '''
    result = 'Not configured'

    if upm_num in [1, '1']:
        result = 'Replace'

    if upm_num in [2, '2']:
        result = 'Merge'

    return result
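With the logic above, policy_mode 0 merges only the user's own GPTs into the user hive, mode 1 merges both the user and the machine GPTs, and mode 2 merges only the machine GPTs. A tiny illustration of the new helper and of how the mode gates the two merge loops:

    # upm2str() as defined above maps the raw registry value to a readable label.
    for mode in (0, 1, 2):
        apply_user_gpts = mode < 2             # skipped only in mode 2
        apply_machine_gpts_to_user = mode > 0  # only in modes 1 and 2
        print(mode, upm2str(mode), apply_user_gpts, apply_machine_gpts_to_user)
    # 0 Not configured True False
    # 1 Replace True True
    # 2 Merge False True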
@@ -19,7 +19,7 @@
import subprocess
import threading
import logging
from util.logging import slogm
from util.logging import slogm, log

def control_subst(preg_name):
    '''
@@ -55,10 +55,12 @@ class control:
        values = list()

        popen_call = ['/usr/sbin/control', self.control_name, 'list']
        with subprocess.Popen(popen_call, stdout=subprocess.PIPE) as proc:
        with subprocess.Popen(popen_call, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
            values = proc.stdout.readline().decode('utf-8').split()
            valErr = proc.stderr.readline().decode('utf-8')
            if valErr:
                raise ValueError(valErr)
            proc.wait()

        return values

    def _map_control_status(self, int_status):
@@ -68,7 +70,11 @@ class control:
        try:
            str_status = self.possible_values[int_status]
        except IndexError as exc:
            logging.error(slogm('Error getting control ({}) value from {} by index {}'.format(self.control_name, self.possible_values, int_status)))
            logdata = dict()
            logdata['control'] = self.control_name
            logdata['value from'] = self.possible_values
            logdata['by index'] = int_status
            log('E41', )
            str_status = None

        return str_status
@@ -93,20 +99,30 @@ class control:
        if type(self.control_value) == int:
            status = self._map_control_status(self.control_value)
            if status == None:
                logging.error(slogm('\'{}\' is not in possible values for control {}'.format(self.control_value, self.control_name)))
                logdata = dict()
                logdata['control'] = self.control_name
                logdata['inpossible values'] = self.self.control_value
                log('E42', logdata)
                return
        elif type(self.control_value) == str:
            if self.control_value not in self.possible_values:
                logging.error(slogm('\'{}\' is not in possible values for control {}'.format(self.control_value, self.control_name)))
                logdata = dict()
                logdata['control'] = self.control_name
                logdata['inpossible values'] = self.self.control_value
                log('E59', logdata)
                return
            status = self.control_value

        logging.debug(slogm('Setting control {} to {}'.format(self.control_name, status)))
        logdata = dict()
        logdata['control'] = self.control_name
        logdata['status'] = status
        log('D68', logdata)

        try:
            popen_call = ['/usr/sbin/control', self.control_name, status]
            with subprocess.Popen(popen_call, stdout=subprocess.PIPE) as proc:
                proc.wait()
        except:
            logging.error(slogm('Unable to set {} to {}'.format(self.control_name, status)))

            logdata = dict()
            logdata['control'] = self.control_name
            logdata['status'] = status
            log('E43', logdata)
196  gpoa/frontend/appliers/file_cp.py  (new file)
@@ -0,0 +1,196 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2022 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.


from gpt.folders import (
    FileAction
    , action_letter2enum
)
from .folder import str2bool
from util.logging import log
import shutil
from pathlib import Path
from util.windows import expand_windows_var
from util.util import get_homedir
from util.exceptions import NotUNCPathError

class Files_cp:
    def __init__(self, file_obj, file_cache ,username=None):
        self.file_cache = file_cache
        targetPath = expand_windows_var(file_obj.targetPath, username).replace('\\', '/')
        self.targetPath = check_target_path(targetPath, username)
        if not self.targetPath:
            return
        self.fromPath = (expand_windows_var(file_obj.fromPath, username).replace('\\', '/')
                         if file_obj.fromPath else None)
        self.action = action_letter2enum(file_obj.action)
        self.readOnly = str2bool(file_obj.readOnly)
        self.archive = str2bool(file_obj.archive)
        self.hidden = str2bool(file_obj.hidden)
        self.suppress = str2bool(file_obj.suppress)
        self.username = username
        self.fromPathFiles = self.get_list_files()
        self.act()

    def get_target_file(self, targetPath, fromPath):
        try:
            if fromPath and targetPath.is_dir():
                if self.hidden:
                    return targetPath.joinpath('.' + fromPath.name)
                else:
                    return targetPath.joinpath(fromPath.name)

            else:
                if not self.hidden:
                    return targetPath
                else:
                    return targetPath.parent.joinpath('.' + targetPath.name)

        except Exception as exc:
            logdata = dict({'exc': exc})
            log('D163', logdata)

    def set_read_only(self, targetFile):
        if self.readOnly:
            shutil.os.chmod(targetFile, int('444', base = 8))
        else:
            shutil.os.chmod(targetFile, int('664', base = 8))

    def _create_action(self):
        for fromPath in self.fromPathFiles:
            try:
                targetFile = self.get_target_file(self.targetPath, fromPath)
                if not targetFile.exists():
                    targetFile.write_bytes(fromPath.read_bytes())
                    if self.username:
                        shutil.chown(targetFile, self.username)
                    self.set_read_only(targetFile)
            except Exception as exc:
                logdata = dict()
                logdata['exc'] = exc
                logdata['fromPath'] = fromPath
                logdata['targetPath'] = self.targetPath
                logdata['targetFile'] = targetFile
                log('D164', logdata)

    def _delete_action(self):
        targetFile = Path(self.targetPath)
        try:
            if targetFile.exists():
                targetFile.unlink()
        except Exception as exc:
            logdata = dict()
            logdata['exc'] = exc
            logdata['targetPath'] = self.targetPath
            logdata['targetFile'] = targetFile
            log('D165', logdata)

    def _update_action(self):
        for fromPath in self.fromPathFiles:
            targetFile = self.get_target_file(self.targetPath, fromPath)
            try:
                targetFile.write_bytes(fromPath.read_bytes())
                if self.username:
                    shutil.chown(self.targetPath, self.username)
                self.set_read_only(targetFile)
            except Exception as exc:
                logdata = dict()
                logdata['exc'] = exc
                logdata['fromPath'] = self.fromPath
                logdata['targetPath'] = self.targetPath
                logdata['targetFile'] = targetFile
                log('D166', logdata)

    def act(self):
        if self.action == FileAction.CREATE:
            self._create_action()
        if self.action == FileAction.UPDATE:
            self._update_action()
        if self.action == FileAction.DELETE:
            self._delete_action()
        if self.action == FileAction.REPLACE:
            self._delete_action()
            self._create_action()

    def get_list_files(self):
        ls_all_files = list()
        logdata = dict()
        logdata['targetPath'] = self.targetPath
        if self.fromPath and self.fromPath.split('/')[-1] != '*':
            try:
                self.file_cache.store(self.fromPath)
                fromPath = Path(self.file_cache.get(self.fromPath))
                ls_all_files.append(fromPath)
            except NotUNCPathError as exc:
                fromPath = Path(self.fromPath)
                if fromPath.exists():
                    ls_all_files.append(fromPath)
            except Exception as exc:
                logdata['fromPath'] = self.fromPath
                logdata['exc'] = exc
                log('W13', logdata)
        elif self.fromPath and len(self.fromPath.split('/')) > 2:
            ls_files = self.file_cache.get_ls_smbdir(self.fromPath[:-1])
            if ls_files:
                ls_from_paths = [self.fromPath[:-1] + file_s for file_s in ls_files]
                for from_path in ls_from_paths:
                    try:
                        self.file_cache.store(from_path)
                        fromPath = Path(self.file_cache.get(from_path))
                        ls_all_files.append(fromPath)
                    except Exception as exc:
                        logdata['fromPath'] = self.fromPath
                        logdata['exc'] = exc
                        log('W13', logdata)
            else:
                try:
                    fromLocalPath = Path(self.fromPath[:-1])
                    if fromLocalPath.is_dir():
                        ls = [fromFile for fromFile in fromLocalPath.iterdir() if fromFile.is_file()]
                        for fromPath in ls:
                            ls_all_files.append(fromPath)
                except Exception as exc:
                    logdata['fromPath'] = self.fromPath
                    logdata['exc'] = exc
                    log('W13', logdata)
        else:
            fromPath = Path(self.fromPath) if self.fromPath else None
            ls_all_files.append(fromPath)
        return ls_all_files

def check_target_path(path_to_check, username = None):
    '''
    Function for checking the correctness of the path
    '''
    checking = Path(path_to_check)
    if checking.is_dir():
        if username and path_to_check == '/':
            return Path(get_homedir(username))
        return checking
    #Check for path directory without '/something' suffix
    elif (len(path_to_check.split('/')) > 2
          and Path(path_to_check.replace(path_to_check.split('/')[-1], '')).is_dir()):
        return checking
    elif username:
        target_path = Path(get_homedir(username))
        res = target_path.joinpath(path_to_check
                                   if path_to_check[0] != '/'
                                   else path_to_check[1:])
        return res
    else:
        return False
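check_target_path() decides where a Files preference item lands: an existing directory is used as-is (or the user's home directory when the path is '/'), a path whose parent directory exists is kept, and anything else is resolved relative to the user's home. A small sketch with hypothetical paths (the directories and the /home/user1 result from get_homedir are assumptions made for illustration):

    # Existing directory: returned unchanged.
    check_target_path('/etc/skel')                    # -> Path('/etc/skel')
    # Parent directory exists, file itself does not: kept as the target file path.
    check_target_path('/etc/skel/new.conf')           # -> Path('/etc/skel/new.conf')
    # Unknown relative path plus a username: resolved under get_homedir(username).
    check_target_path('Desktop/readme.txt', 'user1')  # -> Path('/home/user1/Desktop/readme.txt')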
@@ -25,29 +25,36 @@ from gpt.folders import (
    , action_letter2enum
)
from util.windows import expand_windows_var
from util.util import get_homedir

def remove_dir_tree(path, delete_files=False, delete_folder=False, delete_sub_folders=False):
    content = list()
    for entry in path.iterdir():
        if entry.is_file():
            content.append(entry)
        if entry.is_file() and delete_files:
            entry.unlink()
        if entry.is_dir():
            if delete_sub_folders:
                remove_dir_tree(entry,
                    delete_files,
                    delete_folder,
                    delete_sub_folders)

        if delete_folder:
            content.remove(entry)
        if entry.is_dir() and delete_sub_folders:
            content.remove(entry)
            remove_dir_tree(entry, delete_files, delete_folder, delete_sub_folders)
    if delete_folder and not content:
        path.rmdir()


def str2bool(boolstr):
    if boolstr.lower in ['true', 'yes', '1']:
    if boolstr and boolstr.lower() in ['true', 'yes', '1']:
        return True
    return False


class Folder:
    def __init__(self, folder_object, username):
        self.folder_path = Path(expand_windows_var(folder_object.path, username).replace('\\', '/'))
    def __init__(self, folder_object, username=None):
        folder_path = expand_windows_var(folder_object.path, username).replace('\\', '/')
        if username:
            folder_path = folder_path.replace(get_homedir(username), '')
            self.folder_path = Path(get_homedir(username)).joinpath(folder_path if folder_path [0] != '/' else folder_path [1:])
        else:
            self.folder_path = Path(folder_path)
        self.action = action_letter2enum(folder_object.action)
        self.delete_files = str2bool(folder_object.delete_files)
        self.delete_folder = str2bool(folder_object.delete_folder)
@@ -57,10 +64,11 @@ class Folder:
        self.folder_path.mkdir(parents=True, exist_ok=True)

    def _delete_action(self):
        remove_dir_tree(self.folder_path,
            self.delete_files,
            self.delete_folders,
            self.delete_sub_folders)
        if self.folder_path.exists():
            remove_dir_tree(self.folder_path,
                self.delete_files,
                self.delete_folder,
                self.delete_sub_folders)

    def act(self):
        if self.action == FileAction.CREATE:
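The str2bool() change fixes a real bug: the old code compared the bound method boolstr.lower (no call parentheses) against the list of strings, so it could never match and every flag came back False; the new version also tolerates a missing value. A quick demonstration of the difference:

    def str2bool_old(boolstr):
        if boolstr.lower in ['true', 'yes', '1']:   # method object, never equal to a str
            return True
        return False

    def str2bool_new(boolstr):
        if boolstr and boolstr.lower() in ['true', 'yes', '1']:
            return True
        return False

    print(str2bool_old('TRUE'))   # False (the bug)
    print(str2bool_new('TRUE'))   # True
    print(str2bool_new(None))     # False instead of raising AttributeError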
@@ -21,7 +21,7 @@ import os
import logging
from gi.repository import Gio, GLib

from util.logging import slogm
from util.logging import slogm, log

class system_gsetting:
    def __init__(self, schema, path, value, lock, helper_function=None):
@@ -59,18 +59,27 @@ class system_gsettings:
        self.override_file_path = override_file_path

    def append(self, schema, path, data, lock, helper):
        self.gsettings.append(system_gsetting(schema, path, data, lock, helper))
        if check_existing_gsettings(schema, path):
            self.gsettings.append(system_gsetting(schema, path, data, lock, helper))
        else:
            logdata = dict()
            logdata['schema'] = schema
            logdata['path'] = path
            logdata['data'] = data
            logdata['lock'] = lock
            log('D150', logdata)

    def apply(self):
        config = configparser.ConfigParser()

        for gsetting in self.gsettings:
            logdata = dict()
            logdata['gsetting.schema'] = gsetting.schema
            logdata['gsetting.path'] = gsetting.path
            logdata['gsetting.value'] = gsetting.value
            logdata['gsetting.lock'] = gsetting.lock
            settings = Gio.Settings(schema=gsetting.schema)
            logging.debug(slogm('Applying machine setting {}.{} to {} {}'.format(gsetting.schema,
                gsetting.path,
                gsetting.value,
                gsetting.value,
                'locked' if gsetting.lock else 'unlocked')))
            log('D89', logdata)
            gsetting.apply(settings, config, self.locks)

        with open(self.override_file_path, 'w') as f:
@@ -96,7 +105,7 @@ class system_gsettings:
def glib_map(value, glib_type):
    result_value = value

    if glib_type == 'i' or glib_type == 'b':
    if glib_type == 'i' or glib_type == 'b' or glib_type == 'q':
        result_value = GLib.Variant(glib_type, int(value))
    else:
        result_value = GLib.Variant(glib_type, value)
@@ -114,6 +123,38 @@ def glib_value(schema, path, value, settings):
    # Build the new value with the determined type
    return glib_map(value, glib_value_type)

def check_existing_gsettings (schema, path):
    source = Gio.SettingsSchemaSource.get_default()
    sourceSchema = (source.lookup(schema, False))
    if bool(sourceSchema) and sourceSchema.has_key(path):
        return True
    else:
        return False

class user_gsettings:
    def __init__(self):
        self.gsettings = list()

    def append(self, schema, path, value, helper=None):
        if check_existing_gsettings(schema, path):
            self.gsettings.append(user_gsetting(schema, path, value, helper))
        else:
            logdata = dict()
            logdata['schema'] = schema
            logdata['path'] = path
            logdata['data'] = value
            log('D151', logdata)

    def apply(self):
        for gsetting in self.gsettings:
            logdata = dict()
            logdata['gsetting.schema'] = gsetting.schema
            logdata['gsetting.path'] = gsetting.path
            logdata['gsetting.value'] = gsetting.value
            log('D85', logdata)
            gsetting.apply()


class user_gsetting:
    def __init__(self, schema, path, value, helper_function=None):
        self.schema = schema
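check_existing_gsettings() validates a schema and key against the default schema source before an entry is queued, presumably because constructing Gio.Settings for a schema that is not installed aborts the process. A minimal sketch of the same lookup, with an illustrative schema and key:

    from gi.repository import Gio

    def schema_has_key(schema_id, key):
        source = Gio.SettingsSchemaSource.get_default()
        schema = source.lookup(schema_id, False)   # non-recursive lookup, may return None
        return bool(schema) and schema.has_key(key)

    # Schema and key below are illustrative only.
    if schema_has_key('org.gnome.desktop.background', 'picture-uri'):
        settings = Gio.Settings(schema='org.gnome.desktop.background')
        value = settings.get_string('picture-uri')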
113  gpoa/frontend/appliers/ini_file.py  (new file)
@@ -0,0 +1,113 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2022 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.


from gpt.folders import (
    FileAction
    , action_letter2enum
)
from util.logging import log
from pathlib import Path
import configparser
from util.windows import expand_windows_var
from util.util import get_homedir


class Ini_file:
    def __init__(self, ini_obj, username=None):
        path = expand_windows_var(ini_obj.path, username).replace('\\', '/')
        self.path = check_path(path, username)
        if not self.path:
            logdata = {'path': ini_obj.path}
            log('D175', logdata)
            return None
        self.section = ini_obj.section
        self.action = action_letter2enum(ini_obj.action)
        self.key = ini_obj.property
        self.value = ini_obj.value
        self.config = configparser.ConfigParser()
        self.act()

    def _create_action(self):
        if self.section not in self.config:
            self.config[self.section] = dict()

        self.config[self.section][self.key] = self.value

        with self.path.open("w", encoding="utf-8") as configfile:
            self.config.write(configfile)


    def _delete_action(self):
        if not self.path.exists():
            return

        if not self.section:
            self.path.unlink()
            return
        if not self.key:
            self.config.remove_section(self.section)
        elif self.section in self.config:
            self.config.remove_option(self.section, self.key)

        with self.path.open("w", encoding="utf-8") as configfile:
            self.config.write(configfile)


    def act(self):
        try:
            self.config.read(self.path)
        except Exception as exc:
            logdata = {'exc': exc}
            log('D176', logdata)
            return
        if self.action == FileAction.CREATE:
            self._create_action()
        if self.action == FileAction.UPDATE:
            self._delete_action()
            self._create_action()
        if self.action == FileAction.DELETE:
            self._delete_action()
        if self.action == FileAction.REPLACE:
            self._delete_action()
            self._create_action()

def check_path(path_to_check, username = None):
    '''
    Function for checking the right path for Inifile
    '''
    checking = Path(path_to_check)
    if checking.exists():
        if username and path_to_check == '/':
            return Path(get_homedir(username))
        return checking
    #Check for path directory without '/nameIni' suffix
    elif (len(path_to_check.split('/')) > 2
          and Path(path_to_check.replace(path_to_check.split('/')[-1], '')).is_dir()):
        return checking
    elif username:
        target_path = Path(get_homedir(username))
        res = target_path.joinpath(path_to_check
                                   if path_to_check[0] != '/'
                                   else path_to_check[1:])
        return check_path(str(res))
    else:
        return False
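Ini_file wraps the standard configparser module: UPDATE and REPLACE are implemented as delete-then-create, and a delete with no section removes the whole file. A compact sketch of the underlying create path (file name, section and key are illustrative):

    import configparser
    from pathlib import Path

    path = Path('/tmp/example-gpupdate.ini')   # illustrative target
    config = configparser.ConfigParser()
    config.read(path)                          # a missing file is silently ignored

    section, key, value = 'desktop', 'wallpaper', 'default.png'
    if section not in config:
        config[section] = {}
    config[section][key] = value

    with path.open('w', encoding='utf-8') as configfile:
        config.write(configfile)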
78  gpoa/frontend/appliers/netshare.py  (new file)
@@ -0,0 +1,78 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2022 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import subprocess

from gpt.folders import (
    FileAction
    , action_letter2enum
)
from util.logging import log
from util.windows import expand_windows_var


class Networkshare:

    def __init__(self, networkshare_obj):
        self.net_full_cmd = ['/usr/bin/net', 'usershare']
        self.cmd = list()
        self.name = networkshare_obj.name
        self.path = expand_windows_var(networkshare_obj.path).replace('\\', '/') if networkshare_obj.path else None
        self.action = action_letter2enum(networkshare_obj.action)
        self.allRegular = networkshare_obj.allRegular
        self.comment = networkshare_obj.comment
        self.limitUsers = networkshare_obj.limitUsers
        self.abe = networkshare_obj.abe
        self._guest = 'guest_ok=y'
        self.acl = 'Everyone:'
        self.act()

    def _run_net_full_cmd(self):
        try:
            subprocess.call(self.net_full_cmd, stderr=subprocess.DEVNULL)
        except Exception as exc:
            logdata = dict()
            logdata['cmd'] = self.net_full_cmd
            logdata['exc'] = exc
            log('D182', logdata)


    def _create_action(self):
        self.net_full_cmd.append('add')
        self.net_full_cmd.append(self.name)
        self.net_full_cmd.append(self.path)
        self.net_full_cmd.append(self.comment)
        self.net_full_cmd.append(self.acl + 'F' if self.abe == 'ENABLE' else self.acl + 'R')
        self.net_full_cmd.append(self._guest)
        self._run_net_full_cmd()

    def _delete_action(self):
        self.net_full_cmd.append('delete')
        self.net_full_cmd.append(self.name)
        self._run_net_full_cmd()

    def act(self):
        if self.action == FileAction.CREATE:
            self._create_action()
        if self.action == FileAction.UPDATE:
            self._create_action()
        if self.action == FileAction.DELETE:
            self._delete_action()
        if self.action == FileAction.REPLACE:
            self._create_action()
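Networkshare shells out to Samba's 'net usershare' tool; for a CREATE action the argument list built above ends up in this shape (share name, path and comment are hypothetical):

    # Equivalent of Networkshare._create_action() for an illustrative share:
    net_full_cmd = ['/usr/bin/net', 'usershare', 'add',
                    'scans',            # networkshare_obj.name
                    '/srv/scans',       # expanded networkshare_obj.path
                    'Scanner output',   # networkshare_obj.comment
                    'Everyone:F',       # 'Everyone:R' unless abe == 'ENABLE'
                    'guest_ok=y']
    # _run_net_full_cmd() then runs it:
    # subprocess.call(net_full_cmd, stderr=subprocess.DEVNULL)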
@@ -20,7 +20,7 @@ import os
import jinja2
import logging

from util.logging import slogm
from util.logging import slogm, log

class polkit:
    __template_path = '/usr/share/gpupdate/templates'
@@ -46,7 +46,13 @@ class polkit:
            with open(self.outfile, 'w') as f:
                f.write(text)

            logging.debug(slogm('Generated file {} with arguments {}'.format(self.outfile, self.args)))
            logdata = dict()
            logdata['file'] = self.outfile
            logdata['arguments'] = self.args
            log('D77', logdata)
        except Exception as exc:
            logging.error(slogm('Unable to generate file {} from {}'.format(self.outfile, self.infilename)))
            logdata = dict()
            logdata['file'] = self.outfile
            logdata['arguments'] = self.args
            log('E44', logdata)
@@ -19,7 +19,7 @@
import dbus
import logging

from util.logging import slogm
from util.logging import slogm, log

class systemd_unit:
    def __init__(self, unit_name, state):
@@ -39,7 +39,9 @@ class systemd_unit:
            self.manager.UnmaskUnitFiles([self.unit_name], dbus.Boolean(False))
            self.manager.EnableUnitFiles([self.unit_name], dbus.Boolean(False), dbus.Boolean(True))
            self.manager.StartUnit(self.unit_name, 'replace')
            logging.info(slogm('Starting systemd unit: {}'.format(self.unit_name)))
            logdata = dict()
            logdata['unit'] = self.unit_name
            log('I6', logdata)

            # In case the service has 'RestartSec' property set it
            # switches to 'activating (auto-restart)' state instead of
@@ -47,17 +49,23 @@ class systemd_unit:
            service_state = self._get_state()

            if not service_state in ['active', 'activating']:
                logging.error(slogm('Unable to start systemd unit {}'.format(self.unit_name)))
                logdata = dict()
                logdata['unit'] = self.unit_name
                log('E46', logdata)
        else:
            self.manager.StopUnit(self.unit_name, 'replace')
            self.manager.DisableUnitFiles([self.unit_name], dbus.Boolean(False))
            self.manager.MaskUnitFiles([self.unit_name], dbus.Boolean(False), dbus.Boolean(True))
            logging.info(slogm('Stopping systemd unit: {}'.format(self.unit_name)))
            logdata = dict()
            logdata['unit'] = self.unit_name
            log('I6', logdata)

            service_state = self._get_state()

            if not service_state in ['stopped']:
                logging.error(slogm('Unable to stop systemd unit {}'.format(self.unit_name)))
                logdata = dict()
                logdata['unit'] = self.unit_name
                log('E46', logdata)

    def _get_state(self):
        '''
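After starting or stopping a unit the applier re-reads its state and logs E46 when the unit did not land in the expected state. One way such a state check can be done over the same D-Bus connection (a sketch only; the real _get_state() implementation is not shown in this hunk):

    import dbus

    def get_unit_state(manager, bus, unit_name):
        unit_path = manager.LoadUnit(unit_name)
        unit = bus.get_object('org.freedesktop.systemd1', str(unit_path))
        props = dbus.Interface(unit, 'org.freedesktop.DBus.Properties')
        # Returns e.g. 'active', 'activating', 'inactive' or 'failed'.
        return str(props.Get('org.freedesktop.systemd1.Unit', 'ActiveState'))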
@@ -21,11 +21,9 @@ from .applier_frontend import (
    , check_enabled
)

import logging
import json
import os

from util.logging import slogm
from util.logging import log
from util.util import is_machine_name

class chromium_applier(applier_frontend):
@@ -35,119 +33,166 @@ class chromium_applier(applier_frontend):
    __registry_branch = 'Software\\Policies\\Google\\Chrome'
    __managed_policies_path = '/etc/chromium/policies/managed'
    __recommended_policies_path = '/etc/chromium/policies/recommended'
    # JSON file where Chromium stores its settings (and which is
    # overwritten every exit.
    __user_settings = '.config/chromium/Default'

    def __init__(self, storage, sid, username):
        self.storage = storage
        self.sid = sid
        self.username = username
        self._is_machine_name = is_machine_name(self.username)
        self.policies = dict()
        chromium_filter = '{}%'.format(self.__registry_branch)
        self.chromium_keys = self.storage.filter_hklm_entries(chromium_filter)

        self.policies_json = dict()

        self.__module_enabled = check_enabled(
            self.storage
            , self.__module_name
            , self.__module_experimental
        )

    def get_hklm_string_entry(self, hive_subkey):
        query_str = '{}\\{}'.format(self.__registry_branch, hive_subkey)
        return self.storage.get_hklm_entry(query_str)

    def get_hkcu_string_entry(self, hive_subkey):
        query_str = '{}\\{}'.format(self.__registry_branch, hive_subkey)
        return self.storage.get_hkcu_entry(sid, query_str)

    def get_hklm_string_entry_default(self, hive_subkey, default):
        '''
        Return row from HKLM table identified by hive_subkey as string
        or return supplied default value if such hive_subkey is missing.
        '''

        defval = str(default)
        response = self.get_hklm_string_entry(hive_subkey)

        if response:
            return response.data

        return defval

    def get_hkcu_string_entry_default(self, hive_subkey, default):
        defval = str(default)
        response = self.get_hkcu_string_entry(hive_subkey)
        if response:
            return response.data
        return defval

    def set_policy(self, name, obj):
        if obj:
            self.policies[name] = obj
            logging.info(slogm('Chromium policy \'{}\' set to {}'.format(name, obj)))

    def set_user_policy(self, name, obj):
        '''
        Please not that writing user preferences file is not considered
        a good practice and used mostly by various malware.
        '''
        if not self._is_machine_name:
            prefdir = os.path.join(util.get_homedir(self.username), self.__user_settings)
            os.makedirs(prefdir, exist_ok=True)

            prefpath = os.path.join(prefdir, 'Preferences')
            util.mk_homedir_path(self.username, self.__user_settings)
            settings = dict()
            try:
                with open(prefpath, 'r') as f:
                    settings = json.load(f)
            except FileNotFoundError as exc:
                logging.error(slogm('Chromium preferences file {} does not exist at the moment'.format(prefpath)))
            except:
                logging.error(slogm('Error during attempt to read Chromium preferences for user {}'.format(self.username)))

            if obj:
                settings[name] = obj

            with open(prefpath, 'w') as f:
                json.dump(settings, f)
                logging.info(slogm('Set user ({}) property \'{}\' to {}'.format(self.username, name, obj)))

    def get_home_page(self, hkcu=False):
        response = self.get_hklm_string_entry('HomepageLocation')
        result = 'about:blank'
        if response:
            result = response.data
        return result

    def machine_apply(self):
        '''
        Apply machine settings.
        '''
        self.set_policy('HomepageLocation', self.get_home_page())

        destfile = os.path.join(self.__managed_policies_path, 'policies.json')

        try:
            recommended__json = self.policies_json.pop('Recommended')
        except:
            recommended__json = {}

        #Replacing all nested dictionaries with a list
        dict_item_to_list = (
            lambda target_dict :
            {key:[*val.values()] if type(val) == dict else val for key,val in target_dict.items()}
        )
        os.makedirs(self.__managed_policies_path, exist_ok=True)
        with open(destfile, 'w') as f:
            json.dump(self.policies, f)
            logging.debug(slogm('Wrote Chromium preferences to {}'.format(destfile)))
            json.dump(dict_item_to_list(self.policies_json), f)
            logdata = dict()
            logdata['destfile'] = destfile
            log('D97', logdata)

        destfilerec = os.path.join(self.__recommended_policies_path, 'policies.json')
        os.makedirs(self.__recommended_policies_path, exist_ok=True)
        with open(destfilerec, 'w') as f:
            json.dump(dict_item_to_list(recommended__json), f)
            logdata = dict()
            logdata['destfilerec'] = destfilerec
            log('D97', logdata)

    def user_apply(self):
        '''
        Apply settings for the specified username.
        '''
        self.set_user_policy('homepage', self.get_home_page(hkcu=True))

    def apply(self):
        '''
        All actual job done here.
        '''
        if self.__module_enabled:
            logging.debug(slogm('Running Chromium applier for machine'))
            log('D95')
            self.create_dict(self.chromium_keys)
            self.machine_apply()
        else:
            logging.debug(slogm('Chromium applier for machine will not be started'))
            #if not self._is_machine_name:
            #    logging.debug('Running user applier for Chromium')
            #    self.user_apply()
            log('D96')

    def get_valuename_typeint(self):
        '''
        List of keys resulting from parsing chrome.admx with parsing_chrom_admx_intvalues.py
        '''
        valuename_typeint = (['DefaultCookiesSetting',
            'DefaultFileHandlingGuardSetting',
            'DefaultFileSystemReadGuardSetting',
            'DefaultFileSystemWriteGuardSetting',
            'DefaultImagesSetting',
            'DefaultInsecureContentSetting',
            'DefaultJavaScriptSetting',
            'DefaultPopupsSetting',
            'DefaultNotificationsSetting',
            'DefaultGeolocationSetting',
            'DefaultSensorsSetting',
            'DefaultWebBluetoothGuardSetting',
            'DefaultWebUsbGuardSetting',
            'DefaultSerialGuardSetting',
            'LegacySameSiteCookieBehaviorEnabled',
            'ProxyServerMode',
            'DefaultMediaStreamSetting',
            'PrintRasterizationMode',
            'DefaultPluginsSetting',
            'DefaultKeygenSetting',
            'ChromeFrameRendererSettings',
            'SafeBrowsingProtectionLevel',
            'PasswordProtectionWarningTrigger',
            'SafeBrowsingProtectionLevel_recommended',
            'RestoreOnStartup',
            'RestoreOnStartup_recommended',
            'AdsSettingForIntrusiveAdsSites',
            'AmbientAuthenticationInPrivateModesEnabled',
            'BrowserSignin',
            'ChromeVariations',
            'DeveloperToolsAvailability',
            'DownloadRestrictions',
            'DownloadRestrictions_recommended',
            'ForceYouTubeRestrict',
            'HeadlessMode',
            'IncognitoModeAvailability',
            'IntranetRedirectBehavior',
            'NetworkPredictionOptions',
            'NetworkPredictionOptions_recommended',
            'ProfilePickerOnStartupAvailability',
            'RelaunchNotification',
            'SafeSitesFilterBehavior'])
        return valuename_typeint


    def get_boolean(self,data):
        if data in ['0', 'false', None, 'none', 0]:
            return False
        if data in ['1', 'true', 1]:
            return True
    def get_parts(self, hivekeyname):
        '''
        Parse registry path string and leave key parameters
        '''
        parts = hivekeyname.replace(self.__registry_branch, '').split('\\')
        return parts


    def create_dict(self, chromium_keys):
        '''
        Collect dictionaries from registry keys into a general dictionary
        '''
        counts = dict()
        #getting the list of keys to read as an integer
        valuename_typeint = self.get_valuename_typeint()
        for it_data in chromium_keys:
            branch = counts
            try:
                if type(it_data.data) is bytes:
                    it_data.data = it_data.data.decode(encoding='utf-16').replace('\x00','')
                parts = self.get_parts(it_data.hive_key)
                #creating a nested dictionary from elements
                for part in parts[:-1]:
                    branch = branch.setdefault(part, {})
                #dictionary key value initialization
                if it_data.type == 4:
                    if it_data.valuename in valuename_typeint:
                        branch[parts[-1]] = int(it_data.data)
                    else:
                        branch[parts[-1]] = self.get_boolean(it_data.data)
                else:
                    if it_data.data[0] == '[' and it_data.data[-1] == ']':
                        try:
                            branch[parts[-1]] = json.loads(str(it_data.data))
                        except:
                            branch[parts[-1]] = str(it_data.data).replace('\\', '/')
                    else:
                        branch[parts[-1]] = str(it_data.data).replace('\\', '/')

            except Exception as exc:
                logdata = dict()
                logdata['Exception'] = exc
                logdata['keyname'] = it_data.keyname
                log('D178', logdata)
        try:
            self.policies_json = counts['']
        except:
            self.policies_json = {}
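create_dict() rebuilds the nested registry layout under Software\Policies\Google\Chrome into a Python dict, and dict_item_to_list() then flattens one level of nesting because list-valued policies arrive from the registry as numbered subkeys while Chromium's policies.json expects JSON arrays. A worked example with a hypothetical pair of policies:

    dict_item_to_list = (
        lambda target_dict:
        {key: [*val.values()] if type(val) == dict else val for key, val in target_dict.items()}
    )

    policies = {
        'HomepageLocation': 'https://example.org',
        # List policies arrive as numbered subkeys...
        'RestoreOnStartupURLs': {'1': 'https://example.org', '2': 'https://altlinux.org'},
    }
    print(dict_item_to_list(policies))
    # {'HomepageLocation': 'https://example.org',
    #  'RestoreOnStartupURLs': ['https://example.org', 'https://altlinux.org']}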
@@ -29,7 +29,7 @@ from .applier_frontend import (
)
from gpt.drives import json2drive
from util.util import get_homedir
from util.logging import slogm
from util.logging import slogm, log

def storage_get_drives(storage, sid):
    drives = storage.get_drives(sid)
@@ -157,8 +157,8 @@ class cifs_applier_user(applier_frontend):

    def admin_context_apply(self):
        if self.__module_enabled:
            logging.debug(slogm('Running CIFS applier for user in administrator context'))
            log('D146')
            self.__admin_context_apply()
        else:
            logging.debug(slogm('CIFS applier for user in administrator context will not be started'))
            log('D147')
@@ -21,7 +21,7 @@ from .applier_frontend import (
    , check_enabled
)
from .appliers.control import control
from util.logging import slogm
from util.logging import slogm, log

import logging

@@ -46,12 +46,27 @@ class control_applier(applier_frontend):
            valuename = setting.hive_key.rpartition('\\')[2]
            try:
                self.controls.append(control(valuename, int(setting.data)))
                logging.info(slogm('Working with control {}'.format(valuename)))
                logdata = dict()
                logdata['control'] = valuename
                logdata['value'] = setting.data
                log('I3', logdata)
            except ValueError as exc:
                self.controls.append(control(valuename, setting.data))
                logging.info(slogm('Working with control {} with string value'.format(valuename)))
                try:
                    ctl = control(valuename, setting.data)
                except Exception as exc:
                    logdata = {'Exception': exc}
                    log('I3', logdata)
                    continue
                self.controls.append(ctl)
                logdata = dict()
                logdata['control'] = valuename
                logdata['with string value'] = setting.data
                log('I3', logdata)
            except Exception as exc:
                logging.info(slogm('Unable to work with control {}: {}'.format(valuename, exc)))
                logdata = dict()
                logdata['control'] = valuename
                logdata['exc'] = exc
                log('E39', logdata)
        #for e in polfile.pol_file.entries:
        #    print('{}:{}:{}:{}:{}'.format(e.type, e.data, e.valuename, e.keyname))
        for cont in self.controls:
@@ -62,8 +77,7 @@ class control_applier(applier_frontend):
        Trigger control facility invocation.
        '''
        if self.__module_enabled:
            logging.debug(slogm('Running Control applier for machine'))
            log('D67')
            self.run()
        else:
            logging.debug(slogm('Control applier for machine will not be started'))

            log('E40')
@@ -28,7 +28,7 @@ from .applier_frontend import (
)
from gpt.printers import json2printer
from util.rpm import is_rpm_installed
from util.logging import slogm
from util.logging import slogm, log

def storage_get_printers(storage, sid):
    '''
@@ -79,7 +79,7 @@ class cups_applier(applier_frontend):

    def run(self):
        if not is_rpm_installed('cups'):
            logging.warning(slogm('CUPS is not installed: no printer settings will be deployed'))
            log('W9')
            return

        self.cups_connection = cups.Connection()
@@ -94,10 +94,10 @@ class cups_applier(applier_frontend):
        Perform configuration of printer which is assigned to computer.
        '''
        if self.__module_enabled:
            logging.debug(slogm('Running CUPS applier for machine'))
            log('D113')
            self.run()
        else:
            logging.debug(slogm('CUPS applier for machine will not be started'))
            log('D114')

class cups_applier_user(applier_frontend):
    __module_name = 'CUPSApplierUser'
@@ -123,7 +123,7 @@ class cups_applier_user(applier_frontend):

    def run(self):
        if not is_rpm_installed('cups'):
            logging.warning(slogm('CUPS is not installed: no printer settings will be deployed'))
            log('W9')
            return

        self.cups_connection = cups.Connection()
@@ -138,8 +138,8 @@ class cups_applier_user(applier_frontend):
        Perform printer configuration assigned for user.
        '''
        if self.__module_enabled:
            logging.debug(slogm('Running CUPS applier for user in administrator context'))
            log('D115')
            self.run()
        else:
            logging.debug(slogm('CUPS applier for user in administrator context will not be started'))
            log('D116')
@@ -21,7 +21,7 @@ from .applier_frontend import (
    , check_enabled
)
from .appliers.envvar import Envvar
from util.logging import slogm
from util.logging import slogm, log

import logging

@@ -38,11 +38,11 @@ class envvar_applier(applier_frontend):

    def apply(self):
        if self.__module_enabled:
            logging.debug(slogm('Running Envvar applier for machine'))
            log('D134')
            ev = Envvar(self.envvars, 'root')
            ev.act()
        else:
            logging.debug(slogm('Envvar applier for machine will not be started'))
            log('D135')

class envvar_applier_user(applier_frontend):
    __module_name = 'EnvvarsApplierUser'
@@ -61,9 +61,9 @@ class envvar_applier_user(applier_frontend):

    def user_context_apply(self):
        if self.__module_enabled:
            logging.debug(slogm('Running Envvar applier for user in user context'))
            log('D136')
            ev = Envvar(self.envvars, self.username)
            ev.act()
        else:
            logging.debug(slogm('Envvar applier for user in user context will not be started'))
            log('D137')
81
gpoa/frontend/file_applier.py
Normal file
@@ -0,0 +1,81 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from .appliers.file_cp import Files_cp
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from util.logging import log
|
||||
|
||||
|
||||
|
||||
class file_applier(applier_frontend):
|
||||
__module_name = 'FilesApplier'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
|
||||
def __init__(self, storage, file_cache, sid):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.file_cache = file_cache
|
||||
self.files = self.storage.get_files(self.sid)
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def run(self):
|
||||
for file in self.files:
|
||||
Files_cp(file, self.file_cache)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D167')
|
||||
self.run()
|
||||
else:
|
||||
log('D168')
|
||||
|
||||
class file_applier_user(applier_frontend):
|
||||
__module_name = 'FilesApplierUser'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
|
||||
def __init__(self, storage, file_cache, sid, username):
|
||||
self.storage = storage
|
||||
self.file_cache = file_cache
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self.files = self.storage.get_files(self.sid)
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def run(self):
|
||||
for file in self.files:
|
||||
Files_cp(file, self.file_cache, self.username)
|
||||
|
||||
def admin_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D169')
|
||||
self.run()
|
||||
else:
|
||||
log('D170')
|
||||
|
||||
def user_context_apply(self):
|
||||
pass
|
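The new appliers added in this change set (files, ini, networkshares, scripts) all follow the same applier_frontend contract: read entries from storage in __init__, resolve __module_enabled through check_enabled, and gate run() behind it in apply(). A minimal, self-contained sketch of that contract follows; check_enabled is stubbed and the class and entry names are illustrative assumptions, not the project's actual code:

```python
def check_enabled(storage, module_name, is_experimental):
    # stub standing in for gpoa's real helper, which consults storage
    # for an explicit enable/disable setting per module
    return not is_experimental

class example_applier:
    __module_name = 'ExampleApplier'
    __module_experimental = True    # experimental modules stay off by default
    __module_enabled = False

    def __init__(self, storage, sid):
        self.storage = storage
        self.sid = sid
        self.entries = []           # e.g. storage.get_files(sid) in file_applier
        self.__module_enabled = check_enabled(
            storage, self.__module_name, self.__module_experimental)

    def run(self):
        for entry in self.entries:
            pass                    # hand each entry to its applier, e.g. Files_cp(entry, cache)

    def apply(self):
        if self.__module_enabled:   # the real code logs a D-code in both branches
            self.run()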
@@ -25,26 +25,23 @@
|
||||
# This thing must work with keys and subkeys located at:
|
||||
# Software\Policies\Mozilla\Firefox
|
||||
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import configparser
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from util.logging import slogm
|
||||
from util.logging import log
|
||||
from util.util import is_machine_name
|
||||
|
||||
class firefox_applier(applier_frontend):
|
||||
__module_name = 'FirefoxApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__registry_branch = 'Software\\Policies\\Mozilla\\Firefox'
|
||||
__registry_branch = 'Software\\Policies\\Mozilla\\Firefox\\'
|
||||
__firefox_installdir1 = '/usr/lib64/firefox/distribution'
|
||||
__firefox_installdir2 = '/etc/firefox/policies'
|
||||
__user_settings_dir = '.mozilla/firefox'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
self.storage = storage
|
||||
@@ -53,158 +50,120 @@ class firefox_applier(applier_frontend):
|
||||
self._is_machine_name = is_machine_name(self.username)
|
||||
self.policies = dict()
|
||||
self.policies_json = dict({ 'policies': self.policies })
|
||||
firefox_filter = '{}%'.format(self.__registry_branch)
|
||||
self.firefox_keys = self.storage.filter_hklm_entries(firefox_filter)
|
||||
self.policies_gen = dict()
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def get_profiles(self):
|
||||
'''
|
||||
Get directory names of Firefox profiles for specified username.
|
||||
'''
|
||||
profiles_ini = os.path.join(util.get_homedir(self.username), self.__user_settings_dir, 'profiles.ini')
|
||||
config = configparser.ConfigParser()
|
||||
config.read(profiles_ini)
|
||||
def get_boolean(self,data):
|
||||
if data in ['0', 'false', None, 'none', 0]:
|
||||
return False
|
||||
if data in ['1', 'true', 1]:
|
||||
return True
|
||||
|
||||
profile_paths = list()
|
||||
for section in config.keys():
|
||||
if section.startswith('Profile'):
|
||||
profile_paths.append(config[section]['Path'])
|
||||
def get_parts(self, hivekeyname):
|
||||
'''
|
||||
Parse a registry path string and return only the key components below the Firefox policy branch
|
||||
'''
|
||||
parts = hivekeyname.replace(self.__registry_branch, '').split('\\')
|
||||
return parts
|
||||
|
||||
return profile_paths
|
||||
def create_dict(self, firefox_keys):
|
||||
'''
|
||||
Collect dictionaries from registry keys into a general dictionary
|
||||
'''
|
||||
counts = dict()
|
||||
for it_data in firefox_keys:
|
||||
branch = counts
|
||||
try:
|
||||
if type(it_data.data) is bytes:
|
||||
it_data.data = it_data.data.decode(encoding='utf-16').replace('\x00','')
|
||||
#Cases when it is necessary to create nested dictionaries
|
||||
if it_data.valuename != it_data.data:
|
||||
parts = self.get_parts(it_data.hive_key)
|
||||
#creating a nested dictionary from elements
|
||||
for part in parts[:-1]:
|
||||
branch = branch.setdefault(part, {})
|
||||
#dictionary key value initialization
|
||||
if it_data.type == 4:
|
||||
branch[parts[-1]] = self.get_boolean(it_data.data)
|
||||
else:
|
||||
branch[parts[-1]] = str(it_data.data).replace('\\', '/')
|
||||
#Cases when it is necessary to create lists in a dictionary
|
||||
else:
|
||||
parts = self.get_parts(it_data.keyname)
|
||||
for part in parts[:-1]:
|
||||
branch = branch.setdefault(part, {})
|
||||
if branch.get(parts[-1]) is None:
|
||||
branch[parts[-1]] = list()
|
||||
if it_data.type == 4:
|
||||
branch[parts[-1]].append(self.get_boolean(it_data.data))
|
||||
else:
|
||||
if os.path.isdir(str(it_data.data).replace('\\', '/')):
|
||||
branch[parts[-1]].append(str(it_data.data).replace('\\', '/'))
|
||||
else:
|
||||
branch[parts[-1]].append(str(it_data.data))
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['Exception'] = exc
|
||||
logdata['keyname'] = it_data.keyname
|
||||
log('W14', logdata)
|
||||
|
||||
def get_hklm_string_entry(self, hive_subkey):
|
||||
'''
|
||||
Get HKEY_LOCAL_MACHINE hive subkey of
|
||||
'Software\Policies\Mozilla\Firefox'.
|
||||
'''
|
||||
query_str = '{}\\{}'.format(self.__registry_branch, hive_subkey)
|
||||
return self.storage.get_hklm_entry(query_str)
|
||||
|
||||
def get_hklm_string_entry_default(self, hive_subkey, default):
|
||||
'''
|
||||
Get Firefox's subkey or return the default value.
|
||||
'''
|
||||
defval = str(default)
|
||||
response = self.get_hklm_string_entry(hive_subkey)
|
||||
if response:
|
||||
return response.data
|
||||
return defval
|
||||
|
||||
def set_policy(self, name, obj):
|
||||
'''
|
||||
Add entry to policy set.
|
||||
'''
|
||||
if obj:
|
||||
self.policies[name] = obj
|
||||
logging.info(slogm('Firefox policy \'{}\' set to {}'.format(name, obj)))
|
||||
|
||||
def get_home_page(self):
|
||||
'''
|
||||
Query the Homepage property from the storage.
|
||||
'''
|
||||
homepage = dict({
|
||||
'URL': 'about:blank',
|
||||
'Locked': False,
|
||||
'StartPage': 'homepage'
|
||||
})
|
||||
response = self.get_hklm_string_entry('Homepage\\URL')
|
||||
if response:
|
||||
homepage['URL'] = response.data
|
||||
return homepage
|
||||
return None
|
||||
|
||||
def get_boolean_config(self, name):
|
||||
'''
|
||||
Query boolean property from the storage.
|
||||
'''
|
||||
response = self.get_hklm_string_entry(name)
|
||||
if response:
|
||||
data = response.data if isinstance(response.data, int) else str(response.data).lower()
|
||||
if data in ['0', 'false', None, 'none', 0]:
|
||||
return False
|
||||
if data in ['1', 'true', 1]:
|
||||
return True
|
||||
|
||||
return None
|
||||
|
||||
def set_boolean_policy(self, name):
|
||||
'''
|
||||
Add boolean entry to policy set.
|
||||
'''
|
||||
obj = self.get_boolean_config(name)
|
||||
if obj is not None:
|
||||
self.policies[name] = obj
|
||||
logging.info(slogm('Firefox policy \'{}\' set to {}'.format(name, obj)))
|
||||
self.policies_json = {'policies': dict_item_to_list(counts)}
|
||||
|
||||
def machine_apply(self):
|
||||
'''
|
||||
Write policies.json to Firefox installdir.
|
||||
'''
|
||||
self.set_policy('Homepage', self.get_home_page())
|
||||
self.set_boolean_policy('BlockAboutConfig')
|
||||
self.set_boolean_policy('BlockAboutProfiles')
|
||||
self.set_boolean_policy('BlockAboutSupport')
|
||||
self.set_boolean_policy('CaptivePortal')
|
||||
self.set_boolean_policy('DisableSetDesktopBackground')
|
||||
self.set_boolean_policy('DisableMasterPasswordCreation')
|
||||
self.set_boolean_policy('DisableBuiltinPDFViewer')
|
||||
self.set_boolean_policy('DisableDeveloperTools')
|
||||
self.set_boolean_policy('DisableFeedbackCommands')
|
||||
self.set_boolean_policy('DisableFirefoxScreenshots')
|
||||
self.set_boolean_policy('DisableFirefoxAccounts')
|
||||
self.set_boolean_policy('DisableFirefoxStudies')
|
||||
self.set_boolean_policy('DisableForgetButton')
|
||||
self.set_boolean_policy('DisableFormHistory')
|
||||
self.set_boolean_policy('DisablePasswordReveal')
|
||||
self.set_boolean_policy('DisablePocket')
|
||||
self.set_boolean_policy('DisablePrivateBrowsing')
|
||||
self.set_boolean_policy('DisableProfileImport')
|
||||
self.set_boolean_policy('DisableProfileRefresh')
|
||||
self.set_boolean_policy('DisableSafeMode')
|
||||
self.set_boolean_policy('DisableSystemAddonUpdate')
|
||||
self.set_boolean_policy('DisableTelemetry')
|
||||
self.set_boolean_policy('DontCheckDefaultBrowser')
|
||||
self.set_boolean_policy('ExtensionUpdate')
|
||||
self.set_boolean_policy('HardwareAcceleration')
|
||||
self.set_boolean_policy('PrimaryPassword')
|
||||
self.set_boolean_policy('NetworkPrediction')
|
||||
self.set_boolean_policy('NewTabPage')
|
||||
self.set_boolean_policy('NoDefaultBookmarks')
|
||||
self.set_boolean_policy('OfferToSaveLogins')
|
||||
self.set_boolean_policy('PasswordManagerEnabled')
|
||||
self.set_boolean_policy('PromptForDownloadLocation')
|
||||
self.set_boolean_policy('SanitizeOnShutdown')
|
||||
self.set_boolean_policy('SearchSuggestEnabled')
|
||||
|
||||
self.create_dict(self.firefox_keys)
|
||||
destfile = os.path.join(self.__firefox_installdir1, 'policies.json')
|
||||
|
||||
os.makedirs(self.__firefox_installdir1, exist_ok=True)
|
||||
with open(destfile, 'w') as f:
|
||||
json.dump(self.policies_json, f)
|
||||
logging.debug(slogm('Wrote Firefox preferences to {}'.format(destfile)))
|
||||
logdata = dict()
|
||||
logdata['destfile'] = destfile
|
||||
log('D91', logdata)
|
||||
|
||||
destfile = os.path.join(self.__firefox_installdir2, 'policies.json')
|
||||
os.makedirs(self.__firefox_installdir2, exist_ok=True)
|
||||
with open(destfile, 'w') as f:
|
||||
json.dump(self.policies_json, f)
|
||||
logging.debug(slogm('Wrote Firefox preferences to {}'.format(destfile)))
|
||||
|
||||
def user_apply(self):
|
||||
profiles = self.get_profiles()
|
||||
|
||||
profiledir = os.path.join(util.get_homedir(self.username), self.__user_settings_dir)
|
||||
for profile in profiles:
|
||||
logging.debug(slogm('Found Firefox profile in {}/{}'.format(profiledir, profile)))
|
||||
logdata = dict()
|
||||
logdata['destfile'] = destfile
|
||||
log('D91', logdata)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Firefox applier for machine'))
|
||||
log('D93')
|
||||
self.machine_apply()
|
||||
else:
|
||||
logging.debug(slogm('Firefox applier for machine will not be started'))
|
||||
#if not self._is_machine_name:
|
||||
# logging.debug('Running user applier for Firefox')
|
||||
# self.user_apply()
|
||||
log('D94')
|
||||
|
||||
def key_dict_is_digit(dictionary:dict) -> bool:
|
||||
'''
|
||||
Check whether any key of the dictionary is a digit-only string
|
||||
'''
|
||||
if not isinstance(dictionary, dict):
|
||||
return False
|
||||
for dig in dictionary.keys():
|
||||
if dig.isdigit():
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def dict_item_to_list(dictionary:dict) -> dict:
|
||||
'''
|
||||
Recursively replace dictionaries keyed by digits with lists of their values
|
||||
'''
|
||||
for key,val in dictionary.items():
|
||||
if type(val) == dict:
|
||||
if key_dict_is_digit(val):
|
||||
dictionary[key] = [*val.values()]
|
||||
else:
|
||||
dict_item_to_list(dictionary[key])
|
||||
return dictionary
|
||||
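create_dict above builds a nested dictionary keyed by the registry path parts, and dict_item_to_list then collapses any sub-dictionary whose keys are digits into a plain list, which is how repeated policy entries (e.g. ...\Firefox\WebsiteFilter\Block\1, \2) become JSON arrays in policies.json. A worked example with made-up registry data (key_dict_is_digit is restated compactly with any(), same behaviour as above):

```python
def key_dict_is_digit(dictionary: dict) -> bool:
    if not isinstance(dictionary, dict):
        return False
    return any(key.isdigit() for key in dictionary.keys())

def dict_item_to_list(dictionary: dict) -> dict:
    for key, val in dictionary.items():
        if isinstance(val, dict):
            if key_dict_is_digit(val):
                dictionary[key] = [*val.values()]   # digit-keyed dict -> list
            else:
                dict_item_to_list(dictionary[key])  # descend into named keys
    return dictionary

policies = {'DisableTelemetry': True,
            'WebsiteFilter': {'Block': {'1': '<all_urls>', '2': 'http://example.org/*'}}}
print(dict_item_to_list(policies))
# {'DisableTelemetry': True,
#  'WebsiteFilter': {'Block': ['<all_urls>', 'http://example.org/*']}}
```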
|
@@ -20,7 +20,7 @@
|
||||
import logging
|
||||
import subprocess
|
||||
|
||||
from util.logging import slogm
|
||||
from util.logging import slogm, log
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
@@ -52,14 +52,14 @@ class firewall_applier(applier_frontend):
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Firewall applier for machine'))
|
||||
log('D117')
|
||||
if '1' == self.firewall_enabled:
|
||||
logging.debug(slogm('Firewall is enabled'))
|
||||
log('D118')
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('Firewall is disabled, settings will be reset'))
|
||||
log('D119')
|
||||
proc = subprocess.Popen(self.__firewall_reset_cmd)
|
||||
proc.wait()
|
||||
else:
|
||||
logging.debug(slogm('Firewall applier will not be started'))
|
||||
log('D120')
|
||||
|
||||
|
@@ -23,9 +23,9 @@ from .applier_frontend import (
|
||||
, check_enabled
|
||||
)
|
||||
from .appliers.folder import Folder
|
||||
from util.logging import slogm
|
||||
|
||||
import logging
|
||||
from util.logging import log
|
||||
from util.windows import expand_windows_var
|
||||
import re
|
||||
|
||||
class folder_applier(applier_frontend):
|
||||
__module_name = 'FoldersApplier'
|
||||
@@ -36,16 +36,22 @@ class folder_applier(applier_frontend):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.folders = self.storage.get_folders(self.sid)
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_enabled)
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Folder applier for machine'))
|
||||
log('D107')
|
||||
for directory_obj in self.folders:
|
||||
check = expand_windows_var(directory_obj.path).replace('\\', '/')
|
||||
win_var = re.findall(r'%.+?%', check)
|
||||
drive = re.findall(r'^[a-z A-Z]\:',check)
|
||||
if drive or win_var:
|
||||
log('D109', {"path": directory_obj.path})
|
||||
continue
|
||||
fld = Folder(directory_obj)
|
||||
fld.action()
|
||||
fld.act()
|
||||
else:
|
||||
logging.debug(slogm('Folder applier for machine will not be started'))
|
||||
log('D108')
|
||||
|
||||
class folder_applier_user(applier_frontend):
|
||||
__module_name = 'FoldersApplierUser'
|
||||
@@ -65,20 +71,22 @@ class folder_applier_user(applier_frontend):
|
||||
|
||||
def run(self):
|
||||
for directory_obj in self.folders:
|
||||
check = expand_windows_var(directory_obj.path, self.username).replace('\\', '/')
|
||||
win_var = re.findall(r'%.+?%', check)
|
||||
drive = re.findall(r'^[a-z A-Z]\:',check)
|
||||
if drive or win_var:
|
||||
log('D110', {"path": directory_obj.path})
|
||||
continue
|
||||
fld = Folder(directory_obj, self.username)
|
||||
fld.act()
|
||||
|
||||
def admin_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Folder applier for user in administrator context'))
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('Folder applier for user in administrator context will not be started'))
|
||||
pass
|
||||
|
||||
def user_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Folder applier for user in user context'))
|
||||
log('D111')
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('Folder applier for user in user context will not be started'))
|
||||
log('D112')
|
||||
|
||||
|
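Both folder appliers above skip any path that still contains an unexpanded %VARIABLE% or a Windows drive letter after expand_windows_var. The check, shown in isolation with the same regular expressions (the helper name is illustrative):

```python
import re

def is_applicable_path(path):
    check = path.replace('\\', '/')
    win_var = re.findall(r'%.+?%', check)
    drive = re.findall(r'^[a-z A-Z]\:', check)
    return not (drive or win_var)

print(is_applicable_path('C:/Users/All Users/Desktop'))    # False: drive letter remains
print(is_applicable_path('%HOMEDRIVE%/share'))             # False: unexpanded variable
print(is_applicable_path('/home/user/Documents/reports'))  # True: usable POSIX path
```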
@@ -51,7 +51,25 @@ from .envvar_applier import (
|
||||
envvar_applier
|
||||
, envvar_applier_user
|
||||
)
|
||||
from util.windows import get_sid
|
||||
from .scripts_applier import (
|
||||
scripts_applier
|
||||
, scripts_applier_user
|
||||
)
|
||||
|
||||
from .file_applier import (
|
||||
file_applier
|
||||
, file_applier_user
|
||||
)
|
||||
|
||||
from .ini_applier import (
|
||||
ini_applier
|
||||
, ini_applier_user
|
||||
)
|
||||
|
||||
from .networkshare_applier import networkshare_applier
|
||||
from .yandex_browser_applier import yandex_browser_applier
|
||||
|
||||
from util.sid import get_sid
|
||||
from util.users import (
|
||||
is_root,
|
||||
get_process_user,
|
||||
@@ -111,11 +129,19 @@ class frontend_manager:
|
||||
self.file_cache = fs_file_cache('file_cache')
|
||||
|
||||
self.machine_appliers = dict()
|
||||
self.user_appliers = dict()
|
||||
if is_machine:
|
||||
self._init_machine_appliers()
|
||||
else:
|
||||
self._init_user_appliers()
|
||||
|
||||
def _init_machine_appliers(self):
|
||||
self.machine_appliers['control'] = control_applier(self.storage)
|
||||
self.machine_appliers['polkit'] = polkit_applier(self.storage)
|
||||
self.machine_appliers['systemd'] = systemd_applier(self.storage)
|
||||
self.machine_appliers['firefox'] = firefox_applier(self.storage, self.sid, self.username)
|
||||
self.machine_appliers['chromium'] = chromium_applier(self.storage, self.sid, self.username)
|
||||
self.machine_appliers['yandex_browser'] = yandex_browser_applier(self.storage, self.sid, self.username)
|
||||
self.machine_appliers['shortcuts'] = shortcut_applier(self.storage)
|
||||
self.machine_appliers['gsettings'] = gsettings_applier(self.storage, self.file_cache)
|
||||
self.machine_appliers['cups'] = cups_applier(self.storage)
|
||||
@@ -124,10 +150,14 @@ class frontend_manager:
|
||||
self.machine_appliers['package'] = package_applier(self.storage)
|
||||
self.machine_appliers['ntp'] = ntp_applier(self.storage)
|
||||
self.machine_appliers['envvar'] = envvar_applier(self.storage, self.sid)
|
||||
self.machine_appliers['networkshare'] = networkshare_applier(self.storage, self.sid)
|
||||
self.machine_appliers['scripts'] = scripts_applier(self.storage, self.sid)
|
||||
self.machine_appliers['files'] = file_applier(self.storage, self.file_cache, self.sid)
|
||||
self.machine_appliers['ini'] = ini_applier(self.storage, self.sid)
|
||||
|
||||
def _init_user_appliers(self):
|
||||
# User appliers are expected to work with user-writable
|
||||
# files and settings, mostly in $HOME.
|
||||
self.user_appliers = dict()
|
||||
self.user_appliers['shortcuts'] = shortcut_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['folders'] = folder_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['gsettings'] = gsettings_applier_user(self.storage, self.file_cache, self.sid, self.username)
|
||||
@@ -141,6 +171,9 @@ class frontend_manager:
|
||||
self.user_appliers['package'] = package_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['polkit'] = polkit_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['envvar'] = envvar_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['scripts'] = scripts_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['files'] = file_applier_user(self.storage, self.file_cache, self.sid, self.username)
|
||||
self.user_appliers['ini'] = ini_applier_user(self.storage, self.sid, self.username)
|
||||
|
||||
def machine_apply(self):
|
||||
'''
|
||||
|
@@ -33,9 +33,9 @@ from .applier_frontend import (
|
||||
)
|
||||
from .appliers.gsettings import (
|
||||
system_gsettings,
|
||||
user_gsetting
|
||||
user_gsettings
|
||||
)
|
||||
from util.logging import slogm
|
||||
from util.logging import slogm ,log
|
||||
|
||||
def uri_fetch(schema, path, value, cache):
|
||||
'''
|
||||
@@ -49,7 +49,7 @@ def uri_fetch(schema, path, value, cache):
|
||||
try:
|
||||
retval = cache.get(value)
|
||||
logdata['dst'] = retval
|
||||
logging.debug(slogm('Getting cached file for URI: {}'.format(logdata)))
|
||||
log('D90', logdata)
|
||||
except Exception as exc:
|
||||
pass
|
||||
|
||||
@@ -62,6 +62,7 @@ class gsettings_applier(applier_frontend):
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\GSettings\\'
|
||||
__registry_locks_branch = 'Software\\BaseALT\\Policies\\GSettingsLocks\\'
|
||||
__wallpaper_entry = 'Software\\BaseALT\\Policies\\GSettings\\org.mate.background.picture-filename'
|
||||
__vino_authentication_methods_entry = 'Software\\BaseALT\\Policies\\GSettings\\org.gnome.Vino.authentication-methods'
|
||||
__global_schema = '/usr/share/glib-2.0/schemas'
|
||||
__override_priority_file = 'zzz_policy.gschema.override'
|
||||
__override_old_file = '0_policy.gschema.override'
|
||||
@@ -90,7 +91,7 @@ class gsettings_applier(applier_frontend):
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exception'] = str(exc)
|
||||
logging.debug(slogm('Unable to cache specified URI for machine: {}'.format(logdata)))
|
||||
log('D145', logdata)
|
||||
|
||||
def uri_fetch_helper(self, schema, path, value):
|
||||
return uri_fetch(schema, path, value, self.file_cache)
|
||||
@@ -102,7 +103,7 @@ class gsettings_applier(applier_frontend):
|
||||
|
||||
# Cleanup settings from previous run
|
||||
if os.path.exists(self.override_file):
|
||||
logging.debug(slogm('Removing GSettings policy file from previous run'))
|
||||
log('D82')
|
||||
os.remove(self.override_file)
|
||||
|
||||
# Get all configured gsettings locks
|
||||
@@ -117,11 +118,14 @@ class gsettings_applier(applier_frontend):
|
||||
rp = valuename.rpartition('.')
|
||||
schema = rp[0]
|
||||
path = rp[2]
|
||||
data = setting.data
|
||||
lock = bool(self.locks[valuename]) if valuename in self.locks else None
|
||||
if setting.hive_key.lower() == self.__wallpaper_entry.lower():
|
||||
self.update_file_cache(setting.data)
|
||||
helper = self.uri_fetch_helper
|
||||
self.gsettings.append(schema, path, setting.data, lock, helper)
|
||||
elif setting.hive_key.lower() == self.__vino_authentication_methods_entry.lower():
|
||||
data = [setting.data]
|
||||
self.gsettings.append(schema, path, data, lock, helper)
|
||||
|
||||
# Create GSettings policy with highest available priority
|
||||
self.gsettings.apply()
|
||||
@@ -130,20 +134,20 @@ class gsettings_applier(applier_frontend):
|
||||
try:
|
||||
proc = subprocess.run(args=['/usr/bin/glib-compile-schemas', self.__global_schema], capture_output=True, check=True)
|
||||
except Exception as exc:
|
||||
logging.debug(slogm('Error recompiling global GSettings schemas'))
|
||||
log('E48')
|
||||
|
||||
# Update desktop configuration system backend
|
||||
try:
|
||||
proc = subprocess.run(args=['/usr/bin/dconf', "update"], capture_output=True, check=True)
|
||||
except Exception as exc:
|
||||
logging.debug(slogm('Error updating desktop configuration system backend'))
|
||||
log('E49')
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running GSettings applier for machine'))
|
||||
log('D80')
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('GSettings applier for machine will not be started'))
|
||||
log('D81')
|
||||
|
||||
class GSettingsMapping:
|
||||
def __init__(self, hive_key, gsettings_schema, gsettings_key):
|
||||
@@ -161,7 +165,7 @@ class GSettingsMapping:
|
||||
logdata['hive_key'] = self.hive_key
|
||||
logdata['gsettings_schema'] = self.gsettings_schema
|
||||
logdata['gsettings_key'] = self.gsettings_key
|
||||
logging.warning(slogm('Unable to resolve GSettings parameter {}.{}'.format(self.gsettings_schema, self.gsettings_key)))
|
||||
log('W6', logdata)
|
||||
|
||||
def preg2gsettings(self):
|
||||
'''
|
||||
@@ -182,6 +186,7 @@ class gsettings_applier_user(applier_frontend):
|
||||
__module_enabled = True
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\GSettings\\'
|
||||
__wallpaper_entry = 'Software\\BaseALT\\Policies\\GSettings\\org.mate.background.picture-filename'
|
||||
__vino_authentication_methods_entry = 'Software\\BaseALT\\Policies\\GSettings\\org.gnome.Vino.authentication-methods'
|
||||
|
||||
def __init__(self, storage, file_cache, sid, username):
|
||||
self.storage = storage
|
||||
@@ -190,8 +195,8 @@ class gsettings_applier_user(applier_frontend):
|
||||
self.username = username
|
||||
gsettings_filter = '{}%'.format(self.__registry_branch)
|
||||
self.gsettings_keys = self.storage.filter_hkcu_entries(self.sid, gsettings_filter)
|
||||
self.gsettings = list()
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_enabled)
|
||||
self.gsettings = user_gsettings()
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
self.__windows_mapping_enabled = check_windows_mapping_enabled(self.storage)
|
||||
|
||||
self.__windows_settings = dict()
|
||||
@@ -232,10 +237,13 @@ class gsettings_applier_user(applier_frontend):
|
||||
for setting_key in self.__windows_settings.keys():
|
||||
value = self.storage.get_hkcu_entry(self.sid, setting_key)
|
||||
if value:
|
||||
logging.debug(slogm('Found GSettings windows mapping {} to {}'.format(setting_key, value.data)))
|
||||
logdata = dict()
|
||||
logdata['setting_key'] = setting_key
|
||||
logdata['value.data'] = value.data
|
||||
log('D86', logdata)
|
||||
mapping = self.__windows_settings[setting_key]
|
||||
try:
|
||||
self.gsettings.append(user_gsetting(mapping.gsettings_schema, mapping.gsettings_key, value.data))
|
||||
self.gsettings.append(mapping.gsettings_schema, mapping.gsettings_key, value.data)
|
||||
except Exception as exc:
|
||||
print(exc)
|
||||
|
||||
@@ -253,10 +261,10 @@ class gsettings_applier_user(applier_frontend):
|
||||
|
||||
# Calculate all mapped gsettings if mapping enabled
|
||||
if self.__windows_mapping_enabled:
|
||||
logging.debug(slogm('Mapping Windows policies to GSettings policies'))
|
||||
log('D83')
|
||||
self.windows_mapping_append()
|
||||
else:
|
||||
logging.debug(slogm('GSettings windows policies mapping not enabled'))
|
||||
log('D84')
|
||||
|
||||
# Calculate all configured gsettings
|
||||
for setting in self.gsettings_keys:
|
||||
@@ -264,22 +272,21 @@ class gsettings_applier_user(applier_frontend):
|
||||
rp = valuename.rpartition('.')
|
||||
schema = rp[0]
|
||||
path = rp[2]
|
||||
data = setting.data
|
||||
helper = self.uri_fetch_helper if setting.hive_key.lower() == self.__wallpaper_entry.lower() else None
|
||||
self.gsettings.append(user_gsetting(schema, path, setting.data, helper))
|
||||
if setting.hive_key.lower() == self.__vino_authentication_methods_entry.lower():
|
||||
data = [setting.data]
|
||||
self.gsettings.append(schema, path, data, helper)
|
||||
|
||||
# Create GSettings policy with highest available priority
|
||||
for gsetting in self.gsettings:
|
||||
logging.debug(slogm('Applying user setting {}.{} to {}'.format(gsetting.schema,
|
||||
gsetting.path,
|
||||
gsetting.value)))
|
||||
gsetting.apply()
|
||||
self.gsettings.apply()
|
||||
|
||||
def user_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running GSettings applier for user in user context'))
|
||||
log('D87')
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('GSettings applier for user in user context will not be started'))
|
||||
log('D88')
|
||||
|
||||
def admin_context_apply(self):
|
||||
# Cache files on remote locations
|
||||
@@ -291,5 +298,6 @@ class gsettings_applier_user(applier_frontend):
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exception'] = str(exc)
|
||||
logging.debug(slogm('Unable to cache specified URI for user: {}'.format(logdata)))
|
||||
log('E50', logdata)
|
||||
|
||||
|
||||
|
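The user applier now accumulates settings in a user_gsettings container and applies them in one pass instead of looping over individual user_gsetting objects. The real class lives in .appliers.gsettings; the stand-in below only mirrors the two calls visible in this diff, append(schema, path, value, helper) and apply(), and is an assumption about its shape:

```python
class user_gsettings_sketch:
    def __init__(self):
        self.settings = []

    def append(self, schema, path, value, helper=None):
        self.settings.append((schema, path, value, helper))

    def apply(self):
        for schema, path, value, helper in self.settings:
            if helper:
                value = helper(schema, path, value)   # e.g. fetch a cached wallpaper URI
            # the real implementation writes through Gio.Settings here
            print('gsettings set {} {} {}'.format(schema, path, value))
```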
78
gpoa/frontend/ini_applier.py
Normal file
@@ -0,0 +1,78 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from .appliers.ini_file import Ini_file
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from util.logging import log
|
||||
|
||||
class ini_applier(applier_frontend):
|
||||
__module_name = 'InifilesApplier'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
|
||||
def __init__(self, storage, sid):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.inifiles_info = self.storage.get_ini(self.sid)
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def run(self):
|
||||
for inifile in self.inifiles_info:
|
||||
Ini_file(inifile)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D171')
|
||||
self.run()
|
||||
else:
|
||||
log('D172')
|
||||
|
||||
class ini_applier_user(applier_frontend):
|
||||
__module_name = 'InifilesApplierUser'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self.storage = storage
|
||||
self.inifiles_info = self.storage.get_ini(self.sid)
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def run(self):
|
||||
for inifile in self.inifiles_info:
|
||||
Ini_file(inifile, self.username)
|
||||
|
||||
def admin_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D173')
|
||||
self.run()
|
||||
else:
|
||||
log('D174')
|
||||
|
||||
def user_context_apply(self):
|
||||
pass
|
46
gpoa/frontend/networkshare_applier.py
Normal file
@@ -0,0 +1,46 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .appliers.netshare import Networkshare
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from util.logging import log
|
||||
|
||||
class networkshare_applier(applier_frontend):
|
||||
__module_name = 'NetworksharesApplier'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
|
||||
def __init__(self, storage, sid):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.networkshare_info = self.storage.get_networkshare(self.sid)
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def run(self):
|
||||
for networkshare in self.networkshare_info:
Networkshare(networkshare)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D180')
|
||||
self.run()
|
||||
else:
|
||||
log('D181')
|
@@ -17,7 +17,7 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
import subprocess
|
||||
from enum import Enum
|
||||
|
||||
@@ -26,7 +26,7 @@ from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from util.logging import slogm
|
||||
from util.logging import log
|
||||
|
||||
|
||||
class NTPServerType(Enum):
|
||||
@@ -77,20 +77,24 @@ class ntp_applier(applier_frontend):
|
||||
srv = None
|
||||
if server:
|
||||
srv = server.data.rpartition(',')[0]
|
||||
logging.debug(slogm('NTP server is configured to {}'.format(srv)))
|
||||
logdata = dict()
|
||||
logdata['srv'] = srv
|
||||
log('D122', logdata)
|
||||
|
||||
start_command = ['/usr/bin/systemctl', 'start', 'chronyd']
|
||||
chrony_set_server = ['/usr/bin/chronyc', 'add', 'server', srv]
|
||||
chrony_disconnect_all = ['/usr/bin/chronyc', 'offline']
|
||||
chrony_connect = ['/usr/bin/chronyc', 'online', srv]
|
||||
|
||||
logging.debug(slogm('Starting Chrony daemon'))
|
||||
log('D123')
|
||||
|
||||
proc = subprocess.Popen(start_command)
|
||||
proc.wait()
|
||||
|
||||
if srv:
|
||||
logging.debug(slogm('Setting reference NTP server to {}'.format(srv)))
|
||||
logdata = dict()
|
||||
logdata['srv'] = srv
|
||||
log('D124', logdata)
|
||||
|
||||
proc = subprocess.Popen(chrony_disconnect_all)
|
||||
proc.wait()
|
||||
@@ -103,9 +107,7 @@ class ntp_applier(applier_frontend):
|
||||
|
||||
def _stop_chrony_client(self):
|
||||
stop_command = ['/usr/bin/systemctl', 'stop', 'chronyd']
|
||||
|
||||
logging.debug(slogm('Stopping Chrony daemon'))
|
||||
|
||||
log('D125')
|
||||
proc = subprocess.Popen(stop_command)
|
||||
proc.wait()
|
||||
|
||||
@@ -115,33 +117,38 @@ class ntp_applier(applier_frontend):
|
||||
ntp_server_enabled = self.storage.get_hklm_entry(self.ntp_server_enabled)
|
||||
ntp_client_enabled = self.storage.get_hklm_entry(self.ntp_client_enabled)
|
||||
|
||||
if NTPServerType.NTP.value != server_type.data:
|
||||
logging.warning(slogm('Unsupported NTP server type: {}'.format(server_type)))
|
||||
else:
|
||||
logging.debug(slogm('Configuring NTP server...'))
|
||||
if '1' == ntp_server_enabled.data:
|
||||
logging.debug(slogm('NTP server is enabled'))
|
||||
self._start_chrony_client(server_address)
|
||||
self._chrony_as_server()
|
||||
elif '0' == ntp_server_enabled.data:
|
||||
logging.debug(slogm('NTP server is disabled'))
|
||||
self._chrony_as_client()
|
||||
if server_type:
|
||||
if NTPServerType.NTP.value != server_type.data:
|
||||
logdata = dict()
|
||||
logdata['server_type'] = server_type
|
||||
log('W10', logdata)
|
||||
else:
|
||||
logging.debug(slogm('NTP server is not configured'))
|
||||
log('D126')
|
||||
if ntp_server_enabled:
|
||||
if '1' == ntp_server_enabled.data:
|
||||
log('D127')
|
||||
self._start_chrony_client(server_address)
|
||||
self._chrony_as_server()
|
||||
elif '0' == ntp_server_enabled.data:
|
||||
log('D128')
|
||||
self._chrony_as_client()
|
||||
else:
|
||||
log('D129')
|
||||
|
||||
if '1' == ntp_client_enabled.data:
|
||||
logging.debug(slogm('NTP client is enabled'))
|
||||
self._start_chrony_client()
|
||||
elif '0' == ntp_client_enabled.data:
|
||||
logging.debug(slogm('NTP client is disabled'))
|
||||
self._stop_chrony_client()
|
||||
else:
|
||||
logging.debug(slogm('NTP client is not configured'))
|
||||
if ntp_client_enabled:
|
||||
if '1' == ntp_client_enabled.data:
|
||||
log('D130')
|
||||
self._start_chrony_client()
|
||||
elif '0' == ntp_client_enabled.data:
|
||||
log('D131')
|
||||
self._stop_chrony_client()
|
||||
else:
|
||||
log('D132')
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running NTP applier for machine'))
|
||||
log('D121')
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('NTP applier for machine will not be started'))
|
||||
log('D133')
|
||||
|
||||
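The old run() dereferenced .data on registry entries that may be absent, which raises AttributeError when a policy is not configured; the rewritten hunk above checks each lookup before branching. A compact illustration of that guard pattern (storage and the entry object are hypothetical stand-ins):

```python
def configure_ntp_server(storage, key):
    ntp_server_enabled = storage.get_hklm_entry(key)   # None when unconfigured
    if ntp_server_enabled:
        if ntp_server_enabled.data == '1':
            return 'enable'        # start chrony and act as a server
        if ntp_server_enabled.data == '0':
            return 'client-only'
    return 'unconfigured'          # leave the system untouched
```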
|
@@ -17,7 +17,8 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
from util.logging import slogm
|
||||
import subprocess
|
||||
from util.logging import slogm, log
|
||||
from util.rpm import (
|
||||
update
|
||||
, install_rpm
|
||||
@@ -35,6 +36,7 @@ class package_applier(applier_frontend):
|
||||
__module_enabled = False
|
||||
__install_key_name = 'Install'
|
||||
__remove_key_name = 'Remove'
|
||||
__sync_key_name = 'Sync'
|
||||
__hklm_branch = 'Software\\BaseALT\\Policies\\Packages'
|
||||
|
||||
def __init__(self, storage):
|
||||
@@ -42,37 +44,49 @@ class package_applier(applier_frontend):
|
||||
|
||||
install_branch = '{}\\{}%'.format(self.__hklm_branch, self.__install_key_name)
|
||||
remove_branch = '{}\\{}%'.format(self.__hklm_branch, self.__remove_key_name)
|
||||
|
||||
sync_branch = '{}\\{}%'.format(self.__hklm_branch, self.__sync_key_name)
|
||||
self.fulcmd = list()
|
||||
self.fulcmd.append('/usr/libexec/gpupdate/pkcon_runner')
|
||||
self.fulcmd.append('--loglevel')
|
||||
logger = logging.getLogger()
|
||||
self.fulcmd.append(str(logger.level))
|
||||
self.install_packages_setting = self.storage.filter_hklm_entries(install_branch)
|
||||
self.remove_packages_setting = self.storage.filter_hklm_entries(remove_branch)
|
||||
self.sync_packages_setting = self.storage.filter_hklm_entries(sync_branch)
|
||||
self.flagSync = True
|
||||
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def run(self):
|
||||
if 0 < self.install_packages_setting.count() or 0 < self.remove_packages_setting.count():
|
||||
update()
|
||||
for package in self.install_packages_setting:
|
||||
try:
|
||||
install_rpm(package.data)
|
||||
except Exception as exc:
|
||||
logging.error(exc)
|
||||
for flag in self.sync_packages_setting:
|
||||
if flag.data:
|
||||
self.flagSync = bool(int(flag.data))
|
||||
|
||||
for package in self.remove_packages_setting:
|
||||
if 0 < self.install_packages_setting.count() or 0 < self.remove_packages_setting.count():
|
||||
if self.flagSync:
|
||||
try:
|
||||
remove_rpm(package.data)
|
||||
subprocess.check_call(self.fulcmd)
|
||||
except Exception as exc:
|
||||
logging.error(exc)
|
||||
logdata = dict()
|
||||
logdata['msg'] = str(exc)
|
||||
log('E55', logdata)
|
||||
else:
|
||||
try:
|
||||
subprocess.Popen(self.fulcmd,close_fds=False)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['msg'] = str(exc)
|
||||
log('E61', logdata)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Package applier for machine'))
|
||||
log('D138')
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('Package applier for machine will not be started'))
|
||||
log('D139')
|
||||
|
||||
|
||||
class package_applier_user(applier_frontend):
|
||||
@@ -81,18 +95,29 @@ class package_applier_user(applier_frontend):
|
||||
__module_enabled = False
|
||||
__install_key_name = 'Install'
|
||||
__remove_key_name = 'Remove'
|
||||
__sync_key_name = 'Sync'
|
||||
__hkcu_branch = 'Software\\BaseALT\\Policies\\Packages'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self.fulcmd = list()
|
||||
self.fulcmd.append('/usr/libexec/gpupdate/pkcon_runner')
|
||||
self.fulcmd.append('--sid')
|
||||
self.fulcmd.append(self.sid)
|
||||
self.fulcmd.append('--loglevel')
|
||||
logger = logging.getLogger()
|
||||
self.fulcmd.append(str(logger.level))
|
||||
|
||||
install_branch = '{}\\{}%'.format(self.__hkcu_branch, self.__install_key_name)
|
||||
remove_branch = '{}\\{}%'.format(self.__hkcu_branch, self.__remove_key_name)
|
||||
sync_branch = '{}\\{}%'.format(self.__hkcu_branch, self.__sync_key_name)
|
||||
|
||||
self.install_packages_setting = self.storage.filter_hkcu_entries(self.sid, install_branch)
|
||||
self.remove_packages_setting = self.storage.filter_hkcu_entries(self.sid, remove_branch)
|
||||
self.sync_packages_setting = self.storage.filter_hkcu_entries(self.sid, sync_branch)
|
||||
self.flagSync = False
|
||||
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_enabled)
|
||||
|
||||
@@ -103,19 +128,25 @@ class package_applier_user(applier_frontend):
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
if 0 < self.install_packages_setting.count() or 0 < self.remove_packages_setting.count():
|
||||
update()
|
||||
for package in self.install_packages_setting:
|
||||
try:
|
||||
install_rpm(package.data)
|
||||
except Exception as exc:
|
||||
logging.debug(exc)
|
||||
for flag in self.sync_packages_setting:
|
||||
if flag.data:
|
||||
self.flagSync = bool(int(flag.data))
|
||||
|
||||
for package in self.remove_packages_setting:
|
||||
if 0 < self.install_packages_setting.count() or 0 < self.remove_packages_setting.count():
|
||||
if self.flagSync:
|
||||
try:
|
||||
remove_rpm(package.data)
|
||||
subprocess.check_call(self.fulcmd)
|
||||
except Exception as exc:
|
||||
logging.debug(exc)
|
||||
logdata = dict()
|
||||
logdata['msg'] = str(exc)
|
||||
log('E60', logdata)
|
||||
else:
|
||||
try:
|
||||
subprocess.Popen(self.fulcmd,close_fds=False)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['msg'] = str(exc)
|
||||
log('E62', logdata)
|
||||
|
||||
def admin_context_apply(self):
|
||||
'''
|
||||
@@ -123,8 +154,8 @@ class package_applier_user(applier_frontend):
|
||||
which computer he uses to log into system.
|
||||
'''
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Package applier for user in administrator context'))
|
||||
log('D140')
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('Package applier for user in administrator context will not be started'))
|
||||
log('D141')
|
||||
|
||||
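Package installation and removal now delegate to /usr/libexec/gpupdate/pkcon_runner: the runner is invoked synchronously (check_call) when the Sync flag is set and detached (Popen with close_fds=False) otherwise. A hedged sketch of that dispatch; the runner path and flag semantics come from the diff, the helper name is illustrative:

```python
import logging
import subprocess

def run_pkcon_runner(sync_flag, loglevel):
    cmd = ['/usr/libexec/gpupdate/pkcon_runner', '--loglevel', str(loglevel)]
    if sync_flag:
        subprocess.check_call(cmd)              # block until package work finishes
    else:
        subprocess.Popen(cmd, close_fds=False)  # fire and forget

# run_pkcon_runner(sync_flag=True, loglevel=logging.getLogger().level)
```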
|
@@ -19,17 +19,17 @@
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
, check_windows_mapping_enabled
|
||||
)
|
||||
from .appliers.polkit import polkit
|
||||
from util.logging import slogm
|
||||
|
||||
import logging
|
||||
from util.logging import log
|
||||
|
||||
class polkit_applier(applier_frontend):
|
||||
__module_name = 'PolkitApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__deny_all = 'Software\\Policies\\Microsoft\\Windows\\RemovableStorageDevices\\Deny_All'
|
||||
__deny_all_win = 'Software\\Policies\\Microsoft\\Windows\\RemovableStorageDevices\\Deny_All'
|
||||
__deny_all = 'Software\\BaseALT\\Policies\\GPUpdate\\RemovableStorageDevices\\Deny_All'
|
||||
__polkit_map = {
|
||||
__deny_all: ['49-gpoa_disk_permissions', { 'Deny_All': 0 }]
|
||||
}
|
||||
@@ -37,14 +37,18 @@ class polkit_applier(applier_frontend):
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
deny_all = storage.filter_hklm_entries(self.__deny_all).first()
|
||||
if not deny_all and check_windows_mapping_enabled(self.storage):
|
||||
deny_all = storage.filter_hklm_entries(self.__deny_all_win).first()
|
||||
# Deny_All hook: initialize defaults
|
||||
template_file = self.__polkit_map[self.__deny_all][0]
|
||||
template_vars = self.__polkit_map[self.__deny_all][1]
|
||||
if deny_all:
|
||||
logging.debug(slogm('Deny_All setting found: {}'.format(deny_all.data)))
|
||||
logdata = dict()
|
||||
logdata['Deny_All'] = deny_all.data
|
||||
log('D69', logdata)
|
||||
self.__polkit_map[self.__deny_all][1]['Deny_All'] = deny_all.data
|
||||
else:
|
||||
logging.debug(slogm('Deny_All setting not found'))
|
||||
log('D71')
|
||||
self.policies = []
|
||||
self.policies.append(polkit(template_file, template_vars))
|
||||
self.__module_enabled = check_enabled(
|
||||
@@ -58,17 +62,18 @@ class polkit_applier(applier_frontend):
|
||||
Trigger control facility invocation.
|
||||
'''
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Polkit applier for machine'))
|
||||
log('D73')
|
||||
for policy in self.policies:
|
||||
policy.generate()
|
||||
else:
|
||||
logging.debug(slogm('Polkit applier for machine will not be started'))
|
||||
log('D75')
|
||||
|
||||
class polkit_applier_user(applier_frontend):
|
||||
__module_name = 'PolkitApplierUser'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__deny_all = 'Software\\Policies\\Microsoft\\Windows\\RemovableStorageDevices\\Deny_All'
|
||||
__deny_all_win = 'Software\\Policies\\Microsoft\\Windows\\RemovableStorageDevices\\Deny_All'
|
||||
__deny_all = 'Software\\BaseALT\\Policies\\GPUpdate\\RemovableStorageDevices\\Deny_All'
|
||||
__polkit_map = {
|
||||
__deny_all: ['48-gpoa_disk_permissions_user', { 'Deny_All': 0, 'User': '' }]
|
||||
}
|
||||
@@ -79,15 +84,20 @@ class polkit_applier_user(applier_frontend):
|
||||
self.username = username
|
||||
|
||||
deny_all = storage.filter_hkcu_entries(self.sid, self.__deny_all).first()
|
||||
if not deny_all and check_windows_mapping_enabled(self.storage):
|
||||
deny_all = storage.filter_hkcu_entries(self.sid, self.__deny_all_win).first()
|
||||
# Deny_All hook: initialize defaults
|
||||
template_file = self.__polkit_map[self.__deny_all][0]
|
||||
template_vars = self.__polkit_map[self.__deny_all][1]
|
||||
if deny_all:
|
||||
logging.debug(slogm('Deny_All setting for user {} found: {}'.format(self.username, deny_all.data)))
|
||||
logdata = dict()
|
||||
logdata['user'] = self.username
|
||||
logdata['Deny_All'] = deny_all.data
|
||||
log('D70', logdata)
|
||||
self.__polkit_map[self.__deny_all][1]['Deny_All'] = deny_all.data
|
||||
self.__polkit_map[self.__deny_all][1]['User'] = self.username
|
||||
else:
|
||||
logging.debug(slogm('Deny_All setting not found'))
|
||||
log('D72')
|
||||
self.policies = []
|
||||
self.policies.append(polkit(template_file, template_vars, self.username))
|
||||
self.__module_enabled = check_enabled(
|
||||
@@ -104,9 +114,10 @@ class polkit_applier_user(applier_frontend):
|
||||
Trigger control facility invocation.
|
||||
'''
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Polkit applier for user in administrator context'))
|
||||
log('D74')
|
||||
for policy in self.policies:
|
||||
policy.generate()
|
||||
else:
|
||||
logging.debug(slogm('Polkit applier for user in administrator context will not be started'))
|
||||
log('D76')
|
||||
|
||||
|
||||
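Both polkit appliers above switch to a two-step lookup for Deny_All: the BaseALT branch wins, and the legacy Microsoft branch is consulted only when Windows-policy mapping is enabled. The order in isolation; storage is a hypothetical stand-in with the same filter API:

```python
DENY_ALL_ALT = 'Software\\BaseALT\\Policies\\GPUpdate\\RemovableStorageDevices\\Deny_All'
DENY_ALL_WIN = 'Software\\Policies\\Microsoft\\Windows\\RemovableStorageDevices\\Deny_All'

def resolve_deny_all(storage, mapping_enabled):
    entry = storage.filter_hklm_entries(DENY_ALL_ALT).first()
    if not entry and mapping_enabled:
        entry = storage.filter_hklm_entries(DENY_ALL_WIN).first()
    return entry.data if entry else 0    # 0 keeps the polkit template default
```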
|
159
gpoa/frontend/scripts_applier.py
Normal file
@@ -0,0 +1,159 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
import pysss_nss_idmap
|
||||
|
||||
from django.template import base
|
||||
from util.logging import log
|
||||
from .appliers.folder import remove_dir_tree
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
|
||||
class scripts_applier(applier_frontend):
|
||||
__module_name = 'ScriptsApplier'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
__cache_scripts = '/var/cache/gpupdate_scripts_cache/machine/'
|
||||
|
||||
def __init__(self, storage, sid):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.startup_scripts = self.storage.get_scripts(self.sid, 'STARTUP')
|
||||
self.shutdown_scripts = self.storage.get_scripts(self.sid, 'SHUTDOWN')
|
||||
self.folder_path = Path(self.__cache_scripts)
|
||||
self.__module_enabled = check_enabled(self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def cleaning_cache(self):
|
||||
log('D160')
|
||||
try:
|
||||
remove_dir_tree(self.folder_path, True, True, True,)
|
||||
except FileNotFoundError as exc:
|
||||
log('D154')
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exc'] = exc
|
||||
log('E64', logdata)
|
||||
|
||||
def filling_cache(self):
|
||||
'''
|
||||
Create or update the cache directories for scripts and copy the scripts into them
|
||||
'''
|
||||
self.folder_path.mkdir(parents=True, exist_ok=True)
|
||||
for ts in self.startup_scripts:
|
||||
script_path = os.path.join(self.__cache_scripts, 'STARTUP')
|
||||
install_script(ts, script_path, '700')
|
||||
for ts in self.shutdown_scripts:
|
||||
script_path = os.path.join(self.__cache_scripts, 'SHUTDOWN')
|
||||
install_script(ts, script_path, '700')
|
||||
|
||||
def run(self):
|
||||
self.filling_cache()
|
||||
|
||||
def apply(self):
|
||||
self.cleaning_cache()
|
||||
if self.__module_enabled:
|
||||
log('D156')
|
||||
self.run()
|
||||
else:
|
||||
log('D157')
|
||||
|
||||
class scripts_applier_user(applier_frontend):
|
||||
__module_name = 'ScriptsApplierUser'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
__cache_scripts = '/var/cache/gpupdate_scripts_cache/users/'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.logon_scripts = self.storage.get_scripts(self.sid, 'LOGON')
|
||||
self.logoff_scripts = self.storage.get_scripts(self.sid, 'LOGOFF')
|
||||
self.username = username
|
||||
self.folder_path = Path(self.__cache_scripts + self.username)
|
||||
self.__module_enabled = check_enabled(self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
self.filling_cache()
|
||||
|
||||
def cleaning_cache(self):
|
||||
log('D161')
|
||||
try:
|
||||
remove_dir_tree(self.folder_path, True, True, True,)
|
||||
except FileNotFoundError as exc:
|
||||
log('D155')
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exc'] = exc
|
||||
log('E65', logdata)
|
||||
|
||||
def filling_cache(self):
|
||||
'''
|
||||
Create or update the cache directories for scripts and copy the scripts into them
|
||||
'''
|
||||
self.folder_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for ts in self.logon_scripts:
|
||||
script_path = os.path.join(self.__cache_scripts, self.username, 'LOGON')
|
||||
install_script(ts, script_path, '755')
|
||||
for ts in self.logoff_scripts:
|
||||
script_path = os.path.join(self.__cache_scripts, self.username, 'LOGOFF')
|
||||
install_script(ts, script_path, '755')
|
||||
|
||||
def user_context_apply(self):
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
self.filling_cache()
|
||||
|
||||
def admin_context_apply(self):
|
||||
self.cleaning_cache()
|
||||
if self.__module_enabled:
|
||||
log('D158')
|
||||
self.run()
|
||||
else:
|
||||
log('D159')
|
||||
|
||||
def install_script(storage_script_entry, script_dir, access_permissions):
|
||||
'''
|
||||
Copy a script into the given cache directory and,
if the script entry carries arguments,
create a matching .arg directory next to it and store the arguments there
|
||||
'''
|
||||
dir_cr = Path(script_dir)
|
||||
dir_cr.mkdir(parents=True, exist_ok=True)
|
||||
script_name = str(int(storage_script_entry.number)).zfill(5) + '_' + os.path.basename(storage_script_entry.path)
|
||||
script_file = os.path.join(script_dir, script_name)
|
||||
shutil.copyfile(storage_script_entry.path, script_file)
|
||||
|
||||
os.chmod(script_file, int(access_permissions, base = 8))
|
||||
if storage_script_entry.arg:
|
||||
dir_path = script_dir + '/' + script_name + '.arg'
|
||||
dir_arg = Path(dir_path)
|
||||
dir_arg.mkdir(parents=True, exist_ok=True)
|
||||
file_arg = open(dir_path + '/arg', 'w')
|
||||
file_arg.write(storage_script_entry.arg)
|
||||
file_arg.close()
|
@@ -25,7 +25,7 @@ from .applier_frontend import (
|
||||
)
|
||||
from gpt.shortcuts import json2sc
|
||||
from util.windows import expand_windows_var
|
||||
from util.logging import slogm
|
||||
from util.logging import slogm, log
|
||||
from util.util import (
|
||||
get_homedir,
|
||||
homedir_exists
|
||||
@@ -55,8 +55,10 @@ def apply_shortcut(shortcut, username=None):
|
||||
dest_abspath = shortcut.dest
|
||||
if not dest_abspath.startswith('/') and not dest_abspath.startswith('%'):
|
||||
dest_abspath = '%HOME%/' + dest_abspath
|
||||
|
||||
logging.debug(slogm('Try to expand path for shortcut: {} for {}'.format(dest_abspath, username)))
|
||||
logdata = dict()
|
||||
logdata['shortcut'] = dest_abspath
|
||||
logdata['for'] = username
|
||||
log('D105', logdata)
|
||||
dest_abspath = expand_windows_var(dest_abspath, username).replace('\\', '/') + '.desktop'
|
||||
|
||||
# Check that we're working for user, not on global system level
|
||||
@@ -66,21 +68,33 @@ def apply_shortcut(shortcut, username=None):
|
||||
if dest_abspath.startswith(get_homedir(username)):
|
||||
# Don't try to operate on non-existent directory
|
||||
if not homedir_exists(username):
|
||||
logging.warning(slogm('No home directory exists for user {}: will not apply link {}'.format(username, dest_abspath)))
|
||||
logdata = dict()
|
||||
logdata['user'] = username
|
||||
logdata['dest_abspath'] = dest_abspath
|
||||
log('W7', logdata)
|
||||
return None
|
||||
else:
|
||||
logging.warning(slogm('User\'s shortcut not placed to home directory for {}: bad path {}'.format(username, dest_abspath)))
|
||||
logdata = dict()
|
||||
logdata['user'] = username
|
||||
logdata['bad path'] = dest_abspath
|
||||
log('W8', logdata)
|
||||
return None
|
||||
|
||||
if '%' in dest_abspath:
|
||||
logging.debug(slogm('Failed to apply shortcut: unexpanded \'%\' remains in path: {}'.format(dest_abspath)))
|
||||
logdata = dict()
|
||||
logdata['dest_abspath'] = dest_abspath
|
||||
log('E53', logdata)
|
||||
return None
|
||||
|
||||
if not dest_abspath.startswith('/'):
|
||||
logging.debug(slogm('Failed to apply shortcut: path is not absolute: {}'.format(dest_abspath)))
|
||||
logdata = dict()
|
||||
logdata['dest_abspath'] = dest_abspath
|
||||
log('E54', logdata)
|
||||
return None
|
||||
|
||||
logging.debug(slogm('Applying shortcut file to {} with action {}'.format(dest_abspath, shortcut.action)))
|
||||
logdata = dict()
|
||||
logdata['file'] = dest_abspath
|
||||
logdata['with_action'] = shortcut.action
|
||||
log('D106', logdata)
|
||||
shortcut.apply_desktop(dest_abspath)
|
||||
|
||||
class shortcut_applier(applier_frontend):
|
||||
@@ -108,14 +122,16 @@ class shortcut_applier(applier_frontend):
|
||||
# /usr/local/share/applications
|
||||
subprocess.check_call(['/usr/bin/update-desktop-database'])
|
||||
else:
|
||||
logging.debug(slogm('No shortcuts to process for {}'.format(self.storage.get_info('machine_sid'))))
|
||||
logdata = dict()
|
||||
logdata['machine_sid'] = self.storage.get_info('machine_sid')
|
||||
log('D100', logdata)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Shortcut applier for machine'))
|
||||
log('D98')
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('Shortcut applier for machine will not be started'))
|
||||
log('D99')
|
||||
|
||||
class shortcut_applier_user(applier_frontend):
|
||||
__module_name = 'ShortcutsApplierUser'
|
||||
@@ -137,19 +153,21 @@ class shortcut_applier_user(applier_frontend):
|
||||
if not in_usercontext and not sc.is_usercontext():
|
||||
apply_shortcut(sc, self.username)
|
||||
else:
|
||||
logging.debug(slogm('No shortcuts to process for {}'.format(self.sid)))
|
||||
logdata = dict()
|
||||
logdata['sid'] = self.sid
|
||||
log('D100', logdata)
|
||||
|
||||
def user_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Shortcut applier for user in user context'))
|
||||
log('D101')
|
||||
self.run(True)
|
||||
else:
|
||||
logging.debug(slogm('Shortcut applier for user in user context will not be started'))
|
||||
log('D102')
|
||||
|
||||
def admin_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running Shortcut applier for user in administrator context'))
|
||||
log('D103')
|
||||
self.run(False)
|
||||
else:
|
||||
logging.debug(slogm('Shortcut applier for user in administrator context will not be started'))
|
||||
log('D104')
|
||||
|
||||
|
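The hunk above prefixes relative shortcut destinations with %HOME%, expands Windows-style variables, appends the .desktop suffix, and then refuses anything that still contains '%' or falls outside the user's home directory. A minimal standalone sketch of that normalization, assuming simplified stand-ins for util.windows.expand_windows_var() and util.util.get_homedir():

import pwd

def get_homedir(username):
    # simplified stand-in for util.util.get_homedir()
    return pwd.getpwnam(username).pw_dir

def normalize_shortcut_dest(dest, username):
    # Relative destinations are treated as relative to the user's home
    if not dest.startswith('/') and not dest.startswith('%'):
        dest = '%HOME%/' + dest
    # Tiny subset of expand_windows_var(): only %HOME% is expanded here
    dest = dest.replace('%HOME%', get_homedir(username)).replace('\\', '/') + '.desktop'
    # Reject unexpanded variables and paths outside the home directory
    if '%' in dest or not dest.startswith(get_homedir(username)):
        return None
    return dest

# normalize_shortcut_dest('Desktop/editor', 'someuser')
# -> '/home/someuser/Desktop/editor.desktop' for an existing account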
@@ -21,7 +21,7 @@ from .applier_frontend import (
|
||||
, check_enabled
|
||||
)
|
||||
from .appliers.systemd import systemd_unit
|
||||
from util.logging import slogm
|
||||
from util.logging import slogm, log
|
||||
|
||||
import logging
|
||||
|
||||
@@ -46,24 +46,31 @@ class systemd_applier(applier_frontend):
|
||||
valuename = setting.hive_key.rpartition('\\')[2]
|
||||
try:
|
||||
self.units.append(systemd_unit(valuename, int(setting.data)))
|
||||
logging.info(slogm('Working with systemd unit {}'.format(valuename)))
|
||||
logdata = dict()
|
||||
logdata['unit'] = format(valuename)
|
||||
log('I4', logdata)
|
||||
except Exception as exc:
|
||||
logging.info(slogm('Unable to work with systemd unit {}: {}'.format(valuename, exc)))
|
||||
logdata = dict()
|
||||
logdata['unit'] = format(valuename)
|
||||
logdata['exc'] = exc
|
||||
log('I5', logdata)
|
||||
for unit in self.units:
|
||||
try:
|
||||
unit.apply()
|
||||
except:
|
||||
logging.error(slogm('Failed applying unit {}'.format(unit.unit_name)))
|
||||
logdata = dict()
|
||||
logdata['unit'] = unit.unit_name
|
||||
log('E45', logdata)
|
||||
|
||||
def apply(self):
|
||||
'''
|
||||
Trigger control facility invocation.
|
||||
'''
|
||||
if self.__module_enabled:
|
||||
logging.debug(slogm('Running systemd applier for machine'))
|
||||
log('D78')
|
||||
self.run()
|
||||
else:
|
||||
logging.debug(slogm('systemd applier for machine will not be started'))
|
||||
log('D79')
|
||||
|
||||
class systemd_applier_user(applier_frontend):
|
||||
__module_name = 'SystemdApplierUser'
|
||||
|
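In the systemd hunk above, the unit name is taken as the last component of each registry hive key, and every unit is applied inside its own try/except so one failure does not abort the rest. A rough sketch of that collection loop, using a hypothetical stub in place of appliers.systemd.systemd_unit and a made-up registry entry:

class fake_systemd_unit:
    # hypothetical stand-in for appliers.systemd.systemd_unit
    def __init__(self, unit_name, state):
        self.unit_name = unit_name
        self.state = state
    def apply(self):
        print('would {} {}'.format('enable' if self.state else 'disable', self.unit_name))

def collect_units(settings):
    units = []
    for setting in settings:
        # the unit name is the last component of the registry key path
        valuename = setting['hive_key'].rpartition('\\')[2]
        try:
            units.append(fake_systemd_unit(valuename, int(setting['data'])))
        except Exception:
            pass  # the real applier logs the failure (I5) and continues
    return units

sample = [{'hive_key': 'Software\\BaseALT\\Policies\\SystemdUnits\\sshd.service', 'data': '1'}]
for unit in collect_units(sample):
    unit.apply()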
169
gpoa/frontend/yandex_browser_applier.py
Normal file
@@ -0,0 +1,169 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
|
||||
import json
|
||||
import os
|
||||
from util.logging import log
|
||||
from util.util import is_machine_name
|
||||
|
||||
class yandex_browser_applier(applier_frontend):
|
||||
__module_name = 'YandexBrowserApplier'
|
||||
__module_enabled = True
|
||||
__module_experimental = False
|
||||
__registry_branch = 'Software\\Policies\\YandexBrowser'
|
||||
__managed_policies_path = '/etc/opt/yandex/browser/policies/managed'
|
||||
__recommended_policies_path = '/etc/opt/yandex/browser/policies/recommended'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self._is_machine_name = is_machine_name(self.username)
|
||||
yandex_filter = '{}%'.format(self.__registry_branch)
|
||||
self.yandex_keys = self.storage.filter_hklm_entries(yandex_filter)
|
||||
|
||||
self.policies_json = dict()
|
||||
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def machine_apply(self):
|
||||
'''
|
||||
Apply machine settings.
|
||||
'''
|
||||
|
||||
destfile = os.path.join(self.__managed_policies_path, 'policies.json')
|
||||
|
||||
try:
|
||||
recommended__json = self.policies_json.pop('Recommended')
|
||||
except:
|
||||
recommended__json = {}
|
||||
|
||||
#Replacing all nested dictionaries with a list
|
||||
dict_item_to_list = (
|
||||
lambda target_dict :
|
||||
{key:[*val.values()] if type(val) == dict else val for key,val in target_dict.items()}
|
||||
)
|
||||
os.makedirs(self.__managed_policies_path, exist_ok=True)
|
||||
with open(destfile, 'w') as f:
|
||||
json.dump(dict_item_to_list(self.policies_json), f)
|
||||
logdata = dict()
|
||||
logdata['destfile'] = destfile
|
||||
log('D185', logdata)
|
||||
|
||||
destfilerec = os.path.join(self.__recommended_policies_path, 'policies.json')
|
||||
os.makedirs(self.__recommended_policies_path, exist_ok=True)
|
||||
with open(destfilerec, 'w') as f:
|
||||
json.dump(dict_item_to_list(recommended__json), f)
|
||||
logdata = dict()
|
||||
logdata['destfilerec'] = destfilerec
|
||||
log('D185', logdata)
|
||||
|
||||
|
||||
def apply(self):
|
||||
'''
|
||||
All actual job done here.
|
||||
'''
|
||||
if self.__module_enabled:
|
||||
log('D183')
|
||||
self.create_dict(self.yandex_keys)
|
||||
self.machine_apply()
|
||||
else:
|
||||
log('D184')
|
||||
|
||||
def get_valuename_typeint(self):
|
||||
'''
|
||||
List of keys resulting from parsing chrome.admx with parsing_chrom_admx_intvalues.py
|
||||
'''
|
||||
valuename_typeint = (['TurboSettings',
|
||||
'DefaultPluginsSetting',
|
||||
'BrowserSignin',
|
||||
'DefaultCookiesSetting',
|
||||
'DefaultGeolocationSetting',
|
||||
'DefaultPopupsSetting',
|
||||
'DeveloperToolsAvailability',
|
||||
'IncognitoModeAvailability',
|
||||
'PasswordProtectionWarningTrigger',
|
||||
'SafeBrowsingProtectionLevel',
|
||||
'SafeBrowsingProtectionLevel_recommended',
|
||||
'SidePanelMode',
|
||||
'YandexAutoLaunchMode'])
|
||||
return valuename_typeint
|
||||
|
||||
|
||||
def get_boolean(self,data):
|
||||
if data in ['0', 'false', None, 'none', 0]:
|
||||
return False
|
||||
if data in ['1', 'true', 1]:
|
||||
return True
|
||||
def get_parts(self, hivekeyname):
|
||||
'''
|
||||
Parse registry path string and leave key parameters
|
||||
'''
|
||||
parts = hivekeyname.replace(self.__registry_branch, '').split('\\')
|
||||
return parts
|
||||
|
||||
|
||||
def create_dict(self, yandex_keys):
|
||||
'''
|
||||
Collect dictionaries from registry keys into a general dictionary
|
||||
'''
|
||||
counts = dict()
|
||||
#getting the list of keys to read as an integer
|
||||
valuename_typeint = self.get_valuename_typeint()
|
||||
for it_data in yandex_keys:
|
||||
branch = counts
|
||||
try:
|
||||
if type(it_data.data) is bytes:
|
||||
it_data.data = it_data.data.decode(encoding='utf-16').replace('\x00','')
|
||||
parts = self.get_parts(it_data.hive_key)
|
||||
#creating a nested dictionary from elements
|
||||
for part in parts[:-1]:
|
||||
branch = branch.setdefault(part, {})
|
||||
#dictionary key value initialization
|
||||
if it_data.type == 4:
|
||||
if it_data.valuename in valuename_typeint:
|
||||
branch[parts[-1]] = int(it_data.data)
|
||||
else:
|
||||
branch[parts[-1]] = self.get_boolean(it_data.data)
|
||||
else:
|
||||
if it_data.data[0] == '[' and it_data.data[-1] == ']':
|
||||
try:
|
||||
branch[parts[-1]] = json.loads(str(it_data.data))
|
||||
except:
|
||||
branch[parts[-1]] = str(it_data.data).replace('\\', '/')
|
||||
else:
|
||||
branch[parts[-1]] = str(it_data.data).replace('\\', '/')
|
||||
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['Exception'] = exc
|
||||
logdata['keyname'] = it_data.keyname
|
||||
log('D178', logdata)
|
||||
try:
|
||||
self.policies_json = counts['']
|
||||
except:
|
||||
self.policies_json = {}
|
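create_dict() above rebuilds the registry branch under Software\Policies\YandexBrowser into nested dictionaries by splitting each hive key on '\\', and machine_apply() then flattens dict-valued policies into lists before writing policies.json. A self-contained sketch with hypothetical sample keys:

import json

REGISTRY_BRANCH = 'Software\\Policies\\YandexBrowser'

def build_policies(entries):
    counts = {}
    for hive_key, data in entries:
        branch = counts
        parts = hive_key.replace(REGISTRY_BRANCH, '').split('\\')
        # create the nested dictionaries for all but the last path component
        for part in parts[:-1]:
            branch = branch.setdefault(part, {})
        branch[parts[-1]] = data
    return counts.get('', {})

entries = [
    ('Software\\Policies\\YandexBrowser\\HomepageLocation', 'https://example.org'),
    ('Software\\Policies\\YandexBrowser\\ExtensionInstallForcelist\\1', 'abcdefgh'),
]
policies = build_policies(entries)
# replace dict-valued policies (numbered sub-keys) with plain lists
policies = {k: list(v.values()) if isinstance(v, dict) else v for k, v in policies.items()}
print(json.dumps(policies))
# {"HomepageLocation": "https://example.org", "ExtensionInstallForcelist": ["abcdefgh"]}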
@@ -22,17 +22,35 @@ def read_files(filesxml):
|
||||
files = list()
|
||||
|
||||
for fil in get_xml_root(filesxml):
|
||||
fil_obj = fileentry()
|
||||
|
||||
props = fil.find('Properties')
|
||||
fil_obj = fileentry(props.get('fromPath'))
|
||||
fil_obj.set_action(props.get('action', default='C'))
|
||||
fil_obj.set_target_path(props.get('targetPath', default=None))
|
||||
fil_obj.set_read_only(props.get('readOnly', default=None))
|
||||
fil_obj.set_archive(props.get('archive', default=None))
|
||||
fil_obj.set_hidden(props.get('hidden', default=None))
|
||||
fil_obj.set_suppress(props.get('suppress', default=None))
|
||||
files.append(fil_obj)
|
||||
|
||||
return files
|
||||
|
||||
def merge_files(storage, sid, file_objects, policy_name):
|
||||
for fileobj in file_objects:
|
||||
pass
|
||||
storage.add_file(sid, fileobj, policy_name)
|
||||
|
||||
class fileentry:
|
||||
def __init__(self):
|
||||
pass
|
||||
def __init__(self, fromPath):
|
||||
self.fromPath = fromPath
|
||||
|
||||
def set_action(self, action):
|
||||
self.action = action
|
||||
def set_target_path(self, targetPath):
|
||||
self.targetPath = targetPath
|
||||
def set_read_only(self, readOnly):
|
||||
self.readOnly = readOnly
|
||||
def set_archive(self, archive):
|
||||
self.archive = archive
|
||||
def set_hidden(self, hidden):
|
||||
self.hidden = hidden
|
||||
def set_suppress(self, suppress):
|
||||
self.suppress = suppress
|
||||
|
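read_files() above pulls every attribute of the <Properties> element off the Files preference XML. A hedged illustration using xml.etree and a made-up Files.xml snippet in place of gpoa's util.xml.get_xml_root(); the element layout is an assumption for the example only:

from xml.etree import ElementTree

FILES_XML = '''
<Files>
  <File name="motd">
    <Properties action="R" fromPath="\\\\server\\share\\motd" targetPath="/etc/motd" readOnly="1"/>
  </File>
</Files>
'''

for fil in ElementTree.fromstring(FILES_XML):
    props = fil.find('Properties')
    entry = {
        'fromPath': props.get('fromPath'),
        'action': props.get('action', default='C'),
        'targetPath': props.get('targetPath', default=None),
        'readOnly': props.get('readOnly', default=None),
    }
    print(entry)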
@@ -50,7 +50,7 @@ def folder_int2bool(val):
|
||||
if type(value) == str:
|
||||
value = int(value)
|
||||
|
||||
if value == 0:
|
||||
if value == 1:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
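The folder hunk above swaps the comparison so that only a value of 1 maps to True; a compact restatement of the corrected helper:

def folder_int2bool(value):
    # preference values may arrive as strings, so normalize to int first
    if type(value) == str:
        value = int(value)
    if value == 1:
        return True
    return False

assert folder_int2bool('1') is True
assert folder_int2bool(0) is False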
146
gpoa/gpt/gpt.py
@@ -64,7 +64,14 @@ from .tasks import (
|
||||
read_tasks
|
||||
, merge_tasks
|
||||
)
|
||||
|
||||
from .scriptsini import (
|
||||
read_scripts
|
||||
, merge_scripts
|
||||
)
|
||||
from .networkshares import (
|
||||
read_networkshares
|
||||
, merge_networkshares
|
||||
)
|
||||
import util
|
||||
import util.preg
|
||||
from util.paths import (
|
||||
@@ -87,6 +94,8 @@ class FileType(Enum):
|
||||
INIFILES = 'inifiles.xml'
|
||||
SERVICES = 'services.xml'
|
||||
PRINTERS = 'printers.xml'
|
||||
SCRIPTS = 'scripts.ini'
|
||||
NETWORKSHARES = 'networkshares.xml'
|
||||
|
||||
def get_preftype(path_to_file):
|
||||
fpath = Path(path_to_file)
|
||||
@@ -112,6 +121,8 @@ def pref_parsers():
|
||||
parsers[FileType.INIFILES] = read_inifiles
|
||||
parsers[FileType.SERVICES] = read_services
|
||||
parsers[FileType.PRINTERS] = read_printers
|
||||
parsers[FileType.SCRIPTS] = read_scripts
|
||||
parsers[FileType.NETWORKSHARES] = read_networkshares
|
||||
|
||||
return parsers
|
||||
|
||||
@@ -132,6 +143,8 @@ def pref_mergers():
|
||||
mergers[FileType.INIFILES] = merge_inifiles
|
||||
mergers[FileType.SERVICES] = merge_services
|
||||
mergers[FileType.PRINTERS] = merge_printers
|
||||
mergers[FileType.SCRIPTS] = merge_scripts
|
||||
mergers[FileType.NETWORKSHARES] = merge_networkshares
|
||||
|
||||
return mergers
|
||||
|
||||
@@ -140,20 +153,19 @@ def get_merger(preference_type):
|
||||
return mergers[preference_type]
|
||||
|
||||
class gpt:
|
||||
__user_policy_mode_key = 'Software\\Policies\\Microsoft\\Windows\\System\\UserPolicyMode'
|
||||
|
||||
def __init__(self, gpt_path, sid):
|
||||
self.path = gpt_path
|
||||
self.sid = sid
|
||||
self.storage = registry_factory('registry')
|
||||
self.name = ''
|
||||
|
||||
self.guid = self.path.rpartition('/')[2]
|
||||
if 'default' == self.guid:
|
||||
self.guid = 'Local Policy'
|
||||
|
||||
self._machine_path = find_dir(self.path, 'Machine')
|
||||
self._user_path = find_dir(self.path, 'User')
|
||||
self._scripts_machine_path = find_dir(self._machine_path, 'Scripts')
|
||||
self._scripts_user_path = find_dir(self._user_path, 'Scripts')
|
||||
|
||||
self.settings_list = [
|
||||
'shortcuts'
|
||||
@@ -165,6 +177,8 @@ class gpt:
|
||||
, 'inifiles'
|
||||
, 'services'
|
||||
, 'scheduledtasks'
|
||||
, 'scripts'
|
||||
, 'networkshares'
|
||||
]
|
||||
self.settings = dict()
|
||||
self.settings['machine'] = dict()
|
||||
@@ -181,76 +195,67 @@ class gpt:
|
||||
log('D23', ulogdata)
|
||||
self.settings['user'][setting] = user_preffile
|
||||
|
||||
self.settings['machine']['scripts'] = find_file(self._scripts_machine_path, 'scripts.ini')
|
||||
self.settings['user']['scripts'] = find_file(self._scripts_user_path, 'scripts.ini')
|
||||
|
||||
|
||||
def set_name(self, name):
|
||||
'''
|
||||
Set human-readable GPT name.
|
||||
'''
|
||||
self.name = name
|
||||
|
||||
def get_policy_mode(self):
|
||||
def merge_machine(self):
|
||||
'''
|
||||
Get UserPolicyMode parameter value in order to determine if it
|
||||
is possible to work with user's part of GPT. This value is
|
||||
checked only if working for user's SID.
|
||||
Merge machine settings to storage.
|
||||
'''
|
||||
upm = self.storage.get_hklm_entry(self.__user_policy_mode_key)
|
||||
if not upm:
|
||||
upm = 0
|
||||
upm = int(upm)
|
||||
if 0 > upm or 2 > upm:
|
||||
upm = 0
|
||||
try:
|
||||
# Merge machine policies to registry if possible
|
||||
if self.settings['machine']['regpol']:
|
||||
mlogdata = dict({'polfile': self.settings['machine']['regpol']})
|
||||
log('D34', mlogdata)
|
||||
util.preg.merge_polfile(self.settings['machine']['regpol'], policy_name=self.name)
|
||||
# Merge machine preferences to registry if possible
|
||||
for preference_name, preference_path in self.settings['machine'].items():
|
||||
if preference_path:
|
||||
preference_type = get_preftype(preference_path)
|
||||
logdata = dict({'pref': preference_type.value, 'sid': self.sid})
|
||||
log('D28', logdata)
|
||||
preference_parser = get_parser(preference_type)
|
||||
preference_merger = get_merger(preference_type)
|
||||
preference_objects = preference_parser(preference_path)
|
||||
preference_merger(self.storage, self.sid, preference_objects, self.name)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['gpt'] = self.name
|
||||
logdata['msg'] = str(exc)
|
||||
log('E28', logdata)
|
||||
|
||||
return upm
|
||||
|
||||
def merge(self):
|
||||
def merge_user(self):
|
||||
'''
|
||||
Merge machine and user (if sid provided) settings to storage.
|
||||
Merge user settings to storage.
|
||||
'''
|
||||
if self.sid == self.storage.get_info('machine_sid'):
|
||||
try:
|
||||
# Merge machine settings to registry if possible
|
||||
for preference_name, preference_path in self.settings['machine'].items():
|
||||
if preference_path:
|
||||
preference_type = get_preftype(preference_path)
|
||||
logdata = dict({'pref': preference_type.value, 'sid': self.sid})
|
||||
log('D28', logdata)
|
||||
preference_parser = get_parser(preference_type)
|
||||
preference_merger = get_merger(preference_type)
|
||||
preference_objects = preference_parser(preference_path)
|
||||
preference_merger(self.storage, self.sid, preference_objects, self.name)
|
||||
if self.settings['user']['regpol']:
|
||||
mulogdata = dict({'polfile': self.settings['machine']['regpol']})
|
||||
log('D35', mulogdata)
|
||||
util.preg.merge_polfile(self.settings['user']['regpol'], sid=self.sid, policy_name=self.name)
|
||||
if self.settings['machine']['regpol']:
|
||||
mlogdata = dict({'polfile': self.settings['machine']['regpol']})
|
||||
log('D34', mlogdata)
|
||||
util.preg.merge_polfile(self.settings['machine']['regpol'], policy_name=self.name)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['gpt'] = self.name
|
||||
logdata['msg'] = str(exc)
|
||||
log('E28', logdata)
|
||||
else:
|
||||
# Merge user settings if UserPolicyMode set accordingly
|
||||
# and user settings (for HKCU) are exist.
|
||||
policy_mode = upm2str(self.get_policy_mode())
|
||||
if 'Merge' == policy_mode or 'Not configured' == policy_mode:
|
||||
try:
|
||||
for preference_name, preference_path in self.settings['user'].items():
|
||||
if preference_path:
|
||||
preference_type = get_preftype(preference_path)
|
||||
logdata = dict({'pref': preference_type.value, 'sid': self.sid})
|
||||
log('D29', logdata)
|
||||
preference_parser = get_parser(preference_type)
|
||||
preference_merger = get_merger(preference_type)
|
||||
preference_objects = preference_parser(preference_path)
|
||||
preference_merger(self.storage, self.sid, preference_objects, self.name)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['gpt'] = self.name
|
||||
logdata['msg'] = str(exc)
|
||||
log('E29', logdata)
|
||||
try:
|
||||
# Merge user policies to registry if possible
|
||||
if self.settings['user']['regpol']:
|
||||
mulogdata = dict({'polfile': self.settings['user']['regpol']})
|
||||
log('D35', mulogdata)
|
||||
util.preg.merge_polfile(self.settings['user']['regpol'], sid=self.sid, policy_name=self.name)
|
||||
# Merge user preferences to registry if possible
|
||||
for preference_name, preference_path in self.settings['user'].items():
|
||||
if preference_path:
|
||||
preference_type = get_preftype(preference_path)
|
||||
logdata = dict({'pref': preference_type.value, 'sid': self.sid})
|
||||
log('D29', logdata)
|
||||
preference_parser = get_parser(preference_type)
|
||||
preference_merger = get_merger(preference_type)
|
||||
preference_objects = preference_parser(preference_path)
|
||||
preference_merger(self.storage, self.sid, preference_objects, self.name)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['gpt'] = self.name
|
||||
logdata['msg'] = str(exc)
|
||||
log('E29', logdata)
|
||||
|
||||
def find_dir(search_path, name):
|
||||
'''
|
||||
@@ -350,18 +355,3 @@ def get_local_gpt(sid):
|
||||
local_policy.set_name('Local Policy')
|
||||
|
||||
return local_policy
|
||||
|
||||
def upm2str(upm_num):
|
||||
'''
|
||||
Translate UserPolicyMode to string.
|
||||
'''
|
||||
result = 'Not configured'
|
||||
|
||||
if upm_num in [1, '1']:
|
||||
result = 'Replace'
|
||||
|
||||
if upm_num in [2, '2']:
|
||||
result = 'Merge'
|
||||
|
||||
return result
|
||||
|
||||
|
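merge_machine() and merge_user() above both follow the same dispatch: the preference file name selects a FileType, which in turn selects a parser and a merger from the tables built by pref_parsers()/pref_mergers(). A condensed sketch of that pattern with hypothetical stub readers and mergers:

from enum import Enum
from pathlib import Path

class FileType(Enum):
    SHORTCUTS = 'shortcuts.xml'
    NETWORKSHARES = 'networkshares.xml'

def read_shortcuts(path): return ['<shortcut objects from {}>'.format(path)]
def read_networkshares(path): return ['<share objects from {}>'.format(path)]
def merge_stub(storage, sid, objects, policy_name):
    print('merging {} for {}'.format(objects, sid))

PARSERS = {FileType.SHORTCUTS: read_shortcuts, FileType.NETWORKSHARES: read_networkshares}
MERGERS = {FileType.SHORTCUTS: merge_stub, FileType.NETWORKSHARES: merge_stub}

def merge_preference(storage, sid, preference_path, policy_name):
    # the lowercased file name is the FileType value, as in get_preftype()
    ftype = FileType(Path(preference_path).name.lower())
    objects = PARSERS[ftype](preference_path)
    MERGERS[ftype](storage, sid, objects, policy_name)

merge_preference(None, 'machine_sid', '/cache/Machine/Preferences/NetworkShares/NetworkShares.xml', 'Default Policy')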
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -21,18 +21,32 @@ from util.xml import get_xml_root
|
||||
def read_inifiles(inifiles_file):
|
||||
inifiles = list()
|
||||
|
||||
for inifile in get_xml_root(inifiles_file):
|
||||
ini_obj = inifile()
|
||||
for ini in get_xml_root(inifiles_file):
|
||||
prors = ini.find('Properties')
|
||||
ini_obj = inifile(prors.get('path'))
|
||||
ini_obj.set_section(prors.get('section', default=None))
|
||||
ini_obj.set_property(prors.get('property', default=None))
|
||||
ini_obj.set_value(prors.get('value', default=None))
|
||||
ini_obj.set_action(prors.get('action'))
|
||||
|
||||
inifiles.append(ini_obj)
|
||||
|
||||
return inifiles
|
||||
|
||||
def merge_inifiles(storage, sid, inifile_objects, policy_name):
|
||||
for inifile in inifile_objects:
|
||||
pass
|
||||
for iniobj in inifile_objects:
|
||||
storage.add_ini(sid, iniobj, policy_name)
|
||||
|
||||
def inifile():
|
||||
def __init__(self):
|
||||
pass
|
||||
class inifile:
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
|
||||
def set_section(self, section):
|
||||
self.section = section
|
||||
def set_property(self, property):
|
||||
self.property = property
|
||||
def set_value(self, value):
|
||||
self.value = value
|
||||
def set_action(self, action):
|
||||
self.action = action
|
||||
|
||||
|
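The inifile objects built above only carry path, section, property, value and action; the applier that consumes them is not part of this diff. A hypothetical illustration of how such an entry could be written back with the standard configparser module, assuming the usual C/R/U/D preference actions:

import configparser

class IniEntry:
    def __init__(self, path, section, prop, value, action):
        self.path, self.section = path, section
        self.property, self.value, self.action = prop, value, action

def apply_ini(entry):
    config = configparser.ConfigParser()
    config.read(entry.path)
    if entry.action in ('C', 'R', 'U'):   # create / replace / update
        if not config.has_section(entry.section):
            config.add_section(entry.section)
        config.set(entry.section, entry.property, entry.value)
    elif entry.action == 'D' and config.has_section(entry.section):
        config.remove_option(entry.section, entry.property)
    with open(entry.path, 'w') as f:
        config.write(f)

# apply_ini(IniEntry('/tmp/example.ini', 'main', 'color', 'blue', 'C'))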
56
gpoa/gpt/networkshares.py
Normal file
@@ -0,0 +1,56 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from util.xml import get_xml_root
|
||||
|
||||
def read_networkshares(networksharesxml):
|
||||
networkshares = list()
|
||||
|
||||
for share in get_xml_root(networksharesxml):
|
||||
props = share.find('Properties')
|
||||
networkshare_obj = networkshare(props.get('name'))
|
||||
networkshare_obj.set_action(props.get('action', default='C'))
|
||||
networkshare_obj.set_path(props.get('path', default=None))
|
||||
networkshare_obj.set_all_regular(props.get('allRegular', default=None))
|
||||
networkshare_obj.set_comment(props.get('comment', default=None))
|
||||
networkshare_obj.set_limitUsers(props.get('limitUsers', default=None))
|
||||
networkshare_obj.set_abe(props.get('abe', default=None))
|
||||
networkshares.append(networkshare_obj)
|
||||
|
||||
return networkshares
|
||||
|
||||
def merge_networkshares(storage, sid, networkshares_objects, policy_name):
|
||||
for networkshareobj in networkshares_objects:
|
||||
storage.add_networkshare(sid, networkshareobj, policy_name)
|
||||
|
||||
class networkshare:
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def set_action(self, action):
|
||||
self.action = action
|
||||
def set_path(self, path):
|
||||
self.path = path
|
||||
def set_all_regular(self, allRegular):
|
||||
self.allRegular = allRegular
|
||||
def set_comment(self, comment):
|
||||
self.comment = comment
|
||||
def set_limitUsers(self, limitUsers):
|
||||
self.limitUsers = limitUsers
|
||||
def set_abe(self, abe):
|
||||
self.abe = abe
|
147
gpoa/gpt/scriptsini.py
Normal file
@@ -0,0 +1,147 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import configparser
|
||||
import os
|
||||
|
||||
|
||||
def read_scripts(scripts_file):
|
||||
scripts = Scripts_lists()
|
||||
|
||||
logon_scripts = dict()
|
||||
logoff_scripts = dict()
|
||||
startup_scripts = dict()
|
||||
shutdown_scripts = dict()
|
||||
|
||||
config = configparser.ConfigParser()
|
||||
config.read(scripts_file, encoding = 'utf-16')
|
||||
scripts_file_dir = os.path.dirname(scripts_file)
|
||||
|
||||
actions = config.sections()
|
||||
|
||||
for act in actions:
|
||||
act_upper = act.upper()
|
||||
if act_upper == 'LOGON':
|
||||
section_scripts = logon_scripts
|
||||
elif act_upper == 'LOGOFF':
|
||||
section_scripts = logoff_scripts
|
||||
elif act_upper == 'STARTUP':
|
||||
section_scripts = startup_scripts
|
||||
elif act_upper == 'SHUTDOWN':
|
||||
section_scripts = shutdown_scripts
|
||||
else:
|
||||
continue
|
||||
|
||||
for key in config[act]:
|
||||
key_lower = key.lower()
|
||||
key_split = key_lower.split('cmdline')
|
||||
if len(key_split) > 1 and not key_split[1]:
|
||||
if key_split[0].isdigit():
|
||||
key_index = int(key_split[0])
|
||||
section_scripts[key_index] = Script(act, scripts_file_dir, config[act][key])
|
||||
key_split = key_lower.split('parameters')
|
||||
if len(key_split) > 1 and not key_split[1]:
|
||||
if key_split[0].isdigit():
|
||||
key_index = int(key_split[0])
|
||||
section_scripts[key_index].set_args(config[act][key])
|
||||
if logon_scripts:
|
||||
for i in sorted(logon_scripts.keys()):
|
||||
scripts.add_script(act_upper, logon_scripts[i])
|
||||
|
||||
if logoff_scripts:
|
||||
for i in sorted(logoff_scripts.keys()):
|
||||
scripts.add_script(act_upper, logoff_scripts[i])
|
||||
|
||||
if startup_scripts:
|
||||
for i in sorted(startup_scripts.keys()):
|
||||
scripts.add_script(act_upper, startup_scripts[i])
|
||||
|
||||
if shutdown_scripts:
|
||||
for i in sorted(shutdown_scripts.keys()):
|
||||
scripts.add_script(act_upper, shutdown_scripts[i])
|
||||
|
||||
|
||||
return scripts
|
||||
|
||||
def merge_scripts(storage, sid, scripts_objects, policy_name):
|
||||
for script in scripts_objects.get_logon_scripts():
|
||||
storage.add_script(sid, script, policy_name)
|
||||
for script in scripts_objects.get_logoff_scripts():
|
||||
storage.add_script(sid, script, policy_name)
|
||||
for script in scripts_objects.get_startup_scripts():
|
||||
storage.add_script(sid, script, policy_name)
|
||||
for script in scripts_objects.get_shutdown_scripts():
|
||||
storage.add_script(sid, script, policy_name)
|
||||
|
||||
class Scripts_lists:
|
||||
def __init__ (self):
|
||||
self.__logon_scripts = list()
|
||||
self.__logoff_scripts = list()
|
||||
self.__startup_scripts = list()
|
||||
self.__shutdown_scripts = list()
|
||||
|
||||
def get_logon_scripts(self):
|
||||
return self.__logon_scripts
|
||||
def get_logoff_scripts(self):
|
||||
return self.__logoff_scripts
|
||||
def get_startup_scripts(self):
|
||||
return self.__startup_scripts
|
||||
def get_shutdown_scripts(self):
|
||||
return self.__shutdown_scripts
|
||||
|
||||
def add_script(self, action, script):
|
||||
if action == 'LOGON':
|
||||
self.get_logon_scripts().append(script)
|
||||
elif action == 'LOGOFF':
|
||||
self.get_logoff_scripts().append(script)
|
||||
elif action == 'STARTUP':
|
||||
self.get_startup_scripts().append(script)
|
||||
elif action == 'SHUTDOWN':
|
||||
self.get_shutdown_scripts().append(script)
|
||||
|
||||
|
||||
class Script:
|
||||
__logon_counter = 0
|
||||
__logoff_counter = 0
|
||||
__startup_counter = 0
|
||||
__shutdown_counter = 0
|
||||
|
||||
def __init__(self, action, script_dir, script_filename):
|
||||
action_upper = action.upper()
|
||||
self.action = action_upper
|
||||
self.path = os.path.join(script_dir, action_upper, script_filename.upper())
|
||||
if not os.path.isfile(self.path):
|
||||
return None
|
||||
self.args = None
|
||||
|
||||
if action_upper == 'LOGON':
|
||||
self.number = Script.__logon_counter
|
||||
Script.__logon_counter += 1
|
||||
elif action_upper == 'LOGOFF':
|
||||
self.number = Script.__logoff_counter
|
||||
Script.__logoff_counter += 1
|
||||
elif action_upper == 'STARTUP':
|
||||
self.number = Script.__startup_counter
|
||||
Script.__startup_counter += 1
|
||||
elif action_upper == 'SHUTDOWN':
|
||||
self.number = Script.__shutdown_counter
|
||||
Script.__shutdown_counter += 1
|
||||
|
||||
def set_args(self, args):
|
||||
self.args = args
|
||||
|
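read_scripts() above orders entries by the numeric prefix of keys such as 0CmdLine and 0Parameters, splitting the lowercased key name to recover that index. A small sketch of the same matching against a hypothetical scripts.ini section (configparser lowercases option names on its own):

import configparser

SCRIPTS_INI = '''
[Logon]
0CmdLine=logon.sh
0Parameters=--verbose
1CmdLine=banner.sh
'''

config = configparser.ConfigParser()
config.read_string(SCRIPTS_INI)

logon = {}
for key in config['Logon']:
    key_lower = key.lower()
    # NCmdLine carries the script name, N is its ordering index
    head, sep, tail = key_lower.partition('cmdline')
    if sep and not tail and head.isdigit():
        logon[int(head)] = {'cmd': config['Logon'][key], 'args': None}
    # NParameters carries the arguments for the script with the same index
    head, sep, tail = key_lower.partition('parameters')
    if sep and not tail and head.isdigit():
        logon[int(head)]['args'] = config['Logon'][key]

for index in sorted(logon):
    print(index, logon[index])
# 0 {'cmd': 'logon.sh', 'args': '--verbose'}
# 1 {'cmd': 'banner.sh', 'args': None}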
@@ -50,7 +50,7 @@ class service:
|
||||
self.serviceaction = None
|
||||
|
||||
def set_clsid(self, clsid):
|
||||
self.guid = uid
|
||||
self.guid = clsid
|
||||
|
||||
def set_usercontext(self, usercontext=False):
|
||||
ctx = False
|
||||
|
@@ -47,33 +47,47 @@ from util.logging import log
|
||||
class file_runner:
|
||||
_gpoa_exe = '/usr/sbin/gpoa'
|
||||
|
||||
def __init__(self, username=None):
|
||||
def __init__(self, loglevel, username=None):
|
||||
self._user = username
|
||||
self._loglevel = loglevel
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
Call gpoa utility to generate scripts
|
||||
'''
|
||||
gpoa_cmd = [self._gpoa_exe]
|
||||
if self._loglevel != None:
|
||||
gpoa_cmd += ["--loglevel", str(self._loglevel)]
|
||||
if self._user:
|
||||
gpoa_cmd += [self._user]
|
||||
|
||||
output = subprocess.call(gpoa_cmd)
|
||||
sys.exit(output)
|
||||
subprocess.check_output(gpoa_cmd)
|
||||
|
||||
def parse_cli_arguments():
|
||||
'''
|
||||
Command line argument parser
|
||||
'''
|
||||
argparser = argparse.ArgumentParser(description='Update group policies for the specified user')
|
||||
argparser = argparse.ArgumentParser(description='Update group policies for computer and the specified user')
|
||||
argparser.add_argument('-u',
|
||||
'--user',
|
||||
default=None,
|
||||
help='Name of the user for GPO update')
|
||||
argparser.add_argument('--target',
|
||||
argparser.add_argument('-t',
|
||||
'--target',
|
||||
default=None,
|
||||
type=str,
|
||||
type=str.upper,
|
||||
choices=["ALL", "USER", "COMPUTER"],
|
||||
help='Specify if it is needed to update user\'s or computer\'s policies')
|
||||
argparser.add_argument('-l',
|
||||
'--loglevel',
|
||||
type=int,
|
||||
default=5,
|
||||
help='Set logging verbosity level')
|
||||
argparser.add_argument('-s',
|
||||
'--system',
|
||||
action='store_true',
|
||||
default=None,
|
||||
help='Run gpoa directly in system mode')
|
||||
|
||||
return argparser.parse_args()
|
||||
|
||||
@@ -83,13 +97,14 @@ def runner_factory(args, target):
|
||||
factors taken into account.
|
||||
'''
|
||||
username = None
|
||||
target = target.upper()
|
||||
if is_root():
|
||||
# Only root may specify any username to update.
|
||||
try:
|
||||
if args.user:
|
||||
username = pwd.getpwnam(args.user).pw_name
|
||||
else:
|
||||
target = 'Computer'
|
||||
target = 'COMPUTER'
|
||||
except:
|
||||
username = None
|
||||
logdata = dict({'username': args.user})
|
||||
@@ -99,30 +114,45 @@ def runner_factory(args, target):
|
||||
# itself (os.getusername()).
|
||||
username = pwd.getpwuid(os.getuid()).pw_name
|
||||
if args.user != username:
|
||||
logdata = dict({'username': args.user})
|
||||
logdata = dict({'username': username})
|
||||
log('W2', logdata)
|
||||
|
||||
if args.system:
|
||||
return try_directly(username, target, args.loglevel)
|
||||
else:
|
||||
return try_by_oddjob(username, target)
|
||||
|
||||
def try_by_oddjob(username, target):
|
||||
'''
|
||||
Run group policies applying by oddjob service
|
||||
'''
|
||||
if is_oddjobd_gpupdate_accessible():
|
||||
log('D13')
|
||||
computer_runner = None
|
||||
user_runner = None
|
||||
if target == 'All' or target == 'Computer':
|
||||
if target == 'ALL' or target == 'COMPUTER':
|
||||
computer_runner = dbus_runner()
|
||||
if username:
|
||||
if target == 'All' or target == 'User':
|
||||
if target == 'ALL' or target == 'USER':
|
||||
user_runner = dbus_runner(username)
|
||||
return (computer_runner, user_runner)
|
||||
else:
|
||||
log('W3')
|
||||
|
||||
return None
|
||||
|
||||
def try_directly(username, target, loglevel):
|
||||
'''
|
||||
Run group policies applying directly
|
||||
'''
|
||||
if is_root():
|
||||
log('D14')
|
||||
computer_runner = None
|
||||
user_runner = None
|
||||
if target == 'All' or target == 'Computer':
|
||||
computer_runner = file_runner()
|
||||
if target == 'All' or target == 'User':
|
||||
user_runner = file_runner(username)
|
||||
if target == 'ALL' or target == 'COMPUTER':
|
||||
computer_runner = file_runner(loglevel)
|
||||
if target == 'ALL' or target == 'USER':
|
||||
user_runner = file_runner(loglevel, username)
|
||||
return (computer_runner, user_runner)
|
||||
else:
|
||||
log('E1')
|
||||
@@ -134,7 +164,7 @@ def main():
|
||||
locale.bindtextdomain('gpoa', '/usr/lib/python3/site-packages/gpoa/locale')
|
||||
gettext.bindtextdomain('gpoa', '/usr/lib/python3/site-packages/gpoa/locale')
|
||||
gettext.textdomain('gpoa')
|
||||
set_loglevel(0)
|
||||
set_loglevel(args.loglevel)
|
||||
gpo_appliers = runner_factory(args, process_target(args.target))
|
||||
|
||||
if gpo_appliers:
|
||||
|
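After this change file_runner receives the log level and appends it, plus the optional username, to the gpoa command before subprocess.check_output() is called. A sketch of just the command construction, with the actual call replaced by a print:

def build_gpoa_cmd(loglevel=None, username=None, gpoa_exe='/usr/sbin/gpoa'):
    cmd = [gpoa_exe]
    if loglevel is not None:
        cmd += ['--loglevel', str(loglevel)]
    if username:
        cmd += [username]
    return cmd

print(build_gpoa_cmd(5, 'someuser'))   # ['/usr/sbin/gpoa', '--loglevel', '5', 'someuser']
print(build_gpoa_cmd(loglevel=5))      # ['/usr/sbin/gpoa', '--loglevel', '5']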
@@ -31,17 +31,14 @@ from util.util import (
|
||||
, get_policy_variants
|
||||
)
|
||||
from util.config import GPConfig
|
||||
from util.paths import get_custom_policy_dir
|
||||
|
||||
|
||||
class Runner:
|
||||
__control_path = '/usr/sbin/control'
|
||||
__systemctl_path = '/bin/systemctl'
|
||||
__etc_policy_dir = '/etc/local-policy'
|
||||
__usr_policy_dir = '/usr/share/local-policy'
|
||||
|
||||
def __init__(self):
|
||||
self.etc_policies = get_policy_entries(self.__etc_policy_dir)
|
||||
self.usr_policies = get_policy_entries(self.__usr_policy_dir)
|
||||
self.arguments = parse_arguments()
|
||||
|
||||
def parse_arguments():
|
||||
@@ -64,7 +61,8 @@ def parse_arguments():
|
||||
|
||||
parser_disable = subparsers.add_parser('disable',
|
||||
help='Disable Group Policy subsystem')
|
||||
|
||||
parser_update = subparsers.add_parser('update',
|
||||
help='Update state')
|
||||
parser_write = subparsers.add_parser('write',
|
||||
help='Operate on Group Policies (enable or disable)')
|
||||
parser_set_backend = subparsers.add_parser('set-backend',
|
||||
@@ -108,6 +106,16 @@ def parse_arguments():
|
||||
choices=['local', 'samba'],
|
||||
help='Backend (source of settings) name')
|
||||
|
||||
parser_update.add_argument('--local-policy',
|
||||
default=None,
|
||||
help='Name of local policy to enable')
|
||||
parser_update.add_argument('--backend',
|
||||
default='samba',
|
||||
type=str,
|
||||
choices=['local', 'samba'],
|
||||
help='Backend (source of settings) name')
|
||||
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
def validate_policy_name(policy_name):
|
||||
@@ -176,7 +184,11 @@ def disable_gp():
|
||||
cmd_set_local_policy = ['/usr/sbin/control', 'system-policy', 'local']
|
||||
cmd_disable_gpupdate_service = ['/bin/systemctl', 'disable', 'gpupdate.service']
|
||||
cmd_disable_gpupdate_user_service = ['/bin/systemctl', '--global', 'disable', 'gpupdate-user.service']
|
||||
cmd_disable_gpupdate_timer = ['/bin/systemctl', 'disable', 'gpupdate.timer']
|
||||
cmd_disable_gpupdate_user_timer = ['/bin/systemctl', '--global', 'disable', 'gpupdate-user.timer']
|
||||
cmd_control_system_auth = ['/usr/sbin/control', 'system-auth']
|
||||
cmd_disable_gpupdate_scripts_service = ['/bin/systemctl', 'disable', 'gpupdate-scripts-run.service']
|
||||
cmd_disable_gpupdate_scripts_user_service = ['/bin/systemctl', '--global', 'disable', 'gpupdate-scripts-run-user.service']
|
||||
|
||||
config = GPConfig()
|
||||
|
||||
@@ -192,32 +204,39 @@ def disable_gp():
|
||||
runcmd(cmd_set_local_policy)
|
||||
runcmd(cmd_disable_gpupdate_service)
|
||||
runcmd(cmd_disable_gpupdate_user_service)
|
||||
runcmd(cmd_disable_gpupdate_timer)
|
||||
runcmd(cmd_disable_gpupdate_user_timer)
|
||||
runcmd(cmd_disable_gpupdate_scripts_service)
|
||||
runcmd(cmd_disable_gpupdate_scripts_user_service)
|
||||
config.set_local_policy_template()
|
||||
config.set_backend()
|
||||
|
||||
def enable_gp(policy_name, backend_type):
|
||||
'''
|
||||
Consistently enable group policy services
|
||||
'''
|
||||
policy_dir = '/usr/share/local-policy'
|
||||
etc_policy_dir = '/etc/local-policy'
|
||||
cmd_set_gpupdate_policy = ['/usr/sbin/control', 'system-policy', 'gpupdate']
|
||||
cmd_gpoa_nodomain = ['/usr/sbin/gpoa', '--nodomain', '--loglevel', '5']
|
||||
cmd_enable_gpupdate_service = ['/bin/systemctl', 'enable', 'gpupdate.service']
|
||||
cmd_enable_gpupdate_user_service = ['/bin/systemctl', '--global', 'enable', 'gpupdate-user.service']
|
||||
cmd_enable_gpupdate_timer = ['/bin/systemctl', 'enable', 'gpupdate.timer']
|
||||
cmd_enable_gpupdate_user_timer = ['/bin/systemctl', '--global', 'enable', 'gpupdate-user.timer']
|
||||
cmd_enable_gpupdate_scripts_service = ['/bin/systemctl', 'enable', 'gpupdate-scripts-run.service']
|
||||
cmd_enable_gpupdate_user_scripts_service = ['/bin/systemctl', '--global', 'enable', 'gpupdate-scripts-run-user.service']
|
||||
|
||||
config = GPConfig()
|
||||
|
||||
custom_policy_dir = get_custom_policy_dir()
|
||||
if not os.path.isdir(custom_policy_dir):
|
||||
os.makedirs(custom_policy_dir)
|
||||
|
||||
target_policy_name = get_default_policy_name()
|
||||
if policy_name:
|
||||
if validate_policy_name(policy_name):
|
||||
target_policy_name = policy_name
|
||||
|
||||
print (target_policy_name)
|
||||
default_policy_name = os.path.join(policy_dir, target_policy_name)
|
||||
|
||||
if not os.path.isdir(etc_policy_dir):
|
||||
os.makedirs(etc_policy_dir)
|
||||
|
||||
config.set_local_policy_template(default_policy_name)
|
||||
config.set_local_policy_template(target_policy_name)
|
||||
config.set_backend(backend_type)
|
||||
|
||||
# Enable oddjobd_gpupdate in PAM config
|
||||
@@ -239,6 +258,32 @@ def enable_gp(policy_name, backend_type):
|
||||
disable_gp()
|
||||
return
|
||||
|
||||
# Enable gpupdate-scripts-run.service
|
||||
if not rollback_on_error(cmd_enable_gpupdate_scripts_service):
|
||||
return
|
||||
if not is_unit_enabled('gpupdate-scripts-run.service'):
|
||||
disable_gp()
|
||||
return
|
||||
# Enable gpupdate-scripts-run-user.service for all users
|
||||
if not rollback_on_error(cmd_enable_gpupdate_user_scripts_service):
|
||||
return
|
||||
if not is_unit_enabled('gpupdate-scripts-run-user.service', unit_global=True):
|
||||
disable_gp()
|
||||
return
|
||||
|
||||
# Enable gpupdate.timer
|
||||
if not rollback_on_error(cmd_enable_gpupdate_timer):
|
||||
return
|
||||
if not is_unit_enabled('gpupdate.timer'):
|
||||
disable_gp()
|
||||
return
|
||||
# Enable gpupdate-setup.timer for all users
|
||||
if not rollback_on_error(cmd_enable_gpupdate_user_timer):
|
||||
return
|
||||
if not is_unit_enabled('gpupdate-user.timer', unit_global=True):
|
||||
disable_gp()
|
||||
return
|
||||
|
||||
def act_list():
|
||||
'''
|
||||
Show list of available templates of Local Policy
|
||||
@@ -310,6 +355,7 @@ def main():
|
||||
action['set-backend'] = act_set_backend
|
||||
action['write'] = act_write
|
||||
action['enable'] = act_enable
|
||||
action['update'] = act_enable
|
||||
action['disable'] = disable_gp
|
||||
action['active-policy'] = act_active_policy
|
||||
action['active-backend'] = act_active_backend
|
||||
@@ -317,6 +363,9 @@ def main():
|
||||
|
||||
if arguments.action == None:
|
||||
action['status']()
|
||||
elif arguments.action == 'update':
|
||||
if get_status():
|
||||
action[arguments.action](arguments.local_policy, arguments.backend)
|
||||
elif arguments.action == 'enable':
|
||||
action[arguments.action](arguments.local_policy, arguments.backend)
|
||||
elif arguments.action == 'write':
|
||||
|
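enable_gp() above enables each unit, verifies it with is_unit_enabled(), and calls disable_gp() to roll everything back as soon as one check fails. A condensed sketch of that control flow with the systemctl calls stubbed out so it can be read on its own:

UNITS = ['gpupdate.service', 'gpupdate-scripts-run.service', 'gpupdate.timer']

def runcmd(cmd):
    print('would run:', ' '.join(cmd))
    return True  # stub: pretend systemctl succeeded

def is_unit_enabled(unit, unit_global=False):
    return True  # stub: pretend the unit reports "enabled"

def disable_gp():
    print('rolling back: disabling group policy services')

def enable_units():
    for unit in UNITS:
        if not runcmd(['/bin/systemctl', 'enable', unit]):
            disable_gp()
            return False
        if not is_unit_enabled(unit):
            disable_gp()
            return False
    return True

enable_units()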
@@ -41,6 +41,27 @@ msgstr "Получен объект групповой политики"
|
||||
msgid "Unknown info code"
|
||||
msgstr "Неизвестный код информационного сообщения"
|
||||
|
||||
msgid "Working with control"
|
||||
msgstr "Применение настроек control"
|
||||
|
||||
msgid "Working with systemd"
|
||||
msgstr "Работа с systemd"
|
||||
|
||||
msgid "Unable to work with systemd unit"
|
||||
msgstr "Невозможно создать оъект для unit systemd"
|
||||
|
||||
msgid "Starting systemd unit"
|
||||
msgstr "Запуск unit systemd"
|
||||
|
||||
msgid "Firefox policy"
|
||||
msgstr "Политика Firefox"
|
||||
|
||||
msgid "Chromium policy"
|
||||
msgstr "Политика Chromium"
|
||||
|
||||
msgid "Set user property to"
|
||||
msgstr "Установка свойств для пользователя"
|
||||
|
||||
# Error
|
||||
msgid "Insufficient permissions to run gpupdate"
|
||||
msgstr "Недостаточно прав для запуска gpupdate"
|
||||
@@ -132,6 +153,83 @@ msgstr "Ошибка слияния пользовательской части
|
||||
msgid "Unknown error code"
|
||||
msgstr "Неизвестный код ошибки"
|
||||
|
||||
msgid "Unable to work with control"
|
||||
msgstr "Не удалось применить настройки control"
|
||||
|
||||
msgid "Control applier for machine will not be started"
|
||||
msgstr "Приминение Control для машины не удалось"
|
||||
|
||||
msgid "Error getting control"
|
||||
msgstr "Ошибка установки control"
|
||||
|
||||
msgid "Is not in possible values for control"
|
||||
msgstr "Не входит в возможные значения для control"
|
||||
|
||||
msgid "Unable to set"
|
||||
msgstr "Невозможно установить"
|
||||
|
||||
msgid "Unable to generate file"
|
||||
msgstr "Невозможно создать файл"
|
||||
|
||||
msgid "Failed applying unit"
|
||||
msgstr "Не удалось применить настройки"
|
||||
|
||||
msgid "Unable to start systemd unit"
|
||||
msgstr "Невозможно запустить systemd unit"
|
||||
|
||||
msgid "Unable to cache specified URI"
|
||||
msgstr "Невозможно кэшировать указанный URI"
|
||||
|
||||
msgid "Unable to cache specified URI for machine"
|
||||
msgstr "Невозможно кэшировать указанный URI для компьютера"
|
||||
|
||||
msgid "Error recompiling global GSettings schemas"
|
||||
msgstr "Ошибка перекомпиляции глобальных GSettings schemas"
|
||||
|
||||
msgid "Error update configuration dconf"
|
||||
msgstr "Ошибка обновления конфигурации dconf"
|
||||
|
||||
msgid "Unable to cache specified URI for user"
|
||||
msgstr "Невозможно кэшировать указанный URI для пользователя"
|
||||
|
||||
msgid "Error during attempt to read Chromium preferences for user"
|
||||
msgstr "Ошибка при попытке прочитать настройки Chromium для пользователя"
|
||||
|
||||
msgid "Fail for applying shortcut to file with %"
|
||||
msgstr "Не удалось применить ярлык к файлу с %"
|
||||
|
||||
msgid "Fail for applying shortcut to not absolute path"
|
||||
msgstr "Не удалось применить ярлык к не абсолютному пути"
|
||||
|
||||
msgid "Error running pkcon_runner sync for machine"
|
||||
msgstr "Ошибка при запуске pkcon_runner синхронно для компьютера"
|
||||
|
||||
msgid "Package install error"
|
||||
msgstr "Ошибка установки пакета"
|
||||
|
||||
msgid "Package remove error"
|
||||
msgstr "Ошибка удаления пакета"
|
||||
|
||||
msgid "Error running pkcon_runner sync for user"
|
||||
msgstr "Ошибка при запуске pkcon_runner синхронно для пользователя"
|
||||
|
||||
msgid "Error running pkcon_runner async for machine"
|
||||
msgstr "Ошибка при запуске pkcon_runner асинхронно для компьютера"
|
||||
|
||||
msgid "Error running pkcon_runner async for user"
|
||||
msgstr "Ошибка при запуске pkcon_runner асинхронно для пользователя"
|
||||
|
||||
msgid "Error merging user GPT (from machine GPO)"
|
||||
msgstr "Ошибка слияния пользовательской групповой политики (машинная часть)"
|
||||
|
||||
msgid "Error cleaning directory for machine"
|
||||
msgstr "Ошибка очистки каталога для машины"
|
||||
|
||||
msgid "Error cleaning directory for user"
|
||||
msgstr "Ошибка очистки каталога для пользователя"
|
||||
|
||||
# Error_end
|
||||
|
||||
# Debug
|
||||
msgid "The GPOA process was started for user"
|
||||
msgstr "Произведён запуск GPOA для обновления политик пользователя"
|
||||
@@ -295,6 +393,377 @@ msgstr "Сохранение информации о переменных окр
|
||||
msgid "Unknown debug code"
|
||||
msgstr "Неизвестный отладочный код"
|
||||
|
||||
msgid "Running Control applier for machine"
|
||||
msgstr "Начато применение Control для машины"
|
||||
|
||||
msgid "Setting control"
|
||||
msgstr "Установка control"
|
||||
|
||||
msgid "Deny_All setting found"
|
||||
msgstr "Deny_All настройка найдена"
|
||||
|
||||
msgid "Deny_All setting for user"
|
||||
msgstr "Deny_All настройка для пользователя"
|
||||
|
||||
msgid "Deny_All setting not found"
|
||||
msgstr "Deny_All настройка не найдена"
|
||||
|
||||
msgid "Deny_All setting not found for user"
|
||||
msgstr "Deny_All настройка не найдена для пользователя"
|
||||
|
||||
msgid "Running Polkit applier for machine"
|
||||
msgstr "Начато применение настроек Polkit для машины"
|
||||
|
||||
msgid "Running Polkit applier for user in administrator context"
|
||||
msgstr "Начато применение настроек Polkit пользователя в контексте администратора"
|
||||
|
||||
msgid "Polkit applier for machine will not be started"
|
||||
msgstr "Polkit для машины не запускается"
|
||||
|
||||
msgid "Polkit applier for user in administrator context will not be started"
|
||||
msgstr "Polkit для пользователя в контексте администратора не запускается"
|
||||
|
||||
msgid "Generated file"
|
||||
msgstr "Созданный файл"
|
||||
|
||||
msgid "Running systemd applier for machine"
|
||||
msgstr "Начато применение настроек systemd для машины"
|
||||
|
||||
msgid "Running systemd applier for machine will not be started"
|
||||
msgstr "Применение настроек systemd для машины не удалось"
|
||||
|
||||
msgid "Running GSettings applier for machine"
|
||||
msgstr "Запуск применение настроек GSettings для машины"
|
||||
|
||||
msgid "GSettings applier for machine will not be started"
|
||||
msgstr "Применение настроек GSettings для машины не удалось"
|
||||
|
||||
msgid "Removing GSettings policy file from previous run"
|
||||
msgstr "Удаление файла политики GSettings от предыдущего запуска"
|
||||
|
||||
msgid "Mapping Windows policies to GSettings policies"
|
||||
msgstr "Сопоставление политик Windows с политиками GSettings"
|
||||
|
||||
msgid "GSettings windows policies mapping not enabled"
|
||||
msgstr "Сопоставление политик Windows GSettings не включено"
|
||||
|
||||
msgid "Applying user setting"
|
||||
msgstr "Применение пользовательских настроек"
|
||||
|
||||
msgid "Found GSettings windows mapping"
|
||||
msgstr "Найдены соответствия настроек windows-GSettings"
|
||||
|
||||
msgid "Running GSettings applier for user in user context"
|
||||
msgstr "Запуск применение настроек GSettings в контексте пользователя"
|
||||
|
||||
msgid "GSettings applier for user in user context will not be started"
|
||||
msgstr "GSettings в контексте пользователя не запускается"
|
||||
|
||||
msgid "Applying machine setting"
|
||||
msgstr "Применение настроек машины"
|
||||
|
||||
msgid "Path not resolved as UNC URI"
|
||||
msgstr "Путь не разрешен"
|
||||
|
||||
msgid "Getting cached file for URI"
|
||||
msgstr "Получение кешированного файла для URI"
|
||||
|
||||
msgid "Wrote Firefox preferences to"
|
||||
msgstr "Настройки Firefox записаны в"
|
||||
|
||||
msgid "Found Firefox profile in"
|
||||
msgstr "Найден профиль Firefox в"
|
||||
|
||||
msgid "Running Firefox applier for machine"
|
||||
msgstr "Запуск применение настроек Firefox для машины"
|
||||
|
||||
msgid "Firefox applier for machine will not be started"
|
||||
msgstr "Применение настроек Firefox для компьютера не запускается"
|
||||
|
||||
msgid "Running Chromium applier for machine"
|
||||
msgstr "Запуск применение настроек Chromium для машины"
|
||||
|
||||
msgid "Chromium applier for machine will not be started"
|
||||
msgstr "Применение настроек Chromium для компьютера не запускается"
|
||||
|
||||
msgid "Wrote Chromium preferences to"
|
||||
msgstr "Настройки Chromium записаны в"
|
||||
|
||||
msgid "Running Shortcut applier for machine"
|
||||
msgstr "Запуск применение ярлыков для машины"
|
||||
|
||||
msgid "Shortcut applier for machine will not be started"
|
||||
msgstr "Применение ярлыков для компьютера не запускается"
|
||||
|
||||
msgid "No shortcuts to process for"
|
||||
msgstr "Нет ярлыков для обработки"
|
||||
|
||||
msgid "Running Shortcut applier for user in user context"
|
||||
msgstr "Запуск применение ярлыков в контексте пользователя"
|
||||
|
||||
msgid "Shortcut applier for user in user context will not be started"
|
||||
msgstr "Применение ярлыков в контексте пользователя не запускается"
|
||||
|
||||
msgid "Running Shortcut applier for user in administrator context"
|
||||
msgstr "Запуск применение ярлыков в контексте администратора"
|
||||
|
||||
msgid "Shortcut applier for user in administrator context will not be started"
|
||||
msgstr "Применение ярлыков в контексте администратора не запускается"
|
||||
|
||||
msgid "Try to expand path for shortcut"
|
||||
msgstr "Попытка расширить путь для ярлыка"
|
||||
|
||||
msgid "Applying shortcut file to"
|
||||
msgstr "Применение ярлыка к файлу"
|
||||
|
||||
msgid "Running Folder applier for machine"
|
||||
msgstr "Запуск применение папок для машины"
|
||||
|
||||
msgid "Folder applier for machine will not be started"
|
||||
msgstr "Применение папок для машины не запускается"
|
||||
|
||||
msgid "Folder creation skipped for machine"
|
||||
msgstr "Создание папки для машины пропущено"
|
||||
|
||||
msgid "Folder creation skipped for user"
|
||||
msgstr "Создание папки для пользователя пропущено"
|
||||
|
||||
msgid "Running Folder applier for user in user context"
|
||||
msgstr "Запуск применение папок для пользователя в контексте пользователя"
|
||||
|
||||
msgid "Folder applier for user in user context will not be started"
|
||||
msgstr "Применение папок для пользователя в контексте пользователя не запускается"
|
||||
|
||||
msgid "Running CUPS applier for machine"
|
||||
msgstr "Запуск применение настроек CUPS для машины"
|
||||
|
||||
msgid "CUPS applier for machine will not be started"
|
||||
msgstr "Применение настроек CUPS для машины не запускается"
|
||||
|
||||
msgid "Running CUPS applier for user in administrator context"
|
||||
msgstr "Запуск применение настроек CUPS для пользователя в контексте администратора"
|
||||
|
||||
msgid "CUPS applier for user in administrator context will not be started"
|
||||
msgstr "Применение настроек CUPS для пользователя в контексте администратора не запускается"
|
||||
|
||||
msgid "Running Firewall applier for machine"
|
||||
msgstr "Запуск применение настроек Firewall для машины"
|
||||
|
||||
msgid "Firewall is enabled"
|
||||
msgstr "Firewall включен"
|
||||
|
||||
msgid "Firewall is disabled, settings will be reset"
|
||||
msgstr "Firewall отключен, настройки будут сброшены"
|
||||
|
||||
msgid "Firewall applier will not be started"
|
||||
msgstr "Применение настроек Firewall не запускается"
|
||||
|
||||
msgid "Running NTP applier for machine"
|
||||
msgstr "Запуск применение настроек NTP для машины"
|
||||
|
||||
msgid "NTP server is configured to"
|
||||
msgstr "Сервер NTP настроен на"
|
||||
|
||||
msgid "Starting Chrony daemon"
|
||||
msgstr "Запуск демона Chrony"
|
||||
|
||||
msgid "Setting reference NTP server to"
|
||||
msgstr "Установка эталонного сервера NTP на"
|
||||
|
||||
msgid "Stopping Chrony daemon"
|
||||
msgstr "Остановка демона Chrony"
|
||||
|
||||
msgid "Configuring NTP server..."
|
||||
msgstr "Настройка NTP-сервера ..."
|
||||
|
||||
msgid "NTP server is enabled"
|
||||
msgstr "Сервер NTP включен"
|
||||
|
||||
msgid "NTP server is disabled"
|
||||
msgstr "NTP сервер отключен"
|
||||
|
||||
msgid "NTP server is not configured"
|
||||
msgstr "NTP сервер не настроен"
|
||||
|
||||
msgid "NTP client is enabled"
|
||||
msgstr "Клиент NTP включен"
|
||||
|
||||
msgid "NTP client is disabled"
|
||||
msgstr "Клиент NTP отключен"
|
||||
|
||||
msgid "NTP client is not configured"
|
||||
msgstr "NTP клиент не настроен"
|
||||
|
||||
msgid "NTP applier for machine will not be started"
|
||||
msgstr "Применение настроек NTP для машины не запускается"
|
||||
|
||||
msgid "Running Envvar applier for machine"
|
||||
msgstr "Запуск применение настроек Envvar для машины"
|
||||
|
||||
msgid "Envvar applier for machine will not be started"
|
||||
msgstr "Применение настроек Envvar для машины не запускается"
|
||||
|
||||
msgid "Running Envvar applier for user in user context"
|
||||
msgstr "Запуск применение настроек Envvar для пользователя в контексте пользователя"
|
||||
|
||||
msgid "Envvar applier for user in user context will not be started"
|
||||
msgstr "Применение настроек Envvar для пользователя в контексте пользователя не запускается"
|
||||
|
||||
msgid "Running Package applier for machine"
|
||||
msgstr "Запуск установки пакетов для машины"
|
||||
|
||||
msgid "Package applier for machine will not be started"
|
||||
msgstr "Применение установки пакетов для машины не запускается"
|
||||
|
||||
msgid "Running Package applier for user in administrator context"
|
||||
msgstr "Запуск установки пакетов для пользователя в контексте администратора"
|
||||
|
||||
msgid "Package applier for user in administrator context will not be started"
|
||||
msgstr "Применение установки пакетов для пользователя в контексте администратора не запускается"
|
||||
|
||||
msgid "Running pkcon_runner to install and remove packages"
|
||||
msgstr "Запуск pkcon_runner для установки и удаления пакетов"
|
||||
|
||||
msgid "Run apt-get update"
|
||||
msgstr "Запускаем apt-get update"
|
||||
|
||||
msgid "Error run apt-get update"
|
||||
msgstr "Ошибка запуска apt-get update"
|
||||
|
||||
msgid "Run user context applier with dropped privileges"
|
||||
msgstr "Запуск из контекста пользователя с удаленными привилегиями"
|
||||
|
||||
msgid "Run forked process with droped privileges"
|
||||
msgstr "Запустить разветвленный процесс с удаленными привилегиями"
|
||||
|
||||
msgid "Found connection by org.freedesktop.DBus.GetConnectionUnixProcessID"
|
||||
msgstr "Найдено соединение org.freedesktop.DBus.GetConnectionUnixProcessID"
|
||||
|
||||
msgid "Kill dbus-daemon and dconf-service in user context"
|
||||
msgstr "Остановка dbus-daemon и dconf-service в контексте пользователя"
|
||||
|
||||
msgid "Running CIFS applier for user in administrator context"
|
||||
msgstr "Запуск применение настроек CIFS для пользователя в контексте администратора"
|
||||
|
||||
msgid "CIFS applier for user in administrator context will not be started"
|
||||
msgstr "Применение настроек CIFS для пользователя в контексте администратора не запускается"
|
||||
|
||||
msgid "Installing the package"
|
||||
msgstr "Установка пакета"
|
||||
|
||||
msgid "Removing a package"
|
||||
msgstr "Удаление пакета"
|
||||
|
||||
msgid "Failed to found gsettings for machine"
|
||||
msgstr "Не удалось найти настройки gsettings для машины"
|
||||
|
||||
msgid "Failed to found user gsettings"
|
||||
msgstr "Не удалось найти настройки gsettings пользователя"
|
||||
|
||||
msgid "Configure user Group Policy loopback processing mode"
|
||||
msgstr "Настройка режима обработки замыкания пользовательской групповой политики"
|
||||
|
||||
msgid "Saving information about script"
|
||||
msgstr "Сохранение информации о скрипте"
|
||||
|
||||
msgid "No machine scripts directory to clean up"
|
||||
msgstr "Нет каталога машинных скриптов для очистки"
|
||||
|
||||
msgid "No user scripts directory to clean up"
|
||||
msgstr "Нет каталога пользовательских скриптов для очистки"
|
||||
|
||||
msgid "Prepare Scripts applier for machine"
|
||||
msgstr "Подготовка к применению машинных скриптов"
|
||||
|
||||
msgid "Scripts applier for machine will not be started"
|
||||
msgstr "Применение машинных скриптов не запускается"
|
||||
|
||||
msgid "Prepare Scripts applier for user in user context"
|
||||
msgstr "Подготовка к применению скриптов пользователя в его контексте"
|
||||
|
||||
msgid "Scripts applier for user in user context will not be started"
|
||||
msgstr "Применение скриптов пользователя в его контексте не запускается"
|
||||
|
||||
msgid "Clean machine scripts directory"
|
||||
msgstr "Очистка каталога машинных скриптов"
|
||||
|
||||
msgid "Clean user scripts directory"
|
||||
msgstr "Очистка каталога пользовательских скриптов"
|
||||
|
||||
msgid "Saving information about file"
|
||||
msgstr "Сохранение информации о файле"
|
||||
|
||||
msgid "Failed to return file path"
|
||||
msgstr "Не удалось вернуть путь к файлу"
|
||||
|
||||
msgid "Failed to create file"
|
||||
msgstr "Не удалось создать файл"
|
||||
|
||||
msgid "Failed to delete file"
|
||||
msgstr "Не удалось удалить файл"
|
||||
|
||||
msgid "Failed to update file"
|
||||
msgstr "Не удалось обновить файл"
|
||||
|
||||
msgid "Running File copy applier for machine"
|
||||
msgstr "Запуск применение настроек копирования файлов для машины"
|
||||
|
||||
msgid "Running File copy applier for machine will not be started"
|
||||
msgstr "Применение настроек копирования файлов для машины не будет запущено"
|
||||
|
||||
msgid "Running File copy applier for user in administrator context"
|
||||
msgstr "Запуск применение настроек копирования файлов для пользователя в контексте администратора"
|
||||
|
||||
msgid "Running File copy applier for user in administrator context will not be started"
|
||||
msgstr "Применение настроек копирования файлов для пользователя в контексте администратора не будет запущено"
|
||||
|
||||
msgid "Running ini applier for machine"
|
||||
msgstr "Запуск применение настроек ini файлов для машины"
|
||||
|
||||
msgid "Running ini applier for machine will not be started"
|
||||
msgstr "Применение настроек ini файлов для машины не будет запущено"
|
||||
|
||||
msgid "Running ini applier for user in administrator context"
|
||||
msgstr "Запуск применение настроек ini файлов для пользователя в контексте администратора"
|
||||
|
||||
msgid "Running ini applier for user in administrator context will not be started"
|
||||
msgstr "Применение настроек ini файлов для пользователя в контексте администратора не будет запущено"
|
||||
|
||||
msgid "Ini-file path not recognized"
|
||||
msgstr "Путь к ini-файлу не распознан"
|
||||
|
||||
msgid "Ini-file is not readable"
|
||||
msgstr "Ini-файл не читается"
|
||||
|
||||
msgid "Saving information about ini-file"
|
||||
msgstr "Сохранение информации об ini-файле"
|
||||
|
||||
msgid "Dictionary key generation failed"
|
||||
msgstr "Формирования ключа словаря не удалось"
|
||||
|
||||
msgid "Saving information about network shares"
|
||||
msgstr "Сохранение информации о сетевых ресурсах"
|
||||
|
||||
msgid "Running networkshare applier for machine"
|
||||
msgstr "Запуск применение настроек сетевых каталогов для машины"
|
||||
|
||||
msgid "Running networkshare applier for machine will not be starte"
|
||||
msgstr "Применение настроек сетевых каталогов для машины не будет запущено"
|
||||
|
||||
msgid "Apply network share data action failed"
|
||||
msgstr "Не удалось применить действие с данными общего сетевого ресурса"
|
||||
|
||||
msgid "Running yandex_browser_applier for machine"
|
||||
msgstr "Запуск yandex_browser_applier для машины"
|
||||
|
||||
msgid "Yandex_browser_applier for machine will not be started"
|
||||
msgstr "Yandex_browser_applier для машины не запустится"
|
||||
|
||||
msgid "Wrote YandexBrowser preferences to"
|
||||
msgstr "Запись настройки Яндекс Браузера в"
|
||||
|
||||
# Debug_end
|
||||
|
||||
# Warning
|
||||
msgid "Unable to perform gpupdate for non-existent user, will update machine settings"
|
||||
msgstr "Невозможно запустить gpupdate для несуществующего пользователя, будут обновлены настройки машины"
|
||||
@@ -315,6 +784,30 @@ msgstr "Пакет ADP не установлен, плагин не будет
|
||||
msgid "Unknown warning code"
|
||||
msgstr "Неизвестный код предупреждения"
|
||||
|
||||
msgid "Unable to resolve GSettings parameter"
|
||||
msgstr "Не удалось установить параметр GSettings"
|
||||
|
||||
msgid "No home directory exists for user"
|
||||
msgstr "Для пользователя не существует домашнего каталога"
|
||||
|
||||
msgid "User's shortcut not placed to home directory"
|
||||
msgstr "Ярлык пользователя не помещен в домашний каталог"
|
||||
|
||||
msgid "CUPS is not installed: no printer settings will be deployed"
|
||||
msgstr "CUPS не установлен: настройки принтера не будут развернуты"
|
||||
|
||||
msgid "Unsupported NTP server type"
|
||||
msgstr "Неподдерживаемый тип сервера NTP"
|
||||
|
||||
msgid "Failed to read the list of files"
|
||||
msgstr "Не удалось прочитать список файлов"
|
||||
|
||||
msgid "Failed to caching the file"
|
||||
msgstr "Не удалось кэшировать файл"
|
||||
|
||||
msgid "Could not create a valid list of keys"
|
||||
msgstr "Не удалось создать допустимый список ключей"
|
||||
|
||||
# Fatal
|
||||
msgid "Unable to refresh GPO list"
|
||||
msgstr "Невозможно обновить список объектов групповых политик"
|
||||
|
@@ -23,6 +23,13 @@ def info_code(code):
|
||||
info_ids = dict()
|
||||
info_ids[1] = 'Got GPO list for username'
|
||||
info_ids[2] = 'Got GPO'
|
||||
info_ids[3] = 'Working with control'
|
||||
info_ids[4] = 'Working with systemd'
|
||||
info_ids[5] = 'Unable to work with systemd unit'
|
||||
info_ids[6] = 'Starting systemd unit'
|
||||
info_ids[7] = 'Firefox policy'
|
||||
info_ids[8] = 'Chromium policy'
|
||||
info_ids[9] = 'Set user property to'
|
||||
|
||||
return info_ids.get(code, 'Unknown info code')
|
||||
|
||||
@@ -66,6 +73,33 @@ def error_code(code):
|
||||
error_ids[36] = 'Error getting cached file for URI'
|
||||
error_ids[37] = 'Error caching file URIs'
|
||||
error_ids[38] = 'Unable to cache specified URI'
|
||||
error_ids[39] = 'Unable to work with control'
|
||||
error_ids[40] = 'Control applier for machine will not be started'
|
||||
error_ids[41] = 'Error getting control'
|
||||
error_ids[42] = 'Is not in possible values for control'
|
||||
error_ids[43] = 'Unable to set'
|
||||
error_ids[44] = 'Unable to generate file'
|
||||
error_ids[45] = 'Failed applying unit'
|
||||
error_ids[46] = 'Unable to start systemd unit'
|
||||
error_ids[47] = 'Unable to cache specified URI for machine'
|
||||
error_ids[48] = 'Error recompiling global GSettings schemas'
|
||||
error_ids[49] = 'Error update configuration dconf'
|
||||
error_ids[50] = 'Unable to cache specified URI for user'
|
||||
error_ids[52] = 'Error during attempt to read Chromium preferences for user'
|
||||
error_ids[53] = 'Fail for applying shortcut to file with \'%\''
|
||||
error_ids[54] = 'Fail for applying shortcut to not absolute path'
|
||||
error_ids[55] = 'Error running pkcon_runner sync for machine'
|
||||
error_ids[56] = 'Error run apt-get update'
|
||||
error_ids[57] = 'Package install error'
|
||||
error_ids[58] = 'Package remove error'
|
||||
error_ids[59] = 'Is not in possible values for control'
|
||||
error_ids[60] = 'Error running pkcon_runner sync for user'
|
||||
error_ids[61] = 'Error running pkcon_runner async for machine'
|
||||
error_ids[62] = 'Error running pkcon_runner async for user'
|
||||
error_ids[63] = 'Error merging user GPT (from machine GPO)'
|
||||
error_ids[64] = 'Error to cleanup directory for machine'
|
||||
error_ids[65] = 'Error to cleanup directory for user'
|
||||
|
||||
|
||||
return error_ids.get(code, 'Unknown error code')
|
||||
|
||||
@@ -133,6 +167,129 @@ def debug_code(code):
|
||||
debug_ids[60] = 'Running GPOA by root for user'
|
||||
debug_ids[61] = 'The GPOA process was started for computer'
|
||||
debug_ids[62] = 'Path not resolved as UNC URI'
|
||||
debug_ids[63] = 'Delete HKLM branch key'
|
||||
debug_ids[64] = 'Delete HKCU branch key'
|
||||
debug_ids[65] = 'Delete HKLM branch key error'
|
||||
debug_ids[66] = 'Delete HKCU branch key error'
|
||||
debug_ids[67] = 'Running Control applier for machine'
|
||||
debug_ids[68] = 'Setting control'
|
||||
debug_ids[69] = 'Deny_All setting found'
|
||||
debug_ids[70] = 'Deny_All setting for user'
|
||||
debug_ids[71] = 'Deny_All setting not found'
|
||||
debug_ids[72] = 'Deny_All setting not found for user'
|
||||
debug_ids[73] = 'Running Polkit applier for machine'
|
||||
debug_ids[74] = 'Running Polkit applier for user in administrator context'
|
||||
debug_ids[75] = 'Polkit applier for machine will not be started'
|
||||
debug_ids[76] = 'Polkit applier for user in administrator context will not be started'
|
||||
debug_ids[77] = 'Generated file'
|
||||
debug_ids[78] = 'Running systemd applier for machine'
|
||||
debug_ids[79] = 'Running systemd applier for machine will not be started'
|
||||
debug_ids[80] = 'Running GSettings applier for machine'
|
||||
debug_ids[81] = 'GSettings applier for machine will not be started'
|
||||
debug_ids[82] = 'Removing GSettings policy file from previous run'
|
||||
debug_ids[83] = 'Mapping Windows policies to GSettings policies'
|
||||
debug_ids[84] = 'GSettings windows policies mapping not enabled'
|
||||
debug_ids[85] = 'Applying user setting'
|
||||
debug_ids[86] = 'Found GSettings windows mapping'
|
||||
debug_ids[87] = 'Running GSettings applier for user in user context'
|
||||
debug_ids[88] = 'GSettings applier for user in user context will not be started'
|
||||
debug_ids[89] = 'Applying machine setting'
|
||||
debug_ids[90] = 'Getting cached file for URI'
|
||||
debug_ids[91] = 'Wrote Firefox preferences to'
|
||||
debug_ids[92] = 'Found Firefox profile in'
|
||||
debug_ids[93] = 'Running Firefox applier for machine'
|
||||
debug_ids[94] = 'Firefox applier for machine will not be started'
|
||||
debug_ids[95] = 'Running Chromium applier for machine'
|
||||
debug_ids[96] = 'Chromium applier for machine will not be started'
|
||||
debug_ids[97] = 'Wrote Chromium preferences to'
|
||||
debug_ids[98] = 'Running Shortcut applier for machine'
|
||||
debug_ids[99] = 'Shortcut applier for machine will not be started'
|
||||
debug_ids[100] = 'No shortcuts to process for'
|
||||
debug_ids[101] = 'Running Shortcut applier for user in user context'
|
||||
debug_ids[102] = 'Shortcut applier for user in user context will not be started'
|
||||
debug_ids[103] = 'Running Shortcut applier for user in administrator context'
|
||||
debug_ids[104] = 'Shortcut applier for user in administrator context will not be started'
|
||||
debug_ids[105] = 'Try to expand path for shortcut'
|
||||
debug_ids[106] = 'Applying shortcut file to'
|
||||
debug_ids[107] = 'Running Folder applier for machine'
|
||||
debug_ids[108] = 'Folder applier for machine will not be started'
|
||||
debug_ids[109] = 'Folder creation skipped for machine'
|
||||
debug_ids[110] = 'Folder creation skipped for user'
|
||||
debug_ids[111] = 'Running Folder applier for user in user context'
|
||||
debug_ids[112] = 'Folder applier for user in user context will not be started'
|
||||
debug_ids[113] = 'Running CUPS applier for machine'
|
||||
debug_ids[114] = 'CUPS applier for machine will not be started'
|
||||
debug_ids[115] = 'Running CUPS applier for user in administrator context'
|
||||
debug_ids[116] = 'CUPS applier for user in administrator context will not be started'
|
||||
debug_ids[117] = 'Running Firewall applier for machine'
|
||||
debug_ids[118] = 'Firewall is enabled'
|
||||
debug_ids[119] = 'Firewall is disabled, settings will be reset'
|
||||
debug_ids[120] = 'Firewall applier will not be started'
|
||||
debug_ids[121] = 'Running NTP applier for machine'
|
||||
debug_ids[122] = 'NTP server is configured to'
|
||||
debug_ids[123] = 'Starting Chrony daemon'
|
||||
debug_ids[124] = 'Setting reference NTP server to'
|
||||
debug_ids[125] = 'Stopping Chrony daemon'
|
||||
debug_ids[126] = 'Configuring NTP server...'
|
||||
debug_ids[127] = 'NTP server is enabled'
|
||||
debug_ids[128] = 'NTP server is disabled'
|
||||
debug_ids[129] = 'NTP server is not configured'
|
||||
debug_ids[130] = 'NTP client is enabled'
|
||||
debug_ids[131] = 'NTP client is disabled'
|
||||
debug_ids[132] = 'NTP client is not configured'
|
||||
debug_ids[133] = 'NTP applier for machine will not be started'
|
||||
debug_ids[134] = 'Running Envvar applier for machine'
|
||||
debug_ids[135] = 'Envvar applier for machine will not be started'
|
||||
debug_ids[136] = 'Running Envvar applier for user in user context'
|
||||
debug_ids[137] = 'Envvar applier for user in user context will not be started'
|
||||
debug_ids[138] = 'Running Package applier for machine'
|
||||
debug_ids[139] = 'Package applier for machine will not be started'
|
||||
debug_ids[140] = 'Running Package applier for user in administrator context'
|
||||
debug_ids[141] = 'Package applier for user in administrator context will not be started'
|
||||
debug_ids[142] = 'Running pkcon_runner to install and remove packages'
|
||||
debug_ids[143] = 'Run apt-get update'
|
||||
debug_ids[144] = 'Unable to cache specified URI'
|
||||
debug_ids[145] = 'Unable to cache specified URI for machine'
|
||||
debug_ids[146] = 'Running CIFS applier for user in administrator context'
|
||||
debug_ids[147] = 'CIFS applier for user in administrator context will not be started'
|
||||
debug_ids[148] = 'Installing the package'
|
||||
debug_ids[149] = 'Removing a package'
|
||||
debug_ids[150] = 'Failed to found gsettings for machine'
|
||||
debug_ids[151] = 'Failed to found user gsettings'
|
||||
debug_ids[152] = 'Configure user Group Policy loopback processing mode'
|
||||
debug_ids[153] = 'Saving information about script'
|
||||
debug_ids[154] = 'No machine scripts directory to clean up'
|
||||
debug_ids[155] = 'No user scripts directory to clean up'
|
||||
debug_ids[156] = 'Prepare Scripts applier for machine'
|
||||
debug_ids[157] = 'Scripts applier for machine will not be started'
|
||||
debug_ids[158] = 'Prepare Scripts applier for user in user context'
|
||||
debug_ids[159] = 'Scripts applier for user in user context will not be started'
|
||||
debug_ids[160] = 'Clean machine scripts directory'
|
||||
debug_ids[161] = 'Clean user scripts directory'
|
||||
debug_ids[162] = 'Saving information about file'
|
||||
debug_ids[163] = 'Failed to return file path'
|
||||
debug_ids[164] = 'Failed to create file'
|
||||
debug_ids[165] = 'Failed to delete file'
|
||||
debug_ids[166] = 'Failed to update file'
|
||||
debug_ids[167] = 'Running File copy applier for machine'
|
||||
debug_ids[168] = 'Running File copy applier for machine will not be started'
|
||||
debug_ids[169] = 'Running File copy applier for user in administrator context'
|
||||
debug_ids[170] = 'Running File copy applier for user in administrator context will not be started'
|
||||
debug_ids[171] = 'Running ini applier for machine'
|
||||
debug_ids[172] = 'Running ini applier for machine will not be started'
|
||||
debug_ids[173] = 'Running ini applier for user in administrator context'
|
||||
debug_ids[174] = 'Running ini applier for user in administrator context will not be started'
|
||||
debug_ids[175] = 'Ini-file path not recognized'
|
||||
debug_ids[176] = 'Ini-file is not readable'
|
||||
debug_ids[177] = 'Saving information about ini-file'
|
||||
debug_ids[178] = 'Dictionary key generation failed'
|
||||
debug_ids[179] = 'Saving information about network shares'
|
||||
debug_ids[180] = 'Running networkshare applier for machine'
|
||||
debug_ids[181] = 'Running networkshare applier for machine will not be started'
|
||||
debug_ids[182] = 'Apply network share data action failed'
|
||||
debug_ids[183] = 'Running yandex_browser_applier for machine'
|
||||
debug_ids[184] = 'Yandex_browser_applier for machine will not be started'
|
||||
debug_ids[185] = 'Wrote YandexBrowser preferences to'
|
||||
|
||||
return debug_ids.get(code, 'Unknown debug code')
|
||||
|
||||
@@ -149,6 +306,15 @@ def warning_code(code):
|
||||
warning_ids[3] = 'oddjobd is inaccessible'
|
||||
warning_ids[4] = 'No SYSVOL entry assigned to GPO'
|
||||
warning_ids[5] = 'ADP package is not installed - plugin will not be initialized'
|
||||
warning_ids[6] = 'Unable to resolve GSettings parameter'
|
||||
warning_ids[7] = 'No home directory exists for user'
|
||||
warning_ids[8] = 'User\'s shortcut not placed to home directory'
|
||||
warning_ids[9] = 'CUPS is not installed: no printer settings will be deployed'
|
||||
warning_ids[10] = 'Unsupported NTP server type'
|
||||
warning_ids[11] = 'Unable to refresh GPO list'
|
||||
warning_ids[12] = 'Failed to read the list of files'
|
||||
warning_ids[13] = 'Failed to caching the file'
|
||||
warning_ids[14] = 'Could not create a valid list of keys'
|
||||
|
||||
return warning_ids.get(code, 'Unknown warning code')
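# Illustrative note (an assumption based on the `from messages import message_with_code`
# line in pkcon_runner below): callers pass a prefixed code such as 'D143' or 'W10';
# the letter selects the table (info/error/debug/warning) and the number selects the
# entry, whose English text is then translated via the gettext msgstr entries above.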
|
||||
|
||||
|
150
gpoa/pkcon_runner
Executable file
@@ -0,0 +1,150 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import rpm
|
||||
import subprocess
|
||||
from storage import registry_factory
|
||||
import logging
|
||||
from util.logging import log
|
||||
import argparse
|
||||
import gettext
|
||||
import locale
|
||||
from messages import message_with_code
|
||||
from util.arguments import (
|
||||
set_loglevel
|
||||
)
|
||||
|
||||
|
||||
def is_rpm_installed(rpm_name):
|
||||
'''
|
||||
Check if the package named 'rpm_name' is installed
|
||||
'''
|
||||
ts = rpm.TransactionSet()
|
||||
pm = ts.dbMatch('name', rpm_name)
|
||||
if pm.count() > 0:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
class Pkcon_applier:
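'''
Reads the package names scheduled for installation or removal from the
registry storage (the HKLM Packages branch for the machine, the HKCU
branch when a SID is given) and applies the changes with pkcon.
'''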
|
||||
|
||||
def __init__(self, sid = None):
|
||||
self.__install_key_name = 'Install'
|
||||
self.__remove_key_name = 'Remove'
|
||||
self.__hkcu_branch = 'Software\\BaseALT\\Policies\\Packages'
|
||||
self.__hklm_branch = 'Software\\BaseALT\\Policies\\Packages'
|
||||
self.__install_command = ['/usr/bin/pkcon', '-y', 'install']
|
||||
self.__remove_command = ['/usr/bin/pkcon', '-y', 'remove']
|
||||
self.__reinstall_command = ['/usr/bin/pkcon', '-y', 'reinstall']
|
||||
self.install_packages = set()
|
||||
self.remove_packages = set()
|
||||
self.storage = registry_factory('registry')
|
||||
if sid:
|
||||
install_branch_user = '{}\\{}%'.format(self.__hkcu_branch, self.__install_key_name)
|
||||
remove_branch_user = '{}\\{}%'.format(self.__hkcu_branch, self.__remove_key_name)
|
||||
self.install_packages_setting = self.storage.filter_hkcu_entries(sid, install_branch_user)
|
||||
self.remove_packages_setting = self.storage.filter_hkcu_entries(sid, remove_branch_user)
|
||||
else:
|
||||
install_branch = '{}\\{}%'.format(self.__hklm_branch, self.__install_key_name)
|
||||
remove_branch = '{}\\{}%'.format(self.__hklm_branch, self.__remove_key_name)
|
||||
self.install_packages_setting = self.storage.filter_hklm_entries(install_branch)
|
||||
self.remove_packages_setting = self.storage.filter_hklm_entries(remove_branch)
|
||||
for package in self.install_packages_setting:
|
||||
if not is_rpm_installed(package.data):
|
||||
self.install_packages.add(package.data)
|
||||
for package in self.remove_packages_setting:
|
||||
if package.data in self.install_packages:
|
||||
self.install_packages.remove(package.data)
|
||||
if is_rpm_installed(package.data):
|
||||
self.remove_packages.add(package.data)
|
||||
|
||||
def apply(self):
|
||||
log('D142')
|
||||
self.update()
|
||||
for package in self.remove_packages:
|
||||
try:
|
||||
logdata = dict()
|
||||
logdata['name'] = package
|
||||
log('D149', logdata)
|
||||
self.remove_pkg(package)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exc'] = exc
|
||||
log('E58', logdata)
|
||||
|
||||
for package in self.install_packages:
|
||||
try:
|
||||
logdata = dict()
|
||||
logdata['name'] = package
|
||||
log('D148', logdata)
|
||||
self.install_pkg(package)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exc'] = exc
|
||||
log('E57', logdata)
|
||||
|
||||
|
||||
def install_pkg(self, package_name):
|
||||
fullcmd = list(self.__install_command)
|
||||
fullcmd.append(package_name)
|
||||
return subprocess.check_output(fullcmd)
|
||||
|
||||
def reinstall_pkg(self, package_name):
|
||||
pass
|
||||
|
||||
def remove_pkg(self, package_name):
|
||||
# Copy the base command so repeated calls do not append package names to the shared list
fullcmd = list(self.__remove_command)
|
||||
fullcmd.append(package_name)
|
||||
return subprocess.check_output(fullcmd)
|
||||
|
||||
def update(self):
|
||||
'''
|
||||
Update APT-RPM database.
|
||||
'''
|
||||
try:
|
||||
res = subprocess.check_output(['/usr/bin/apt-get', 'update'], encoding='utf-8')
|
||||
msg = str(res).split('\n')
|
||||
logdata = dict()
|
||||
for mslog in msg:
|
||||
ms = str(mslog).split(' ')
|
||||
if ms:
|
||||
logdata = {ms[0]: ms[1:-1]}
|
||||
log('D143', logdata)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['msg'] = exc
|
||||
log('E56',logdata)
|
||||
|
||||
if __name__ == '__main__':
|
||||
locale.bindtextdomain('gpoa', '/usr/lib/python3/site-packages/gpoa/locale')
|
||||
gettext.bindtextdomain('gpoa', '/usr/lib/python3/site-packages/gpoa/locale')
|
||||
gettext.textdomain('gpoa')
|
||||
logger = logging.getLogger()
|
||||
parser = argparse.ArgumentParser(description='Package applier')
|
||||
parser.add_argument('--sid', type = str, help = 'sid', nargs = '?', default = None)
|
||||
parser.add_argument('--loglevel', type = int, help = 'loglevel', nargs = '?', default = 30)
|
||||
|
||||
args = parser.parse_args()
|
||||
logger.setLevel(args.loglevel)
|
||||
if args.sid:
|
||||
applier = Pkcon_applier(args.sid)
|
||||
else:
|
||||
applier = Pkcon_applier()
|
||||
applier.apply()
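# Example invocation (a sketch: the libexec path matches the symlinks created by
# the spec file later in this patch, the SID value is purely illustrative):
#   /usr/libexec/gpupdate/pkcon_runner --loglevel 10
#   /usr/libexec/gpupdate/pkcon_runner --sid S-1-5-21-0-0-0-1000 --loglevel 10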
|
||||
|
124
gpoa/scripts_runner
Executable file
@@ -0,0 +1,124 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import subprocess
|
||||
import argparse
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
class Scripts_runner:
|
||||
'''
|
||||
A class for an object that iterates over directories with scripts
|
||||
in the desired sequence and launches them
|
||||
'''
|
||||
def __init__(self, work_mode = None, user_name = None, action = None):
|
||||
self.dir_scripts_machine = '/var/cache/gpupdate_scripts_cache/machine/'
|
||||
self.dir_scripts_users = '/var/cache/gpupdate_scripts_cache/users/'
|
||||
self.user_name = user_name
|
||||
self.list_with_all_commands = list()
|
||||
stack_dir = None
|
||||
if work_mode and work_mode.upper() == 'MACHINE':
|
||||
stack_dir = self.machine_runner_fill()
|
||||
elif work_mode and work_mode.upper() == 'USER':
|
||||
stack_dir = self.user_runner_fill()
|
||||
else:
|
||||
print('Invalid arguments entered')
|
||||
return
|
||||
if action:
|
||||
self.action = action.upper()
|
||||
else:
|
||||
print('Action needed')
|
||||
return
|
||||
|
||||
self.find_action(stack_dir)
|
||||
for it_cmd in self.list_with_all_commands:
|
||||
print(self.run_cmd_subprocess(it_cmd))
|
||||
|
||||
def user_runner_fill(self):
|
||||
return self.get_stack_dir(self.dir_scripts_users + self.user_name)
|
||||
|
||||
def machine_runner_fill(self):
|
||||
return self.get_stack_dir(self.dir_scripts_machine)
|
||||
|
||||
def get_stack_dir(self, path_dir):
|
||||
stack_dir = list()
|
||||
try:
|
||||
dir_script = Path(path_dir)
|
||||
for it_dir in dir_script.iterdir():
|
||||
stack_dir.append(str(it_dir))
|
||||
return stack_dir
|
||||
except Exception as exc:
|
||||
print(exc)
|
||||
return None
|
||||
|
||||
def find_action(self, stack_dir):
|
||||
if not stack_dir:
|
||||
return
|
||||
list_tmp = list()
|
||||
while stack_dir:
|
||||
path_turn = stack_dir.pop()
|
||||
basename = os.path.basename(path_turn)
|
||||
if basename == self.action:
|
||||
list_tmp = self.get_stack_dir(path_turn)
|
||||
if list_tmp:
|
||||
self.fill_list_cmd(list_tmp)
|
||||
|
||||
|
||||
def fill_list_cmd(self, list_tmp):
|
||||
list_tmp = sorted(list_tmp)
|
||||
for file_in_task_dir in list_tmp:
|
||||
suffix = os.path.basename(file_in_task_dir)[-4:]
|
||||
if suffix == '.arg':
|
||||
try:
|
||||
arg = self.read_args(file_in_task_dir)
|
||||
for it_arg in arg.split():
|
||||
self.list_with_all_commands[-1].append(it_arg)
|
||||
except Exception as exc:
|
||||
print('Failed to read arguments for {}: {}'.format(self.list_with_all_commands.pop(), exc))
|
||||
else:
|
||||
cmd = list()
|
||||
cmd.append(file_in_task_dir)
|
||||
self.list_with_all_commands.append(cmd)
|
||||
|
||||
|
||||
def read_args(self, path):
|
||||
with open(path + '/arg') as f:
|
||||
args = f.readlines()
|
||||
return args[0]
|
||||
|
||||
def run_cmd_subprocess(self, cmd):
|
||||
try:
|
||||
subprocess.Popen(cmd)
|
||||
return 'Script run: {}'.format(cmd)
|
||||
except Exception as exc:
|
||||
return exc
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='Scripts runner')
|
||||
parser.add_argument('--mode', type = str, help = 'MACHINE or USER', nargs = '?', default = None)
|
||||
parser.add_argument('--user', type = str, help = 'User name ', nargs = '?', default = None)
|
||||
parser.add_argument('--action', type = str, help = 'MACHINE : [STARTUP or SHUTDOWN], USER : [LOGON or LOGOFF]', nargs = '?', default = None)
|
||||
|
||||
args = parser.parse_args()
|
||||
try:
|
||||
Scripts_runner(args.mode, args.user, args.action)
|
||||
except Exception as exc:
|
||||
print(exc)
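# Example invocation (a sketch; the libexec path matches the symlink created by
# the spec file later in this patch):
#   /usr/libexec/gpupdate/scripts_runner --mode MACHINE --action STARTUP
#   /usr/libexec/gpupdate/scripts_runner --mode USER --user alice --action LOGON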
|
@@ -50,7 +50,7 @@ class fs_file_cache:
|
||||
file_path))
|
||||
except Exception as exc:
|
||||
logdata = dict({'exception': str(exc)})
|
||||
log('E38', logdata)
|
||||
log('D144', logdata)
|
||||
raise exc
|
||||
|
||||
if not destdir.exists():
|
||||
@@ -63,18 +63,13 @@ class fs_file_cache:
|
||||
with open(destfile, 'wb') as df:
|
||||
df.truncate()
|
||||
df.flush()
|
||||
try:
|
||||
file_handler = self.samba_context.open(str(uri_path), os.O_RDONLY)
|
||||
while True:
|
||||
data = file_handler.read(self.__read_blocksize)
|
||||
if not data:
|
||||
break
|
||||
df.write(data)
|
||||
df.flush()
|
||||
except Exception as exc:
|
||||
logdata = dict({'exception': str(exc)})
|
||||
log('E35', logdata)
|
||||
raise exc
|
||||
file_handler = self.samba_context.open(str(uri_path), os.O_RDONLY)
|
||||
while True:
|
||||
data = file_handler.read(self.__read_blocksize)
|
||||
if not data:
|
||||
break
|
||||
df.write(data)
|
||||
df.flush()
|
||||
|
||||
def get(self, uri):
|
||||
destfile = uri
|
||||
@@ -95,3 +90,17 @@ class fs_file_cache:
|
||||
|
||||
return str(destfile)
|
||||
|
||||
def get_ls_smbdir(self, uri):
|
||||
type_file_smb = 8
|
||||
try:
|
||||
uri_path = UNCPath(uri)
|
||||
opendir = self.samba_context.opendir(str(uri_path))
|
||||
ls_obj = opendir.getdents()
|
||||
ls = [obj.name for obj in ls_obj if obj.smbc_type == type_file_smb]
|
||||
return ls
|
||||
except Exception as exc:
|
||||
if Path(uri).exists():
|
||||
return None
|
||||
logdata = dict({'exception': str(exc)})
|
||||
log('W12', logdata)
|
||||
return None
|
||||
|
@@ -22,7 +22,9 @@ class samba_preg(object):
|
||||
'''
|
||||
def __init__(self, preg_obj, policy_name):
|
||||
self.policy_name = policy_name
|
||||
self.hive_key = '{}\\{}'.format(preg_obj.keyname, preg_obj.valuename)
|
||||
self.keyname = preg_obj.keyname
|
||||
self.valuename = preg_obj.valuename
|
||||
self.hive_key = '{}\\{}'.format(self.keyname, self.valuename)
|
||||
self.type = preg_obj.type
|
||||
self.data = preg_obj.data
|
||||
|
||||
@@ -41,7 +43,9 @@ class samba_hkcu_preg(object):
|
||||
def __init__(self, sid, preg_obj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.hive_key = '{}\\{}'.format(preg_obj.keyname, preg_obj.valuename)
|
||||
self.keyname = preg_obj.keyname
|
||||
self.valuename = preg_obj.valuename
|
||||
self.hive_key = '{}\\{}'.format(self.keyname, self.valuename)
|
||||
self.type = preg_obj.type
|
||||
self.data = preg_obj.data
|
||||
|
||||
@@ -170,3 +174,119 @@ class envvar_entry(object):
|
||||
|
||||
return fields
|
||||
|
||||
class script_entry(object):
|
||||
'''
|
||||
Object mapping representing scripts.ini
|
||||
'''
|
||||
def __init__(self, sid, scrobj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.action = scrobj.action
|
||||
self.number = scrobj.number
|
||||
self.path = scrobj.path
|
||||
self.arg = scrobj.args
|
||||
|
||||
def update_fields(self):
|
||||
'''
|
||||
Return list of fields to update
|
||||
'''
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['action'] = self.action
|
||||
fields['number'] = self.number
|
||||
fields['path'] = self.path
|
||||
fields['arg'] = self.arg
|
||||
|
||||
return fields
|
||||
|
||||
class file_entry(object):
|
||||
'''
|
||||
Object mapping representing FILES.XML
|
||||
'''
|
||||
def __init__(self, sid, scrobj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.action = scrobj.action
|
||||
self.fromPath = scrobj.fromPath
|
||||
self.targetPath = scrobj.targetPath
|
||||
self.readOnly = scrobj.readOnly
|
||||
self.archive = scrobj.archive
|
||||
self.hidden = scrobj.hidden
|
||||
self.suppress = scrobj.suppress
|
||||
|
||||
|
||||
def update_fields(self):
|
||||
'''
|
||||
Return list of fields to update
|
||||
'''
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['action'] = self.action
|
||||
fields['fromPath'] = self.fromPath
|
||||
fields['targetPath'] = self.targetPath
|
||||
fields['readOnly'] = self.readOnly
|
||||
fields['archive'] = self.archive
|
||||
fields['hidden'] = self.hidden
|
||||
fields['suppress'] = self.suppress
|
||||
|
||||
return fields
|
||||
|
||||
class ini_entry(object):
|
||||
'''
|
||||
Object mapping representing INIFILES.XML
|
||||
'''
|
||||
def __init__(self, sid, iniobj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.action = iniobj.action
|
||||
self.path = iniobj.path
|
||||
self.section = iniobj.section
|
||||
self.property = iniobj.property
|
||||
self.value = iniobj.value
|
||||
|
||||
|
||||
def update_fields(self):
|
||||
'''
|
||||
Return list of fields to update
|
||||
'''
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['action'] = self.action
|
||||
fields['path'] = self.path
|
||||
fields['section'] = self.section
|
||||
fields['property'] = self.property
|
||||
fields['value'] = self.value
|
||||
|
||||
return fields
|
||||
|
||||
class networkshare_entry(object):
|
||||
'''
|
||||
Object mapping representing NETWORKSHARES.XML
|
||||
'''
|
||||
def __init__(self, sid, networkshareobj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.name = networkshareobj.name
|
||||
self.action = networkshareobj.action
|
||||
self.path = networkshareobj.path
|
||||
self.allRegular = networkshareobj.allRegular
|
||||
self.comment = networkshareobj.comment
|
||||
self.limitUsers = networkshareobj.limitUsers
|
||||
self.abe = networkshareobj.abe
|
||||
|
||||
|
||||
def update_fields(self):
|
||||
'''
|
||||
Return list of fields to update
|
||||
'''
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['name'] = self.name
|
||||
fields['action'] = self.action
|
||||
fields['path'] = self.path
|
||||
fields['allRegular'] = self.allRegular
|
||||
fields['comment'] = self.comment
|
||||
fields['limitUsers'] = self.limitUsers
|
||||
fields['abe'] = self.abe
|
||||
|
||||
return fields
|
||||
|
@@ -44,6 +44,10 @@ from .record_types import (
|
||||
, drive_entry
|
||||
, folder_entry
|
||||
, envvar_entry
|
||||
, script_entry
|
||||
, file_entry
|
||||
, ini_entry
|
||||
, networkshare_entry
|
||||
)
|
||||
|
||||
class sqlite_registry(registry):
|
||||
@@ -68,6 +72,8 @@ class sqlite_registry(registry):
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('hive_key', String(65536, collation='NOCASE'),
|
||||
unique=True)
|
||||
, Column('keyname', String(collation='NOCASE'))
|
||||
, Column('valuename', String(collation='NOCASE'))
|
||||
, Column('policy_name', String)
|
||||
, Column('type', Integer)
|
||||
, Column('data', String)
|
||||
@@ -78,6 +84,8 @@ class sqlite_registry(registry):
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('hive_key', String(65536, collation='NOCASE'))
|
||||
, Column('keyname', String(collation='NOCASE'))
|
||||
, Column('valuename', String(collation='NOCASE'))
|
||||
, Column('policy_name', String)
|
||||
, Column('type', Integer)
|
||||
, Column('data', String)
|
||||
@@ -139,6 +147,62 @@ class sqlite_registry(registry):
|
||||
, Column('value', String)
|
||||
, UniqueConstraint('sid', 'name')
|
||||
)
|
||||
self.__scripts = Table(
|
||||
'Scripts'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('policy_name', String)
|
||||
, Column('number', String)
|
||||
, Column('action', String)
|
||||
, Column('path', String)
|
||||
, Column('arg', String)
|
||||
, UniqueConstraint('sid', 'path', 'arg')
|
||||
)
|
||||
self.__files = Table(
|
||||
'Files'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('policy_name', String)
|
||||
, Column('action', String)
|
||||
, Column('fromPath', String)
|
||||
, Column('targetPath', String)
|
||||
, Column('readOnly', String)
|
||||
, Column('archive', String)
|
||||
, Column('hidden', String)
|
||||
, Column('suppress', String)
|
||||
, UniqueConstraint('sid', 'policy_name', 'targetPath', 'fromPath')
|
||||
)
|
||||
self.__ini = Table(
|
||||
'Ini'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('policy_name', String)
|
||||
, Column('action', String)
|
||||
, Column('path', String)
|
||||
, Column('section', String)
|
||||
, Column('property', String)
|
||||
, Column('value', String)
|
||||
, UniqueConstraint('sid', 'action', 'path', 'section', 'property', 'value')
|
||||
)
|
||||
self.__networkshare = Table(
|
||||
'Networkshare'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('policy_name', String)
|
||||
, Column('name', String)
|
||||
, Column('action', String)
|
||||
, Column('path', String)
|
||||
, Column('allRegular', String)
|
||||
, Column('comment', String)
|
||||
, Column('limitUsers', String)
|
||||
, Column('abe', String)
|
||||
, UniqueConstraint('sid', 'name', 'path')
|
||||
)
|
||||
|
||||
self.__metadata.create_all(self.db_cnt)
|
||||
Session = sessionmaker(bind=self.db_cnt)
|
||||
self.db_session = Session()
|
||||
@@ -151,6 +215,10 @@ class sqlite_registry(registry):
|
||||
mapper(drive_entry, self.__drives)
|
||||
mapper(folder_entry, self.__folders)
|
||||
mapper(envvar_entry, self.__envvars)
|
||||
mapper(script_entry, self.__scripts)
|
||||
mapper(file_entry, self.__files)
|
||||
mapper(ini_entry, self.__ini)
|
||||
mapper(networkshare_entry, self.__networkshare)
|
||||
except:
|
||||
pass
|
||||
#logging.error('Error creating mapper')
|
||||
@@ -240,16 +308,52 @@ class sqlite_registry(registry):
|
||||
log('D19', logdata)
|
||||
self._info_upsert(ientry)
|
||||
|
||||
def _delete_hklm_keyname(self, keyname):
|
||||
'''
|
||||
Delete PReg hive_key from HKEY_LOCAL_MACHINE
|
||||
'''
|
||||
logdata = dict({'keyname': keyname})
|
||||
try:
|
||||
(self
|
||||
.db_session
|
||||
.query(samba_preg)
|
||||
.filter(samba_preg.keyname == keyname)
|
||||
.delete(synchronize_session=False))
|
||||
self.db_session.commit()
|
||||
log('D65', logdata)
|
||||
except Exception as exc:
|
||||
log('D63', logdata)
|
||||
|
||||
def add_hklm_entry(self, preg_entry, policy_name):
|
||||
'''
|
||||
Write PReg entry to HKEY_LOCAL_MACHINE
|
||||
'''
|
||||
pentry = samba_preg(preg_entry, policy_name)
|
||||
if not pentry.hive_key.rpartition('\\')[2].startswith('**'):
|
||||
if not pentry.valuename.startswith('**'):
|
||||
self._hklm_upsert(pentry)
|
||||
else:
|
||||
logdata = dict({'key': pentry.hive_key})
|
||||
log('D27', logdata)
|
||||
if pentry.valuename.lower() == '**delvals.':
|
||||
self._delete_hklm_keyname(pentry.keyname)
|
||||
else:
|
||||
log('D27', logdata)
|
||||
|
||||
def _delete_hkcu_keyname(self, keyname, sid):
|
||||
'''
|
||||
Delete PReg hive_key from HKEY_CURRENT_USER
|
||||
'''
|
||||
logdata = dict({'sid': sid, 'keyname': keyname})
|
||||
try:
|
||||
(self
|
||||
.db_session
|
||||
.query(samba_hkcu_preg)
|
||||
.filter(samba_hkcu_preg.sid == sid)
|
||||
.filter(samba_hkcu_preg.keyname == keyname)
|
||||
.delete(synchronize_session=False))
|
||||
self.db_session.commit()
|
||||
log('D66', logdata)
|
||||
except:
|
||||
log('D64', logdata)
|
||||
|
||||
def add_hkcu_entry(self, preg_entry, sid, policy_name):
|
||||
'''
|
||||
@@ -257,11 +361,14 @@ class sqlite_registry(registry):
|
||||
'''
|
||||
hkcu_pentry = samba_hkcu_preg(sid, preg_entry, policy_name)
|
||||
logdata = dict({'sid': sid, 'policy': policy_name, 'key': hkcu_pentry.hive_key})
|
||||
if not hkcu_pentry.hive_key.rpartition('\\')[2].startswith('**'):
|
||||
if not hkcu_pentry.valuename.startswith('**'):
|
||||
log('D26', logdata)
|
||||
self._hkcu_upsert(hkcu_pentry)
|
||||
else:
|
||||
log('D51', logdata)
|
||||
if hkcu_pentry.valuename.lower() == '**delvals.':
|
||||
self._delete_hkcu_keyname(hkcu_pentry.keyname, sid)
|
||||
else:
|
||||
log('D51', logdata)
|
||||
|
||||
def add_shortcut(self, sid, sc_obj, policy_name):
|
||||
'''
|
||||
@@ -322,6 +429,68 @@ class sqlite_registry(registry):
|
||||
.filter(envvar_entry.name == ev_entry.name)
|
||||
.update(ev_entry.update_fields()))
|
||||
self.db_session.commit()
|
||||
def add_script(self, sid, scrobj, policy_name):
|
||||
scr_entry = script_entry(sid, scrobj, policy_name)
|
||||
logdata = dict()
|
||||
logdata['script path'] = scrobj.path
|
||||
logdata['sid'] = sid
|
||||
log('D153', logdata)
|
||||
try:
|
||||
self._add(scr_entry)
|
||||
except Exception as exc:
|
||||
(self
|
||||
._filter_sid_obj(script_entry, sid)
|
||||
.filter(script_entry.path == scr_entry.path)
|
||||
.update(scr_entry.update_fields()))
|
||||
self.db_session.commit()
|
||||
|
||||
def add_file(self, sid, fileobj, policy_name):
|
||||
f_entry = file_entry(sid, fileobj, policy_name)
|
||||
logdata = dict()
|
||||
logdata['targetPath'] = f_entry.targetPath
|
||||
logdata['fromPath'] = f_entry.fromPath
|
||||
log('D162', logdata)
|
||||
try:
|
||||
self._add(f_entry)
|
||||
except Exception as exc:
|
||||
(self
|
||||
._filter_sid_obj(file_entry, sid)
|
||||
.filter(file_entry.targetPath == f_entry.targetPath)
|
||||
.update(f_entry.update_fields()))
|
||||
self.db_session.commit()
|
||||
|
||||
|
||||
def add_ini(self, sid, iniobj, policy_name):
|
||||
inientry = ini_entry(sid, iniobj, policy_name)
|
||||
logdata = dict()
|
||||
logdata['path'] = inientry.path
|
||||
logdata['action'] = inientry.action
|
||||
log('D177', logdata)
|
||||
try:
|
||||
self._add(inientry)
|
||||
except Exception as exc:
|
||||
(self
|
||||
._filter_sid_obj(ini_entry, sid)
|
||||
.filter(ini_entry.path == inientry.path)
|
||||
.update(inientry.update_fields()))
|
||||
self.db_session.commit()
|
||||
|
||||
def add_networkshare(self, sid, networkshareobj, policy_name):
|
||||
networkshareentry = networkshare_entry(sid, networkshareobj, policy_name)
|
||||
logdata = dict()
|
||||
logdata['name'] = networkshareentry.name
|
||||
logdata['path'] = networkshareentry.path
|
||||
logdata['action'] = networkshareentry.action
|
||||
log('D179', logdata)
|
||||
try:
|
||||
self._add(networkshareentry)
|
||||
except Exception as exc:
|
||||
(self
|
||||
._filter_sid_obj(networkshare_entry, sid)
|
||||
.filter(networkshare_entry.path == networkshareentry.path)
|
||||
.update(networkshareentry.update_fields()))
|
||||
self.db_session.commit()
|
||||
|
||||
|
||||
def _filter_sid_obj(self, row_object, sid):
|
||||
res = (self
|
||||
@@ -354,6 +523,28 @@ class sqlite_registry(registry):
|
||||
def get_envvars(self, sid):
|
||||
return self._filter_sid_list(envvar_entry, sid)
|
||||
|
||||
def _filter_scripts_list(self, row_object, sid, action):
|
||||
res = (self
|
||||
.db_session
|
||||
.query(row_object)
|
||||
.filter(row_object.sid == sid)
|
||||
.filter(row_object.action == action)
|
||||
.order_by(row_object.id)
|
||||
.all())
|
||||
return res
|
||||
|
||||
def get_scripts(self, sid, action):
|
||||
return self._filter_scripts_list(script_entry, sid, action)
|
||||
|
||||
def get_files(self, sid):
|
||||
return self._filter_sid_list(file_entry, sid)
|
||||
|
||||
def get_networkshare(self, sid):
|
||||
return self._filter_sid_list(networkshare_entry, sid)
|
||||
|
||||
def get_ini(self, sid):
|
||||
return self._filter_sid_list(ini_entry, sid)
|
||||
|
||||
def get_hkcu_entry(self, sid, hive_key):
|
||||
res = (self
|
||||
.db_session
|
||||
@@ -403,6 +594,10 @@ class sqlite_registry(registry):
|
||||
self._wipe_sid(ad_shortcut, sid)
|
||||
self._wipe_sid(printer_entry, sid)
|
||||
self._wipe_sid(drive_entry, sid)
|
||||
self._wipe_sid(script_entry, sid)
|
||||
self._wipe_sid(file_entry, sid)
|
||||
self._wipe_sid(ini_entry, sid)
|
||||
self._wipe_sid(networkshare_entry, sid)
|
||||
|
||||
def _wipe_sid(self, row_object, sid):
|
||||
(self
|
||||
|
@@ -16,5 +16,5 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#}
|
||||
{{ home_dir }}/net {{ mount_file }} -t 120
|
||||
{{ home_dir }}/net {{ mount_file }} -t 120 --browse
|
||||
|
||||
|
@@ -64,18 +64,15 @@ def process_target(target_name=None):
|
||||
The target may be 'All', 'Computer' or 'User'. This function
|
||||
determines which one was specified.
|
||||
'''
|
||||
target = 'All'
|
||||
|
||||
if target_name == 'Computer':
|
||||
target = 'Computer'
|
||||
|
||||
if target_name == 'User':
|
||||
target = 'User'
|
||||
target = "All"
|
||||
if target_name:
|
||||
target = target_name
|
||||
|
||||
logdata = dict({'target': target})
|
||||
logging.debug(slogm(message_with_code('D10'), logdata))
|
||||
|
||||
return target
|
||||
return target.upper()
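# For illustration: process_target('Computer') returns 'COMPUTER', while
# process_target(None) falls back to 'ALL'.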
|
||||
|
||||
class ExitCodeUpdater(IntEnum):
|
||||
'''
|
||||
|
@@ -45,7 +45,7 @@ class GPConfig:
|
||||
|
||||
return 'samba'
|
||||
|
||||
def set_backend(self, backend_name):
|
||||
def set_backend(self, backend_name='local'):
|
||||
self.full_config['gpoa']['backend'] = backend_name
|
||||
self.write_config()
|
||||
|
||||
@@ -71,7 +71,7 @@ class GPConfig:
|
||||
|
||||
return get_default_policy_name()
|
||||
|
||||
def set_local_policy_template(self, template_name):
|
||||
def set_local_policy_template(self, template_name='default'):
|
||||
self.full_config['gpoa']['local-policy'] = template_name
|
||||
self.write_config()
|
||||
|
||||
|
@@ -28,9 +28,11 @@ class dbus_runner:
|
||||
to trigger gpoa for user running in sysadmin context.
|
||||
'''
|
||||
|
||||
_bus_name = 'com.redhat.oddjob_gpupdate'
|
||||
_redhat_bus_name = 'com.redhat.oddjob_gpupdate'
|
||||
_basealt_bus_name = 'ru.basealt.oddjob_gpupdate'
|
||||
# Interface name is equal to bus name.
|
||||
_interface_name = 'com.redhat.oddjob_gpupdate'
|
||||
_redhat_interface_name = 'com.redhat.oddjob_gpupdate'
|
||||
_basealt_interface_name = 'ru.basealt.oddjob_gpupdate'
|
||||
_object_path = '/'
|
||||
# The timeout is in milliseconds. The default is -1 which is
|
||||
# DBUS_TIMEOUT_USE_DEFAULT which is 25 seconds. There is also
|
||||
@@ -44,6 +46,27 @@ class dbus_runner:
|
||||
def __init__(self, username=None):
|
||||
self.username = username
|
||||
self.system_bus = dbus.SystemBus()
|
||||
self.bus_name = self._basealt_bus_name
|
||||
self.interface_name = self._basealt_interface_name
|
||||
self.check_dbus()
|
||||
|
||||
def check_dbus(self):
|
||||
try:
|
||||
# Check privileged operations bus
|
||||
log('D900', {'bus_name': self.bus_name})
|
||||
self.system_bus.get_object(self.bus_name, '/')
|
||||
return
|
||||
|
||||
except dbus.exceptions.DBusException as exc:
|
||||
if exc.get_dbus_name() != 'org.freedesktop.DBus.Error.ServiceUnknown':
|
||||
raise exc
|
||||
|
||||
self.bus_name = self._redhat_bus_name
|
||||
self.interface_name = self._redhat_interface_name
|
||||
|
||||
# Try to check alternative privileged operations bus
|
||||
log('W902', {'origin_bus_name': self._basealt_interface_name, 'bus_name': self.bus_name})
|
||||
self.system_bus.get_object(self.bus_name, '/')
|
||||
|
||||
def run(self):
|
||||
if self.username:
|
||||
@@ -54,12 +77,12 @@ class dbus_runner:
|
||||
# only for superuser. This method is called via PAM
|
||||
# when user logs in.
|
||||
try:
|
||||
result = self.system_bus.call_blocking(self._bus_name,
|
||||
result = self.system_bus.call_blocking(self.bus_name,
|
||||
self._object_path,
|
||||
self._interface_name,
|
||||
self.interface_name,
|
||||
'gpupdatefor',
|
||||
(username),
|
||||
(dbus.String(self.username)),
|
||||
's',
|
||||
[self.username],
|
||||
timeout=self._synchronous_timeout)
|
||||
print_dbus_result(result)
|
||||
except dbus.exceptions.DBusException as exc:
|
||||
@@ -69,12 +92,12 @@ class dbus_runner:
|
||||
raise exc
|
||||
else:
|
||||
try:
|
||||
result = self.system_bus.call_blocking(self._bus_name,
|
||||
result = self.system_bus.call_blocking(self.bus_name,
|
||||
self._object_path,
|
||||
self._interface_name,
|
||||
self.interface_name,
|
||||
'gpupdate',
|
||||
None,
|
||||
(),
|
||||
[],
|
||||
timeout=self._synchronous_timeout)
|
||||
print_dbus_result(result)
|
||||
except dbus.exceptions.DBusException as exc:
|
||||
@@ -84,14 +107,14 @@ class dbus_runner:
|
||||
else:
|
||||
log('D11')
|
||||
try:
|
||||
result = self.system_bus.call_blocking(self._bus_name,
|
||||
result = self.system_bus.call_blocking(self.bus_name,
|
||||
self._object_path,
|
||||
self._interface_name,
|
||||
self.interface_name,
|
||||
'gpupdate_computer',
|
||||
None,
|
||||
# The following positional parameter is called "args".
|
||||
# There is no official documentation for it.
|
||||
(),
|
||||
[],
|
||||
timeout=self._synchronous_timeout)
|
||||
print_dbus_result(result)
|
||||
except dbus.exceptions.DBusException as exc:
|
||||
@@ -146,9 +169,14 @@ def is_oddjobd_gpupdate_accessible():
|
||||
oddjobd_state = oddjobd_properties.Get('org.freedesktop.systemd1.Unit', 'ActiveState')
|
||||
|
||||
# Check if oddjobd_gpupdate is accesssible
|
||||
oddjobd_gpupdate = system_bus.get_object('com.redhat.oddjob_gpupdate', '/')
|
||||
oddjobd_upupdate_interface = dbus.Interface(oddjobd_gpupdate, 'com.redhat.oddjob_gpupdate')
|
||||
#oddjobd_upupdate_interface.gpupdate()
|
||||
try:
|
||||
oddjobd_gpupdate = system_bus.get_object('ru.basealt.oddjob_gpupdate', '/')
|
||||
oddjobd_upupdate_interface = dbus.Interface(oddjobd_gpupdate, 'ru.basealt.oddjob_gpupdate')
|
||||
except dbus.exceptions.DBusException as exc:
|
||||
if exc.get_dbus_name() == 'org.freedesktop.DBus.Error.ServiceUnknown':
|
||||
oddjobd_gpupdate = system_bus.get_object('com.redhat.oddjob_gpupdate', '/')
|
||||
oddjobd_upupdate_interface = dbus.Interface(oddjobd_gpupdate, 'com.redhat.oddjob_gpupdate')
|
||||
#oddjobd_upupdate_interface.gpupdate()
|
||||
|
||||
if oddjobd_state == 'active':
|
||||
oddjobd_accessible = True
|
||||
@@ -168,7 +196,7 @@ def print_dbus_result(result):
|
||||
log('D12', logdata)
|
||||
|
||||
for line in message:
|
||||
print(str(line))
|
||||
if line: print(str(line))
|
||||
|
||||
|
||||
class dbus_session:
|
||||
|
@@ -59,8 +59,9 @@ def machine_kdestroy(cache_name=None):
|
||||
if cache_name:
|
||||
kdestroy_cmd.extend(['-c', cache_name])
|
||||
|
||||
proc = subprocess.Popen(kdestroy_cmd, stderr=subprocess.DEVNULL)
|
||||
proc.wait()
|
||||
if cache_name or 'KRB5CCNAME' in os.environ:
|
||||
proc = subprocess.Popen(kdestroy_cmd, stderr=subprocess.DEVNULL)
|
||||
proc.wait()
|
||||
|
||||
if cache_name and os.path.exists(cache_name):
|
||||
os.unlink(cache_name)
|
||||
|
@@ -49,16 +49,8 @@ class slogm(object):
|
||||
def __str__(self):
|
||||
now = str(datetime.datetime.now().isoformat(sep=' ', timespec='milliseconds'))
|
||||
args = dict()
|
||||
#args.update(dict({'timestamp': now, 'message': str(self.message)}))
|
||||
args.update(self.kwargs)
|
||||
|
||||
kwa = dict()
|
||||
try:
|
||||
kwa = encoder().encode(args)
|
||||
except Exception as exc:
|
||||
pass
|
||||
|
||||
result = '{}|{}|{}'.format(now, self.message, kwa)
|
||||
result = '{}|{}|{}'.format(now, self.message, args)
|
||||
|
||||
return result
|
||||
|
||||
|
@@ -26,23 +26,31 @@ from .config import GPConfig
|
||||
from .exceptions import NotUNCPathError
|
||||
|
||||
|
||||
def local_policy_path():
|
||||
def get_custom_policy_dir():
|
||||
'''
|
||||
Returns path pointing to Default Policy directory.
|
||||
Returns path pointing to Custom Policy directory.
|
||||
'''
|
||||
return '/etc/local-policy'
|
||||
|
||||
def local_policy_path(default_template_name="default"):
|
||||
'''
|
||||
Returns path pointing to Local Policy template directory.
|
||||
'''
|
||||
local_policy_dir = '/usr/share/local-policy'
|
||||
local_policy_default = '{}/default'.format(local_policy_dir)
|
||||
|
||||
config = GPConfig()
|
||||
local_policy_system = '{}/{}'.format(local_policy_dir, config.get_local_policy_template())
|
||||
local_policy_template = config.get_local_policy_template()
|
||||
local_policy_template_path = os.path.join(local_policy_dir, local_policy_template)
|
||||
local_policy_default = os.path.join(local_policy_dir, default_template_name)
|
||||
|
||||
result_path = pathlib.Path(local_policy_default)
|
||||
if os.path.exists(local_policy_system):
|
||||
result_path = pathlib.Path(local_policy_system)
|
||||
if os.path.exists(local_policy_template):
|
||||
result_path = pathlib.Path(local_policy_template)
|
||||
elif os.path.exists(local_policy_template_path):
|
||||
result_path = pathlib.Path(local_policy_template_path)
|
||||
|
||||
return pathlib.Path(result_path)
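# For illustration (assuming a 'server' template is installed): with
# local-policy = server in gpupdate.ini this resolves to
# /usr/share/local-policy/server, otherwise it falls back to
# /usr/share/local-policy/default.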
|
||||
|
||||
|
||||
def cache_dir():
|
||||
'''
|
||||
Returns path pointing to gpupdate's cache directory
|
||||
@@ -83,7 +91,7 @@ class UNCPath:
|
||||
self.type = None
|
||||
if self.path.startswith(r'smb://'):
|
||||
self.type = 'uri'
|
||||
if self.path.startswith(r'\\'):
|
||||
if self.path.startswith(r'\\') or self.path.startswith(r'//'):
|
||||
self.type = 'unc'
|
||||
if not self.type:
|
||||
raise NotUNCPathError(path)
|
||||
|
@@ -19,6 +19,12 @@
|
||||
|
||||
from enum import Enum
|
||||
|
||||
import pwd
|
||||
import logging
|
||||
import subprocess
|
||||
import pysss_nss_idmap
|
||||
|
||||
from .logging import log
|
||||
|
||||
def wbinfo_getsid(domain, user):
|
||||
'''
|
||||
@@ -39,25 +45,35 @@ def wbinfo_getsid(domain, user):
|
||||
return sid
|
||||
|
||||
|
||||
def get_sid(domain, username):
|
||||
def get_local_sid_prefix():
|
||||
return "S-1-5-21-0-0-0"
|
||||
|
||||
|
||||
def get_sid(domain, username, is_machine = False):
|
||||
'''
|
||||
Lookup SID not only using wbinfo or sssd but also using own cache
|
||||
'''
|
||||
domain_username = '{}\\{}'.format(domain, username)
|
||||
sid = 'local-{}'.format(username)
|
||||
|
||||
# local user
|
||||
if not domain:
|
||||
found_uid = 0
|
||||
if not is_machine:
|
||||
found_uid = pwd.getpwnam(username).pw_uid
|
||||
return '{}-{}'.format(get_local_sid_prefix(), found_uid)
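# Illustration: a local user with uid 1000 gets the synthetic SID
# S-1-5-21-0-0-0-1000, while the machine account (is_machine=True) maps to
# S-1-5-21-0-0-0-0.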
|
||||
|
||||
# domain user
|
||||
try:
|
||||
sid = wbinfo_getsid(domain, username)
|
||||
except:
|
||||
sid = 'local-{}'.format(username)
|
||||
logging.warning(
|
||||
slogm('Error getting SID using wbinfo, will use cached SID: {}'.format(sid)))
|
||||
logdata = dict({'sid': sid})
|
||||
log('E16', logdata)
|
||||
|
||||
logging.debug(slogm('Working with SID: {}'.format(sid)))
|
||||
logdata = dict({'sid': sid})
|
||||
log('D21', logdata)
|
||||
|
||||
return sid
|
||||
|
||||
|
||||
class IssuingAuthority(Enum):
|
||||
SECURITY_NULL_SID_AUTHORITY = 0
|
||||
SECURITY_WORLD_SID_AUTHORITY = 1
|
||||
|
@@ -21,7 +21,7 @@ import sys
|
||||
import pwd
|
||||
import signal
|
||||
import subprocess
|
||||
|
||||
import locale
|
||||
from .logging import log
|
||||
from .dbus import dbus_session
|
||||
|
||||
@@ -30,11 +30,13 @@ def set_privileges(username, uid, gid, groups, home):
|
||||
'''
|
||||
Set current process privileges
|
||||
'''
|
||||
|
||||
defaultlocale = locale.getdefaultlocale()
|
||||
os.environ.clear()
|
||||
os.environ['HOME'] = home
|
||||
os.environ['USER'] = username
|
||||
os.environ['USERNAME'] = username
|
||||
if defaultlocale[0] and defaultlocale[1]:
|
||||
os.environ["LANG"] = '.'.join(defaultlocale)
|
||||
|
||||
try:
|
||||
os.setgid(gid)
|
||||
|
@@ -18,14 +18,12 @@
|
||||
|
||||
|
||||
import os
|
||||
import pwd
|
||||
|
||||
import subprocess
|
||||
from samba import getopt as options
|
||||
|
||||
from samba import NTSTATUSError
|
||||
from samba.gpclass import get_dc_hostname, check_refresh_gpo_list
|
||||
from samba.netcmd.common import netcmd_get_domain_infos_via_cldap
|
||||
import samba.gpo
|
||||
import pysss_nss_idmap
|
||||
|
||||
from storage import cache_factory
|
||||
from messages import message_with_code
|
||||
@@ -117,75 +115,42 @@ class smbcreds (smbopts):
|
||||
def update_gpos(self, username):
|
||||
gpos = self.get_gpos(username)
|
||||
|
||||
try:
|
||||
log('D49')
|
||||
check_refresh_gpo_list(self.selected_dc, self.lp, self.creds, gpos)
|
||||
log('D50')
|
||||
except Exception as exc:
|
||||
list_selected_dc = set()
|
||||
list_selected_dc.add(self.selected_dc)
|
||||
|
||||
while list_selected_dc:
|
||||
logdata = dict()
|
||||
logdata['username'] = username
|
||||
logdata['dc'] = self.selected_dc
|
||||
logdata['err'] = str(exc)
|
||||
log('F1')
|
||||
raise exc
|
||||
try:
|
||||
log('D49', logdata)
|
||||
check_refresh_gpo_list(self.selected_dc, self.lp, self.creds, gpos)
|
||||
log('D50', logdata)
|
||||
list_selected_dc.clear()
|
||||
except NTSTATUSError as smb_exc:
|
||||
logdata['smb_exc'] = str(smb_exc)
|
||||
self.selected_dc = get_dc_hostname(self.creds, self.lp)
|
||||
if self.selected_dc not in list_selected_dc:
|
||||
logdata['action'] = 'Search another dc'
|
||||
log('W11', logdata)
|
||||
list_selected_dc.add(self.selected_dc)
|
||||
else:
|
||||
log('F1', logdata)
|
||||
raise smb_exc
|
||||
except Exception as exc:
|
||||
logdata['exc'] = str(exc)
|
||||
log('F1', logdata)
|
||||
raise exc
|
||||
return gpos
|
||||
|
||||
|
||||
def wbinfo_getsid(domain, user):
|
||||
'''
|
||||
Get SID using wbinfo
|
||||
'''
|
||||
# This part works only on client
|
||||
username = '{}\\{}'.format(domain.upper(), user)
|
||||
sid = pysss_nss_idmap.getsidbyname(username)
|
||||
|
||||
if username in sid:
|
||||
return sid[username]['sid']
|
||||
|
||||
# This part works only on DC
|
||||
wbinfo_cmd = ['wbinfo', '-n', username]
|
||||
output = subprocess.check_output(wbinfo_cmd)
|
||||
sid = output.split()[0].decode('utf-8')
|
||||
|
||||
return sid
|
||||
|
||||
|
||||
def get_local_sid_prefix():
|
||||
return "S-1-5-21-0-0-0"
|
||||
|
||||
|
||||
def get_sid(domain, username, is_machine = False):
|
||||
'''
|
||||
Lookup SID not only using wbinfo or sssd but also using own cache
|
||||
'''
|
||||
sid = 'local-{}'.format(username)
|
||||
|
||||
# local user
|
||||
if not domain:
|
||||
found_uid = 0
|
||||
if not is_machine:
|
||||
found_uid = pwd.getpwnam(username).pw_uid
|
||||
return '{}-{}'.format(get_local_sid_prefix(), found_uid)
|
||||
|
||||
# domain user
|
||||
try:
|
||||
sid = wbinfo_getsid(domain, username)
|
||||
except:
|
||||
logdata = dict({'sid': sid})
|
||||
log('E16', logdata)
|
||||
|
||||
logdata = dict({'sid': sid})
|
||||
log('D21', logdata)
|
||||
|
||||
return sid
|
||||
|
||||
|
||||
def expand_windows_var(text, username=None):
|
||||
'''
|
||||
Scan the line for percent-encoded variables and expand them.
|
||||
'''
|
||||
variables = dict()
|
||||
variables['HOME'] = '/etc/skel'
|
||||
variables['HOMEPATH'] = '/etc/skel'
|
||||
variables['HOMEDRIVE'] = '/'
|
||||
variables['SystemRoot'] = '/'
|
||||
variables['StartMenuDir'] = '/usr/share/applications'
|
||||
variables['SystemDrive'] = '/'
|
||||
@@ -194,6 +159,7 @@ def expand_windows_var(text, username=None):
|
||||
if username:
|
||||
variables['LogonUser'] = username
|
||||
variables['HOME'] = get_homedir(username)
|
||||
variables['HOMEPATH'] = get_homedir(username)
|
||||
|
||||
variables['StartMenuDir'] = os.path.join(
|
||||
variables['HOME'], '.local', 'share', 'applications')
|
||||
|
@@ -18,7 +18,7 @@
|
||||
|
||||
|
||||
import os
|
||||
|
||||
from messages import message_with_code
|
||||
from .util import get_homedir
|
||||
from .logging import log
|
||||
|
||||
|
@@ -1,7 +1,7 @@
|
||||
%define _unpackaged_files_terminate_build 1
|
||||
|
||||
Name: gpupdate
|
||||
Version: 0.9.4
|
||||
Version: 0.9.11.2
|
||||
Release: alt1
|
||||
|
||||
Summary: GPT applier
|
||||
@@ -57,9 +57,18 @@ ln -s %python3_sitelibdir/gpoa/gpoa \
	%buildroot%_sbindir/gpoa
ln -s %python3_sitelibdir/gpoa/gpupdate \
	%buildroot%_bindir/gpupdate

ln -s %python3_sitelibdir/gpoa/gpupdate-setup \
	%buildroot%_sbindir/gpupdate-setup

mkdir -p \
	%buildroot%_prefix/libexec/%name

ln -s %python3_sitelibdir/gpoa/pkcon_runner \
	%buildroot%_prefix/libexec/%name/pkcon_runner
ln -s %python3_sitelibdir/gpoa/scripts_runner \
	%buildroot%_prefix/libexec/%name/scripts_runner

mkdir -p %buildroot%_datadir/%name
mv %buildroot%python3_sitelibdir/gpoa/templates \
	%buildroot%_datadir/%name/
@@ -68,22 +77,38 @@ mkdir -p %buildroot%_sysconfdir/%name
touch %buildroot%_sysconfdir/%name/environment

install -Dm0644 dist/%name.service %buildroot%_unitdir/%name.service
install -Dm0644 dist/%name.timer %buildroot%_unitdir/%name.timer
install -Dm0644 dist/%name-scripts-run.service %buildroot%_unitdir/%name-scripts-run.service
install -Dm0644 dist/%name-user.service %buildroot/usr/lib/systemd/user/%name-user.service
install -Dm0644 dist/%name-scripts-run-user.service %buildroot/usr/lib/systemd/user/%name-scripts-run-user.service
install -Dm0644 dist/%name-user.timer %buildroot/usr/lib/systemd/user/%name-user.timer
install -Dm0644 dist/system-policy-%name %buildroot%_sysconfdir/pam.d/system-policy-%name
install -Dm0644 dist/%name-remote-policy %buildroot%_sysconfdir/pam.d/%name-remote-policy
install -Dm0644 dist/%name.ini %buildroot%_sysconfdir/%name/%name.ini
install -Dm0644 doc/gpoa.1 %buildroot/%_man1dir/gpoa.1
install -Dm0644 doc/gpupdate.1 %buildroot/%_man1dir/gpupdate.1

for i in gpupdate-localusers \
	gpupdate-group-users \
	gpupdate-system-uids
do
	install -pD -m755 "dist/$i" \
		"%buildroot%_sysconfdir/control.d/facilities/$i"
done

%preun
%preun_service gpupdate

%post
%post_service gpupdate
if [ -x "/bin/systemctl" ]; then
	gpupdate-setup update
fi

# Remove storage in case we've lost compatibility between versions.
# The storage will be regenerated on GPOA start.
%define active_policy %_sysconfdir/local-policy/active
%triggerpostun -- %name < 0.8.0
%triggerpostun -- %name < 0.9.10
rm -f %_cachedir/%name/registry.sqlite
if test -L %active_policy; then
	sed -i "s|^\s*local-policy\s*=.*|local-policy = $(readlink -f %active_policy)|" \
@@ -94,19 +119,29 @@ fi
%_sbindir/gpoa
%_sbindir/gpupdate-setup
%_bindir/gpupdate
%_prefix/libexec/%name/scripts_runner
%_prefix/libexec/%name/pkcon_runner
%attr(755,root,root) %python3_sitelibdir/gpoa/gpoa
%attr(755,root,root) %python3_sitelibdir/gpoa/gpupdate
%attr(755,root,root) %python3_sitelibdir/gpoa/gpupdate-setup
%attr(755,root,root) %python3_sitelibdir/gpoa/scripts_runner
%attr(755,root,root) %python3_sitelibdir/gpoa/pkcon_runner
%python3_sitelibdir/gpoa
%_datadir/%name
%_unitdir/%name.service
%_unitdir/%name-scripts-run.service
%_unitdir/%name.timer
%_man1dir/gpoa.1.*
%_man1dir/gpupdate.1.*
/usr/lib/systemd/user/%name-user.service
/usr/lib/systemd/user/%name-user.timer
/usr/lib/systemd/user/%name-scripts-run-user.service
%dir %_sysconfdir/%name
%_sysconfdir/control.d/facilities/*
%config(noreplace) %_sysconfdir/%name/environment
%config(noreplace) %_sysconfdir/%name/%name.ini
%config(noreplace) %_sysconfdir/pam.d/system-policy-%name
%config(noreplace) %_sysconfdir/pam.d/%name-remote-policy
%dir %attr(0700, root, root) %_cachedir/%name
%dir %attr(0755, root, root) %_cachedir/%{name}_file_cache
%dir %attr(0700, root, root) %_cachedir/%name/creds
@@ -116,6 +151,59 @@ fi
%exclude %python3_sitelibdir/gpoa/test

%changelog
* Fri Sep 30 2022 Valery Sinelnikov <greh@altlinux.org> 0.9.11.2-alt1
- Fixed formation of the correct path for creating a user directory

* Tue Sep 27 2022 Valery Sinelnikov <greh@altlinux.org> 0.9.11.1-alt1
- Fixed merge for nodomain_backend
- Added support for complex types in chromium_applier

* Wed Sep 14 2022 Evgeny Sinelnikov <sin@altlinux.org> 0.9.11-alt1
- Add Chromium applier
- Update Firefox applier

* Fri Aug 26 2022 Valery Sinelnikov <greh@altlinux.org> 0.9.10-alt1
- INI-files preferences implementation
- Files preferences implementation
- Scripts (logon, logoff, startup, shutdown) implementation
- UserPolicyMode set accordingly
- Folder bugs fixed
- Firefox applier full release

* Thu Mar 03 2022 Valery Sinelnikov <greh@altlinux.org> 0.9.9.1-alt1
- Fixed method call (Closes: 41994)
- Removed unnecessary replace
- Fixed declaration of variable

* Fri Feb 18 2022 Evgeny Sinelnikov <sin@altlinux.org> 0.9.9-alt1
- Add gpupdate-remote-policy PAM substack (for pam_mount support)
- Added lookup for another possible DC if the first one found is unreadable
- Correct folder applier (still experimental)
- Update logging and translations
- Fix error when control facilities do not exist
- Add check for the presence of GSettings schemas and keys
- Add support of package applier via pkcon (still experimental)

* Mon Oct 25 2021 Evgeny Sinelnikov <sin@altlinux.org> 0.9.8-alt1
- Added exception for org.gnome.Vino authentication-methods
- Fixed bug for alternative-port in org.gnome.Vino

* Wed Sep 29 2021 Evgeny Sinelnikov <sin@altlinux.org> 0.9.7-alt1
- Fix regression with kdestroy for the user credential cache
- Update system-policy-gpupdate PAM rules to skip applying group policies
  for local users and system users with uid less than 500
- Add control facilities to tune the system-policy-gpupdate rules:
  + gpupdate-group-users
  + gpupdate-localusers
  + gpupdate-system-uids

* Mon Sep 20 2021 Evgeny Sinelnikov <sin@altlinux.org> 0.9.6-alt1
- Add support for the changed GPO List Processing '**DelVals.' value name

* Tue Sep 14 2021 Evgeny Sinelnikov <sin@altlinux.org> 0.9.5-alt1
- Refix local policy path detection
- gpupdate-setup: revert settings to default when disabled

* Tue Sep 14 2021 Evgeny Sinelnikov <sin@altlinux.org> 0.9.4-alt1
- Add improvement with new local-policy system-policy control
- Fix gpupdate-setup and user service installation regressions

tools/parsing_chrom_admx_intvalues.py (new executable file, 60 lines)
@@ -0,0 +1,60 @@
#!/usr/bin/python3

# Script for parsing the chrome.admx or the Yandex Browser admx file and
# extracting the policy keys whose values need to be treated as integers.

import sys
from xml.etree import ElementTree


def get_child(parent, desires: list, list_data_pol: list):
    # Walk down the tag path given in 'desires' and collect the 'value'
    # attribute of every <decimal> element reached along that path.
    if parent.tag == 'decimal':
        list_data_pol.append(parent.get('value'))
        return
    for child in parent:
        if child.tag == desires[0]:
            get_child(child, desires[1:], list_data_pol)


if __name__ == '__main__':
    try:
        try:
            xml_contents = ElementTree.iterparse(sys.argv[1])
        except:
            print('Enter the correct file path')
            sys.exit()

        # Ignore the XML file namespace
        for _, el in xml_contents:
            prefix, has_namespace, postfix = el.tag.partition('}')
            if has_namespace:
                el.tag = postfix

        xml_root = xml_contents.root
        pol_count = 0
        dict_policies = dict()
        for parent in xml_root:
            if parent.tag == 'policies':
                for child in parent:
                    pol_count += 1
                    dict_policies[child.get('name')] = list()
                    desires = ['elements', 'enum', 'item', 'value', 'decimal']
                    get_child(child, desires, dict_policies[child.get('name')])

        target_list = list()
        count = 0
        # Number of policy names that will be printed; used to put a comma
        # after every name except the last one.
        len_dict = len(set([key if val else None for key, val in dict_policies.items()])) - 1
        for key, value in dict_policies.items():
            if value:
                target_list.append(key)
                count += 1
                key_int = "'{}'".format(key)
                if len_dict > count:
                    key_int += ','
                else:
                    print(key_int, '\n\nkey_int:', count)
                    break
                print(key_int)

        print('total:', pol_count)

    except Exception as exc:
        print(exc)
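The least obvious step in the script is dropping the ADMX namespace so that plain tag names like 'policies' can be compared. A self-contained illustration of that trick; the XML snippet is assumed example data, not taken from chrome.admx:

    import io
    from xml.etree import ElementTree

    admx_sample = io.BytesIO(b'<?xml version="1.0"?>'
        b'<policyDefinitions xmlns="http://www.example.com/admx">'
        b'<policies><policy name="ExamplePolicy"/></policies>'
        b'</policyDefinitions>')

    parsed = ElementTree.iterparse(admx_sample)
    for _, el in parsed:
        prefix, has_namespace, postfix = el.tag.partition('}')
        if has_namespace:
            el.tag = postfix          # '{ns}policy' becomes plain 'policy'

    print(parsed.root.tag)            # policyDefinitions
    print(parsed.root[0].tag)         # policies

The script itself takes the path to the admx file as its only argument (sys.argv[1]) and prints the quoted policy names followed by a total policy count.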