mirror of
https://github.com/altlinux/gpupdate.git
synced 2025-11-07 00:23:53 +03:00
Compare commits
763 Commits
0.9.9.1-al
...
plugin-sys
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d509504f17 | ||
|
|
25b58966f4 | ||
|
|
95d6119028 | ||
|
|
08b7305b09 | ||
|
|
6e64d9a0e3 | ||
|
|
bbbc0b8289 | ||
|
|
2b38a3f33e | ||
|
|
db0fb15e4c | ||
|
|
6ae3427b97 | ||
|
|
998b6ce90c | ||
|
|
fef03a4997 | ||
|
|
ede592079d | ||
|
|
f2fd4521c5 | ||
|
|
5e3a1bf534 | ||
|
|
6dab83ae92 | ||
|
|
00b0765905 | ||
|
|
75bd036078 | ||
|
|
13d2a7cbce | ||
|
|
5dc7c7f3cb | ||
|
|
28a2a18962 | ||
|
|
cffe811805 | ||
|
|
d975cd2f10 | ||
|
|
cb9c70d6c1 | ||
|
|
99feb569a2 | ||
|
|
a37b895a27 | ||
|
|
cd1a2fc042 | ||
|
|
5e918900c6 | ||
|
|
326064996c | ||
|
|
8888943c06 | ||
|
|
5e52abdb5d | ||
|
|
b7f38fd1ee | ||
|
|
4a3c423a2d | ||
|
|
a6dfd91d9a | ||
|
|
f031799086 | ||
|
|
239ba4a34a | ||
|
|
6d58115221 | ||
|
|
bd5b543bfc | ||
|
|
07da90680e | ||
|
|
4eb1b18a5e | ||
|
|
f7418c35de | ||
|
|
1e4a8ecf62 | ||
|
|
c286263de6 | ||
|
|
b12967991c | ||
|
|
a8429b3ba7 | ||
|
|
03eb942f33 | ||
|
|
faaa7a0aba | ||
|
|
87f905333d | ||
|
|
d26cdbb2e7 | ||
|
|
8b996454e8 | ||
|
|
bf69072ce3 | ||
|
|
0511a89e35 | ||
|
|
92491d0a50 | ||
|
|
20cefd47e6 | ||
|
|
6dacded1c4 | ||
|
|
ba00f58b4f | ||
|
|
9147fcf228 | ||
|
|
f32bf47c9b | ||
|
|
5c5d7a5563 | ||
|
|
fae11aee96 | ||
|
|
b75d0cad25 | ||
|
|
f52bddab41 | ||
|
|
931ec5ecf0 | ||
|
|
898f24c30c | ||
|
|
8f375ff60d | ||
|
|
c21460cd20 | ||
|
|
79c12f8c89 | ||
|
|
f7e376c41f | ||
|
|
2da8fd8d54 | ||
|
|
838b709366 | ||
|
|
935af6d115 | ||
|
|
646308944e | ||
|
|
f28b85f696 | ||
|
|
357cd3b5b0 | ||
|
|
f130d93568 | ||
|
|
8d6beb60c5 | ||
|
|
015b30f4f8 | ||
|
|
65dc9ec6a0 | ||
|
|
c1bcd39a5a | ||
|
|
79f33343a8 | ||
|
|
70be9bee1e | ||
|
|
786530f1b8 | ||
|
|
078ba47c13 | ||
|
|
63e5ffc3f8 | ||
|
|
01d219cb8e | ||
|
|
6af54ff17d | ||
|
|
238d1f4784 | ||
|
|
b3253bd684 | ||
|
|
66b17be85b | ||
|
|
bea7fe9803 | ||
|
|
f36b362523 | ||
|
|
abfb756edb | ||
|
|
0578e21521 | ||
|
|
02bd6773aa | ||
|
|
927c3ceb2f | ||
|
|
a329f601f7 | ||
|
|
d4f12dacfa | ||
|
|
4d05358790 | ||
|
|
bc4bb96b03 | ||
|
|
5588be1daa | ||
|
|
bde48cbedf | ||
|
|
43d32c3882 | ||
|
|
0932d1da26 | ||
|
|
2a4375c6fb | ||
|
|
ba11149983 | ||
|
|
56008e7e1c | ||
|
|
9325c241ef | ||
|
|
3c0c722818 | ||
|
|
9424c2c8e8 | ||
|
|
9065352bb0 | ||
|
|
9034d4ba4c | ||
|
|
2e6c76337b | ||
|
|
a3398e0307 | ||
|
|
c7192773fd | ||
|
|
93bcac5f19 | ||
|
|
967687497c | ||
|
|
3797993209 | ||
|
|
04831c4dbd | ||
|
|
316c0881a9 | ||
|
|
22d0c87538 | ||
|
|
2c66ad9bc1 | ||
|
|
5fe0b6f418 | ||
|
|
829825060b | ||
|
|
463620ff25 | ||
|
|
ab632a8177 | ||
|
|
5c47ebb6c5 | ||
|
|
6a840674ca | ||
|
|
a6f6b021fa | ||
|
|
0f4066e0f0 | ||
|
|
030e69cb86 | ||
|
|
5f94fad90b | ||
|
|
156918ad3b | ||
|
|
6df5a5754f | ||
|
|
dda57ed179 | ||
|
|
99595c85d3 | ||
|
|
e25c5844a9 | ||
|
|
8e1a76552f | ||
|
|
1f6776912d | ||
|
|
3e889622b1 | ||
|
|
1c827d4533 | ||
|
|
ce660afcbd | ||
|
|
5b1a928291 | ||
|
|
a77a6e3c6f | ||
|
|
25a784fa2e | ||
|
|
6378c8c78b | ||
|
|
9ad7440c8b | ||
|
|
2a5642a76d | ||
| dbff83050b | |||
| ed1b2aa39e | |||
|
|
02701136c0 | ||
|
|
408d221c3d | ||
|
|
67a02a4623 | ||
| 7a0af6ab9b | |||
|
|
ce6e49443f | ||
|
|
433d312c0f | ||
|
|
2ec68dd95a | ||
|
|
3990f876a4 | ||
|
|
1f541914cd | ||
|
|
dc054008fd | ||
|
|
aa4bf9a7c8 | ||
|
|
99a6e85ccf | ||
|
|
79ef884f7d | ||
|
|
0abc5b0282 | ||
| dce52c4d9c | |||
|
|
4d5969a5fa | ||
|
|
3263a4cfd3 | ||
|
|
0685b9e492 | ||
|
|
7188c70a77 | ||
|
|
2edc5c326c | ||
|
|
39b92ce763 | ||
|
|
620010e1ab | ||
|
|
b87e8b218f | ||
|
|
df0f806035 | ||
|
|
7e8657939f | ||
|
|
a879d5ad52 | ||
|
|
c097769681 | ||
|
|
a85158ce3c | ||
|
|
f79b283574 | ||
|
|
b791f3d5eb | ||
|
|
b16460309a | ||
|
|
40cf97989e | ||
|
|
71eeb1d5a0 | ||
|
|
f45fc7092d | ||
|
|
e537b3846a | ||
|
|
64581f60d2 | ||
|
|
1436ee201e | ||
|
|
0051e001a8 | ||
|
|
d4eb4263fa | ||
|
|
a99ed2db2a | ||
|
|
8bc4375339 | ||
|
|
f24038b288 | ||
|
|
96ec5cc690 | ||
|
|
e88278fb47 | ||
|
|
4be89029aa | ||
|
|
b981744d75 | ||
|
|
760a1d8b90 | ||
|
|
cb035fd56e | ||
|
|
e56293e768 | ||
|
|
0c0f7d223b | ||
|
|
3c09737aa7 | ||
|
|
0027b5aa96 | ||
| df8984dd65 | |||
|
|
5f8c75e27c | ||
|
|
03b031734a | ||
| 77c0d60b7d | |||
| 51b744f94b | |||
| cdd9d84037 | |||
|
|
4de1946e32 | ||
|
|
73759857b3 | ||
|
|
b3e222ae55 | ||
|
|
8a2c9554f7 | ||
|
|
862b3b358b | ||
|
|
0d2c70da35 | ||
|
|
2953e4b0c6 | ||
|
|
c8585ac932 | ||
|
|
981d883ed0 | ||
|
|
3ddd9462ea | ||
|
|
ab79f169e8 | ||
| 5a3ba30910 | |||
|
|
d554b1fdf9 | ||
|
|
3960c4b094 | ||
|
|
5f178651f7 | ||
|
|
674e1d176b | ||
|
|
afe6ef04d4 | ||
|
|
fa98fef5a3 | ||
|
|
c6c34accff | ||
|
|
dba6a58c6a | ||
|
|
a02969c686 | ||
|
|
e040bbbd69 | ||
|
|
1775bfa08c | ||
|
|
165f4bfc83 | ||
|
|
316f5d1e49 | ||
|
|
150f3441fd | ||
|
|
769b520d47 | ||
|
|
517ed6d56b | ||
|
|
40635f9a01 | ||
|
|
2eb6e0c632 | ||
|
|
710b78b79f | ||
|
|
f308539a5a | ||
|
|
ca8cb9ce78 | ||
|
|
3c7d45cd52 | ||
|
|
6e77d54aa3 | ||
|
|
3c72786bd8 | ||
|
|
8a36e01fbb | ||
|
|
32cb959f0b | ||
|
|
3fb24dbd99 | ||
|
|
b737c9f0aa | ||
|
|
48d94ae046 | ||
|
|
4ed05cb481 | ||
|
|
cddc7d70fb | ||
|
|
64c305c544 | ||
|
|
4ee10c1560 | ||
|
|
5e5c5d45a6 | ||
|
|
56ee1334af | ||
|
|
de5ef65c16 | ||
|
|
453934621d | ||
|
|
2132c3676f | ||
|
|
e9adb9b298 | ||
|
|
3e3957d693 | ||
|
|
554147b57f | ||
|
|
6b632e851c | ||
|
|
3e99bfcb60 | ||
|
|
2c48b3a6a4 | ||
|
|
2e22d7abc9 | ||
|
|
e645fa4e86 | ||
|
|
cdcac9e4db | ||
|
|
d3a316c1c0 | ||
|
|
f081ec6454 | ||
|
|
60d6996db2 | ||
|
|
ea52e9671b | ||
|
|
92df692559 | ||
|
|
3b4f92997e | ||
|
|
98d02a4da0 | ||
|
|
eb951cbd5e | ||
|
|
9ce68f2acc | ||
|
|
54239c339c | ||
|
|
2b108e2029 | ||
|
|
2a21983b13 | ||
|
|
b6e84b3d9e | ||
|
|
bb314fb553 | ||
|
|
28718e8ad6 | ||
|
|
2857cfb899 | ||
| 8717e1b9a3 | |||
| d3c9b95331 | |||
|
|
4d6a5d750c | ||
|
|
84e1340362 | ||
| 5ee05df574 | |||
|
|
2a993f0400 | ||
|
|
b878b7e1b3 | ||
|
|
c57d1bac9e | ||
| b9b5239448 | |||
| aae2776790 | |||
|
|
a20aa841d6 | ||
|
|
8c7819d96f | ||
|
|
3d9473f979 | ||
|
|
01f48be853 | ||
|
|
1638098fd4 | ||
|
|
047e5459af | ||
|
|
5baa4245e3 | ||
| ec6b9f7887 | |||
| 22d0d23b89 | |||
| fd3a32e8e1 | |||
|
|
9e849e8fe3 | ||
|
|
d65f3ed942 | ||
|
|
31298be840 | ||
|
|
5c889fd57e | ||
|
|
4e2874c972 | ||
|
|
63e50ac2df | ||
|
|
ad2a87e20d | ||
|
|
e9c3a4262a | ||
|
|
b5706ec6e1 | ||
|
|
61e7350429 | ||
|
|
c9a274fc79 | ||
|
|
127c9f7183 | ||
|
|
a27f8ba5dd | ||
|
|
fafe2c34b4 | ||
|
|
9c91ddc7ba | ||
|
|
1f02ed650b | ||
|
|
fc47df4649 | ||
|
|
42b8bdb82a | ||
|
|
2a174edeef | ||
|
|
9b8529b39b | ||
|
|
062ff742c3 | ||
|
|
1764560c49 | ||
|
|
b439e04a2f | ||
|
|
e413f95633 | ||
|
|
675f37ab85 | ||
|
|
9932c682ef | ||
|
|
018b30cdc4 | ||
|
|
249eb69ade | ||
|
|
1ab8c7aee0 | ||
|
|
400a5fab7d | ||
|
|
e7851e88b3 | ||
|
|
0761637666 | ||
|
|
dda4d987cb | ||
|
|
609ec0e8b8 | ||
|
|
c0b28a0655 | ||
|
|
78aad11e06 | ||
|
|
59bebbc45e | ||
|
|
e92656add0 | ||
|
|
5d24579d2f | ||
|
|
ce284b61be | ||
|
|
7a8118ac63 | ||
|
|
18d8e73acd | ||
|
|
58235cb1a1 | ||
|
|
e0d88cc076 | ||
|
|
c8b0927090 | ||
|
|
a4a79d8c99 | ||
|
|
408609fa58 | ||
|
|
6efebfad89 | ||
| 12865b0b43 | |||
| 9117dddcee | |||
| 1e267f5cb6 | |||
| 62ed015ea9 | |||
| 3e6b7cd040 | |||
| 209eb84d6d | |||
| 7f3b47a23c | |||
| 08ba87c8d8 | |||
| f2a45a2a6d | |||
| 9c544adc94 | |||
| a225c9aa7f | |||
| 51c8711da6 | |||
| 54eb4188a7 | |||
|
|
89d5e36d6c | ||
|
|
6cd5ab4ee2 | ||
|
|
0c913c68e3 | ||
|
|
12d746a1dc | ||
|
|
0a25f3a1d6 | ||
|
|
1eaab893c8 | ||
|
|
05ea872831 | ||
|
|
d0506dba29 | ||
|
|
dd28587b20 | ||
|
|
1a288c84f5 | ||
|
|
cadc3eda52 | ||
|
|
8d3e6691d4 | ||
|
|
cb54fa5d78 | ||
|
|
53ffc072f0 | ||
|
|
7a59bcb65b | ||
|
|
b81a727cd4 | ||
|
|
11b33dd148 | ||
|
|
1ccc18a31f | ||
|
|
9a3afeebdf | ||
|
|
0720471cca | ||
|
|
dd43ddaad6 | ||
|
|
6fc059aaac | ||
|
|
8cfb6f0bb3 | ||
|
|
ddcdc322f8 | ||
|
|
4ee52f06d6 | ||
|
|
603efc2deb | ||
|
|
9fc5007590 | ||
|
|
a6210f8b29 | ||
|
|
175f244a5f | ||
|
|
0d4ce533bc | ||
|
|
8e22235df2 | ||
|
|
0519d2703c | ||
|
|
1ca9b006e1 | ||
|
|
8cc5a8904b | ||
|
|
70cdef2e71 | ||
|
|
3baffeb12d | ||
|
|
a0d9dc585f | ||
|
|
388125415b | ||
|
|
14c7e5db21 | ||
|
|
582a85df88 | ||
|
|
18ddc54626 | ||
|
|
6bad9a331d | ||
|
|
16b5747620 | ||
|
|
47015ec312 | ||
|
|
666c88bdf1 | ||
|
|
bd5262353b | ||
|
|
e1d5712b83 | ||
|
|
bcb9108424 | ||
|
|
82bb88ca34 | ||
|
|
518685f361 | ||
|
|
39e3d15fa8 | ||
|
|
7a755bbb3e | ||
|
|
41260df1a1 | ||
|
|
0d1b60158a | ||
|
|
b244df8f2d | ||
|
|
e48ca4fc8e | ||
|
|
82d52d1c9f | ||
|
|
e6a51d02fb | ||
|
|
28e2d9c94b | ||
|
|
60137feed0 | ||
|
|
a86c49e471 | ||
|
|
8c5d0bbb06 | ||
|
|
c26fbf8042 | ||
|
|
83e70d5e7a | ||
|
|
c383b8df9b | ||
|
|
fc810c3362 | ||
|
|
7e225c837a | ||
|
|
b053544512 | ||
|
|
9b4527d334 | ||
|
|
3794ffa5be | ||
|
|
fe68f0cca8 | ||
|
|
d83cf4d29d | ||
|
|
47dc1df796 | ||
|
|
5d2fb3f719 | ||
|
|
3fded83c75 | ||
|
|
aeab315c3d | ||
|
|
446fa532db | ||
|
|
ac2190809a | ||
|
|
66bae5a1af | ||
|
|
4f41c64c98 | ||
|
|
729f916646 | ||
|
|
1b150e21c7 | ||
| 459993d133 | |||
| 7ee065309b | |||
| 22c4f97a15 | |||
|
|
e62b366cf2 | ||
|
|
fbdd8cc79a | ||
|
|
8fddb3494a | ||
|
|
4b3e621650 | ||
|
|
4a2842b872 | ||
|
|
682797fb90 | ||
|
|
12bd7a5b51 | ||
|
|
0674340f74 | ||
|
|
5486bcfcef | ||
|
|
d935557c4c | ||
|
|
c6b6cdfff3 | ||
| 2d7144c1b4 | |||
|
|
4cca8b241a | ||
|
|
a50f8c0d04 | ||
|
|
8c4ce9f8a6 | ||
|
|
bb1183c471 | ||
| db74303e73 | |||
|
|
ced9d35ec4 | ||
|
|
d84b754292 | ||
|
|
7507c558ba | ||
| 9fb411c2e2 | |||
|
|
b8dc00443f | ||
| 179b16baa4 | |||
| 209e4e3128 | |||
| 2fb59a1b7c | |||
| d82cfcfe89 | |||
| 220313a1fb | |||
|
|
38378440ff | ||
| debe48c06b | |||
| b84715cfe4 | |||
| abad246ab2 | |||
|
|
5bc8309abd | ||
|
|
a18e1a6cce | ||
| 8420f50f9c | |||
| 07662349ca | |||
|
|
a1281d3ac0 | ||
|
|
5c0fc9bed0 | ||
|
|
78815c5ecd | ||
|
|
7a0571278f | ||
|
|
7e666043be | ||
|
|
e733c346b3 | ||
|
|
7e26d8397c | ||
|
|
b0d3ab2384 | ||
| d744cf8f6e | |||
| 443b410dfa | |||
|
|
721c66b20d | ||
| 9fbe8f76be | |||
| 3c95c0c84b | |||
|
|
ed42f3cf6a | ||
| 5dabd2c259 | |||
| 1f32d4efae | |||
|
|
5c809a2d5a | ||
|
|
bec19cf69e | ||
|
|
583b47ae7c | ||
|
|
264cedd342 | ||
|
|
de6db7ad2b | ||
| 17c8aef19f | |||
| e402d399e9 | |||
| 5258880419 | |||
| 3fd6d9558e | |||
| d26290a720 | |||
| f1800a834f | |||
| 93806b342d | |||
| 17ea444bcb | |||
| fc0495abd0 | |||
| c9da82376a | |||
| ae9ced2794 | |||
| 6c231c8b4d | |||
| 6461aa6836 | |||
| 5eeba1e73a | |||
| ca4399b9b5 | |||
| 377aa07b9f | |||
| 38d1f0e571 | |||
|
|
04651494be | ||
|
|
4c7e69f7f6 | ||
|
|
51f4b3aa18 | ||
|
|
beb555bdf2 | ||
|
|
bb55c38e21 | ||
|
|
5df3c6f468 | ||
|
|
7edaa4afe7 | ||
|
|
486e035649 | ||
|
|
51bd701b2d | ||
|
|
de0635952f | ||
|
|
21b4ced721 | ||
|
|
2567bb9c45 | ||
|
|
a4db4d9cd0 | ||
|
|
8cdc84aef6 | ||
|
|
8b82278934 | ||
|
|
4b4adbf3e1 | ||
|
|
0e6c3bb6aa | ||
|
|
fa315bb599 | ||
|
|
d54cd790b1 | ||
|
|
c729b8a6d6 | ||
|
|
142d6eda50 | ||
|
|
ae8dd798ab | ||
|
|
8121eb8d6f | ||
|
|
be15051ba5 | ||
|
|
7f7a154e1b | ||
|
|
72c34a7475 | ||
|
|
abc3a3f609 | ||
|
|
ce2d1c6e05 | ||
|
|
58cff92891 | ||
|
|
6bcd916203 | ||
|
|
c924adc4b0 | ||
|
|
9e1760ae9d | ||
|
|
1a90996259 | ||
|
|
11768248e4 | ||
|
|
34d7124a46 | ||
|
|
c5c80b9091 | ||
|
|
1b3d046d05 | ||
|
|
5c2e4fe356 | ||
|
|
ff5645ef73 | ||
|
|
3fb3f2e857 | ||
|
|
f75c79cbeb | ||
|
|
43c8031da5 | ||
|
|
4f1c2f288e | ||
|
|
26908178d3 | ||
|
|
fe63894ad8 | ||
|
|
1bf898f1d0 | ||
|
|
2c71b5e53a | ||
|
|
601e8b1072 | ||
|
|
2c15d1cea0 | ||
|
|
52fc6ea4de | ||
|
|
3621e80055 | ||
|
|
d9191e47fa | ||
|
|
87d873862a | ||
|
|
9dc833a970 | ||
|
|
45bf77a64a | ||
|
|
5be7cc14b0 | ||
|
|
1f0e417ff1 | ||
|
|
1d31c72946 | ||
|
|
eb7538249f | ||
|
|
0dacf2f657 | ||
|
|
13f1529306 | ||
|
|
3b2d0c0af2 | ||
|
|
aea8f6ed0a | ||
|
|
322f28baa7 | ||
|
|
3860bf6b74 | ||
|
|
abcc660118 | ||
|
|
b7e61e4ab8 | ||
|
|
ca50d7f73b | ||
|
|
d9f3bd3b8c | ||
|
|
b4e50c2ef8 | ||
|
|
e46d717af8 | ||
|
|
83c0395ee4 | ||
|
|
eef4823e56 | ||
|
|
4100edcacf | ||
|
|
89e72eeaff | ||
|
|
ce54bae087 | ||
|
|
bbbde0c46a | ||
|
|
a43f47abd4 | ||
|
|
60ab746ce3 | ||
|
|
418d182726 | ||
|
|
ccb3dd53a8 | ||
|
|
bb0beb4a92 | ||
|
|
dda3ca452b | ||
|
|
0d54a2a0c8 | ||
|
|
c1a4e67ba3 | ||
|
|
b10dde3b21 | ||
|
|
c7b632fbb8 | ||
|
|
a00366650a | ||
|
|
a10beac915 | ||
|
|
d409d68052 | ||
|
|
5fdefaecc0 | ||
|
|
0e3d3598f1 | ||
|
|
556a8f833c | ||
|
|
a17dd4a9b4 | ||
|
|
681c4828a6 | ||
|
|
e670c03026 | ||
|
|
5bd64352f1 | ||
|
|
56b7186c15 | ||
|
|
249d3a6caa | ||
|
|
7b6cb64d58 | ||
|
|
da71aaf0dd | ||
|
|
d35dd5433d | ||
|
|
cb6bc1f280 | ||
|
|
3d79315470 | ||
|
|
077d67c417 | ||
|
|
77b6ffb81a | ||
|
|
e4a41e9d07 | ||
|
|
0460f64b47 | ||
|
|
477a99c703 | ||
|
|
385e9ae02f | ||
|
|
18a7426863 | ||
|
|
3f2176659a | ||
|
|
72e756c778 | ||
|
|
bb340112d5 | ||
|
|
fe4a5fa78c | ||
|
|
88efbfe3e3 | ||
|
|
edbdaccb71 | ||
|
|
b9c2b91add | ||
|
|
f289584044 | ||
|
|
88773f4e99 | ||
|
|
cd71ac4c81 | ||
|
|
e08546ad2f | ||
|
|
4c59c4ba7c | ||
|
|
6e1898ca27 | ||
|
|
3cab21e9c3 | ||
|
|
6b0cfbe2b5 | ||
|
|
bc1676dc71 | ||
|
|
5a60253dac | ||
|
|
e14043174a | ||
|
|
bfc05fee36 | ||
|
|
762fc4e525 | ||
|
|
66008b8a37 | ||
|
|
ffc3bc46c1 | ||
|
|
11abedd7b6 | ||
|
|
a01609afc3 | ||
| f1a415bdae | |||
|
|
5d1cf84304 | ||
|
|
3c3147c2fc | ||
|
|
e62739a43b | ||
|
|
727d7e073f | ||
|
|
a13373cf92 | ||
|
|
1c0678957c | ||
|
|
bdf9300be4 | ||
|
|
19acaad7e1 | ||
|
|
6b1aa004c4 | ||
|
|
d3740a106c | ||
|
|
9be2604be9 | ||
|
|
a35e578cf4 | ||
|
|
52eaea95c6 | ||
|
|
8f65f79c6c | ||
|
|
e50c5d7883 | ||
|
|
549315fe48 | ||
|
|
91824acdab | ||
|
|
cd25431bb8 | ||
|
|
10b9fa0ff1 | ||
|
|
44585adddd | ||
|
|
d3213b4d0b | ||
|
|
80e9dba4c4 | ||
|
|
1ed5c0f043 | ||
|
|
f801c09737 | ||
|
|
fd17b19f33 | ||
|
|
f1e22e0cc5 | ||
|
|
efc9dac26e | ||
|
|
e085c10bb3 | ||
|
|
5b08fcd917 | ||
|
|
408bccb76d | ||
|
|
3f32d3bbda | ||
|
|
fa707104b7 | ||
|
|
69ac2abf8b | ||
|
|
3a8af98231 | ||
|
|
41242561e1 | ||
|
|
97e5418666 | ||
|
|
5015da40b7 | ||
|
|
cdfc39540f | ||
|
|
95af821475 | ||
|
|
b63fe63784 | ||
|
|
889bf5124a | ||
|
|
2da7758621 | ||
|
|
cb720084fa | ||
|
|
baba56465c | ||
|
|
020e5f3128 | ||
|
|
f07f752211 | ||
|
|
31bcb2cd2a | ||
|
|
cc80d8c74a | ||
|
|
931aaf9300 | ||
|
|
7ade31de8a | ||
|
|
86d02146e2 | ||
|
|
cf979596b3 | ||
|
|
9a74efefde | ||
|
|
73404ceced | ||
|
|
23be105462 | ||
|
|
03977710a4 | ||
|
|
d76c0a9a00 | ||
|
|
a01d5253dc | ||
|
|
403432ecd2 | ||
|
|
47a3c6b39c | ||
|
|
51c218eb7a | ||
|
|
1513eab336 | ||
|
|
4701847d1b | ||
|
|
1486084594 | ||
|
|
70bc4faea3 | ||
|
|
6283d72ccc | ||
|
|
c795a8323e | ||
|
|
3187b9f0f1 | ||
|
|
0e159d34d0 | ||
|
|
4327f0b17b | ||
|
|
d1169eaeef | ||
|
|
c00e2d7f09 | ||
|
|
fdcbda576b | ||
|
|
43161e61bc | ||
|
|
31ba4ad214 | ||
|
|
877ce7b2aa | ||
|
|
856eecf708 | ||
|
|
b869573f31 | ||
|
|
f01bf08a95 | ||
|
|
5ae9031cda | ||
|
|
1aed44454c | ||
|
|
474378d17d | ||
|
|
1e8a6c61c6 | ||
|
|
925947765d | ||
|
|
8f8b7045b7 | ||
|
|
d6c438f277 | ||
|
|
326dc0600b | ||
|
|
5dd3ca17e8 | ||
|
|
fc650b125a | ||
|
|
dc9479fbbe | ||
|
|
c71356211f | ||
|
|
7ab98ffa6e | ||
|
|
3519be7bc6 | ||
|
|
7926137e84 | ||
|
|
d5ecd040df | ||
|
|
be0603e809 | ||
|
|
3ff6f053ea | ||
|
|
1b95a20cad | ||
|
|
818f5919fe | ||
|
|
8765ef862b | ||
|
|
66ebe87592 | ||
|
|
bced76ac4d | ||
|
|
4ddea369c5 | ||
|
|
6ac15e6be2 | ||
|
|
487483fb6f | ||
|
|
20e4a77ff7 | ||
|
|
22cff21d3a |
332
PLUGIN_DEVELOPMENT_GUIDE.md
Normal file
332
PLUGIN_DEVELOPMENT_GUIDE.md
Normal file
@@ -0,0 +1,332 @@
|
||||
# GPOA Plugin Development Guide
|
||||
|
||||
## Introduction
|
||||
|
||||
GPOA (GPO Applier for Linux) supports a plugin system for extending group policy application functionality.
|
||||
Plugins allow adding support for new policy types and system settings without modifying the core code.
|
||||
|
||||
## Plugin Architecture
|
||||
|
||||
### Base Classes
|
||||
|
||||
- **`plugin`** - Abstract base class with final methods `apply()` and `apply_user()`
|
||||
- **`FrontendPlugin`** - Simplified class for plugins with logging support
|
||||
|
||||
### Plugin Manager
|
||||
|
||||
- **`plugin_manager`** - Loads and executes plugins from directories:
|
||||
- `/usr/lib/gpupdate/plugins/` - system plugins
|
||||
- `gpoa/frontend_plugins/` - development plugins
|
||||
|
||||
## Creating a Simple Plugin
|
||||
|
||||
### Example: Basic Plugin with Logging
|
||||
|
||||
```python
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
from gpoa.plugin.plugin_base import FrontendPlugin
|
||||
|
||||
|
||||
class ExampleApplier(FrontendPlugin):
|
||||
"""
|
||||
Example simple plugin with logging and registry access.
|
||||
"""
|
||||
|
||||
# Domain for translations
|
||||
domain = 'example_applier'
|
||||
|
||||
def __init__(self, dict_dconf_db, username=None, fs_file_cache=None):
|
||||
"""
|
||||
Initialize the plugin.
|
||||
|
||||
Args:
|
||||
dict_dconf_db (dict): Dictionary with registry data
|
||||
username (str): Username
|
||||
fs_file_cache: File system cache
|
||||
"""
|
||||
super().__init__(dict_dconf_db, username, fs_file_cache)
|
||||
|
||||
# Initialize logging system
|
||||
self._init_plugin_log(
|
||||
message_dict={
|
||||
'i': { # Informational messages
|
||||
1: "Example Applier initialized",
|
||||
2: "Configuration applied successfully"
|
||||
},
|
||||
'w': { # Warnings
|
||||
10: "No configuration found in registry"
|
||||
},
|
||||
'e': { # Errors
|
||||
20: "Failed to apply configuration"
|
||||
}
|
||||
},
|
||||
domain="example_applier"
|
||||
)
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Main plugin execution method.
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False on error
|
||||
"""
|
||||
try:
|
||||
self.log("I1") # Plugin initialized
|
||||
|
||||
# Get data from registry
|
||||
self.config = self.get_dict_registry('Software/BaseALT/Policies/Example')
|
||||
|
||||
if not self.config:
|
||||
self.log("W10") # No configuration found in registry
|
||||
return True
|
||||
|
||||
# Log registry data
|
||||
self.log("I2") # Configuration applied successfully
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
self.log("E20", {"error": str(e)})
|
||||
return False
|
||||
|
||||
|
||||
def create_machine_applier(dict_dconf_db, username=None, fs_file_cache=None):
|
||||
"""
|
||||
Factory function for creating plugin instance for machine context.
|
||||
|
||||
Args:
|
||||
dict_dconf_db (dict): Dictionary with registry data
|
||||
username (str): Username
|
||||
fs_file_cache: File system cache
|
||||
|
||||
Returns:
|
||||
ExampleApplier: Plugin instance
|
||||
"""
|
||||
return ExampleApplier(dict_dconf_db, username, fs_file_cache)
|
||||
|
||||
|
||||
def create_user_applier(dict_dconf_db, username=None, fs_file_cache=None):
|
||||
"""
|
||||
Factory function for creating plugin instance for user context.
|
||||
|
||||
Args:
|
||||
dict_dconf_db (dict): Dictionary with registry data
|
||||
username (str): Username
|
||||
fs_file_cache: File system cache
|
||||
|
||||
Returns:
|
||||
ExampleApplier: Plugin instance
|
||||
"""
|
||||
return ExampleApplier(dict_dconf_db, username, fs_file_cache)
|
||||
```
|
||||
|
||||
## Key Plugin Elements
|
||||
|
||||
### 1. Log Registration
|
||||
|
||||
Plugins use a logging system with message codes:
|
||||
|
||||
```python
|
||||
self._init_plugin_log(
|
||||
message_dict={
|
||||
'i': { # Informational messages
|
||||
1: "Example Applier initialized",
|
||||
2: "Configuration applied successfully"
|
||||
},
|
||||
'w': { # Warnings
|
||||
10: "No configuration found in registry"
|
||||
},
|
||||
'e': { # Errors
|
||||
20: "Failed to apply configuration"
|
||||
}
|
||||
},
|
||||
domain="example_applier"
|
||||
)
|
||||
```
|
||||
|
||||
### 2. Registry Access
|
||||
|
||||
Access registry data through `get_dict_registry()` method:
|
||||
|
||||
```python
|
||||
self.config = self.get_dict_registry('Software/BaseALT/Policies/Example')
|
||||
```
|
||||
|
||||
### 3. Logging in run Method
|
||||
|
||||
Using registered message codes:
|
||||
|
||||
```python
|
||||
self.log("I1") # Simple message
|
||||
self.log("E20", {"error": str(e)}) # Message with data
|
||||
```
|
||||
|
||||
### 4. Factory Functions
|
||||
|
||||
Plugins must provide factory functions:
|
||||
|
||||
- `create_machine_applier()` - for machine context
|
||||
- `create_user_applier()` - for user context
|
||||
|
||||
## Translation System
|
||||
|
||||
### Localization Support
|
||||
|
||||
GPOA supports automatic localization of plugin messages. The system uses standard GNU gettext.
|
||||
|
||||
### Translation File Structure
|
||||
|
||||
```
|
||||
gpoa/locale/
|
||||
├── ru/
|
||||
│ └── LC_MESSAGES/
|
||||
│ ├── gpoa.mo
|
||||
│ └── gpoa.po
|
||||
└── en/
|
||||
└── LC_MESSAGES/
|
||||
├── gpoa.mo
|
||||
└── gpoa.po
|
||||
```
|
||||
|
||||
### Setting Up Translations in Plugin
|
||||
|
||||
1. **Define translation domain**:
|
||||
```python
|
||||
class MyPlugin(FrontendPlugin):
|
||||
domain = 'my_plugin' # Domain for translation files
|
||||
```
|
||||
|
||||
2. **Initialize logger with translation support**:
|
||||
```python
|
||||
self._init_plugin_log(
|
||||
message_dict={
|
||||
'i': {
|
||||
1: "Plugin initialized",
|
||||
2: "Configuration applied successfully"
|
||||
},
|
||||
'e': {
|
||||
10: "Configuration error"
|
||||
}
|
||||
},
|
||||
domain="my_plugin" # Domain for translation file lookup
|
||||
)
|
||||
```
|
||||
|
||||
3. **Usage in code**:
|
||||
```python
|
||||
# Messages are automatically translated when logged
|
||||
self.log("I1") # Will be displayed in system language
|
||||
```
|
||||
|
||||
### Creating Translation Files
|
||||
|
||||
1. **Extract strings for translation**:
|
||||
```bash
|
||||
# Extract strings from plugin code
|
||||
xgettext -d my_plugin -o my_plugin.po my_plugin.py
|
||||
```
|
||||
|
||||
2. **Create translation file**:
|
||||
```po
|
||||
# my_plugin.po
|
||||
msgid "Plugin initialized"
|
||||
msgstr ""
|
||||
|
||||
msgid "Configuration applied successfully"
|
||||
msgstr ""
|
||||
```
|
||||
|
||||
3. **Compile translations**:
|
||||
```bash
|
||||
# Compile .po to .mo
|
||||
msgfmt my_plugin.po -o my_plugin.mo
|
||||
|
||||
# Place in correct directory
|
||||
mkdir -p /usr/share/locale/ru/LC_MESSAGES/
|
||||
cp my_plugin.mo /usr/share/locale/ru/LC_MESSAGES/
|
||||
```
|
||||
|
||||
### Best Practices for Translations
|
||||
|
||||
1. **Use complete sentences** - don't split strings into parts
|
||||
2. **Avoid string concatenation** - this complicates translation
|
||||
3. **Provide context** - add comments for translators
|
||||
4. **Test translations** - verify display in different languages
|
||||
5. **Update translations** - update .po files when messages change
|
||||
|
||||
### Example Plugin Structure with Translations
|
||||
|
||||
```
|
||||
my_plugin/
|
||||
├── my_plugin.py # Main plugin code
|
||||
├── locale/
|
||||
│ ├── ru/
|
||||
│ │ └── LC_MESSAGES/
|
||||
│ │ ├── my_plugin.mo
|
||||
│ │ └── my_plugin.po
|
||||
│ └── en/
|
||||
│ └── LC_MESSAGES/
|
||||
│ ├── my_plugin.mo
|
||||
│ └── my_plugin.po
|
||||
└── README.md
|
||||
```
|
||||
|
||||
## Plugin API
|
||||
|
||||
### Core Methods
|
||||
|
||||
- **`__init__(dict_dconf_db, username=None, fs_file_cache=None)`** - initialization
|
||||
- **`run()`** - main execution method (abstract)
|
||||
- **`apply()`** - execute with current privileges (final)
|
||||
- **`apply_user(username)`** - execute with user privileges (final)
|
||||
- **`get_dict_registry(prefix='')`** - get registry data
|
||||
- **`_init_plugin_log(message_dict=None, locale_dir=None, domain=None)`** - initialize logger
|
||||
- **`log(message_code, data=None)`** - logging with message codes
|
||||
|
||||
### Logging System
|
||||
|
||||
Message codes:
|
||||
- **I** - Informational messages
|
||||
- **W** - Warnings
|
||||
- **E** - Errors
|
||||
- **D** - Debug messages
|
||||
- **F** - Fatal errors
|
||||
|
||||
### Data Access
|
||||
|
||||
- **`dict_dconf_db`** - dictionary with registry data
|
||||
- **`username`** - username (for user context)
|
||||
- **`fs_file_cache`** - file system cache for file operations
|
||||
|
||||
## Execution Contexts
|
||||
|
||||
### Machine Context
|
||||
|
||||
- Executed with root privileges
|
||||
- Applies system-wide settings
|
||||
- Uses factory function `create_machine_applier()`
|
||||
|
||||
### User Context
|
||||
|
||||
- Executed with specified user privileges
|
||||
- Applies user-specific settings
|
||||
- Uses factory function `create_user_applier()`
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Security**: Always validate input data
|
||||
2. **Idempotence**: Repeated execution should produce the same result
|
||||
3. **Logging**: Use message codes for all operations
|
||||
4. **Error Handling**: Plugin should not crash on errors
|
||||
5. **Transactional**: Changes should be atomic
|
||||
6. **Translations**: Support message localization
|
||||
332
PLUGIN_DEVELOPMENT_GUIDE_RU.md
Normal file
332
PLUGIN_DEVELOPMENT_GUIDE_RU.md
Normal file
@@ -0,0 +1,332 @@
|
||||
# Руководство по разработке плагинов GPOA
|
||||
|
||||
## Введение
|
||||
|
||||
GPOA (GPO Applier for Linux) поддерживает систему плагинов для расширения функциональности применения групповых политик.
|
||||
Плагины позволяют добавлять поддержку новых типов политик и системных настроек без изменения основного кода.
|
||||
|
||||
## Архитектура плагинов
|
||||
|
||||
### Базовые классы
|
||||
|
||||
- **`plugin`** - Абстрактный базовый класс с финальными методами `apply()` и `apply_user()`
|
||||
- **`FrontendPlugin`** - Упрощенный класс для плагинов с поддержкой логирования
|
||||
|
||||
### Менеджер плагинов
|
||||
|
||||
- **`plugin_manager`** - Загружает и выполняет плагины из директорий:
|
||||
- `/usr/lib/gpupdate/plugins/` - системные плагины
|
||||
- `gpoa/frontend_plugins/` - плагины разработки
|
||||
|
||||
## Создание простого плагина
|
||||
|
||||
### Пример: Базовый плагин с логированием
|
||||
|
||||
```python
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
from gpoa.plugin.plugin_base import FrontendPlugin
|
||||
|
||||
|
||||
class ExampleApplier(FrontendPlugin):
|
||||
"""
|
||||
Пример простого плагина с логированием и работой с реестром.
|
||||
"""
|
||||
|
||||
# Домен для переводов
|
||||
domain = 'example_applier'
|
||||
|
||||
def __init__(self, dict_dconf_db, username=None, fs_file_cache=None):
|
||||
"""
|
||||
Инициализация плагина.
|
||||
|
||||
Args:
|
||||
dict_dconf_db (dict): Словарь с данными из реестра
|
||||
username (str): Имя пользователя
|
||||
fs_file_cache: Кэш файловой системы
|
||||
"""
|
||||
super().__init__(dict_dconf_db, username, fs_file_cache)
|
||||
|
||||
# Инициализация системы логирования
|
||||
self._init_plugin_log(
|
||||
message_dict={
|
||||
'i': { # Информационные сообщения
|
||||
1: "Example Applier initialized",
|
||||
2: "Configuration applied successfully"
|
||||
},
|
||||
'w': { # Предупреждения
|
||||
10: "No configuration found in registry"
|
||||
},
|
||||
'e': { # Ошибки
|
||||
20: "Failed to apply configuration"
|
||||
}
|
||||
},
|
||||
domain="example_applier"
|
||||
)
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Основной метод выполнения плагина.
|
||||
|
||||
Returns:
|
||||
bool: True если успешно, False при ошибке
|
||||
"""
|
||||
try:
|
||||
self.log("I1") # Плагин инициализирован
|
||||
|
||||
# Получение данных из реестра
|
||||
self.config = self.get_dict_registry('Software/BaseALT/Policies/Example')
|
||||
|
||||
if not self.config:
|
||||
self.log("W10") # Конфигурация не найдена в реестре
|
||||
return True
|
||||
|
||||
# Логирование данных из реестра
|
||||
self.log("I2") # Конфигурация успешно применена
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
self.log("E20", {"error": str(e)})
|
||||
return False
|
||||
|
||||
|
||||
def create_machine_applier(dict_dconf_db, username=None, fs_file_cache=None):
|
||||
"""
|
||||
Фабричная функция для создания экземпляра плагина для машинного контекста.
|
||||
|
||||
Args:
|
||||
dict_dconf_db (dict): Словарь с данными из реестра
|
||||
username (str): Имя пользователя
|
||||
fs_file_cache: Кэш файловой системы
|
||||
|
||||
Returns:
|
||||
ExampleApplier: Экземпляр плагина
|
||||
"""
|
||||
return ExampleApplier(dict_dconf_db, username, fs_file_cache)
|
||||
|
||||
|
||||
def create_user_applier(dict_dconf_db, username=None, fs_file_cache=None):
|
||||
"""
|
||||
Фабричная функция для создания экземпляра плагина для пользовательского контекста.
|
||||
|
||||
Args:
|
||||
dict_dconf_db (dict): Словарь с данными из реестра
|
||||
username (str): Имя пользователя
|
||||
fs_file_cache: Кэш файловой системы
|
||||
|
||||
Returns:
|
||||
ExampleApplier: Экземпляр плагина
|
||||
"""
|
||||
return ExampleApplier(dict_dconf_db, username, fs_file_cache)
|
||||
```
|
||||
|
||||
## Ключевые элементы плагина
|
||||
|
||||
### 1. Регистрация логов
|
||||
|
||||
Плагины используют систему логирования с кодами сообщений:
|
||||
|
||||
```python
|
||||
self._init_plugin_log(
|
||||
message_dict={
|
||||
'i': { # Информационные сообщения
|
||||
1: "Example Applier initialized",
|
||||
2: "Configuration applied successfully"
|
||||
},
|
||||
'w': { # Предупреждения
|
||||
10: "No configuration found in registry"
|
||||
},
|
||||
'e': { # Ошибки
|
||||
20: "Failed to apply configuration"
|
||||
}
|
||||
},
|
||||
domain="example_applier"
|
||||
)
|
||||
```
|
||||
|
||||
### 2. Работа с реестром
|
||||
|
||||
Доступ к данным из реестра через метод `get_dict_registry()`:
|
||||
|
||||
```python
|
||||
self.config = self.get_dict_registry('Software/BaseALT/Policies/Example')
|
||||
```
|
||||
|
||||
### 3. Вывод логов в методе run
|
||||
|
||||
Использование зарегистрированных кодов сообщений:
|
||||
|
||||
```python
|
||||
self.log("I1") # Простое сообщение
|
||||
self.log("E20", {"error": str(e)}) # Сообщение с данными
|
||||
```
|
||||
|
||||
### 4. Фабричные функции
|
||||
|
||||
Плагины должны предоставлять фабричные функции:
|
||||
|
||||
- `create_machine_applier()` - для машинного контекста
|
||||
- `create_user_applier()` - для пользовательского контекста
|
||||
|
||||
## Система переводов
|
||||
|
||||
### Поддержка локализации
|
||||
|
||||
GPOA поддерживает автоматическую локализацию сообщений плагинов. Система использует стандарт GNU gettext.
|
||||
|
||||
### Структура файлов переводов
|
||||
|
||||
```
|
||||
gpoa/locale/
|
||||
├── ru/
|
||||
│ └── LC_MESSAGES/
|
||||
│ ├── gpoa.mo
|
||||
│ └── gpoa.po
|
||||
└── en/
|
||||
└── LC_MESSAGES/
|
||||
├── gpoa.mo
|
||||
└── gpoa.po
|
||||
```
|
||||
|
||||
### Настройка переводов в плагине
|
||||
|
||||
1. **Определение домена переводов**:
|
||||
```python
|
||||
class MyPlugin(FrontendPlugin):
|
||||
domain = 'my_plugin' # Домен для файлов перевода
|
||||
```
|
||||
|
||||
2. **Инициализация логгера с поддержкой переводов**:
|
||||
```python
|
||||
self._init_plugin_log(
|
||||
message_dict={
|
||||
'i': {
|
||||
1: "Plugin initialized",
|
||||
2: "Configuration applied successfully"
|
||||
},
|
||||
'e': {
|
||||
10: "Configuration error"
|
||||
}
|
||||
},
|
||||
domain="my_plugin" # Домен для поиска файлов перевода
|
||||
)
|
||||
```
|
||||
|
||||
3. **Использование в коде**:
|
||||
```python
|
||||
# Сообщения автоматически переводятся при логировании
|
||||
self.log("I1") # Будет показано на языке системы
|
||||
```
|
||||
|
||||
### Создание файлов перевода
|
||||
|
||||
1. **Извлечение строк для перевода**:
|
||||
```bash
|
||||
# Извлечь строки из кода плагина
|
||||
xgettext -d my_plugin -o my_plugin.po my_plugin.py
|
||||
```
|
||||
|
||||
2. **Создание файла перевода**:
|
||||
```po
|
||||
# my_plugin.po
|
||||
msgid "Plugin initialized"
|
||||
msgstr "Плагин инициализирован"
|
||||
|
||||
msgid "Configuration applied successfully"
|
||||
msgstr "Конфигурация успешно применена"
|
||||
```
|
||||
|
||||
3. **Компиляция переводов**:
|
||||
```bash
|
||||
# Скомпилировать .po в .mo
|
||||
msgfmt my_plugin.po -o my_plugin.mo
|
||||
|
||||
# Разместить в правильной директории
|
||||
mkdir -p /usr/share/locale/ru/LC_MESSAGES/
|
||||
cp my_plugin.mo /usr/share/locale/ru/LC_MESSAGES/
|
||||
```
|
||||
|
||||
### Лучшие практики для переводов
|
||||
|
||||
1. **Используйте полные предложения** - не разбивайте строки на части
|
||||
2. **Избегайте конкатенации строк** - это затрудняет перевод
|
||||
3. **Указывайте контекст** - добавляйте комментарии для переводчиков
|
||||
4. **Тестируйте переводы** - проверяйте отображение на разных языках
|
||||
5. **Обновляйте переводы** - при изменении сообщений обновляйте файлы .po
|
||||
|
||||
### Пример структуры плагина с переводами
|
||||
|
||||
```
|
||||
my_plugin/
|
||||
├── my_plugin.py # Основной код плагина
|
||||
├── locale/
|
||||
│ ├── ru/
|
||||
│ │ └── LC_MESSAGES/
|
||||
│ │ ├── my_plugin.mo
|
||||
│ │ └── my_plugin.po
|
||||
│ └── en/
|
||||
│ └── LC_MESSAGES/
|
||||
│ ├── my_plugin.mo
|
||||
│ └── my_plugin.po
|
||||
└── README.md
|
||||
```
|
||||
|
||||
## API плагинов
|
||||
|
||||
### Основные методы
|
||||
|
||||
- **`__init__(dict_dconf_db, username=None, fs_file_cache=None)`** - инициализация
|
||||
- **`run()`** - основной метод выполнения (абстрактный)
|
||||
- **`apply()`** - выполнение с текущими привилегиями (финальный)
|
||||
- **`apply_user(username)`** - выполнение с привилегиями пользователя (финальный)
|
||||
- **`get_dict_registry(prefix='')`** - получение данных из реестра
|
||||
- **`_init_plugin_log(message_dict=None, locale_dir=None, domain=None)`** - инициализация логгера
|
||||
- **`log(message_code, data=None)`** - логирование с кодами сообщений
|
||||
|
||||
### Система логирования
|
||||
|
||||
Коды сообщений:
|
||||
- **I** - Информационные сообщения
|
||||
- **W** - Предупреждения
|
||||
- **E** - Ошибки
|
||||
- **D** - Отладочные сообщения
|
||||
- **F** - Фатальные ошибки
|
||||
|
||||
### Доступ к данным
|
||||
|
||||
- **`dict_dconf_db`** - словарь данных из реестра
|
||||
- **`username`** - имя пользователя (для пользовательского контекста)
|
||||
- **`fs_file_cache`** - кэш файловой системы для работы с файлами
|
||||
|
||||
## Контексты выполнения
|
||||
|
||||
### Машинный контекст
|
||||
|
||||
- Выполняется с правами root
|
||||
- Применяет системные настройки
|
||||
- Использует фабричную функцию `create_machine_applier()`
|
||||
|
||||
### Пользовательский контекст
|
||||
|
||||
- Выполняется с правами указанного пользователя
|
||||
- Применяет пользовательские настройки
|
||||
- Использует фабричную функцию `create_user_applier()`
|
||||
|
||||
## Лучшие практики
|
||||
|
||||
1. **Безопасность**: Всегда валидируйте входные данные
|
||||
2. **Идемпотентность**: Повторное выполнение должно давать тот же результат
|
||||
3. **Логирование**: Используйте коды сообщений для всех операций
|
||||
4. **Обработка ошибок**: Плагин не должен "падать" при ошибках
|
||||
5. **Транзакционность**: Изменения должны быть атомарными
|
||||
6. **Переводы**: Поддерживайте локализацию сообщений
|
||||
139
README.md
139
README.md
@@ -1,9 +1,13 @@
|
||||
# GPOA - GPO Applier
|
||||
# GPOA - GPO Applier for Linux
|
||||
|
||||
## Contents
|
||||
|
||||
* [Introduction](#introduction)
|
||||
* [Development](#development)
|
||||
* [Features](#features)
|
||||
* [Architecture](#architecture)
|
||||
* [Installation](#installation)
|
||||
* [Usage](#usage)
|
||||
* [Plugin Development](#plugin-development)
|
||||
* [Contributing](#contributing)
|
||||
* [License](#license)
|
||||
|
||||
@@ -11,38 +15,137 @@
|
||||
|
||||
## Introduction
|
||||
|
||||
GPOA is a facility to fetch, reinterpret and apply GPOs from Windows
|
||||
Active Directory domains in UNIX environments.
|
||||
GPOA (GPO Applier for Linux) is a comprehensive facility to fetch, reinterpret and apply Group Policy Objects (GPOs) from Windows Active Directory domains in Linux environments. Developed by ALT Linux team, it enables seamless integration of Linux machines into corporate Windows infrastructure.
|
||||
|
||||
## Development
|
||||
|
||||
This project needs some additional dependencies for development
|
||||
purposes (static analisys):
|
||||
## Features
|
||||
|
||||
* python3-module-setuptools
|
||||
* python3-module-pip
|
||||
* python3-module-pylint
|
||||
### Core Functionality
|
||||
- **Multi-backend Support**: Samba, FreeIPA, and no-domain backends
|
||||
- **Policy Types**: Registry settings, files, folders, environment variables, scripts, services, and more
|
||||
- **Display Manager Integration**: LightDM, GDM with background and theme support
|
||||
- **Plugin System**: Extensible architecture for custom policy types
|
||||
- **Privilege Separation**: Secure execution with proper privilege contexts
|
||||
|
||||
And then you may install prospector like:
|
||||
### Supported Policy Areas
|
||||
- **System Configuration**: Environment variables, services
|
||||
- **Desktop Settings**: GSettings, KDE configuration, browser policies
|
||||
- **Security**: Polkit policies
|
||||
- **Network**: Network shares
|
||||
- **Applications**: Firefox, Chrome, Thunderbird, Yandex Browser
|
||||
- **Files and Folders**: File deployment, folder redirection
|
||||
|
||||
```sh
|
||||
# pip install prospector[with_pyroma]
|
||||
## Architecture
|
||||
|
||||
### Backend System
|
||||
- **Samba Backend**: Traditional Active Directory integration
|
||||
- **FreeIPA Backend**: Enhanced FreeIPA/IdM integration
|
||||
- **No-domain Backend**: Local policy application
|
||||
|
||||
### Frontend System
|
||||
- **Policy Appliers**: Specialized modules for different policy types
|
||||
- **Plugin Framework**: Extensible plugin system with logging and translations
|
||||
|
||||
### Plugin System
|
||||
- **Machine Context**: Root-privileged system-wide changes
|
||||
- **User Context**: User-specific configuration application
|
||||
- **Message Codes**: Structured logging with translation support
|
||||
- **Registry Access**: Secure access to policy registry data
|
||||
|
||||
## Installation
|
||||
|
||||
### From Source
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone https://github.com/altlinux/gpupdate.git
|
||||
cd gpupdate
|
||||
|
||||
# Build RPM package
|
||||
rpmbuild -ba gpupdate.spec
|
||||
|
||||
# Install the package
|
||||
rpm -ivh ~/rpmbuild/RPMS/noarch/gpupdate-*.rpm
|
||||
```
|
||||
|
||||
### Dependencies
|
||||
- Python 3.6+
|
||||
- Samba client tools
|
||||
- FreeIPA client (optional)
|
||||
- Systemd
|
||||
- D-Bus
|
||||
|
||||
## Usage
|
||||
|
||||
### Apply Policies for Machine
|
||||
```bash
|
||||
# Run as root for system-wide policies
|
||||
sudo gpoa
|
||||
```
|
||||
|
||||
### Apply Policies for User
|
||||
```bash
|
||||
# Run as root for user-specific policies
|
||||
sudo gpoa username
|
||||
```
|
||||
|
||||
### Force Policy Refresh
|
||||
```bash
|
||||
# Can be run as regular user
|
||||
gpupdate --force
|
||||
```
|
||||
|
||||
### Plugin Management
|
||||
Plugins are automatically discovered from:
|
||||
- `/usr/lib/gpupdate/plugins/` (system plugins)
|
||||
- `gpoa/frontend_plugins/` (development plugins)
|
||||
|
||||
## Plugin Development
|
||||
|
||||
GPOA features a comprehensive plugin system. See documentation for detailed information:
|
||||
|
||||
- [PLUGIN_DEVELOPMENT_GUIDE.md](PLUGIN_DEVELOPMENT_GUIDE.md) - English version
|
||||
- [PLUGIN_DEVELOPMENT_GUIDE_RU.md](PLUGIN_DEVELOPMENT_GUIDE_RU.md) - Russian version
|
||||
|
||||
Documentation covers:
|
||||
- Plugin architecture and API
|
||||
- Creating custom plugins
|
||||
- Logging and message codes
|
||||
- Translation support
|
||||
- Best practices
|
||||
|
||||
### Quick Plugin Example
|
||||
```python
|
||||
from gpoa.plugin.plugin_base import FrontendPlugin
|
||||
|
||||
class MyPlugin(FrontendPlugin):
|
||||
domain = 'my_plugin'
|
||||
|
||||
def __init__(self, dict_dconf_db, username=None, fs_file_cache=None):
|
||||
super().__init__(dict_dconf_db, username, fs_file_cache)
|
||||
self._init_plugin_log(message_dict={
|
||||
'i': {1: "Plugin initialized"},
|
||||
'e': {1: "Plugin failed"}
|
||||
}, domain="my_plugin")
|
||||
|
||||
def run(self):
|
||||
self.log("I1")
|
||||
return True
|
||||
|
||||
def create_machine_applier(dict_dconf_db, username=None, fs_file_cache=None):
|
||||
return MyPlugin(dict_dconf_db, username, fs_file_cache)
|
||||
```
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
The main communication channel for GPOA is
|
||||
[Samba@ALT Linux mailing lists](https://lists.altlinux.org/mailman/listinfo/samba).
|
||||
The mailing list is in Russian but you may also send e-mail in English
|
||||
or German.
|
||||
The main communication channel for GPOA is [Samba@ALT Linux mailing lists](https://lists.altlinux.org/mailman/listinfo/samba). The mailing list is in Russian but you may also send e-mail in English or German.
|
||||
|
||||
|
||||
## License
|
||||
|
||||
GPOA - GPO Applier for Linux
|
||||
|
||||
Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
|
||||
22
completions/gpoa
Normal file
22
completions/gpoa
Normal file
@@ -0,0 +1,22 @@
|
||||
_gpoa()
|
||||
{
|
||||
local cur prev words cword split
|
||||
_init_completion -s || return
|
||||
|
||||
case $prev in
|
||||
--dc)
|
||||
_filedir
|
||||
return
|
||||
;;
|
||||
--loglevel)
|
||||
COMPREPLY=($(compgen -W '0 1 2 3 4 5' -- "$cur"))
|
||||
return
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W '--dc --nodomain --noupdate --noplugins --list-backends --loglevel --help --force' -- "$cur"))
|
||||
return
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
complete -F _gpoa gpoa
|
||||
27
completions/gpupdate
Normal file
27
completions/gpupdate
Normal file
@@ -0,0 +1,27 @@
|
||||
_gpupdate()
|
||||
{
|
||||
local cur prev words cword split
|
||||
_init_completion -s || return
|
||||
|
||||
case $prev in
|
||||
-u|--user)
|
||||
_filedir
|
||||
return
|
||||
;;
|
||||
-t|--target)
|
||||
COMPREPLY=($(compgen -W 'ALL USER COMPUTER' -- "$cur"))
|
||||
return
|
||||
;;
|
||||
-l|--loglevel)
|
||||
COMPREPLY=($(compgen -W '0 1 2 3 4 5' -- "$cur"))
|
||||
return
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W '--user --target --loglevel --system --help --force' -- "$cur"))
|
||||
return
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
complete -F _gpupdate gpupdate
|
||||
|
||||
18
completions/gpupdate-setup
Normal file
18
completions/gpupdate-setup
Normal file
@@ -0,0 +1,18 @@
|
||||
_gpupdate-setup()
|
||||
{
|
||||
local cur prev words cword split
|
||||
_init_completion -s || return
|
||||
|
||||
case $prev in
|
||||
set-backend)
|
||||
COMPREPLY=($(compgen -W 'local samba' -- "$cur"))
|
||||
return
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W 'list list-backends status enable disable update write set-backend default-policy active-policy active-backend' -- "$cur"))
|
||||
return
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
complete -F _gpupdate-setup gpupdate-setup
|
||||
11
dist/gpupdate-scripts-run-user.service
vendored
Normal file
11
dist/gpupdate-scripts-run-user.service
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
[Unit]
|
||||
Description=Run Group Policy scripts for a user
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
RemainAfterExit=true
|
||||
ExecStart=/usr/libexec/gpupdate/scripts_runner --mode USER --action LOGON --user %u
|
||||
ExecStop=/usr/libexec/gpupdate/scripts_runner --mode USER --action LOGOFF --user %u
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
15
dist/gpupdate-scripts-run.service
vendored
Normal file
15
dist/gpupdate-scripts-run.service
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
[Unit]
|
||||
Description=Running Group Policy Scripts
|
||||
After=gpupdate.service
|
||||
|
||||
[Service]
|
||||
Environment=PATH=/bin:/sbin:/usr/bin:/usr/sbin
|
||||
UnsetEnvironment=LANG LANGUAGE LC_CTYPE LC_NUMERIC LC_TIME LC_COLLATE LC_MONETARY LC_MESSAGES LC_PAPER LC_NAME LC_ADDRESS LC_TELEPHONE LC_MEASUREMENT LC_IDENTIFICATION
|
||||
Type=oneshot
|
||||
RemainAfterExit=true
|
||||
ExecStart=/usr/libexec/gpupdate/scripts_runner --mode MACHINE --action STARTUP
|
||||
ExecStop=/usr/libexec/gpupdate/scripts_runner --mode MACHINE --action SHUTDOWN
|
||||
StandardOutput=journal
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
8
dist/gpupdate-user.service
vendored
8
dist/gpupdate-user.service
vendored
@@ -6,12 +6,8 @@ Description=gpupdate in userspace
|
||||
[Service]
|
||||
Environment=PATH=/bin:/sbin:/usr/bin:/usr/sbin
|
||||
UnsetEnvironment=LANG LANGUAGE LC_CTYPE LC_NUMERIC LC_TIME LC_COLLATE LC_MONETARY LC_MESSAGES LC_PAPER LC_NAME LC_ADDRESS LC_TELEPHONE LC_MEASUREMENT LC_IDENTIFICATION
|
||||
Type=simple
|
||||
RestartSec=3600
|
||||
TimeoutSec=3000
|
||||
Restart=always
|
||||
ExecStart=/usr/sbin/gpoa
|
||||
Type=oneshot
|
||||
ExecStart=/usr/bin/gpupdate --target USER
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
|
||||
|
||||
9
dist/gpupdate-user.timer
vendored
Normal file
9
dist/gpupdate-user.timer
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
[Unit]
|
||||
Description=Run gpupdate-user every hour
|
||||
|
||||
[Timer]
|
||||
OnStartupSec=60min
|
||||
OnUnitActiveSec=60min
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
||||
6
dist/gpupdate.service
vendored
6
dist/gpupdate.service
vendored
@@ -1,14 +1,12 @@
|
||||
[Unit]
|
||||
Description=Group policy update for machine
|
||||
After=syslog.target network-online.target sssd.service
|
||||
Before=systemd-logind.service
|
||||
|
||||
[Service]
|
||||
Environment=PATH=/bin:/sbin:/usr/bin:/usr/sbin
|
||||
UnsetEnvironment=LANG LANGUAGE LC_CTYPE LC_NUMERIC LC_TIME LC_COLLATE LC_MONETARY LC_MESSAGES LC_PAPER LC_NAME LC_ADDRESS LC_TELEPHONE LC_MEASUREMENT LC_IDENTIFICATION
|
||||
Type=simple
|
||||
RestartSec=3600
|
||||
TimeoutSec=3000
|
||||
Restart=always
|
||||
Type=oneshot
|
||||
ExecStart=/usr/bin/gpupdate
|
||||
StandardOutput=journal
|
||||
|
||||
|
||||
9
dist/gpupdate.timer
vendored
Normal file
9
dist/gpupdate.timer
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
[Unit]
|
||||
Description=Run gpupdate every hour
|
||||
|
||||
[Timer]
|
||||
OnStartupSec=60min
|
||||
OnUnitActiveSec=60min
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
||||
9
dist/system-policy-gpupdate
vendored
9
dist/system-policy-gpupdate
vendored
@@ -2,11 +2,12 @@
|
||||
session [success=2 perm_denied=ignore default=die] pam_localuser.so
|
||||
session substack gpupdate-remote-policy
|
||||
session [default=1] pam_permit.so
|
||||
session [default=6] pam_permit.so
|
||||
session [default=7] pam_permit.so
|
||||
session [success=1 default=ignore] pam_succeed_if.so user ingroup users quiet
|
||||
session [default=4] pam_permit.so
|
||||
session [default=5] pam_permit.so
|
||||
session [success=1 default=ignore] pam_succeed_if.so uid >= 500 quiet
|
||||
session [default=2] pam_permit.so
|
||||
session [default=3] pam_permit.so
|
||||
session [success=1 default=ignore] pam_succeed_if.so service = systemd-user quiet
|
||||
-session required pam_oddjob_gpupdate.so
|
||||
session optional pam_env.so user_readenv=1 conffile=/etc/gpupdate/environment user_envfile=.gpupdate_environment
|
||||
session required pam_permit.so
|
||||
session required pam_permit.so
|
||||
20
doc/gpoa.1
20
doc/gpoa.1
@@ -20,12 +20,15 @@
|
||||
gpoa \- utility to update and apply group policy settings
|
||||
.
|
||||
.SH SYNOPSYS
|
||||
.B gpoa
|
||||
.B gpoa [user][options]
|
||||
.
|
||||
.SH DESCRIPTION
|
||||
.B gpoa
|
||||
Fetches GPT files for designated user from AD instance and transforms
|
||||
them into UNIX system settings.
|
||||
|
||||
If no user argument is specified, gpoa applies machine policies.
|
||||
If a user name is given, gpoa applies user policies for that domain account.
|
||||
.SS Options
|
||||
.TP
|
||||
\fB-h\fP
|
||||
@@ -35,7 +38,11 @@ Show help.
|
||||
Specify domain controller hostname FQDN to replicate GPTs from. May be
|
||||
useful in case of default DC problems.
|
||||
.TP
|
||||
\fB--target \fITARGET\fP
|
||||
\fB--list-backends\fP
|
||||
Show a list of available backends for applying policies.
|
||||
.TP
|
||||
\fB--nodomain\fP
|
||||
Operate without a domain controller. Apply only local policy.
|
||||
.TP
|
||||
\fB--noupdate\fP
|
||||
Don't update settings.
|
||||
@@ -45,6 +52,9 @@ Don't run plugins.
|
||||
.TP
|
||||
\fB--loglevel \fILOGLEVEL\fP
|
||||
Set logging verbosity from 0 to 5.
|
||||
.TP
|
||||
\fB--force\fP
|
||||
Force GPT download.
|
||||
.
|
||||
.SH FILES
|
||||
\fB/usr/sbin/gpoa\fR utility uses \fB/usr/share/local-policy/default\fR
|
||||
@@ -55,8 +65,10 @@ All data is located in \fB/var/cache/gpupdate\fR. Also domain GPTs are
|
||||
taken from Samba's \fB/var/cache/samba\fR.
|
||||
.
|
||||
The settings read from Samba are stored in
|
||||
\fB/var/cache/gpupdate/registry.sqlite\fR and "Local Policy" settings
|
||||
read from \fB/usr/local/share/local-policy/default\fR are converted
|
||||
Dconf. Machine policies are stored in the \fB/etc/dconf/db/policy.d/policy.ini\fR file,
|
||||
user policies are stored in the \fB/etc/dconf/db/policy<UID>.d/policy<UID>.ini\fR file
|
||||
(where UID is the user ID in the system)."Local Policy" settings
|
||||
read from \fB/usr/share/local-policy/\fR are converted
|
||||
into GPT and stored as \fB/var/cache/gpupdate/local-policy\fR.
|
||||
.SH "SEE ALSO"
|
||||
gpupdate(1)
|
||||
|
||||
@@ -43,6 +43,9 @@ Show help.
|
||||
.TP
|
||||
\fB--user \fIusername\fR
|
||||
Run \fBgpupdate\fP for \fIusername\fP.
|
||||
.TP
|
||||
\fB--force\fP
|
||||
Force GPT download.
|
||||
.
|
||||
.SS "EXIT CODES"
|
||||
.TP
|
||||
|
||||
@@ -17,11 +17,21 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from util.windows import smbcreds
|
||||
from .samba_backend import samba_backend
|
||||
from .nodomain_backend import nodomain_backend
|
||||
from util.logging import log
|
||||
from storage.dconf_registry import (
|
||||
Dconf_registry,
|
||||
add_preferences_to_global_registry_dict,
|
||||
create_dconf_ini_file,
|
||||
)
|
||||
from util.config import GPConfig
|
||||
from util.logging import log
|
||||
from util.paths import get_dconf_config_file
|
||||
from util.util import get_uid_by_username, touch_file
|
||||
from util.windows import smbcreds
|
||||
from util.ipacreds import ipacreds
|
||||
|
||||
from .nodomain_backend import nodomain_backend
|
||||
from .samba_backend import samba_backend
|
||||
from .freeipa_backend import freeipa_backend
|
||||
|
||||
def backend_factory(dc, username, is_machine, no_domain = False):
|
||||
'''
|
||||
@@ -49,6 +59,20 @@ def backend_factory(dc, username, is_machine, no_domain = False):
|
||||
logdata = dict({'error': str(exc)})
|
||||
log('E7', logdata)
|
||||
|
||||
if config.get_backend() == 'freeipa' and not no_domain:
|
||||
try:
|
||||
if not dc:
|
||||
dc = config.get_dc()
|
||||
if dc:
|
||||
ld = {'dc': dc}
|
||||
log('D52', ld)
|
||||
ipac = ipacreds()
|
||||
domain = ipac.get_domain()
|
||||
back = freeipa_backend(ipac, username, domain, is_machine)
|
||||
except Exception as exc:
|
||||
logdata = {'error': str(exc)}
|
||||
log('E79', logdata)
|
||||
|
||||
if config.get_backend() == 'local' or no_domain:
|
||||
log('D8')
|
||||
try:
|
||||
@@ -59,3 +83,14 @@ def backend_factory(dc, username, is_machine, no_domain = False):
|
||||
|
||||
return back
|
||||
|
||||
def save_dconf(username, is_machine, nodomain=None):
|
||||
if is_machine:
|
||||
uid = None
|
||||
else:
|
||||
uid = get_uid_by_username(username) if not is_machine else None
|
||||
target_file = get_dconf_config_file(uid)
|
||||
touch_file(target_file)
|
||||
Dconf_registry.apply_template(uid)
|
||||
add_preferences_to_global_registry_dict(username, is_machine)
|
||||
Dconf_registry.update_dict_to_previous()
|
||||
create_dconf_ini_file(target_file,Dconf_registry.global_registry_dict, uid, nodomain)
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
from abc import ABC
|
||||
|
||||
|
||||
class applier_backend(ABC):
|
||||
@classmethod
|
||||
def __init__(self):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,10 +16,232 @@
|
||||
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import smbc
import re

from .applier_backend import applier_backend
from pathlib import Path
from gpt.gpt import gpt, get_local_gpt
from gpt.gpo_dconf_mapping import GpoInfoDconf
from storage import registry_factory
from storage.dconf_registry import Dconf_registry, extract_display_name_version
from storage.fs_file_cache import fs_file_cache
from util.logging import log
from util.util import get_uid_by_username
from util.kerberos import (
    machine_kinit
    , machine_kdestroy
)


class freeipa_backend(applier_backend):

    def __init__(self):
        pass
    def __init__(self, ipacreds, username, domain, is_machine):
        self.ipacreds = ipacreds
        self.cache_path = '/var/cache/gpupdate/creds/krb5cc_{}'.format(os.getpid())
        self.__kinit_successful = machine_kinit(self.cache_path, "freeipa")
        if not self.__kinit_successful:
            raise Exception('kinit is not successful')

        self.storage = registry_factory()
        self.storage.set_info('domain', domain)

        machine_name = self.ipacreds.get_machine_name()
        self.storage.set_info('machine_name', machine_name)
        self.username = machine_name if is_machine else username
        self._is_machine_username = is_machine

        self.cache_dir = self.ipacreds.get_cache_dir()
        self.gpo_cache_part = 'gpo_cache'
        self.gpo_cache_dir = os.path.join(self.cache_dir, self.gpo_cache_part)
        self.storage.set_info('cache_dir', self.gpo_cache_dir)
        self.file_cache = fs_file_cache("freeipa_gpo", username)
        logdata = {'cachedir': self.cache_dir}
        log('D7', logdata)

    def __del__(self):
        if self.__kinit_successful:
            machine_kdestroy()

    def retrieve_and_store(self):
        '''
        Retrieve settings and store it in a database - FreeIPA version
        '''
        try:
            if self._is_machine_username:
                dconf_dict = Dconf_registry.get_dictionary_from_dconf_file_db(save_dconf_db=True)
            else:
                uid = get_uid_by_username(self.username)
                dconf_dict = Dconf_registry.get_dictionary_from_dconf_file_db(uid, save_dconf_db=True)
        except Exception as e:
            logdata = {'msg': str(e)}
            log('E72', logdata)

        if self._is_machine_username:
            machine_gpts = []

            try:
                machine_name = self.storage.get_info('machine_name')
                machine_gpts = self._get_gpts(machine_name)
                machine_gpts.reverse()

            except Exception as exc:
                logdata = {'msg': str(exc)}
                log('E17', logdata)

            for i, gptobj in enumerate(machine_gpts):
                try:
                    gptobj.merge_machine()
                except Exception as exc:
                    logdata = {'msg': str(exc)}
                    log('E26', logdata)
        else:
            user_gpts = []
            try:
                user_gpts = self._get_gpts(self.username)
                user_gpts.reverse()
            except Exception as exc:
                logdata = {'msg': str(exc)}
                log('E17', logdata)
            for i, gptobj in enumerate(user_gpts):
                try:
                    gptobj.merge_user()
                except Exception as exc:
                    logdata = {'msg': str(exc)}
                    log('E27', logdata)

    def _get_gpts(self, username):
        gpts = []
        gpos, server = self.ipacreds.update_gpos(username)
        if not gpos:
            return gpts
        if not server:
            return gpts

        cached_gpos = []
        download_gpos = []

        for i, gpo in enumerate(gpos):
            if gpo.file_sys_path.startswith('/'):
                if os.path.exists(gpo.file_sys_path):
                    logdata = {'gpo_name': gpo.display_name, 'path': gpo.file_sys_path}
                    log('D11', logdata)
                    cached_gpos.append(gpo)
                else:
                    download_gpos.append(gpo)
            else:
                if self._check_sysvol_present(gpo):
                    download_gpos.append(gpo)
                else:
                    logdata = {'gpo_name': gpo.display_name}
                    log('W4', logdata)

        if download_gpos:
            try:
                self._download_gpos(download_gpos, server)
                logdata = {'count': len(download_gpos)}
                log('D50', logdata)
            except Exception as e:
                logdata = {'msg': str(e), 'count': len(download_gpos)}
                log('E35', logdata)
        else:
            log('D211', {})

        all_gpos = cached_gpos + download_gpos
        for gpo in all_gpos:
            gpt_abspath = gpo.file_sys_path
            if not os.path.exists(gpt_abspath):
                logdata = {'path': gpt_abspath, 'gpo_name': gpo.display_name}
                log('W12', logdata)
                continue

            if self._is_machine_username:
                obj = gpt(gpt_abspath, None, GpoInfoDconf(gpo))
            else:
                obj = gpt(gpt_abspath, self.username, GpoInfoDconf(gpo))

            obj.set_name(gpo.display_name)
            gpts.append(obj)

        local_gpt = get_local_gpt()
        gpts.append(local_gpt)
        logdata = {'total_count': len(gpts), 'downloaded_count': len(download_gpos)}
        log('I2', logdata)
        return gpts

    def _check_sysvol_present(self, gpo):
        if not gpo.file_sys_path:
            if getattr(gpo, 'name', '') != 'Local Policy':
                logdata = {'gponame': getattr(gpo, 'name', 'Unknown')}
                log('W4', logdata)
            return False

        if gpo.file_sys_path.startswith('\\\\'):
            return True

        elif gpo.file_sys_path.startswith('/'):
            if os.path.exists(gpo.file_sys_path):
                return True
            else:
                return False

        else:
            return False

    def _download_gpos(self, gpos, server):
        cache_dir = self.ipacreds.get_cache_dir()
        domain = self.ipacreds.get_domain().upper()
        gpo_cache_dir = os.path.join(cache_dir, domain, 'POLICIES')
        os.makedirs(gpo_cache_dir, exist_ok=True)

        for gpo in gpos:
            if not gpo.file_sys_path:
                continue
            smb_remote_path = None
            try:
                smb_remote_path = self._convert_to_smb_path(gpo.file_sys_path, server)
                local_gpo_path = os.path.join(gpo_cache_dir, gpo.name)

                self._download_gpo_directory(smb_remote_path, local_gpo_path)
                gpo.file_sys_path = local_gpo_path

            except Exception as e:
                logdata = {
                    'msg': str(e),
                    'gpo_name': gpo.display_name,
                    'smb_path': smb_remote_path,
                }
                log('E38', logdata)

    def _convert_to_smb_path(self, windows_path, server):
        match = re.search(r'\\\\[^\\]+\\(.+)', windows_path)
        if not match:
            raise Exception(f"Invalid Windows path format: {windows_path}")
        relative_path = match.group(1).replace('\\', '/').lower()
        smb_url = f"smb://{server}/{relative_path}"

        return smb_url

    def _download_gpo_directory(self, remote_smb_path, local_path):
        os.makedirs(local_path, exist_ok=True)
        try:
            entries = self.file_cache.samba_context.opendir(remote_smb_path).getdents()
            for entry in entries:
                if entry.name in [".", ".."]:
                    continue
                remote_entry_path = f"{remote_smb_path}/{entry.name}"
                local_entry_path = os.path.join(local_path, entry.name)
                if entry.smbc_type == smbc.DIR:
                    self._download_gpo_directory(remote_entry_path, local_entry_path)
                elif entry.smbc_type == smbc.FILE:
                    try:
                        os.makedirs(os.path.dirname(local_entry_path), exist_ok=True)
                        self.file_cache.store(remote_entry_path, Path(local_entry_path))
                    except Exception as e:
                        logdata = {'exception': str(e), 'file': entry.name}
                        log('W30', logdata)
        except Exception as e:
            logdata = {'exception': str(e), 'remote_folder_path': remote_smb_path}
            log('W31', logdata)
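As an illustrative aside (not part of the repository code above), the UNC-to-SMB conversion done by _convert_to_smb_path() reduces to the following self-contained sketch; the server name and GPO path below are made-up examples:

import re

def convert_to_smb_path(windows_path, server):
    # Same logic as freeipa_backend._convert_to_smb_path() above, copied for illustration.
    match = re.search(r'\\\\[^\\]+\\(.+)', windows_path)
    if not match:
        raise Exception(f"Invalid Windows path format: {windows_path}")
    relative_path = match.group(1).replace('\\', '/').lower()
    return f"smb://{server}/{relative_path}"

print(convert_to_smb_path(r'\\dc.example.com\SysVol\example.com\Policies\{31B2F340-016D-11D2-945F-00C04FB984F9}', 'dc.example.com'))
# -> smb://dc.example.com/sysvol/example.com/policies/{31b2f340-016d-11d2-945f-00c04fb984f9}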
@@ -1,7 +1,7 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2020 BaseALT Ltd.
# Copyright (C) 2019-2025 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -16,33 +16,18 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import logging
import os

from gpt.gpt import get_local_gpt
from storage import registry_factory

from .applier_backend import applier_backend
from storage import registry_factory
from gpt.gpt import gpt, get_local_gpt
from util.util import (
    get_machine_name
)
from util.windows import get_sid
import util.preg
from util.logging import slogm


class nodomain_backend(applier_backend):

    def __init__(self):
        domain = None
        machine_name = get_machine_name()
        machine_sid = get_sid(domain, machine_name, True)
        self.storage = registry_factory('registry')
        self.storage.set_info('domain', domain)
        self.storage.set_info('machine_name', machine_name)
        self.storage.set_info('machine_sid', machine_sid)
        self.storage = registry_factory()

        # User SID to work with HKCU hive
        self.username = machine_name
        self.sid = machine_sid

    def retrieve_and_store(self):
        '''
@@ -50,7 +35,7 @@ class nodomain_backend(applier_backend):
        '''
        # Get policies for machine at first.
        self.storage.wipe_hklm()
        self.storage.wipe_user(self.storage.get_info('machine_sid'))
        local_policy = get_local_gpt(self.sid)
        local_policy.merge()
        local_policy = get_local_gpt()
        local_policy.merge_machine()
        local_policy.merge_user()
@@ -1,7 +1,7 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2020 BaseALT Ltd.
# Copyright (C) 2019-2025 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -17,32 +17,34 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os

# Facility to determine GPTs for user
from samba.gpclass import check_safe_path, check_refresh_gpo_list
try:
    from samba.gpclass import check_safe_path
except ImportError:
    from samba.gp.gpclass import check_safe_path

from gpt.gpo_dconf_mapping import GpoInfoDconf
from gpt.gpt import get_local_gpt, gpt
from storage import registry_factory
from util.kerberos import machine_kdestroy, machine_kinit
from util.logging import log
from util.sid import get_sid
from util.util import get_machine_name

from .applier_backend import applier_backend
from storage import cache_factory, registry_factory
from gpt.gpt import gpt, get_local_gpt
from util.util import (
    get_machine_name,
    is_machine_name
)
from util.kerberos import (
    machine_kinit
    , machine_kdestroy
)
from util.windows import get_sid
import util.preg
from util.logging import log


class samba_backend(applier_backend):
    __user_policy_mode_key = '/SOFTWARE/Policies/Microsoft/Windows/System/UserPolicyMode'
    __user_policy_mode_key_win = '/Software/Policies/Microsoft/Windows/System/UserPolicyMode'

    def __init__(self, sambacreds, username, domain, is_machine):
        self.cache_path = '/var/cache/gpupdate/creds/krb5cc_{}'.format(os.getpid())
        self.__kinit_successful = machine_kinit(self.cache_path)
        if not self.__kinit_successful:
            raise Exception('kinit is not successful')
        self.storage = registry_factory('registry')
        self.storage = registry_factory()
        self.storage.set_info('domain', domain)
        machine_name = get_machine_name()
        machine_sid = get_sid(domain, machine_name, is_machine)
@@ -51,97 +53,160 @@ class samba_backend(applier_backend):

        # User SID to work with HKCU hive
        self.username = username
        self._is_machine_username = is_machine
        self._is_machine = is_machine
        if is_machine:
            self.sid = machine_sid
        else:
            self.sid = get_sid(self.storage.get_info('domain'), self.username)

        self.cache = cache_factory('regpol_cache')
        self.gpo_names = cache_factory('gpo_names')

        # Samba objects - LoadParm() and CredentialsOptions()
        self.sambacreds = sambacreds

        self.cache_dir = self.sambacreds.get_cache_dir()
        logdata = dict({'cachedir': self.cache_dir})
        self.gpo_cache_part ='gpo_cache'
        self._cached = False
        self.storage.set_info('cache_dir', os.path.join(self.cache_dir, self.gpo_cache_part))
        logdata = {'cachedir': self.cache_dir}
        log('D7', logdata)

    def __del__(self):
        if self.__kinit_successful:
            machine_kdestroy()

    def get_policy_mode(self):
        '''
        Get UserPolicyMode parameter value in order to determine if it
        is possible to work with user's part of GPT. This value is
        checked only if working for user's SID.
        '''
        upm_key = self.storage.get_key_value(self.__user_policy_mode_key)
        upm_win_key = self.storage.get_key_value(self.__user_policy_mode_key_win)
        upm = upm_key if upm_key else upm_win_key
        if upm:
            upm = int(upm)
            if upm < 0 or upm > 2:
                upm = 0
        else:
            upm = 0

        return upm

    def retrieve_and_store(self):
        '''
        Retrieve settings and store it in a database
        '''
        # Get policies for machine at first.
        machine_gpts = list()
        machine_gpts = []
        try:
            machine_gpts = self._get_gpts(get_machine_name(), self.storage.get_info('machine_sid'))
            machine_gpts = self._get_gpts()
        except Exception as exc:
            log('F2')
            raise exc
        self.storage.wipe_hklm()
        self.storage.wipe_user(self.storage.get_info('machine_sid'))
        for gptobj in machine_gpts:
            try:
                gptobj.merge()
            except Exception as exc:
                logdata = dict()
                logdata['msg'] = str(exc)
                log('E26', logdata)

        if self._is_machine:
            for gptobj in machine_gpts:
                try:
                    gptobj.merge_machine()
                except Exception as exc:
                    logdata = {}
                    logdata['msg'] = str(exc)
                    log('E26', logdata)

        # Load user GPT values in case user's name specified
        # This is a buggy implementation and should be tested more
        if not self._is_machine_username:
            user_gpts = list()
        else:
            user_gpts = []
            user_path_gpts = set()
            try:
                user_gpts = self._get_gpts(self.username, self.sid)
                user_gpts = self._get_gpts(self.username)
            except Exception as exc:
                log('F3')
                raise exc
            self.storage.wipe_user(self.sid)
            for gptobj in user_gpts:
                try:
                    gptobj.merge()
                except Exception as exc:
                    logdata = dict()
                    logdata['msg'] = str(exc)
                    log('E27', logdata)

            # Merge user settings if UserPolicyMode set accordingly
            # and user settings (for HKCU) exist.
            policy_mode = self.get_policy_mode()
            logdata = {'mode': upm2str(policy_mode)}
            log('D152', logdata)

            if policy_mode < 2:
                for gptobj in user_gpts:
                    try:
                        gptobj.merge_user()
                        user_path_gpts.add(gptobj.path)
                    except Exception as exc:
                        logdata = {}
                        logdata['msg'] = str(exc)
                        log('E27', logdata)
            filtered_machine_gpts = [gpt for gpt in machine_gpts
                                     if gpt.path not in user_path_gpts]
            if policy_mode > 0:
                for gptobj in filtered_machine_gpts:
                    try:
                        gptobj.merge_user()
                    except Exception as exc:
                        logdata = {}
                        logdata['msg'] = str(exc)
                        log('E63', logdata)

    def _check_sysvol_present(self, gpo):
        '''
        Check if there is SYSVOL path for GPO assigned
        '''
        self._cached = False
        if not gpo.file_sys_path:
            # GPO named "Local Policy" has no entry by its nature so
            # no reason to print warning.
            if 'Local Policy' != gpo.name:
                logdata = dict({'gponame': gpo.name})
            if gpo.display_name in self.storage._dict_gpo_name_version_cache.keys():
                gpo.file_sys_path = self.storage._dict_gpo_name_version_cache.get(gpo.display_name, {}).get('correct_path')
                self._cached = True
                return True
            elif 'Local Policy' != gpo.name:
                logdata = {'gponame': gpo.name}
                log('W4', logdata)
            return False
        return True

    def _get_gpts(self, username, sid):
        gpts = list()

        log('D45', {'username': username, 'sid': sid})
    def _get_gpts(self, username=None):
        gpts = []
        if not username:
            username = get_machine_name()
        log('D45', {'username': username})
        # util.windows.smbcreds
        gpos = self.sambacreds.update_gpos(username)
        log('D46')
        for gpo in gpos:
            if self._check_sysvol_present(gpo):
                path = check_safe_path(gpo.file_sys_path).upper()
                slogdata = dict({'sysvol_path': gpo.file_sys_path, 'gpo_name': gpo.display_name, 'gpo_path': path})
                log('D30', slogdata)
                gpt_abspath = os.path.join(self.cache_dir, 'gpo_cache', path)
                obj = gpt(gpt_abspath, sid)
                if not self._cached:
                    path = check_safe_path(gpo.file_sys_path).upper()
                    slogdata = {'sysvol_path': gpo.file_sys_path, 'gpo_name': gpo.display_name, 'gpo_path': path}
                    log('D30', slogdata)
                    gpt_abspath = os.path.join(self.cache_dir, self.gpo_cache_part, path)
                else:
                    gpt_abspath = gpo.file_sys_path
                    log('D211', {'sysvol_path': gpo.file_sys_path, 'gpo_name': gpo.display_name})
                if self._is_machine:
                    obj = gpt(gpt_abspath, None, GpoInfoDconf(gpo))
                else:
                    obj = gpt(gpt_abspath, self.username, GpoInfoDconf(gpo))
                obj.set_name(gpo.display_name)
                gpts.append(obj)
            else:
                if 'Local Policy' == gpo.name:
                    gpts.append(get_local_gpt(sid))
                    gpts.append(get_local_gpt())

        return gpts

def upm2str(upm_num):
    '''
    Translate UserPolicyMode to string.
    '''
    result = 'Not configured'

    if upm_num in [1, '1']:
        result = 'Merge'

    if upm_num in [2, '2']:
        result = 'Replace'

    return result
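As a hedged summary (not part of the commit), the UserPolicyMode handling in retrieve_and_store() above selects which GPT halves are merged for a user session; the sketch below only restates that logic:

def describe_policy_mode(policy_mode):
    # 0 = Not configured, 1 = Merge, 2 = Replace (see upm2str() above).
    applied = []
    if policy_mode < 2:
        applied.append('user GPOs (user part)')
    if policy_mode > 0:
        applied.append('machine GPOs not already applied (user part)')
    return ', '.join(applied)

assert describe_policy_mode(0) == 'user GPOs (user part)'
assert describe_policy_mode(2) == 'machine GPOs not already applied (user part)'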
@@ -16,7 +16,4 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from .frontend_manager import (
    frontend_manager as applier
)

from .frontend_manager import frontend_manager as applier
@@ -1,7 +1,7 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2020 BaseALT Ltd.
# Copyright (C) 2019-2024 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -18,43 +18,41 @@

from abc import ABC

import logging
from util.logging import slogm

def check_experimental_enabled(storage):
    experimental_enable_flag = 'Software\\BaseALT\\Policies\\GPUpdate\\GlobalExperimental'
    flag = storage.get_hklm_entry(experimental_enable_flag)
    experimental_enable_flag = '/Software/BaseALT/Policies/GPUpdate/GlobalExperimental'
    flag = storage.get_key_value(experimental_enable_flag)

    result = False

    if flag and '1' == flag.data:
    if flag and '1' == str(flag):
        result = True

    return result

def check_windows_mapping_enabled(storage):
    windows_mapping_enable_flag = 'Software\\BaseALT\\Policies\\GPUpdate\\WindowsPoliciesMapping'
    flag = storage.get_hklm_entry(windows_mapping_enable_flag)
    windows_mapping_enable_flag = '/Software/BaseALT/Policies/GPUpdate/WindowsPoliciesMapping'
    flag = storage.get_key_value(windows_mapping_enable_flag)

    result = True

    if flag and '0' == flag.data:
    flag = str(flag)
    if flag and '0' == flag:
        result = False

    return result

def check_module_enabled(storage, module_name):
    gpupdate_module_enable_branch = 'Software\\BaseALT\\Policies\\GPUpdate'
    gpupdate_module_flag = '{}\\{}'.format(gpupdate_module_enable_branch, module_name)
    flag = storage.get_hklm_entry(gpupdate_module_flag)
    gpupdate_module_enable_branch = '/Software/BaseALT/Policies/GPUpdate'
    gpupdate_module_flag = '{}/{}'.format(gpupdate_module_enable_branch, module_name)
    flag = storage.get_key_value(gpupdate_module_flag)

    result = None

    if flag:
        if '1' == flag.data:
    flag = str(flag)
    if flag and flag!='None':
        if '1' == flag:
            result = True
        if '0' == flag.data:
            result = False
        else:
            result = False

    return result
@@ -17,16 +17,14 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import subprocess
import threading
import logging
from util.logging import slogm, log
from util.logging import log

def control_subst(preg_name):
    '''
    This is a workaround for control names which can't be used in
    PReg/ADMX files.
    '''
    control_triggers = dict()
    control_triggers = {}
    control_triggers['dvd_rw-format'] = 'dvd+rw-format'
    control_triggers['dvd_rw-mediainfo'] = 'dvd+rw-mediainfo'
    control_triggers['dvd_rw-booktype'] = 'dvd+rw-booktype'
@@ -52,7 +50,7 @@ class control:
        Query possible values from control in order to perform check of
        parameter passed to constructor.
        '''
        values = list()
        values = []

        popen_call = ['/usr/sbin/control', self.control_name, 'list']
        with subprocess.Popen(popen_call, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
@@ -70,7 +68,7 @@ class control:
        try:
            str_status = self.possible_values[int_status]
        except IndexError as exc:
            logdata = dict()
            logdata = {}
            logdata['control'] = self.control_name
            logdata['value from'] = self.possible_values
            logdata['by index'] = int_status
@@ -99,20 +97,20 @@ class control:
        if type(self.control_value) == int:
            status = self._map_control_status(self.control_value)
            if status == None:
                logdata = dict()
                logdata = {}
                logdata['control'] = self.control_name
                logdata['inpossible values'] = self.self.control_value
                logdata['inpossible values'] = self.control_value
                log('E42', logdata)
                return
        elif type(self.control_value) == str:
            if self.control_value not in self.possible_values:
                logdata = dict()
                logdata = {}
                logdata['control'] = self.control_name
                logdata['inpossible values'] = self.self.control_value
                logdata['inpossible values'] = self.control_value
                log('E59', logdata)
                return
            status = self.control_value
        logdata = dict()
        logdata = {}
        logdata['control'] = self.control_name
        logdata['status'] = status
        log('D68', logdata)
@@ -122,7 +120,7 @@ class control:
            with subprocess.Popen(popen_call, stdout=subprocess.PIPE) as proc:
                proc.wait()
        except:
            logdata = dict()
            logdata = {}
            logdata['control'] = self.control_name
            logdata['status'] = status
            log('E43', logdata)
@@ -1,7 +1,7 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2020 BaseALT Ltd.
# Copyright (C) 2019-2024 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -17,27 +17,40 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from os.path import isfile
from util.logging import slogm
import logging

from gpt.envvars import (
from util.arguments import (
    FileAction
    , action_letter2enum
)
from util.windows import expand_windows_var
from util.util import (
    get_homedir,
    homedir_exists
)
from util.util import get_homedir
from util.logging import log

class Envvar:
    __envvar_file_path = '/etc/gpupdate/environment'
    __envvar_file_path_user = '/.gpupdate_environment'

    def __init__(self, envvars, username=''):
        self.username = username
        self.envvars = envvars
        if self.username == 'root':
            self.envvar_file_path = '/etc/gpupdate/environment'
            self.envvar_file_path = Envvar.__envvar_file_path
        else:
            self.envvar_file_path = get_homedir(self.username) + '/.gpupdate_environment'
            self.envvar_file_path = get_homedir(self.username) + Envvar.__envvar_file_path_user

    @staticmethod
    def clear_envvar_file(username = False):
        if username:
            file_path = get_homedir(username) + Envvar.__envvar_file_path_user
        else:
            file_path = Envvar.__envvar_file_path

        try:
            with open(file_path, 'w') as file:
                file.write('')
            log('D215', {'path':file_path})
        except Exception as exc:
            log('D216', {'path': file_path, 'exc': exc})

    def _open_envvar_file(self):
        fd = None
@@ -51,7 +64,7 @@ class Envvar:

    def _create_action(self, create_dict, envvar_file):
        lines_old = envvar_file.readlines()
        lines_new = list()
        lines_new = []
        for name in create_dict:
            exist = False
            for line in lines_old:
@@ -80,7 +93,7 @@ class Envvar:
            with open(self.envvar_file_path, 'r') as f:
                lines = f.readlines()
        else:
            lines = list()
            lines = []

        file_changed = False
        for envvar_object in self.envvars:
@@ -92,6 +105,8 @@ class Envvar:
            value = value.replace('\\', '/')
            exist_line = None
            for line in lines:
                if line == '\n':
                    continue
                if line.split()[0] == name:
                    exist_line = line
                    break
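A usage sketch for the new Envvar.clear_envvar_file() helper (illustrative only; the caller needs write access to the target file, and 'someuser' is a made-up account name):

# Truncate the machine-wide environment file /etc/gpupdate/environment.
Envvar.clear_envvar_file()
# Truncate the per-user file <homedir>/.gpupdate_environment instead.
Envvar.clear_envvar_file('someuser')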
gpoa/frontend/appliers/file_cp.py (new file, 303 lines)
@@ -0,0 +1,303 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from util.arguments import (
|
||||
FileAction
|
||||
, action_letter2enum
|
||||
)
|
||||
from .folder import str2bool
|
||||
from util.logging import log
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from util.windows import expand_windows_var
|
||||
from util.util import get_homedir, get_user_info
|
||||
from util.exceptions import NotUNCPathError
|
||||
from util.paths import UNCPath
|
||||
import fnmatch
|
||||
import pwd
|
||||
import grp
|
||||
|
||||
class Files_cp:
|
||||
def __init__(self, file_obj, file_cache, exe_check, username=None):
|
||||
self.file_cache = file_cache
|
||||
self.exe_check = exe_check
|
||||
targetPath = expand_windows_var(file_obj.targetPath, username).replace('\\', '/')
|
||||
self.targetPath = check_target_path(targetPath, username)
|
||||
if not self.targetPath:
|
||||
return
|
||||
self.fromPath = (expand_windows_var(file_obj.fromPath, username).replace('\\', '/')
|
||||
if file_obj.fromPath else None)
|
||||
self.isTargetPathDirectory = False
|
||||
self.action = action_letter2enum(file_obj.action)
|
||||
self.readOnly = str2bool(file_obj.readOnly)
|
||||
self.archive = str2bool(file_obj.archive)
|
||||
self.hidden = str2bool(file_obj.hidden)
|
||||
self.suppress = str2bool(file_obj.suppress)
|
||||
self.executable = str2bool(file_obj.executable)
|
||||
self.username = username
|
||||
self.pw = get_user_info(username) if username else None
|
||||
self.fromPathFiles = []
|
||||
if self.fromPath:
|
||||
if targetPath[-1] == '/' or self.is_pattern(Path(self.fromPath).name):
|
||||
self.isTargetPathDirectory = True
|
||||
self.get_list_files()
|
||||
self.act()
|
||||
|
||||
def get_target_file(self, targetPath:Path, fromFile:str) -> Path:
|
||||
try:
|
||||
if fromFile:
|
||||
fromFileName = Path(fromFile).name
|
||||
if self.isTargetPathDirectory:
|
||||
targetPath.mkdir(parents = True, exist_ok = True)
|
||||
else:
|
||||
targetPath.parent.mkdir(parents = True, exist_ok = True)
|
||||
targetPath = targetPath.parent
|
||||
fromFileName = self.targetPath.name
|
||||
if self.hidden:
|
||||
return targetPath.joinpath('.' + fromFileName)
|
||||
else:
|
||||
return targetPath.joinpath(fromFileName)
|
||||
|
||||
else:
|
||||
if not self.hidden:
|
||||
return targetPath
|
||||
else:
|
||||
return targetPath.parent.joinpath('.' + targetPath.name)
|
||||
except Exception as exc:
|
||||
logdata = {}
|
||||
logdata['targetPath'] = targetPath
|
||||
logdata['fromFile'] = fromFile
|
||||
logdata['exc'] = exc
|
||||
log('D163', logdata)
|
||||
|
||||
return None
|
||||
|
||||
def copy_target_file(self, targetFile:Path, fromFile:str):
|
||||
try:
|
||||
uri_path = UNCPath(fromFile)
|
||||
self.file_cache.store(fromFile, targetFile)
|
||||
except NotUNCPathError as exc:
|
||||
fromFilePath = Path(fromFile)
|
||||
if fromFilePath.exists():
|
||||
targetFile.write_bytes(fromFilePath.read_bytes())
|
||||
except Exception as exc:
|
||||
logdata = {}
|
||||
logdata['targetFile'] = targetFile
|
||||
logdata['fromFile'] = fromFile
|
||||
logdata['exc'] = exc
|
||||
log('W15', logdata)
|
||||
|
||||
def set_exe_file(self, targetFile, fromFile):
|
||||
if self.executable:
|
||||
return True
|
||||
if Path(fromFile).suffix in self.exe_check.get_list_markers():
|
||||
targetPath = targetFile.parent
|
||||
for i in self.exe_check.get_list_paths():
|
||||
if targetPath == Path(i):
|
||||
return True
|
||||
return False
|
||||
|
||||
def set_mod_file(self, targetFile, fromFile):
|
||||
if not targetFile.is_file():
|
||||
return
|
||||
if self.set_exe_file(targetFile, fromFile):
|
||||
if self.readOnly:
|
||||
shutil.os.chmod(targetFile, 0o555)
|
||||
else:
|
||||
shutil.os.chmod(targetFile, 0o755)
|
||||
else:
|
||||
if self.readOnly:
|
||||
shutil.os.chmod(targetFile, 0o444)
|
||||
else:
|
||||
shutil.os.chmod(targetFile, 0o644)
|
||||
|
||||
def _create_action(self):
|
||||
logdata = {}
|
||||
for fromFile in self.fromPathFiles:
|
||||
targetFile = None
|
||||
|
||||
try:
|
||||
targetFile = self.get_target_file(self.targetPath, fromFile)
|
||||
if targetFile and not targetFile.exists():
|
||||
self.copy_target_file(targetFile, fromFile)
|
||||
if self.username:
|
||||
group_name = grp.getgrgid(self.pw.pw_gid).gr_name
|
||||
chown_home_path(targetFile, username=self.username, group=group_name)
|
||||
self.set_mod_file(targetFile, fromFile)
|
||||
logdata['File'] = targetFile
|
||||
log('D191', logdata)
|
||||
except Exception as exc:
|
||||
logdata['exc'] = exc
|
||||
logdata['fromPath'] = fromFile
|
||||
logdata['targetPath'] = self.targetPath
|
||||
logdata['targetFile'] = targetFile
|
||||
log('D164', logdata)
|
||||
|
||||
def _delete_action(self):
|
||||
list_target = [self.targetPath.name]
|
||||
if self.is_pattern(self.targetPath.name) and self.targetPath.parent.exists() and self.targetPath.parent.is_dir():
|
||||
list_target = fnmatch.filter([str(x.name) for x in self.targetPath.parent.iterdir() if x.is_file()], self.targetPath.name)
|
||||
logdata = {}
|
||||
for targetFile in list_target:
|
||||
targetFile = self.targetPath.parent.joinpath(targetFile)
|
||||
try:
|
||||
if targetFile.exists():
|
||||
targetFile.unlink()
|
||||
logdata['File'] = targetFile
|
||||
log('D193', logdata)
|
||||
|
||||
except Exception as exc:
|
||||
logdata['exc'] = exc
|
||||
logdata['targetPath'] = self.targetPath
|
||||
logdata['targetFile'] = targetFile
|
||||
log('D165', logdata)
|
||||
|
||||
def _update_action(self):
|
||||
logdata = {}
|
||||
for fromFile in self.fromPathFiles:
|
||||
targetFile = self.get_target_file(self.targetPath, fromFile)
|
||||
try:
|
||||
self.copy_target_file(targetFile, fromFile)
|
||||
if self.username:
|
||||
shutil.chown(self.targetPath, self.username)
|
||||
group_name = grp.getgrgid(self.pw.pw_gid).gr_name
|
||||
chown_home_path(targetFile, username=self.username, group=group_name)
|
||||
self.set_mod_file(targetFile, fromFile)
|
||||
logdata['File'] = targetFile
|
||||
log('D192', logdata)
|
||||
except Exception as exc:
|
||||
logdata['exc'] = exc
|
||||
logdata['fromPath'] = self.fromPath
|
||||
logdata['targetPath'] = self.targetPath
|
||||
logdata['targetFile'] = targetFile
|
||||
log('D166', logdata)
|
||||
|
||||
def act(self):
|
||||
if self.action == FileAction.CREATE:
|
||||
self._create_action()
|
||||
if self.action == FileAction.UPDATE:
|
||||
self._update_action()
|
||||
if self.action == FileAction.DELETE:
|
||||
self._delete_action()
|
||||
if self.action == FileAction.REPLACE:
|
||||
self._delete_action()
|
||||
self._create_action()
|
||||
|
||||
def is_pattern(self, name):
|
||||
if name.find('*') != -1 or name.find('?') != -1:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def get_list_files(self):
|
||||
logdata = {}
|
||||
logdata['targetPath'] = str(self.targetPath)
|
||||
fromFilePath = Path(self.fromPath)
|
||||
if not self.is_pattern(fromFilePath.name):
|
||||
self.fromPathFiles.append(self.fromPath)
|
||||
else:
|
||||
fromPathDir = self.fromPath[:self.fromPath.rfind('/')]
|
||||
|
||||
try:
|
||||
uri_path = UNCPath(fromPathDir)
|
||||
ls_files = self.file_cache.get_ls_smbdir(fromPathDir)
|
||||
if ls_files:
|
||||
filtered_ls_files = fnmatch.filter(ls_files, fromFilePath.name)
|
||||
if filtered_ls_files:
|
||||
self.fromPathFiles = [fromPathDir + '/' + file_s for file_s in filtered_ls_files]
|
||||
except NotUNCPathError as exc:
|
||||
try:
|
||||
exact_path = Path(fromPathDir)
|
||||
if exact_path.is_dir():
|
||||
self.fromPathFiles = [str(fromFile) for fromFile in exact_path.iterdir() if fromFile.is_file()]
|
||||
except Exception as exc:
|
||||
logdata['fromPath'] = self.fromPath
|
||||
logdata['exc'] = exc
|
||||
log('W3316', logdata)
|
||||
except Exception as exc:
|
||||
logdata['fromPath'] = self.fromPath
|
||||
logdata['exc'] = exc
|
||||
log('W3317', logdata)
|
||||
|
||||
def check_target_path(path_to_check, username = None):
|
||||
'''
|
||||
Function for checking the correctness of the path
|
||||
'''
|
||||
if not path_to_check:
|
||||
return None
|
||||
|
||||
checking = Path(path_to_check)
|
||||
rootpath = Path('/')
|
||||
if username:
|
||||
rootpath = Path(get_homedir(username))
|
||||
|
||||
return rootpath.joinpath(checking)
|
||||
|
||||
class Execution_check():
|
||||
|
||||
__etension_marker_key_name = 'ExtensionMarker'
|
||||
__marker_usage_path_key_name = 'MarkerUsagePath'
|
||||
__hklm_branch = 'Software\\BaseALT\\Policies\\GroupPolicies\\Files'
|
||||
|
||||
def __init__(self, storage):
|
||||
etension_marker_branch = '{}\\{}%'.format(self.__hklm_branch, self.__etension_marker_key_name)
|
||||
marker_usage_path_branch = '{}\\{}%'.format(self.__hklm_branch, self.__marker_usage_path_key_name)
|
||||
self.etension_marker = storage.filter_hklm_entries(etension_marker_branch)
|
||||
self.marker_usage_path = storage.filter_hklm_entries(marker_usage_path_branch)
|
||||
self.list_paths = []
|
||||
self.list_markers = []
|
||||
for marker in self.etension_marker:
|
||||
self.list_markers.append(marker.data)
|
||||
for usage_path in self.marker_usage_path:
|
||||
self.list_paths.append(usage_path.data)
|
||||
|
||||
def get_list_paths(self):
|
||||
return self.list_paths
|
||||
|
||||
def get_list_markers(self):
|
||||
return self.list_markers
|
||||
|
||||
|
||||
def chown_home_path(path: Path, username: str, group: str) -> None:
|
||||
"""
|
||||
Change ownership (user and group) of the given path and all its parent
|
||||
directories up to (but NOT including) the user's home directory.
|
||||
|
||||
If the path is not inside the user's home directory, do nothing.
|
||||
|
||||
:param path: Path to a file or directory.
|
||||
:param user: Username to set as owner.
|
||||
:param group: Group name to set as group.
|
||||
"""
|
||||
path = path.resolve()
|
||||
home_root = Path(get_homedir(username))
|
||||
|
||||
# Check if the path is inside user's home directory
|
||||
if home_root not in path.parents:
|
||||
return # Not inside user's home - do nothing
|
||||
|
||||
# Walk upwards from the given path until just above home_root
|
||||
current = path
|
||||
while True:
|
||||
if current == home_root:
|
||||
break # do not change ownership of the home directory itself
|
||||
shutil.chown(current, user=username, group=group)
|
||||
if current.parent == current: # Safety check: reached root (/)
|
||||
break
|
||||
current = current.parent
|
||||
@@ -20,7 +20,7 @@ from enum import Enum
import subprocess

def getprops(param_list):
    props = dict()
    props = {}

    for entry in param_list:
        lentry = entry.lower()
@@ -35,7 +35,7 @@ def getprops(param_list):


def get_ports(param_list):
    portlist = list()
    portlist = []

    for entry in param_list:
        lentry = entry.lower()
@@ -20,14 +20,15 @@
from pathlib import Path


from gpt.folders import (
from util.arguments import (
    FileAction
    , action_letter2enum
)
from util.windows import expand_windows_var
from util.util import get_homedir

def remove_dir_tree(path, delete_files=False, delete_folder=False, delete_sub_folders=False):
    content = list()
    content = []
    for entry in path.iterdir():
        content.append(entry)
        if entry.is_file() and delete_files:
@@ -35,36 +36,54 @@ def remove_dir_tree(path, delete_files=False, delete_folder=False, delete_sub_fo
            content.remove(entry)
        if entry.is_dir() and delete_sub_folders:
            content.remove(entry)
            remove_dir_tree(entry, delete_files, delete_folder, delete_sub_folders)
            content.extend(remove_dir_tree(entry, delete_files, delete_folder, delete_sub_folders))

    if delete_folder and not content:
        path.rmdir()

    return content

def str2bool(boolstr):
    if boolstr.lower() in ['true', 'yes', '1']:
    if isinstance(boolstr, bool):
        return boolstr
    elif boolstr and boolstr.lower() in ['true', 'yes', '1']:
        return True
    return False


class Folder:
    def __init__(self, folder_object, username):
        self.folder_path = Path(expand_windows_var(folder_object.path, username).replace('\\', '/'))
    def __init__(self, folder_object, username=None):
        folder_path = expand_windows_var(folder_object.path, username).replace('\\', '/').replace('//', '/')
        if username:
            folder_path = folder_path.replace(get_homedir(username), '')
            self.folder_path = Path(get_homedir(username)).joinpath(folder_path if folder_path [0] != '/' else folder_path [1:])
        else:
            self.folder_path = Path(folder_path)
        self.action = action_letter2enum(folder_object.action)
        self.delete_files = str2bool(folder_object.delete_files)
        self.delete_folder = str2bool(folder_object.delete_folder)
        self.delete_sub_folders = str2bool(folder_object.delete_sub_folders)
        self.hidden_folder = str2bool(folder_object.hidden_folder)

    def _create_action(self):
        self.folder_path.mkdir(parents=True, exist_ok=True)

    def _delete_action(self):
        if self.folder_path.exists():
            if self.action == FileAction.REPLACE:
                self.delete_folder = True
            remove_dir_tree(self.folder_path,
                self.delete_files,
                self.delete_folder,
                self.delete_sub_folders)


    def act(self):
        if self.hidden_folder == True and str(self.folder_path.name)[0] != '.':
            path_components = [*self.folder_path.parts]
            path_components[-1] = '.' + path_components[-1]
            new_folder_path = Path(*path_components)
            self.folder_path = new_folder_path
        if self.action == FileAction.CREATE:
            self._create_action()
        if self.action == FileAction.UPDATE:
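For reference (not part of the commit), the reworked str2bool() now tolerates real booleans and empty values; a few illustrative checks of the function shown above:

assert str2bool(True) is True
assert str2bool('Yes') is True
assert str2bool('1') is True
assert str2bool('') is False
assert str2bool('0') is False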
@@ -1,7 +1,7 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2021 BaseALT Ltd.
# Copyright (C) 2019-2025 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -18,10 +18,9 @@

import configparser
import os
import logging
from gi.repository import Gio, GLib

from util.logging import slogm, log
from util.logging import log

class system_gsetting:
    def __init__(self, schema, path, value, lock, helper_function=None):
@@ -54,15 +53,15 @@ class system_gsettings:
    __profile_data = 'user-db:user\nsystem-db:policy\nsystem-db:local\n'

    def __init__(self, override_file_path):
        self.gsettings = list()
        self.locks = list()
        self.gsettings = []
        self.locks = []
        self.override_file_path = override_file_path

    def append(self, schema, path, data, lock, helper):
        if check_existing_gsettings(schema, path):
            self.gsettings.append(system_gsetting(schema, path, data, lock, helper))
        else:
            logdata = dict()
            logdata = {}
            logdata['schema'] = schema
            logdata['path'] = path
            logdata['data'] = data
@@ -73,7 +72,7 @@ class system_gsettings:
        config = configparser.ConfigParser()

        for gsetting in self.gsettings:
            logdata = dict()
            logdata = {}
            logdata['gsetting.schema'] = gsetting.schema
            logdata['gsetting.path'] = gsetting.path
            logdata['gsetting.value'] = gsetting.value
@@ -133,13 +132,13 @@ def check_existing_gsettings (schema, path):

class user_gsettings:
    def __init__(self):
        self.gsettings = list()
        self.gsettings = []

    def append(self, schema, path, value, helper=None):
        if check_existing_gsettings(schema, path):
            self.gsettings.append(user_gsetting(schema, path, value, helper))
        else:
            logdata = dict()
            logdata = {}
            logdata['schema'] = schema
            logdata['path'] = path
            logdata['data'] = value
@@ -147,7 +146,7 @@ class user_gsettings:

    def apply(self):
        for gsetting in self.gsettings:
            logdata = dict()
            logdata = {}
            logdata['gsetting.schema'] = gsetting.schema
            logdata['gsetting.path'] = gsetting.path
            logdata['gsetting.value'] = gsetting.value
gpoa/frontend/appliers/ini_file.py (new file, 114 lines)
@@ -0,0 +1,114 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
|
||||
from util.arguments import (
|
||||
FileAction
|
||||
, action_letter2enum
|
||||
)
|
||||
from util.logging import log
|
||||
from pathlib import Path
|
||||
from util.windows import expand_windows_var
|
||||
from util.util import get_homedir
|
||||
from util.gpoa_ini_parsing import GpoaConfigObj
|
||||
|
||||
|
||||
class Ini_file:
|
||||
def __init__(self, ini_obj, username=None):
|
||||
path = expand_windows_var(ini_obj.path, username).replace('\\', '/')
|
||||
self.path = check_path(path, username)
|
||||
if not self.path:
|
||||
logdata = {'path': ini_obj.path}
|
||||
log('D175', logdata)
|
||||
return None
|
||||
self.section = ini_obj.section
|
||||
self.action = action_letter2enum(ini_obj.action)
|
||||
self.key = ini_obj.property
|
||||
self.value = ini_obj.value
|
||||
try:
|
||||
self.config = GpoaConfigObj(str(self.path), unrepr=False)
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('D176', logdata)
|
||||
return
|
||||
|
||||
self.act()
|
||||
|
||||
def _create_action(self):
|
||||
if self.path.is_dir():
|
||||
return
|
||||
if self.section not in self.config:
|
||||
self.config[self.section] = {}
|
||||
|
||||
self.config[self.section][self.key] = self.value
|
||||
self.config.write()
|
||||
|
||||
|
||||
def _delete_action(self):
|
||||
if not self.path.exists() or self.path.is_dir():
|
||||
return
|
||||
if not self.section:
|
||||
self.path.unlink()
|
||||
return
|
||||
if self.section in self.config:
|
||||
if not self.key:
|
||||
self.config.pop(self.section)
|
||||
elif self.key in self.config[self.section]:
|
||||
self.config[self.section].pop(self.key)
|
||||
self.config.write()
|
||||
|
||||
|
||||
def act(self):
|
||||
try:
|
||||
if self.action == FileAction.CREATE:
|
||||
self._create_action()
|
||||
if self.action == FileAction.UPDATE:
|
||||
self._create_action()
|
||||
if self.action == FileAction.DELETE:
|
||||
self._delete_action()
|
||||
if self.action == FileAction.REPLACE:
|
||||
self._create_action()
|
||||
except Exception as exc:
|
||||
logdata = {}
|
||||
logdata['action'] = self.action
|
||||
logdata['exc'] = exc
|
||||
log('W23', logdata)
|
||||
|
||||
|
||||
def check_path(path_to_check, username = None):
|
||||
'''
|
||||
Function for checking the right path for Inifile
|
||||
'''
|
||||
checking = Path(path_to_check)
|
||||
if checking.exists():
|
||||
if username and path_to_check == '/':
|
||||
return Path(get_homedir(username))
|
||||
return checking
|
||||
#Check for path directory without '/nameIni' suffix
|
||||
elif (len(path_to_check.split('/')) > 2
|
||||
and Path(path_to_check.replace(path_to_check.split('/')[-1], '')).is_dir()):
|
||||
return checking
|
||||
elif username:
|
||||
target_path = Path(get_homedir(username))
|
||||
res = target_path.joinpath(path_to_check
|
||||
if path_to_check[0] != '/'
|
||||
else path_to_check[1:])
|
||||
return check_path(str(res))
|
||||
else:
|
||||
return False
|
||||
gpoa/frontend/appliers/netshare.py (new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import subprocess
|
||||
|
||||
from util.arguments import (
|
||||
FileAction
|
||||
, action_letter2enum
|
||||
)
|
||||
from util.logging import log
|
||||
from util.windows import expand_windows_var
|
||||
|
||||
|
||||
class Networkshare:
|
||||
|
||||
def __init__(self, networkshare_obj, username = None):
|
||||
self.net_full_cmd = ['/usr/bin/net', 'usershare']
|
||||
self.net_cmd_check = ['/usr/bin/net', 'usershare', 'list']
|
||||
self.cmd = []
|
||||
self.name = networkshare_obj.name
|
||||
self.path = expand_windows_var(networkshare_obj.path, username).replace('\\', '/') if networkshare_obj.path else None
|
||||
|
||||
self.action = action_letter2enum(networkshare_obj.action)
|
||||
self.allRegular = networkshare_obj.allRegular
|
||||
self.comment = networkshare_obj.comment
|
||||
self.limitUsers = networkshare_obj.limitUsers
|
||||
self.abe = networkshare_obj.abe
|
||||
self._guest = 'guest_ok=y'
|
||||
self.acl = 'Everyone:'
|
||||
self.act()
|
||||
|
||||
def check_list_net(self):
|
||||
try:
|
||||
res = subprocess.check_output(self.net_cmd_check, encoding='utf-8')
|
||||
return res
|
||||
except Exception as exc:
|
||||
return exc
|
||||
|
||||
def _run_net_full_cmd(self):
|
||||
logdata = {}
|
||||
try:
|
||||
res = subprocess.check_output(self.net_full_cmd, stderr=subprocess.DEVNULL, encoding='utf-8')
|
||||
if res:
|
||||
logdata['cmd'] = self.net_full_cmd
|
||||
logdata['answer'] = res
|
||||
log('D190', logdata)
|
||||
except Exception as exc:
|
||||
logdata['cmd'] = self.net_full_cmd
|
||||
logdata['exc'] = exc
|
||||
log('D182', logdata)
|
||||
|
||||
|
||||
def _create_action(self):
|
||||
self.net_full_cmd.append('add')
|
||||
self.net_full_cmd.append(self.name)
|
||||
self.net_full_cmd.append(self.path)
|
||||
self.net_full_cmd.append(self.comment)
|
||||
self.net_full_cmd.append(self.acl + 'F')
|
||||
self.net_full_cmd.append(self._guest)
|
||||
self._run_net_full_cmd()
|
||||
|
||||
def _delete_action(self):
|
||||
self.net_full_cmd.append('delete')
|
||||
self.net_full_cmd.append(self.name)
|
||||
self._run_net_full_cmd()
|
||||
|
||||
def act(self):
|
||||
if self.action == FileAction.CREATE:
|
||||
self._create_action()
|
||||
if self.action == FileAction.UPDATE:
|
||||
self._create_action()
|
||||
if self.action == FileAction.DELETE:
|
||||
self._delete_action()
|
||||
if self.action == FileAction.REPLACE:
|
||||
self._create_action()
|
||||
@@ -1,7 +1,7 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2020 BaseALT Ltd.
# Copyright (C) 2019-2025 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -18,9 +18,8 @@

import os
import jinja2
import logging

from util.logging import slogm, log
from util.logging import log

class polkit:
    __template_path = '/usr/share/gpupdate/templates'
@@ -38,7 +37,20 @@ class polkit:
        else:
            self.outfile = os.path.join(self.__policy_dir, '{}.rules'.format(self.template_name))

    def _is_empty(self):
        for key, item in self.args.items():
            if key == 'User':
                continue
            elif item:
                return False
        return True

    def generate(self):
        if self._is_empty():
            if os.path.isfile(self.outfile):
                os.remove(self.outfile)
            return
        logdata = {}
        try:
            template = self.__template_environment.get_template(self.infilename)
            text = template.render(**self.args)
@@ -46,12 +58,10 @@ class polkit:
            with open(self.outfile, 'w') as f:
                f.write(text)

            logdata = dict()
            logdata['file'] = self.outfile
            logdata['arguments'] = self.args
            log('D77', logdata)
        except Exception as exc:
            logdata = dict()
            logdata['file'] = self.outfile
            logdata['arguments'] = self.args
            log('E44', logdata)
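An illustrative note on the new polkit._is_empty() above (not part of the commit): the 'User' key is ignored, so a rules template whose only argument is the user name counts as empty and generate() removes the stale .rules file instead of rendering it. With made-up argument names:

args = {'User': 'jdoe'}                          # _is_empty() -> True, old .rules file is removed
args = {'User': 'jdoe', 'SomeRule': ['value']}   # _is_empty() -> False, template is rendered as usual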
@@ -1,7 +1,7 @@
#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2020 BaseALT Ltd.
# Copyright (C) 2019-2025 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -17,9 +17,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import dbus
import logging

from util.logging import slogm, log
from util.logging import log

class systemd_unit:
    def __init__(self, unit_name, state):
@@ -35,12 +34,14 @@ class systemd_unit:
        self.unit_properties = dbus.Interface(self.unit_proxy, dbus_interface='org.freedesktop.DBus.Properties')

    def apply(self):
        logdata = {'unit': self.unit_name}
        if self.desired_state == 1:
            self.manager.UnmaskUnitFiles([self.unit_name], dbus.Boolean(False))
            self.manager.EnableUnitFiles([self.unit_name], dbus.Boolean(False), dbus.Boolean(True))
            if self.unit_name == 'gpupdate.service':
                if self.manager.GetUnitFileState(dbus.String(self.unit_name)) == 'enabled':
                    return
            self.manager.StartUnit(self.unit_name, 'replace')
            logdata = dict()
            logdata['unit'] = self.unit_name
            log('I6', logdata)

            # In case the service has 'RestartSec' property set it
@@ -48,23 +49,21 @@ class systemd_unit:
            # 'active' so we consider 'activating' a valid state too.
            service_state = self._get_state()

            if not service_state in ['active', 'activating']:
                logdata = dict()
                logdata['unit'] = self.unit_name
                log('E46', logdata)
            if service_state not in ('active', 'activating'):
                service_timer_name = self.unit_name.replace(".service", ".timer")
                self.unit = self.manager.LoadUnit(dbus.String(service_timer_name))
                service_state = self._get_state()
                if service_state not in ('active', 'activating'):
                    log('E46', logdata)
        else:
            self.manager.StopUnit(self.unit_name, 'replace')
            self.manager.DisableUnitFiles([self.unit_name], dbus.Boolean(False))
            self.manager.MaskUnitFiles([self.unit_name], dbus.Boolean(False), dbus.Boolean(True))
            logdata = dict()
            logdata['unit'] = self.unit_name
            log('I6', logdata)

            service_state = self._get_state()

            if not service_state in ['stopped']:
                logdata = dict()
                logdata['unit'] = self.unit_name
            if service_state not in ('stopped', 'deactivating', 'inactive'):
                log('E46', logdata)

    def _get_state(self):
@@ -73,3 +72,19 @@ class systemd_unit:
        '''
        return self.unit_properties.Get('org.freedesktop.systemd1.Unit', 'ActiveState')

    def restart(self):
        """
        Restarts the specified unit, if available
        """
        logdata = {'unit': self.unit_name, 'action': 'restart'}
        try:
            self.unit = self.manager.LoadUnit(dbus.String(self.unit_name))
            self.manager.RestartUnit(self.unit_name, 'replace')
            log('I13', logdata)
            service_state = self._get_state()
            if service_state not in ('active', 'activating'):
                log('E77', logdata)

        except dbus.DBusException as exc:
            log('E77', {**logdata, 'error': str(exc)})
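A hedged usage sketch for the unit applier above (assumes systemd is reachable over D-Bus and that state=1 means "enable", as apply() suggests):

unit = systemd_unit('gpupdate.service', 1)
unit.apply()    # unmask, enable and start the unit, then verify it is active/activating
unit.restart()  # new in this commit: RestartUnit via org.freedesktop.systemd1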
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,140 +16,69 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
|
||||
from util.logging import slogm, log
|
||||
from util.util import is_machine_name
|
||||
from util.logging import log
|
||||
from util.util import is_machine_name, string_to_literal_eval
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
|
||||
class chromium_applier(applier_frontend):
|
||||
__module_name = 'ChromiumApplier'
|
||||
__module_enabled = True
|
||||
__module_experimental = False
|
||||
__registry_branch = 'Software\\Policies\\Google\\Chrome'
|
||||
__registry_branch = 'Software/Policies/Google/Chrome'
|
||||
__managed_policies_path = '/etc/chromium/policies/managed'
|
||||
__recommended_policies_path = '/etc/chromium/policies/recommended'
|
||||
# JSON file where Chromium stores its settings (and which is
|
||||
# overwritten every exit.
|
||||
__user_settings = '.config/chromium/Default'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self._is_machine_name = is_machine_name(self.username)
|
||||
self.policies = dict()
|
||||
self.chromium_keys = self.storage.filter_hklm_entries(self.__registry_branch)
|
||||
|
||||
self.policies_json = {}
|
||||
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def get_hklm_string_entry(self, hive_subkey):
|
||||
query_str = '{}\\{}'.format(self.__registry_branch, hive_subkey)
|
||||
return self.storage.get_hklm_entry(query_str)
|
||||
|
||||
def get_hkcu_string_entry(self, hive_subkey):
|
||||
query_str = '{}\\{}'.format(self.__registry_branch, hive_subkey)
|
||||
return self.storage.get_hkcu_entry(sid, query_str)
|
||||
|
||||
def get_hklm_string_entry_default(self, hive_subkey, default):
|
||||
'''
|
||||
Return row from HKLM table identified by hive_subkey as string
|
||||
or return supplied default value if such hive_subkey is missing.
|
||||
'''
|
||||
|
||||
defval = str(default)
|
||||
response = self.get_hklm_string_entry(hive_subkey)
|
||||
|
||||
if response:
|
||||
return response.data
|
||||
|
||||
return defval
|
||||
|
||||
def get_hkcu_string_entry_default(self, hive_subkey, default):
|
||||
defval = str(default)
|
||||
response = self.get_hkcu_string_entry(hive_subkey)
|
||||
if response:
|
||||
return response.data
|
||||
return defval
|
||||
|
||||
def set_policy(self, name, obj):
|
||||
if obj:
|
||||
self.policies[name] = obj
|
||||
logdata = dict()
|
||||
logdata['name'] = name
|
||||
logdata['set to'] = obj
|
||||
log('I8', logdata)
|
||||
|
||||
def set_user_policy(self, name, obj):
|
||||
'''
|
||||
Please not that writing user preferences file is not considered
|
||||
a good practice and used mostly by various malware.
|
||||
'''
|
||||
if not self._is_machine_name:
|
||||
prefdir = os.path.join(util.get_homedir(self.username), self.__user_settings)
|
||||
os.makedirs(prefdir, exist_ok=True)
|
||||
|
||||
prefpath = os.path.join(prefdir, 'Preferences')
|
||||
util.mk_homedir_path(self.username, self.__user_settings)
|
||||
settings = dict()
|
||||
try:
|
||||
with open(prefpath, 'r') as f:
|
||||
settings = json.load(f)
|
||||
except FileNotFoundError as exc:
|
||||
logdata = dict()
|
||||
logdata['prefpath'] = prefpath
|
||||
log('E51', logdata)
|
||||
except:
|
||||
logdata = dict()
|
||||
logdata['username'] = self.username
|
||||
log('E51', logdata)
|
||||
|
||||
if obj:
|
||||
settings[name] = obj
|
||||
|
||||
with open(prefpath, 'w') as f:
|
||||
json.dump(settings, f)
|
||||
logdata = dict()
|
||||
logdata['user'] = self.username
|
||||
logdata['name'] = name
|
||||
logdata['set to'] = obj
|
||||
log('I9', logdata)
|
||||
|
||||
def get_home_page(self, hkcu=False):
|
||||
response = self.get_hklm_string_entry('HomepageLocation')
|
||||
result = 'about:blank'
|
||||
if response:
|
||||
result = response.data
|
||||
return result
|
||||
|
||||
    def machine_apply(self):
        '''
        Apply machine settings.
        '''
        self.set_policy('HomepageLocation', self.get_home_page())

        destfile = os.path.join(self.__managed_policies_path, 'policies.json')

        try:
            recommended__json = self.policies_json.pop('Recommended')
        except:
            recommended__json = {}

        #Replacing all nested dictionaries with a list
        dict_item_to_list = (
            lambda target_dict :
            {key:[*val.values()] if type(val) == dict else string_to_literal_eval(val) for key,val in target_dict.items()}
        )
        os.makedirs(self.__managed_policies_path, exist_ok=True)
        with open(destfile, 'w') as f:
            json.dump(self.policies, f)
            logdata = dict()
            json.dump(dict_item_to_list(self.policies_json), f)
            logdata = {}
            logdata['destfile'] = destfile
            log('D97', logdata)
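
# A minimal standalone sketch (not from the patch) of what the
# dict_item_to_list lambda above does before policies.json is written:
# one level of the collected registry tree is flattened, so a branch built
# from numbered subkeys becomes a plain JSON list, while string values go
# through string_to_literal_eval. The helper below is a hypothetical
# stand-in for util.util.string_to_literal_eval, assumed to return
# non-literal strings unchanged.
import ast

def string_to_literal_eval_sketch(value):
    try:
        return ast.literal_eval(value)
    except (ValueError, SyntaxError):
        return value

policies_json = {
    'HomepageLocation': 'https://example.org',
    'ExtensionInstallForcelist': {'1': 'aaaa;https://update', '2': 'bbbb;https://update'},
}
flat = {key: [*val.values()] if type(val) == dict else string_to_literal_eval_sketch(val)
        for key, val in policies_json.items()}
# flat == {'HomepageLocation': 'https://example.org',
#          'ExtensionInstallForcelist': ['aaaa;https://update', 'bbbb;https://update']}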
|
||||
|
||||
def user_apply(self):
|
||||
'''
|
||||
Apply settings for the specified username.
|
||||
'''
|
||||
self.set_user_policy('homepage', self.get_home_page(hkcu=True))
|
||||
destfilerec = os.path.join(self.__recommended_policies_path, 'policies.json')
|
||||
os.makedirs(self.__recommended_policies_path, exist_ok=True)
|
||||
with open(destfilerec, 'w') as f:
|
||||
json.dump(dict_item_to_list(recommended__json), f)
|
||||
logdata = {}
|
||||
logdata['destfilerec'] = destfilerec
|
||||
log('D97', logdata)
|
||||
|
||||
|
||||
def apply(self):
|
||||
'''
|
||||
@@ -157,10 +86,135 @@ class chromium_applier(applier_frontend):
|
||||
'''
|
||||
if self.__module_enabled:
|
||||
log('D95')
|
||||
self.create_dict(self.chromium_keys)
|
||||
self.machine_apply()
|
||||
else:
|
||||
log('D96')
|
||||
#if not self._is_machine_name:
|
||||
# logging.debug('Running user applier for Chromium')
|
||||
# self.user_apply()
|
||||
|
||||
def get_valuename_typeint(self):
|
||||
'''
|
||||
List of keys resulting from parsing chrome.admx with parsing_chrom_admx_intvalues.py
|
||||
'''
|
||||
valuename_typeint = (['DefaultClipboardSetting',
|
||||
'DefaultCookiesSetting',
|
||||
'DefaultFileSystemReadGuardSetting',
|
||||
'DefaultFileSystemWriteGuardSetting',
|
||||
'DefaultGeolocationSetting',
|
||||
'DefaultImagesSetting',
|
||||
'DefaultInsecureContentSetting',
|
||||
'DefaultJavaScriptJitSetting',
|
||||
'DefaultJavaScriptSetting',
|
||||
'DefaultLocalFontsSetting',
|
||||
'DefaultNotificationsSetting',
|
||||
'DefaultPopupsSetting',
|
||||
'DefaultSensorsSetting',
|
||||
'DefaultSerialGuardSetting',
|
||||
'DefaultThirdPartyStoragePartitioningSetting',
|
||||
'DefaultWebBluetoothGuardSetting',
|
||||
'DefaultWebHidGuardSetting',
|
||||
'DefaultWebUsbGuardSetting',
|
||||
'DefaultWindowManagementSetting',
|
||||
'DefaultMediaStreamSetting',
|
||||
'DefaultWindowPlacementSetting',
|
||||
'ProxyServerMode',
|
||||
'ExtensionManifestV2Availability',
|
||||
'ExtensionUnpublishedAvailability',
|
||||
'CreateThemesSettings',
|
||||
'DevToolsGenAiSettings',
|
||||
'GenAILocalFoundationalModelSettings',
|
||||
'HelpMeWriteSettings',
|
||||
'TabOrganizerSettings',
|
||||
'BrowserSwitcherParsingMode',
|
||||
'CloudAPAuthEnabled',
|
||||
'AdsSettingForIntrusiveAdsSites',
|
||||
'AmbientAuthenticationInPrivateModesEnabled',
|
||||
'BatterySaverModeAvailability',
|
||||
'BrowserSignin',
|
||||
'ChromeVariations',
|
||||
'DeveloperToolsAvailability',
|
||||
'DownloadRestrictions',
|
||||
'ForceYouTubeRestrict',
|
||||
'HeadlessMode',
|
||||
'IncognitoModeAvailability',
|
||||
'IntranetRedirectBehavior',
|
||||
'LensOverlaySettings',
|
||||
'MemorySaverModeSavings',
|
||||
'NetworkPredictionOptions',
|
||||
'ProfilePickerOnStartupAvailability',
|
||||
'ProfileReauthPrompt',
|
||||
'RelaunchNotification',
|
||||
'SafeSitesFilterBehavior',
|
||||
'ToolbarAvatarLabelSettings',
|
||||
'UserAgentReduction',
|
||||
'BatterySaverModeAvailability_recommended',
|
||||
'DownloadRestrictions_recommended',
|
||||
'NetworkPredictionOptions_recommended',
|
||||
'PrintPostScriptMode',
|
||||
'PrintRasterizationMode',
|
||||
'ChromeFrameRendererSettings',
|
||||
'DefaultFileHandlingGuardSetting',
|
||||
'DefaultKeygenSetting',
|
||||
'DefaultPluginsSetting',
|
||||
'LegacySameSiteCookieBehaviorEnabled',
|
||||
'ForceMajorVersionToMinorPositionInUserAgent',
|
||||
'PasswordProtectionWarningTrigger',
|
||||
'SafeBrowsingProtectionLevel',
|
||||
'SafeBrowsingProtectionLevel_recommended',
|
||||
'RestoreOnStartup',
|
||||
'RestoreOnStartup_recommended'])
|
||||
return valuename_typeint
|
||||
|
||||
|
||||
    def get_boolean(self,data):
        if data in ['0', 'false', None, 'none', 0]:
            return False
        if data in ['1', 'true', 1]:
            return True
    def get_parts(self, hivekeyname):
        '''
        Parse registry path string and leave key parameters
        '''
        parts = hivekeyname.replace(self.__registry_branch, '').split('/')
        return parts


    def create_dict(self, chromium_keys):
        '''
        Collect dictionaries from registry keys into a general dictionary
        '''
        counts = {}
        #getting the list of keys to read as an integer
        valuename_typeint = self.get_valuename_typeint()
        for it_data in chromium_keys:
            branch = counts
            try:
                if type(it_data.data) is bytes:
                    it_data.data = it_data.data.decode(encoding='utf-16').replace('\x00','')
                parts = self.get_parts(it_data.hive_key)
                #creating a nested dictionary from elements
                for part in parts[:-1]:
                    branch = branch.setdefault(part, {})
                #dictionary key value initialization
                if it_data.type == 4:
                    if it_data.valuename in valuename_typeint:
                        branch[parts[-1]] = int(it_data.data)
                    else:
                        branch[parts[-1]] = self.get_boolean(it_data.data)
                else:
                    if it_data.data[0] == '[' and it_data.data[-1] == ']':
                        try:
                            branch[parts[-1]] = json.loads(str(it_data.data))
                        except:
                            branch[parts[-1]] = str(it_data.data).replace('\\', '/')
                    else:
                        branch[parts[-1]] = str(it_data.data).replace('\\', '/')

            except Exception as exc:
                logdata = {}
                logdata['Exception'] = exc
                logdata['keyname'] = it_data.keyname
                log('D178', logdata)
        try:
            self.policies_json = counts['']
        except:
            self.policies_json = {}
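
# A rough standalone model (not from the patch) of the path walk that
# create_dict() performs: every HKLM row under Software/Policies/Google/Chrome
# is turned into a nested dict keyed by the path components, and the leading
# empty component ends up as counts[''], which becomes self.policies_json.
# The rows below are made up and the integer/boolean special-casing is omitted.
from types import SimpleNamespace

REGISTRY_BRANCH = 'Software/Policies/Google/Chrome'
rows = [
    SimpleNamespace(hive_key=REGISTRY_BRANCH + '/ShowHomeButton', data='1'),
    SimpleNamespace(hive_key=REGISTRY_BRANCH + '/Recommended/HomepageLocation',
                    data='https://example.org'),
]

counts = {}
for row in rows:
    branch = counts
    parts = row.hive_key.replace(REGISTRY_BRANCH, '').split('/')  # ['', ..., leaf]
    for part in parts[:-1]:
        branch = branch.setdefault(part, {})
    branch[parts[-1]] = row.data

policies_json = counts.get('', {})
# policies_json == {'ShowHomeButton': '1',
#                   'Recommended': {'HomepageLocation': 'https://example.org'}}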
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,24 +16,22 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import fileinput
|
||||
import jinja2
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import pwd
|
||||
import string
|
||||
import subprocess
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from gpt.drives import json2drive
|
||||
from util.util import get_homedir
|
||||
from util.logging import slogm, log
|
||||
import jinja2
|
||||
from util.logging import log
|
||||
from util.util import get_homedir, get_machine_name, get_uid_by_username, get_user_info
|
||||
|
||||
def storage_get_drives(storage, sid):
|
||||
drives = storage.get_drives(sid)
|
||||
drive_list = list()
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
|
||||
def storage_get_drives(storage):
|
||||
drives = storage.get_drives()
|
||||
drive_list = []
|
||||
|
||||
for drv_obj in drives:
|
||||
drive_list.append(drv_obj)
|
||||
@@ -50,46 +48,213 @@ def add_line_if_missing(filename, ins_line):
        f.write(ins_line + '\n')
        f.flush()

def remove_chars_before_colon(input_string):
    if ":" in input_string:
        colon_index = input_string.index(":")
        result_string = input_string[colon_index + 1:]
        return result_string
    else:
        return input_string

def remove_escaped_quotes(input_string):
    result_string = input_string.replace('"', '').replace("'", '')
    return result_string
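
# A small sketch (not from the patch) of the two helpers above, which
# normalise drive-map values before they are rendered into the autofs
# templates: anything up to the first ':' is stripped from the share path,
# and quote characters are dropped from labels. The values are hypothetical.
print(remove_chars_before_colon('K://fileserver/share'))   # '//fileserver/share'
print(remove_chars_before_colon('//fileserver/share'))     # unchanged
print(remove_escaped_quotes('"Accounting drive"'))         # 'Accounting drive'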
|
||||
|
||||
|
||||
class Drive_list:
    __alphabet = string.ascii_uppercase
    def __init__(self):
        self.dict_drives = {}

    def __get_letter(self, letter):
        slice_letters = set(self.__alphabet[self.__alphabet.find(letter) + 1:]) - set(self.dict_drives.keys())
        free_letters = sorted(slice_letters)
        if free_letters:
            return free_letters[0]
        else:
            return None

    def append(self, drive:dict):
        cur_dir = drive['dir']
        if cur_dir not in set(self.dict_drives.keys()):
            if drive['action'] == 'D':
                return
            self.dict_drives[cur_dir] = drive
            return

        else:
            if drive['action'] == 'C':
                if drive['useLetter'] == '1':
                    return
                else:
                    new_dir = self.__get_letter(cur_dir)
                    if not new_dir:
                        return
                    drive['dir'] = new_dir
                    self.dict_drives[new_dir] = drive
                    return

            if drive['action'] == 'U':
                self.dict_drives[cur_dir]['thisDrive'] = drive['thisDrive']
                self.dict_drives[cur_dir]['allDrives'] = drive['allDrives']
                self.dict_drives[cur_dir]['label'] = drive['label']
                self.dict_drives[cur_dir]['persistent'] = drive['persistent']
                self.dict_drives[cur_dir]['useLetter'] = drive['useLetter']
                return

            if drive['action'] == 'R':
                self.dict_drives[cur_dir] = drive
                return
            if drive['action'] == 'D':
                if drive['useLetter'] == '1':
                    self.dict_drives.pop(cur_dir, None)
                else:
                    keys_set = set(self.dict_drives.keys())
                    slice_letters = set(self.__alphabet[self.__alphabet.find(cur_dir):])
                    for letter_dir in (keys_set & slice_letters):
                        self.dict_drives.pop(letter_dir, None)

    def __call__(self):
        return list(self.dict_drives.values())

    def len(self):
        return len(self.dict_drives)
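
# A brief usage sketch (not from the patch) of the Drive_list class above.
# It de-duplicates Group Policy drive mappings by drive letter and applies
# the GPP action codes C(reate), R(eplace), U(pdate), D(elete); a second
# Create for an occupied letter with useLetter == '0' is shifted to the next
# free letter. The entries are hypothetical and carry only the fields that
# append() actually reads.
def make_drive(letter, action, use_letter='1', **extra):
    drive = {'dir': letter, 'action': action, 'useLetter': use_letter,
             'thisDrive': '', 'allDrives': '', 'label': '', 'persistent': '1'}
    drive.update(extra)
    return drive

drives = Drive_list()
drives.append(make_drive('N', 'C', label='share1'))                 # occupies N
drives.append(make_drive('N', 'C', use_letter='0', label='share2')) # moved to O
drives.append(make_drive('N', 'U', label='renamed'))                # updates N
drives.append(make_drive('N', 'D', use_letter='1'))                 # deletes only N
print([d['dir'] for d in drives()])                                 # ['O']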
|
||||
|
||||
class cifs_applier(applier_frontend):
|
||||
__module_name = 'CIFSApplier'
|
||||
__module_enabled = True
|
||||
__module_experimental = False
|
||||
__dir4clean = '/etc/auto.master.gpupdate.d'
|
||||
|
||||
def __init__(self, storage):
|
||||
pass
|
||||
self.clear_directory_auto_dir()
|
||||
self.applier_cifs = cifs_applier_user(storage, None)
|
||||
self.__module_enabled = check_enabled(
|
||||
storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
def clear_directory_auto_dir(self):
|
||||
path = Path(self.__dir4clean)
|
||||
if not path.exists():
|
||||
return
|
||||
|
||||
for item in path.iterdir():
|
||||
try:
|
||||
if item.is_file() or item.is_symlink():
|
||||
item.unlink()
|
||||
except Exception as exc:
|
||||
log('W37', {'exc': exc})
|
||||
log('D231')
|
||||
|
||||
def apply(self):
|
||||
pass
|
||||
if self.__module_enabled:
|
||||
log('D179')
|
||||
self.applier_cifs._admin_context_apply()
|
||||
else:
|
||||
log('D180')
|
||||
|
||||
class cifs_applier_user(applier_frontend):
|
||||
__module_name = 'CIFSApplierUser'
|
||||
__module_enabled = False
|
||||
__module_experimental = True
|
||||
__module_enabled = True
|
||||
__module_experimental = False
|
||||
__auto_file = '/etc/auto.master'
|
||||
__auto_dir = '/etc/auto.master.gpupdate.d'
|
||||
__template_path = '/usr/share/gpupdate/templates'
|
||||
__template_mountpoints = 'autofs_mountpoints.j2'
|
||||
__template_identity = 'autofs_identity.j2'
|
||||
__template_auto = 'autofs_auto.j2'
|
||||
__template_mountpoints_hide = 'autofs_mountpoints_hide.j2'
|
||||
__template_auto_hide = 'autofs_auto_hide.j2'
|
||||
__enable_home_link = '/Software/BaseALT/Policies/GPUpdate/DriveMapsHome'
|
||||
__enable_home_link_user = '/Software/BaseALT/Policies/GPUpdate/DriveMapsHomeUser'
|
||||
__name_dir = '/Software/BaseALT/Policies/GPUpdate'
|
||||
__name_link_prefix = '/Software/BaseALT/Policies/GPUpdate/DriveMapsHomeDisableNet'
|
||||
__name_link_prefix_user = '/Software/BaseALT/Policies/GPUpdate/DriveMapsHomeDisableNetUser'
|
||||
__key_link_prefix = 'DriveMapsHomeDisableNet'
|
||||
__key_link_prefix_user = 'DriveMapsHomeDisableNetUser'
|
||||
__timeout_user_key = '/Software/BaseALT/Policies/GPUpdate/TimeoutAutofsUser'
|
||||
__timeout_key = '/Software/BaseALT/Policies/GPUpdate/TimeoutAutofs'
|
||||
__cifsacl_key = '/Software/BaseALT/Policies/GPUpdate/CifsaclDisable'
|
||||
__target_mountpoint = '/media/gpupdate'
|
||||
__target_mountpoint_user = '/run/media'
|
||||
__mountpoint_dirname = 'drives.system'
|
||||
__mountpoint_dirname_user = 'drives'
|
||||
__key_cifs_previous_value = 'Previous/Software/BaseALT/Policies/GPUpdate'
|
||||
__key_preferences = 'Software/BaseALT/Policies/Preferences/'
|
||||
__key_preferences_previous = 'Previous/Software/BaseALT/Policies/Preferences/'
|
||||
__name_value = 'DriveMapsName'
|
||||
__name_value_user = 'DriveMapsNameUser'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self.state_home_link = False
|
||||
self.state_home_link_user = False
|
||||
self.dict_registry_machine = self.storage.get_dictionary_from_dconf_file_db()
|
||||
self.homedir = ''
|
||||
name_dir = self.__name_dir[1:]
|
||||
|
||||
self.home = get_homedir(username)
|
||||
conf_file = '{}.conf'.format(sid)
|
||||
autofs_file = '{}.autofs'.format(sid)
|
||||
cred_file = '{}.creds'.format(sid)
|
||||
if username:
|
||||
self.dict_registry_user = self.storage.get_dictionary_from_dconf_file_db(get_uid_by_username(username))
|
||||
self.home = self.__target_mountpoint_user + '/' + username
|
||||
self.state_home_link = self.storage.check_enable_key(self.__enable_home_link)
|
||||
self.state_home_link_disable_net = self.storage.check_enable_key(self.__name_link_prefix)
|
||||
self.state_home_link_disable_net_user = self.storage.check_enable_key(self.__name_link_prefix_user)
|
||||
|
||||
self.state_home_link_user = self.storage.check_enable_key(self.__enable_home_link_user)
|
||||
self.timeout = self.storage.get_entry(self.__timeout_user_key)
|
||||
dirname = self.storage.get_entry(self.__name_dir + '/' + self.__name_value_user)
|
||||
dirname_system_from_machine = self.dict_registry_machine.get(name_dir, dict()).get(self.__name_value, None)
|
||||
self.__mountpoint_dirname_user = dirname.data if dirname and dirname.data else self.__mountpoint_dirname_user
|
||||
self.__mountpoint_dirname = dirname_system_from_machine if dirname_system_from_machine else self.__mountpoint_dirname
|
||||
mntTarget = self.__mountpoint_dirname_user
|
||||
|
||||
self.keys_cifs_previous_values_user = self.dict_registry_user.get(self.__key_cifs_previous_value,{})
|
||||
self.keys_cifs_values_user = self.dict_registry_user.get(name_dir,{})
|
||||
self.keys_the_preferences_previous_values_user = self.dict_registry_user.get((self.__key_preferences_previous+self.username),{}).get('Drives', {})
|
||||
self.keys_the_preferences_values_user = self.dict_registry_user.get((self.__key_preferences+self.username),{}).get('Drives', {})
|
||||
|
||||
else:
|
||||
self.home = self.__target_mountpoint
|
||||
self.timeout = self.storage.get_entry(self.__timeout_key)
|
||||
dirname_system = self.storage.get_entry(self.__name_dir + '/' + self.__name_value)
|
||||
self.__mountpoint_dirname = dirname_system.data if dirname_system and dirname_system.data else self.__mountpoint_dirname
|
||||
mntTarget = self.__mountpoint_dirname
|
||||
|
||||
self.keys_cifs_previous_values_machine = self.dict_registry_machine.get(self.__key_cifs_previous_value,{})
|
||||
self.keys_cifs_values_machine = self.dict_registry_machine.get(name_dir,{})
|
||||
self.keys_the_preferences_previous_values = self.dict_registry_machine.get((self.__key_preferences_previous+'Machine'),{}).get('Drives', {})
|
||||
self.keys_the_preferences_values = self.dict_registry_machine.get((self.__key_preferences+'Machine'),{}).get('Drives', {})
|
||||
self.cifsacl_disable = self.storage.get_entry(self.__cifsacl_key, preg=False)
|
||||
|
||||
self.mntTarget = mntTarget.translate(str.maketrans({" ": r"\ "}))
|
||||
file_name = username if username else get_machine_name()
|
||||
conf_file = '{}.conf'.format(file_name)
|
||||
conf_hide_file = '{}_hide.conf'.format(file_name)
|
||||
autofs_file = '{}.autofs'.format(file_name)
|
||||
autofs_hide_file = '{}_hide.autofs'.format(file_name)
|
||||
cred_file = '{}.creds'.format(file_name)
|
||||
|
||||
self.auto_master_d = Path(self.__auto_dir)
|
||||
|
||||
self.user_config = self.auto_master_d / conf_file
|
||||
self.user_config_hide = self.auto_master_d / conf_hide_file
|
||||
if os.path.exists(self.user_config.resolve()):
|
||||
self.user_config.unlink()
|
||||
if os.path.exists(self.user_config_hide.resolve()):
|
||||
self.user_config_hide.unlink()
|
||||
self.user_autofs = self.auto_master_d / autofs_file
|
||||
self.user_autofs_hide = self.auto_master_d / autofs_hide_file
|
||||
if os.path.exists(self.user_autofs.resolve()):
|
||||
self.user_autofs.unlink()
|
||||
if os.path.exists(self.user_autofs_hide.resolve()):
|
||||
self.user_autofs_hide.unlink()
|
||||
self.user_creds = self.auto_master_d / cred_file
|
||||
|
||||
self.mount_dir = Path(os.path.join(self.home, 'net'))
|
||||
self.drives = storage_get_drives(self.storage, self.sid)
|
||||
|
||||
self.mount_dir = Path(os.path.join(self.home))
|
||||
self.drives = storage_get_drives(self.storage)
|
||||
|
||||
self.template_loader = jinja2.FileSystemLoader(searchpath=self.__template_path)
|
||||
self.template_env = jinja2.Environment(loader=self.template_loader)
|
||||
@@ -98,6 +263,9 @@ class cifs_applier_user(applier_frontend):
|
||||
self.template_indentity = self.template_env.get_template(self.__template_identity)
|
||||
self.template_auto = self.template_env.get_template(self.__template_auto)
|
||||
|
||||
self.template_mountpoints_hide = self.template_env.get_template(self.__template_mountpoints_hide)
|
||||
self.template_auto_hide = self.template_env.get_template(self.__template_auto_hide)
|
||||
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
@@ -105,46 +273,82 @@ class cifs_applier_user(applier_frontend):
|
||||
)
|
||||
|
||||
|
||||
def is_mount_point_dirname(self):
|
||||
if self.username:
|
||||
return self.mount_dir.joinpath(self.__mountpoint_dirname_user).is_mount()
|
||||
else:
|
||||
return self.mount_dir.joinpath(self.__mountpoint_dirname).is_mount()
|
||||
|
||||
def is_changed_keys(self):
|
||||
if self.username:
|
||||
return (self.keys_cifs_previous_values_user.get(self.__name_value_user) != self.keys_cifs_values_user.get(self.__name_value_user) or
|
||||
self.keys_the_preferences_previous_values_user != self.keys_the_preferences_values_user)
|
||||
else:
|
||||
return (self.keys_cifs_previous_values_machine.get(self.__name_value) != self.keys_cifs_values_machine.get(self.__name_value) or
|
||||
self.keys_the_preferences_previous_values != self.keys_the_preferences_values)
|
||||
|
||||
def user_context_apply(self):
|
||||
'''
|
||||
Nothing to implement.
|
||||
'''
|
||||
pass
|
||||
|
||||
def __admin_context_apply(self):
|
||||
def _admin_context_apply(self):
|
||||
# Create /etc/auto.master.gpupdate.d directory
|
||||
self.auto_master_d.mkdir(parents=True, exist_ok=True)
|
||||
# Create user's destination mount directory
|
||||
self.mount_dir.mkdir(parents=True, exist_ok=True)
|
||||
uid = get_user_info(self.username).pw_uid if self.username else None
|
||||
if uid:
|
||||
os.chown(self.mount_dir, uid=uid, gid=-1)
|
||||
self.mount_dir.chmod(0o700)
|
||||
|
||||
# Add pointer to /etc/auto.master.gpupdate.d in /etc/auto.master
|
||||
auto_destdir = '+dir:{}'.format(self.__auto_dir)
|
||||
add_line_if_missing(self.__auto_file, auto_destdir)
|
||||
|
||||
# Collect data for drive settings
|
||||
drive_list = list()
|
||||
drive_list = Drive_list()
|
||||
for drv in self.drives:
|
||||
drive_settings = dict()
|
||||
drive_settings = {}
|
||||
drive_settings['dir'] = drv.dir
|
||||
drive_settings['login'] = drv.login
|
||||
drive_settings['password'] = drv.password
|
||||
drive_settings['path'] = drv.path.replace('\\', '/')
|
||||
drive_settings['path'] = remove_chars_before_colon(drv.path.replace('\\', '/'))
|
||||
drive_settings['action'] = drv.action
|
||||
drive_settings['thisDrive'] = drv.thisDrive
|
||||
drive_settings['allDrives'] = drv.allDrives
|
||||
drive_settings['label'] = remove_escaped_quotes(drv.label) if drv.persistent == '1' else None
|
||||
drive_settings['persistent'] = drv.persistent
|
||||
drive_settings['useLetter'] = drv.useLetter
|
||||
drive_settings['username'] = self.username
|
||||
drive_settings['cifsacl'] = False if self.cifsacl_disable else True
|
||||
|
||||
drive_list.append(drive_settings)
|
||||
|
||||
if len(drive_list) > 0:
|
||||
mount_settings = dict()
|
||||
mount_settings['drives'] = drive_list
|
||||
if drive_list.len() > 0:
|
||||
mount_settings = {}
|
||||
mount_settings['drives'] = drive_list()
|
||||
mount_text = self.template_mountpoints.render(**mount_settings)
|
||||
|
||||
mount_text_hide = self.template_mountpoints_hide.render(**mount_settings)
|
||||
|
||||
with open(self.user_config.resolve(), 'w') as f:
|
||||
f.truncate()
|
||||
f.write(mount_text)
|
||||
f.flush()
|
||||
|
||||
autofs_settings = dict()
|
||||
with open(self.user_config_hide.resolve(), 'w') as f:
|
||||
f.truncate()
|
||||
f.write(mount_text_hide)
|
||||
f.flush()
|
||||
|
||||
autofs_settings = {}
|
||||
autofs_settings['home_dir'] = self.home
|
||||
autofs_settings['mntTarget'] = self.mntTarget
|
||||
autofs_settings['mount_file'] = self.user_config.resolve()
|
||||
autofs_settings['timeout'] = self.timeout.data if self.timeout and self.timeout.data else 120
|
||||
|
||||
autofs_text = self.template_auto.render(**autofs_settings)
|
||||
|
||||
with open(self.user_autofs.resolve(), 'w') as f:
|
||||
@@ -152,13 +356,121 @@ class cifs_applier_user(applier_frontend):
|
||||
f.write(autofs_text)
|
||||
f.flush()
|
||||
|
||||
autofs_settings['mount_file'] = self.user_config_hide.resolve()
|
||||
autofs_text = self.template_auto_hide.render(**autofs_settings)
|
||||
with open(self.user_autofs_hide.resolve(), 'w') as f:
|
||||
f.truncate()
|
||||
f.write(autofs_text)
|
||||
f.flush()
|
||||
|
||||
if self.is_changed_keys() or (self.drives and not self.is_mount_point_dirname()):
|
||||
self.restart_autofs()
|
||||
|
||||
if self.username:
|
||||
self.update_drivemaps_home_links()
|
||||
|
||||
def restart_autofs(self):
|
||||
try:
|
||||
subprocess.check_call(['/bin/systemctl', 'restart', 'autofs'])
|
||||
except Exception as exc:
|
||||
log('E74', {'exc': exc})
|
||||
|
||||
|
||||
def unlink_symlink(self, symlink:Path, previous=None):
|
||||
try:
|
||||
if symlink.exists() and symlink.is_symlink() and symlink.owner() == 'root':
|
||||
symlink.unlink()
|
||||
elif symlink.is_symlink() and not symlink.exists():
|
||||
symlink.unlink()
|
||||
elif previous:
|
||||
symlink.unlink()
|
||||
except:
|
||||
pass
|
||||
|
||||
def del_previous_link(self, previous_value_link , mountpoint_dirname, prefix):
|
||||
d_previous = Path(self.homedir + ('/' if prefix else '/net.') + previous_value_link)
|
||||
if d_previous.name != mountpoint_dirname:
|
||||
dHide_previous = Path(self.homedir + ('/.' if prefix else '/.net.') + previous_value_link)
|
||||
self.unlink_symlink(d_previous, True)
|
||||
self.unlink_symlink(dHide_previous, True)
|
||||
|
||||
def update_drivemaps_home_links(self):
|
||||
if self.state_home_link_disable_net:
|
||||
prefix = ''
|
||||
else:
|
||||
prefix = 'net.'
|
||||
if self.state_home_link_disable_net_user:
|
||||
prefix_user = ''
|
||||
else:
|
||||
prefix_user = 'net.'
|
||||
|
||||
previous_value_link = self.keys_cifs_previous_values_machine.get(self.__name_value, self.__mountpoint_dirname)
|
||||
previous_state_home_link_disable_net_user = self.keys_cifs_previous_values_user.get(self.__key_link_prefix_user)
|
||||
previous_state_home_link_disable_net = self.keys_cifs_previous_values_user.get(self.__key_link_prefix)
|
||||
previous_value_link_user = self.keys_cifs_previous_values_user.get(self.__name_value_user, self.__mountpoint_dirname_user)
|
||||
|
||||
self.homedir = get_homedir(self.username)
|
||||
|
||||
dUser = Path(self.homedir + '/' + prefix_user + self.__mountpoint_dirname_user)
|
||||
dUserHide = Path(self.homedir + '/.' + prefix_user + self.__mountpoint_dirname_user)
|
||||
dMachine = Path(self.homedir+'/' + prefix + self.__mountpoint_dirname)
|
||||
dMachineHide = Path(self.homedir+'/.' + prefix + self.__mountpoint_dirname)
|
||||
|
||||
if self.state_home_link_user:
|
||||
dUserMountpoint = Path(self.home).joinpath(self.__mountpoint_dirname_user)
|
||||
dUserMountpointHide = Path(self.home).joinpath('.' + self.__mountpoint_dirname_user)
|
||||
self.del_previous_link(previous_value_link_user, dUser.name, previous_state_home_link_disable_net_user)
|
||||
if not dUser.exists() and dUserMountpoint.exists():
|
||||
try:
|
||||
os.symlink(dUserMountpoint, dUser, True)
|
||||
except Exception as exc:
|
||||
log('D194', {'exc': exc})
|
||||
elif dUser.is_symlink() and not dUserMountpoint.exists():
|
||||
self.unlink_symlink(dUser)
|
||||
|
||||
if not dUserHide.exists() and dUserMountpointHide.exists():
|
||||
try:
|
||||
os.symlink(dUserMountpointHide, dUserHide, True)
|
||||
except Exception as exc:
|
||||
log('D196', {'exc': exc})
|
||||
elif dUserHide.is_symlink() and not dUserMountpointHide.exists():
|
||||
self.unlink_symlink(dUserHide)
|
||||
else:
|
||||
self.del_previous_link(previous_value_link_user, dUser.name, previous_state_home_link_disable_net_user)
|
||||
self.unlink_symlink(dUser)
|
||||
self.unlink_symlink(dUserHide)
|
||||
|
||||
|
||||
if self.state_home_link:
|
||||
dMachineMountpoint = Path(self.__target_mountpoint).joinpath(self.__mountpoint_dirname)
|
||||
dMachineMountpointHide = Path(self.__target_mountpoint).joinpath('.' + self.__mountpoint_dirname)
|
||||
self.del_previous_link(previous_value_link, dMachine.name, previous_state_home_link_disable_net)
|
||||
|
||||
if not dMachine.exists() and dMachineMountpoint.exists():
|
||||
try:
|
||||
os.symlink(dMachineMountpoint, dMachine, True)
|
||||
except Exception as exc:
|
||||
log('D195', {'exc': exc})
|
||||
elif dMachine.is_symlink() and not dMachineMountpoint.exists():
|
||||
self.unlink_symlink(dMachine)
|
||||
|
||||
if not dMachineHide.exists() and dMachineMountpointHide.exists():
|
||||
try:
|
||||
os.symlink(dMachineMountpointHide, dMachineHide, True)
|
||||
except Exception as exc:
|
||||
log('D197', {'exc': exc})
|
||||
elif dMachineHide.is_symlink() and not dMachineMountpointHide.exists():
|
||||
self.unlink_symlink(dMachineHide)
|
||||
else:
|
||||
self.del_previous_link(previous_value_link, dMachine.name, previous_state_home_link_disable_net)
|
||||
self.unlink_symlink(dMachine)
|
||||
self.unlink_symlink(dMachineHide)
|
||||
|
||||
|
||||
def admin_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D146')
|
||||
self.__admin_context_apply()
|
||||
self._admin_context_apply()
|
||||
else:
|
||||
log('D147')
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,25 +16,22 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from .appliers.control import control
|
||||
from util.logging import slogm, log
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .appliers.control import control
|
||||
|
||||
import logging
|
||||
|
||||
class control_applier(applier_frontend):
|
||||
__module_name = 'ControlApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
_registry_branch = 'Software\\BaseALT\\Policies\\Control'
|
||||
_registry_branch = 'Software/BaseALT/Policies/Control'
|
||||
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
self.control_settings = self.storage.filter_hklm_entries('Software\\BaseALT\\Policies\\Control%')
|
||||
self.controls = list()
|
||||
self.control_settings = self.storage.filter_hklm_entries(self._registry_branch)
|
||||
self.controls = []
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
@@ -43,12 +40,10 @@ class control_applier(applier_frontend):
|
||||
|
||||
def run(self):
|
||||
for setting in self.control_settings:
|
||||
valuename = setting.hive_key.rpartition('\\')[2]
|
||||
valuename = setting.hive_key.rpartition('/')[2]
|
||||
try:
|
||||
self.controls.append(control(valuename, int(setting.data)))
|
||||
logdata = dict()
|
||||
logdata['control'] = valuename
|
||||
logdata['value'] = setting.data
|
||||
logdata = {'control': valuename, 'value': setting.data}
|
||||
log('I3', logdata)
|
||||
except ValueError as exc:
|
||||
try:
|
||||
@@ -58,14 +53,10 @@ class control_applier(applier_frontend):
|
||||
log('I3', logdata)
|
||||
continue
|
||||
self.controls.append(ctl)
|
||||
logdata = dict()
|
||||
logdata['control'] = valuename
|
||||
logdata['with string value'] = setting.data
|
||||
logdata = {'control': valuename, 'with string value': setting.data}
|
||||
log('I3', logdata)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['control'] = valuename
|
||||
logdata['exc'] = exc
|
||||
logdata = {'control': valuename, 'exc': exc}
|
||||
log('E39', logdata)
|
||||
#for e in polfile.pol_file.entries:
|
||||
# print('{}:{}:{}:{}:{}'.format(e.type, e.data, e.valuename, e.keyname))
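
# A short sketch (not from the patch) of the value handling in run() above:
# the control name is the last component of the registry path, and the value
# is tried as an integer first, falling back to the raw string for controls
# whose states are named rather than numbered. The entries are hypothetical.
for hive_key, data in [('Software/BaseALT/Policies/Control/sshd-gssapi-auth', '1'),
                       ('Software/BaseALT/Policies/Control/fusermount', 'public')]:
    valuename = hive_key.rpartition('/')[2]
    try:
        value = int(data)
    except ValueError:
        value = data
    print(valuename, repr(value))   # sshd-gssapi-auth 1 / fusermount 'public'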
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,26 +16,22 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import json
|
||||
|
||||
import cups
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from gpt.printers import json2printer
|
||||
from util.logging import log
|
||||
from util.rpm import is_rpm_installed
|
||||
from util.logging import slogm, log
|
||||
|
||||
def storage_get_printers(storage, sid):
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
|
||||
def storage_get_printers(storage):
|
||||
'''
|
||||
Query printers configuration from storage
|
||||
'''
|
||||
printer_objs = storage.get_printers(sid)
|
||||
printers = list()
|
||||
printer_objs = storage.get_printers()
|
||||
printers = []
|
||||
|
||||
for prnj in printer_objs:
|
||||
printers.append(prnj)
|
||||
@@ -66,8 +62,8 @@ def connect_printer(connection, prn):
|
||||
|
||||
class cups_applier(applier_frontend):
|
||||
__module_name = 'CUPSApplier'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
@@ -81,9 +77,12 @@ class cups_applier(applier_frontend):
|
||||
if not is_rpm_installed('cups'):
|
||||
log('W9')
|
||||
return
|
||||
|
||||
self.cups_connection = cups.Connection()
|
||||
self.printers = storage_get_printers(self.storage, self.storage.get_info('machine_sid'))
|
||||
try:
|
||||
self.cups_connection = cups.Connection()
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('W20', logdata)
|
||||
self.printers = storage_get_printers(self.storage)
|
||||
|
||||
if self.printers:
|
||||
for prn in self.printers:
|
||||
@@ -101,17 +100,16 @@ class cups_applier(applier_frontend):
|
||||
|
||||
class cups_applier_user(applier_frontend):
|
||||
__module_name = 'CUPSApplierUser'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_enabled
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def user_context_apply(self):
|
||||
@@ -127,7 +125,7 @@ class cups_applier_user(applier_frontend):
|
||||
return
|
||||
|
||||
self.cups_connection = cups.Connection()
|
||||
self.printers = storage_get_printers(self.storage, self.sid)
|
||||
self.printers = storage_get_printers(self.storage)
|
||||
|
||||
if self.printers:
|
||||
for prn in self.printers:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,25 +16,22 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from .appliers.envvar import Envvar
|
||||
from util.logging import slogm, log
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .appliers.envvar import Envvar
|
||||
|
||||
import logging
|
||||
|
||||
class envvar_applier(applier_frontend):
|
||||
__module_name = 'EnvvarsApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage, sid):
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.envvars = self.storage.get_envvars(self.sid)
|
||||
#self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_enabled)
|
||||
self.envvars = self.storage.get_envvars()
|
||||
Envvar.clear_envvar_file()
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
@@ -49,17 +46,14 @@ class envvar_applier_user(applier_frontend):
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self.envvars = self.storage.get_envvars(self.sid)
|
||||
#self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
self.envvars = self.storage.get_envvars()
|
||||
Envvar.clear_envvar_file(username)
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def admin_context_apply(self):
|
||||
pass
|
||||
|
||||
def user_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D136')
|
||||
ev = Envvar(self.envvars, self.username)
|
||||
@@ -67,3 +61,6 @@ class envvar_applier_user(applier_frontend):
|
||||
else:
|
||||
log('D137')
|
||||
|
||||
def user_context_apply(self):
|
||||
pass
|
||||
|
||||
|
||||
gpoa/frontend/file_applier.py (new file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .appliers.file_cp import Execution_check, Files_cp
|
||||
|
||||
|
||||
class file_applier(applier_frontend):
|
||||
__module_name = 'FilesApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage, file_cache):
|
||||
self.storage = storage
|
||||
self.exe_check = Execution_check(storage)
|
||||
self.file_cache = file_cache
|
||||
self.files = self.storage.get_files()
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def run(self):
|
||||
for file in self.files:
|
||||
Files_cp(file, self.file_cache, self.exe_check)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D167')
|
||||
self.run()
|
||||
else:
|
||||
log('D168')
|
||||
|
||||
class file_applier_user(applier_frontend):
|
||||
__module_name = 'FilesApplierUser'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage, file_cache, username):
|
||||
self.storage = storage
|
||||
self.file_cache = file_cache
|
||||
self.username = username
|
||||
self.exe_check = Execution_check(storage)
|
||||
self.files = self.storage.get_files()
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def run(self):
|
||||
for file in self.files:
|
||||
Files_cp(file, self.file_cache, self.exe_check, self.username)
|
||||
|
||||
def admin_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D169')
|
||||
self.run()
|
||||
else:
|
||||
log('D170')
|
||||
|
||||
def user_context_apply(self):
|
||||
pass
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -25,199 +25,141 @@
|
||||
# This thing must work with keys and subkeys located at:
|
||||
# Software\Policies\Mozilla\Firefox
|
||||
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import configparser
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from util.logging import slogm, log
|
||||
from util.util import is_machine_name
|
||||
from util.logging import log
|
||||
from util.util import is_machine_name, try_dict_to_literal_eval
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
|
||||
class firefox_applier(applier_frontend):
|
||||
__module_name = 'FirefoxApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__registry_branch = 'Software\\Policies\\Mozilla\\Firefox'
|
||||
__firefox_installdir1 = '/usr/lib64/firefox/distribution'
|
||||
__firefox_installdir2 = '/etc/firefox/policies'
|
||||
__user_settings_dir = '.mozilla/firefox'
|
||||
__registry_branch = 'Software/Policies/Mozilla/Firefox'
|
||||
__firefox_policies = '/etc/firefox/policies'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self._is_machine_name = is_machine_name(self.username)
|
||||
self.policies = dict()
|
||||
self.policies_json = dict({ 'policies': self.policies })
|
||||
self.policies = {}
|
||||
self.policies_json = {'policies': self.policies}
|
||||
self.firefox_keys = self.storage.filter_hklm_entries(self.__registry_branch)
|
||||
self.policies_gen = {}
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def get_profiles(self):
|
||||
'''
|
||||
Get directory names of Firefox profiles for specified username.
|
||||
'''
|
||||
profiles_ini = os.path.join(util.get_homedir(self.username), self.__user_settings_dir, 'profiles.ini')
|
||||
config = configparser.ConfigParser()
|
||||
config.read(profiles_ini)
|
||||
|
||||
profile_paths = list()
|
||||
for section in config.keys():
|
||||
if section.startswith('Profile'):
|
||||
profile_paths.append(config[section]['Path'])
|
||||
|
||||
return profile_paths
|
||||
|
||||
def get_hklm_string_entry(self, hive_subkey):
|
||||
'''
|
||||
Get HKEY_LOCAL_MACHINE hive subkey of
|
||||
'Software\Policies\Mozilla\Firefox'.
|
||||
'''
|
||||
query_str = '{}\\{}'.format(self.__registry_branch, hive_subkey)
|
||||
return self.storage.get_hklm_entry(query_str)
|
||||
|
||||
def get_hklm_string_entry_default(self, hive_subkey, default):
|
||||
'''
|
||||
Get Firefox's subkey or return the default value.
|
||||
'''
|
||||
defval = str(default)
|
||||
response = self.get_hklm_string_entry(hive_subkey)
|
||||
if response:
|
||||
return response.data
|
||||
return defval
|
||||
|
||||
def set_policy(self, name, obj):
|
||||
'''
|
||||
Add entry to policy set.
|
||||
'''
|
||||
if obj:
|
||||
self.policies[name] = obj
|
||||
logdata = dict()
|
||||
logdata['name'] = name
|
||||
logdata['set to'] = obj
|
||||
log('I7', logdata)
|
||||
|
||||
def get_home_page(self):
|
||||
'''
|
||||
Query the Homepage property from the storage.
|
||||
'''
|
||||
homepage = dict({
|
||||
'URL': 'about:blank',
|
||||
'Locked': False,
|
||||
'StartPage': 'homepage'
|
||||
})
|
||||
response = self.get_hklm_string_entry('Homepage\\URL')
|
||||
if response:
|
||||
homepage['URL'] = response.data
|
||||
return homepage
|
||||
return None
|
||||
|
||||
def get_boolean_config(self, name):
|
||||
'''
|
||||
Query boolean property from the storage.
|
||||
'''
|
||||
response = self.get_hklm_string_entry(name)
|
||||
if response:
|
||||
data = response.data if isinstance(response.data, int) else str(response.data).lower()
|
||||
if data in ['0', 'false', None, 'none', 0]:
|
||||
return False
|
||||
if data in ['1', 'true', 1]:
|
||||
return True
|
||||
|
||||
return None
|
||||
|
||||
def set_boolean_policy(self, name):
|
||||
'''
|
||||
Add boolean entry to policy set.
|
||||
'''
|
||||
obj = self.get_boolean_config(name)
|
||||
if obj is not None:
|
||||
self.policies[name] = obj
|
||||
logdata = dict()
|
||||
logdata['name'] = name
|
||||
logdata['set to'] = obj
|
||||
log('I7', logdata)
|
||||
|
||||
def machine_apply(self):
|
||||
'''
|
||||
Write policies.json to Firefox installdir.
|
||||
Write policies.json to Firefox.
|
||||
'''
|
||||
self.set_policy('Homepage', self.get_home_page())
|
||||
self.set_boolean_policy('BlockAboutConfig')
|
||||
self.set_boolean_policy('BlockAboutProfiles')
|
||||
self.set_boolean_policy('BlockAboutSupport')
|
||||
self.set_boolean_policy('CaptivePortal')
|
||||
self.set_boolean_policy('DisableSetDesktopBackground')
|
||||
self.set_boolean_policy('DisableMasterPasswordCreation')
|
||||
self.set_boolean_policy('DisableBuiltinPDFViewer')
|
||||
self.set_boolean_policy('DisableDeveloperTools')
|
||||
self.set_boolean_policy('DisableFeedbackCommands')
|
||||
self.set_boolean_policy('DisableFirefoxScreenshots')
|
||||
self.set_boolean_policy('DisableFirefoxAccounts')
|
||||
self.set_boolean_policy('DisableFirefoxStudies')
|
||||
self.set_boolean_policy('DisableForgetButton')
|
||||
self.set_boolean_policy('DisableFormHistory')
|
||||
self.set_boolean_policy('DisablePasswordReveal')
|
||||
self.set_boolean_policy('DisablePocket')
|
||||
self.set_boolean_policy('DisablePrivateBrowsing')
|
||||
self.set_boolean_policy('DisableProfileImport')
|
||||
self.set_boolean_policy('DisableProfileRefresh')
|
||||
self.set_boolean_policy('DisableSafeMode')
|
||||
self.set_boolean_policy('DisableSystemAddonUpdate')
|
||||
self.set_boolean_policy('DisableTelemetry')
|
||||
self.set_boolean_policy('DontCheckDefaultBrowser')
|
||||
self.set_boolean_policy('ExtensionUpdate')
|
||||
self.set_boolean_policy('HardwareAcceleration')
|
||||
self.set_boolean_policy('PrimaryPassword')
|
||||
self.set_boolean_policy('NetworkPrediction')
|
||||
self.set_boolean_policy('NewTabPage')
|
||||
self.set_boolean_policy('NoDefaultBookmarks')
|
||||
self.set_boolean_policy('OfferToSaveLogins')
|
||||
self.set_boolean_policy('PasswordManagerEnabled')
|
||||
self.set_boolean_policy('PromptForDownloadLocation')
|
||||
self.set_boolean_policy('SanitizeOnShutdown')
|
||||
self.set_boolean_policy('SearchSuggestEnabled')
|
||||
excp = ['SOCKSVersion']
|
||||
self.policies_json = create_dict(self.firefox_keys, self.__registry_branch, excp)
|
||||
|
||||
destfile = os.path.join(self.__firefox_installdir1, 'policies.json')
|
||||
|
||||
os.makedirs(self.__firefox_installdir1, exist_ok=True)
|
||||
destfile = os.path.join(self.__firefox_policies, 'policies.json')
|
||||
os.makedirs(self.__firefox_policies, exist_ok=True)
|
||||
with open(destfile, 'w') as f:
|
||||
json.dump(self.policies_json, f)
|
||||
logdata = dict()
|
||||
logdata['destfile'] = destfile
|
||||
logdata = {'destfile': destfile}
|
||||
log('D91', logdata)
|
||||
|
||||
destfile = os.path.join(self.__firefox_installdir2, 'policies.json')
|
||||
os.makedirs(self.__firefox_installdir2, exist_ok=True)
|
||||
with open(destfile, 'w') as f:
|
||||
json.dump(self.policies_json, f)
|
||||
logdata = dict()
|
||||
logdata['destfile'] = destfile
|
||||
log('D91', logdata)
|
||||
|
||||
def user_apply(self):
|
||||
profiles = self.get_profiles()
|
||||
|
||||
profiledir = os.path.join(util.get_homedir(self.username), self.__user_settings_dir)
|
||||
for profile in profiles:
|
||||
logdata = dict()
|
||||
logdata['profiledir'] = profiledir
|
||||
logdata['profile'] = profile
|
||||
log('D92', logdata)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D93')
|
||||
self.machine_apply()
|
||||
else:
|
||||
log('D94')
|
||||
#if not self._is_machine_name:
|
||||
# logging.debug('Running user applier for Firefox')
|
||||
# self.user_apply()
|
||||
|
||||
def key_dict_is_digit(dictionary:dict) -> bool:
    '''
    Check whether any key of the dictionary is a digit string
    '''
    if not isinstance(dictionary, dict):
        return False
    for dig in dictionary.keys():
        if dig.isdigit():
            return True
    return False


def dict_item_to_list(dictionary:dict) -> dict:
    '''
    Recursively replace dictionaries keyed by digits ('1', '2', ...) with a list of their values
    '''
    if '' in dictionary:
        dictionary = dictionary.pop('')

    for key,val in dictionary.items():
        if type(val) == dict:
            if key_dict_is_digit(val):
                dictionary[key] = [*val.values()]
            else:
                dict_item_to_list(dictionary[key])
    return dictionary
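
# A small worked example (not from the patch) of dict_item_to_list() above.
# Branches that create_dict() built from numbered subkeys (.../1, .../2, ...)
# are rewritten into ordinary JSON lists so that the resulting policies.json
# has the shape Firefox expects. The input tree is hypothetical.
tree = {
    '': {
        'WebsiteFilter': {'Block': {'1': '<all_urls>', '2': 'ftp://*'}},
        'DisableTelemetry': True,
    }
}
print(dict_item_to_list(tree))
# {'WebsiteFilter': {'Block': ['<all_urls>', 'ftp://*']}, 'DisableTelemetry': True}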
|
||||
|
||||
def clean_data_firefox(data):
|
||||
return data.replace("'", '\"')
|
||||
|
||||
|
||||
|
||||
def create_dict(firefox_keys, registry_branch, excp=[]):
|
||||
'''
|
||||
Collect dictionaries from registry keys into a general dictionary
|
||||
'''
|
||||
get_boolean = lambda data: data in ['1', 'true', 'True', True, 1] if isinstance(data, (str, int)) else False
|
||||
get_parts = lambda hivekey, registry: hivekey.replace(registry, '').split('/')
|
||||
counts = {}
|
||||
for it_data in firefox_keys:
|
||||
branch = counts
|
||||
try:
|
||||
if type(it_data.data) is bytes:
|
||||
it_data.data = it_data.data.decode(encoding='utf-16').replace('\x00','')
|
||||
json_data = try_dict_to_literal_eval(it_data.data)
|
||||
if json_data:
|
||||
it_data.data = json_data
|
||||
it_data.type = 7
|
||||
else:
|
||||
if it_data.type == 1:
|
||||
it_data.data = clean_data_firefox(it_data.data)
|
||||
#Cases when it is necessary to create nested dictionaries
|
||||
if it_data.valuename != it_data.data:
|
||||
parts = get_parts(it_data.hive_key, registry_branch)
|
||||
#creating a nested dictionary from elements
|
||||
for part in parts[:-1]:
|
||||
branch = branch.setdefault(part, {})
|
||||
#dictionary key value initialization
|
||||
if it_data.type == 4:
|
||||
if it_data.valuename in excp:
|
||||
branch[parts[-1]] = int(it_data.data)
|
||||
else:
|
||||
branch[parts[-1]] = get_boolean(it_data.data)
|
||||
elif it_data.type == 7:
|
||||
branch[parts[-1]] = it_data.data
|
||||
else:
|
||||
branch[parts[-1]] = str(it_data.data).replace('\\', '/')
|
||||
#Cases when it is necessary to create lists in a dictionary
|
||||
else:
|
||||
parts = get_parts(it_data.keyname, registry_branch)
|
||||
for part in parts[:-1]:
|
||||
branch = branch.setdefault(part, {})
|
||||
if branch.get(parts[-1]) is None:
|
||||
branch[parts[-1]] = []
|
||||
if it_data.type == 4:
|
||||
branch[parts[-1]].append(get_boolean(it_data.data))
|
||||
else:
|
||||
if os.path.isdir(str(it_data.data).replace('\\', '/')):
|
||||
branch[parts[-1]].append(str(it_data.data).replace('\\', '/'))
|
||||
else:
|
||||
branch[parts[-1]].append(str(it_data.data))
|
||||
except Exception as exc:
|
||||
logdata = {'Exception': exc, 'keyname': it_data.keyname}
|
||||
log('W14', logdata)
|
||||
|
||||
return {'policies': dict_item_to_list(counts)}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2024 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -17,16 +17,15 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
from util.logging import slogm, log
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .appliers.firewall_rule import FirewallRule
|
||||
|
||||
|
||||
class firewall_applier(applier_frontend):
|
||||
__module_name = 'FirewallApplier'
|
||||
__module_experimental = True
|
||||
@@ -34,6 +33,7 @@ class firewall_applier(applier_frontend):
|
||||
__firewall_branch = 'SOFTWARE\\Policies\\Microsoft\\WindowsFirewall\\FirewallRules'
|
||||
__firewall_switch = 'SOFTWARE\\Policies\\Microsoft\\WindowsFirewall\\DomainProfile\\EnableFirewall'
|
||||
__firewall_reset_cmd = ['/usr/bin/alterator-net-iptables', 'reset']
|
||||
__firewall_reset_cmd_path = '/usr/bin/alterator-net-iptables'
|
||||
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
@@ -51,6 +51,9 @@ class firewall_applier(applier_frontend):
|
||||
rule.apply()
|
||||
|
||||
def apply(self):
|
||||
if not os.path.exists(self.__firewall_reset_cmd_path):
|
||||
log('D120', {'not_found_cmd': self.__firewall_reset_cmd_path})
|
||||
return
|
||||
if self.__module_enabled:
|
||||
log('D117')
|
||||
if '1' == self.firewall_enabled:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,28 +16,25 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from .appliers.folder import Folder
|
||||
from util.logging import slogm, log
|
||||
from util.windows import expand_windows_var
|
||||
import re
|
||||
import logging
|
||||
|
||||
from util.logging import log
|
||||
from util.windows import expand_windows_var
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .appliers.folder import Folder
|
||||
|
||||
|
||||
class folder_applier(applier_frontend):
|
||||
__module_name = 'FoldersApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage, sid):
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.folders = self.storage.get_folders(self.sid)
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_enabled)
|
||||
self.folders = self.storage.get_folders()
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
@@ -47,9 +44,10 @@ class folder_applier(applier_frontend):
|
||||
win_var = re.findall(r'%.+?%', check)
|
||||
drive = re.findall(r'^[a-z A-Z]\:',check)
|
||||
if drive or win_var:
|
||||
log('D109', {"path": directory_obj.path})
|
||||
continue
|
||||
fld = Folder(directory_obj)
|
||||
fld.action()
|
||||
fld.act()
|
||||
else:
|
||||
log('D108')
|
||||
|
||||
@@ -58,11 +56,10 @@ class folder_applier_user(applier_frontend):
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self.folders = self.storage.get_folders(self.sid)
|
||||
self.folders = self.storage.get_folders()
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
@@ -75,16 +72,13 @@ class folder_applier_user(applier_frontend):
|
||||
win_var = re.findall(r'%.+?%', check)
|
||||
drive = re.findall(r'^[a-z A-Z]\:',check)
|
||||
if drive or win_var:
|
||||
log('D110', {"path": directory_obj.path})
|
||||
continue
|
||||
fld = Folder(directory_obj, self.username)
|
||||
fld.act()
|
||||
|
||||
def admin_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D109')
|
||||
self.run()
|
||||
else:
|
||||
log('D110')
|
||||
pass
|
||||
|
||||
def user_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -18,47 +18,36 @@
|
||||
|
||||
from storage import registry_factory
|
||||
from storage.fs_file_cache import fs_file_cache
|
||||
|
||||
from .control_applier import control_applier
|
||||
from .polkit_applier import (
|
||||
polkit_applier
|
||||
, polkit_applier_user
|
||||
)
|
||||
from .systemd_applier import systemd_applier
|
||||
from .firefox_applier import firefox_applier
|
||||
from .chromium_applier import chromium_applier
|
||||
from .cups_applier import cups_applier
|
||||
from .package_applier import (
|
||||
package_applier
|
||||
, package_applier_user
|
||||
)
|
||||
from .shortcut_applier import (
|
||||
shortcut_applier,
|
||||
shortcut_applier_user
|
||||
)
|
||||
from .gsettings_applier import (
|
||||
gsettings_applier,
|
||||
gsettings_applier_user
|
||||
)
|
||||
from .firewall_applier import firewall_applier
|
||||
from .folder_applier import (
|
||||
folder_applier
|
||||
, folder_applier_user
|
||||
)
|
||||
from .cifs_applier import cifs_applier_user
|
||||
from .ntp_applier import ntp_applier
|
||||
from .envvar_applier import (
|
||||
envvar_applier
|
||||
, envvar_applier_user
|
||||
)
|
||||
from util.windows import get_sid
|
||||
from util.users import (
|
||||
is_root,
|
||||
get_process_user,
|
||||
username_match_uid,
|
||||
)
|
||||
from util.logging import log
|
||||
from util.system import with_privileges
|
||||
from util.users import (
|
||||
get_process_user,
|
||||
is_root,
|
||||
username_match_uid,
|
||||
)
|
||||
|
||||
from .chromium_applier import chromium_applier
|
||||
from .cifs_applier import cifs_applier, cifs_applier_user
|
||||
from .control_applier import control_applier
|
||||
from .cups_applier import cups_applier
|
||||
from .envvar_applier import envvar_applier, envvar_applier_user
|
||||
from .file_applier import file_applier, file_applier_user
|
||||
from .firefox_applier import firefox_applier
|
||||
from .firewall_applier import firewall_applier
|
||||
from .folder_applier import folder_applier, folder_applier_user
|
||||
from .gsettings_applier import gsettings_applier, gsettings_applier_user
|
||||
from .ini_applier import ini_applier, ini_applier_user
|
||||
from .kde_applier import kde_applier, kde_applier_user
|
||||
from .laps_applier import laps_applier
|
||||
from .networkshare_applier import networkshare_applier
|
||||
from .ntp_applier import ntp_applier
|
||||
from .package_applier import package_applier, package_applier_user
|
||||
from .polkit_applier import polkit_applier, polkit_applier_user
|
||||
from .scripts_applier import scripts_applier, scripts_applier_user
|
||||
from .shortcut_applier import shortcut_applier, shortcut_applier_user
|
||||
from .systemd_applier import systemd_applier
|
||||
from .thunderbird_applier import thunderbird_applier
|
||||
from .yandex_browser_applier import yandex_browser_applier
|
||||
|
||||
|
||||
def determine_username(username=None):
|
||||
@@ -70,16 +59,15 @@ def determine_username(username=None):
|
||||
|
||||
# If username is not set then it will be the name
|
||||
# of process owner.
|
||||
logdata = {'username': name}
|
||||
if not username:
|
||||
name = get_process_user()
|
||||
logdata = dict({'username': name})
|
||||
log('D2', logdata)
|
||||
|
||||
if not username_match_uid(name):
|
||||
if not is_root():
|
||||
raise Exception('Current process UID does not match specified username')
|
||||
|
||||
logdata = dict({'username': name})
|
||||
log('D15', logdata)
|
||||
|
||||
return name
|
||||
@@ -91,9 +79,7 @@ def apply_user_context(user_appliers):
|
||||
try:
|
||||
applier_object.user_context_apply()
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['applier'] = applier_name
|
||||
logdata['exception'] = str(exc)
|
||||
logdata = {'applier': applier_name, 'exception': str(exc)}
|
||||
log('E20', logdata)
|
||||
|
||||
class frontend_manager:
|
||||
@@ -103,44 +89,66 @@ class frontend_manager:
|
||||
'''
|
||||
|
||||
def __init__(self, username, is_machine):
|
||||
self.storage = registry_factory('registry')
|
||||
self.username = determine_username(username)
|
||||
self.storage = registry_factory('dconf', username=self.username)
|
||||
self.is_machine = is_machine
|
||||
self.process_uname = get_process_user()
|
||||
self.sid = get_sid(self.storage.get_info('domain'), self.username, is_machine)
|
||||
self.file_cache = fs_file_cache('file_cache')
|
||||
self.file_cache = fs_file_cache('file_cache', self.username)
|
||||
|
||||
self.machine_appliers = dict()
|
||||
self.user_appliers = dict()
|
||||
if is_machine:
|
||||
self._init_machine_appliers()
|
||||
else:
|
||||
self._init_user_appliers()
|
||||
|
||||
def _init_machine_appliers(self):
|
||||
self.machine_appliers['laps_applier'] = laps_applier(self.storage)
|
||||
self.machine_appliers['control'] = control_applier(self.storage)
|
||||
self.machine_appliers['polkit'] = polkit_applier(self.storage)
|
||||
self.machine_appliers['systemd'] = systemd_applier(self.storage)
|
||||
self.machine_appliers['firefox'] = firefox_applier(self.storage, self.sid, self.username)
|
||||
self.machine_appliers['chromium'] = chromium_applier(self.storage, self.sid, self.username)
|
||||
self.machine_appliers['firefox'] = firefox_applier(self.storage, self.username)
|
||||
self.machine_appliers['thunderbird'] = thunderbird_applier(self.storage, self.username)
|
||||
self.machine_appliers['chromium'] = chromium_applier(self.storage, self.username)
|
||||
self.machine_appliers['yandex_browser'] = yandex_browser_applier(self.storage, self.username)
|
||||
self.machine_appliers['shortcuts'] = shortcut_applier(self.storage)
|
||||
self.machine_appliers['gsettings'] = gsettings_applier(self.storage, self.file_cache)
|
||||
try:
|
||||
self.machine_appliers['cifs'] = cifs_applier(self.storage)
|
||||
except Exception as exc:
|
||||
logdata = {'applier_name': 'cifs', 'msg': str(exc)}
|
||||
log('E24', logdata)
|
||||
self.machine_appliers['cups'] = cups_applier(self.storage)
|
||||
self.machine_appliers['firewall'] = firewall_applier(self.storage)
|
||||
self.machine_appliers['folders'] = folder_applier(self.storage, self.sid)
|
||||
self.machine_appliers['package'] = package_applier(self.storage)
|
||||
self.machine_appliers['folders'] = folder_applier(self.storage)
|
||||
self.machine_appliers['ntp'] = ntp_applier(self.storage)
|
||||
self.machine_appliers['envvar'] = envvar_applier(self.storage, self.sid)
|
||||
self.machine_appliers['envvar'] = envvar_applier(self.storage)
|
||||
self.machine_appliers['networkshare'] = networkshare_applier(self.storage)
|
||||
self.machine_appliers['scripts'] = scripts_applier(self.storage)
|
||||
self.machine_appliers['files'] = file_applier(self.storage, self.file_cache)
|
||||
self.machine_appliers['ini'] = ini_applier(self.storage)
|
||||
self.machine_appliers['kde'] = kde_applier(self.storage)
|
||||
self.machine_appliers['package'] = package_applier(self.storage)
|
||||
|
||||
def _init_user_appliers(self):
|
||||
# User appliers are expected to work with user-writable
|
||||
# files and settings, mostly in $HOME.
|
||||
self.user_appliers = dict()
|
||||
self.user_appliers['shortcuts'] = shortcut_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['folders'] = folder_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['gsettings'] = gsettings_applier_user(self.storage, self.file_cache, self.sid, self.username)
|
||||
self.user_appliers['shortcuts'] = shortcut_applier_user(self.storage, self.username)
|
||||
self.user_appliers['folders'] = folder_applier_user(self.storage, self.username)
|
||||
self.user_appliers['gsettings'] = gsettings_applier_user(self.storage, self.file_cache, self.username)
|
||||
try:
|
||||
self.user_appliers['cifs'] = cifs_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['cifs'] = cifs_applier_user(self.storage, self.username)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['applier_name'] = 'cifs'
|
||||
logdata['msg'] = str(exc)
|
||||
logdata = {'applier_name': 'cifs', 'msg': str(exc)}
|
||||
log('E25', logdata)
|
||||
self.user_appliers['package'] = package_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['polkit'] = polkit_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['envvar'] = envvar_applier_user(self.storage, self.sid, self.username)
|
||||
self.user_appliers['polkit'] = polkit_applier_user(self.storage, self.username)
|
||||
self.user_appliers['envvar'] = envvar_applier_user(self.storage, self.username)
|
||||
self.user_appliers['networkshare'] = networkshare_applier(self.storage, self.username)
|
||||
self.user_appliers['scripts'] = scripts_applier_user(self.storage, self.username)
|
||||
self.user_appliers['files'] = file_applier_user(self.storage, self.file_cache, self.username)
|
||||
self.user_appliers['ini'] = ini_applier_user(self.storage, self.username)
|
||||
self.user_appliers['kde'] = kde_applier_user(self.storage, self.username, self.file_cache)
|
||||
self.user_appliers['package'] = package_applier_user(self.storage, self.username)
|
||||
|
||||
def machine_apply(self):
|
||||
'''
|
||||
@@ -155,9 +163,7 @@ class frontend_manager:
|
||||
try:
|
||||
applier_object.apply()
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['applier_name'] = applier_name
|
||||
logdata['msg'] = str(exc)
|
||||
logdata = {'applier_name': applier_name, 'msg': str(exc)}
|
||||
log('E24', logdata)
|
||||
|
||||
def user_apply(self):
|
||||
@@ -169,24 +175,20 @@ class frontend_manager:
|
||||
try:
|
||||
applier_object.admin_context_apply()
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['applier'] = applier_name
|
||||
logdata['exception'] = str(exc)
|
||||
logdata = {'applier': applier_name, 'exception': str(exc)}
|
||||
log('E19', logdata)
|
||||
|
||||
try:
|
||||
with_privileges(self.username, lambda: apply_user_context(self.user_appliers))
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['username'] = self.username
|
||||
logdata['exception'] = str(exc)
|
||||
logdata = {'username': self.username, 'exception': str(exc)}
|
||||
log('E30', logdata)
|
||||
else:
|
||||
for applier_name, applier_object in self.user_appliers.items():
|
||||
try:
|
||||
applier_object.user_context_apply()
|
||||
except Exception as exc:
|
||||
logdata = dict({'applier_name': applier_name, 'message': str(exc)})
|
||||
logdata = {'applier_name': applier_name, 'message': str(exc)}
|
||||
log('E11', logdata)
|
||||
|
||||
def apply_parameters(self):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2021 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,38 +16,33 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import pwd
|
||||
import subprocess
|
||||
|
||||
from gi.repository import (
|
||||
Gio
|
||||
, GLib
|
||||
)
|
||||
from gi.repository import Gio
|
||||
from storage.dconf_registry import Dconf_registry
|
||||
from util.exceptions import NotUNCPathError
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
, check_windows_mapping_enabled
|
||||
applier_frontend,
|
||||
check_enabled,
|
||||
check_windows_mapping_enabled,
|
||||
)
|
||||
from .appliers.gsettings import (
|
||||
system_gsettings,
|
||||
user_gsettings
|
||||
)
|
||||
from util.logging import slogm ,log
|
||||
from .appliers.gsettings import system_gsettings, user_gsettings
|
||||
|
||||
|
||||
def uri_fetch(schema, path, value, cache):
|
||||
'''
|
||||
Function to fetch and cache uri
|
||||
'''
|
||||
retval = value
|
||||
logdata = dict()
|
||||
logdata['schema'] = schema
|
||||
logdata['path'] = path
|
||||
logdata['src'] = value
|
||||
logdata = {'schema': schema, 'path': path, 'src': value}
|
||||
try:
|
||||
retval = cache.get(value)
|
||||
if not retval:
|
||||
retval = ''
|
||||
logdata['dst'] = retval
|
||||
log('D90', logdata)
|
||||
except Exception as exc:
|
||||
@@ -59,14 +54,14 @@ class gsettings_applier(applier_frontend):
|
||||
__module_name = 'GSettingsApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\GSettings\\'
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\gsettings\\'
|
||||
__registry_locks_branch = 'Software\\BaseALT\\Policies\\GSettingsLocks\\'
|
||||
__wallpaper_entry = 'Software\\BaseALT\\Policies\\GSettings\\org.mate.background.picture-filename'
|
||||
__vino_authentication_methods_entry = 'Software\\BaseALT\\Policies\\GSettings\\org.gnome.Vino.authentication-methods'
|
||||
__wallpaper_entry = 'Software/BaseALT/Policies/gsettings/org.mate.background.picture-filename'
|
||||
__vino_authentication_methods_entry = 'Software/BaseALT/Policies/gsettings/org.gnome.Vino.authentication-methods'
|
||||
__global_schema = '/usr/share/glib-2.0/schemas'
|
||||
__override_priority_file = 'zzz_policy.gschema.override'
|
||||
__override_old_file = '0_policy.gschema.override'
|
||||
__windows_settings = dict()
|
||||
|
||||
|
||||
def __init__(self, storage, file_cache):
|
||||
self.storage = storage
|
||||
@@ -78,7 +73,7 @@ class gsettings_applier(applier_frontend):
|
||||
self.override_file = os.path.join(self.__global_schema, self.__override_priority_file)
|
||||
self.override_old_file = os.path.join(self.__global_schema, self.__override_old_file)
|
||||
self.gsettings = system_gsettings(self.override_file)
|
||||
self.locks = dict()
|
||||
self.locks = {}
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
@@ -89,8 +84,7 @@ class gsettings_applier(applier_frontend):
|
||||
try:
|
||||
self.file_cache.store(data)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exception'] = str(exc)
|
||||
logdata = {'exception': str(exc)}
|
||||
log('D145', logdata)
|
||||
|
||||
def uri_fetch_helper(self, schema, path, value):
|
||||
@@ -108,13 +102,13 @@ class gsettings_applier(applier_frontend):
|
||||
|
||||
# Get all configured gsettings locks
|
||||
for lock in self.gsettings_locks:
|
||||
valuename = lock.hive_key.rpartition('\\')[2]
|
||||
valuename = lock.hive_key.rpartition('/')[2]
|
||||
self.locks[valuename] = int(lock.data)
|
||||
|
||||
# Calculate all configured gsettings
|
||||
for setting in self.gsettings_keys:
|
||||
helper = None
|
||||
valuename = setting.hive_key.rpartition('\\')[2]
|
||||
valuename = setting.hive_key.rpartition('/')[2]
|
||||
rp = valuename.rpartition('.')
|
||||
schema = rp[0]
|
||||
path = rp[2]
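For illustration only, the two rpartition calls above turn a dconf-style hive key into a GSettings schema and key name. Using the wallpaper entry named earlier in this class:

hive_key = 'Software/BaseALT/Policies/gsettings/org.mate.background.picture-filename'
valuename = hive_key.rpartition('/')[2]  # 'org.mate.background.picture-filename'
rp = valuename.rpartition('.')
schema = rp[0]                           # 'org.mate.background'
path = rp[2]                             # 'picture-filename'
print(schema, path)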
@@ -137,10 +131,7 @@ class gsettings_applier(applier_frontend):
|
||||
log('E48')
|
||||
|
||||
# Update desktop configuration system backend
|
||||
try:
|
||||
proc = subprocess.run(args=['/usr/bin/dconf', "update"], capture_output=True, check=True)
|
||||
except Exception as exc:
|
||||
log('E49')
|
||||
Dconf_registry.dconf_update()
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
@@ -161,10 +152,9 @@ class GSettingsMapping:
|
||||
self.gsettings_schema_key = self.schema.get_key(self.gsettings_key)
|
||||
self.gsettings_type = self.gsettings_schema_key.get_value_type()
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['hive_key'] = self.hive_key
|
||||
logdata['gsettings_schema'] = self.gsettings_schema
|
||||
logdata['gsettings_key'] = self.gsettings_key
|
||||
logdata = {'hive_key': self.hive_key,
|
||||
'gsettings_schema': self.gsettings_schema,
|
||||
'gsettings_key': self.gsettings_key}
|
||||
log('W6', logdata)
|
||||
|
||||
def preg2gsettings(self):
|
||||
@@ -184,45 +174,44 @@ class gsettings_applier_user(applier_frontend):
|
||||
__module_name = 'GSettingsApplierUser'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\GSettings\\'
|
||||
__wallpaper_entry = 'Software\\BaseALT\\Policies\\GSettings\\org.mate.background.picture-filename'
|
||||
__vino_authentication_methods_entry = 'Software\\BaseALT\\Policies\\GSettings\\org.gnome.Vino.authentication-methods'
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\gsettings\\'
|
||||
__wallpaper_entry = 'Software/BaseALT/Policies/gsettings/org.mate.background.picture-filename'
|
||||
__vino_authentication_methods_entry = 'Software/BaseALT/Policies/gsettings/org.gnome.Vino.authentication-methods'
|
||||
|
||||
def __init__(self, storage, file_cache, sid, username):
|
||||
def __init__(self, storage, file_cache, username):
|
||||
self.storage = storage
|
||||
self.file_cache = file_cache
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
gsettings_filter = '{}%'.format(self.__registry_branch)
|
||||
self.gsettings_keys = self.storage.filter_hkcu_entries(self.sid, gsettings_filter)
|
||||
self.gsettings_keys = self.storage.filter_hkcu_entries(gsettings_filter)
|
||||
self.gsettings = user_gsettings()
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
self.__windows_mapping_enabled = check_windows_mapping_enabled(self.storage)
|
||||
|
||||
self.__windows_settings = dict()
|
||||
self.windows_settings = list()
|
||||
self.__windows_settings = {}
|
||||
self.windows_settings = []
|
||||
mapping = [
|
||||
# Disable or enable screen saver
|
||||
GSettingsMapping(
|
||||
'Software\\Policies\\Microsoft\\Windows\\Control Panel\\Desktop\\ScreenSaveActive'
|
||||
'Software/Policies/Microsoft/Windows/Control Panel/Desktop/ScreenSaveActive'
|
||||
, 'org.mate.screensaver'
|
||||
, 'idle-activation-enabled'
|
||||
)
|
||||
# Timeout in seconds for screen saver activation. The value of zero effectively disables screensaver start
|
||||
, GSettingsMapping(
|
||||
'Software\\Policies\\Microsoft\\Windows\\Control Panel\\Desktop\\ScreenSaveTimeOut'
|
||||
'Software/Policies/Microsoft/Windows/Control Panel/Desktop/ScreenSaveTimeOut'
|
||||
, 'org.mate.session'
|
||||
, 'idle-delay'
|
||||
)
|
||||
# Enable or disable password protection for screen saver
|
||||
, GSettingsMapping(
|
||||
'Software\\Policies\\Microsoft\\Windows\\Control Panel\\Desktop\\ScreenSaverIsSecure'
|
||||
'Software/Policies/Microsoft/Windows/Control Panel/Desktop/ScreenSaverIsSecure'
|
||||
, 'org.mate.screensaver'
|
||||
, 'lock-enabled'
|
||||
)
|
||||
# Specify image which will be used as a wallpaper
|
||||
, GSettingsMapping(
|
||||
'Software\\Microsoft\\Windows\\CurrentVersion\\Policies\\System\\Wallpaper'
|
||||
'Software/Microsoft/Windows/CurrentVersion/Policies/System/Wallpaper'
|
||||
, 'org.mate.background'
|
||||
, 'picture-filename'
|
||||
)
|
||||
@@ -235,11 +224,9 @@ class gsettings_applier_user(applier_frontend):
|
||||
|
||||
def windows_mapping_append(self):
|
||||
for setting_key in self.__windows_settings.keys():
|
||||
value = self.storage.get_hkcu_entry(self.sid, setting_key)
|
||||
value = self.storage.get_hkcu_entry(setting_key)
|
||||
if value:
|
||||
logdata = dict()
|
||||
logdata['setting_key'] = setting_key
|
||||
logdata['value.data'] = value.data
|
||||
logdata = {'setting_key': setting_key, 'value.data': value.data}
|
||||
log('D86', logdata)
|
||||
mapping = self.__windows_settings[setting_key]
|
||||
try:
|
||||
@@ -251,15 +238,6 @@ class gsettings_applier_user(applier_frontend):
|
||||
return uri_fetch(schema, path, value, self.file_cache)
|
||||
|
||||
def run(self):
|
||||
#for setting in self.gsettings_keys:
|
||||
# valuename = setting.hive_key.rpartition('\\')[2]
|
||||
# rp = valuename.rpartition('.')
|
||||
# schema = rp[0]
|
||||
# path = rp[2]
|
||||
# self.gsettings.append(user_gsetting(schema, path, setting.data))
|
||||
|
||||
|
||||
# Calculate all mapped gsettings if mapping enabled
|
||||
if self.__windows_mapping_enabled:
|
||||
log('D83')
|
||||
self.windows_mapping_append()
|
||||
@@ -268,7 +246,7 @@ class gsettings_applier_user(applier_frontend):
|
||||
|
||||
# Calculate all configured gsettings
|
||||
for setting in self.gsettings_keys:
|
||||
valuename = setting.hive_key.rpartition('\\')[2]
|
||||
valuename = setting.hive_key.rpartition('/')[2]
|
||||
rp = valuename.rpartition('.')
|
||||
schema = rp[0]
|
||||
path = rp[2]
|
||||
@@ -292,12 +270,13 @@ class gsettings_applier_user(applier_frontend):
|
||||
# Cache files on remote locations
|
||||
try:
|
||||
entry = self.__wallpaper_entry
|
||||
filter_result = self.storage.get_hkcu_entry(self.sid, entry)
|
||||
if filter_result:
|
||||
filter_result = self.storage.get_hkcu_entry(entry)
|
||||
if filter_result and filter_result.data:
|
||||
self.file_cache.store(filter_result.data)
|
||||
except NotUNCPathError:
|
||||
...
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exception'] = str(exc)
|
||||
logdata = {'exception': str(exc)}
|
||||
log('E50', logdata)
|
||||
gpoa/frontend/ini_applier.py (Normal file, 74 lines)
@@ -0,0 +1,74 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .appliers.ini_file import Ini_file
|
||||
|
||||
|
||||
class ini_applier(applier_frontend):
|
||||
__module_name = 'InifilesApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
self.inifiles_info = self.storage.get_ini()
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def run(self):
|
||||
for inifile in self.inifiles_info:
|
||||
Ini_file(inifile)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D171')
|
||||
self.run()
|
||||
else:
|
||||
log('D172')
|
||||
|
||||
class ini_applier_user(applier_frontend):
|
||||
__module_name = 'InifilesApplierUser'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
|
||||
def __init__(self, storage, username):
|
||||
self.username = username
|
||||
self.storage = storage
|
||||
self.inifiles_info = self.storage.get_ini()
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def run(self):
|
||||
for inifile in self.inifiles_info:
|
||||
Ini_file(inifile, self.username)
|
||||
|
||||
def admin_context_apply(self):
|
||||
pass
|
||||
|
||||
def user_context_apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D173')
|
||||
self.run()
|
||||
else:
|
||||
log('D174')
|
||||
gpoa/frontend/kde_applier.py (Normal file, 366 lines)
@@ -0,0 +1,366 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
import dbus
|
||||
from util.exceptions import NotUNCPathError
|
||||
from util.logging import log
|
||||
from util.util import get_homedir
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
|
||||
class kde_applier(applier_frontend):
|
||||
__module_name = 'KdeApplier'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
__hklm_branch = 'Software/BaseALT/Policies/KDE/'
|
||||
__hklm_lock_branch = 'Software/BaseALT/Policies/KDELocks/'
|
||||
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
self.locks_dict = {}
|
||||
self.locks_data_dict = {}
|
||||
self.all_kde_settings = {}
|
||||
kde_filter = '{}%'.format(self.__hklm_branch)
|
||||
locks_filter = '{}%'.format(self.__hklm_lock_branch)
|
||||
self.locks_settings = self.storage.filter_hklm_entries(locks_filter)
|
||||
self.kde_settings = self.storage.filter_hklm_entries(kde_filter)
|
||||
self.all_kde_settings = {}
|
||||
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage,
|
||||
self.__module_name,
|
||||
self.__module_experimental
|
||||
)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D198')
|
||||
create_dict(self.kde_settings, self.all_kde_settings, self.locks_settings, self.locks_dict)
|
||||
apply(self.all_kde_settings, self.locks_dict)
|
||||
else:
|
||||
log('D199')
|
||||
|
||||
class kde_applier_user(applier_frontend):
|
||||
__module_name = 'KdeApplierUser'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
kde_version = None
|
||||
__hkcu_branch = 'Software/BaseALT/Policies/KDE'
|
||||
__hkcu_lock_branch = 'Software/BaseALT/Policies/KDELocks'
|
||||
__plasma_update_entry = 'Software/BaseALT/Policies/KDE/Plasma/Update'
|
||||
|
||||
def __init__(self, storage, username=None, file_cache = None):
|
||||
self.storage = storage
|
||||
self.username = username
|
||||
self.file_cache = file_cache
|
||||
self.locks_dict = {}
|
||||
self.locks_data_dict = {}
|
||||
self.all_kde_settings = {}
|
||||
kde_applier_user.kde_version = get_kde_version()
|
||||
kde_filter = '{}%'.format(self.__hkcu_branch)
|
||||
locks_filter = '{}%'.format(self.__hkcu_lock_branch)
|
||||
self.locks_settings = self.storage.filter_hkcu_entries(locks_filter)
|
||||
self.plasma_update = self.storage.get_entry(self.__plasma_update_entry)
|
||||
self.plasma_update_flag = self.plasma_update.data if self.plasma_update is not None else 0
|
||||
self.kde_settings = self.storage.filter_hkcu_entries(kde_filter)
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage,
|
||||
self.__module_name,
|
||||
self.__module_experimental
|
||||
)
|
||||
|
||||
def admin_context_apply(self):
|
||||
try:
|
||||
for setting in self.kde_settings:
|
||||
file_name = setting.keyname.split("/")[-2]
|
||||
if file_name == 'wallpaper':
|
||||
data = setting.data
|
||||
break
|
||||
self.file_cache.store(data)
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
|
||||
def user_context_apply(self):
|
||||
'''
|
||||
Change settings applied in user context
|
||||
'''
|
||||
if self.__module_enabled:
|
||||
log('D200')
|
||||
create_dict(self.kde_settings, self.all_kde_settings, self.locks_settings, self.locks_dict, self.file_cache, self.username, self.plasma_update_flag)
|
||||
apply(self.all_kde_settings, self.locks_dict, self.username)
|
||||
else:
|
||||
log('D201')
|
||||
|
||||
dbus_methods_mapping = {
|
||||
'kscreenlockerrc': {
|
||||
'dbus_service': 'org.kde.screensaver',
|
||||
'dbus_path': '/ScreenSaver',
|
||||
'dbus_interface': 'org.kde.screensaver',
|
||||
'dbus_method': 'configure'
|
||||
},
|
||||
'wallpaper': {
|
||||
'dbus_service': 'org.freedesktop.systemd1',
|
||||
'dbus_path': '/org/freedesktop/systemd1',
|
||||
'dbus_interface': 'org.freedesktop.systemd1.Manager',
|
||||
'dbus_method': 'RestartUnit',
|
||||
'dbus_args': ['plasma-plasmashell.service', 'replace']
|
||||
}
|
||||
}
|
||||
|
||||
def get_kde_version():
|
||||
try:
|
||||
kinfo_path = shutil.which("kinfo", path="/usr/lib/kf5/bin:/usr/bin")
|
||||
if not kinfo_path:
|
||||
raise FileNotFoundError("Unable to find kinfo")
|
||||
output = subprocess.check_output([kinfo_path], text=True, env={'LANG':'C'})
|
||||
for line in output.splitlines():
|
||||
if "KDE Frameworks Version" in line:
|
||||
frameworks_version = line.split(":", 1)[1].strip()
|
||||
major_frameworks_version = int(frameworks_version.split(".")[0])
|
||||
return major_frameworks_version
|
||||
except:
|
||||
return None
|
||||
|
||||
|
||||
def create_dict(kde_settings, all_kde_settings, locks_settings, locks_dict, file_cache = None, username = None, plasmaupdate = False):
|
||||
for locks in locks_settings:
|
||||
locks_dict[locks.valuename] = locks.data
|
||||
for setting in kde_settings:
|
||||
try:
|
||||
file_name, section, value = setting.keyname.split("/")[-2], setting.keyname.split("/")[-1], setting.valuename
|
||||
data = setting.data
|
||||
if file_name == 'wallpaper':
|
||||
apply_for_wallpaper(data, file_cache, username, plasmaupdate)
|
||||
else:
|
||||
all_kde_settings.setdefault(file_name, {}).setdefault(section, {})[value] = data
|
||||
except Exception as exc:
|
||||
logdata = {'file_name': file_name,
|
||||
'section': section,
|
||||
'value': value,
|
||||
'data': data,
|
||||
'exc': exc}
|
||||
log('W16', logdata)
|
||||
|
||||
def apply(all_kde_settings, locks_dict, username = None):
|
||||
logdata = {}
|
||||
modified_files = set()
|
||||
if username is None:
|
||||
system_path_settings = '/etc/xdg/'
|
||||
system_files = [
|
||||
"baloofilerc",
|
||||
"kcminputrc",
|
||||
"kded_device_automounterrc",
|
||||
"kdeglobals",
|
||||
"ksplashrc",
|
||||
"kwinrc",
|
||||
"plasma-localerc",
|
||||
"plasmarc",
|
||||
"powermanagementprofilesrc"
|
||||
]
|
||||
for file in system_files:
|
||||
file_to_remove = f'{system_path_settings}{file}'
|
||||
if os.path.exists(file_to_remove):
|
||||
os.remove(file_to_remove)
|
||||
for file_name, sections in all_kde_settings.items():
|
||||
file_path = f'{system_path_settings}{file_name}'
|
||||
with open(file_path, 'w') as file:
|
||||
for section, keys in sections.items():
|
||||
section = section.replace(')(', '][')
|
||||
file.write(f'[{section}]\n')
|
||||
for key, value in keys.items():
|
||||
lock = f"{file_name}.{section}.{key}".replace('][', ')(')
|
||||
if locks_dict.get(lock) == 1:
|
||||
file.write(f'{key}[$i]={value}\n')
|
||||
else:
|
||||
file.write(f'{key}={value}\n')
|
||||
file.write('\n')
|
||||
modified_files.add(file_name)
|
||||
else:
|
||||
for file_name, sections in all_kde_settings.items():
|
||||
path = f'{get_homedir(username)}/.config/{file_name}'
|
||||
if not os.path.exists(path):
|
||||
open(path, 'a').close()
|
||||
else:
|
||||
pass
|
||||
for section, keys in sections.items():
|
||||
for key, value in keys.items():
|
||||
value = str(value)
|
||||
lock = f"{file_name}.{section}.{key}"
|
||||
if lock in locks_dict and locks_dict[lock] == 1:
|
||||
command = [
|
||||
f'kwriteconfig{kde_applier_user.kde_version}',
|
||||
'--file', file_name,
|
||||
'--group', section,
|
||||
'--key', key +'/$i/',
|
||||
'--type', 'string',
|
||||
value
|
||||
]
|
||||
else:
|
||||
command = [
|
||||
f'kwriteconfig{kde_applier_user.kde_version}',
|
||||
'--file', file_name,
|
||||
'--group', section,
|
||||
'--key', key,
|
||||
'--type', 'string',
|
||||
value
|
||||
]
|
||||
try:
|
||||
clear_locks_settings(username, file_name, key)
|
||||
env_path = dict(os.environ)
|
||||
env_path["PATH"] = "/usr/lib/kf5/bin:/usr/bin"
|
||||
subprocess.run(command, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env_path)
|
||||
except:
|
||||
logdata = {'command': command}
|
||||
log('W22', logdata)
|
||||
new_content = []
|
||||
file_path = f'{get_homedir(username)}/.config/{file_name}'
|
||||
try:
|
||||
with open(file_path, 'r') as file:
|
||||
for line in file:
|
||||
line = line.replace('/$i/', '[$i]').replace(')(', '][')
|
||||
new_content.append(line)
|
||||
with open(file_path, 'w') as file:
|
||||
file.writelines(new_content)
|
||||
logdata['file'] = file_name
|
||||
log('D202', logdata)
|
||||
except Exception as exc:
|
||||
logdata['exc'] = exc
|
||||
log('W19', logdata)
|
||||
modified_files.add(file_name)
|
||||
for file_name in modified_files:
|
||||
call_dbus_method(file_name)
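For reference, KDE config files mark immutable entries with a [$i] suffix, which is what the lock handling above produces in both the system and user branches. A small self-contained sketch of that convention, with assumed policy values rather than data read from dconf:

locks_dict = {'kdeglobals.General.ColorScheme': 1}   # hypothetical KDELocks entry
file_name, section, key, value = 'kdeglobals', 'General', 'ColorScheme', 'BreezeDark'
lock = f'{file_name}.{section}.{key}'
line = f'{key}[$i]={value}' if locks_dict.get(lock) == 1 else f'{key}={value}'
print(f'[{section}]')
print(line)   # ColorScheme[$i]=BreezeDark, so the entry cannot be changed by the user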
|
||||
|
||||
def clear_locks_settings(username, file_name, key):
|
||||
'''
|
||||
Method to remove old locked settings
|
||||
'''
|
||||
file_path = f'{get_homedir(username)}/.config/{file_name}'
|
||||
with open(file_path, 'r') as file:
|
||||
lines = file.readlines()
|
||||
with open(file_path, 'w') as file:
|
||||
for line in lines:
|
||||
if f'{key}[$i]=' not in line:
|
||||
file.write(line)
|
||||
for line in lines:
|
||||
if f'{key}[$i]=' in line:
|
||||
logdata = {'line': line.strip()}
|
||||
log('I10', logdata)
|
||||
|
||||
def apply_for_wallpaper(data, file_cache, username, plasmaupdate):
|
||||
'''
|
||||
Method to change wallpaper
|
||||
'''
|
||||
logdata = {}
|
||||
path_to_wallpaper = f'{get_homedir(username)}/.config/plasma-org.kde.plasma.desktop-appletsrc'
|
||||
id_desktop = get_id_desktop(path_to_wallpaper)
|
||||
try:
|
||||
try:
|
||||
data = str(file_cache.get(data))
|
||||
except NotUNCPathError:
|
||||
data = str(data)
|
||||
|
||||
with open(path_to_wallpaper, 'r') as file:
|
||||
current_wallpaper = file.read()
|
||||
match = re.search(rf'\[Containments\]\[{id_desktop}\]\[Wallpaper\]\[org\.kde\.image\]\[General\]\s+Image=(.*)', current_wallpaper)
|
||||
if match:
|
||||
current_wallpaper_path = match.group(1)
|
||||
flag = (current_wallpaper_path == data)
|
||||
else:
|
||||
flag = False
|
||||
|
||||
os.environ["LANGUAGE"] = os.environ["LANG"].split(".")[0]
|
||||
os.environ["XDG_DATA_DIRS"] = "/usr/share/kf5:"
|
||||
#Variable for system detection of directories before files with .colors extension
|
||||
os.environ["DISPLAY"] = ":0"
|
||||
#Variable for command execution plasma-apply-colorscheme
|
||||
os.environ["XDG_RUNTIME_DIR"] = f"/run/user/{os.getuid()}"
|
||||
os.environ["PATH"] = "/usr/lib/kf5/bin:"
|
||||
os.environ["DBUS_SESSION_BUS_ADDRESS"] = f"unix:path=/run/user/{os.getuid()}/bus"#plasma-apply-wallpaperimage
|
||||
env_path = dict(os.environ)
|
||||
env_path["PATH"] = "/usr/lib/kf5/bin:/usr/bin"
|
||||
#environment variable for accessing binary files without hard links
|
||||
if not flag:
|
||||
if os.path.isfile(path_to_wallpaper):
|
||||
command = [
|
||||
f'kwriteconfig{kde_applier_user.kde_version}',
|
||||
'--file', 'plasma-org.kde.plasma.desktop-appletsrc',
|
||||
'--group', 'Containments',
|
||||
'--group', id_desktop,
|
||||
'--group', 'Wallpaper',
|
||||
'--group', 'org.kde.image',
|
||||
'--group', 'General',
|
||||
'--key', 'Image',
|
||||
'--type', 'string',
|
||||
data
|
||||
]
|
||||
try:
|
||||
subprocess.run(command, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env_path)
|
||||
except:
|
||||
logdata = {'command': command}
|
||||
log('E68', logdata)
|
||||
if plasmaupdate == 1:
|
||||
call_dbus_method("wallpaper")
|
||||
else:
|
||||
logdata = {'file': path_to_wallpaper}
|
||||
log('W21', logdata)
|
||||
except OSError as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('W17', logdata)
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('E67', logdata)
|
||||
|
||||
def get_id_desktop(path_to_wallpaper):
|
||||
'''
|
||||
Get the desktop id. By convention this is the number of one of the [Containments] sections in the configuration file (the one that carries an activityId).
|
||||
'''
|
||||
pattern = r'\[Containments\]\[(\d+)\][^\[]*activityId=([^\s]+)'
|
||||
try:
|
||||
with open(path_to_wallpaper, 'r') as file:
|
||||
file_content = file.read()
|
||||
match = re.search(pattern, file_content)
|
||||
return match.group(1) if match else None
|
||||
except:
|
||||
return None
|
||||
|
||||
def call_dbus_method(file_name):
|
||||
'''
|
||||
Method to call D-Bus method based on the file name
|
||||
'''
|
||||
os.environ["DBUS_SESSION_BUS_ADDRESS"] = f"unix:path=/run/user/{os.getuid()}/bus"
|
||||
if file_name in dbus_methods_mapping:
|
||||
config = dbus_methods_mapping[file_name]
|
||||
try:
|
||||
session_bus = dbus.SessionBus()
|
||||
dbus_object = session_bus.get_object(config['dbus_service'], config['dbus_path'])
|
||||
dbus_iface = dbus.Interface(dbus_object, config['dbus_interface'])
|
||||
if 'dbus_args' in config:
|
||||
getattr(dbus_iface, config['dbus_method'])(*config['dbus_args'])
|
||||
else:
|
||||
getattr(dbus_iface, config['dbus_method'])()
|
||||
except dbus.exceptions.DBusException as exc:
|
||||
logdata = {'error': str(exc)}
|
||||
log('E31', logdata)
|
||||
else:
|
||||
pass
|
||||
gpoa/frontend/laps_applier.py (Normal file, 816 lines)
@@ -0,0 +1,816 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import secrets
|
||||
import string
|
||||
import struct
|
||||
import subprocess
|
||||
|
||||
from dateutil import tz
|
||||
import ldb
|
||||
import psutil
|
||||
from util.logging import log
|
||||
from util.sid import WellKnown21RID
|
||||
from util.util import check_local_user_exists, remove_prefix_from_keys, get_machine_name
|
||||
from util.windows import get_kerberos_domain_info
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
from libcng_dpapi import (
|
||||
create_protection_descriptor,
|
||||
protect_secret,
|
||||
unprotect_secret,
|
||||
NcryptError
|
||||
)
|
||||
|
||||
_DATEUTIL_AVAILABLE = False
|
||||
try:
|
||||
from dateutil import tz
|
||||
_DATEUTIL_AVAILABLE = True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
class laps_applier(applier_frontend):
|
||||
"""
|
||||
LAPS (Local Administrator Password Solution) implementation for managing
|
||||
and automatically rotating administrator passwords.
|
||||
"""
|
||||
|
||||
# Time calculation constants
|
||||
|
||||
# Number of seconds between the Windows epoch (1601-01-01 00:00:00 UTC)
|
||||
# and the Unix epoch (1970-01-01 00:00:00 UTC).
|
||||
# Used to convert between Unix timestamps and Windows FileTime.
|
||||
_EPOCH_TIMESTAMP = 11644473600
|
||||
# Number of 100-nanosecond intervals per second.
|
||||
# Used to convert seconds to Windows FileTime format.
|
||||
_HUNDREDS_OF_NANOSECONDS = 10000000
|
||||
# Number of 100-nanosecond intervals in one day
|
||||
_DAY_FLOAT = 8.64e11
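A back-of-the-envelope check of these constants, assuming a UTC timestamp for simplicity (the applier itself converts naive local datetimes in _convert_to_filetime further below):

from datetime import datetime, timezone

EPOCH_TIMESTAMP = 11644473600          # seconds between 1601-01-01 and 1970-01-01
HUNDREDS_OF_NANOSECONDS = 10000000     # 100 ns ticks per second

dt = datetime(2025, 1, 1, tzinfo=timezone.utc)
filetime = int((dt.timestamp() + EPOCH_TIMESTAMP) * HUNDREDS_OF_NANOSECONDS)
print(filetime)  # 100 ns intervals since 1601-01-01, the format stored in msLAPS-PasswordExpirationTime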
|
||||
|
||||
# Module configuration
|
||||
__module_name = 'LapsApplier'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
|
||||
# Registry paths
|
||||
_WINDOWS_REGISTRY_PATH = 'SOFTWARE/Microsoft/Windows/CurrentVersion/Policies/LAPS/'
|
||||
_ALT_REGISTRY_PATH = 'Software/BaseALT/Policies/Laps/'
|
||||
|
||||
# LDAP attributes
|
||||
_ATTR_ENCRYPTED_PASSWORD = 'msLAPS-EncryptedPassword'
|
||||
_ATTR_PASSWORD_EXPIRATION_TIME = 'msLAPS-PasswordExpirationTime'
|
||||
|
||||
# dconf key for password modification time
|
||||
_KEY_PASSWORD_LAST_MODIFIED = '/Software/BaseALT/Policies/Laps/PasswordLastModified/'
|
||||
|
||||
# Password complexity levels
|
||||
_PASSWORD_COMPLEXITY = {
|
||||
1: string.ascii_uppercase,
|
||||
2: string.ascii_letters,
|
||||
3: string.ascii_letters + string.digits,
|
||||
4: string.ascii_letters + string.digits + string.punctuation
|
||||
}
|
||||
|
||||
# Post-authentication actions
|
||||
_ACTION_NONE = 0
|
||||
_ACTION_CHANGE_PASSWORD = 1
|
||||
_ACTION_TERMINATE_SESSIONS = 3
|
||||
_ACTION_REBOOT = 5
|
||||
|
||||
def __init__(self, storage):
|
||||
"""
|
||||
Initialize the LAPS applier with configuration from registry.
|
||||
|
||||
Args:
|
||||
storage: Storage object containing registry entries and system information
|
||||
"""
|
||||
self.storage = storage
|
||||
|
||||
# Load registry configuration
|
||||
if not self._load_configuration():
|
||||
self.__module_enabled = False
|
||||
return
|
||||
|
||||
if not self._check_requirements():
|
||||
log('W29')
|
||||
self.__module_enabled = False
|
||||
return
|
||||
|
||||
# Initialize system connections and parameters
|
||||
self._initialize_system_parameters()
|
||||
|
||||
# Check if module is enabled in configuration
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage,
|
||||
self.__module_name,
|
||||
self.__module_experimental
|
||||
)
|
||||
|
||||
def _load_configuration(self):
|
||||
"""Load configuration settings from registry."""
|
||||
alt_keys = remove_prefix_from_keys(
|
||||
self.storage.filter_entries(self._ALT_REGISTRY_PATH),
|
||||
self._ALT_REGISTRY_PATH
|
||||
)
|
||||
windows_keys = remove_prefix_from_keys(
|
||||
self.storage.filter_entries(self._WINDOWS_REGISTRY_PATH),
|
||||
self._WINDOWS_REGISTRY_PATH
|
||||
)
|
||||
|
||||
# Combine configurations with BaseALT taking precedence
|
||||
self.config = windows_keys
|
||||
self.config.update(alt_keys)
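In other words, dict.update gives the Software/BaseALT/Policies/Laps branch the last word over the Microsoft LAPS branch. A tiny sketch with assumed values:

windows_keys = {'PasswordAgeDays': 30, 'PasswordLength': 14}  # assumed Microsoft LAPS values
alt_keys = {'PasswordAgeDays': 60}                            # assumed BaseALT value

config = dict(windows_keys)
config.update(alt_keys)           # BaseALT keys overwrite Microsoft keys on conflict
print(config['PasswordAgeDays'])  # 60
print(config['PasswordLength'])   # 14, kept because the BaseALT branch does not set it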
|
||||
|
||||
# Extract commonly used configuration parameters
|
||||
self.backup_directory = self.config.get('BackupDirectory', None)
|
||||
self.encryption_enabled = self.config.get('ADPasswordEncryptionEnabled', 1)
|
||||
self.password_expiration_protection = self.config.get('PasswordExpirationProtectionEnabled', 1)
|
||||
self.password_age_days = self.config.get('PasswordAgeDays', 30)
|
||||
self.post_authentication_actions = self.config.get('PostAuthenticationActions', 3)
|
||||
self.post_authentication_reset_delay = self.config.get('PostAuthenticationResetDelay', 24)
|
||||
name = self.config.get('AdministratorAccountName', 'root')
|
||||
if name and check_local_user_exists(name):
|
||||
self.target_user = name
|
||||
else:
|
||||
log('W36')
|
||||
return False
|
||||
return True
|
||||
|
||||
def _check_requirements(self):
|
||||
"""
|
||||
Check if the necessary requirements are met for the module to operate.
|
||||
|
||||
Returns:
|
||||
bool: True if requirements are met, False otherwise
|
||||
"""
|
||||
if self.backup_directory != 2 or not self.encryption_enabled:
|
||||
logdata = {}
|
||||
logdata['backup_directory'] = self.backup_directory
|
||||
logdata['encryption_enabled'] = self.encryption_enabled
|
||||
log('D223', logdata)
|
||||
return False
|
||||
return True
|
||||
|
||||
def _initialize_system_parameters(self):
|
||||
"""Initialize system parameters and connections."""
|
||||
# Set up LDAP connections
|
||||
self.samdb = self.storage.get_info('samdb')
|
||||
self.domain_sid = self.samdb.get_domain_sid()
|
||||
self.domain_dn = self.samdb.domain_dn()
|
||||
self.computer_dn = self._get_computer_dn()
|
||||
self.admin_group_sid = f'{self.domain_sid}-{WellKnown21RID.DOMAIN_ADMINS.value}'
|
||||
|
||||
# Set up time parameters
|
||||
self.expiration_date = self._get_expiration_date()
|
||||
self.expiration_date_int = self._convert_to_filetime(self.expiration_date)
|
||||
self.current_time_int = self._convert_to_filetime(datetime.now())
|
||||
|
||||
# Get current system state
|
||||
self.expiration_time_attr = self._get_expiration_time_attr()
|
||||
self.pass_last_mod_int = self._read_dconf_pass_last_mod()
|
||||
self.encryption_principal = self._get_encryption_principal()
|
||||
self.last_login_hours_ago = self._get_admin_login_hours_ago_after_timestamp()
|
||||
|
||||
def _get_computer_dn(self):
|
||||
"""
|
||||
Get the Distinguished Name of the computer account.
|
||||
|
||||
Returns:
|
||||
str: Computer's distinguished name in LDAP
|
||||
"""
|
||||
machine_name = self.storage.get_info('machine_name')
|
||||
search_filter = f'(sAMAccountName={machine_name})'
|
||||
results = self.samdb.search(base=self.domain_dn, expression=search_filter, attrs=['dn'])
|
||||
return results[0]['dn']
|
||||
|
||||
def _get_encryption_principal(self):
|
||||
"""
|
||||
Get the encryption principal for password encryption.
|
||||
|
||||
Returns:
|
||||
str: SID of the encryption principal
|
||||
"""
|
||||
encryption_principal = self.config.get('ADPasswordEncryptionPrincipal', None)
|
||||
if not encryption_principal:
|
||||
return self.admin_group_sid
|
||||
|
||||
return self._verify_encryption_principal(encryption_principal)
|
||||
|
||||
def _verify_encryption_principal(self, principal_name):
|
||||
"""
|
||||
Verify the encryption principal exists and get its SID.
|
||||
|
||||
Args:
|
||||
principal_name: Principal name to verify
|
||||
|
||||
Returns:
|
||||
str: SID of the encryption principal if found, or admin group SID as fallback
|
||||
"""
|
||||
try:
|
||||
# Try to resolve as domain\\user format
|
||||
domain = self.storage.get_info('domain')
|
||||
username = f'{domain}\\{principal_name}'
|
||||
output = subprocess.check_output(['wbinfo', '-n', username])
|
||||
sid = output.split()[0].decode('utf-8')
|
||||
return sid
|
||||
except subprocess.CalledProcessError:
|
||||
# Try to resolve directly as SID
|
||||
try:
|
||||
output = subprocess.check_output(['wbinfo', '-s', principal_name])
|
||||
return principal_name
|
||||
except subprocess.CalledProcessError:
|
||||
# Fallback to admin group SID
|
||||
logdata = {}
|
||||
logdata['principal_name'] = principal_name
|
||||
log('W30', logdata)
|
||||
return self.admin_group_sid
|
||||
|
||||
def _get_expiration_date(self, base_time=None):
|
||||
"""
|
||||
Calculate the password expiration date.
|
||||
|
||||
Args:
|
||||
base_time: Optional datetime to base calculation on, defaults to now
|
||||
|
||||
Returns:
|
||||
datetime: Password expiration date
|
||||
"""
|
||||
base = base_time or datetime.now()
|
||||
# Set to beginning of day and add password age
|
||||
return (base.replace(hour=0, minute=0, second=0, microsecond=0) +
|
||||
timedelta(days=int(self.password_age_days)))
|
||||
|
||||
def _convert_to_filetime(self, dt):
|
||||
"""
|
||||
Convert datetime to Windows filetime format (100ns intervals since 1601-01-01).
|
||||
|
||||
Args:
|
||||
dt: Datetime to convert
|
||||
|
||||
Returns:
|
||||
int: Windows filetime integer
|
||||
"""
|
||||
epoch_timedelta = timedelta(seconds=self._EPOCH_TIMESTAMP)
|
||||
new_dt = dt + epoch_timedelta
|
||||
return int(new_dt.timestamp() * self._HUNDREDS_OF_NANOSECONDS)
|
||||
|
||||
def _get_expiration_time_attr(self):
|
||||
"""
|
||||
Get the current password expiration time from LDAP.
|
||||
|
||||
Returns:
|
||||
int: Password expiration time as integer, or 0 if not found
|
||||
"""
|
||||
try:
|
||||
res = self.samdb.search(
|
||||
base=self.computer_dn,
|
||||
scope=ldb.SCOPE_BASE,
|
||||
expression="(objectClass=*)",
|
||||
attrs=[self._ATTR_PASSWORD_EXPIRATION_TIME]
|
||||
)
|
||||
return int(res[0].get(self._ATTR_PASSWORD_EXPIRATION_TIME, 0)[0])
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('W31', logdata)
|
||||
return 0
|
||||
|
||||
def _read_dconf_pass_last_mod(self):
|
||||
"""
|
||||
Read the password last modified time from dconf.
|
||||
|
||||
Returns:
|
||||
int: Timestamp of last password modification or current time if not found
|
||||
"""
|
||||
try:
|
||||
key_path = self._KEY_PASSWORD_LAST_MODIFIED + self.target_user
|
||||
last_modified = subprocess.check_output(
|
||||
['dconf', 'read', key_path],
|
||||
text=True
|
||||
).strip().strip("'\"")
|
||||
return int(last_modified)
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('W32', logdata)
|
||||
return self.current_time_int
|
||||
|
||||
def _write_dconf_pass_last_mod(self):
|
||||
"""
|
||||
Write the password last modified time to dconf.
|
||||
"""
|
||||
try:
|
||||
# Ensure dbus session is available
|
||||
self._ensure_dbus_session()
|
||||
|
||||
# Write current time to dconf
|
||||
key_path = self._KEY_PASSWORD_LAST_MODIFIED + self.target_user
|
||||
last_modified = f'"{self.current_time_int}"'
|
||||
subprocess.check_output(['dconf', 'write', key_path, last_modified])
|
||||
log('D222')
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('W28', logdata)
|
||||
|
||||
def _ensure_dbus_session(self):
|
||||
"""Ensure a D-Bus session is available for dconf operations."""
|
||||
dbus_address = os.getenv("DBUS_SESSION_BUS_ADDRESS")
|
||||
if not dbus_address:
|
||||
result = subprocess.run(
|
||||
["dbus-daemon", "--fork", "--session", "--print-address"],
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
dbus_address = result.stdout.strip()
|
||||
os.environ["DBUS_SESSION_BUS_ADDRESS"] = dbus_address
|
||||
|
||||
|
||||
def _get_changed_password_hours_ago(self):
|
||||
"""
|
||||
Calculate how many hours ago the password was last changed.
|
||||
|
||||
Returns:
|
||||
int: Hours since password was last changed, or 0 if error
|
||||
"""
|
||||
logdata = {}
|
||||
logdata['target_user'] = self.target_user
|
||||
try:
|
||||
diff_time = self.current_time_int - self.pass_last_mod_int
|
||||
hours_difference = diff_time // 3.6e10
|
||||
hours_ago = int(hours_difference)
|
||||
logdata['hours_ago'] = hours_ago
|
||||
log('D225', logdata)
|
||||
return hours_ago
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('W34', logdata)
|
||||
return 0
|
||||
|
||||
def _generate_password(self):
|
||||
"""
|
||||
Generate a secure password based on policy settings.
|
||||
|
||||
Returns:
|
||||
str: Generated password meeting complexity requirements
|
||||
"""
|
||||
# Get password length from config
|
||||
password_length = self.config.get('PasswordLength', 14)
|
||||
if not isinstance(password_length, int) or not (8 <= password_length <= 64):
|
||||
password_length = 14
|
||||
|
||||
# Get password complexity from config
|
||||
password_complexity = self.config.get('PasswordComplexity', 4)
|
||||
if not isinstance(password_complexity, int) or not (1 <= password_complexity <= 4):
|
||||
password_complexity = 4
|
||||
|
||||
# Get character set based on complexity
|
||||
char_set = self._PASSWORD_COMPLEXITY.get(password_complexity, self._PASSWORD_COMPLEXITY[4])
|
||||
|
||||
# Generate initial password
|
||||
password = ''.join(secrets.choice(char_set) for _ in range(password_length))
|
||||
|
||||
# Ensure password meets complexity requirements
|
||||
if password_complexity >= 3 and not any(c.isdigit() for c in password):
|
||||
# Add a digit if required but missing
|
||||
digit = secrets.choice(string.digits)
|
||||
position = secrets.randbelow(len(password))
|
||||
password = password[:position] + digit + password[position:]
|
||||
|
||||
if password_complexity == 4 and not any(c in string.punctuation for c in password):
|
||||
# Add a special character if required but missing
|
||||
special_char = secrets.choice(string.punctuation)
|
||||
position = secrets.randbelow(len(password))
|
||||
password = password[:position] + special_char + password[position:]
|
||||
|
||||
return password
|
||||
|
||||
def _get_json_password_data(self, password):
|
||||
"""
|
||||
Format password information as JSON.
|
||||
|
||||
Args:
|
||||
password: The password
|
||||
|
||||
Returns:
|
||||
str: JSON formatted password information
|
||||
"""
|
||||
return f'{{"n":"{self.target_user}","t":"{self.expiration_date_int}","p":"{password}"}}'
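With assumed values, the JSON payload returned here (the plaintext that _create_password_blob later encodes as UTF-16LE and protects with DPAPI-NG) looks like this:

target_user = 'root'                       # assumed AdministratorAccountName
expiration_date_int = 134197632000000000   # assumed FileTime value
password = 'example-password'              # placeholder, never a real secret
blob = f'{{"n":"{target_user}","t":"{expiration_date_int}","p":"{password}"}}'
print(blob)  # {"n":"root","t":"134197632000000000","p":"example-password"}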
|
||||
|
||||
def _create_password_blob(self, password):
|
||||
"""
|
||||
Create encrypted password blob for LDAP storage.
|
||||
|
||||
Args:
|
||||
password: Password to encrypt
|
||||
|
||||
Returns:
|
||||
bytes: Encrypted password blob
|
||||
"""
|
||||
# Create JSON data and encode as UTF-16LE with null terminator
|
||||
json_data = self._get_json_password_data(password)
|
||||
password_bytes = json_data.encode("utf-16-le") + b"\x00\x00"
|
||||
# Save the current log level and temporarily raise it to ERROR
|
||||
logger = logging.getLogger()
|
||||
old_level = logger.level
|
||||
logger.setLevel(logging.ERROR)
|
||||
# Encrypt the password
|
||||
descriptor_string = f"SID={self.encryption_principal}"
|
||||
descriptor_handle = create_protection_descriptor(descriptor_string)
|
||||
secret_message = password_bytes
|
||||
# Resolve DPAPI-NG parameters dynamically using single Kerberos info fetch
|
||||
info = get_kerberos_domain_info()
|
||||
domain_realm = self._get_windows_realm(info)
|
||||
dc_fqdn = self._get_domain_controller_fqdn(info)
|
||||
machine_username = self._get_machine_account_username()
|
||||
if not domain_realm or not dc_fqdn or not machine_username:
|
||||
logdata = {
|
||||
'realm': bool(domain_realm),
|
||||
'dc_fqdn': bool(dc_fqdn),
|
||||
'machine_username': bool(machine_username)
|
||||
}
|
||||
log('E78', logdata)
|
||||
return None
|
||||
dpapi_blob = protect_secret(
|
||||
descriptor_handle,
|
||||
secret_message,
|
||||
domain=domain_realm,
|
||||
server=dc_fqdn,
|
||||
username=machine_username
|
||||
)
|
||||
# Restore the previous log level
|
||||
logger.setLevel(old_level)
|
||||
# Create full blob with metadata
|
||||
return self._add_blob_metadata(dpapi_blob)
|
||||
|
||||
def _add_blob_metadata(self, dpapi_blob):
|
||||
"""
|
||||
Add metadata to the encrypted password blob.
|
||||
|
||||
Args:
|
||||
dpapi_blob: Encrypted password blob
|
||||
|
||||
Returns:
|
||||
bytes: Complete blob with metadata
|
||||
"""
|
||||
# Convert timestamp to correct format
|
||||
left, right = struct.unpack('<LL', struct.pack('Q', self.current_time_int))
|
||||
packed = struct.pack('<LL', right, left)
|
||||
|
||||
# Add blob length and padding
|
||||
prefix = packed + struct.pack('<i', len(dpapi_blob)) + b'\x00\x00\x00\x00'
|
||||
|
||||
# Combine metadata and encrypted blob
|
||||
return prefix + dpapi_blob
|
||||
|
||||
def _get_windows_realm(self, info):
|
||||
"""Return Kerberos/Windows realm in FQDN upper-case form (e.g., EXAMPLE.COM)."""
|
||||
try:
|
||||
realm = info.get('principal')
|
||||
# If principal like 'HOST/NAME@REALM', extract realm
|
||||
if isinstance(realm, str) and '@' in realm:
|
||||
realm = realm.rsplit('@', 1)[-1]
|
||||
if isinstance(realm, str) and realm:
|
||||
return realm.upper()
|
||||
except Exception:
|
||||
pass
|
||||
return None
|
||||
|
||||
def _get_domain_controller_fqdn(self, info):
|
||||
"""Determine a domain controller FQDN using Kerberos info only."""
|
||||
try:
|
||||
pdc = info.get('pdc_dns_name')
|
||||
if isinstance(pdc, str) and pdc:
|
||||
return pdc
|
||||
except Exception:
|
||||
pass
|
||||
return None
|
||||
|
||||
def _get_machine_account_username(self):
|
||||
"""Return machine account username with trailing '$' (e.g., HOSTNAME$)."""
|
||||
try:
|
||||
name = get_machine_name()
|
||||
if not isinstance(name, str):
|
||||
name = str(name)
|
||||
if not name:
|
||||
return None
|
||||
return name if name.endswith('$') else f'{name}$'
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _change_user_password(self, new_password):
|
||||
"""
|
||||
Change the password for the target user.
|
||||
|
||||
Args:
|
||||
new_password: New password to set
|
||||
|
||||
Returns:
|
||||
bool: True if password was changed successfully, False otherwise
|
||||
"""
|
||||
logdata = {'target_user': self.target_user}
|
||||
try:
|
||||
# Use chpasswd to change the password
|
||||
process = subprocess.Popen(
|
||||
["chpasswd"],
|
||||
stdin=subprocess.PIPE,
|
||||
text=True
|
||||
)
|
||||
process.communicate(f"{self.target_user}:{new_password}")
|
||||
|
||||
# Record the time of change
|
||||
self._write_dconf_pass_last_mod()
|
||||
log('D221', logdata)
|
||||
return True
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('W27', logdata)
|
||||
return False
|
||||
|
||||
def _update_ldap_password(self, encrypted_blob):
|
||||
"""
|
||||
Update the encrypted password and expiration time in LDAP.
|
||||
|
||||
Args:
|
||||
encrypted_blob: Encrypted password blob
|
||||
|
||||
Returns:
|
||||
bool: True if LDAP was updated successfully, False otherwise
|
||||
"""
|
||||
logdata = {'computer_dn': self.computer_dn}
|
||||
try:
|
||||
# Create LDAP modification message
|
||||
mod_msg = ldb.Message()
|
||||
mod_msg.dn = self.computer_dn
|
||||
|
||||
# Update password blob
|
||||
mod_msg[self._ATTR_ENCRYPTED_PASSWORD] = ldb.MessageElement(
|
||||
encrypted_blob,
|
||||
ldb.FLAG_MOD_REPLACE,
|
||||
self._ATTR_ENCRYPTED_PASSWORD
|
||||
)
|
||||
|
||||
# Update expiration time
|
||||
mod_msg[self._ATTR_PASSWORD_EXPIRATION_TIME] = ldb.MessageElement(
|
||||
str(self.expiration_date_int),
|
||||
ldb.FLAG_MOD_REPLACE,
|
||||
self._ATTR_PASSWORD_EXPIRATION_TIME
|
||||
)
|
||||
|
||||
# Perform the LDAP modification
|
||||
self.samdb.modify(mod_msg)
|
||||
log('D226', logdata)
|
||||
return True
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('E75', logdata)
|
||||
return False
|
||||
|
||||
def _should_update_password(self):
|
||||
"""
|
||||
Determine if the password should be updated based on policy.
|
||||
|
||||
Returns:
|
||||
tuple: (bool: update needed, bool: perform post-action)
|
||||
"""
|
||||
# Check if password has expired
|
||||
if not self._is_password_expired():
|
||||
# Password not expired, check if post-login action needed
|
||||
return self._check_post_login_action()
|
||||
|
||||
# Password has expired, update needed
|
||||
return True, False
|
||||
|
||||
def _is_password_expired(self):
|
||||
"""
|
||||
Check if the password has expired according to policy.
|
||||
|
||||
Returns:
|
||||
bool: True if password has expired, False otherwise
|
||||
"""
|
||||
# Case 1: No expiration protection, check LDAP attribute
|
||||
if not self.password_expiration_protection:
|
||||
if self.expiration_time_attr > self.current_time_int:
|
||||
return False
|
||||
# Case 2: With expiration protection, check both policy and LDAP
|
||||
elif self.password_expiration_protection:
|
||||
policy_expiry = self.pass_last_mod_int + (self.password_age_days * int(self._DAY_FLOAT))
|
||||
if policy_expiry > self.current_time_int and self.expiration_time_attr > self.current_time_int:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
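# Illustrative sketch of the expiry arithmetic above (assumptions: pass_last_mod_int,
# expiration_time_attr and current_time_int are Windows FILETIME values, i.e. counts of
# 100-nanosecond intervals since 1601-01-01, and _DAY_FLOAT equals the number of such
# intervals in one day).
TICKS_PER_DAY = 86_400 * 10_000_000            # 864_000_000_000, hypothetical stand-in for _DAY_FLOAT

pass_last_mod = 133_600_000_000_000_000        # hypothetical FILETIME of the last password change
password_age_days = 30
policy_expiry = pass_last_mod + password_age_days * TICKS_PER_DAY
# With expiration protection enabled, the password is considered valid only while the
# current FILETIME is below both policy_expiry and the LDAP expiration attribute.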
def _check_post_login_action(self):
|
||||
"""
|
||||
Check if a post-login password change action should be performed.
|
||||
|
||||
Returns:
|
||||
tuple: (bool: update needed, bool: perform post-action)
|
||||
"""
|
||||
# Check if password was changed after last login
|
||||
if self._get_changed_password_hours_ago() < self.last_login_hours_ago:
|
||||
return False, False
|
||||
|
||||
# Check if enough time has passed since login
|
||||
if self.last_login_hours_ago < self.post_authentication_reset_delay:
|
||||
return False, False
|
||||
|
||||
# Check if action is configured
|
||||
if self.post_authentication_actions == self._ACTION_NONE:
|
||||
return False, False
|
||||
|
||||
# Update needed, determine if post-action required
|
||||
return True, self.post_authentication_actions > self._ACTION_CHANGE_PASSWORD
|
||||
|
||||
def _perform_post_action(self):
|
||||
"""
|
||||
Perform post-password-change action based on configuration.
|
||||
"""
|
||||
if self.post_authentication_actions == self._ACTION_TERMINATE_SESSIONS:
|
||||
self._terminate_user_sessions()
|
||||
elif self.post_authentication_actions == self._ACTION_REBOOT:
|
||||
log('D220')
|
||||
subprocess.run(["reboot"])
|
||||
|
||||
def _terminate_user_sessions(self):
|
||||
"""
|
||||
Terminates all processes associated with the active sessions of the target user.
|
||||
"""
|
||||
# Get active sessions for the target user
|
||||
user_sessions = [user for user in psutil.users() if user.name == self.target_user]
|
||||
logdata = {'target_user': self.target_user}
|
||||
if not user_sessions:
|
||||
log('D227', logdata)
|
||||
return
|
||||
|
||||
# Terminate each session
|
||||
for session in user_sessions:
|
||||
try:
|
||||
# Get the process and terminate it
|
||||
proc = psutil.Process(session.pid)
|
||||
proc.kill() # Send SIGKILL
|
||||
logdata['pid'] = session.pid
|
||||
log('D228', logdata)
|
||||
except (psutil.NoSuchProcess, psutil.AccessDenied) as exc:
|
||||
logdata['pid'] = session.pid
|
||||
logdata['exc'] = exc
|
||||
log('W35', logdata)
|
||||
|
||||
def update_laps_password(self):
|
||||
"""
|
||||
Update the LAPS password if needed based on policy.
|
||||
Checks expiration and login times to determine if update is needed.
|
||||
"""
|
||||
# Check if password update is needed
|
||||
update_needed, perform_post_action = self._should_update_password()
|
||||
|
||||
if not update_needed:
|
||||
log('D229')
|
||||
return False
|
||||
|
||||
# Generate new password
|
||||
password = self._generate_password()
|
||||
|
||||
# Create encrypted password blob
|
||||
encrypted_blob = self._create_password_blob(password)
|
||||
if not encrypted_blob:
|
||||
log('E78')
|
||||
return False
|
||||
|
||||
# Update password in LDAP
|
||||
ldap_success = self._update_ldap_password(encrypted_blob)
|
||||
|
||||
if not ldap_success:
|
||||
return False
|
||||
|
||||
# Change local user password
|
||||
local_success = self._change_user_password(password)
|
||||
|
||||
if not local_success:
|
||||
log('E76')
|
||||
return False
|
||||
|
||||
log('D230')
|
||||
|
||||
# Perform post-action if configured
|
||||
if perform_post_action:
|
||||
self._perform_post_action()
|
||||
|
||||
def apply(self):
|
||||
"""
|
||||
Main entry point for the LAPS applier.
|
||||
"""
|
||||
if self.__module_enabled:
|
||||
log('D218')
|
||||
self.update_laps_password()
|
||||
else:
|
||||
log('D219')
|
||||
|
||||
def _parse_login_time_from_last_line(self, line: str) -> datetime:
|
||||
match_login_dt = re.search(
|
||||
r"((?:Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s+\w{3}\s+\d{1,2}\s+\d{2}:\d{2}:\d{2}\s+\d{4})",
|
||||
line
|
||||
)
|
||||
|
||||
if not match_login_dt:
|
||||
return None
|
||||
|
||||
login_dt_str = match_login_dt.group(1)
|
||||
try:
|
||||
dt_naive = datetime.strptime(login_dt_str, "%a %b %d %H:%M:%S %Y")
|
||||
login_dt_utc: datetime
|
||||
if _DATEUTIL_AVAILABLE:
|
||||
local_tz = tz.tzlocal()
|
||||
dt_local = dt_naive.replace(tzinfo=local_tz)
|
||||
login_dt_utc = dt_local.astimezone(timezone.utc)
|
||||
else:
|
||||
system_local_tz = datetime.now().astimezone().tzinfo
|
||||
if system_local_tz:
|
||||
dt_local = dt_naive.replace(tzinfo=system_local_tz)
|
||||
login_dt_utc = dt_local.astimezone(timezone.utc)
|
||||
else:
|
||||
login_dt_utc = dt_naive.replace(tzinfo=timezone.utc)
|
||||
log('W38')
|
||||
return login_dt_utc
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
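# Illustrative sketch (hypothetical line): with LC_TIME=C, `last -F -w <user>` prints
# entries like the one below; the regex above captures the first full timestamp
# ("Mon Jan 13 09:15:42 2025"), i.e. the login time in the local timezone, which is
# then converted to an aware UTC datetime.
sample_line = "admin    tty2         :0               Mon Jan 13 09:15:42 2025 - Mon Jan 13 17:02:10 2025  (07:46)"
# self._parse_login_time_from_last_line(sample_line) -> aware UTC datetime for 2025-01-13 09:15:42 local time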
def _get_user_login_datetimes_utc(self) -> list[datetime]:
|
||||
command = ["last", "-F", "-w", self.target_user]
|
||||
env = os.environ.copy()
|
||||
env["LC_TIME"] = "C"
|
||||
login_datetimes = []
|
||||
|
||||
try:
|
||||
process = subprocess.run(command, capture_output=True, text=True, check=False, env=env)
|
||||
if process.returncode != 0 and not ("no login record" in process.stderr.lower() or "no users logged in" in process.stdout.lower()):
|
||||
log('W39')
|
||||
return []
|
||||
output_lines = process.stdout.splitlines()
|
||||
except FileNotFoundError:
|
||||
log('W40')
|
||||
return []
|
||||
except Exception as e:
|
||||
log('W41')
|
||||
return []
|
||||
|
||||
for line in output_lines:
|
||||
if not line.strip() or "wtmp begins" in line or "btmp begins" in line:
|
||||
continue
|
||||
if not line.startswith(self.target_user):
|
||||
continue
|
||||
login_dt_utc = self._parse_login_time_from_last_line(line)
|
||||
if login_dt_utc:
|
||||
login_datetimes.append(login_dt_utc)
|
||||
|
||||
return login_datetimes
|
||||
|
||||
def _get_admin_login_hours_ago_after_timestamp(self) -> int:
|
||||
# Convert Windows FileTime to datetime
|
||||
reference_dt_utc = datetime.fromtimestamp(
|
||||
(self.pass_last_mod_int / self._HUNDREDS_OF_NANOSECONDS) - self._EPOCH_TIMESTAMP,
|
||||
tz=timezone.utc
|
||||
)
|
||||
|
||||
if not (reference_dt_utc.tzinfo is timezone.utc or
|
||||
(reference_dt_utc.tzinfo is not None and reference_dt_utc.tzinfo.utcoffset(reference_dt_utc) == timedelta(0))):
|
||||
log('W42')
|
||||
return 0
|
||||
|
||||
user_login_times_utc = self._get_user_login_datetimes_utc()
|
||||
if not user_login_times_utc:
|
||||
log('D232')
|
||||
return 0
|
||||
|
||||
most_recent_login_after_reference_utc = None
|
||||
for login_time_utc in user_login_times_utc[::-1]:
|
||||
if login_time_utc >= reference_dt_utc:
|
||||
most_recent_login_after_reference_utc = login_time_utc
|
||||
break
|
||||
|
||||
if most_recent_login_after_reference_utc:
|
||||
now_utc = datetime.now(timezone.utc)
|
||||
time_delta_seconds = (now_utc - most_recent_login_after_reference_utc).total_seconds()
|
||||
hours_ago = int(time_delta_seconds / 3600.0)
|
||||
log('D233')
|
||||
return hours_ago
|
||||
else:
|
||||
log('D234')
|
||||
return 0
|
||||
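# Illustrative sketch of the conversion used above (assumptions: _HUNDREDS_OF_NANOSECONDS
# == 10**7, the number of 100-ns ticks per second, and _EPOCH_TIMESTAMP == 11644473600,
# the seconds between 1601-01-01 and 1970-01-01).
from datetime import datetime, timezone

def filetime_to_utc(filetime: int) -> datetime:
    return datetime.fromtimestamp(filetime / 10**7 - 11644473600, tz=timezone.utc)

def utc_to_filetime(dt: datetime) -> int:
    return int((dt.timestamp() + 11644473600) * 10**7)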
56
gpoa/frontend/networkshare_applier.py
Normal file
@@ -0,0 +1,56 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .appliers.netshare import Networkshare
|
||||
|
||||
|
||||
class networkshare_applier(applier_frontend):
|
||||
__module_name = 'NetworksharesApplier'
|
||||
__module_name_user = 'NetworksharesApplierUser'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
|
||||
def __init__(self, storage, username = None):
|
||||
self.storage = storage
|
||||
self.username = username
|
||||
self.networkshare_info = self.storage.get_networkshare()
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
self.__module_enabled_user = check_enabled(self.storage, self.__module_name_user, self.__module_experimental)
|
||||
|
||||
def run(self):
|
||||
for networkshare in self.networkshare_info:
|
||||
Networkshare(networkshare, self.username)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D187')
|
||||
self.run()
|
||||
else:
|
||||
log('D181')
|
||||
def admin_context_apply(self):
|
||||
pass
|
||||
|
||||
def user_context_apply(self):
|
||||
if self.__module_enabled_user:
|
||||
log('D188')
|
||||
self.run()
|
||||
else:
|
||||
log('D189')
|
||||
@@ -17,16 +17,13 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import logging
|
||||
import subprocess
|
||||
|
||||
from enum import Enum
|
||||
import subprocess
|
||||
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from util.logging import slogm, log
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
|
||||
class NTPServerType(Enum):
|
||||
@@ -77,8 +74,7 @@ class ntp_applier(applier_frontend):
|
||||
srv = None
|
||||
if server:
|
||||
srv = server.data.rpartition(',')[0]
|
||||
logdata = dict()
|
||||
logdata['srv'] = srv
|
||||
logdata = {'srv': srv}
|
||||
log('D122', logdata)
|
||||
|
||||
start_command = ['/usr/bin/systemctl', 'start', 'chronyd']
|
||||
@@ -92,8 +88,7 @@ class ntp_applier(applier_frontend):
|
||||
proc.wait()
|
||||
|
||||
if srv:
|
||||
logdata = dict()
|
||||
logdata['srv'] = srv
|
||||
logdata = {'srv': srv}
|
||||
log('D124', logdata)
|
||||
|
||||
proc = subprocess.Popen(chrony_disconnect_all)
|
||||
@@ -117,30 +112,32 @@ class ntp_applier(applier_frontend):
|
||||
ntp_server_enabled = self.storage.get_hklm_entry(self.ntp_server_enabled)
|
||||
ntp_client_enabled = self.storage.get_hklm_entry(self.ntp_client_enabled)
|
||||
|
||||
if NTPServerType.NTP.value != server_type.data:
|
||||
logdata = dict()
|
||||
logdata['server_type'] = server_type
|
||||
log('W10', logdata)
|
||||
else:
|
||||
log('D126')
|
||||
if '1' == ntp_server_enabled.data:
|
||||
log('D127')
|
||||
self._start_chrony_client(server_address)
|
||||
self._chrony_as_server()
|
||||
elif '0' == ntp_server_enabled.data:
|
||||
log('D128')
|
||||
self._chrony_as_client()
|
||||
if server_type and server_type.data:
|
||||
if NTPServerType.NTP.value != server_type.data:
|
||||
logdata = {'server_type': server_type}
|
||||
log('W10', logdata)
|
||||
else:
|
||||
log('D129')
|
||||
log('D126')
|
||||
if ntp_server_enabled:
|
||||
if '1' == ntp_server_enabled.data and server_address:
|
||||
log('D127')
|
||||
self._start_chrony_client(server_address)
|
||||
self._chrony_as_server()
|
||||
elif '0' == ntp_server_enabled.data:
|
||||
log('D128')
|
||||
self._chrony_as_client()
|
||||
else:
|
||||
log('D129')
|
||||
|
||||
if '1' == ntp_client_enabled.data:
|
||||
log('D130')
|
||||
self._start_chrony_client()
|
||||
elif '0' == ntp_client_enabled.data:
|
||||
log('D131')
|
||||
self._stop_chrony_client()
|
||||
else:
|
||||
log('D132')
|
||||
elif ntp_client_enabled:
|
||||
if '1' == ntp_client_enabled.data:
|
||||
log('D130')
|
||||
self._start_chrony_client()
|
||||
elif '0' == ntp_client_enabled.data:
|
||||
log('D131')
|
||||
self._stop_chrony_client()
|
||||
else:
|
||||
log('D132')
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -18,22 +18,16 @@
|
||||
|
||||
import logging
|
||||
import subprocess
|
||||
from util.logging import slogm, log
|
||||
from util.rpm import (
|
||||
update
|
||||
, install_rpm
|
||||
, remove_rpm
|
||||
)
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
|
||||
class package_applier(applier_frontend):
|
||||
__module_name = 'PackagesApplier'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__install_key_name = 'Install'
|
||||
__remove_key_name = 'Remove'
|
||||
__sync_key_name = 'Sync'
|
||||
@@ -45,7 +39,7 @@ class package_applier(applier_frontend):
|
||||
install_branch = '{}\\{}%'.format(self.__hklm_branch, self.__install_key_name)
|
||||
remove_branch = '{}\\{}%'.format(self.__hklm_branch, self.__remove_key_name)
|
||||
sync_branch = '{}\\{}%'.format(self.__hklm_branch, self.__sync_key_name)
|
||||
self.fulcmd = list()
|
||||
self.fulcmd = []
|
||||
self.fulcmd.append('/usr/libexec/gpupdate/pkcon_runner')
|
||||
self.fulcmd.append('--loglevel')
|
||||
logger = logging.getLogger()
|
||||
@@ -62,23 +56,20 @@ class package_applier(applier_frontend):
|
||||
)
|
||||
def run(self):
|
||||
for flag in self.sync_packages_setting:
|
||||
if flag.data:
|
||||
self.flagSync = bool(int(flag.data))
|
||||
self.flagSync = bool(flag.data)
|
||||
|
||||
if 0 < self.install_packages_setting.count() or 0 < self.remove_packages_setting.count():
|
||||
if self.flagSync:
|
||||
try:
|
||||
subprocess.check_call(self.fulcmd)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['msg'] = str(exc)
|
||||
logdata = {'msg': str(exc)}
|
||||
log('E55', logdata)
|
||||
else:
|
||||
try:
|
||||
subprocess.Popen(self.fulcmd, close_fds=False)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['msg'] = str(exc)
|
||||
logdata = {'msg': str(exc)}
|
||||
log('E61', logdata)
|
||||
|
||||
def apply(self):
|
||||
@@ -91,21 +82,20 @@ class package_applier(applier_frontend):
|
||||
|
||||
class package_applier_user(applier_frontend):
|
||||
__module_name = 'PackagesApplierUser'
|
||||
__module_experimental = True
|
||||
__module_enabled = False
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__install_key_name = 'Install'
|
||||
__remove_key_name = 'Remove'
|
||||
__sync_key_name = 'Sync'
|
||||
__hkcu_branch = 'Software\\BaseALT\\Policies\\Packages'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self.fulcmd = list()
|
||||
self.fulcmd = []
|
||||
self.fulcmd.append('/usr/libexec/gpupdate/pkcon_runner')
|
||||
self.fulcmd.append('--sid')
|
||||
self.fulcmd.append(self.sid)
|
||||
self.fulcmd.append('--user')
|
||||
self.fulcmd.append(self.username)
|
||||
self.fulcmd.append('--loglevel')
|
||||
logger = logging.getLogger()
|
||||
self.fulcmd.append(str(logger.level))
|
||||
@@ -114,12 +104,12 @@ class package_applier_user(applier_frontend):
|
||||
remove_branch = '{}\\{}%'.format(self.__hkcu_branch, self.__remove_key_name)
|
||||
sync_branch = '{}\\{}%'.format(self.__hkcu_branch, self.__sync_key_name)
|
||||
|
||||
self.install_packages_setting = self.storage.filter_hkcu_entries(self.sid, install_branch)
|
||||
self.remove_packages_setting = self.storage.filter_hkcu_entries(self.sid, remove_branch)
|
||||
self.sync_packages_setting = self.storage.filter_hkcu_entries(self.sid, sync_branch)
|
||||
self.install_packages_setting = self.storage.filter_hkcu_entries(install_branch)
|
||||
self.remove_packages_setting = self.storage.filter_hkcu_entries(remove_branch)
|
||||
self.sync_packages_setting = self.storage.filter_hkcu_entries(sync_branch)
|
||||
self.flagSync = False
|
||||
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_enabled)
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def user_context_apply(self):
|
||||
'''
|
||||
@@ -137,15 +127,13 @@ class package_applier_user(applier_frontend):
|
||||
try:
|
||||
subprocess.check_call(self.fulcmd)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['msg'] = str(exc)
|
||||
logdata = {'msg': str(exc)}
|
||||
log('E60', logdata)
|
||||
else:
|
||||
try:
|
||||
subprocess.Popen(self.fulcmd, close_fds=False)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['msg'] = str(exc)
|
||||
logdata = {'msg': str(exc)}
|
||||
log('E62', logdata)
|
||||
|
||||
def admin_context_apply(self):
|
||||
|
||||
@@ -16,39 +16,79 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
applier_frontend,
|
||||
check_enabled,
|
||||
check_windows_mapping_enabled,
|
||||
)
|
||||
from .appliers.polkit import polkit
|
||||
from util.logging import slogm, log
|
||||
|
||||
import logging
|
||||
|
||||
class polkit_applier(applier_frontend):
|
||||
__module_name = 'PolkitApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__deny_all = 'Software\\Policies\\Microsoft\\Windows\\RemovableStorageDevices\\Deny_All'
|
||||
__deny_all_win = 'Software\\Policies\\Microsoft\\Windows\\RemovableStorageDevices\\Deny_All'
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\Polkit\\'
|
||||
__registry_locks_branch = 'Software\\BaseALT\\Policies\\PolkitLocks\\'
|
||||
__polkit_map = {
|
||||
__deny_all: ['49-gpoa_disk_permissions', { 'Deny_All': 0 }]
|
||||
__deny_all_win: ['49-gpoa_disk_permissions', { 'Deny_All': 0 }],
|
||||
__registry_branch : ['49-alt_group_policy_permissions', {}],
|
||||
__registry_locks_branch : ['47-alt_group_policy_permissions', {}]
|
||||
}
|
||||
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
deny_all = storage.filter_hklm_entries(self.__deny_all).first()
|
||||
deny_all_win = None
|
||||
if check_windows_mapping_enabled(self.storage):
|
||||
deny_all_win = storage.filter_hklm_entries(self.__deny_all_win).first()
|
||||
# Deny_All hook: initialize defaults
|
||||
template_file = self.__polkit_map[self.__deny_all][0]
|
||||
template_vars = self.__polkit_map[self.__deny_all][1]
|
||||
if deny_all:
|
||||
logdata = dict()
|
||||
logdata['Deny_All'] = deny_all.data
|
||||
polkit_filter = '{}%'.format(self.__registry_branch)
|
||||
polkit_locks_filter = '{}%'.format(self.__registry_locks_branch)
|
||||
self.polkit_keys = self.storage.filter_hklm_entries(polkit_filter)
|
||||
self.polkit_locks = self.storage.filter_hklm_entries(polkit_locks_filter)
|
||||
template_file = self.__polkit_map[self.__deny_all_win][0]
|
||||
template_vars = self.__polkit_map[self.__deny_all_win][1]
|
||||
template_file_all = self.__polkit_map[self.__registry_branch][0]
|
||||
template_vars_all = self.__polkit_map[self.__registry_branch][1]
|
||||
template_file_all_lock = self.__polkit_map[self.__registry_locks_branch][0]
|
||||
template_vars_all_lock = self.__polkit_map[self.__registry_locks_branch][1]
|
||||
locks = []
|
||||
for lock in self.polkit_locks:
|
||||
if bool(int(lock.data)):
|
||||
locks.append(lock.valuename)
|
||||
|
||||
dict_lists_rules = {'No': [[], []],
|
||||
'Yes': [[], []],
|
||||
'Auth_self' : [[], []],
|
||||
'Auth_admin': [[], []],
|
||||
'Auth_self_keep': [[], []],
|
||||
'Auth_admin_keep': [[], []]}
|
||||
|
||||
check_and_add_to_list = (lambda it, act: dict_lists_rules[act][0].append(it.valuename)
|
||||
if it.valuename not in locks
|
||||
else dict_lists_rules[act][1].append(it.valuename))
|
||||
|
||||
for it_data in self.polkit_keys:
|
||||
check_and_add_to_list(it_data, it_data.data)
|
||||
|
||||
for key, item in dict_lists_rules.items():
|
||||
self.__polkit_map[self.__registry_branch][1][key] = item[0]
|
||||
self.__polkit_map[self.__registry_locks_branch][1][key] = item[1]
|
||||
|
||||
if deny_all_win:
|
||||
logdata = {}
|
||||
logdata['Deny_All_win'] = deny_all_win.data
|
||||
log('D69', logdata)
|
||||
self.__polkit_map[self.__deny_all][1]['Deny_All'] = deny_all.data
|
||||
self.__polkit_map[self.__deny_all_win][1]['Deny_All'] = deny_all_win.data
|
||||
else:
|
||||
log('D71')
|
||||
self.policies = []
|
||||
self.policies.append(polkit(template_file, template_vars))
|
||||
self.policies.append(polkit(template_file_all, template_vars_all))
|
||||
self.policies.append(polkit(template_file_all_lock, template_vars_all_lock))
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
@@ -70,31 +110,54 @@ class polkit_applier_user(applier_frontend):
|
||||
__module_name = 'PolkitApplierUser'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__deny_all = 'Software\\Policies\\Microsoft\\Windows\\RemovableStorageDevices\\Deny_All'
|
||||
__deny_all_win = 'Software\\Policies\\Microsoft\\Windows\\RemovableStorageDevices\\Deny_All'
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\Polkit\\'
|
||||
__polkit_map = {
|
||||
__deny_all: ['48-gpoa_disk_permissions_user', { 'Deny_All': 0, 'User': '' }]
|
||||
__deny_all_win: ['48-gpoa_disk_permissions_user', { 'Deny_All': 0, 'User': '' }],
|
||||
__registry_branch : ['48-alt_group_policy_permissions_user', {'User': ''}]
|
||||
}
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
|
||||
deny_all = storage.filter_hkcu_entries(self.sid, self.__deny_all).first()
|
||||
deny_all_win = None
|
||||
if check_windows_mapping_enabled(self.storage):
|
||||
deny_all_win = storage.filter_hkcu_entries(self.__deny_all_win).first()
|
||||
polkit_filter = '{}%'.format(self.__registry_branch)
|
||||
self.polkit_keys = self.storage.filter_hkcu_entries(polkit_filter)
|
||||
# Deny_All hook: initialize defaults
|
||||
template_file = self.__polkit_map[self.__deny_all][0]
|
||||
template_vars = self.__polkit_map[self.__deny_all][1]
|
||||
if deny_all:
|
||||
logdata = dict()
|
||||
template_file = self.__polkit_map[self.__deny_all_win][0]
|
||||
template_vars = self.__polkit_map[self.__deny_all_win][1]
|
||||
template_file_all = self.__polkit_map[self.__registry_branch][0]
|
||||
template_vars_all = self.__polkit_map[self.__registry_branch][1]
|
||||
|
||||
dict_lists_rules = {'No': [],
|
||||
'Yes': [],
|
||||
'Auth_self': [],
|
||||
'Auth_admin': [],
|
||||
'Auth_self_keep': [],
|
||||
'Auth_admin_keep': []}
|
||||
|
||||
for it_data in self.polkit_keys:
|
||||
dict_lists_rules[it_data.data].append(it_data.valuename)
|
||||
|
||||
self.__polkit_map[self.__registry_branch][1]['User'] = self.username
|
||||
|
||||
for key, item in dict_lists_rules.items():
|
||||
self.__polkit_map[self.__registry_branch][1][key] = item
|
||||
|
||||
if deny_all_win:
|
||||
logdata = {}
|
||||
logdata['user'] = self.username
|
||||
logdata['Deny_All'] = deny_all.data
|
||||
logdata['Deny_All_win'] = deny_all_win.data
|
||||
log('D70', logdata)
|
||||
self.__polkit_map[self.__deny_all][1]['Deny_All'] = deny_all.data
|
||||
self.__polkit_map[self.__deny_all][1]['User'] = self.username
|
||||
self.__polkit_map[self.__deny_all_win][1]['Deny_All'] = deny_all_win.data
|
||||
self.__polkit_map[self.__deny_all_win][1]['User'] = self.username
|
||||
else:
|
||||
log('D72')
|
||||
self.policies = []
|
||||
self.policies.append(polkit(template_file, template_vars, self.username))
|
||||
self.policies.append(polkit(template_file_all, template_vars_all, self.username))
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
|
||||
153
gpoa/frontend/scripts_applier.py
Normal file
@@ -0,0 +1,153 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .appliers.folder import remove_dir_tree
|
||||
|
||||
|
||||
class scripts_applier(applier_frontend):
|
||||
__module_name = 'ScriptsApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__cache_scripts = '/var/cache/gpupdate_scripts_cache/machine/'
|
||||
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
self.startup_scripts = self.storage.get_scripts('STARTUP')
|
||||
self.shutdown_scripts = self.storage.get_scripts('SHUTDOWN')
|
||||
self.folder_path = Path(self.__cache_scripts)
|
||||
self.__module_enabled = check_enabled(self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def cleaning_cache(self):
|
||||
log('D160')
|
||||
try:
|
||||
remove_dir_tree(self.folder_path, True, True, True,)
|
||||
except FileNotFoundError as exc:
|
||||
log('D154')
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('E64', logdata)
|
||||
|
||||
def filling_cache(self):
|
||||
'''
|
||||
Creating and updating folder directories for scripts and copying them
|
||||
'''
|
||||
self.folder_path.mkdir(parents=True, exist_ok=True)
|
||||
for ts in self.startup_scripts:
|
||||
script_path = os.path.join(self.__cache_scripts, 'STARTUP')
|
||||
install_script(ts, script_path, '700')
|
||||
for ts in self.shutdown_scripts:
|
||||
script_path = os.path.join(self.__cache_scripts, 'SHUTDOWN')
|
||||
install_script(ts, script_path, '700')
|
||||
|
||||
def run(self):
|
||||
self.filling_cache()
|
||||
|
||||
def apply(self):
|
||||
self.cleaning_cache()
|
||||
if self.__module_enabled:
|
||||
log('D156')
|
||||
self.run()
|
||||
else:
|
||||
log('D157')
|
||||
|
||||
class scripts_applier_user(applier_frontend):
|
||||
__module_name = 'ScriptsApplierUser'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__cache_scripts = '/var/cache/gpupdate_scripts_cache/users/'
|
||||
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.logon_scripts = self.storage.get_scripts('LOGON')
|
||||
self.logoff_scripts = self.storage.get_scripts('LOGOFF')
|
||||
self.username = username
|
||||
self.folder_path = Path(self.__cache_scripts + self.username)
|
||||
self.__module_enabled = check_enabled(self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def cleaning_cache(self):
|
||||
log('D161')
|
||||
try:
|
||||
remove_dir_tree(self.folder_path, True, True, True,)
|
||||
except FileNotFoundError as exc:
|
||||
log('D155')
|
||||
except Exception as exc:
|
||||
logdata = {'exc': exc}
|
||||
log('E65', logdata)
|
||||
|
||||
def filling_cache(self):
|
||||
'''
|
||||
Creating and updating folder directories for scripts and copying them
|
||||
'''
|
||||
self.folder_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for ts in self.logon_scripts:
|
||||
script_path = os.path.join(self.__cache_scripts, self.username, 'LOGON')
|
||||
install_script(ts, script_path, '755')
|
||||
for ts in self.logoff_scripts:
|
||||
script_path = os.path.join(self.__cache_scripts, self.username, 'LOGOFF')
|
||||
install_script(ts, script_path, '755')
|
||||
|
||||
def user_context_apply(self):
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
self.filling_cache()
|
||||
|
||||
def admin_context_apply(self):
|
||||
self.cleaning_cache()
|
||||
if self.__module_enabled:
|
||||
log('D158')
|
||||
self.run()
|
||||
else:
|
||||
log('D159')
|
||||
|
||||
def install_script(storage_script_entry, script_dir, access_permissions):
|
||||
'''
|
||||
Copy scripts to specific directories and
|
||||
if given arguments
|
||||
create directories for them and copy them there
|
||||
'''
|
||||
dir_cr = Path(script_dir)
|
||||
dir_cr.mkdir(parents=True, exist_ok=True)
|
||||
if storage_script_entry.number is None:
|
||||
return
|
||||
script_name = str(storage_script_entry.number).zfill(5) + '_' + os.path.basename(storage_script_entry.path)
|
||||
script_file = os.path.join(script_dir, script_name)
|
||||
shutil.copyfile(storage_script_entry.path, script_file)
|
||||
|
||||
os.chmod(script_file, int(access_permissions, base = 8))
|
||||
if storage_script_entry.args:
|
||||
dir_path = script_dir + '/' + script_name + '.arg'
|
||||
dir_arg = Path(dir_path)
|
||||
dir_arg.mkdir(parents=True, exist_ok=True)
|
||||
file_arg = open(dir_path + '/arg', 'w')
|
||||
file_arg.write(storage_script_entry.args)
|
||||
file_arg.close()
|
||||
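# Illustrative sketch (hypothetical entry): for a storage script record with number=3,
# path='/tmp/logon.sh' and args='--quiet', install_script() places the script under the
# given directory with a zero-padded ordering prefix and stores the arguments next to it.
from collections import namedtuple

ScriptEntry = namedtuple('ScriptEntry', ['number', 'path', 'args'])   # stand-in for the storage row
entry = ScriptEntry(3, '/tmp/logon.sh', '--quiet')
# install_script(entry, '/var/cache/gpupdate_scripts_cache/users/jdoe/LOGON', '755') creates:
#   00003_logon.sh            (mode 755)
#   00003_logon.sh.arg/arg    (a file containing '--quiet')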
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,30 +16,26 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
import subprocess
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from gpt.shortcuts import json2sc
|
||||
from gpt.shortcuts import get_ttype, shortcut
|
||||
from util.logging import log
|
||||
from util.util import get_homedir, homedir_exists, string_to_literal_eval
|
||||
from util.windows import expand_windows_var
|
||||
from util.logging import slogm, log
|
||||
from util.util import (
|
||||
get_homedir,
|
||||
homedir_exists
|
||||
)
|
||||
|
||||
def storage_get_shortcuts(storage, sid, username=None):
|
||||
'''
|
||||
Query storage for shortcuts' rows for specified SID.
|
||||
'''
|
||||
shortcut_objs = storage.get_shortcuts(sid)
|
||||
shortcuts = list()
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
for sc_obj in shortcut_objs:
|
||||
sc = json2sc(sc_obj.shortcut)
|
||||
|
||||
def storage_get_shortcuts(storage, username=None, shortcuts_machine=None):
|
||||
'''
|
||||
Query storage for shortcuts' rows for username.
|
||||
'''
|
||||
shortcut_objs = storage.get_shortcuts()
|
||||
shortcuts = []
|
||||
if username and shortcuts_machine:
|
||||
shortcut_objs += shortcuts_machine
|
||||
|
||||
for sc in shortcut_objs:
|
||||
if username:
|
||||
sc.set_expanded_path(expand_windows_var(sc.path, username))
|
||||
shortcuts.append(sc)
|
||||
@@ -55,9 +51,7 @@ def apply_shortcut(shortcut, username=None):
|
||||
dest_abspath = shortcut.dest
|
||||
if not dest_abspath.startswith('/') and not dest_abspath.startswith('%'):
|
||||
dest_abspath = '%HOME%/' + dest_abspath
|
||||
logdata = dict()
|
||||
logdata['shortcut'] = dest_abspath
|
||||
logdata['for'] = username
|
||||
logdata = {'shortcut': dest_abspath, 'for': username}
|
||||
log('D105', logdata)
|
||||
dest_abspath = expand_windows_var(dest_abspath, username).replace('\\', '/') + '.desktop'
|
||||
|
||||
@@ -68,31 +62,24 @@ def apply_shortcut(shortcut, username=None):
|
||||
if dest_abspath.startswith(get_homedir(username)):
|
||||
# Don't try to operate on non-existent directory
|
||||
if not homedir_exists(username):
|
||||
logdata = dict()
|
||||
logdata['user'] = username
|
||||
logdata['dest_abspath'] = dest_abspath
|
||||
logdata = {'user': username, 'dest_abspath': dest_abspath}
|
||||
log('W7', logdata)
|
||||
return None
|
||||
else:
|
||||
logdata = dict()
|
||||
logdata['user'] = username
|
||||
logdata['bad path'] = dest_abspath
|
||||
logdata = {'user': username, 'bad path': dest_abspath}
|
||||
log('W8', logdata)
|
||||
return None
|
||||
|
||||
if '%' in dest_abspath:
|
||||
logdata = dict()
|
||||
logdata['dest_abspath'] = dest_abspath
|
||||
logdata = {'dest_abspath': dest_abspath}
|
||||
log('E53', logdata)
|
||||
return None
|
||||
|
||||
if not dest_abspath.startswith('/'):
|
||||
logdata = dict()
|
||||
logdata['dest_abspath'] = dest_abspath
|
||||
logdata = {'dest_abspath': dest_abspath}
|
||||
log('E54', logdata)
|
||||
return None
|
||||
logdata = dict()
|
||||
logdata['file'] = dest_abspath
|
||||
logdata = {'file': dest_abspath}
|
||||
logdata['with_action'] = shortcut.action
|
||||
log('D106', logdata)
|
||||
shortcut.apply_desktop(dest_abspath)
|
||||
@@ -111,7 +98,7 @@ class shortcut_applier(applier_frontend):
|
||||
)
|
||||
|
||||
def run(self):
|
||||
shortcuts = storage_get_shortcuts(self.storage, self.storage.get_info('machine_sid'))
|
||||
shortcuts = storage_get_shortcuts(self.storage)
|
||||
if shortcuts:
|
||||
for sc in shortcuts:
|
||||
apply_shortcut(sc)
|
||||
@@ -122,9 +109,7 @@ class shortcut_applier(applier_frontend):
|
||||
# /usr/local/share/applications
|
||||
subprocess.check_call(['/usr/bin/update-desktop-database'])
|
||||
else:
|
||||
logdata = dict()
|
||||
logdata['machine_sid'] = self.storage.get_info('machine_sid')
|
||||
log('D100', logdata)
|
||||
log('D100')
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
@@ -137,14 +122,45 @@ class shortcut_applier_user(applier_frontend):
|
||||
__module_name = 'ShortcutsApplierUser'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__REGISTRY_PATH_SHORTCATSMERGE= '/Software/BaseALT/Policies/GPUpdate/ShortcutsMerge'
|
||||
__DCONF_REGISTRY_PATH_PREFERENCES_MACHINE = 'Software/BaseALT/Policies/Preferences/Machine'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.sid = sid
|
||||
self.username = username
|
||||
self.__module_enabled = check_enabled(self.storage, self.__module_name, self.__module_experimental)
|
||||
|
||||
def get_machine_shortcuts(self):
|
||||
result = []
|
||||
try:
|
||||
storage_machine_dict = self.storage.get_dictionary_from_dconf_file_db()
|
||||
machine_shortcuts = storage_machine_dict.get(
|
||||
self.__DCONF_REGISTRY_PATH_PREFERENCES_MACHINE, dict()).get('Shortcuts')
|
||||
shortcut_objs = string_to_literal_eval(machine_shortcuts)
|
||||
for obj in shortcut_objs:
|
||||
shortcut_machine = shortcut(
|
||||
obj.get('dest'),
|
||||
obj.get('path'),
|
||||
obj.get('arguments'),
|
||||
obj.get('name'),
|
||||
obj.get('action'),
|
||||
get_ttype(obj.get('target_type')))
|
||||
shortcut_machine.set_usercontext(1)
|
||||
result.append(shortcut_machine)
|
||||
except:
|
||||
return None
|
||||
return result
|
||||
|
||||
|
||||
|
||||
def check_enabled_shortcuts_merge(self):
|
||||
return self.storage.get_key_value(self.__REGISTRY_PATH_SHORTCATSMERGE)
|
||||
|
||||
def run(self, in_usercontext):
|
||||
shortcuts = storage_get_shortcuts(self.storage, self.sid, self.username)
|
||||
shortcuts_machine = None
|
||||
if self.check_enabled_shortcuts_merge():
|
||||
shortcuts_machine = self.get_machine_shortcuts()
|
||||
shortcuts = storage_get_shortcuts(self.storage, self.username, shortcuts_machine)
|
||||
|
||||
if shortcuts:
|
||||
for sc in shortcuts:
|
||||
@@ -153,8 +169,7 @@ class shortcut_applier_user(applier_frontend):
|
||||
if not in_usercontext and not sc.is_usercontext():
|
||||
apply_shortcut(sc, self.username)
|
||||
else:
|
||||
logdata = dict()
|
||||
logdata['sid'] = self.sid
|
||||
logdata = {'username': self.username}
|
||||
log('D100', logdata)
|
||||
|
||||
def user_context_apply(self):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,24 +16,21 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .applier_frontend import (
|
||||
applier_frontend
|
||||
, check_enabled
|
||||
)
|
||||
from .appliers.systemd import systemd_unit
|
||||
from util.logging import slogm, log
|
||||
from util.logging import log
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .appliers.systemd import systemd_unit
|
||||
|
||||
import logging
|
||||
|
||||
class systemd_applier(applier_frontend):
|
||||
__module_name = 'SystemdApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\SystemdUnits'
|
||||
__registry_branch = 'Software/BaseALT/Policies/SystemdUnits'
|
||||
|
||||
def __init__(self, storage):
|
||||
self.storage = storage
|
||||
self.systemd_unit_settings = self.storage.filter_hklm_entries('Software\\BaseALT\\Policies\\SystemdUnits%')
|
||||
self.systemd_unit_settings = self.storage.filter_hklm_entries(self.__registry_branch)
|
||||
self.units = []
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
@@ -43,23 +40,18 @@ class systemd_applier(applier_frontend):
|
||||
|
||||
def run(self):
|
||||
for setting in self.systemd_unit_settings:
|
||||
valuename = setting.hive_key.rpartition('\\')[2]
|
||||
try:
|
||||
self.units.append(systemd_unit(valuename, int(setting.data)))
|
||||
logdata = dict()
|
||||
logdata['unit'] = format(valuename)
|
||||
self.units.append(systemd_unit(setting.valuename, int(setting.data)))
|
||||
logdata = {'unit': format(setting.valuename)}
|
||||
log('I4', logdata)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['unit'] = format(valuename)
|
||||
logdata['exc'] = exc
|
||||
logdata = {'unit': format(setting.valuename), 'exc': exc}
|
||||
log('I5', logdata)
|
||||
for unit in self.units:
|
||||
try:
|
||||
unit.apply()
|
||||
except:
|
||||
logdata = dict()
|
||||
logdata['unit'] = unit.unit_name
|
||||
logdata = {'unit': unit.unit_name}
|
||||
log('E45', logdata)
|
||||
|
||||
def apply(self):
|
||||
@@ -76,9 +68,9 @@ class systemd_applier_user(applier_frontend):
|
||||
__module_name = 'SystemdApplierUser'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__registry_branch = 'Software\\BaseALT\\Policies\\SystemdUnits'
|
||||
__registry_branch = 'Software/BaseALT/Policies/SystemdUnits'
|
||||
|
||||
def __init__(self, storage, sid, username):
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
|
||||
def user_context_apply(self):
|
||||
|
||||
67
gpoa/frontend/thunderbird_applier.py
Normal file
@@ -0,0 +1,67 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2024-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from util.logging import log
|
||||
from util.util import is_machine_name
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
from .firefox_applier import create_dict
|
||||
|
||||
|
||||
class thunderbird_applier(applier_frontend):
|
||||
__module_name = 'ThunderbirdApplier'
|
||||
__module_experimental = False
|
||||
__module_enabled = True
|
||||
__registry_branch = 'Software/Policies/Mozilla/Thunderbird'
|
||||
__thunderbird_policies = '/etc/thunderbird/policies'
|
||||
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.username = username
|
||||
self._is_machine_name = is_machine_name(self.username)
|
||||
self.policies = {}
|
||||
self.policies_json = {'policies': self.policies}
|
||||
self.thunderbird_keys = self.storage.filter_hklm_entries(self.__registry_branch)
|
||||
self.policies_gen = {}
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
|
||||
def machine_apply(self):
|
||||
'''
|
||||
Write policies.json to Thunderbird.
|
||||
'''
|
||||
self.policies_json = create_dict(self.thunderbird_keys, self.__registry_branch)
|
||||
|
||||
destfile = os.path.join(self.__thunderbird_policies, 'policies.json')
|
||||
os.makedirs(self.__thunderbird_policies, exist_ok=True)
|
||||
with open(destfile, 'w') as f:
|
||||
json.dump(self.policies_json, f)
|
||||
logdata = {'destfile': destfile}
|
||||
log('D212', logdata)
|
||||
|
||||
def apply(self):
|
||||
if self.__module_enabled:
|
||||
log('D213')
|
||||
self.machine_apply()
|
||||
else:
|
||||
log('D214')
|
||||
191
gpoa/frontend/yandex_browser_applier.py
Normal file
@@ -0,0 +1,191 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from util.logging import log
|
||||
from util.util import is_machine_name, string_to_literal_eval
|
||||
|
||||
from .applier_frontend import applier_frontend, check_enabled
|
||||
|
||||
|
||||
class yandex_browser_applier(applier_frontend):
|
||||
__module_name = 'YandexBrowserApplier'
|
||||
__module_enabled = True
|
||||
__module_experimental = False
|
||||
__registry_branch = 'Software/Policies/YandexBrowser'
|
||||
__managed_policies_path = '/etc/opt/yandex/browser/policies/managed'
|
||||
__recommended_policies_path = '/etc/opt/yandex/browser/policies/recommended'
|
||||
|
||||
def __init__(self, storage, username):
|
||||
self.storage = storage
|
||||
self.username = username
|
||||
self._is_machine_name = is_machine_name(self.username)
|
||||
self.yandex_keys = self.storage.filter_hklm_entries(self.__registry_branch)
|
||||
|
||||
self.policies_json = {}
|
||||
|
||||
self.__module_enabled = check_enabled(
|
||||
self.storage
|
||||
, self.__module_name
|
||||
, self.__module_experimental
|
||||
)
|
||||
|
||||
def machine_apply(self):
|
||||
'''
|
||||
Apply machine settings.
|
||||
'''
|
||||
|
||||
destfile = os.path.join(self.__managed_policies_path, 'policies.json')
|
||||
|
||||
try:
|
||||
recommended__json = self.policies_json.pop('Recommended')
|
||||
except:
|
||||
recommended__json = {}
|
||||
|
||||
#Replacing all nested dictionaries with a list
|
||||
dict_item_to_list = (
|
||||
lambda target_dict :
|
||||
{key:[*val.values()] if type(val) == dict else string_to_literal_eval(val) for key,val in target_dict.items()}
|
||||
)
|
||||
os.makedirs(self.__managed_policies_path, exist_ok=True)
|
||||
with open(destfile, 'w') as f:
|
||||
json.dump(dict_item_to_list(self.policies_json), f)
|
||||
logdata = {'destfile': destfile}
|
||||
log('D185', logdata)
|
||||
|
||||
destfilerec = os.path.join(self.__recommended_policies_path, 'policies.json')
|
||||
os.makedirs(self.__recommended_policies_path, exist_ok=True)
|
||||
with open(destfilerec, 'w') as f:
|
||||
json.dump(dict_item_to_list(recommended__json), f)
|
||||
logdata = {'destfilerec': destfilerec}
|
||||
log('D185', logdata)
|
||||
|
||||
|
||||
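# Illustrative sketch of the dict_item_to_list transformation above (hypothetical values):
# one level of nesting is flattened into a list of its values before serialization.
example = {'URLBlocklist': {'1': 'a.example', '2': 'b.example'}}
# dict_item_to_list(example) -> {'URLBlocklist': ['a.example', 'b.example']}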
def apply(self):
|
||||
'''
|
||||
All actual job done here.
|
||||
'''
|
||||
if self.__module_enabled:
|
||||
log('D183')
|
||||
self.create_dict(self.yandex_keys)
|
||||
self.machine_apply()
|
||||
else:
|
||||
log('D184')
|
||||
|
||||
def get_valuename_typeint(self):
|
||||
'''
|
||||
List of keys resulting from parsing chrome.admx with parsing_chrom_admx_intvalues.py
|
||||
'''
|
||||
valuename_typeint = (['DefaultPageSaveSettings',
|
||||
'DefaultUploadSetting',
|
||||
'YandexAutoLaunchMode',
|
||||
'DefaultClipboardSetting',
|
||||
'DefaultFileSystemReadGuardSetting',
|
||||
'DefaultFileSystemWriteGuardSetting',
|
||||
'DefaultImagesSetting',
|
||||
'DefaultJavaScriptJitSetting',
|
||||
'DefaultJavaScriptSetting',
|
||||
'DefaultLocalFontsSetting',
|
||||
'DefaultPopupsSetting',
|
||||
'DefaultSensorsSetting',
|
||||
'DefaultSerialGuardSetting',
|
||||
'DefaultWebBluetoothGuardSetting',
|
||||
'DefaultWebHidGuardSetting',
|
||||
'DefaultWebUsbGuardSetting',
|
||||
'DefaultWindowManagementSetting',
|
||||
'SafeSitesFilterBehavior',
|
||||
'YandexUserFeedbackMode',
|
||||
'TurboSettings',
|
||||
'SidePanelMode',
|
||||
'RestoreOnStartup',
|
||||
'RestoreOnStartup_recommended',
|
||||
'BrowserSwitcherParsingMode',
|
||||
'DefaultNotificationsSetting',
|
||||
'YandexPowerSavingMode',
|
||||
'ChromeVariations',
|
||||
'DeveloperToolsAvailability',
|
||||
'DownloadRestrictions',
|
||||
'NetworkPredictionOptions',
|
||||
'DownloadRestrictions_recommended',
|
||||
'NetworkPredictionOptions_recommended',
|
||||
'DefaultCookiesSetting',
|
||||
'DefaultGeolocationSetting',
|
||||
'IncognitoModeAvailability',
|
||||
'DefaultPrintingSettings',
|
||||
'DefaultPluginsSetting',
|
||||
'DefaultInsecureContentSetting',
|
||||
'PasswordProtectionWarningTrigger',
|
||||
'SafeBrowsingProtectionLevel',
|
||||
'SafeBrowsingProtectionLevel_recommended',
|
||||
'DiskCacheSize'])
|
||||
return valuename_typeint
|
||||
|
||||
|
||||
def get_boolean(self,data):
|
||||
if data in ['0', 'false', None, 'none', 0]:
|
||||
return False
|
||||
if data in ['1', 'true', 1]:
|
||||
return True
|
||||
def get_parts(self, hivekeyname):
|
||||
'''
|
||||
Parse registry path string and leave key parameters
|
||||
'''
|
||||
parts = hivekeyname.replace(self.__registry_branch, '').split('/')
|
||||
return parts
|
||||
|
||||
|
||||
def create_dict(self, yandex_keys):
|
||||
'''
|
||||
Collect dictionaries from registry keys into a general dictionary
|
||||
'''
|
||||
counts = {}
|
||||
#getting the list of keys to read as an integer
|
||||
valuename_typeint = self.get_valuename_typeint()
|
||||
for it_data in yandex_keys:
|
||||
branch = counts
|
||||
try:
|
||||
if type(it_data.data) is bytes:
|
||||
it_data.data = it_data.data.decode(encoding='utf-16').replace('\x00','')
|
||||
parts = self.get_parts(it_data.hive_key)
|
||||
#creating a nested dictionary from elements
|
||||
for part in parts[:-1]:
|
||||
branch = branch.setdefault(part, {})
|
||||
#dictionary key value initialization
|
||||
if it_data.type == 4:
|
||||
if it_data.valuename in valuename_typeint:
|
||||
branch[parts[-1]] = int(it_data.data)
|
||||
else:
|
||||
branch[parts[-1]] = self.get_boolean(it_data.data)
|
||||
else:
|
||||
if it_data.data[0] == '[' and it_data.data[-1] == ']':
|
||||
try:
|
||||
branch[parts[-1]] = json.loads(str(it_data.data))
|
||||
except:
|
||||
branch[parts[-1]] = str(it_data.data).replace('\\', '/')
|
||||
else:
|
||||
branch[parts[-1]] = str(it_data.data).replace('\\', '/')
|
||||
|
||||
except Exception as exc:
|
||||
logdata = {'Exception': exc, 'keyname': it_data.keyname}
|
||||
log('D178', logdata)
|
||||
try:
|
||||
self.policies_json = counts['']
|
||||
except:
|
||||
self.policies_json = {}
|
||||
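# Illustrative sketch of how create_dict() nests a flat registry row (hypothetical key/value):
counts = {}
hive_key = 'Software/Policies/YandexBrowser/Recommended/SidePanelMode'
parts = hive_key.replace('Software/Policies/YandexBrowser', '').split('/')  # ['', 'Recommended', 'SidePanelMode']
branch = counts
for part in parts[:-1]:
    branch = branch.setdefault(part, {})
branch[parts[-1]] = 1
# counts == {'': {'Recommended': {'SidePanelMode': 1}}}; policies_json is then counts[''].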
24
gpoa/frontend_plugins/__init__.py
Normal file
@@ -0,0 +1,24 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Frontend plugins package for GPOA.
|
||||
|
||||
This package contains display policy and other frontend-related plugins
|
||||
that can be dynamically loaded by the plugin manager.
|
||||
"""
|
||||
747
gpoa/frontend_plugins/dm_applier.py
Normal file
@@ -0,0 +1,747 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import re
|
||||
|
||||
# Import only what's absolutely necessary
|
||||
try:
|
||||
from gpoa.frontend.appliers.systemd import systemd_unit
|
||||
except ImportError:
|
||||
# Fallback for testing
|
||||
systemd_unit = None
|
||||
|
||||
try:
|
||||
from gpoa.util.gpoa_ini_parsing import GpoaConfigObj
|
||||
except ImportError:
|
||||
# Fallback for testing
|
||||
GpoaConfigObj = None
|
||||
|
||||
from gpoa.plugin.plugin_base import FrontendPlugin
|
||||
|
||||
|
||||
class DMApplier(FrontendPlugin):
|
||||
"""
|
||||
Display Manager Applier - handles loading of display manager policy keys
|
||||
from registry (machine/user) and user preferences.
|
||||
|
||||
Also includes DMConfigGenerator functionality for display manager configuration.
|
||||
"""
|
||||
|
||||
__registry_path = 'Software/BaseALT/Policies/DisplayManager'
|
||||
domain = 'dm_applier'
|
||||
|
||||
def __init__(self, dict_dconf_db, username=None, fs_file_cache=None):
|
||||
super().__init__(dict_dconf_db, username, fs_file_cache)
|
||||
|
||||
# Initialize plugin-specific logger - locale_dir will be set by plugin_manager
|
||||
self._init_plugin_log(
|
||||
message_dict={
|
||||
'i': {
|
||||
1: "Display Manager Applier initialized",
|
||||
2: "Display manager configuration generated successfully",
|
||||
3: "Display Manager Applier execution started",
|
||||
4: "Display manager configuration completed successfully",
|
||||
5: "LightDM greeter configuration generated successfully",
|
||||
6: "GDM theme modified successfully",
|
||||
7: "GDM backup restored successfully"
|
||||
},
|
||||
'w': {
|
||||
10: "No display managers detected",
|
||||
11: "No background configuration to apply",
|
||||
12: "GDM backup file not found",
|
||||
13: "Backup mode only supported for GDM"
|
||||
},
|
||||
'e': {
|
||||
20: "Configuration file path is invalid or inaccessible",
|
||||
21: "Failed to generate display manager configuration",
|
||||
22: "Unknown display manager config directory",
|
||||
23: "Failed to generate display manager configuration",
|
||||
24: "Display Manager Applier execution failed",
|
||||
25: "GDM theme gresource not found",
|
||||
26: "Failed to extract GDM gresource",
|
||||
27: "Failed to modify GDM background",
|
||||
28: "Failed to recompile GDM gresource",
|
||||
29: "Failed to restore GDM backup"
|
||||
},
|
||||
'd': {
|
||||
30: "Display manager detection details",
|
||||
31: "Display manager configuration details",
|
||||
32: "Removed empty configuration value",
|
||||
33: "GDM background modification details",
|
||||
34: "GDM backup operation details"
|
||||
}
|
||||
},
|
||||
# locale_dir will be set by plugin_manager during plugin loading
|
||||
domain="dm_applier"
|
||||
)
|
||||
|
||||
self.config = self.get_dict_registry(self.__registry_path)
|
||||
|
||||
# DMConfigGenerator configuration - only background settings
|
||||
background_path = self.config.get("Greeter.Background", None)
|
||||
self.backup = background_path == 'backup'
|
||||
if background_path and not self.backup:
|
||||
normalized_path = background_path.replace('\\', '/')
|
||||
fs_file_cache.store(normalized_path)
|
||||
self.dm_config = {
|
||||
"Greeter.Background": fs_file_cache.get(normalized_path)
|
||||
}
|
||||
else:
|
||||
self.dm_config = {
|
||||
"Greeter.Background": ''
|
||||
}
|
||||
|
||||
self.log("I1") # Display Manager Applier initialized
|
||||
|
||||
@classmethod
|
||||
def _get_plugin_prefix(cls):
|
||||
"""Return plugin prefix for translation lookup."""
|
||||
return "dm_applier"
|
||||
|
||||
def _prepare_conf(self, path):
|
||||
"""
|
||||
Load existing file or create new, preserving all comments and structure.
|
||||
"""
|
||||
try:
|
||||
conf = GpoaConfigObj(path, encoding="utf-8", create_empty=True)
|
||||
return conf
|
||||
except Exception as exc:
|
||||
self.log("E20", {"path": path, "error": str(exc)})
|
||||
return None
|
||||
|
||||
def _clean_empty_values(self, section):
|
||||
"""
|
||||
Remove keys with empty values from configuration section.
|
||||
Avoids writing empty values to config files.
|
||||
"""
|
||||
if not section:
|
||||
return
|
||||
|
||||
# Create list of keys to remove (can't modify dict during iteration)
|
||||
keys_to_remove = []
|
||||
for key, value in section.items():
|
||||
# Remove keys with empty strings, None, or whitespace-only values
|
||||
if value is None or (isinstance(value, str) and not value.strip()):
|
||||
keys_to_remove.append(key)
|
||||
|
||||
# Remove the identified keys
|
||||
for key in keys_to_remove:
|
||||
del section[key]
|
||||
self.log("D32", {"key": key, "section": str(section)})
|
||||
|
||||
def generate_lightdm(self, path):
|
||||
if not path or not os.path.isabs(path):
|
||||
self.log("E20", {"path": path}) # Configuration file path is invalid or inaccessible
|
||||
return None
|
||||
|
||||
conf = self._prepare_conf(path)
|
||||
if conf is None:
|
||||
return None
|
||||
section = conf.setdefault("Seat:*", {})
|
||||
|
||||
# Set values only if they have meaningful content (avoid writing empty values)
|
||||
if self.dm_config["Greeter.Background"]:
|
||||
section["greeter-background"] = self.dm_config["Greeter.Background"]
|
||||
|
||||
# Remove any existing empty values that might have been set previously
|
||||
self._clean_empty_values(section)
|
||||
|
||||
        # Add a header comment to the generated file
|
||||
conf.initial_comment = ["# LightDM custom config"]
|
||||
try:
|
||||
conf.write()
|
||||
self.log("I2", {"path": path, "dm": "lightdm"})
|
||||
return conf
|
||||
except Exception as exc:
|
||||
self.log("E21", {"path": path, "error": str(exc)})
|
||||
return None
|
||||
|
||||
|
||||
def generate_gdm(self, path):
|
||||
"""Generate GDM configuration by modifying gnome-shell-theme.gresource"""
|
||||
# Check if we need to restore from backup
|
||||
if self.backup:
|
||||
return self._restore_gdm_backup()
|
||||
|
||||
if not self.dm_config["Greeter.Background"]:
|
||||
return None
|
||||
|
||||
background_path = self.dm_config["Greeter.Background"]
|
||||
|
||||
try:
|
||||
# Find gnome-shell-theme.gresource
|
||||
gresource_path = self._find_gnome_shell_gresource()
|
||||
if not gresource_path:
|
||||
self.log("E25", {"path": "gnome-shell-theme.gresource"})
|
||||
return None
|
||||
|
||||
# Create backup if it doesn't exist
|
||||
backup_path = gresource_path + '.backup'
|
||||
if not os.path.exists(backup_path):
|
||||
shutil.copy2(gresource_path, backup_path)
|
||||
self.log("D34", {"action": "backup_created", "backup": backup_path})
|
||||
|
||||
# Extract gresource to temporary directory
|
||||
temp_dir = self._extract_gresource(gresource_path)
|
||||
if not temp_dir:
|
||||
return None
|
||||
|
||||
# Modify background in theme files
|
||||
modified = self._modify_gdm_background(temp_dir, background_path)
|
||||
if not modified:
|
||||
shutil.rmtree(temp_dir)
|
||||
return None
|
||||
|
||||
# Recompile gresource
|
||||
success = self._recompile_gresource(temp_dir, gresource_path)
|
||||
|
||||
# Clean up temporary directory
|
||||
shutil.rmtree(temp_dir)
|
||||
|
||||
if success:
|
||||
self.log("I6", {"path": gresource_path, "background": background_path})
|
||||
return True
|
||||
else:
|
||||
self.log("E28", {"path": gresource_path})
|
||||
return None
|
||||
|
||||
except Exception as exc:
|
||||
self.log("E21", {"path": "gnome-shell-theme.gresource", "error": str(exc), "dm": "gdm"})
|
||||
return None
|
||||
|
||||
def _find_gnome_shell_gresource(self):
|
||||
"""Find gnome-shell-theme.gresource file"""
|
||||
possible_paths = [
|
||||
"/usr/share/gnome-shell/gnome-shell-theme.gresource",
|
||||
"/usr/share/gnome-shell/theme/gnome-shell-theme.gresource",
|
||||
"/usr/share/gdm/gnome-shell-theme.gresource",
|
||||
"/usr/local/share/gnome-shell/gnome-shell-theme.gresource"
|
||||
]
|
||||
|
||||
for path in possible_paths:
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
return None
|
||||
|
||||
def _restore_gdm_backup(self):
|
||||
"""Restore GDM gresource from backup if available"""
|
||||
try:
|
||||
# Find gnome-shell-theme.gresource
|
||||
gresource_path = self._find_gnome_shell_gresource()
|
||||
if not gresource_path:
|
||||
self.log("E25", {"path": "gnome-shell-theme.gresource"})
|
||||
return None
|
||||
|
||||
backup_path = gresource_path + '.backup'
|
||||
if not os.path.exists(backup_path):
|
||||
self.log("W12", {"backup": backup_path})
|
||||
return None
|
||||
|
||||
# Restore from backup
|
||||
shutil.copy2(backup_path, gresource_path)
|
||||
self.log("I7", {"path": gresource_path})
|
||||
return True
|
||||
|
||||
except Exception as exc:
|
||||
self.log("E29", {"path": "gnome-shell-theme.gresource", "error": str(exc)})
|
||||
return None
|
||||
|
||||
def _extract_gresource(self, gresource_path):
|
||||
"""Extract gresource file to temporary directory by creating XML from gresource list"""
|
||||
try:
|
||||
temp_dir = "/tmp/gdm_theme_" + str(os.getpid())
|
||||
os.makedirs(temp_dir, exist_ok=True)
|
||||
|
||||
# Get list of resources from gresource file
|
||||
cmd_list = ["gresource", "list", gresource_path]
|
||||
result_list = subprocess.run(cmd_list, capture_output=True, text=True)
|
||||
|
||||
if result_list.returncode != 0:
|
||||
self.log("E26", {"path": gresource_path, "error": result_list.stderr})
|
||||
shutil.rmtree(temp_dir)
|
||||
return None
|
||||
|
||||
resource_paths = result_list.stdout.strip().split('\n')
|
||||
if not resource_paths or not resource_paths[0]:
|
||||
self.log("E26", {"path": gresource_path, "error": "No resources found in gresource file"})
|
||||
shutil.rmtree(temp_dir)
|
||||
return None
|
||||
|
||||
# Extract prefix from resource paths (remove filename from first path)
|
||||
first_resource = resource_paths[0]
|
||||
prefix = os.path.dirname(first_resource)
|
||||
|
||||
# Create temporary XML file using proper XML generation
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
# Create root element
|
||||
gresources = ET.Element('gresources')
|
||||
gresource = ET.SubElement(gresources, 'gresource', prefix=prefix)
|
||||
|
||||
for resource_path in resource_paths:
|
||||
# Extract filename from resource path
|
||||
filename = os.path.basename(resource_path)
|
||||
ET.SubElement(gresource, 'file').text = filename
|
||||
|
||||
                # Extract the resource into the temporary directory.
                # Resources may be binary (images) as well as CSS, so keep raw bytes.
                cmd_extract = ["gresource", "extract", gresource_path, resource_path]
                result_extract = subprocess.run(cmd_extract, capture_output=True)

                if result_extract.returncode == 0:
                    # Write extracted content to file unmodified
                    output_path = os.path.join(temp_dir, filename)
                    with open(output_path, 'wb') as f:
                        f.write(result_extract.stdout)
                else:
                    error_text = result_extract.stderr.decode(errors='replace')
                    self.log("E26", {"path": gresource_path, "error": f"Failed to extract {resource_path}: {error_text}"})
|
||||
|
||||
# Write XML file with proper formatting
|
||||
xml_file = os.path.join(temp_dir, "gnome-shell-theme.gresource.xml")
|
||||
tree = ET.ElementTree(gresources)
|
||||
tree.write(xml_file, encoding='utf-8', xml_declaration=True)
|
||||
|
||||
return temp_dir
|
||||
|
||||
except Exception as exc:
|
||||
self.log("E26", {"path": gresource_path, "error": str(exc)})
|
||||
return None
|
||||
|
||||
|
||||
|
||||
def _modify_gdm_background(self, temp_dir, background_path):
|
||||
"""Modify background in GDM theme files - specifically target gnome-shell-dark.css and gnome-shell-light.css"""
|
||||
try:
|
||||
# Target specific CSS files that contain GDM background definitions
|
||||
target_css_files = ["gnome-shell-dark.css", "gnome-shell-light.css"]
|
||||
|
||||
modified = False
|
||||
for css_filename in target_css_files:
|
||||
css_file = os.path.join(temp_dir, css_filename)
|
||||
if not os.path.exists(css_file):
|
||||
continue
|
||||
|
||||
with open(css_file, 'r') as f:
|
||||
content = f.read()
|
||||
|
||||
# Look for background-related CSS rules
|
||||
patterns = [
|
||||
# Handle only #lockDialogGroup background with file://// (4 slashes)
|
||||
r'(#lockDialogGroup\s*{[^}]*background:\s*[^;]*)url\(file:////[^)]+\)',
|
||||
# Handle only #lockDialogGroup background with file:/// (3 slashes)
|
||||
r'(#lockDialogGroup\s*{[^}]*background:\s*[^;]*)url\(file:///[^)]+\)'
|
||||
]
|
||||
|
||||
for pattern in patterns:
|
||||
# Use lambda function to handle optional groups gracefully
|
||||
def replace_url(match):
|
||||
groups = match.groups()
|
||||
return f'{groups[0]}url(file:///{background_path})'
|
||||
|
||||
new_content = re.sub(pattern, replace_url, content)
|
||||
if new_content != content:
|
||||
with open(css_file, 'w') as f:
|
||||
f.write(new_content)
|
||||
modified = True
|
||||
self.log("D33", {"file": css_filename, "background": background_path})
|
||||
break
|
||||
|
||||
return modified
|
||||
|
||||
except Exception as exc:
|
||||
self.log("E27", {"path": temp_dir, "error": str(exc)})
|
||||
return False
|
||||
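    # Illustrative example of the substitution above (sample CSS and cache path
    # are assumptions): a theme rule such as
    #   #lockDialogGroup { background: #2e3436 url(file:///usr/share/backgrounds/default.png); }
    # becomes, for background_path == '/var/cache/gpupdate/wallpaper.png',
    #   #lockDialogGroup { background: #2e3436 url(file:////var/cache/gpupdate/wallpaper.png); }
    # Note the four-slash result, which the first pattern above matches on later runs.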
|
||||
def _recompile_gresource(self, temp_dir, gresource_path):
|
||||
"""Recompile gresource from modified files using temporary XML"""
|
||||
try:
|
||||
# Use the temporary XML file created during extraction
|
||||
xml_file = os.path.join(temp_dir, "gnome-shell-theme.gresource.xml")
|
||||
if not os.path.exists(xml_file):
|
||||
self.log("E28", {"path": gresource_path, "error": "Temporary XML file not found"})
|
||||
return False
|
||||
|
||||
# Recompile gresource - run from temp directory where files are located
|
||||
original_cwd = os.getcwd()
|
||||
try:
|
||||
os.chdir(temp_dir)
|
||||
cmd = ["glib-compile-resources", "--target", gresource_path, "gnome-shell-theme.gresource.xml"]
|
||||
result = subprocess.run(cmd, capture_output=True, text=True)
|
||||
|
||||
if result.returncode == 0:
|
||||
return True
|
||||
else:
|
||||
self.log("E28", {"path": gresource_path, "error": result.stderr})
|
||||
return False
|
||||
finally:
|
||||
os.chdir(original_cwd)
|
||||
|
||||
except Exception as exc:
|
||||
self.log("E28", {"path": gresource_path, "error": str(exc)})
|
||||
return False
|
||||
|
||||
def generate_sddm(self, path):
|
||||
conf = self._prepare_conf(path)
|
||||
if conf is None:
|
||||
return None
|
||||
|
||||
# Set values only if they have meaningful content
|
||||
if self.dm_config["Greeter.Background"]:
|
||||
theme = conf.setdefault("Theme", {})
|
||||
theme["Background"] = self.dm_config["Greeter.Background"]
|
||||
|
||||
# Clean up empty values from all sections
|
||||
self._clean_empty_values(theme)
|
||||
|
||||
conf.write()
|
||||
return conf
|
||||
|
||||
def write_config(self, dm_name, directory):
|
||||
        if self.backup and dm_name != 'gdm':
|
||||
self.log("W13", {"dm": dm_name})
|
||||
return
|
||||
|
||||
os.makedirs(directory, exist_ok=True)
|
||||
filename = os.path.join(directory, "50-custom.conf")
|
||||
gen = {
|
||||
"lightdm": self.generate_lightdm,
|
||||
"gdm": self.generate_gdm,
|
||||
"sddm": self.generate_sddm
|
||||
}.get(dm_name)
|
||||
if not gen:
|
||||
raise ValueError("Unknown DM: {}".format(dm_name))
|
||||
|
||||
result = gen(filename)
|
||||
|
||||
# For LightDM, always generate greeter configuration if needed
|
||||
if dm_name == "lightdm":
|
||||
self._generate_lightdm_greeter_config()
|
||||
|
||||
# Return True if configuration was created or if we have background settings
|
||||
return result is not None or self.dm_config["Greeter.Background"]
|
||||
|
||||
def _detect_lightdm_greeter(self):
|
||||
"""Detect which LightDM greeter is being used"""
|
||||
|
||||
# Check main lightdm.conf
|
||||
lightdm_conf_path = "/etc/lightdm/lightdm.conf"
|
||||
if os.path.exists(lightdm_conf_path):
|
||||
with open(lightdm_conf_path, 'r') as f:
|
||||
for line in f:
|
||||
if line.strip().startswith("greeter-session") and not line.strip().startswith('#'):
|
||||
greeter = line.split('=')[1].strip()
|
||||
self.log("D30", {"greeter": greeter, "source": "lightdm.conf"}) # Greeter detection details
|
||||
return greeter
|
||||
|
||||
# Check lightdm.conf.d directory
|
||||
lightdm_conf_d = "/etc/lightdm/lightdm.conf.d"
|
||||
if os.path.exists(lightdm_conf_d):
|
||||
for file in sorted(os.listdir(lightdm_conf_d)):
|
||||
if file.endswith('.conf'):
|
||||
file_path = os.path.join(lightdm_conf_d, file)
|
||||
with open(file_path, 'r') as f:
|
||||
for line in f:
|
||||
if line.strip().startswith("greeter-session") and not line.strip().startswith('#'):
|
||||
greeter = line.split('=')[1].strip()
|
||||
self.log("D30", {"greeter": greeter, "source": file}) # Greeter detection details
|
||||
return greeter
|
||||
|
||||
# Check default greeter
|
||||
default_greeter_path = "/usr/share/xgreeters/lightdm-default-greeter.desktop"
|
||||
if os.path.exists(default_greeter_path):
|
||||
with open(default_greeter_path, 'r') as f:
|
||||
for line in f:
|
||||
if line.strip().startswith("Exec=") and not line.strip().startswith('#'):
|
||||
greeter_exec = line.split('=')[1].strip()
|
||||
# Extract greeter name from exec path
|
||||
greeter_name = os.path.basename(greeter_exec)
|
||||
self.log("D30", {"greeter": greeter_name, "source": "default-greeter"}) # Greeter detection details
|
||||
return greeter_name
|
||||
|
||||
# Fallback to gtk-greeter (most common)
|
||||
self.log("D30", {"greeter": "lightdm-gtk-greeter", "source": "fallback"}) # Greeter detection details
|
||||
return "lightdm-gtk-greeter"
|
||||
|
||||
def _generate_lightdm_greeter_config(self):
|
||||
"""Generate configuration for the detected LightDM greeter"""
|
||||
|
||||
# Only generate if we have background settings
|
||||
if not self.dm_config["Greeter.Background"]:
|
||||
return
|
||||
|
||||
greeter_name = self._detect_lightdm_greeter()
|
||||
|
||||
# Map greeter names to configuration files and settings
|
||||
greeter_configs = {
|
||||
"lightdm-gtk-greeter": {
|
||||
"config_path": "/etc/lightdm/lightdm-gtk-greeter.conf",
|
||||
"section": "greeter",
|
||||
"background_key": "background",
|
||||
"theme_key": "theme-name"
|
||||
},
|
||||
"lightdm-webkit2-greeter": {
|
||||
"config_path": "/etc/lightdm/lightdm-webkit2-greeter.conf",
|
||||
"section": "greeter",
|
||||
"background_key": "background",
|
||||
"theme_key": "theme"
|
||||
},
|
||||
"lightdm-unity-greeter": {
|
||||
"config_path": "/etc/lightdm/lightdm-unity-greeter.conf",
|
||||
"section": "greeter",
|
||||
"background_key": "background",
|
||||
"theme_key": "theme-name"
|
||||
},
|
||||
"lightdm-slick-greeter": {
|
||||
"config_path": "/etc/lightdm/lightdm-slick-greeter.conf",
|
||||
"section": "greeter",
|
||||
"background_key": "background",
|
||||
"theme_key": "theme-name"
|
||||
},
|
||||
"lightdm-kde-greeter": {
|
||||
"config_path": "/etc/lightdm/lightdm-kde-greeter.conf",
|
||||
"section": "greeter",
|
||||
"background_key": "background",
|
||||
"theme_key": "theme"
|
||||
}
|
||||
}
|
||||
|
||||
config_info = greeter_configs.get(greeter_name)
|
||||
if not config_info:
|
||||
self.log("E22", {"greeter": greeter_name}) # Unknown greeter type
|
||||
return
|
||||
|
||||
        conf = self._prepare_conf(config_info["config_path"])
        if conf is None:
            return

        # Get or create the greeter section
        greeter_section = conf.setdefault(config_info["section"], {})
|
||||
|
||||
# Apply background setting only if it has meaningful content
|
||||
if self.dm_config["Greeter.Background"]:
|
||||
greeter_section[config_info["background_key"]] = self.dm_config["Greeter.Background"]
|
||||
|
||||
# Clean up any empty values in the greeter section
|
||||
self._clean_empty_values(greeter_section)
|
||||
|
||||
conf.initial_comment = [f"# {greeter_name} custom config"]
|
||||
try:
|
||||
conf.write()
|
||||
self.log("I5", {"path": config_info["config_path"], "greeter": greeter_name})
|
||||
except Exception as exc:
|
||||
self.log("E21", {"path": config_info["config_path"], "error": str(exc)})
|
||||
|
||||
def detect_dm(self):
|
||||
"""Detect available and active display managers with fallback methods"""
|
||||
result = {"available": [], "active": None}
|
||||
|
||||
# Check for available DMs using multiple methods
|
||||
available_dms = self._detect_available_dms()
|
||||
result["available"] = available_dms
|
||||
|
||||
# Check active DM with fallbacks
|
||||
active_dm = self._detect_active_dm_with_fallback(available_dms)
|
||||
if active_dm:
|
||||
result["active"] = active_dm
|
||||
|
||||
return result
|
||||
|
||||
def _detect_available_dms(self):
|
||||
"""Detect available display managers using multiple reliable methods"""
|
||||
available = []
|
||||
|
||||
# Method 1: Check systemd unit files
|
||||
systemd_units = [
|
||||
("lightdm", "lightdm.service"),
|
||||
("gdm", "gdm.service"),
|
||||
("gdm", "gdm3.service"),
|
||||
("sddm", "sddm.service")
|
||||
]
|
||||
|
||||
for dm_name, unit_name in systemd_units:
|
||||
if self._check_systemd_unit_exists(unit_name):
|
||||
if dm_name not in available:
|
||||
available.append(dm_name)
|
||||
|
||||
# Method 2: Check binary availability as fallback
|
||||
binary_checks = [
|
||||
("lightdm", ["lightdm"]),
|
||||
("gdm", ["gdm", "gdm3"]),
|
||||
("sddm", ["sddm"])
|
||||
]
|
||||
|
||||
for dm_name, binaries in binary_checks:
|
||||
if dm_name not in available:
|
||||
if any(shutil.which(binary) for binary in binaries):
|
||||
available.append(dm_name)
|
||||
|
||||
return available
|
||||
|
||||
def _detect_active_dm_with_fallback(self, available_dms):
|
||||
"""Detect active DM with multiple fallback methods"""
|
||||
# Primary method: systemd D-Bus
|
||||
active_dm = self._check_systemd_dm()
|
||||
if active_dm:
|
||||
return active_dm
|
||||
|
||||
# Fallback 1: Check running processes
|
||||
active_dm = self._check_running_processes(available_dms)
|
||||
if active_dm:
|
||||
return active_dm
|
||||
|
||||
# Fallback 2: Check display manager symlink
|
||||
active_dm = self._check_display_manager_symlink()
|
||||
if active_dm:
|
||||
return active_dm
|
||||
|
||||
return None
|
||||
|
||||
def _check_systemd_unit_exists(self, unit_name):
|
||||
"""Check if systemd unit exists without requiring D-Bus"""
|
||||
unit_paths = [
|
||||
f"/etc/systemd/system/{unit_name}",
|
||||
f"/usr/lib/systemd/system/{unit_name}",
|
||||
f"/lib/systemd/system/{unit_name}"
|
||||
]
|
||||
return any(os.path.exists(path) for path in unit_paths)
|
||||
|
||||
def _check_running_processes(self, available_dms):
|
||||
"""Check running processes for display manager indicators"""
|
||||
        # psutil is optional here; if it cannot be imported, skip this detection method
        try:
            import psutil
        except ImportError:
            return None

        try:
            for proc in psutil.process_iter(['name']):
                proc_name = (proc.info.get('name') or '').lower()
                for dm in available_dms:
                    if dm in proc_name:
                        return dm
        except psutil.NoSuchProcess:
            pass
        return None
|
||||
|
||||
def _check_display_manager_symlink(self):
|
||||
"""Check /etc/systemd/system/display-manager.service symlink"""
|
||||
symlink_path = "/etc/systemd/system/display-manager.service"
|
||||
if os.path.islink(symlink_path):
|
||||
target = os.readlink(symlink_path)
|
||||
for dm in ["lightdm", "gdm", "sddm"]:
|
||||
if dm in target:
|
||||
return dm
|
||||
return None
|
||||
|
||||
def _check_systemd_dm(self):
|
||||
"""
|
||||
Check active display manager via systemd D-Bus API with improved error handling.
|
||||
Returns dm name (lightdm/gdm/sddm) or None if not active.
|
||||
"""
|
||||
try:
|
||||
dm_unit = systemd_unit("display-manager.service", 1)
|
||||
state = dm_unit._get_state()
|
||||
if state in ("active", "activating"):
|
||||
unit_path = str(dm_unit.unit) # D-Bus object path, e.g. /org/.../lightdm_2eservice
|
||||
# More robust DM name extraction
|
||||
dm_mapping = {
|
||||
"lightdm": "lightdm",
|
||||
"gdm": "gdm",
|
||||
"sddm": "sddm"
|
||||
}
|
||||
for key, dm_name in dm_mapping.items():
|
||||
if key in unit_path.lower():
|
||||
return dm_name
|
||||
except Exception as exc:
|
||||
self.log("D30", {"unit": "display-manager.service", "error": str(exc)})
|
||||
return None
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Main plugin execution method with improved error handling and validation.
|
||||
Detects active display manager and applies configuration.
|
||||
"""
|
||||
self.log("I3")
|
||||
|
||||
try:
|
||||
# Validate configuration before proceeding
|
||||
if not self._validate_configuration():
|
||||
self.log("W11")
|
||||
if not self.backup:
|
||||
return False
|
||||
|
||||
# Detect available and active display managers
|
||||
dm_info = self.detect_dm()
|
||||
self.log("D30", {"dm_info": dm_info})
|
||||
|
||||
if not dm_info["available"]:
|
||||
self.log("W10")
|
||||
return False
|
||||
|
||||
# Use active DM or first available
|
||||
target_dm = dm_info["active"] or (dm_info["available"][0] if dm_info["available"] else None)
|
||||
|
||||
if not target_dm:
|
||||
self.log("W10")
|
||||
return False
|
||||
|
||||
# Determine config directory based on DM
|
||||
config_dir = self._get_config_directory(target_dm)
|
||||
if not config_dir:
|
||||
self.log("E22", {"dm": target_dm})
|
||||
return False
|
||||
|
||||
# Generate configuration
|
||||
result = self.write_config(target_dm, config_dir)
|
||||
|
||||
if result:
|
||||
self.log("I4", {"dm": target_dm, "config_dir": config_dir})
|
||||
return True
|
||||
else:
|
||||
self.log("E23", {"dm": target_dm, "config_dir": config_dir})
|
||||
return False
|
||||
|
||||
except Exception as exc:
|
||||
self.log("E24", {"error": str(exc)})
|
||||
return False
|
||||
|
||||
def _validate_configuration(self):
|
||||
"""Validate DM configuration before applying"""
|
||||
# Check if we have background configuration to apply
|
||||
return bool(self.dm_config["Greeter.Background"])
|
||||
|
||||
def _get_config_directory(self, dm_name):
|
||||
"""Get configuration directory for display manager with fallbacks"""
|
||||
config_dirs = {
|
||||
"lightdm": ["/etc/lightdm/lightdm.conf.d", "/etc/lightdm"],
|
||||
"gdm": ["/etc/gdm/custom.conf.d", "/etc/gdm"],
|
||||
"sddm": ["/etc/sddm.conf.d", "/etc/sddm"]
|
||||
}
|
||||
|
||||
dirs = config_dirs.get(dm_name, [])
|
||||
for config_dir in dirs:
|
||||
if os.path.exists(config_dir):
|
||||
return config_dir
|
||||
|
||||
# If no existing directory, use the primary one
|
||||
return dirs[0] if dirs else None
|
||||
|
||||
|
||||
def create_machine_applier(dict_dconf_db, username=None, fs_file_cache=None):
|
||||
"""Factory function to create DMApplier instance"""
|
||||
return DMApplier(dict_dconf_db, username, fs_file_cache)
|
||||
|
||||
|
||||
def create_user_applier(dict_dconf_db, username=None, fs_file_cache=None):
    """No user-level applier is provided: display manager settings are applied machine-wide."""
    return None
|
||||
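A minimal usage sketch (not part of this diff; the import path, the dconf dictionary
shape and the cache stub are assumptions) showing how the machine-side factory above
is expected to be driven:

    from gpoa.frontend_plugins.dm_applier import create_machine_applier  # assumed module path

    class _NullCache:
        """Stand-in for gpoa's fs_file_cache; not exercised in 'backup' mode."""
        def store(self, path):
            pass
        def get(self, path):
            return path

    dconf_db = {"Software/BaseALT/Policies/DisplayManager": {"Greeter.Background": "backup"}}

    applier = create_machine_applier(dconf_db, username=None, fs_file_cache=_NullCache())
    applier.run()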
93
gpoa/frontend_plugins/locale/ru_RU/LC_MESSAGES/dm_applier.po
Normal file
@@ -0,0 +1,93 @@
|
||||
# Russian translations for dm_applier plugin.
|
||||
# Copyright (C) 2025 BaseALT Ltd.
|
||||
# This file is distributed under the same license as the dm_applier plugin.
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: dm_applier\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2025-01-18 12:00+0000\n"
|
||||
"PO-Revision-Date: 2025-01-18 12:00+0000\n"
|
||||
"Last-Translator: Automatically generated\n"
|
||||
"Language-Team: Russian\n"
|
||||
"Language: ru_RU\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
|
||||
|
||||
# DM Applier messages
|
||||
msgid "Display Manager Applier initialized"
|
||||
msgstr "Инициализирован апплаер дисплей менеджера"
|
||||
|
||||
msgid "Display manager configuration generated successfully"
|
||||
msgstr "Конфигурация дисплей менеджера успешно сгенерирована"
|
||||
|
||||
msgid "Display Manager Applier execution started"
|
||||
msgstr "Запущено выполнение апплаера дисплей менеджера"
|
||||
|
||||
msgid "Display manager configuration completed successfully"
|
||||
msgstr "Конфигурация дисплей менеджера успешно завершена"
|
||||
|
||||
msgid "LightDM greeter configuration generated successfully"
|
||||
msgstr "Конфигурация LightDM greeter успешно сгенерирована"
|
||||
|
||||
msgid "GDM theme modified successfully"
|
||||
msgstr "Тема GDM успешно изменена"
|
||||
|
||||
msgid "GDM backup restored successfully"
|
||||
msgstr "Резервная копия GDM успешно восстановлена"
|
||||
|
||||
msgid "No display managers detected"
|
||||
msgstr "Дисплей менеджеры не обнаружены"
|
||||
|
||||
msgid "No background configuration to apply"
|
||||
msgstr "Нет конфигурации фона для применения"
|
||||
|
||||
msgid "GDM backup file not found"
|
||||
msgstr "Резервная копия GDM не найдена"
|
||||
|
||||
msgid "Backup mode only supported for GDM"
|
||||
msgstr "Режим восстановления поддерживается только для GDM"
|
||||
|
||||
msgid "Configuration file path is invalid or inaccessible"
|
||||
msgstr "Путь к файлу конфигурации недействителен или недоступен"
|
||||
|
||||
msgid "Failed to generate display manager configuration"
|
||||
msgstr "Не удалось сгенерировать конфигурацию дисплей менеджера"
|
||||
|
||||
msgid "Unknown display manager config directory"
|
||||
msgstr "Неизвестный каталог конфигурации дисплей менеджера"
|
||||
|
||||
msgid "Display Manager Applier execution failed"
|
||||
msgstr "Выполнение апплаера дисплей менеджера завершилось ошибкой"
|
||||
|
||||
msgid "GDM theme gresource not found"
|
||||
msgstr "GDM тема gresource не найдена"
|
||||
|
||||
msgid "Failed to extract GDM gresource"
|
||||
msgstr "Не удалось извлечь GDM gresource"
|
||||
|
||||
msgid "Failed to modify GDM background"
|
||||
msgstr "Не удалось изменить фон GDM"
|
||||
|
||||
msgid "Failed to recompile GDM gresource"
|
||||
msgstr "Не удалось перекомпилировать GDM gresource"
|
||||
|
||||
msgid "Failed to restore GDM backup"
|
||||
msgstr "Не удалось восстановить резервную копию GDM"
|
||||
|
||||
msgid "Display manager detection details"
|
||||
msgstr "Детали обнаружения дисплей менеджера"
|
||||
|
||||
msgid "Display manager configuration details"
|
||||
msgstr "Детали конфигурации дисплей менеджера"
|
||||
|
||||
msgid "Removed empty configuration value"
|
||||
msgstr "Удалено пустое значение конфигурации"
|
||||
|
||||
msgid "GDM background modification details"
|
||||
msgstr "Детали изменения фона GDM"
|
||||
|
||||
msgid "GDM backup operation details"
|
||||
msgstr "Детали операции резервного копирования GDM"
|
||||
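A minimal sketch of how these catalog entries are resolved at runtime (standard
gettext behaviour; the locale directory below is an assumption, and the .po file
must first be compiled into dm_applier.mo):

    import gettext

    t = gettext.translation('dm_applier',
                            localedir='gpoa/frontend_plugins/locale',
                            languages=['ru_RU'], fallback=True)
    print(t.gettext("Display Manager Applier initialized"))
    # -> "Инициализирован апплаер дисплей менеджера" once the catalog is compiled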
16
gpoa/gpoa
@@ -23,10 +23,11 @@ import signal
|
||||
import gettext
|
||||
import locale
|
||||
|
||||
from backend import backend_factory
|
||||
from backend import backend_factory, save_dconf
|
||||
from frontend.frontend_manager import frontend_manager, determine_username
|
||||
from plugin import plugin_manager
|
||||
from gpoa.plugin import plugin_manager
|
||||
from messages import message_with_code
|
||||
from storage import Dconf_registry
|
||||
|
||||
from util.util import get_machine_name
|
||||
from util.users import (
|
||||
@@ -61,6 +62,9 @@ def parse_arguments():
|
||||
arguments.add_argument('--list-backends',
|
||||
action='store_true',
|
||||
help='Show list of available backends')
|
||||
arguments.add_argument('--force',
|
||||
action='store_true',
|
||||
help='Force GPT download')
|
||||
arguments.add_argument('--loglevel',
|
||||
type=int,
|
||||
default=4,
|
||||
@@ -120,7 +124,7 @@ class gpoa_controller:
|
||||
print('local')
|
||||
print('samba')
|
||||
return
|
||||
self.start_plugins()
|
||||
Dconf_registry._force = self.__args.force
|
||||
self.start_backend()
|
||||
|
||||
def start_backend(self):
|
||||
@@ -148,6 +152,7 @@ class gpoa_controller:
|
||||
try:
|
||||
back.retrieve_and_store()
|
||||
# Start frontend only on successful backend finish
|
||||
save_dconf(self.username, self.is_machine, nodomain)
|
||||
self.start_frontend()
|
||||
except Exception as exc:
|
||||
logdata = dict({'message': str(exc)})
|
||||
@@ -159,6 +164,7 @@ class gpoa_controller:
|
||||
einfo = geterr()
|
||||
logdata.update(einfo)
|
||||
log('E3', logdata)
|
||||
self.start_plugins(self.is_machine, self.username)
|
||||
|
||||
def start_frontend(self):
|
||||
'''
|
||||
@@ -174,12 +180,12 @@ class gpoa_controller:
|
||||
logdata.update(einfo)
|
||||
log('E4', logdata)
|
||||
|
||||
def start_plugins(self):
|
||||
def start_plugins(self, is_machine, username):
|
||||
'''
|
||||
Function to start supplementary facilities
|
||||
'''
|
||||
if not self.__args.noplugins:
|
||||
pm = plugin_manager()
|
||||
pm = plugin_manager(is_machine, username)
|
||||
pm.run()
|
||||
|
||||
def main():
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,12 +16,15 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
from base64 import b64decode
|
||||
from Crypto.Cipher import AES
|
||||
import json
|
||||
|
||||
from Crypto.Cipher import AES
|
||||
from util.xml import get_xml_root
|
||||
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
|
||||
def decrypt_pass(cpassword):
|
||||
'''
|
||||
AES key for cpassword decryption: http://msdn.microsoft.com/en-us/library/2c15cbf0-f086-4c74-8b70-1f2fa45dd4be%28v=PROT.13%29#endNote2
|
||||
@@ -47,7 +50,7 @@ def decrypt_pass(cpassword):
|
||||
# decrypt() returns byte array which is immutable and we need to
|
||||
# strip padding, then convert UTF-16LE to UTF-8
|
||||
binstr = decrypter.decrypt(password)
|
||||
by = list()
|
||||
by = []
|
||||
for item in binstr:
|
||||
if item != 16:
|
||||
by.append(item)
|
||||
@@ -57,7 +60,7 @@ def decrypt_pass(cpassword):
|
||||
return utf8str.decode()
|
||||
|
||||
def read_drives(drives_file):
|
||||
drives = list()
|
||||
drives = []
|
||||
|
||||
for drive in get_xml_root(drives_file):
|
||||
drive_obj = drivemap()
|
||||
@@ -67,14 +70,20 @@ def read_drives(drives_file):
|
||||
drive_obj.set_pass(decrypt_pass(props.get('cpassword')))
|
||||
drive_obj.set_dir(props.get('letter'))
|
||||
drive_obj.set_path(props.get('path'))
|
||||
drive_obj.set_action(props.get('action'))
|
||||
drive_obj.set_thisDrive(props.get('thisDrive'))
|
||||
drive_obj.set_allDrives(props.get('allDrives'))
|
||||
drive_obj.set_label(props.get('label'))
|
||||
drive_obj.set_persistent(props.get('persistent'))
|
||||
drive_obj.set_useLetter(props.get('useLetter'))
|
||||
|
||||
drives.append(drive_obj)
|
||||
|
||||
return drives
|
||||
|
||||
def merge_drives(storage, sid, drive_objects, policy_name):
|
||||
def merge_drives(storage, drive_objects, policy_name):
|
||||
for drive in drive_objects:
|
||||
storage.add_drive(sid, drive, policy_name)
|
||||
storage.add_drive(drive, policy_name)
|
||||
|
||||
def json2drive(json_str):
|
||||
json_obj = json.loads(json_str)
|
||||
@@ -87,12 +96,18 @@ def json2drive(json_str):
|
||||
|
||||
return drive_obj
|
||||
|
||||
class drivemap:
|
||||
class drivemap(DynamicAttributes):
|
||||
def __init__(self):
|
||||
self.login = None
|
||||
self.password = None
|
||||
self.dir = None
|
||||
self.path = None
|
||||
self.action = None
|
||||
self.thisDrive = None
|
||||
self.allDrives = None
|
||||
self.label = None
|
||||
self.persistent = None
|
||||
self.useLetter = None
|
||||
|
||||
def set_login(self, username):
|
||||
self.login = username
|
||||
@@ -110,14 +125,32 @@ class drivemap:
|
||||
def set_path(self, path):
|
||||
self.path = path
|
||||
|
||||
def set_action(self, action):
|
||||
self.action = action
|
||||
|
||||
def set_thisDrive(self, thisDrive):
|
||||
self.thisDrive = thisDrive
|
||||
|
||||
def set_allDrives(self, allDrives):
|
||||
self.allDrives = allDrives
|
||||
|
||||
def set_label(self, label):
|
||||
self.label = label
|
||||
|
||||
def set_persistent(self, persistent):
|
||||
self.persistent = persistent
|
||||
|
||||
def set_useLetter(self, useLetter):
|
||||
self.useLetter = useLetter
|
||||
|
||||
def to_json(self):
|
||||
drive = dict()
|
||||
drive = {}
|
||||
drive['login'] = self.login
|
||||
drive['password'] = self.password
|
||||
drive['dir'] = self.dir
|
||||
drive['path'] = self.path
|
||||
|
||||
contents = dict()
|
||||
contents = {}
|
||||
contents['drive'] = drive
|
||||
|
||||
return json.dumps(contents)
|
||||
|
||||
57
gpoa/gpt/dynamic_attributes.py
Normal file
@@ -0,0 +1,57 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2024 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class DynamicAttributes:
|
||||
def __init__(self, **kwargs):
|
||||
self.policy_name = None
|
||||
for key, value in kwargs.items():
|
||||
self.__setattr__(key, value)
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
if isinstance(value, Enum):
|
||||
value = str(value)
|
||||
if isinstance(value, str):
|
||||
for q in ["'", "\""]:
|
||||
                if q in value:
|
||||
value = value.replace(q, "″")
|
||||
self.__dict__[key] = value
|
||||
|
||||
def items(self):
|
||||
return self.__dict__.items()
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__dict__.items())
|
||||
|
||||
def get_original_value(self, key):
|
||||
value = self.__dict__.get(key)
|
||||
if isinstance(value, str):
|
||||
value = value.replace("″", "'")
|
||||
return value
|
||||
|
||||
class RegistryKeyMetadata(DynamicAttributes):
|
||||
def __init__(self, policy_name, type, is_list=None, mod_previous_value=None):
|
||||
self.policy_name = policy_name
|
||||
self.type = type
|
||||
self.reloaded_with_policy_key = None
|
||||
self.is_list = is_list
|
||||
self.mod_previous_value = mod_previous_value
|
||||
|
||||
def __repr__(self):
|
||||
return str(dict(self))
|
||||
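A short usage sketch for the new DynamicAttributes base class (illustrative only;
the import path depends on how gpoa is laid out on sys.path): quotes in string
values are replaced with ″ on assignment, and get_original_value() reverses it.

    from gpt.dynamic_attributes import DynamicAttributes  # assumed import path

    attrs = DynamicAttributes(path="C:\\Users\\o'neil")
    print(attrs.path)                        # C:\Users\o″neil
    print(attrs.get_original_value("path"))  # C:\Users\o'neil
    print(dict(attrs))                       # {'policy_name': None, 'path': 'C:\\Users\\o″neil'}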
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -18,48 +18,30 @@
|
||||
|
||||
from util.xml import get_xml_root
|
||||
|
||||
from enum import Enum
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
class FileAction(Enum):
|
||||
CREATE = 'C'
|
||||
REPLACE = 'R'
|
||||
UPDATE = 'U'
|
||||
DELETE = 'D'
|
||||
|
||||
|
||||
def action_letter2enum(letter):
|
||||
if letter in ['C', 'R', 'U', 'D']:
|
||||
if letter == 'C': return FileAction.CREATE
|
||||
if letter == 'R': return FileAction.REPLACE
|
||||
if letter == 'U': return FileAction.UPDATE
|
||||
if letter == 'D': return FileAction.DELETE
|
||||
|
||||
return FileAction.CREATE
|
||||
|
||||
def read_envvars(envvars_file):
|
||||
variables = list()
|
||||
variables = []
|
||||
|
||||
for var in get_xml_root(envvars_file):
|
||||
props = var.find('Properties')
|
||||
name = props.get('name')
|
||||
value = props.get('value')
|
||||
var_obj = envvar(name, value)
|
||||
var_obj.set_action(action_letter2enum(props.get('action', default='C')))
|
||||
action = props.get('action', default='C')
|
||||
var_obj = envvar(name, value, action)
|
||||
|
||||
variables.append(var_obj)
|
||||
|
||||
return variables
|
||||
|
||||
def merge_envvars(storage, sid, envvar_objects, policy_name):
|
||||
def merge_envvars(storage, envvar_objects, policy_name):
|
||||
for envv in envvar_objects:
|
||||
storage.add_envvar(sid, envv, policy_name)
|
||||
storage.add_envvar(envv, policy_name)
|
||||
|
||||
class envvar:
|
||||
def __init__(self, name, value):
|
||||
class envvar(DynamicAttributes):
|
||||
def __init__(self, name, value, action):
|
||||
self.name = name
|
||||
self.value = value
|
||||
self.action = FileAction.CREATE
|
||||
|
||||
def set_action(self, action):
|
||||
self.action = action
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -18,21 +18,45 @@
|
||||
|
||||
from util.xml import get_xml_root
|
||||
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
|
||||
def read_files(filesxml):
|
||||
files = list()
|
||||
files = []
|
||||
|
||||
for fil in get_xml_root(filesxml):
|
||||
fil_obj = fileentry()
|
||||
|
||||
props = fil.find('Properties')
|
||||
fil_obj = fileentry(props.get('fromPath'))
|
||||
fil_obj.set_action(props.get('action', default='C'))
|
||||
fil_obj.set_target_path(props.get('targetPath', default=None))
|
||||
fil_obj.set_read_only(props.get('readOnly', default=None))
|
||||
fil_obj.set_archive(props.get('archive', default=None))
|
||||
fil_obj.set_hidden(props.get('hidden', default=None))
|
||||
fil_obj.set_suppress(props.get('suppress', default=None))
|
||||
fil_obj.set_executable(props.get('executable', default=None))
|
||||
files.append(fil_obj)
|
||||
|
||||
return files
|
||||
|
||||
def merge_files(storage, sid, file_objects, policy_name):
|
||||
def merge_files(storage, file_objects, policy_name):
|
||||
for fileobj in file_objects:
|
||||
pass
|
||||
storage.add_file(fileobj, policy_name)
|
||||
|
||||
class fileentry:
|
||||
def __init__(self):
|
||||
pass
|
||||
class fileentry(DynamicAttributes):
|
||||
def __init__(self, fromPath):
|
||||
self.fromPath = fromPath
|
||||
|
||||
def set_action(self, action):
|
||||
self.action = action
|
||||
def set_target_path(self, targetPath):
|
||||
self.targetPath = targetPath
|
||||
def set_read_only(self, readOnly):
|
||||
self.readOnly = readOnly
|
||||
def set_archive(self, archive):
|
||||
self.archive = archive
|
||||
def set_hidden(self, hidden):
|
||||
self.hidden = hidden
|
||||
def set_suppress(self, suppress):
|
||||
self.suppress = suppress
|
||||
def set_executable(self, executable):
|
||||
self.executable = executable
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -17,27 +17,9 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from enum import Enum
|
||||
|
||||
|
||||
from util.xml import get_xml_root
|
||||
|
||||
|
||||
class FileAction(Enum):
|
||||
CREATE = 'C'
|
||||
REPLACE = 'R'
|
||||
UPDATE = 'U'
|
||||
DELETE = 'D'
|
||||
|
||||
|
||||
def action_letter2enum(letter):
|
||||
if letter in ['C', 'R', 'U', 'D']:
|
||||
if letter == 'C': return FileAction.CREATE
|
||||
if letter == 'R': return FileAction.REPLACE
|
||||
if letter == 'U': return FileAction.UPDATE
|
||||
if letter == 'D': return FileAction.DELETE
|
||||
|
||||
return FileAction.CREATE
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
|
||||
def action_enum2letter(enumitem):
|
||||
@@ -57,32 +39,36 @@ def folder_int2bool(val):
|
||||
|
||||
|
||||
def read_folders(folders_file):
|
||||
folders = list()
|
||||
folders = []
|
||||
|
||||
for fld in get_xml_root(folders_file):
|
||||
props = fld.find('Properties')
|
||||
fld_obj = folderentry(props.get('path'))
|
||||
fld_obj.set_action(action_letter2enum(props.get('action', default='C')))
|
||||
path = props.get('path')
|
||||
action = props.get('action', default='C')
|
||||
fld_obj = folderentry(path, action)
|
||||
fld_obj.set_delete_folder(folder_int2bool(props.get('deleteFolder', default=1)))
|
||||
fld_obj.set_delete_sub_folders(folder_int2bool(props.get('deleteSubFolders', default=1)))
|
||||
fld_obj.set_delete_files(folder_int2bool(props.get('deleteFiles', default=1)))
|
||||
fld_obj.set_hidden_folder(folder_int2bool(props.get('hidden', default=0)))
|
||||
|
||||
folders.append(fld_obj)
|
||||
|
||||
|
||||
return folders
|
||||
|
||||
def merge_folders(storage, sid, folder_objects, policy_name):
|
||||
def merge_folders(storage, folder_objects, policy_name):
|
||||
for folder in folder_objects:
|
||||
storage.add_folder(sid, folder, policy_name)
|
||||
storage.add_folder(folder, policy_name)
|
||||
|
||||
|
||||
class folderentry:
|
||||
def __init__(self, path):
|
||||
class folderentry(DynamicAttributes):
|
||||
def __init__(self, path, action):
|
||||
self.path = path
|
||||
self.action = FileAction.CREATE
|
||||
self.action = action
|
||||
self.delete_folder = False
|
||||
self.delete_sub_folders = False
|
||||
self.delete_files = False
|
||||
self.hidden_folder = False
|
||||
|
||||
def set_action(self, action):
|
||||
self.action = action
|
||||
@@ -96,3 +82,5 @@ class folderentry:
|
||||
def set_delete_files(self, del_bool):
|
||||
self.delete_files = del_bool
|
||||
|
||||
def set_hidden_folder(self, hid_bool):
|
||||
self.hidden_folder = hid_bool
|
||||
49
gpoa/gpt/gpo_dconf_mapping.py
Normal file
@@ -0,0 +1,49 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2024 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
|
||||
class GpoInfoDconf(DynamicAttributes):
|
||||
_counter = 0
|
||||
def __init__(self, gpo) -> None:
|
||||
GpoInfoDconf._counter += 1
|
||||
self.counter = GpoInfoDconf._counter
|
||||
self.display_name = None
|
||||
self.name = None
|
||||
self.version = None
|
||||
self.link = None
|
||||
self._fill_attributes(gpo)
|
||||
|
||||
def _fill_attributes(self, gpo):
|
||||
try:
|
||||
self.display_name = gpo.display_name
|
||||
except:
|
||||
self.display_name = "Unknown"
|
||||
try:
|
||||
self.name = gpo.name
|
||||
except:
|
||||
self.name = "Unknown"
|
||||
try:
|
||||
self.version = gpo.version
|
||||
except:
|
||||
self.version = "Unknown"
|
||||
try:
|
||||
self.link = gpo.link
|
||||
except:
|
||||
self.link = "Unknown"
|
||||
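A brief illustration of GpoInfoDconf behaviour (hypothetical GPO stand-in; only the
four attributes read above matter): missing attributes fall back to "Unknown" and
each instance receives an increasing counter.

    class FakeGpo:
        display_name = "Default Domain Policy"
        name = "{31B2F340-016D-11D2-945F-00C04FB984F9}"
        # 'version' and 'link' deliberately absent

    info = GpoInfoDconf(FakeGpo())   # GpoInfoDconf as defined above
    print(info.counter)              # 1, then 2, 3, ... for later GPOs
    print(info.display_name)         # Default Domain Policy
    print(info.version)              # Unknown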
236
gpoa/gpt/gpt.py
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,63 +16,30 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from enum import Enum, unique
|
||||
import os
|
||||
from pathlib import Path
|
||||
from enum import Enum, unique
|
||||
|
||||
from samba.gp_parse.gp_pol import GPPolParser
|
||||
|
||||
from storage import registry_factory
|
||||
|
||||
from .polfile import (
|
||||
read_polfile
|
||||
, merge_polfile
|
||||
)
|
||||
from .shortcuts import (
|
||||
read_shortcuts
|
||||
, merge_shortcuts
|
||||
)
|
||||
from .services import (
|
||||
read_services
|
||||
, merge_services
|
||||
)
|
||||
from .printers import (
|
||||
read_printers
|
||||
, merge_printers
|
||||
)
|
||||
from .inifiles import (
|
||||
read_inifiles
|
||||
, merge_inifiles
|
||||
)
|
||||
from .folders import (
|
||||
read_folders
|
||||
, merge_folders
|
||||
)
|
||||
from .files import (
|
||||
read_files
|
||||
, merge_files
|
||||
)
|
||||
from .envvars import (
|
||||
read_envvars
|
||||
, merge_envvars
|
||||
)
|
||||
from .drives import (
|
||||
read_drives
|
||||
, merge_drives
|
||||
)
|
||||
from .tasks import (
|
||||
read_tasks
|
||||
, merge_tasks
|
||||
)
|
||||
|
||||
from storage.dconf_registry import add_to_dict
|
||||
import util
|
||||
import util.preg
|
||||
from util.paths import (
|
||||
local_policy_path,
|
||||
cache_dir,
|
||||
local_policy_cache
|
||||
)
|
||||
from util.logging import log
|
||||
from util.paths import cache_dir, local_policy_cache, local_policy_path
|
||||
import util.preg
|
||||
|
||||
from .drives import merge_drives, read_drives
|
||||
from .envvars import merge_envvars, read_envvars
|
||||
from .files import merge_files, read_files
|
||||
from .folders import merge_folders, read_folders
|
||||
from .inifiles import merge_inifiles, read_inifiles
|
||||
from .networkshares import merge_networkshares, read_networkshares
|
||||
from .polfile import merge_polfile, read_polfile
|
||||
from .printers import merge_printers, read_printers
|
||||
from .scriptsini import merge_scripts, read_scripts
|
||||
from .services import merge_services, read_services
|
||||
from .shortcuts import merge_shortcuts, read_shortcuts
|
||||
from .tasks import merge_tasks, read_tasks
|
||||
|
||||
|
||||
@unique
|
||||
@@ -87,6 +54,8 @@ class FileType(Enum):
|
||||
INIFILES = 'inifiles.xml'
|
||||
SERVICES = 'services.xml'
|
||||
PRINTERS = 'printers.xml'
|
||||
SCRIPTS = 'scripts.ini'
|
||||
NETWORKSHARES = 'networkshares.xml'
|
||||
|
||||
def get_preftype(path_to_file):
|
||||
fpath = Path(path_to_file)
|
||||
@@ -100,7 +69,7 @@ def get_preftype(path_to_file):
|
||||
return None
|
||||
|
||||
def pref_parsers():
|
||||
parsers = dict()
|
||||
parsers = {}
|
||||
|
||||
parsers[FileType.PREG] = read_polfile
|
||||
parsers[FileType.SHORTCUTS] = read_shortcuts
|
||||
@@ -112,6 +81,8 @@ def pref_parsers():
|
||||
parsers[FileType.INIFILES] = read_inifiles
|
||||
parsers[FileType.SERVICES] = read_services
|
||||
parsers[FileType.PRINTERS] = read_printers
|
||||
parsers[FileType.SCRIPTS] = read_scripts
|
||||
parsers[FileType.NETWORKSHARES] = read_networkshares
|
||||
|
||||
return parsers
|
||||
|
||||
@@ -120,7 +91,7 @@ def get_parser(preference_type):
|
||||
return parsers[preference_type]
|
||||
|
||||
def pref_mergers():
|
||||
mergers = dict()
|
||||
mergers = {}
|
||||
|
||||
mergers[FileType.PREG] = merge_polfile
|
||||
mergers[FileType.SHORTCUTS] = merge_shortcuts
|
||||
@@ -132,6 +103,8 @@ def pref_mergers():
|
||||
mergers[FileType.INIFILES] = merge_inifiles
|
||||
mergers[FileType.SERVICES] = merge_services
|
||||
mergers[FileType.PRINTERS] = merge_printers
|
||||
mergers[FileType.SCRIPTS] = merge_scripts
|
||||
mergers[FileType.NETWORKSHARES] = merge_networkshares
|
||||
|
||||
return mergers
|
||||
|
||||
@@ -140,20 +113,22 @@ def get_merger(preference_type):
|
||||
return mergers[preference_type]
|
||||
|
||||
class gpt:
|
||||
__user_policy_mode_key = 'Software\\Policies\\Microsoft\\Windows\\System\\UserPolicyMode'
|
||||
|
||||
def __init__(self, gpt_path, sid):
|
||||
def __init__(self, gpt_path, username='Machine', gpo_info=None):
|
||||
add_to_dict(gpt_path, username, gpo_info)
|
||||
self.path = gpt_path
|
||||
self.sid = sid
|
||||
self.storage = registry_factory('registry')
|
||||
self.username = username
|
||||
self.storage = registry_factory()
|
||||
self.storage._gpt_read_flag = True
|
||||
self.gpo_info = gpo_info
|
||||
self.name = ''
|
||||
|
||||
self.guid = self.path.rpartition('/')[2]
|
||||
if 'default' == self.guid:
|
||||
self.guid = 'Local Policy'
|
||||
|
||||
self._machine_path = find_dir(self.path, 'Machine')
|
||||
self._user_path = find_dir(self.path, 'User')
|
||||
self._scripts_machine_path = find_dir(self._machine_path, 'Scripts')
|
||||
self._scripts_user_path = find_dir(self._user_path, 'Scripts')
|
||||
|
||||
self.settings_list = [
|
||||
'shortcuts'
|
||||
@@ -165,92 +140,88 @@ class gpt:
|
||||
, 'inifiles'
|
||||
, 'services'
|
||||
, 'scheduledtasks'
|
||||
, 'scripts'
|
||||
, 'networkshares'
|
||||
]
|
||||
self.settings = dict()
|
||||
self.settings['machine'] = dict()
|
||||
self.settings['user'] = dict()
|
||||
self.settings = {}
|
||||
self.settings['machine'] = {}
|
||||
self.settings['user'] = {}
|
||||
self.settings['machine']['regpol'] = find_file(self._machine_path, 'registry.pol')
|
||||
self.settings['user']['regpol'] = find_file(self._user_path, 'registry.pol')
|
||||
for setting in self.settings_list:
|
||||
machine_preffile = find_preffile(self._machine_path, setting)
|
||||
user_preffile = find_preffile(self._user_path, setting)
|
||||
mlogdata = dict({'setting': setting, 'prefpath': machine_preffile})
|
||||
mlogdata = {'setting': setting, 'prefpath': machine_preffile}
|
||||
log('D24', mlogdata)
|
||||
self.settings['machine'][setting] = machine_preffile
|
||||
ulogdata = dict({'setting': setting, 'prefpath': user_preffile})
|
||||
ulogdata = {'setting': setting, 'prefpath': user_preffile}
|
||||
log('D23', ulogdata)
|
||||
self.settings['user'][setting] = user_preffile
|
||||
|
||||
self.settings['machine']['scripts'] = find_file(self._scripts_machine_path, 'scripts.ini')
|
||||
self.settings['user']['scripts'] = find_file(self._scripts_user_path, 'scripts.ini')
|
||||
|
||||
|
||||
def set_name(self, name):
|
||||
'''
|
||||
Set human-readable GPT name.
|
||||
'''
|
||||
self.name = name
|
||||
|
||||
def get_policy_mode(self):
|
||||
def merge_machine(self):
|
||||
'''
|
||||
Get UserPolicyMode parameter value in order to determine if it
|
||||
is possible to work with user's part of GPT. This value is
|
||||
checked only if working for user's SID.
|
||||
Merge machine settings to storage.
|
||||
'''
|
||||
upm = self.storage.get_hklm_entry(self.__user_policy_mode_key)
|
||||
if not upm:
|
||||
upm = 0
|
||||
upm = int(upm)
|
||||
if 0 > upm or 2 > upm:
|
||||
upm = 0
|
||||
try:
|
||||
# Merge machine policies to registry if possible
|
||||
if self.settings['machine']['regpol']:
|
||||
mlogdata = {'polfile': self.settings['machine']['regpol']}
|
||||
log('D34', mlogdata)
|
||||
util.preg.merge_polfile(self.settings['machine']['regpol'], policy_name=self.name, gpo_info=self.gpo_info)
|
||||
# Merge machine preferences to registry if possible
|
||||
for preference_name, preference_path in self.settings['machine'].items():
|
||||
if preference_path:
|
||||
preference_type = get_preftype(preference_path)
|
||||
logdata = {'pref': preference_type.value}
|
||||
log('D28', logdata)
|
||||
preference_parser = get_parser(preference_type)
|
||||
preference_merger = get_merger(preference_type)
|
||||
preference_objects = preference_parser(preference_path)
|
||||
preference_merger(self.storage, preference_objects, self.name)
|
||||
except Exception as exc:
|
||||
logdata = {}
|
||||
logdata['gpt'] = self.name
|
||||
logdata['msg'] = str(exc)
|
||||
log('E28', logdata)
|
||||
|
||||
return upm
|
||||
|
||||
def merge(self):
|
||||
def merge_user(self):
|
||||
'''
|
||||
Merge machine and user (if sid provided) settings to storage.
|
||||
Merge user settings to storage.
|
||||
'''
|
||||
if self.sid == self.storage.get_info('machine_sid'):
|
||||
try:
|
||||
# Merge machine settings to registry if possible
|
||||
for preference_name, preference_path in self.settings['machine'].items():
|
||||
if preference_path:
|
||||
preference_type = get_preftype(preference_path)
|
||||
logdata = dict({'pref': preference_type.value, 'sid': self.sid})
|
||||
log('D28', logdata)
|
||||
preference_parser = get_parser(preference_type)
|
||||
preference_merger = get_merger(preference_type)
|
||||
preference_objects = preference_parser(preference_path)
|
||||
preference_merger(self.storage, self.sid, preference_objects, self.name)
|
||||
if self.settings['user']['regpol']:
|
||||
mulogdata = dict({'polfile': self.settings['machine']['regpol']})
|
||||
log('D35', mulogdata)
|
||||
util.preg.merge_polfile(self.settings['user']['regpol'], sid=self.sid, policy_name=self.name)
|
||||
if self.settings['machine']['regpol']:
|
||||
mlogdata = dict({'polfile': self.settings['machine']['regpol']})
|
||||
log('D34', mlogdata)
|
||||
util.preg.merge_polfile(self.settings['machine']['regpol'], policy_name=self.name)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['gpt'] = self.name
|
||||
logdata['msg'] = str(exc)
|
||||
log('E28', logdata)
|
||||
else:
|
||||
# Merge user settings if UserPolicyMode set accordingly
|
||||
# and user settings (for HKCU) are exist.
|
||||
policy_mode = upm2str(self.get_policy_mode())
|
||||
if 'Merge' == policy_mode or 'Not configured' == policy_mode:
|
||||
try:
|
||||
for preference_name, preference_path in self.settings['user'].items():
|
||||
if preference_path:
|
||||
preference_type = get_preftype(preference_path)
|
||||
logdata = dict({'pref': preference_type.value, 'sid': self.sid})
|
||||
log('D29', logdata)
|
||||
preference_parser = get_parser(preference_type)
|
||||
preference_merger = get_merger(preference_type)
|
||||
preference_objects = preference_parser(preference_path)
|
||||
preference_merger(self.storage, self.sid, preference_objects, self.name)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['gpt'] = self.name
|
||||
logdata['msg'] = str(exc)
|
||||
log('E29', logdata)
|
||||
try:
|
||||
# Merge user policies to registry if possible
|
||||
if self.settings['user']['regpol']:
|
||||
mulogdata = {'polfile': self.settings['user']['regpol']}
|
||||
log('D35', mulogdata)
|
||||
util.preg.merge_polfile(self.settings['user']['regpol'],
|
||||
policy_name=self.name,
|
||||
username=self.username,
|
||||
gpo_info=self.gpo_info)
|
||||
# Merge user preferences to registry if possible
|
||||
for preference_name, preference_path in self.settings['user'].items():
|
||||
if preference_path:
|
||||
preference_type = get_preftype(preference_path)
|
||||
logdata = {'pref': preference_type.value}
|
||||
log('D29', logdata)
|
||||
preference_parser = get_parser(preference_type)
|
||||
preference_merger = get_merger(preference_type)
|
||||
preference_objects = preference_parser(preference_path)
|
||||
preference_merger(self.storage, preference_objects, self.name)
|
||||
except Exception as exc:
|
||||
logdata = {}
|
||||
logdata['gpt'] = self.name
|
||||
logdata['msg'] = str(exc)
|
||||
log('E29', logdata)
|
||||
|
||||
def find_dir(search_path, name):
|
||||
'''
|
||||
@@ -340,28 +311,13 @@ def lp2gpt():
|
||||
# Write PReg
|
||||
polparser.write_binary(os.path.join(destdir, 'Registry.pol'))
|
||||
|
||||
def get_local_gpt(sid):
|
||||
def get_local_gpt():
|
||||
'''
|
||||
Convert default policy to GPT and create object out of it.
|
||||
'''
|
||||
log('D25')
|
||||
lp2gpt()
|
||||
local_policy = gpt(str(local_policy_cache()), sid)
|
||||
local_policy = gpt(str(local_policy_cache()))
|
||||
local_policy.set_name('Local Policy')
|
||||
|
||||
return local_policy
|
||||
|
||||
def upm2str(upm_num):
|
||||
'''
|
||||
Translate UserPolicyMode to string.
|
||||
'''
|
||||
result = 'Not configured'
|
||||
|
||||
if upm_num in [1, '1']:
|
||||
result = 'Replace'
|
||||
|
||||
if upm_num in [2, '2']:
|
||||
result = 'Merge'
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -18,21 +18,38 @@
|
||||
|
||||
from util.xml import get_xml_root
|
||||
|
||||
def read_inifiles(inifiles_file):
|
||||
inifiles = list()
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
for inifile in get_xml_root(inifiles_file):
|
||||
ini_obj = inifile()
|
||||
|
||||
def read_inifiles(inifiles_file):
|
||||
inifiles = []
|
||||
|
||||
for ini in get_xml_root(inifiles_file):
|
||||
prors = ini.find('Properties')
|
||||
ini_obj = inifile(prors.get('path'))
|
||||
ini_obj.set_section(prors.get('section', default=None))
|
||||
ini_obj.set_property(prors.get('property', default=None))
|
||||
ini_obj.set_value(prors.get('value', default=None))
|
||||
ini_obj.set_action(prors.get('action', default='C'))
|
||||
|
||||
inifiles.append(ini_obj)
|
||||
|
||||
return inifiles
|
||||
|
||||
def merge_inifiles(storage, sid, inifile_objects, policy_name):
|
||||
for inifile in inifile_objects:
|
||||
pass
|
||||
def merge_inifiles(storage, inifile_objects, policy_name):
|
||||
for iniobj in inifile_objects:
|
||||
storage.add_ini(iniobj, policy_name)
|
||||
|
||||
def inifile():
|
||||
def __init__(self):
|
||||
pass
|
||||
class inifile(DynamicAttributes):
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
|
||||
def set_section(self, section):
|
||||
self.section = section
|
||||
def set_property(self, property):
|
||||
self.property = property
|
||||
def set_value(self, value):
|
||||
self.value = value
|
||||
def set_action(self, action):
|
||||
self.action = action
|
||||
|
||||
|
||||
gpoa/gpt/networkshares.py (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from util.xml import get_xml_root
|
||||
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
|
||||
def read_networkshares(networksharesxml):
|
||||
networkshares = []
|
||||
|
||||
for share in get_xml_root(networksharesxml):
|
||||
props = share.find('Properties')
|
||||
networkshare_obj = networkshare(props.get('name'))
|
||||
networkshare_obj.set_action(props.get('action', default='C'))
|
||||
networkshare_obj.set_path(props.get('path', default=None))
|
||||
networkshare_obj.set_all_regular(props.get('allRegular', default=None))
|
||||
networkshare_obj.set_comment(props.get('comment', default=None))
|
||||
networkshare_obj.set_limitUsers(props.get('limitUsers', default=None))
|
||||
networkshare_obj.set_abe(props.get('abe', default=None))
|
||||
networkshares.append(networkshare_obj)
|
||||
|
||||
return networkshares
|
||||
|
||||
def merge_networkshares(storage, networkshares_objects, policy_name):
|
||||
for networkshareobj in networkshares_objects:
|
||||
storage.add_networkshare(networkshareobj, policy_name)
|
||||
|
||||
class networkshare(DynamicAttributes):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def set_action(self, action):
|
||||
self.action = action
|
||||
def set_path(self, path):
|
||||
self.path = path
|
||||
def set_all_regular(self, allRegular):
|
||||
self.allRegular = allRegular
|
||||
def set_comment(self, comment):
|
||||
self.comment = comment
|
||||
def set_limitUsers(self, limitUsers):
|
||||
self.limitUsers = limitUsers
|
||||
def set_abe(self, abe):
|
||||
self.abe = abe
|
||||
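An illustrative note on read_networkshares() above (not part of the diff): every entry's Properties element is mapped onto a networkshare object whose attributes mirror the XML attributes (name, action, path, allRegular, comment, limitUsers, abe), and merge_networkshares() then hands each object to the storage. A minimal sketch, with a hypothetical path to the preferences file:

shares = read_networkshares('/tmp/gpt/Preferences/NetworkShares/NetworkShares.xml')
for share in shares:
    print(share.name, share.action, share.path, share.limitUsers)
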
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,17 +16,13 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from util.preg import (
load_preg
)
from util.preg import load_preg


def read_polfile(filename):
return load_preg(filename).entries

def merge_polfile(storage, sid, policy_objects, policy_name):
for entry in policy_objects:
if not sid:
storage.add_hklm_entry(entry, policy_name)
else:
storage.add_hkcu_entry(entry, sid, policy_name)
def merge_polfile(storage, policy_objects, policy_name):
pass

@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -20,11 +20,14 @@ import json
|
||||
|
||||
from util.xml import get_xml_root
|
||||
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
|
||||
def read_printers(printers_file):
|
||||
'''
|
||||
Read printer configurations from Printer.xml
|
||||
'''
|
||||
printers = list()
|
||||
printers = []
|
||||
|
||||
for prn in get_xml_root(printers_file):
|
||||
prn_obj = printer(prn.tag, prn.get('name'), prn.get('status'))
|
||||
@@ -41,9 +44,9 @@ def read_printers(printers_file):
|
||||
|
||||
return printers
|
||||
|
||||
def merge_printers(storage, sid, printer_objects, policy_name):
|
||||
def merge_printers(storage, printer_objects, policy_name):
|
||||
for device in printer_objects:
|
||||
storage.add_printer(sid, device, policy_name)
|
||||
storage.add_printer(device, policy_name)
|
||||
|
||||
def json2printer(json_str):
|
||||
'''
|
||||
@@ -60,7 +63,7 @@ def json2printer(json_str):
|
||||
|
||||
return prn
|
||||
|
||||
class printer:
|
||||
class printer(DynamicAttributes):
|
||||
def __init__(self, ptype, name, status):
|
||||
'''
|
||||
ptype may be one of:
|
||||
@@ -100,7 +103,7 @@ class printer:
|
||||
'''
|
||||
Return string-serialized JSON representation of the object.
|
||||
'''
|
||||
printer = dict()
|
||||
printer = {}
|
||||
printer['type'] = self.printer_type
|
||||
printer['name'] = self.name
|
||||
printer['status'] = self.status
|
||||
@@ -112,7 +115,7 @@ class printer:
|
||||
|
||||
# Nesting JSON object into JSON object makes it easier to add
|
||||
# metadata if needed.
|
||||
config = dict()
|
||||
config = {}
|
||||
config['printer'] = printer
|
||||
|
||||
return json.dumps(config)
|
||||
|
||||
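An illustrative note on printer.to_json() above (not part of the diff): the printer's own fields are nested under a top-level "printer" key, so the storage receives one self-describing JSON document per device. A rough sketch, with hypothetical values (the real set of serialized fields is wider than the three visible in this hunk):

prn = printer('LocalPrinter', 'Office', 'shared')   # ptype, name and status values are hypothetical
print(prn.to_json())
# -> {"printer": {"type": "LocalPrinter", "name": "Office", "status": "shared", ...}}
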
gpoa/gpt/scriptsini.py (new file, 150 lines)
@@ -0,0 +1,150 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import configparser
|
||||
import os
|
||||
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
|
||||
def read_scripts(scripts_file):
|
||||
scripts = Scripts_lists()
|
||||
|
||||
logon_scripts = {}
|
||||
logoff_scripts = {}
|
||||
startup_scripts = {}
|
||||
shutdown_scripts = {}
|
||||
|
||||
config = configparser.ConfigParser()
|
||||
config.read(scripts_file, encoding = 'utf-16')
|
||||
scripts_file_dir = os.path.dirname(scripts_file)
|
||||
|
||||
actions = config.sections()
|
||||
|
||||
for act in actions:
|
||||
act_upper = act.upper()
|
||||
if act_upper == 'LOGON':
|
||||
section_scripts = logon_scripts
|
||||
elif act_upper == 'LOGOFF':
|
||||
section_scripts = logoff_scripts
|
||||
elif act_upper == 'STARTUP':
|
||||
section_scripts = startup_scripts
|
||||
elif act_upper == 'SHUTDOWN':
|
||||
section_scripts = shutdown_scripts
|
||||
else:
|
||||
continue
|
||||
|
||||
for key in config[act]:
|
||||
key_lower = key.lower()
|
||||
key_split = key_lower.split('cmdline')
|
||||
if len(key_split) > 1 and not key_split[1]:
|
||||
if key_split[0].isdigit():
|
||||
key_index = int(key_split[0])
|
||||
section_scripts[key_index] = Script(act, scripts_file_dir, config[act][key])
|
||||
key_split = key_lower.split('parameters')
|
||||
if len(key_split) > 1 and not key_split[1]:
|
||||
if key_split[0].isdigit():
|
||||
key_index = int(key_split[0])
|
||||
section_scripts[key_index].set_args(config[act][key])
|
||||
if logon_scripts:
|
||||
for i in sorted(logon_scripts.keys()):
|
||||
scripts.add_script('LOGON', logon_scripts[i])
|
||||
|
||||
if logoff_scripts:
|
||||
for i in sorted(logoff_scripts.keys()):
|
||||
scripts.add_script('LOGOFF', logoff_scripts[i])
|
||||
|
||||
if startup_scripts:
|
||||
for i in sorted(startup_scripts.keys()):
|
||||
scripts.add_script('STARTUP', startup_scripts[i])
|
||||
|
||||
if shutdown_scripts:
|
||||
for i in sorted(shutdown_scripts.keys()):
|
||||
scripts.add_script('SHUTDOWN', shutdown_scripts[i])
|
||||
|
||||
|
||||
return scripts
|
||||
|
||||
def merge_scripts(storage, scripts_objects, policy_name):
|
||||
for script in scripts_objects.get_logon_scripts():
|
||||
storage.add_script(script, policy_name)
|
||||
for script in scripts_objects.get_logoff_scripts():
|
||||
storage.add_script(script, policy_name)
|
||||
for script in scripts_objects.get_startup_scripts():
|
||||
storage.add_script(script, policy_name)
|
||||
for script in scripts_objects.get_shutdown_scripts():
|
||||
storage.add_script(script, policy_name)
|
||||
|
||||
class Scripts_lists:
|
||||
def __init__ (self):
|
||||
self.__logon_scripts = []
|
||||
self.__logoff_scripts = []
|
||||
self.__startup_scripts = []
|
||||
self.__shutdown_scripts = []
|
||||
|
||||
def get_logon_scripts(self):
|
||||
return self.__logon_scripts
|
||||
def get_logoff_scripts(self):
|
||||
return self.__logoff_scripts
|
||||
def get_startup_scripts(self):
|
||||
return self.__startup_scripts
|
||||
def get_shutdown_scripts(self):
|
||||
return self.__shutdown_scripts
|
||||
|
||||
def add_script(self, action, script):
|
||||
if action == 'LOGON':
|
||||
self.get_logon_scripts().append(script)
|
||||
elif action == 'LOGOFF':
|
||||
self.get_logoff_scripts().append(script)
|
||||
elif action == 'STARTUP':
|
||||
self.get_startup_scripts().append(script)
|
||||
elif action == 'SHUTDOWN':
|
||||
self.get_shutdown_scripts().append(script)
|
||||
|
||||
|
||||
class Script(DynamicAttributes):
|
||||
__logon_counter = 0
|
||||
__logoff_counter = 0
|
||||
__startup_counter = 0
|
||||
__shutdown_counter = 0
|
||||
|
||||
def __init__(self, action, script_dir, script_filename):
|
||||
action_upper = action.upper()
|
||||
self.action = action_upper
|
||||
self.path = os.path.join(script_dir, action_upper, script_filename.upper())
|
||||
if not os.path.isfile(self.path):
|
||||
self.number = None
|
||||
return None
|
||||
self.args = None
|
||||
|
||||
if action_upper == 'LOGON':
|
||||
self.number = Script.__logon_counter
|
||||
Script.__logon_counter += 1
|
||||
elif action_upper == 'LOGOFF':
|
||||
self.number = Script.__logoff_counter
|
||||
Script.__logoff_counter += 1
|
||||
elif action_upper == 'STARTUP':
|
||||
self.number = Script.__startup_counter
|
||||
Script.__startup_counter += 1
|
||||
elif action_upper == 'SHUTDOWN':
|
||||
self.number = Script.__shutdown_counter
|
||||
Script.__shutdown_counter += 1
|
||||
|
||||
def set_args(self, args):
|
||||
self.args = args
|
||||
|
||||
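An illustrative note on read_scripts() above (not part of the diff): the ini file is read as UTF-16, every section (Logon/Logoff/Startup/Shutdown) is scanned for NCmdLine/NParameters key pairs, and Script() resolves the command to an uppercase file under a directory named after the action; if that file is missing, number is set to None and the entry is effectively skipped. A minimal sketch, with hypothetical paths:

# Hypothetical GPT fragment: /tmp/gpt/Scripts/psscripts.ini (UTF-16) containing
#   [Logon]
#   0CmdLine=logon.sh
#   0Parameters=--verbose
# plus an executable file /tmp/gpt/Scripts/LOGON/LOGON.SH next to it.
scripts = read_scripts('/tmp/gpt/Scripts/psscripts.ini')
for script in scripts.get_logon_scripts():
    print(script.number, script.path, script.args)   # 0 /tmp/gpt/Scripts/LOGON/LOGON.SH --verbose
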
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -18,11 +18,14 @@
|
||||
|
||||
from util.xml import get_xml_root
|
||||
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
|
||||
def read_services(service_file):
|
||||
'''
|
||||
Read Services.xml from GPT.
|
||||
'''
|
||||
services = list()
|
||||
services = []
|
||||
|
||||
for srv in get_xml_root(service_file):
|
||||
srv_obj = service(srv.get('name'))
|
||||
@@ -39,18 +42,18 @@ def read_services(service_file):
|
||||
|
||||
return services
|
||||
|
||||
def merge_services(storage, sid, service_objects, policy_name):
|
||||
def merge_services(storage, service_objects, policy_name):
|
||||
for srv in service_objects:
|
||||
pass
|
||||
|
||||
class service:
|
||||
class service(DynamicAttributes):
|
||||
def __init__(self, name):
|
||||
self.unit = name
|
||||
self.servname = None
|
||||
self.serviceaction = None
|
||||
|
||||
def set_clsid(self, clsid):
|
||||
self.guid = uid
|
||||
self.guid = clsid
|
||||
|
||||
def set_usercontext(self, usercontext=False):
|
||||
ctx = False
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,22 +16,26 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from enum import Enum
|
||||
import json
|
||||
from pathlib import Path
|
||||
import stat
|
||||
import logging
|
||||
from enum import Enum
|
||||
|
||||
from xml.etree import ElementTree
|
||||
from xdg.DesktopEntry import DesktopEntry
|
||||
import json
|
||||
|
||||
from util.paths import get_desktop_files_directory
|
||||
from util.windows import transform_windows_path
|
||||
from util.xml import get_xml_root
|
||||
from xdg.DesktopEntry import DesktopEntry
|
||||
|
||||
from .dynamic_attributes import DynamicAttributes
|
||||
|
||||
|
||||
class TargetType(Enum):
|
||||
FILESYSTEM = 'FILESYSTEM'
|
||||
URL = 'URL'
|
||||
|
||||
def __str__(self):
|
||||
return self.value
|
||||
|
||||
def get_ttype(targetstr):
|
||||
'''
|
||||
Validation function for targetType property
|
||||
@@ -42,7 +46,7 @@ def get_ttype(targetstr):
|
||||
'''
|
||||
ttype = TargetType.FILESYSTEM
|
||||
|
||||
if targetstr == 'URL':
|
||||
if targetstr == 'URL' or targetstr == TargetType.URL:
|
||||
ttype = TargetType.URL
|
||||
|
||||
return ttype
|
||||
@@ -66,7 +70,7 @@ def read_shortcuts(shortcuts_file):
|
||||
|
||||
:shortcuts_file: Location of Shortcuts.xml
|
||||
'''
|
||||
shortcuts = list()
|
||||
shortcuts = []
|
||||
|
||||
for link in get_xml_root(shortcuts_file):
|
||||
props = link.find('Properties')
|
||||
@@ -85,32 +89,34 @@ def read_shortcuts(shortcuts_file):
|
||||
sc.set_guid(link.get('uid'))
|
||||
sc.set_usercontext(link.get('userContext', False))
|
||||
sc.set_icon(props.get('iconPath'))
|
||||
if props.get('comment'):
|
||||
sc.set_comment(props.get('comment'))
|
||||
|
||||
shortcuts.append(sc)
|
||||
|
||||
return shortcuts
|
||||
|
||||
def merge_shortcuts(storage, sid, shortcut_objects, policy_name):
|
||||
def merge_shortcuts(storage, shortcut_objects, policy_name):
|
||||
for shortcut in shortcut_objects:
|
||||
storage.add_shortcut(sid, shortcut, policy_name)
|
||||
storage.add_shortcut(shortcut, policy_name)
|
||||
|
||||
def json2sc(json_str):
|
||||
'''
|
||||
Build shortcut out of string-serialized JSON
|
||||
'''
|
||||
json_obj = json.loads(json_str)
|
||||
link_type = get_ttype(json_obj['type'])
|
||||
|
||||
sc = shortcut(json_obj['dest'], json_obj['path'], json_obj['arguments'], json_obj['name'], json_obj['action'], link_type)
|
||||
sc.set_changed(json_obj['changed'])
|
||||
sc.set_clsid(json_obj['clsid'])
|
||||
sc.set_guid(json_obj['guid'])
|
||||
sc.set_usercontext(json_obj['is_in_user_context'])
|
||||
if 'icon' in json_obj:
|
||||
sc.set_icon(json_obj['icon'])
|
||||
def find_desktop_entry(binary_path):
|
||||
desktop_dir = get_desktop_files_directory()
|
||||
binary_name = ''.join(binary_path.split('/')[-1])
|
||||
desktop_file_path = Path(f"{desktop_dir}/{binary_name}.desktop")
|
||||
|
||||
return sc
|
||||
if desktop_file_path.exists():
|
||||
desktop_entry = DesktopEntry()
|
||||
desktop_entry.parse(desktop_file_path)
|
||||
return desktop_entry
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class shortcut(DynamicAttributes):
|
||||
_ignore_fields = {"desktop_file_template", "desktop_file"}
|
||||
|
||||
class shortcut:
|
||||
def __init__(self, dest, path, arguments, name=None, action=None, ttype=TargetType.FILESYSTEM):
|
||||
'''
|
||||
:param dest: Path to resulting file on file system
|
||||
@@ -119,16 +125,42 @@ class shortcut:
|
||||
:param name: Name of the application
|
||||
:param type: Link type - FILESYSTEM or URL
|
||||
'''
|
||||
self.dest = dest
|
||||
self.dest = self.replace_slashes(dest)
|
||||
self.path = path
|
||||
self.expanded_path = None
|
||||
self.arguments = arguments
|
||||
self.name = name
|
||||
self.name = self.replace_name(name)
|
||||
self.action = action
|
||||
self.changed = ''
|
||||
self.icon = None
|
||||
self.comment = ''
|
||||
self.is_in_user_context = self.set_usercontext()
|
||||
self.type = ttype
|
||||
self.desktop_file_template = None
|
||||
|
||||
|
||||
def items(self):
|
||||
return ((k, v) for k, v in super().items() if k not in self._ignore_fields)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.items())
|
||||
|
||||
|
||||
def replace_slashes(self, input_path):
|
||||
if input_path.startswith('%'):
|
||||
index = input_path.find('%', 1)
|
||||
if index != -1:
|
||||
replace_path = input_path[:index + 2] + input_path[index + 2:].replace('/','-')
|
||||
return replace_path
|
||||
return input_path.replace('/','-')
|
||||
|
||||
def replace_name(self, input_name):
|
||||
if input_name.startswith('%'):
|
||||
index = input_name.find('%', 1)
|
||||
if index != -1:
|
||||
replace_name = input_name[index + 2:]
|
||||
return replace_name
|
||||
return input_name
|
||||
|
||||
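An illustrative note on replace_slashes()/replace_name() above (not part of the diff): dest keeps a leading %VARIABLE%/ prefix intact and turns the remaining slashes into dashes, so the generated .desktop file name contains no path separators, while name simply drops such a prefix. A small sketch, assuming the constructor as shown (argument values are hypothetical):

sc = shortcut('%StartMenuDir%/Office/Writer', '/usr/bin/lowriter', '', '%StartMenuDir%/Writer')
print(sc.dest)   # %StartMenuDir%/Office-Writer
print(sc.name)   # Writer
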
def __str__(self):
|
||||
result = self.to_json()
|
||||
@@ -149,6 +181,9 @@ class shortcut:
|
||||
def set_icon(self, icon_name):
|
||||
self.icon = icon_name
|
||||
|
||||
def set_comment(self, comment):
|
||||
self.comment = comment
|
||||
|
||||
def set_type(self, ttype):
|
||||
'''
|
||||
Set type of the hyperlink - FILESYSTEM or URL
|
||||
@@ -177,28 +212,6 @@ class shortcut:
|
||||
def is_usercontext(self):
|
||||
return self.is_in_user_context
|
||||
|
||||
def to_json(self):
|
||||
'''
|
||||
Return shortcut's JSON for further serialization.
|
||||
'''
|
||||
content = dict()
|
||||
content['dest'] = self.dest
|
||||
content['path'] = self.path
|
||||
content['name'] = self.name
|
||||
content['arguments'] = self.arguments
|
||||
content['clsid'] = self.clsid
|
||||
content['guid'] = self.guid
|
||||
content['changed'] = self.changed
|
||||
content['action'] = self.action
|
||||
content['is_in_user_context'] = self.is_in_user_context
|
||||
content['type'] = ttype2str(self.type)
|
||||
if self.icon:
|
||||
content['icon'] = self.icon
|
||||
result = self.desktop()
|
||||
result.content.update(content)
|
||||
|
||||
return json.dumps(result.content)
|
||||
|
||||
def desktop(self, dest=None):
|
||||
'''
|
||||
Returns desktop file object which may be written to disk.
|
||||
@@ -206,6 +219,7 @@ class shortcut:
|
||||
if dest:
|
||||
self.desktop_file = DesktopEntry(dest)
|
||||
else:
|
||||
self.desktop_file_template = find_desktop_entry(self.path)
|
||||
self.desktop_file = DesktopEntry()
|
||||
self.desktop_file.addGroup('Desktop Entry')
|
||||
self.desktop_file.set('Version', '1.0')
|
||||
@@ -217,7 +231,7 @@ class shortcut:
|
||||
'''
|
||||
Update desktop file object from internal data.
|
||||
'''
|
||||
if self.type == TargetType.URL:
|
||||
if get_ttype(self.type) == TargetType.URL:
|
||||
self.desktop_file.set('Type', 'Link')
|
||||
else:
|
||||
self.desktop_file.set('Type', 'Application')
|
||||
@@ -227,14 +241,21 @@ class shortcut:
|
||||
desktop_path = self.path
|
||||
if self.expanded_path:
|
||||
desktop_path = self.expanded_path
|
||||
if self.type == TargetType.URL:
|
||||
if get_ttype(self.type) == TargetType.URL:
|
||||
self.desktop_file.set('URL', desktop_path)
|
||||
else:
|
||||
self.desktop_file.set('Terminal', 'false')
|
||||
self.desktop_file.set('Exec', '{} {}'.format(desktop_path, self.arguments))
|
||||
str2bool_lambda = (lambda boolstr: boolstr if isinstance(boolstr, bool)
|
||||
else boolstr and boolstr.lower() in ['True', 'true', 'yes', '1'])
|
||||
if self.desktop_file_template:
|
||||
terminal_state = str2bool_lambda(self.desktop_file_template.get('Terminal'))
|
||||
self.desktop_file.set('Terminal', 'true' if terminal_state else 'false')
|
||||
self.desktop_file.set('Exec', '{} {}'.format(desktop_path, self.get_original_value('arguments')))
|
||||
self.desktop_file.set('Comment', self.comment)
|
||||
|
||||
if self.icon:
|
||||
self.desktop_file.set('Icon', self.icon)
|
||||
elif self.desktop_file_template and self.desktop_file_template.get('Icon', False):
|
||||
self.desktop_file.set('Icon', self.desktop_file_template.get('Icon'))
|
||||
|
||||
def _write_desktop(self, dest, create_only=False, read_firstly=False):
|
||||
'''
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -19,7 +19,7 @@
def read_tasks(filename):
pass

def merge_tasks(storage, sid, task_objects, policy_name):
def merge_tasks(storage, task_objects, policy_name):
for task in task_objects:
pass

@@ -2,7 +2,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -25,6 +25,7 @@ import os
|
||||
import sys
|
||||
import pwd
|
||||
import signal
|
||||
from storage import Dconf_registry
|
||||
|
||||
from util.users import (
|
||||
is_root
|
||||
@@ -47,37 +48,52 @@ from util.logging import log
|
||||
class file_runner:
|
||||
_gpoa_exe = '/usr/sbin/gpoa'
|
||||
|
||||
def __init__(self, username=None):
|
||||
def __init__(self, loglevel, username=None):
|
||||
self._user = username
|
||||
self._loglevel = loglevel
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
Call gpoa utility to generate scripts
|
||||
'''
|
||||
gpoa_cmd = [self._gpoa_exe]
|
||||
if self._loglevel != None:
|
||||
gpoa_cmd += ["--loglevel", str(self._loglevel)]
|
||||
if self._user:
|
||||
gpoa_cmd += [self._user]
|
||||
|
||||
output = subprocess.call(gpoa_cmd)
|
||||
sys.exit(output)
|
||||
subprocess.check_output(gpoa_cmd)
|
||||
|
||||
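An illustrative note on file_runner above (not part of the diff): with the new signature the requested log level is forwarded to the gpoa call, and the username stays optional. A minimal usage sketch, with a hypothetical user name 'jdoe':

computer_runner = file_runner(5)        # runs: /usr/sbin/gpoa --loglevel 5
user_runner = file_runner(5, 'jdoe')    # runs: /usr/sbin/gpoa --loglevel 5 jdoe
computer_runner.run()
user_runner.run()
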
def parse_cli_arguments():
|
||||
'''
|
||||
Command line argument parser
|
||||
'''
|
||||
argparser = argparse.ArgumentParser(description='Update group policies for the specified user')
|
||||
argparser = argparse.ArgumentParser(description='Update group policies for computer and the specified user')
|
||||
argparser.add_argument('-u',
|
||||
'--user',
|
||||
default=None,
|
||||
help='Name of the user for GPO update')
|
||||
argparser.add_argument('--target',
|
||||
argparser.add_argument('-t',
|
||||
'--target',
|
||||
default=None,
|
||||
type=str,
|
||||
type=str.upper,
|
||||
choices=["ALL", "USER", "COMPUTER"],
|
||||
help='Specify if it is needed to update user\'s or computer\'s policies')
|
||||
argparser.add_argument('--loglevel',
|
||||
argparser.add_argument('-l',
|
||||
'--loglevel',
|
||||
type=int,
|
||||
default=5,
|
||||
help='Set logging verbosity level')
|
||||
argparser.add_argument('-f',
|
||||
'--force',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Force GPT download')
|
||||
argparser.add_argument('-s',
|
||||
'--system',
|
||||
action='store_true',
|
||||
default=None,
|
||||
help='Run gpoa directly in system mode')
|
||||
|
||||
return argparser.parse_args()
|
||||
|
||||
@@ -87,46 +103,62 @@ def runner_factory(args, target):
|
||||
factors taken into account.
|
||||
'''
|
||||
username = None
|
||||
target = target.upper()
|
||||
if is_root():
|
||||
# Only root may specify any username to update.
|
||||
try:
|
||||
if args.user:
|
||||
username = pwd.getpwnam(args.user).pw_name
|
||||
else:
|
||||
target = 'Computer'
|
||||
target = 'COMPUTER'
|
||||
except:
|
||||
username = None
|
||||
logdata = dict({'username': args.user})
|
||||
logdata = {'username': args.user}
|
||||
log('W1', logdata)
|
||||
else:
|
||||
# User may only perform gpupdate for machine (None) or
|
||||
# itself (os.getusername()).
|
||||
username = pwd.getpwuid(os.getuid()).pw_name
|
||||
if args.user != username:
|
||||
logdata = dict({'username': username})
|
||||
logdata = {'username': username}
|
||||
log('W2', logdata)
|
||||
|
||||
if args.system:
|
||||
return try_directly(username, target, args.loglevel)
|
||||
else:
|
||||
return try_by_oddjob(username, target)
|
||||
|
||||
def try_by_oddjob(username, target):
|
||||
'''
|
||||
Run group policies applying by oddjob service
|
||||
'''
|
||||
if is_oddjobd_gpupdate_accessible():
|
||||
log('D13')
|
||||
computer_runner = None
|
||||
user_runner = None
|
||||
if target == 'All' or target == 'Computer':
|
||||
if target == 'ALL' or target == 'COMPUTER':
|
||||
computer_runner = dbus_runner()
|
||||
if username:
|
||||
if target == 'All' or target == 'User':
|
||||
if target == 'ALL' or target == 'USER':
|
||||
user_runner = dbus_runner(username)
|
||||
return (computer_runner, user_runner)
|
||||
else:
|
||||
log('W3')
|
||||
|
||||
return None
|
||||
|
||||
def try_directly(username, target, loglevel):
|
||||
'''
|
||||
Run group policies applying directly
|
||||
'''
|
||||
if is_root():
|
||||
log('D14')
|
||||
computer_runner = None
|
||||
user_runner = None
|
||||
if target == 'All' or target == 'Computer':
|
||||
computer_runner = file_runner()
|
||||
if target == 'All' or target == 'User':
|
||||
user_runner = file_runner(username)
|
||||
if target == 'ALL' or target == 'COMPUTER':
|
||||
computer_runner = file_runner(loglevel)
|
||||
if target == 'ALL' or target == 'USER':
|
||||
user_runner = file_runner(loglevel, username)
|
||||
return (computer_runner, user_runner)
|
||||
else:
|
||||
log('E1')
|
||||
@@ -135,10 +167,17 @@ def runner_factory(args, target):
|
||||
|
||||
def main():
|
||||
args = parse_cli_arguments()
|
||||
locale.bindtextdomain('gpoa', '/usr/lib/python3/site-packages/gpoa/locale')
|
||||
gettext.bindtextdomain('gpoa', '/usr/lib/python3/site-packages/gpoa/locale')
|
||||
|
||||
# Set up locale for main application
|
||||
import os
|
||||
base_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
main_locale_path = os.path.join(base_dir, 'locale')
|
||||
locale.bindtextdomain('gpoa', main_locale_path)
|
||||
gettext.bindtextdomain('gpoa', main_locale_path)
|
||||
gettext.textdomain('gpoa')
|
||||
|
||||
set_loglevel(args.loglevel)
|
||||
Dconf_registry._force = args.force
|
||||
gpo_appliers = runner_factory(args, process_target(args.target))
|
||||
|
||||
if gpo_appliers:
|
||||
@@ -146,7 +185,7 @@ def main():
|
||||
try:
|
||||
gpo_appliers[0].run()
|
||||
except Exception as exc:
|
||||
logdata = dict({'error': str(exc)})
|
||||
logdata = {'error': str(exc)}
|
||||
log('E5')
|
||||
return int(ExitCodeUpdater.FAIL_GPUPDATE_COMPUTER_NOREPLY)
|
||||
|
||||
@@ -154,7 +193,7 @@ def main():
|
||||
try:
|
||||
gpo_appliers[1].run()
|
||||
except Exception as exc:
|
||||
logdata = dict({'error': str(exc)})
|
||||
logdata = {'error': str(exc)}
|
||||
log('E6', logdata)
|
||||
return int(ExitCodeUpdater.FAIL_GPUPDATE_USER_NOREPLY)
|
||||
else:
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -19,9 +19,7 @@
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import subprocess
|
||||
|
||||
from util.util import (
|
||||
runcmd
|
||||
@@ -32,6 +30,7 @@ from util.util import (
|
||||
)
|
||||
from util.config import GPConfig
|
||||
from util.paths import get_custom_policy_dir
|
||||
from frontend.appliers.ini_file import Ini_file
|
||||
|
||||
|
||||
class Runner:
|
||||
@@ -61,7 +60,8 @@ def parse_arguments():
|
||||
|
||||
parser_disable = subparsers.add_parser('disable',
|
||||
help='Disable Group Policy subsystem')
|
||||
|
||||
parser_update = subparsers.add_parser('update',
|
||||
help='Update state')
|
||||
parser_write = subparsers.add_parser('write',
|
||||
help='Operate on Group Policies (enable or disable)')
|
||||
parser_set_backend = subparsers.add_parser('set-backend',
|
||||
@@ -78,7 +78,7 @@ def parse_arguments():
|
||||
type=str,
|
||||
nargs='?',
|
||||
const='backend',
|
||||
choices=['local', 'samba'],
|
||||
choices=['local', 'samba', 'freeipa'],
|
||||
help='Backend (source of settings) name')
|
||||
|
||||
parser_write.add_argument('status',
|
||||
@@ -93,7 +93,7 @@ def parse_arguments():
|
||||
type=str,
|
||||
nargs='?',
|
||||
const='backend',
|
||||
choices=['local', 'samba'],
|
||||
choices=['local', 'samba', 'freeipa'],
|
||||
help='Backend (source of settings) name')
|
||||
|
||||
parser_enable.add_argument('--local-policy',
|
||||
@@ -102,9 +102,19 @@ def parse_arguments():
|
||||
parser_enable.add_argument('--backend',
|
||||
default='samba',
|
||||
type=str,
|
||||
choices=['local', 'samba'],
|
||||
choices=['local', 'samba', 'freeipa'],
|
||||
help='Backend (source of settings) name')
|
||||
|
||||
parser_update.add_argument('--local-policy',
|
||||
default=None,
|
||||
help='Name of local policy to enable')
|
||||
parser_update.add_argument('--backend',
|
||||
default='samba',
|
||||
type=str,
|
||||
choices=['local', 'samba', 'freeipa'],
|
||||
help='Backend (source of settings) name')
|
||||
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
def validate_policy_name(policy_name):
|
||||
@@ -135,10 +145,10 @@ def is_unit_enabled(unit_name, unit_global=False):
|
||||
|
||||
def get_status():
|
||||
'''
|
||||
Check that gpupdate.service and gpupdate-user.service are enabled.
|
||||
Check that gpupdate.timer and gpupdate-user.timer are enabled.
|
||||
'''
|
||||
is_gpupdate = is_unit_enabled('gpupdate.service')
|
||||
is_gpupdate_user = is_unit_enabled('gpupdate-user.service', unit_global=True)
|
||||
is_gpupdate = is_unit_enabled('gpupdate.timer')
|
||||
is_gpupdate_user = is_unit_enabled('gpupdate-user.timer', unit_global=True)
|
||||
|
||||
if is_gpupdate and is_gpupdate_user:
|
||||
return True
|
||||
@@ -173,7 +183,11 @@ def disable_gp():
|
||||
cmd_set_local_policy = ['/usr/sbin/control', 'system-policy', 'local']
|
||||
cmd_disable_gpupdate_service = ['/bin/systemctl', 'disable', 'gpupdate.service']
|
||||
cmd_disable_gpupdate_user_service = ['/bin/systemctl', '--global', 'disable', 'gpupdate-user.service']
|
||||
cmd_disable_gpupdate_timer = ['/bin/systemctl', 'disable', 'gpupdate.timer']
|
||||
cmd_disable_gpupdate_user_timer = ['/bin/systemctl', '--global', 'disable', 'gpupdate-user.timer']
|
||||
cmd_control_system_auth = ['/usr/sbin/control', 'system-auth']
|
||||
cmd_disable_gpupdate_scripts_service = ['/bin/systemctl', 'disable', 'gpupdate-scripts-run.service']
|
||||
cmd_disable_gpupdate_scripts_user_service = ['/bin/systemctl', '--global', 'disable', 'gpupdate-scripts-run-user.service']
|
||||
|
||||
config = GPConfig()
|
||||
|
||||
@@ -189,6 +203,10 @@ def disable_gp():
|
||||
runcmd(cmd_set_local_policy)
|
||||
runcmd(cmd_disable_gpupdate_service)
|
||||
runcmd(cmd_disable_gpupdate_user_service)
|
||||
runcmd(cmd_disable_gpupdate_timer)
|
||||
runcmd(cmd_disable_gpupdate_user_timer)
|
||||
runcmd(cmd_disable_gpupdate_scripts_service)
|
||||
runcmd(cmd_disable_gpupdate_scripts_user_service)
|
||||
config.set_local_policy_template()
|
||||
config.set_backend()
|
||||
|
||||
@@ -199,7 +217,13 @@ def enable_gp(policy_name, backend_type):
|
||||
cmd_set_gpupdate_policy = ['/usr/sbin/control', 'system-policy', 'gpupdate']
|
||||
cmd_gpoa_nodomain = ['/usr/sbin/gpoa', '--nodomain', '--loglevel', '5']
|
||||
cmd_enable_gpupdate_service = ['/bin/systemctl', 'enable', 'gpupdate.service']
|
||||
cmd_enable_gpupdate_user_service = ['/bin/systemctl', '--global', 'enable', 'gpupdate-user.service']
|
||||
cmd_enable_gpupdate_user_service = ['/bin/systemctl', '--global', 'disable', 'gpupdate-user.service']
|
||||
cmd_enable_gpupdate_timer = ['/bin/systemctl', 'enable', 'gpupdate.timer']
|
||||
cmd_enable_gpupdate_user_timer = ['/bin/systemctl', '--global', 'enable', 'gpupdate-user.timer']
|
||||
cmd_enable_gpupdate_scripts_service = ['/bin/systemctl', 'enable', 'gpupdate-scripts-run.service']
|
||||
cmd_enable_gpupdate_user_scripts_service = ['/bin/systemctl', '--global', 'enable', 'gpupdate-scripts-run-user.service']
|
||||
cmd_ipa_client_samba = ['/usr/sbin/ipa-client-samba', '--unattended']
|
||||
|
||||
|
||||
config = GPConfig()
|
||||
|
||||
@@ -231,7 +255,51 @@ def enable_gp(policy_name, backend_type):
|
||||
# Enable gpupdate-setup.service for all users
|
||||
if not rollback_on_error(cmd_enable_gpupdate_user_service):
|
||||
return
|
||||
if not is_unit_enabled('gpupdate-user.service', unit_global=True):
|
||||
# Enable gpupdate-scripts-run.service
|
||||
if not rollback_on_error(cmd_enable_gpupdate_scripts_service):
|
||||
return
|
||||
if not is_unit_enabled('gpupdate-scripts-run.service'):
|
||||
disable_gp()
|
||||
return
|
||||
# Enable gpupdate-scripts-run-user.service for all users
|
||||
if not rollback_on_error(cmd_enable_gpupdate_user_scripts_service):
|
||||
return
|
||||
if not is_unit_enabled('gpupdate-scripts-run-user.service', unit_global=True):
|
||||
disable_gp()
|
||||
return
|
||||
|
||||
# Enable gpupdate.timer
|
||||
if not rollback_on_error(cmd_enable_gpupdate_timer):
|
||||
return
|
||||
if not is_unit_enabled('gpupdate.timer'):
|
||||
disable_gp()
|
||||
return
|
||||
|
||||
if backend_type == 'freeipa':
|
||||
result = runcmd(cmd_ipa_client_samba)
|
||||
if result[0] != 0:
|
||||
if "already configured" in str(result[1]) or "already exists" in str(result[1]):
|
||||
print("FreeIPA is already configured")
|
||||
else:
|
||||
print(str(result))
|
||||
return
|
||||
else:
|
||||
print(str(result))
|
||||
|
||||
ini_obj = type("ini", (), {})()
|
||||
ini_obj.path = "/etc/samba/smb.conf"
|
||||
ini_obj.section = "global"
|
||||
ini_obj.action = "UPDATE"
|
||||
ini_obj.property = "log level"
|
||||
ini_obj.value = "0"
|
||||
|
||||
Ini_file(ini_obj)
|
||||
|
||||
|
||||
# Enable gpupdate-setup.timer for all users
|
||||
if not rollback_on_error(cmd_enable_gpupdate_user_timer):
|
||||
return
|
||||
if not is_unit_enabled('gpupdate-user.timer', unit_global=True):
|
||||
disable_gp()
|
||||
return
|
||||
|
||||
@@ -299,20 +367,25 @@ def act_default_policy():
|
||||
def main():
|
||||
arguments = parse_arguments()
|
||||
|
||||
action = dict()
|
||||
action['list'] = act_list
|
||||
action['list-backends'] = act_list_backends
|
||||
action['status'] = act_status
|
||||
action['set-backend'] = act_set_backend
|
||||
action['write'] = act_write
|
||||
action['enable'] = act_enable
|
||||
action['disable'] = disable_gp
|
||||
action['active-policy'] = act_active_policy
|
||||
action['active-backend'] = act_active_backend
|
||||
action['default-policy'] = act_default_policy
|
||||
action = {
|
||||
'list': act_list,
|
||||
'list-backends': act_list_backends,
|
||||
'status': act_status,
|
||||
'set-backend': act_set_backend,
|
||||
'write': act_write,
|
||||
'enable': act_enable,
|
||||
'update': act_enable,
|
||||
'disable': disable_gp,
|
||||
'active-policy': act_active_policy,
|
||||
'active-backend': act_active_backend,
|
||||
'default-policy': act_default_policy
|
||||
}
|
||||
|
||||
if arguments.action == None:
|
||||
action['status']()
|
||||
elif arguments.action == 'update':
|
||||
if get_status():
|
||||
action[arguments.action](arguments.local_policy, arguments.backend)
|
||||
elif arguments.action == 'enable':
|
||||
action[arguments.action](arguments.local_policy, arguments.backend)
|
||||
elif arguments.action == 'write':
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -62,6 +62,18 @@ msgstr "Политика Chromium"
|
||||
msgid "Set user property to"
|
||||
msgstr "Установка свойств для пользователя"
|
||||
|
||||
msgid "The line in the configuration file was cleared"
|
||||
msgstr "В конфигурационном файле была очищена строка"
|
||||
|
||||
msgid "Found GPT in cache"
|
||||
msgstr "Найден GPT в кеше"
|
||||
|
||||
msgid "Got GPO list for trusted user"
|
||||
msgstr "Получен список GPO для доверенного пользователя"
|
||||
|
||||
msgid "Restarting systemd unit"
|
||||
msgstr "Перезапуск unit systemd"
|
||||
|
||||
# Error
|
||||
msgid "Insufficient permissions to run gpupdate"
|
||||
msgstr "Недостаточно прав для запуска gpupdate"
|
||||
@@ -192,9 +204,6 @@ msgstr "Ошибка обновления конфигурации dconf"
|
||||
msgid "Unable to cache specified URI for user"
|
||||
msgstr "Невозможно кэшировать указанный URI для пользователя"
|
||||
|
||||
msgid "Chromium preferences file does not exist at the moment"
|
||||
msgstr "Файл настроек Chromium в данный момент не существует"
|
||||
|
||||
msgid "Error during attempt to read Chromium preferences for user"
|
||||
msgstr "Ошибка при попытке прочитать настройки Chromium для пользователя"
|
||||
|
||||
@@ -222,6 +231,60 @@ msgstr "Ошибка при запуске pkcon_runner асинхронно д
|
||||
msgid "Error running pkcon_runner async for user"
|
||||
msgstr "Ошибка при запуске pkcon_runner асинхронно для пользователя"
|
||||
|
||||
msgid "Error merging user GPT (from machine GPO)"
|
||||
msgstr "Ошибка слияния пользовательской групповой политики (машинная часть)"
|
||||
|
||||
msgid "Error cleaning directory for machine"
|
||||
msgstr "Ошибка очистки каталога для машины"
|
||||
|
||||
msgid "Error cleaning directory for user"
|
||||
msgstr "Ошибка очистки каталога для пользователя"
|
||||
|
||||
msgid "Error while executing command for widgets"
|
||||
msgstr "Ошибка при выполнении команды для виджетов"
|
||||
|
||||
msgid "Error creating environment variables"
|
||||
msgstr "Ошибка создания переменных среды"
|
||||
|
||||
msgid "Error running kwriteconfig5 command"
|
||||
msgstr "Ошибка выполнения команды kwriteconfig5"
|
||||
|
||||
msgid "Error getting list of keys"
|
||||
msgstr "Ошибка получения списка ключей"
|
||||
|
||||
msgid "Error getting key value"
|
||||
msgstr "Ошибка при получении значения ключей"
|
||||
|
||||
msgid "Failed to update dconf database"
|
||||
msgstr "Не удалось обновить базу данных dconf"
|
||||
|
||||
msgid "Exception occurred while updating dconf database"
|
||||
msgstr "Возникло исключение при обновлении базы данных dconf"
|
||||
|
||||
msgid "Failed to retrieve data from dconf database"
|
||||
msgstr "Не удалось получить данные из базы dconf"
|
||||
|
||||
msgid "Autofs restart failed"
|
||||
msgstr "Перезапуск Autofs не удался"
|
||||
|
||||
msgid "Failed to update LDAP with new password data"
|
||||
msgstr "Не удалось обновить LDAP новыми данными пароля"
|
||||
|
||||
msgid "Failed to change local user password"
|
||||
msgstr "Не удалось изменить пароль локального пользователя"
|
||||
|
||||
msgid "Unable to restart systemd unit"
|
||||
msgstr "Не удалось перезапустить unit systemd"
|
||||
|
||||
msgid "Kerberos info unavailable; cannot construct DPAPI parameters"
|
||||
msgstr "Информация Kerberos недоступна; невозможно сформировать параметры DPAPI"
|
||||
|
||||
msgid "Unable to initialize Freeipa backend"
|
||||
msgstr "Невозможно инициализировать бэкэнд Freeipa"
|
||||
|
||||
msgid "FreeIPA API Error"
|
||||
msgstr "Ошибка API FreeIPA"
|
||||
|
||||
# Error_end
|
||||
|
||||
# Debug
|
||||
@@ -234,12 +297,6 @@ msgstr "Имя пользователя не указано - будет исп
|
||||
msgid "Initializing plugin manager"
|
||||
msgstr "Инициализация плагинов"
|
||||
|
||||
msgid "ADP plugin initialized"
|
||||
msgstr "Инициализирован плагин ADP"
|
||||
|
||||
msgid "Running ADP plugin"
|
||||
msgstr "Запущен плагин ADP"
|
||||
|
||||
msgid "Starting GPOA for user via D-Bus"
|
||||
msgstr "Запускается GPOA для пользователя обращением к oddjobd через D-Bus"
|
||||
|
||||
@@ -516,11 +573,11 @@ msgstr "Запуск применение папок для машины"
|
||||
msgid "Folder applier for machine will not be started"
|
||||
msgstr "Применение папок для машины не запускается"
|
||||
|
||||
msgid "Running Folder applier for user in administrator context"
|
||||
msgstr "Запуск применение папок для пользователя в контексте администратора"
|
||||
msgid "Folder creation skipped for machine"
|
||||
msgstr "Создание папки для машины пропущено"
|
||||
|
||||
msgid "Folder applier for user in administrator context will not be started"
|
||||
msgstr "Применение папок для пользователя в контексте администратора не запускается"
|
||||
msgid "Folder creation skipped for user"
|
||||
msgstr "Создание папки для пользователя пропущено"
|
||||
|
||||
msgid "Running Folder applier for user in user context"
|
||||
msgstr "Запуск применение папок для пользователя в контексте пользователя"
|
||||
@@ -597,11 +654,11 @@ msgstr "Запуск применение настроек Envvar для маш
|
||||
msgid "Envvar applier for machine will not be started"
|
||||
msgstr "Применение настроек Envvar для машины не запускается"
|
||||
|
||||
msgid "Running Envvar applier for user in user context"
|
||||
msgstr "Запуск применение настроек Envvar для пользователя в контексте пользователя"
|
||||
msgid "Running Envvar applier for user in admin context"
|
||||
msgstr "Запуск применение настроек Envvar для пользователя в контексте администратора"
|
||||
|
||||
msgid "Envvar applier for user in user context will not be started"
|
||||
msgstr "Применение настроек Envvar для пользователя в контексте пользователя не запускается"
|
||||
msgid "Envvar applier for user in admin context will not be started"
|
||||
msgstr "Применение настроек Envvar для пользователя в контексте администратора не запускается"
|
||||
|
||||
msgid "Running Package applier for machine"
|
||||
msgstr "Запуск установки пакетов для машины"
|
||||
@@ -654,6 +711,279 @@ msgstr "Не удалось найти настройки gsettings для ма
|
||||
msgid "Failed to found user gsettings"
|
||||
msgstr "Не удалось найти настройки gsettings пользователя"
|
||||
|
||||
msgid "Configure user Group Policy loopback processing mode"
|
||||
msgstr "Настройка режима обработки замыкания пользовательской групповой политики"
|
||||
|
||||
msgid "Saving information about script"
|
||||
msgstr "Сохранение информации о скрипте"
|
||||
|
||||
msgid "No machine scripts directory to clean up"
|
||||
msgstr "Нет каталога машинных скриптов для очистки"
|
||||
|
||||
msgid "No user scripts directory to clean up"
|
||||
msgstr "Нет каталога пользовательских скриптов для очистки"
|
||||
|
||||
msgid "Prepare Scripts applier for machine"
|
||||
msgstr "Подготовка к применению машинных скриптов"
|
||||
|
||||
msgid "Scripts applier for machine will not be started"
|
||||
msgstr "Применение машинных скриптов не запускается"
|
||||
|
||||
msgid "Prepare Scripts applier for user in user context"
|
||||
msgstr "Подготовка к применению скриптов пользователя в его контексте"
|
||||
|
||||
msgid "Scripts applier for user in user context will not be started"
|
||||
msgstr "Применение скриптов пользователя в его контексте не запускается"
|
||||
|
||||
msgid "Clean machine scripts directory"
|
||||
msgstr "Очистка каталога машинных скриптов"
|
||||
|
||||
msgid "Clean user scripts directory"
|
||||
msgstr "Очистка каталога пользовательских скриптов"
|
||||
|
||||
msgid "Saving information about file"
|
||||
msgstr "Сохранение информации о файле"
|
||||
|
||||
msgid "Failed to return file path"
|
||||
msgstr "Не удалось вернуть путь к файлу"
|
||||
|
||||
msgid "Failed to create file"
|
||||
msgstr "Не удалось создать файл"
|
||||
|
||||
msgid "Failed to delete file"
|
||||
msgstr "Не удалось удалить файл"
|
||||
|
||||
msgid "Failed to update file"
|
||||
msgstr "Не удалось обновить файл"
|
||||
|
||||
msgid "Running File copy applier for machine"
|
||||
msgstr "Запуск применение настроек копирования файлов для машины"
|
||||
|
||||
msgid "Running File copy applier for machine will not be started"
|
||||
msgstr "Применение настроек копирования файлов для машины не будет запущено"
|
||||
|
||||
msgid "Running File copy applier for user in administrator context"
|
||||
msgstr "Запуск применение настроек копирования файлов для пользователя в контексте администратора"
|
||||
|
||||
msgid "Running File copy applier for user in administrator context will not be started"
|
||||
msgstr "Применение настроек копирования файлов для пользователя в контексте администратора не будет запущено"
|
||||
|
||||
msgid "Running ini applier for machine"
|
||||
msgstr "Запуск применение настроек ini файлов для машины"
|
||||
|
||||
msgid "Running ini applier for machine will not be started"
|
||||
msgstr "Применение настроек ini файлов для машины не будет запущено"
|
||||
|
||||
msgid "Running ini applier for user in user context"
|
||||
msgstr "Запуск применение настроек ini файлов для пользователя в контексте пользователя"
|
||||
|
||||
msgid "Running ini applier for user in user context will not be started"
|
||||
msgstr "Применение настроек ini файлов для пользователя в контексте пользователя не будет запущено"
|
||||
|
||||
msgid "Ini-file path not recognized"
|
||||
msgstr "Путь к ini-файлу не распознан"
|
||||
|
||||
msgid "Ini-file is not readable"
|
||||
msgstr "Ini-файл не читается"
|
||||
|
||||
msgid "Saving information about ini-file"
|
||||
msgstr "Сохранение информации об ini-файле"
|
||||
|
||||
msgid "Dictionary key generation failed"
|
||||
msgstr "Формирования ключа словаря не удалось"
|
||||
|
||||
msgid "Running CIFS applier for machine"
|
||||
msgstr "Запуск применение настроек CIFS для машины"
|
||||
|
||||
msgid "CIFS applier for machine will not be started"
|
||||
msgstr "Применение настроек CIFS для машины не будет запущено"
|
||||
|
||||
msgid "Saving information about network shares"
|
||||
msgstr "Сохранение информации о сетевых ресурсах"
|
||||
|
||||
msgid "Running networkshare applier for machine"
|
||||
msgstr "Запуск применение настроек сетевых каталогов для машины"
|
||||
|
||||
msgid "Running networkshare applier for machine will not be starte"
|
||||
msgstr "Применение настроек сетевых каталогов для машины не будет запущено"
|
||||
|
||||
msgid "Apply network share data action failed"
|
||||
msgstr "Не удалось применить действие с данными общего сетевого ресурса"
|
||||
|
||||
msgid "Running yandex_browser_applier for machine"
|
||||
msgstr "Запуск yandex_browser_applier для машины"
|
||||
|
||||
msgid "Yandex_browser_applier for machine will not be started"
|
||||
msgstr "Yandex_browser_applier для машины не запустится"
|
||||
|
||||
msgid "Wrote YandexBrowser preferences to"
|
||||
msgstr "Запись настройки Яндекс Браузера в"
|
||||
|
||||
msgid "Running networkshare applier for user"
|
||||
msgstr "Запуск применение настроек сетевых каталогов для пользователя"
|
||||
|
||||
msgid "File copy"
|
||||
msgstr "Копирование файла"
|
||||
|
||||
msgid "Running networkshare applier for user will not be started"
|
||||
msgstr "Применение настроек сетевых каталогов для пользователя не будет запущено"
|
||||
|
||||
msgid "File update"
|
||||
msgstr "Обновление файла"
|
||||
|
||||
msgid "Applying settings for network share"
|
||||
msgstr "Применение настроек для сетевой папки"
|
||||
|
||||
msgid "Deleting a file"
|
||||
msgstr "Удаление файла"
|
||||
|
||||
msgid "Running GPOA by root for user"
|
||||
msgstr "Запуск GPOA от root для пользователя"
|
||||
|
||||
msgid "The GPOA process was started for computer"
|
||||
msgstr "Процесс GPOA запущен для компьютера"
|
||||
|
||||
msgid "Running networkshare applier for machine will not be started"
|
||||
msgstr "Применение настроек сетевых каталогов для машины не будет запущено"
|
||||
|
||||
msgid "Failed to create a symlink to the network drives mountpoint"
|
||||
msgstr "Не удалось создать ссылку на точку монтирования сетевых дисков пользователя"
|
||||
|
||||
msgid "Failed to create a symlink to the system network drives mountpoint"
|
||||
msgstr "Не удалось создать ссылку на точку монтирования системных сетевых дисков"
|
||||
|
||||
msgid "Failed to create a symlink to the hidden network drives mountpoint"
|
||||
msgstr "Не удалось создать ссылку на точку монтирования скрытых сетевых дисков пользователя"
|
||||
|
||||
msgid "Failed to create a symlink to the hidden system network drives mountpoint"
|
||||
msgstr "Не удалось создать ссылку на точку монтирования скрытых системных сетевых дисков"
|
||||
|
||||
msgid "Running KDE applier for machine"
|
||||
msgstr "Запуск применения настроек KDE для машины"
|
||||
|
||||
msgid "KDE applier for machine will not be started"
|
||||
msgstr "Применение настроек KDE для машины не удалось"
|
||||
|
||||
msgid "Running KDE applier for user in user context"
|
||||
msgstr "Запуск применения настроек KDE в контексте пользователя"
|
||||
|
||||
msgid "KDE applier for user in user context will not be started"
|
||||
msgstr "KDE в контексте пользователя не запускается"
|
||||
|
||||
msgid "Changing the configuration file"
|
||||
msgstr "Изменение конфигурационного файла"
|
||||
|
||||
msgid "Widget command completed successfully"
|
||||
msgstr "Команда для виджетов выполнена успешно"
|
||||
|
||||
msgid "Getting a list of keys"
|
||||
msgstr "Получение списка ключей"
|
||||
|
||||
msgid "Getting the key value"
|
||||
msgstr "Получение значения ключа"
|
||||
|
||||
msgid "Successfully updated dconf database"
|
||||
msgstr "База данных dconf успешно обновлена"
|
||||
|
||||
msgid "Creating a dictionary with keys and values from the dconf database"
|
||||
msgstr "Формирование словаря с ключами и значениями из базы dconf"
|
||||
|
||||
msgid "No entry found for the specified path"
|
||||
msgstr "Не найдено записей по указанному пути"
|
||||
|
||||
msgid "Creating an ini file with policies for dconf"
|
||||
msgstr "Создание ini-файла с политиками для dconf"
|
||||
|
||||
msgid "GPO version was not found"
|
||||
msgstr "Версия GPO не найдена"
|
||||
|
||||
msgid "SYSVOL entry found in cache"
|
||||
msgstr "Запись SYSVOL найдена в кеше"
|
||||
|
||||
msgid "Wrote Thunderbird preferences to"
|
||||
msgstr "Настройки Thunderbird записаны в"
|
||||
|
||||
msgid "Running Thunderbird applier for machine"
|
||||
msgstr "Запуск применение настроек Thunderbird для машины"
|
||||
|
||||
msgid "Thunderbird applier for machine will not be started"
|
||||
msgstr "Применение настроек Thunderbird для компьютера не запускается"
|
||||
|
||||
msgid "The environment file has been cleaned"
|
||||
msgstr "Файл environment очищен"
|
||||
|
||||
msgid "Cleanup of file environment failed"
|
||||
msgstr "Очистка файла environment не удалась"
|
||||
|
||||
msgid "Failed to get dictionary"
|
||||
msgstr "Не удалось получить словарь"
|
||||
|
||||
msgid "LAPS applier started"
|
||||
msgstr "Запущен обработчик LAPS"
|
||||
|
||||
msgid "LAPS applier is disabled"
|
||||
msgstr "Обработчик LAPS отключен"
|
||||
|
||||
msgid "Rebooting system after password change"
|
||||
msgstr "Перезагрузка системы после смены пароля"
|
||||
|
||||
msgid "Password changed"
|
||||
msgstr "Пароль изменён"
|
||||
|
||||
msgid "Writing password changes time"
|
||||
msgstr "Запись времени изменения пароля"
|
||||
|
||||
msgid "Requirements not met"
|
||||
msgstr "Требования не выполнены"
|
||||
|
||||
msgid "The number of hours from the moment of the last user entrance"
|
||||
msgstr "Количество часов с момента последнего входа пользователя"
|
||||
|
||||
msgid "The number of hours since the password has last changed"
|
||||
msgstr "Количество часов с момента последнего изменения пароля"
|
||||
|
||||
msgid "LDAP updated with new password data"
|
||||
msgstr "LDAP обновлён новыми данными пароля"
|
||||
|
||||
msgid "No active sessions found"
|
||||
msgstr "Активные сеансы не найдены"
|
||||
|
||||
msgid "Process terminated"
|
||||
msgstr "Процесс завершён"
|
||||
|
||||
msgid "Password update not needed"
|
||||
msgstr "Обновление пароля не требуется"
|
||||
|
||||
msgid "Password successfully updated"
|
||||
msgstr "Пароль успешно обновлён"
|
||||
|
||||
msgid "Cleaning the autofs catalog"
|
||||
msgstr "Очистка каталога autofs"
|
||||
|
||||
msgid "No user login records found"
|
||||
msgstr "Не найдены записи о входе пользователя"
|
||||
|
||||
msgid "Calculating time since the first user login after their password change"
|
||||
msgstr "Расчет времени с момента первого входа пользователя после изменения их пароля"
|
||||
|
||||
msgid "No logins found after password change"
|
||||
msgstr "Не найдены входы после изменения пароля"
|
||||
|
||||
msgid "User not found in passwd database"
|
||||
msgstr "Пользователь не найден в базе данных паролей"
|
||||
|
||||
msgid "Unknown message type, no message assigned"
|
||||
msgstr "Неизвестный тип сообщения"
|
||||
|
||||
msgid "Plugin is disabled"
|
||||
msgstr "Плагин отключен"
|
||||
|
||||
msgid "Running plugin"
|
||||
msgstr "Запуск плагина"
|
||||
|
||||
msgid "Failed to load cached versions"
|
||||
msgstr "Не удалось загрузить кешированные версии"
|
||||
|
||||
# Debug_end
|
||||
|
||||
# Warning
|
||||
@@ -691,6 +1021,113 @@ msgstr "CUPS не установлен: настройки принтера не
|
||||
msgid "Unsupported NTP server type"
|
||||
msgstr "Неподдерживаемый тип сервера NTP"
|
||||
|
||||
msgid "Failed to read the list of files"
|
||||
msgstr "Не удалось прочитать список файлов"
|
||||
|
||||
msgid "Failed to caching the file"
|
||||
msgstr "Не удалось кэшировать файл"
|
||||
|
||||
msgid "Could not create a valid list of keys"
|
||||
msgstr "Не удалось создать допустимый список ключей"
|
||||
|
||||
msgid "Failed to copy file"
|
||||
msgstr "Не удалось скопировать файл"
|
||||
|
||||
msgid "Failed to create KDE settings list"
|
||||
msgstr "Не удалось создать список настроек KDE"
|
||||
|
||||
msgid "Could not find tools to configure KDE"
|
||||
msgstr "Не удалось найти инструменты для настройки KDE"
|
||||
|
||||
msgid "Failed to open KDE settings"
|
||||
msgstr "Не удалось открыть настройки KDE"
|
||||
|
||||
msgid "Failed to change KDE configuration file"
|
||||
msgstr "Не удалось изменить файл конфигурации KDE"
|
||||
|
||||
msgid "Error connecting to server"
|
||||
msgstr "Ошибка при подключении к серверу"
|
||||
|
||||
msgid "Wallpaper configuration file not found"
|
||||
msgstr "Конфигурационный файл для обоев не найден"
|
||||
|
||||
msgid "The user setting was not installed, conflict with computer setting"
|
||||
msgstr "Пользовательская настройка не была установлена, конфликт с настройкой компьютера"
|
||||
|
||||
msgid "Action for ini file failed"
|
||||
msgstr "Не удалось выполнить действие для INI-файла"
|
||||
|
||||
msgid "Couldn't get the uid"
|
||||
msgstr "Не удалось получить uid"
|
||||
|
||||
msgid "Failed to load content from remote host"
|
||||
msgstr "Не удалось загрузить контент с удаленного узла"
|
||||
|
||||
msgid "Force mode activated"
|
||||
msgstr "Режим force задействован"
|
||||
|
||||
msgid "Failed to change password"
|
||||
msgstr "Не удалось изменить пароль"
|
||||
|
||||
msgid "Failed to write password modification time"
|
||||
msgstr "Не удалось записать время изменения пароля"
|
||||
|
||||
msgid "LAPS requirements not met, module disabled"
|
||||
msgstr "Требования LAPS не выполнены, модуль отключён"
|
||||
|
||||
msgid "Could not resolve encryption principal name. Return admin group SID"
|
||||
msgstr "Не удалось определить имя шифрования. Возвращён SID группы администраторов"
|
||||
|
||||
msgid "Failed to get expiration time from LDAP"
|
||||
msgstr "Не удалось получить время истечения срока действия из LDAP"
|
||||
|
||||
msgid "Failed to read password modification time from dconf"
|
||||
msgstr "Не удалось прочитать время изменения пароля из dconf"
|
||||
|
||||
msgid "Failed to get last login time"
|
||||
msgstr "Не удалось получить время последнего входа"
|
||||
|
||||
msgid "Failed to calculate password age"
|
||||
msgstr "Не удалось вычислить возраст пароля"
|
||||
|
||||
msgid "Failed to terminate process"
|
||||
msgstr "Не удалось завершить процесс"
|
||||
|
||||
msgid "The user was not found to change the password"
|
||||
msgstr "Пользователь для изменения пароля не был найден"
|
||||
|
||||
msgid "Error while cleaning the autofs catalog"
|
||||
msgstr "Ошибка при очистке каталога autofs"
|
||||
|
||||
msgid "Problem with timezone detection"
|
||||
msgstr "Проблема с определением часового пояса"
|
||||
|
||||
msgid "Error executing last command"
|
||||
msgstr "Ошибка выполнения команды last"
|
||||
|
||||
msgid "Last command not found"
|
||||
msgstr "Команда last не найдена"
|
||||
|
||||
msgid "Error getting user login times"
|
||||
msgstr "Ошибка получения времени входа пользователя"
|
||||
|
||||
msgid "Invalid timezone in reference datetime"
|
||||
msgstr "Некорректный часовой пояс в reference datetime"
|
||||
|
||||
msgid "wbinfo SID lookup failed; will try as trusted domain user"
|
||||
msgstr "Ошибка получения SID через wbinfo; будет предпринята попытка как для пользователя доверенного домена"
|
||||
|
||||
msgid "Plugin is not valid API object"
|
||||
msgstr "Плагин не является допустимым объектом API"
|
||||
|
||||
msgid "Error loading plugin from file"
|
||||
msgstr "Ошибка загрузки плагина из файла"
|
||||
|
||||
msgid "Plugin failed to apply with user privileges"
|
||||
msgstr "Плагин не смог примениться с правами пользователя"
|
||||
|
||||
# Warning_end
|
||||
|
||||
# Fatal
|
||||
msgid "Unable to refresh GPO list"
|
||||
msgstr "Невозможно обновить список объектов групповых политик"
|
||||
@@ -704,7 +1141,5 @@ msgstr "Не удалось получить GPT для пользователя
|
||||
msgid "Unknown fatal code"
|
||||
msgstr "Неизвестный код фатальной ошибки"
|
||||
|
||||
# get_message
|
||||
msgid "Unknown message type, no message assigned"
|
||||
msgstr "Неизвестный тип сообщения"
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -19,8 +19,9 @@
|
||||
|
||||
import gettext
|
||||
|
||||
|
||||
def info_code(code):
|
||||
info_ids = dict()
|
||||
info_ids = {}
|
||||
info_ids[1] = 'Got GPO list for username'
|
||||
info_ids[2] = 'Got GPO'
|
||||
info_ids[3] = 'Working with control'
|
||||
@@ -30,11 +31,15 @@ def info_code(code):
|
||||
info_ids[7] = 'Firefox policy'
|
||||
info_ids[8] = 'Chromium policy'
|
||||
info_ids[9] = 'Set user property to'
|
||||
info_ids[10] = 'The line in the configuration file was cleared'
|
||||
info_ids[11] = 'Found GPT in cache'
|
||||
info_ids[12] = 'Got GPO list for trusted user'
|
||||
info_ids[13] = 'Restarting systemd unit'
|
||||
|
||||
return info_ids.get(code, 'Unknown info code')
|
||||
|
||||
def error_code(code):
|
||||
error_ids = dict()
|
||||
error_ids = {}
|
||||
error_ids[1] = 'Insufficient permissions to run gpupdate'
|
||||
error_ids[2] = 'gpupdate will not be started'
|
||||
error_ids[3] = 'Backend execution error'
|
||||
@@ -43,7 +48,7 @@ def error_code(code):
|
||||
error_ids[6] = 'Error running GPOA for user'
|
||||
error_ids[7] = 'Unable to initialize Samba backend'
|
||||
error_ids[8] = 'Unable to initialize no-domain backend'
|
||||
error_ids[9] = 'Error running ADP'
|
||||
error_ids[9] = 'Error running plugin'
|
||||
error_ids[10] = 'Unable to determine DC hostname'
|
||||
error_ids[11] = 'Error occured while running applier with user privileges'
|
||||
error_ids[12] = 'Unable to initialize backend'
|
||||
@@ -85,7 +90,6 @@ def error_code(code):
|
||||
error_ids[48] = 'Error recompiling global GSettings schemas'
|
||||
error_ids[49] = 'Error update configuration dconf'
|
||||
error_ids[50] = 'Unable to cache specified URI for user'
|
||||
error_ids[51] = 'Chromium preferences file does not exist at the moment'
|
||||
error_ids[52] = 'Error during attempt to read Chromium preferences for user'
|
||||
error_ids[53] = 'Fail for applying shortcut to file with \'%\''
|
||||
error_ids[54] = 'Fail for applying shortcut to not absolute path'
|
||||
@@ -97,17 +101,33 @@ def error_code(code):
|
||||
error_ids[60] = 'Error running pkcon_runner sync for user'
|
||||
error_ids[61] = 'Error running pkcon_runner async for machine'
|
||||
error_ids[62] = 'Error running pkcon_runner async for user'
|
||||
|
||||
|
||||
error_ids[63] = 'Error merging user GPT (from machine GPO)'
|
||||
error_ids[64] = 'Error to cleanup directory for machine'
|
||||
error_ids[65] = 'Error to cleanup directory for user'
|
||||
error_ids[66] = 'Error while executing command for widgets'
|
||||
error_ids[67] = 'Error creating environment variables'
|
||||
error_ids[68] = 'Error running kwriteconfig5 command'
|
||||
error_ids[69] = 'Error getting list of keys'
|
||||
error_ids[70] = 'Error getting key value'
|
||||
error_ids[71] = 'Failed to update dconf database'
|
||||
error_ids[72] = 'Exception occurred while updating dconf database'
|
||||
error_ids[73] = 'Failed to retrieve data from dconf database'
|
||||
error_ids[74] = 'Autofs restart failed'
|
||||
error_ids[75] = 'Failed to update LDAP with new password data'
|
||||
error_ids[76] = 'Failed to change local user password'
|
||||
error_ids[77] = 'Unable to restart systemd unit'
|
||||
error_ids[78] = 'Kerberos info unavailable; cannot construct DPAPI parameters'
|
||||
error_ids[79] = 'Unable to initialize Freeipa backend'
|
||||
error_ids[80] = 'FreeIPA API error'
|
||||
return error_ids.get(code, 'Unknown error code')
|
||||
|
||||
def debug_code(code):
|
||||
debug_ids = dict()
|
||||
debug_ids = {}
|
||||
debug_ids[1] = 'The GPOA process was started for user'
|
||||
debug_ids[2] = 'Username is not specified - will use username of the current process'
|
||||
debug_ids[3] = 'Initializing plugin manager'
|
||||
debug_ids[4] = 'ADP plugin initialized'
|
||||
debug_ids[5] = 'Running ADP plugin'
|
||||
debug_ids[4] = 'Running plugin'
|
||||
#debug_ids[5] = ''
|
||||
debug_ids[6] = 'Starting GPOA for user via D-Bus'
|
||||
debug_ids[7] = 'Cache directory determined'
|
||||
debug_ids[8] = 'Initializing local backend without domain'
|
||||
@@ -211,8 +231,8 @@ def debug_code(code):
|
||||
debug_ids[106] = 'Applying shortcut file to'
|
||||
debug_ids[107] = 'Running Folder applier for machine'
|
||||
debug_ids[108] = 'Folder applier for machine will not be started'
|
||||
debug_ids[109] = 'Running Folder applier for user in administrator context'
|
||||
debug_ids[110] = 'Folder applier for user in administrator context will not be started'
|
||||
debug_ids[109] = 'Folder creation skipped for machine'
|
||||
debug_ids[110] = 'Folder creation skipped for user'
|
||||
debug_ids[111] = 'Running Folder applier for user in user context'
|
||||
debug_ids[112] = 'Folder applier for user in user context will not be started'
|
||||
debug_ids[113] = 'Running CUPS applier for machine'
|
||||
@@ -238,8 +258,8 @@ def debug_code(code):
|
||||
debug_ids[133] = 'NTP applier for machine will not be started'
|
||||
debug_ids[134] = 'Running Envvar applier for machine'
|
||||
debug_ids[135] = 'Envvar applier for machine will not be started'
|
||||
debug_ids[136] = 'Running Envvar applier for user in user context'
|
||||
debug_ids[137] = 'Envvar applier for user in user context will not be started'
|
||||
debug_ids[136] = 'Running Envvar applier for user in admin context'
|
||||
debug_ids[137] = 'Envvar applier for user in admin context will not be started'
|
||||
debug_ids[138] = 'Running Package applier for machine'
|
||||
debug_ids[139] = 'Package applier for machine will not be started'
|
||||
debug_ids[140] = 'Running Package applier for user in administrator context'
|
||||
@@ -254,11 +274,96 @@ def debug_code(code):
|
||||
debug_ids[149] = 'Removing a package'
|
||||
debug_ids[150] = 'Failed to found gsettings for machine'
|
||||
debug_ids[151] = 'Failed to found user gsettings'
|
||||
debug_ids[152] = 'Configure user Group Policy loopback processing mode'
|
||||
debug_ids[153] = 'Saving information about script'
|
||||
debug_ids[154] = 'No machine scripts directory to clean up'
|
||||
debug_ids[155] = 'No user scripts directory to clean up'
|
||||
debug_ids[156] = 'Prepare Scripts applier for machine'
|
||||
debug_ids[157] = 'Scripts applier for machine will not be started'
|
||||
debug_ids[158] = 'Prepare Scripts applier for user in user context'
|
||||
debug_ids[159] = 'Scripts applier for user in user context will not be started'
|
||||
debug_ids[160] = 'Clean machine scripts directory'
|
||||
debug_ids[161] = 'Clean user scripts directory'
|
||||
debug_ids[162] = 'Saving information about file'
|
||||
debug_ids[163] = 'Failed to return file path'
|
||||
debug_ids[164] = 'Failed to create file'
|
||||
debug_ids[165] = 'Failed to delete file'
|
||||
debug_ids[166] = 'Failed to update file'
|
||||
debug_ids[167] = 'Running File copy applier for machine'
|
||||
debug_ids[168] = 'Running File copy applier for machine will not be started'
|
||||
debug_ids[169] = 'Running File copy applier for user in administrator context'
|
||||
debug_ids[170] = 'Running File copy applier for user in administrator context will not be started'
|
||||
debug_ids[171] = 'Running ini applier for machine'
|
||||
debug_ids[172] = 'Running ini applier for machine will not be started'
|
||||
debug_ids[173] = 'Running ini applier for user in user context'
|
||||
debug_ids[174] = 'Running ini applier for user in user context will not be started'
|
||||
debug_ids[175] = 'Ini-file path not recognized'
|
||||
debug_ids[176] = 'Ini-file is not readable'
|
||||
debug_ids[177] = 'Saving information about ini-file'
|
||||
debug_ids[178] = 'Dictionary key generation failed'
|
||||
debug_ids[179] = 'Running CIFS applier for machine'
|
||||
debug_ids[180] = 'CIFS applier for machine will not be started'
|
||||
debug_ids[181] = 'Running networkshare applier for machine will not be started'
|
||||
debug_ids[182] = 'Apply network share data action failed'
|
||||
debug_ids[183] = 'Running yandex_browser_applier for machine'
|
||||
debug_ids[184] = 'Yandex_browser_applier for machine will not be started'
|
||||
debug_ids[185] = 'Wrote YandexBrowser preferences to'
|
||||
debug_ids[186] = 'Saving information about network shares'
|
||||
debug_ids[187] = 'Running networkshare applier for machine'
|
||||
debug_ids[188] = 'Running networkshare applier for user'
|
||||
debug_ids[189] = 'Running networkshare applier for user will not be started'
|
||||
debug_ids[190] = 'Applying settings for network share'
|
||||
debug_ids[191] = 'File copy'
|
||||
debug_ids[192] = 'File update'
|
||||
debug_ids[193] = 'Deleting a file'
|
||||
debug_ids[194] = 'Failed to create a symlink to the network drives mountpoint'
|
||||
debug_ids[195] = 'Failed to create a symlink to the system network drives mountpoint'
|
||||
debug_ids[196] = 'Failed to create a symlink to the hidden network drives mountpoint'
|
||||
debug_ids[197] = 'Failed to create a symlink to the hidden system network drives mountpoint'
|
||||
debug_ids[198] = 'Running KDE applier for machine'
|
||||
debug_ids[199] = 'KDE applier for machine will not be started'
|
||||
debug_ids[200] = 'Running KDE applier for user in user context'
|
||||
debug_ids[201] = 'KDE applier for user in user context will not be started'
|
||||
debug_ids[202] = 'Changing the configuration file'
|
||||
debug_ids[203] = 'Widget command completed successfully'
|
||||
debug_ids[204] = 'Getting a list of keys'
|
||||
debug_ids[205] = 'Getting the key value'
|
||||
debug_ids[206] = 'Successfully updated dconf database'
|
||||
debug_ids[207] = 'Creating a dictionary with keys and values from the dconf database'
|
||||
debug_ids[208] = 'No entry found for the specified path'
|
||||
debug_ids[209] = 'Creating an ini file with policies for dconf'
|
||||
debug_ids[211] = 'SYSVOL entry found in cache'
|
||||
debug_ids[212] = 'Wrote Thunderbird preferences to'
|
||||
debug_ids[213] = 'Running Thunderbird applier for machine'
|
||||
debug_ids[214] = 'Thunderbird applier for machine will not be started'
|
||||
debug_ids[215] = 'The environment file has been cleaned'
|
||||
debug_ids[216] = 'Cleanup of file environment failed'
|
||||
debug_ids[217] = 'Failed to get dictionary'
|
||||
debug_ids[218] = 'LAPS applier started'
|
||||
debug_ids[219] = 'LAPS applier is disabled'
|
||||
debug_ids[220] = 'Rebooting system after password change'
|
||||
debug_ids[221] = 'Password changed'
|
||||
debug_ids[222] = 'Writing password changes time'
|
||||
debug_ids[223] = 'Requirements not met'
|
||||
debug_ids[224] = 'The number of hours from the moment of the last user entrance'
|
||||
debug_ids[225] = 'The number of hours since the password has last changed'
|
||||
debug_ids[226] = 'LDAP updated with new password data'
|
||||
debug_ids[227] = 'No active sessions found'
|
||||
debug_ids[228] = 'Process terminated'
|
||||
debug_ids[229] = 'Password update not needed'
|
||||
debug_ids[230] = 'Password successfully updated'
|
||||
debug_ids[231] = 'Cleaning the autofs catalog'
|
||||
debug_ids[232] = 'No user login records found'
|
||||
debug_ids[233] = 'Calculating time since the first user login after their password change'
|
||||
debug_ids[234] = 'No logins found after password change'
|
||||
debug_ids[235] = 'User not found in passwd database'
|
||||
debug_ids[236] = 'Plugin is disabled'
|
||||
debug_ids[237] = 'Failed to load cached versions'
|
||||
|
||||
return debug_ids.get(code, 'Unknown debug code')
|
||||
|
||||
def warning_code(code):
|
||||
warning_ids = dict()
|
||||
warning_ids = {}
|
||||
warning_ids[1] = (
|
||||
'Unable to perform gpupdate for non-existent user, '
|
||||
'will update machine settings'
|
||||
@@ -276,11 +381,46 @@ def warning_code(code):
|
||||
warning_ids[9] = 'CUPS is not installed: no printer settings will be deployed'
|
||||
warning_ids[10] = 'Unsupported NTP server type'
|
||||
warning_ids[11] = 'Unable to refresh GPO list'
|
||||
warning_ids[12] = 'Failed to read the list of files'
|
||||
warning_ids[13] = 'Failed to caching the file'
|
||||
warning_ids[14] = 'Could not create a valid list of keys'
|
||||
warning_ids[15] = 'Failed to copy file'
|
||||
warning_ids[16] = 'Failed to create KDE settings list'
|
||||
warning_ids[17] = 'Could not find tools to configure KDE'
|
||||
warning_ids[18] = 'Failed to open KDE settings'
|
||||
warning_ids[19] = 'Failed to change KDE configuration file'
|
||||
warning_ids[20] = 'Error connecting to server'
|
||||
warning_ids[21] = 'Wallpaper configuration file not found'
|
||||
warning_ids[22] = 'The user setting was not installed, conflict with computer setting'
|
||||
warning_ids[23] = 'Action for ini file failed'
|
||||
warning_ids[24] = 'Couldn\'t get the uid'
|
||||
warning_ids[25] = 'Failed to load content from remote host'
|
||||
warning_ids[26] = 'Force mode activated'
|
||||
warning_ids[27] = 'Failed to change password'
|
||||
warning_ids[28] = 'Failed to write password modification time'
|
||||
warning_ids[29] = 'LAPS requirements not met, module disabled'
|
||||
warning_ids[30] = 'Could not resolve encryption principal name. Return admin group SID'
|
||||
warning_ids[31] = 'Failed to get expiration time from LDAP'
|
||||
warning_ids[32] = 'Failed to read password modification time from dconf'
|
||||
warning_ids[33] = 'Failed to get last login time'
|
||||
warning_ids[34] = 'Failed to calculate password age'
|
||||
warning_ids[35] = 'Failed to terminate process'
|
||||
warning_ids[36] = 'The user was not found to change the password'
|
||||
warning_ids[37] = 'Error while cleaning the autofs catalog'
|
||||
warning_ids[38] = 'Problem with timezone detection'
|
||||
warning_ids[39] = 'Error executing last command'
|
||||
warning_ids[40] = 'Last command not found'
|
||||
warning_ids[41] = 'Error getting user login times'
|
||||
warning_ids[42] = 'Invalid timezone in reference datetime'
|
||||
warning_ids[43] = 'wbinfo SID lookup failed; will try as trusted domain user'
|
||||
warning_ids[44] = 'Plugin is not valid API object'
|
||||
warning_ids[45] = 'Error loading plugin from file'
|
||||
warning_ids[46] = 'Plugin failed to apply with user privileges'
|
||||
|
||||
return warning_ids.get(code, 'Unknown warning code')
|
||||
|
||||
def fatal_code(code):
|
||||
fatal_ids = dict()
|
||||
fatal_ids = {}
|
||||
fatal_ids[1] = 'Unable to refresh GPO list'
|
||||
fatal_ids[2] = 'Error getting GPTs for machine'
|
||||
fatal_ids[3] = 'Error getting GPTs for user'
|
||||
@@ -304,7 +444,7 @@ def get_message(code):
|
||||
return retstr
|
||||
|
||||
def message_with_code(code):
|
||||
retstr = '[' + code[0:1] + code[1:].rjust(5, '0') + ']| ' + gettext.gettext(get_message(code))
|
||||
retstr = 'core' + '[' + code[0:1] + code[1:].rjust(7, '0') + ']| ' + gettext.gettext(get_message(code))
|
||||
|
||||
return retstr
|
||||
|
||||
|
||||
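As a quick orientation, a minimal sketch of the prefix format produced by the reworked message_with_code() above (illustrative only, not part of the commit):

# The new format prepends 'core' and widens the numeric part to 7 digits.
def format_code(code):
    return 'core' + '[' + code[0:1] + code[1:].rjust(7, '0') + ']'

print(format_code('E9'))    # core[E0000009]
print(format_code('D236'))  # core[D0000236]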
@@ -20,15 +20,12 @@
|
||||
import rpm
|
||||
import subprocess
|
||||
from gpoa.storage import registry_factory
|
||||
from util.util import get_uid_by_username, string_to_literal_eval
|
||||
import logging
|
||||
from util.logging import log
|
||||
import argparse
|
||||
import gettext
|
||||
import locale
|
||||
from messages import message_with_code
|
||||
from util.arguments import (
|
||||
set_loglevel
|
||||
)
|
||||
|
||||
|
||||
def is_rpm_installed(rpm_name):
|
||||
@@ -44,58 +41,54 @@ def is_rpm_installed(rpm_name):
|
||||
|
||||
class Pkcon_applier:
|
||||
|
||||
def __init__(self, sid = None):
|
||||
self.__install_key_name = 'Install'
|
||||
self.__remove_key_name = 'Remove'
|
||||
self.__hkcu_branch = 'Software\\BaseALT\\Policies\\Packages'
|
||||
self.__hklm_branch = 'Software\\BaseALT\\Policies\\Packages'
|
||||
def __init__(self, user = None):
|
||||
install_key_name = 'Install'
|
||||
remove_key_name = 'Remove'
|
||||
hklm_branch = 'Software/BaseALT/Policies/Packages'
|
||||
self.__install_command = ['/usr/bin/pkcon', '-y', 'install']
|
||||
self.__remove_command = ['/usr/bin/pkcon', '-y', 'remove']
|
||||
self.__reinstall_command = ['/usr/bin/pkcon', '-y', 'reinstall']
|
||||
self.install_packages = set()
|
||||
self.remove_packages = set()
|
||||
self.storage = registry_factory('registry')
|
||||
if sid:
|
||||
install_branch_user = '{}\\{}%'.format(self.__hkcu_branch, self.__install_key_name)
|
||||
remove_branch_user = '{}\\{}%'.format(self.__hkcu_branch, self.__remove_key_name)
|
||||
self.install_packages_setting = self.storage.filter_hkcu_entries(sid, install_branch_user)
|
||||
self.remove_packages_setting = self.storage.filter_hkcu_entries(sid, remove_branch_user)
|
||||
self.storage = registry_factory()
|
||||
if user:
|
||||
uid = get_uid_by_username(user)
|
||||
dict_dconf_db = self.storage.get_dictionary_from_dconf_file_db(uid)
|
||||
else:
|
||||
install_branch = '{}\\{}%'.format(self.__hklm_branch, self.__install_key_name)
|
||||
remove_branch = '{}\\{}%'.format(self.__hklm_branch, self.__remove_key_name)
|
||||
self.install_packages_setting = self.storage.filter_hklm_entries(install_branch)
|
||||
self.remove_packages_setting = self.storage.filter_hklm_entries(remove_branch)
|
||||
dict_dconf_db = self.storage.get_dictionary_from_dconf_file_db()
|
||||
dict_packages = dict_dconf_db.get(hklm_branch,{})
|
||||
self.install_packages_setting = string_to_literal_eval(dict_packages.get(install_key_name,[]))
|
||||
self.remove_packages_setting = string_to_literal_eval(dict_packages.get(remove_key_name,[]))
|
||||
|
||||
for package in self.install_packages_setting:
|
||||
if not is_rpm_installed(package.data):
|
||||
self.install_packages.add(package.data)
|
||||
package = package.strip()
|
||||
if not is_rpm_installed(package):
|
||||
self.install_packages.add(package)
|
||||
for package in self.remove_packages_setting:
|
||||
if package.data in self.install_packages:
|
||||
self.install_packages.remove(package.data)
|
||||
if is_rpm_installed(package.data):
|
||||
self.remove_packages.add(package.data)
|
||||
package = package.strip()
|
||||
if package in self.install_packages:
|
||||
self.install_packages.remove(package)
|
||||
if is_rpm_installed(package):
|
||||
self.remove_packages.add(package)
|
||||
|
||||
def apply(self):
|
||||
log('D142')
|
||||
self.update()
|
||||
for package in self.remove_packages:
|
||||
logdata = {'name': package}
|
||||
try:
|
||||
logdata = dict()
|
||||
logdata['name'] = package
|
||||
log('D149', logdata)
|
||||
self.remove_pkg(package)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exc'] = exc
|
||||
log('E58', logdata)
|
||||
|
||||
for package in self.install_packages:
|
||||
logdata = {'name': package}
|
||||
try:
|
||||
logdata = dict()
|
||||
logdata['name'] = package
|
||||
log('D148', logdata)
|
||||
self.install_pkg(package)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['exc'] = exc
|
||||
log('E57', logdata)
|
||||
|
||||
@@ -109,7 +102,7 @@ class Pkcon_applier:
|
||||
pass
|
||||
|
||||
def remove_pkg(self, package_name):
|
||||
fullcmd = self.__remove_command
|
||||
fullcmd = list(self.__remove_command)
|
||||
fullcmd.append(package_name)
|
||||
return subprocess.check_output(fullcmd)
|
||||
|
||||
@@ -120,15 +113,14 @@ class Pkcon_applier:
|
||||
try:
|
||||
res = subprocess.check_output(['/usr/bin/apt-get', 'update'], encoding='utf-8')
|
||||
msg = str(res).split('\n')
|
||||
logdata = dict()
|
||||
logdata = {}
|
||||
for mslog in msg:
|
||||
ms = str(mslog).split(' ')
|
||||
if ms:
|
||||
logdata = {ms[0]: ms[1:-1]}
|
||||
log('D143', logdata)
|
||||
except Exception as exc:
|
||||
logdata = dict()
|
||||
logdata['msg'] = exc
|
||||
logdata = {'msg': exc}
|
||||
log('E56',logdata)
|
||||
|
||||
if __name__ == '__main__':
|
||||
@@ -137,13 +129,13 @@ if __name__ == '__main__':
|
||||
gettext.textdomain('gpoa')
|
||||
logger = logging.getLogger()
|
||||
parser = argparse.ArgumentParser(description='Package applier')
|
||||
parser.add_argument('--sid', type = str, help = 'sid', nargs = '?', default = None)
|
||||
parser.add_argument('--user', type = str, help = 'user', nargs = '?', default = None)
|
||||
parser.add_argument('--loglevel', type = int, help = 'loglevel', nargs = '?', default = 30)
|
||||
|
||||
args = parser.parse_args()
|
||||
logger.setLevel(args.loglevel)
|
||||
if args.sid:
|
||||
applier = Pkcon_applier(args.sid)
|
||||
if args.user:
|
||||
applier = Pkcon_applier(args.user)
|
||||
else:
|
||||
applier = Pkcon_applier()
|
||||
applier.apply()
|
||||
|
||||
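Before the next file, a rough sketch of the dconf dictionary shape the reworked Pkcon_applier reads (the branch and key names come from the code above; the package names are invented):

# Hypothetical result of get_dictionary_from_dconf_file_db().
# The 'Install'/'Remove' values arrive as strings and are parsed with
# string_to_literal_eval() before iteration; packages already installed
# are skipped on install, and anything also listed for removal is dropped
# from the install set.
dict_dconf_db = {
    'Software/BaseALT/Policies/Packages': {
        'Install': "['example-pkg-a', 'example-pkg-b']",
        'Remove': "['example-pkg-c']",
    }
}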
@@ -17,4 +17,5 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from .plugin_manager import plugin_manager
from .messages import register_plugin_messages, get_plugin_message, get_all_plugin_messages

||||
@@ -1,41 +0,0 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
import subprocess
|
||||
|
||||
from util.rpm import is_rpm_installed
|
||||
from .exceptions import PluginInitError
|
||||
from util.logging import slogm
|
||||
from messages import message_with_code
|
||||
|
||||
class adp:
|
||||
def __init__(self):
|
||||
if not is_rpm_installed('adp'):
|
||||
raise PluginInitError(message_with_code('W5'))
|
||||
logging.info(slogm(message_with_code('D4')))
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
logging.info(slogm(message_with_code('D5')))
|
||||
subprocess.call(['/usr/bin/adp', 'fetch'])
|
||||
subprocess.call(['/usr/bin/adp', 'apply'])
|
||||
except Exception as exc:
|
||||
logging.error(slogm(message_with_code('E9')))
|
||||
raise exc
|
||||
|
||||
180
gpoa/plugin/messages.py
Normal file
@@ -0,0 +1,180 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Plugin message registry for GPOA plugins.
|
||||
|
||||
This module allows plugins to register their message codes and descriptions
|
||||
without modifying the main messages.py file.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import inspect
|
||||
import gettext
|
||||
import importlib.util
|
||||
from pathlib import Path
|
||||
|
||||
_plugin_messages = {}
|
||||
_plugin_translations = {}
|
||||
|
||||
def _load_plugin_translations(domain):
|
||||
"""
|
||||
Load translations for a specific plugin from its locale directory.
|
||||
|
||||
Dynamically searches for plugin modules across the entire project.
|
||||
|
||||
Args:
|
||||
domain (str): Plugin domain/prefix
|
||||
"""
|
||||
try:
|
||||
# Try to find the plugin module that registered these messages
|
||||
for prefix, msgs in _plugin_messages.items():
|
||||
if prefix == domain:
|
||||
# Search through all loaded modules to find the plugin class
|
||||
for module_name, module in list(sys.modules.items()):
|
||||
if module and hasattr(module, '__dict__'):
|
||||
for name, obj in module.__dict__.items():
|
||||
# Check if this is a class with the domain attribute
|
||||
if (isinstance(obj, type) and
|
||||
hasattr(obj, 'domain') and
|
||||
obj.domain == domain):
|
||||
# Found the plugin class, now find its file
|
||||
try:
|
||||
plugin_file = Path(inspect.getfile(obj))
|
||||
plugin_dir = plugin_file.parent
|
||||
# Look for locale directory in plugin directory
|
||||
locale_dir = plugin_dir / 'locale'
|
||||
if locale_dir.exists():
|
||||
# Try to load translations
|
||||
lang = 'ru_RU' # Default to Russian
|
||||
lc_messages_dir = locale_dir / lang / 'LC_MESSAGES'
|
||||
if lc_messages_dir.exists():
|
||||
# Look for .po files
|
||||
po_files = list(lc_messages_dir.glob('*.po'))
|
||||
for po_file in po_files:
|
||||
try:
|
||||
translation = gettext.translation(
|
||||
po_file.stem,
|
||||
localedir=str(locale_dir),
|
||||
languages=[lang]
|
||||
)
|
||||
_plugin_translations[domain] = translation
|
||||
return # Successfully loaded translations
|
||||
except FileNotFoundError:
|
||||
continue
|
||||
# If not found in plugin directory, check parent directories
|
||||
# (for plugins that are in subdirectories)
|
||||
parent_dirs_to_check = [
|
||||
plugin_dir.parent / 'locale', # Parent directory
|
||||
plugin_dir.parent.parent / 'locale' # Grandparent directory
|
||||
]
|
||||
for parent_locale_dir in parent_dirs_to_check:
|
||||
if parent_locale_dir.exists():
|
||||
lang = 'ru_RU'
|
||||
lc_messages_dir = parent_locale_dir / lang / 'LC_MESSAGES'
|
||||
if lc_messages_dir.exists():
|
||||
po_files = list(lc_messages_dir.glob('*.po'))
|
||||
for po_file in po_files:
|
||||
try:
|
||||
translation = gettext.translation(
|
||||
po_file.stem,
|
||||
localedir=str(parent_locale_dir),
|
||||
languages=[lang]
|
||||
)
|
||||
_plugin_translations[domain] = translation
|
||||
return # Successfully loaded translations
|
||||
except FileNotFoundError:
|
||||
continue
|
||||
except (TypeError, OSError):
|
||||
# Could not get file path for the class
|
||||
continue
|
||||
break
|
||||
|
||||
# If not found through module inspection, try system-wide gpupdate plugins directory
|
||||
gpupdate_plugins_locale = Path('/usr/lib/gpupdate/plugins/locale')
|
||||
if gpupdate_plugins_locale.exists():
|
||||
lang = 'ru_RU'
|
||||
lc_messages_dir = gpupdate_plugins_locale / lang / 'LC_MESSAGES'
|
||||
if lc_messages_dir.exists():
|
||||
# Look for .po files matching the plugin prefix
|
||||
po_files = list(lc_messages_dir.glob(f'*{domain.lower()}*.po'))
|
||||
if not po_files:
|
||||
# Try any .po file if no specific match
|
||||
po_files = list(lc_messages_dir.glob('*.po'))
|
||||
|
||||
for po_file in po_files:
|
||||
try:
|
||||
translation = gettext.translation(
|
||||
po_file.stem,
|
||||
localedir=str(gpupdate_plugins_locale),
|
||||
languages=[lang]
|
||||
)
|
||||
_plugin_translations[domain] = translation
|
||||
return # Successfully loaded translations
|
||||
except FileNotFoundError:
|
||||
continue
|
||||
except Exception:
|
||||
# Silently fail if translations cannot be loaded
|
||||
pass
|
||||
|
||||
def register_plugin_messages(domain, messages_dict):
|
||||
"""
|
||||
Register message codes for a plugin.
|
||||
|
||||
Args:
|
||||
domain (str): Plugin domain/prefix
|
||||
messages_dict (dict): Dictionary mapping message codes to descriptions
|
||||
"""
|
||||
_plugin_messages[domain] = messages_dict
|
||||
|
||||
# Try to load plugin-specific translations
|
||||
_load_plugin_translations(domain)
|
||||
|
||||
def get_plugin_message(domain, code):
|
||||
"""
|
||||
Get message description for a plugin-specific code.
|
||||
|
||||
Args:
|
||||
domain (str): Plugin domain/prefix
|
||||
code (int): Message code
|
||||
|
||||
Returns:
|
||||
str: Message description or generic message if not found
|
||||
"""
|
||||
plugin_msgs = _plugin_messages.get(domain, {})
|
||||
message_text = plugin_msgs.get(code, f"Plugin {domain} message {code}")
|
||||
|
||||
# Try to translate the message if translations are available
|
||||
translation = _plugin_translations.get(domain)
|
||||
if translation:
|
||||
try:
|
||||
return translation.gettext(message_text)
|
||||
except:
|
||||
pass
|
||||
|
||||
return message_text
|
||||
|
||||
def get_all_plugin_messages():
|
||||
"""
|
||||
Get all registered plugin messages.
|
||||
|
||||
Returns:
|
||||
dict: Dictionary of all registered plugin messages
|
||||
"""
|
||||
return _plugin_messages.copy()
|
||||
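A minimal usage sketch for the registry above (the domain name and codes are invented for illustration):

from gpoa.plugin.messages import register_plugin_messages, get_plugin_message

# Hypothetical plugin domain and message table.
register_plugin_messages('example_plugin', {
    1: 'Example plugin started',
    2: 'Example plugin finished',
})

print(get_plugin_message('example_plugin', 1))
# Unknown codes fall back to a generic string:
print(get_plugin_message('example_plugin', 3))  # "Plugin example_plugin message 3"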
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,12 +16,75 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from abc import ABC
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import final
|
||||
from gpoa.util.util import string_to_literal_eval
|
||||
from gpoa.util.logging import log
|
||||
from gpoa.plugin.plugin_log import PluginLog
|
||||
from gpoa.storage.dconf_registry import Dconf_registry
|
||||
|
||||
class plugin():
|
||||
def __init__(self, plugin_name):
|
||||
self.plugin_name = plugin_name
|
||||
class plugin(ABC):
|
||||
def __init__(self, dict_dconf_db={}, username=None, fs_file_cache=None):
|
||||
self.dict_dconf_db = dict_dconf_db
|
||||
self.file_cache = fs_file_cache
|
||||
self.username = username
|
||||
self._log = None
|
||||
self.plugin_name = self.__class__.__name__
|
||||
|
||||
@final
|
||||
def apply(self):
|
||||
"""Apply the plugin with current privileges"""
|
||||
self.run()
|
||||
|
||||
@final
|
||||
def apply_user(self, username):
|
||||
"""Apply the plugin with user privileges"""
|
||||
from util.system import with_privileges
|
||||
|
||||
def run_with_user():
|
||||
try:
|
||||
result = self.run()
|
||||
# Ensure result is JSON-serializable
|
||||
return {"success": True, "result": result}
|
||||
except Exception as exc:
|
||||
# Return error information in JSON-serializable format
|
||||
return {"success": False, "error": str(exc)}
|
||||
|
||||
try:
|
||||
execution_result = with_privileges(username, run_with_user)
|
||||
if execution_result and execution_result.get("success"):
|
||||
result = execution_result.get("result", True)
|
||||
return result
|
||||
else:
|
||||
return False
|
||||
except:
|
||||
return False
|
||||
|
||||
@final
|
||||
def get_dict_registry(self, prefix=''):
|
||||
"""Get the dictionary from the registry"""
|
||||
return string_to_literal_eval(self.dict_dconf_db.get(prefix,{}))
|
||||
|
||||
def _init_plugin_log(self, message_dict=None, locale_dir=None, domain=None):
|
||||
"""Initialize plugin-specific logger with message codes."""
|
||||
self._log = PluginLog(message_dict, locale_dir, domain, self.plugin_name)
|
||||
|
||||
def log(self, message_code, data=None):
|
||||
"""
|
||||
Log message using plugin-specific logger with message codes.
|
||||
|
||||
Args:
|
||||
message_code (str): Message code in format 'W1', 'E2', etc.
|
||||
data (dict): Additional data for message formatting
|
||||
"""
|
||||
if self._log:
|
||||
self._log(message_code, data)
|
||||
else:
|
||||
# Fallback to basic logging
|
||||
level_char = message_code[0] if message_code else 'E'
|
||||
log(level_char, {"plugin": self.__class__.__name__, "message": f"Message {message_code}", "data": data})
|
||||
|
||||
@abstractmethod
|
||||
def run(self):
|
||||
pass
|
||||
|
||||
|
||||
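A minimal sketch of a concrete plugin built on the base class above (the class name, dconf branch and message texts are illustrative):

from gpoa.plugin.plugin import plugin

class example_applier(plugin):
    # Hypothetical plugin: reads its branch from dconf and logs progress.
    domain = 'example_applier'

    def __init__(self, dict_dconf_db={}, username=None, fs_file_cache=None):
        super().__init__(dict_dconf_db, username, fs_file_cache)
        self._init_plugin_log({'i': {1: 'Example applier started'},
                               'w': {1: 'Nothing to apply'}},
                              domain=self.domain)

    def run(self):
        self.log('I1')
        settings = self.get_dict_registry('Software/BaseALT/Policies/Example')
        if not settings:
            self.log('W1')

plugin_manager calls apply() or apply_user() on such an object, so run() never needs to handle privilege switching itself.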
44
gpoa/plugin/plugin_base.py
Normal file
@@ -0,0 +1,44 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from abc import abstractmethod
|
||||
from gpoa.plugin.plugin import plugin
|
||||
|
||||
class FrontendPlugin(plugin):
|
||||
"""
|
||||
Base class for frontend plugins with simplified logging support.
|
||||
"""
|
||||
|
||||
def __init__(self, dict_dconf_db={}, username=None, fs_file_cache=None):
|
||||
super().__init__(dict_dconf_db, username, fs_file_cache)
|
||||
|
||||
@abstractmethod
|
||||
def run(self):
|
||||
"""
|
||||
Abstract method that must be implemented by concrete plugins.
|
||||
This method should contain the main plugin execution logic.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
276
gpoa/plugin/plugin_log.py
Normal file
@@ -0,0 +1,276 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import gettext
|
||||
import locale
|
||||
import logging
|
||||
import inspect
|
||||
from pathlib import Path
|
||||
|
||||
from gpoa.util.logging import slogm
|
||||
from gpoa.plugin.messages import register_plugin_messages
|
||||
|
||||
|
||||
class PluginLog:
|
||||
"""
|
||||
Plugin logging class with message codes and translations support.
|
||||
|
||||
Usage:
|
||||
log = PluginLog({
|
||||
'w': {1: 'Warning message template {param}'},
|
||||
'e': {1: 'Error message template {param}'},
|
||||
'i': {1: 'Info message template {param}'},
|
||||
'd': {1: 'Debug message template {param}'}
|
||||
}, domain='dm_applier')
|
||||
|
||||
log('W1', {'param': 'value'})
|
||||
"""
|
||||
|
||||
def __init__(self, message_dict=None, locale_dir=None, domain=None, plugin_name=None):
|
||||
"""
|
||||
Initialize plugin logger.
|
||||
|
||||
Args:
|
||||
message_dict (dict): Dictionary with message templates
|
||||
locale_dir (str): Path to locale directory for translations
|
||||
domain (str): Translation domain name (required for translations)
|
||||
"""
|
||||
self.message_dict = message_dict or {}
|
||||
self.locale_dir = locale_dir
|
||||
self.domain = domain or 'plugin'
|
||||
self._translation = None
|
||||
self.plugin_name = plugin_name
|
||||
# Register plugin messages
|
||||
if message_dict:
|
||||
# Convert to flat dictionary for registration
|
||||
flat_messages = {}
|
||||
for level, level_dict in message_dict.items():
|
||||
for code, message in level_dict.items():
|
||||
flat_messages[code] = message
|
||||
|
||||
register_plugin_messages(self.domain, flat_messages)
|
||||
|
||||
# Auto-detect locale directory only if explicitly None (not provided)
|
||||
# If locale_dir is an empty string or other falsy value, don't auto-detect
|
||||
if self.locale_dir is None:
|
||||
self._auto_detect_locale_dir()
|
||||
|
||||
# Load translations
|
||||
self._load_translations()
|
||||
|
||||
def _auto_detect_locale_dir(self):
|
||||
"""Auto-detect locale directory based on plugin file location."""
|
||||
try:
|
||||
# Try to find the calling plugin module
|
||||
frame = inspect.currentframe()
|
||||
while frame:
|
||||
module = frame.f_globals.get('__name__', '')
|
||||
if module and 'plugin' in module:
|
||||
module_file = frame.f_globals.get('__file__', '')
|
||||
if module_file:
|
||||
plugin_dir = Path(module_file).parent
|
||||
# First try: locale directory in plugin's own directory
|
||||
locale_candidate = plugin_dir / 'locale'
|
||||
if locale_candidate.exists():
|
||||
self.locale_dir = str(locale_candidate)
|
||||
return
|
||||
# Second try: common locale directory for frontend plugins
|
||||
if 'frontend_plugins' in str(plugin_dir):
|
||||
frontend_plugins_dir = plugin_dir.parent
|
||||
common_locale_dir = frontend_plugins_dir / 'locale'
|
||||
if common_locale_dir.exists():
|
||||
self.locale_dir = str(common_locale_dir)
|
||||
return
|
||||
frame = frame.f_back
|
||||
# Third try: relative to current working directory
|
||||
cwd_locale = Path.cwd() / 'gpoa' / 'frontend_plugins' / 'locale'
|
||||
if cwd_locale.exists():
|
||||
self.locale_dir = str(cwd_locale)
|
||||
return
|
||||
# Fourth try: relative to script location
|
||||
script_dir = Path(__file__).parent.parent.parent / 'frontend_plugins' / 'locale'
|
||||
if script_dir.exists():
|
||||
self.locale_dir = str(script_dir)
|
||||
return
|
||||
# Fifth try: system installation path for frontend plugins
|
||||
system_paths = [
|
||||
'/usr/lib/python3/site-packages/gpoa/frontend_plugins/locale',
|
||||
'/usr/local/lib/python3/site-packages/gpoa/frontend_plugins/locale'
|
||||
]
|
||||
for path in system_paths:
|
||||
if os.path.exists(path):
|
||||
self.locale_dir = path
|
||||
return
|
||||
|
||||
# Sixth try: system-wide gpupdate package locale directory
|
||||
gpupdate_package_locale = Path('/usr/lib/python3/site-packages/gpoa/locale')
|
||||
if gpupdate_package_locale.exists():
|
||||
self.locale_dir = str(gpupdate_package_locale)
|
||||
return
|
||||
|
||||
# Seventh try: system-wide locale directory (fallback)
|
||||
system_locale_dir = Path('/usr/share/locale')
|
||||
if system_locale_dir.exists():
|
||||
self.locale_dir = str(system_locale_dir)
|
||||
return
|
||||
except:
|
||||
pass
|
||||
|
||||
def _load_translations(self):
|
||||
"""Load translations for the plugin using system locale."""
|
||||
if self.locale_dir:
|
||||
# Use only self.domain as the translation file name
|
||||
# This aligns with the convention that plugin translation files
|
||||
# are always named according to the domain
|
||||
domain = self.domain
|
||||
|
||||
try:
|
||||
# Get system locale
|
||||
system_locale = locale.getdefaultlocale()[0]
|
||||
languages = [system_locale] if system_locale else ['ru_RU']
|
||||
|
||||
# First try: load from the detected locale_dir without fallback
|
||||
try:
|
||||
self._translation = gettext.translation(
|
||||
domain,
|
||||
localedir=self.locale_dir,
|
||||
languages=languages,
|
||||
fallback=False
|
||||
)
|
||||
except FileNotFoundError:
|
||||
# File not found, try with fallback
|
||||
self._translation = gettext.translation(
|
||||
domain,
|
||||
localedir=self.locale_dir,
|
||||
languages=languages,
|
||||
fallback=True
|
||||
)
|
||||
|
||||
# Check if we got real translations or NullTranslations
|
||||
if isinstance(self._translation, gettext.NullTranslations):
|
||||
# Try loading from system locale directory as fallback
|
||||
try:
|
||||
self._translation = gettext.translation(
|
||||
domain,
|
||||
localedir='/usr/share/locale',
|
||||
languages=languages,
|
||||
fallback=False
|
||||
)
|
||||
except FileNotFoundError:
|
||||
# File not found in system directory, use fallback
|
||||
self._translation = gettext.translation(
|
||||
domain,
|
||||
localedir='/usr/share/locale',
|
||||
languages=languages,
|
||||
fallback=True
|
||||
)
|
||||
|
||||
except Exception:
|
||||
# If any exception occurs, fall back to NullTranslations
|
||||
self._translation = gettext.NullTranslations()
|
||||
|
||||
# Ensure _translation is set even if all attempts failed
|
||||
if not hasattr(self, '_translation'):
|
||||
self._translation = gettext.NullTranslations()
|
||||
else:
|
||||
self._translation = gettext.NullTranslations()
|
||||
|
||||
def _get_message_template(self, level, code):
|
||||
"""Get message template for given level and code."""
|
||||
level_dict = self.message_dict.get(level, {})
|
||||
return level_dict.get(code, 'Unknown message {code}')
|
||||
|
||||
def _format_message(self, level, code, data=None):
|
||||
"""Format message with data and apply translation."""
|
||||
template = self._get_message_template(level, code)
|
||||
# Apply translation
|
||||
translated_template = self._translation.gettext(template)
|
||||
# Format with data if provided
|
||||
if data and isinstance(data, dict):
|
||||
try:
|
||||
return translated_template.format(**data)
|
||||
except:
|
||||
return "{} | {}".format(translated_template, data)
|
||||
return translated_template
|
||||
|
||||
def _get_full_code(self, level_char, code):
|
||||
"""Get full message code without plugin prefix."""
|
||||
return f"{level_char}{code:05d}"
|
||||
|
||||
def __call__(self, message_code, data=None):
|
||||
"""
|
||||
Log a message with the given code and data.
|
||||
|
||||
Args:
|
||||
message_code (str): Message code in format 'W1', 'E2', etc.
|
||||
data (dict): Additional data for message formatting
|
||||
"""
|
||||
if not message_code or len(message_code) < 2:
|
||||
logging.error(slogm("Invalid message code format", {"code": message_code}))
|
||||
return
|
||||
level_char = message_code[0].lower()
|
||||
try:
|
||||
code_num = int(message_code[1:])
|
||||
except ValueError:
|
||||
logging.error(slogm("Invalid message code number", {"code": message_code}))
|
||||
return
|
||||
|
||||
# Get the formatted message
|
||||
message = self._format_message(level_char, code_num, data)
|
||||
# Create full message code for logging
|
||||
full_code = self._get_full_code(level_char.upper(), code_num)
|
||||
# Format the log message like main code: [Code]| Message | data
|
||||
|
||||
log_message = f"{self.plugin_name}[{full_code}]| {message}"
|
||||
if data:
|
||||
log_message += f"|{data}"
|
||||
|
||||
# Log with appropriate level - no kwargs needed
|
||||
if level_char == 'i':
|
||||
logging.info(slogm(log_message))
|
||||
elif level_char == 'w':
|
||||
logging.warning(slogm(log_message))
|
||||
elif level_char == 'e':
|
||||
logging.error(slogm(log_message))
|
||||
elif level_char == 'd':
|
||||
logging.debug(slogm(log_message))
|
||||
elif level_char == 'f':
|
||||
logging.fatal(slogm(log_message))
|
||||
else:
|
||||
logging.info(slogm(log_message))
|
||||
|
||||
def info(self, code, data=None):
|
||||
"""Log info message."""
|
||||
self(f"I{code}", data)
|
||||
|
||||
def warning(self, code, data=None):
|
||||
"""Log warning message."""
|
||||
self(f"W{code}", data)
|
||||
|
||||
def error(self, code, data=None):
|
||||
"""Log error message."""
|
||||
self(f"E{code}", data)
|
||||
|
||||
def debug(self, code, data=None):
|
||||
"""Log debug message."""
|
||||
self(f"D{code}", data)
|
||||
|
||||
def fatal(self, code, data=None):
|
||||
"""Log fatal message."""
|
||||
self(f"F{code}", data)
|
||||
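For orientation, a sketch of the record format PluginLog emits (the plugin name, code and template are illustrative):

from gpoa.plugin.plugin_log import PluginLog

# Passing locale_dir='' skips auto-detection and translation lookup.
log = PluginLog({'w': {1: 'Could not read {path}'}},
                locale_dir='', domain='example_applier',
                plugin_name='example_applier')
log('W1', {'path': '/tmp/example.conf'})
# Logged as:
#   example_applier[W00001]| Could not read /tmp/example.conf|{'path': '/tmp/example.conf'}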
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,25 +16,203 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
import importlib.util
|
||||
import inspect
|
||||
from pathlib import Path
|
||||
|
||||
from gpoa.util.logging import log
|
||||
from gpoa.util.paths import gpupdate_plugins_path
|
||||
from gpoa.util.util import string_to_literal_eval
|
||||
|
||||
from .adp import adp
|
||||
from .roles import roles
|
||||
from .exceptions import PluginInitError
|
||||
from .plugin import plugin
|
||||
from util.logging import slogm
|
||||
from messages import message_with_code
|
||||
from gpoa.storage import registry_factory
|
||||
from gpoa.storage.fs_file_cache import fs_file_cache
|
||||
from gpoa.util.util import get_uid_by_username
|
||||
|
||||
|
||||
class plugin_manager:
|
||||
def __init__(self):
|
||||
self.plugins = dict()
|
||||
logging.debug(slogm(message_with_code('D3')))
|
||||
try:
|
||||
self.plugins['adp'] = adp()
|
||||
except PluginInitError as exc:
|
||||
logging.warning(slogm(str(exc)))
|
||||
def __init__(self, is_machine, username):
|
||||
self.is_machine = is_machine
|
||||
self.username = username
|
||||
self.file_cache = fs_file_cache('file_cache', self.username)
|
||||
self.list_plugins = []
|
||||
self.dict_dconf_db = self.get_dict_dconf_db()
|
||||
self.filling_settings()
|
||||
self.plugins = self.load_plugins()
|
||||
log('D3')
|
||||
|
||||
def get_dict_dconf_db(self):
|
||||
dconf_storage = registry_factory()
|
||||
if self.username and not self.is_machine:
|
||||
uid = get_uid_by_username(self.username)
|
||||
dict_dconf_db = dconf_storage.get_dictionary_from_dconf_file_db(uid)
|
||||
else:
|
||||
dict_dconf_db = dconf_storage.get_dictionary_from_dconf_file_db()
|
||||
return dict_dconf_db
|
||||
|
||||
def filling_settings(self):
|
||||
"""Filling in settings"""
|
||||
dict_gpupdate_key = string_to_literal_eval(
|
||||
self.dict_dconf_db.get('Software/BaseALT/Policies/GPUpdate',{}))
|
||||
self.plugins_enable = dict_gpupdate_key.get('Plugins')
|
||||
self.plugins_list = dict_gpupdate_key.get('PluginsList')
|
||||
|
||||
def check_enabled_plugin(self, plugin_name):
|
||||
"""Check if the plugin is enabled"""
|
||||
if not self.plugins_enable:
|
||||
return False
|
||||
|
||||
if isinstance(self.plugins_list, list):
|
||||
return plugin_name in self.plugins_list
|
||||
# if the list is missing or not a list, consider the plugin enabled
|
||||
return True
|
||||
|
||||
def run(self):
|
||||
self.plugins.get('adp', plugin('adp')).run()
|
||||
self.plugins.get('roles', plugin('roles')).run()
|
||||
"""Run the plugins with appropriate privileges"""
|
||||
for plugin_obj in self.plugins:
|
||||
if self.is_valid_api_object(plugin_obj):
|
||||
# Set execution context for plugins that support it
|
||||
if hasattr(plugin_obj, 'set_context'):
|
||||
plugin_obj.set_context(self.is_machine, self.username)
|
||||
if self.check_enabled_plugin(plugin_obj.plugin_name):
|
||||
log('D4', {'plugin_name': plugin_obj.plugin_name})
|
||||
|
||||
# Use apply_user for user context, apply for machine context
|
||||
if not self.is_machine and self.username:
|
||||
result = plugin_obj.apply_user(self.username)
|
||||
if result is False:
|
||||
log('W46', {'plugin_name': plugin_obj.plugin_name, 'username': self.username})
|
||||
else:
|
||||
plugin_obj.apply()
|
||||
else:
|
||||
log('D236', {'plugin_name': plugin_obj.plugin_name})
|
||||
else:
|
||||
log('W44', {'plugin_name': getattr(plugin_obj, 'plugin_name', 'unknown')})
|
||||
|
||||
def load_plugins(self):
|
||||
"""Load plugins from multiple directories"""
|
||||
plugins = []
|
||||
|
||||
# Default plugin directories
|
||||
plugin_dirs = [
|
||||
# Frontend plugins
|
||||
Path(gpupdate_plugins_path()).absolute(),
|
||||
# System-wide plugins
|
||||
Path("/usr/lib/gpupdate/plugins")
|
||||
]
|
||||
|
||||
for plugin_dir in plugin_dirs:
|
||||
if plugin_dir.exists() and plugin_dir.is_dir():
|
||||
plugins.extend(self._load_plugins_from_directory(plugin_dir))
|
||||
|
||||
return plugins
|
||||
|
||||
def _load_plugins_from_directory(self, directory):
|
||||
"""Load plugins from a specific directory"""
|
||||
plugins = []
|
||||
|
||||
for file_path in directory.glob("*.py"):
|
||||
if file_path.name == "__init__.py":
|
||||
continue
|
||||
|
||||
try:
|
||||
plugin_obj = self._load_plugin_from_file(file_path)
|
||||
if plugin_obj:
|
||||
plugins.append(plugin_obj)
|
||||
except Exception as exc:
|
||||
log('W45', {'plugin_file': file_path.name, 'error': str(exc)})
|
||||
|
||||
return plugins
|
||||
|
||||
def _load_plugin_from_file(self, file_path):
|
||||
"""Load a single plugin from a Python file"""
|
||||
module_name = file_path.stem
|
||||
|
||||
# Load the module
|
||||
spec = importlib.util.spec_from_file_location(module_name, file_path)
|
||||
if not spec or not spec.loader or module_name in self.list_plugins:
|
||||
return None
|
||||
# Save the list of names to prevent repetition
|
||||
self.list_plugins.append(module_name)
|
||||
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
|
||||
|
||||
# Find factory functions based on context
|
||||
factory_funcs = []
|
||||
target_factory_names = []
|
||||
|
||||
if self.is_machine:
|
||||
target_factory_names = ['create_machine_applier', 'create_plugin']
|
||||
else:
|
||||
target_factory_names = ['create_user_applier', 'create_plugin']
|
||||
|
||||
for name, obj in inspect.getmembers(module):
|
||||
if (inspect.isfunction(obj) and
|
||||
name.lower() in target_factory_names and
|
||||
callable(obj)):
|
||||
factory_funcs.append(obj)
|
||||
|
||||
# Create plugin instance
|
||||
|
||||
|
||||
if factory_funcs:
|
||||
# Use factory function if available
|
||||
plugin_instance = factory_funcs[0](self.dict_dconf_db, self.username, self.file_cache)
|
||||
else:
|
||||
# No suitable factory function found for this context
|
||||
return None
|
||||
|
||||
# Auto-detect locale directory for this plugin and initialize/update logger
|
||||
if hasattr(plugin_instance, '_init_plugin_log'):
|
||||
plugin_file = file_path
|
||||
plugin_dir = plugin_file.parent
|
||||
|
||||
# First try: locale directory in plugin's own directory
|
||||
locale_candidate = plugin_dir / 'locale'
|
||||
|
||||
# Second try: common locale directory for frontend plugins
|
||||
if not locale_candidate.exists() and 'frontend_plugins' in str(plugin_dir):
|
||||
frontend_plugins_dir = plugin_dir.parent
|
||||
common_locale_dir = frontend_plugins_dir / 'locale'
|
||||
if common_locale_dir.exists():
|
||||
locale_candidate = common_locale_dir
|
||||
|
||||
# Third try: system-wide gpupdate plugins locale directory
|
||||
if not locale_candidate.exists():
|
||||
gpupdate_plugins_locale = Path('/usr/lib/gpupdate/plugins/locale')
|
||||
if gpupdate_plugins_locale.exists():
|
||||
locale_candidate = gpupdate_plugins_locale
|
||||
|
||||
if locale_candidate.exists():
|
||||
# If logger already exists, reinitialize it with the correct locale directory
|
||||
if hasattr(plugin_instance, '_log') and plugin_instance._log is not None:
|
||||
# Save message_dict and domain from existing logger
|
||||
message_dict = getattr(plugin_instance._log, 'message_dict', None)
|
||||
domain = getattr(plugin_instance._log, 'domain', None)
|
||||
|
||||
# Reinitialize logger with proper locale directory
|
||||
plugin_instance._log = None
|
||||
else:
|
||||
message_dict = None
|
||||
domain = None
|
||||
|
||||
# Get domain from plugin instance or use class name
|
||||
if not domain:
|
||||
domain = getattr(plugin_instance, 'domain', plugin_instance.__class__.__name__.lower())
|
||||
|
||||
# Initialize plugin logger with the found locale directory
|
||||
plugin_instance._init_plugin_log(
|
||||
message_dict=message_dict,
|
||||
locale_dir=str(locale_candidate),
|
||||
domain=domain
|
||||
)
|
||||
|
||||
return plugin_instance
|
||||
|
||||
return None
|
||||
|
||||
def is_valid_api_object(self, obj):
|
||||
"""Check if the object is a valid plugin API object"""
|
||||
return isinstance(obj, plugin)
|
||||
|
||||
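To close the loop, a sketch of a drop-in plugin file that this loader would discover (the path and class name are hypothetical; the factory signature mirrors the call in _load_plugin_from_file()):

# Hypothetical /usr/lib/gpupdate/plugins/example.py
from gpoa.plugin.plugin_base import FrontendPlugin

class ExamplePlugin(FrontendPlugin):
    def run(self):
        # Real applier logic would go here.
        pass

# The loader looks for create_machine_applier / create_user_applier /
# create_plugin and calls the first match with
# (dict_dconf_db, username, file_cache).
def create_plugin(dict_dconf_db, username, file_cache):
    return ExamplePlugin(dict_dconf_db, username, file_cache)

The plugin only runs when Plugins is enabled under Software/BaseALT/Policies/GPUpdate and, if PluginsList is a list, when the class name appears in it (see check_enabled_plugin above).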
@@ -16,12 +16,17 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from util.roles import fill_roles
from gpoa.util.roles import fill_roles
from .plugin import plugin

class roles:
    def __init__(self):
        pass

class roles(plugin):
    def __init__(self, user=None):
        super().__init__(user)
        self.plugin_name = "roles"

    def run(self):
        fill_roles()
        # Roles plugin logic would go here
        # For now, just pass as the original was doing nothing
        pass

156
gpoa/scripts_runner
Executable file
@@ -0,0 +1,156 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2022 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import subprocess
|
||||
import argparse
|
||||
import os
|
||||
from pathlib import Path
|
||||
import psutil
|
||||
import time
|
||||
|
||||
class Scripts_runner:
|
||||
'''
|
||||
A class for an object that iterates over directories with scripts
|
||||
in the desired sequence and launches them
|
||||
'''
|
||||
def __init__(self, work_mode = None, user_name = None, action = None):
|
||||
self.dir_scripts_machine = '/var/cache/gpupdate_scripts_cache/machine/'
|
||||
self.dir_scripts_users = '/var/cache/gpupdate_scripts_cache/users/'
|
||||
self.user_name = user_name
|
||||
self.list_with_all_commands = []
|
||||
stack_dir = None
|
||||
if work_mode and work_mode.upper() == 'MACHINE':
|
||||
stack_dir = self.machine_runner_fill()
|
||||
elif work_mode and work_mode.upper() == 'USER':
|
||||
stack_dir = self.user_runner_fill()
|
||||
else:
|
||||
print('Invalid arguments entered')
|
||||
return
|
||||
if action:
|
||||
self.action = action.upper()
|
||||
else:
|
||||
print('Action needed')
|
||||
return
|
||||
|
||||
self.find_action(stack_dir)
|
||||
for it_cmd in self.list_with_all_commands:
|
||||
print(self.run_cmd_subprocess(it_cmd))
|
||||
|
||||
def user_runner_fill(self):
|
||||
return self.get_stack_dir(self.dir_scripts_users + self.user_name)
|
||||
|
||||
def machine_runner_fill(self):
|
||||
return self.get_stack_dir(self.dir_scripts_machine)
|
||||
|
||||
def get_stack_dir(self, path_dir):
|
||||
stack_dir = []
|
||||
try:
|
||||
dir_script = Path(path_dir)
|
||||
for it_dir in dir_script.iterdir():
|
||||
stack_dir.append(str(it_dir))
|
||||
return stack_dir
|
||||
except Exception as exc:
|
||||
print(exc)
|
||||
return None
|
||||
|
||||
def find_action(self, stack_dir):
|
||||
if not stack_dir:
|
||||
return
|
||||
list_tmp = []
|
||||
while stack_dir:
|
||||
path_turn = stack_dir.pop()
|
||||
basename = os.path.basename(path_turn)
|
||||
if basename == self.action:
|
||||
list_tmp = self.get_stack_dir(path_turn)
|
||||
if list_tmp:
|
||||
self.fill_list_cmd(list_tmp)
|
||||
|
||||
|
||||
def fill_list_cmd(self, list_tmp):
|
||||
list_tmp = sorted(list_tmp)
|
||||
for file_in_task_dir in list_tmp:
|
||||
suffix = os.path.basename(file_in_task_dir)[-4:]
|
||||
if suffix == '.arg':
|
||||
try:
|
||||
arg = self.read_args(file_in_task_dir)
|
||||
for it_arg in arg.split():
|
||||
self.list_with_all_commands[-1].append(it_arg)
|
||||
except Exception as exc:
|
||||
print('Argument read failed for {}: {}'.format(self.list_with_all_commands.pop(), exc))
|
||||
else:
|
||||
cmd = []
|
||||
cmd.append(file_in_task_dir)
|
||||
self.list_with_all_commands.append(cmd)
|
||||
|
||||
|
||||
def read_args(self, path):
|
||||
with open(path + '/arg') as f:
|
||||
args = f.readlines()
|
||||
return args[0]
|
||||
|
||||
def run_cmd_subprocess(self, cmd):
|
||||
try:
|
||||
subprocess.run(cmd)
|
||||
return 'Script run: {}'.format(cmd)
|
||||
except Exception as exc:
|
||||
return exc
|
||||
|
||||
def find_process_by_name_and_script(name, script_path):
|
||||
|
||||
for proc in psutil.process_iter(['pid', 'name', 'cmdline']):
|
||||
try:
|
||||
# Check if the process name matches and the script path is in the command line arguments
|
||||
if proc.info['name'] == name and script_path in proc.info['cmdline']:
|
||||
return proc
|
||||
except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
|
||||
continue
|
||||
return None
|
||||
|
||||
def wait_for_process(name, script_path, check_interval=1):
|
||||
|
||||
process = find_process_by_name_and_script(name, script_path)
|
||||
if not process:
|
||||
print(f"Process with name {name} and script path {script_path} not found.")
|
||||
return
|
||||
|
||||
try:
|
||||
# Loop to wait for the process to finish
|
||||
while process.is_running():
|
||||
print(f"Waiting for process {name} with PID {process.pid} to finish...")
|
||||
time.sleep(check_interval)
|
||||
print(f"Process {name} with PID {process.pid} has finished.")
|
||||
return
|
||||
except (psutil.NoSuchProcess, psutil.AccessDenied):
|
||||
print(f"Process {name} with PID {process.pid} is no longer accessible.")
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='Scripts runner')
|
||||
parser.add_argument('--mode', type = str, help = 'MACHINE or USER', nargs = '?', default = None)
|
||||
parser.add_argument('--user', type = str, help = 'User name ', nargs = '?', default = None)
|
||||
parser.add_argument('--action', type = str, help = 'MACHINE : [STARTUP or SHUTDOWN], USER : [LOGON or LOGOFF]', nargs = '?', default = None)
|
||||
|
||||
process_name = "python3"
|
||||
script_path = "/usr/sbin/gpoa"
|
||||
wait_for_process(process_name, script_path)
|
||||
args = parser.parse_args()
|
||||
try:
|
||||
Scripts_runner(args.mode, args.user, args.action)
|
||||
except Exception as exc:
|
||||
print(exc)
|
||||
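A usage sketch, assuming the script cache below has already been populated by gpoa; the user name and file names are illustrative:

# /var/cache/gpupdate_scripts_cache/users/user1/LOGON/00_logon.sh
# /var/cache/gpupdate_scripts_cache/users/user1/LOGON/00_logon.sh.arg/arg   # optional arguments
#
# Roughly what the command-line entry point does after waiting for a running gpoa to finish,
# i.e. scripts_runner --mode USER --user user1 --action LOGON:
Scripts_runner(work_mode='USER', user_name='user1', action='LOGON')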
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
# Copyright (C) 2019-2023 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -16,12 +16,20 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .sqlite_registry import sqlite_registry
|
||||
from .sqlite_cache import sqlite_cache
|
||||
|
||||
def cache_factory(cache_name):
|
||||
return sqlite_cache(cache_name)
|
||||
from .dconf_registry import Dconf_registry
|
||||
|
||||
def registry_factory(registry_name='registry', registry_dir=None):
|
||||
return sqlite_registry(registry_name, registry_dir)
|
||||
|
||||
def registry_factory(registry_name='', envprofile=None , username=None):
|
||||
if username:
|
||||
Dconf_registry._username = username
|
||||
else:
|
||||
Dconf_registry._envprofile = 'system'
|
||||
if envprofile:
|
||||
Dconf_registry._envprofile = envprofile
|
||||
|
||||
if registry_name == 'dconf':
|
||||
return Dconf_registry()
|
||||
else:
|
||||
return Dconf_registry
|
||||
|
||||
|
||||
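A minimal sketch of the two call shapes the new factory supports (the user name is illustrative):

storage = registry_factory('dconf', username='user1')  # returns a Dconf_registry instance
storage_cls = registry_factory()                       # returns the Dconf_registry class itself;
                                                       # _envprofile falls back to 'system'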
@@ -18,6 +18,7 @@
|
||||
|
||||
from abc import ABC
|
||||
|
||||
|
||||
class cache(ABC):
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
869
gpoa/storage/dconf_registry.py
Normal file
@@ -0,0 +1,869 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2025 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from collections import OrderedDict
|
||||
import itertools
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
import gi
|
||||
from gpoa.gpt.dynamic_attributes import RegistryKeyMetadata
|
||||
from gpoa.util.logging import log
|
||||
from gpoa.util.paths import get_dconf_config_path
|
||||
from gpoa.util.util import (
|
||||
add_prefix_to_keys,
|
||||
clean_data,
|
||||
get_uid_by_username,
|
||||
remove_keys_with_prefix,
|
||||
string_to_literal_eval,
|
||||
touch_file,
|
||||
try_dict_to_literal_eval,
|
||||
)
|
||||
|
||||
gi.require_version("Gvdb", "1.0")
|
||||
gi.require_version("GLib", "2.0")
|
||||
from gi.repository import GLib, Gvdb
|
||||
|
||||
|
||||
class PregDconf():
|
||||
def __init__(self, keyname, valuename, type_preg, data):
|
||||
self.keyname = keyname
|
||||
self.valuename = valuename
|
||||
self.hive_key = '{}/{}'.format(self.keyname, self.valuename)
|
||||
self.type = type_preg
|
||||
self.data = data
|
||||
|
||||
|
||||
class gplist(list):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def first(self):
|
||||
if self:
|
||||
return self[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def count(self):
|
||||
return len(self)
|
||||
|
||||
class Dconf_registry():
|
||||
'''
|
||||
A class variable that represents a global registry dictionary shared among instances of the class
|
||||
'''
|
||||
_GpoPriority = 'Software/BaseALT/Policies/GpoPriority'
|
||||
_gpo_name = set()
|
||||
global_registry_dict = {_GpoPriority:{}}
|
||||
previous_global_registry_dict = {}
|
||||
__template_file = '/usr/share/dconf/user_mandatory.template'
|
||||
_policies_path = 'Software/'
|
||||
_policies_win_path = 'SOFTWARE/'
|
||||
_gpt_read_flag = False
|
||||
_force = False
|
||||
__dconf_dict_flag = False
|
||||
__dconf_dict = {}
|
||||
_dconf_db = {}
|
||||
_dict_gpo_name_version_cache = {}
|
||||
_username = None
|
||||
_uid = None
|
||||
_envprofile = None
|
||||
_path_bin_system = "/etc/dconf/db/policy"
|
||||
|
||||
list_keys = []
|
||||
_info = {}
|
||||
_counter_gpt = itertools.count(0)
|
||||
|
||||
shortcuts = []
|
||||
folders = []
|
||||
files = []
|
||||
drives = []
|
||||
scheduledtasks = []
|
||||
environmentvariables = []
|
||||
inifiles = []
|
||||
services = []
|
||||
printers = []
|
||||
scripts = []
|
||||
networkshares = []
|
||||
|
||||
_true_strings = {
|
||||
"True",
|
||||
"true",
|
||||
"TRUE",
|
||||
"yes",
|
||||
"Yes",
|
||||
"enabled",
|
||||
"enable",
|
||||
"Enabled",
|
||||
"Enable",
|
||||
'1'
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def set_info(cls, key , data):
|
||||
cls._info[key] = data
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_info(cls, key):
|
||||
return cls._info.setdefault(key, None)
|
||||
|
||||
@staticmethod
|
||||
def get_next_number():
|
||||
return next(Dconf_registry._counter_gpt)
|
||||
|
||||
@staticmethod
|
||||
def get_matching_keys(path):
|
||||
if path[0] != '/':
|
||||
path = '/' + path
|
||||
logdata = {}
|
||||
envprofile = get_dconf_envprofile()
|
||||
try:
|
||||
process = subprocess.Popen(['dconf', 'list', path],
|
||||
env=envprofile, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
|
||||
logdata['path'] = path
|
||||
log('D204', logdata)
|
||||
output, error = process.communicate()
|
||||
if not output and not error:
|
||||
return
|
||||
if not error:
|
||||
keys = output.strip().split('\n')
|
||||
for key in keys:
|
||||
Dconf_registry.get_matching_keys(f'{path}{key}')
|
||||
else:
|
||||
Dconf_registry.list_keys.append(path)
|
||||
return Dconf_registry.list_keys
|
||||
except Exception as exc:
|
||||
logdata['exc'] = exc
|
||||
log('E69', logdata)
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def get_key_values(keys):
|
||||
key_values = {}
|
||||
for key in keys:
|
||||
key_values[key] = Dconf_registry.get_key_value(key)
|
||||
return key_values
|
||||
|
||||
@staticmethod
|
||||
def get_key_value(key):
|
||||
logdata = {}
|
||||
envprofile = get_dconf_envprofile()
|
||||
try:
|
||||
process = subprocess.Popen(['dconf', 'read', key],
|
||||
env=envprofile, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
|
||||
logdata['key'] = key
|
||||
output, error = process.communicate()
|
||||
|
||||
if not error:
|
||||
return string_to_literal_eval(string_to_literal_eval(output))
|
||||
else:
|
||||
return None
|
||||
except Exception as exc:
|
||||
logdata['exc'] = exc
|
||||
log('E70', logdata)
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def dconf_update(uid=None):
|
||||
logdata = {}
|
||||
path_dconf_config = get_dconf_config_path(uid)
|
||||
db_file = path_dconf_config[:-3]
|
||||
try:
|
||||
process = subprocess.Popen(['dconf', 'compile', db_file, path_dconf_config],
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
|
||||
output, error = process.communicate()
|
||||
|
||||
if error:
|
||||
logdata['error'] = error
|
||||
log('E71', logdata)
|
||||
else:
|
||||
logdata['output'] = output
|
||||
log('D206', logdata)
|
||||
except Exception as exc:
|
||||
logdata['exc'] = exc
|
||||
log('E72', logdata)
|
||||
|
||||
@classmethod
|
||||
def check_profile_template(cls):
|
||||
if Path(cls.__template_file).exists():
|
||||
return True
|
||||
else:
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def update_dict_to_previous(cls):
|
||||
dict_clean_previous = remove_keys_with_prefix(cls._dconf_db)
|
||||
dict_with_previous = add_prefix_to_keys(dict_clean_previous)
|
||||
cls.global_registry_dict.update(dict_with_previous)
|
||||
|
||||
@classmethod
|
||||
def apply_template(cls, uid):
|
||||
logdata = {}
|
||||
if uid and cls.check_profile_template():
|
||||
with open(cls.__template_file, "r") as f:
|
||||
template = f.read()
|
||||
# Replace the "{uid}" placeholder with the actual UID value
|
||||
content = template.replace("{{uid}}", str(uid))
|
||||
|
||||
elif uid:
|
||||
content = f"user-db:user\n" \
|
||||
f"system-db:distr\n" \
|
||||
f"system-db:policy\n" \
|
||||
f"system-db:policy{uid}\n" \
|
||||
f"system-db:local\n" \
|
||||
f"system-db:default\n" \
|
||||
f"system-db:local\n" \
|
||||
f"system-db:policy{uid}\n" \
|
||||
f"system-db:policy\n" \
|
||||
f"system-db:distr\n"
|
||||
else:
|
||||
logdata['uid'] = uid
|
||||
log('W24', logdata)
|
||||
return
|
||||
|
||||
user_mandatory = f'/run/dconf/user/{uid}'
|
||||
touch_file(user_mandatory)
|
||||
|
||||
with open(user_mandatory, "w") as f:
|
||||
f.write(content)
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_policies_from_dconf(cls):
|
||||
return cls.get_dictionary_from_dconf(cls._policies_path, cls._policies_win_path)
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_dictionary_from_dconf(self, *startswith_list):
|
||||
output_dict = {}
|
||||
for startswith in startswith_list:
|
||||
dconf_dict = self.get_key_values(self.get_matching_keys(startswith))
|
||||
for key, value in dconf_dict.items():
|
||||
keys_tmp = key.split('/')
|
||||
update_dict(output_dict.setdefault('/'.join(keys_tmp[:-1])[1:], {}), {keys_tmp[-1]: str(value)})
|
||||
|
||||
log('D207')
|
||||
return output_dict
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_dictionary_from_dconf_file_db(self, uid=None, path_bin=None, save_dconf_db=False):
|
||||
logdata = {}
|
||||
error_skip = None
|
||||
if path_bin:
|
||||
error_skip = True
|
||||
elif not uid:
|
||||
path_bin = self._path_bin_system
|
||||
else:
|
||||
path_bin = self._path_bin_system + str(uid)
|
||||
output_dict = {}
|
||||
try:
|
||||
if (GLib.file_get_contents(path_bin)[0]):
|
||||
bytes1 = GLib.Bytes.new(GLib.file_get_contents(path_bin)[1])
|
||||
table = Gvdb.Table.new_from_bytes(bytes1, True)
|
||||
|
||||
name_list = Gvdb.Table.get_names(table)
|
||||
for name in name_list:
|
||||
value = Gvdb.Table.get_value(table, name)
|
||||
if value is None:
|
||||
continue
|
||||
list_path = name.split('/')
|
||||
if value.is_of_type(GLib.VariantType('s')):
|
||||
part = output_dict.setdefault('/'.join(list_path[1:-1]), {})
|
||||
part[list_path[-1]] = value.get_string()
|
||||
elif value.is_of_type(GLib.VariantType('i')):
|
||||
part = output_dict.setdefault('/'.join(list_path[1:-1]), {})
|
||||
part[list_path[-1]] = value.get_int32()
|
||||
except Exception as exc:
|
||||
logdata['exc'] = exc
|
||||
logdata['path_bin'] = path_bin
|
||||
if not error_skip:
|
||||
log('E73', logdata)
|
||||
else:
|
||||
log('D217', logdata)
|
||||
if save_dconf_db:
|
||||
Dconf_registry._dconf_db = output_dict
|
||||
return output_dict
|
||||
|
||||
|
||||
@classmethod
|
||||
def filter_entries(cls, startswith, registry_dict = None):
|
||||
if not registry_dict:
|
||||
registry_dict = cls.global_registry_dict
|
||||
if startswith[-1] == '%':
|
||||
startswith = startswith[:-1]
|
||||
if startswith[-1] == '/' or startswith[-1] == '\\':
|
||||
startswith = startswith[:-1]
|
||||
return filter_dict_keys(startswith, flatten_dictionary(registry_dict))
|
||||
return filter_dict_keys(startswith, flatten_dictionary(registry_dict))
|
||||
|
||||
|
||||
@classmethod
|
||||
def filter_hklm_entries(cls, startswith):
|
||||
pregs = cls.filter_entries(startswith)
|
||||
list_entiers = list()
|
||||
for keyname, value in pregs.items():
|
||||
if isinstance(value, dict):
|
||||
for valuename, data in value.items():
|
||||
list_entiers.append(PregDconf(
|
||||
keyname, convert_string_dconf(valuename), find_preg_type(data), data))
|
||||
elif isinstance(value, list):
|
||||
for data in value:
|
||||
list_entiers.append(PregDconf(
|
||||
keyname, data, find_preg_type(data), data))
|
||||
else:
|
||||
list_entiers.append(PregDconf(
|
||||
'/'.join(keyname.split('/')[:-1]), convert_string_dconf(keyname.split('/')[-1]), find_preg_type(value), value))
|
||||
|
||||
|
||||
return gplist(list_entiers)
|
||||
|
||||
|
||||
@classmethod
|
||||
def filter_hkcu_entries(cls, startswith):
|
||||
return cls.filter_hklm_entries(startswith)
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_storage(cls,dictionary = None):
|
||||
if dictionary:
|
||||
result = dictionary
|
||||
elif Dconf_registry._gpt_read_flag:
|
||||
result = Dconf_registry.global_registry_dict
|
||||
else:
|
||||
if Dconf_registry.__dconf_dict_flag:
|
||||
result = Dconf_registry.__dconf_dict
|
||||
else:
|
||||
Dconf_registry.__dconf_dict = Dconf_registry.get_policies_from_dconf()
|
||||
result = Dconf_registry.__dconf_dict
|
||||
Dconf_registry.__dconf_dict_flag = True
|
||||
return result
|
||||
|
||||
|
||||
@classmethod
|
||||
def filling_storage_from_dconf(cls):
|
||||
Dconf_registry.global_registry_dict = Dconf_registry.get_storage()
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_entry(cls, path, dictionary = None, preg = True):
|
||||
logdata = {}
|
||||
result = Dconf_registry.get_storage(dictionary)
|
||||
|
||||
keys = path.split("\\") if "\\" in path else path.split("/")
|
||||
key = '/'.join(keys[:-1]) if keys[0] else '/'.join(keys[:-1])[1:]
|
||||
|
||||
if isinstance(result, dict) and key in result.keys():
|
||||
data = result.get(key).get(keys[-1])
|
||||
return PregDconf(
|
||||
key, convert_string_dconf(keys[-1]), find_preg_type(data), data) if preg else data
|
||||
else:
|
||||
logdata['path'] = path
|
||||
log('D208', logdata)
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def check_enable_key(cls ,key):
|
||||
data = cls.get_entry(key, preg = False)
|
||||
if data:
|
||||
if isinstance(data, str):
|
||||
return True if data in cls._true_strings else False
|
||||
elif isinstance(data, int):
|
||||
return bool(data)
|
||||
else:
|
||||
return False
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def get_hkcu_entry(cls, hive_key, dictionary = None):
|
||||
return cls.get_hklm_entry(hive_key, dictionary)
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_hklm_entry(cls, hive_key, dictionary = None):
|
||||
return cls.get_entry(hive_key, dictionary)
|
||||
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_shortcut(cls, sc_obj, policy_name):
|
||||
sc_obj.policy_name = policy_name
|
||||
cls.shortcuts.append(sc_obj)
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_printer(cls, pobj, policy_name):
|
||||
pobj.policy_name = policy_name
|
||||
cls.printers.append(pobj)
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_drive(cls, dobj, policy_name):
|
||||
dobj.policy_name = policy_name
|
||||
cls.drives.append(dobj)
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_folder(cls, fobj, policy_name):
|
||||
fobj.policy_name = policy_name
|
||||
cls.folders.append(fobj)
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_envvar(self, evobj, policy_name):
|
||||
evobj.policy_name = policy_name
|
||||
self.environmentvariables.append(evobj)
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_script(cls, scrobj, policy_name):
|
||||
scrobj.policy_name = policy_name
|
||||
cls.scripts.append(scrobj)
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_file(cls, fileobj, policy_name):
|
||||
fileobj.policy_name = policy_name
|
||||
cls.files.append(fileobj)
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_ini(cls, iniobj, policy_name):
|
||||
iniobj.policy_name = policy_name
|
||||
cls.inifiles.append(iniobj)
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_networkshare(cls, networkshareobj, policy_name):
|
||||
networkshareobj.policy_name = policy_name
|
||||
cls.networkshares.append(networkshareobj)
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_shortcuts(cls):
|
||||
return cls.shortcuts
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_printers(cls):
|
||||
return cls.printers
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_drives(cls):
|
||||
return cls.drives
|
||||
|
||||
@classmethod
|
||||
def get_folders(cls):
|
||||
return cls.folders
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_envvars(cls):
|
||||
return cls.environmentvariables
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_scripts(cls, action):
|
||||
action_scripts = list()
|
||||
for part in cls.scripts:
|
||||
if action == 'LOGON' and part.action == 'LOGON':
|
||||
action_scripts.append(part)
|
||||
elif action == 'LOGOFF' and part.action == 'LOGOFF':
|
||||
action_scripts.append(part)
|
||||
elif action == 'STARTUP' and part.action == 'STARTUP':
|
||||
action_scripts.append(part)
|
||||
elif action == 'SHUTDOWN' and part.action == 'SHUTDOWN':
|
||||
action_scripts.append(part)
|
||||
return action_scripts
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_files(cls):
|
||||
return cls.files
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_networkshare(cls):
|
||||
return cls.networkshares
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_ini(cls):
|
||||
return cls.inifiles
|
||||
|
||||
|
||||
@classmethod
|
||||
def wipe_user(cls):
|
||||
cls.wipe_hklm()
|
||||
|
||||
|
||||
@classmethod
|
||||
def wipe_hklm(cls):
|
||||
cls.global_registry_dict = dict({cls._GpoPriority:{}})
|
||||
|
||||
|
||||
def filter_dict_keys(starting_string, input_dict):
|
||||
result = dict()
|
||||
for key in input_dict:
|
||||
key_list = remove_empty_values(re.split(r'\\|/', key))
|
||||
start_list = remove_empty_values(re.split(r'\\|/', starting_string))
|
||||
if key_list[:len(start_list)] == start_list:
|
||||
result[key] = input_dict.get(key)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def find_preg_type(argument):
|
||||
if isinstance(argument, int):
|
||||
return 4
|
||||
else:
|
||||
return 1
|
||||
|
||||
|
||||
def update_dict(dict1, dict2, save_key=None):
|
||||
'''
|
||||
Updates dict1 with the key-value pairs from dict2
|
||||
'''
|
||||
for key, value in dict2.items():
|
||||
if key in dict1:
|
||||
# If both values are dictionaries, recursively call the update_dict function
|
||||
if isinstance(dict1[key], dict) and isinstance(value, dict):
|
||||
save_key = key
|
||||
update_dict(dict1[key], value, save_key)
|
||||
# If the value in dict1 is a list, extend it with unique values from value
|
||||
elif isinstance(dict1[key], list):
|
||||
dict1[key].extend(set(value) - set(dict1[key]))
|
||||
else:
|
||||
# If the value in dict1 is not a dictionary or the value in dict2 is not a dictionary,
|
||||
# replace the value in dict1 with the value from dict2
|
||||
if save_key and save_key.startswith('Source'):
|
||||
value.reloaded_with_policy_key = [dict1[key].policy_name]
|
||||
if dict1[key].reloaded_with_policy_key:
|
||||
value.reloaded_with_policy_key += dict1[key].reloaded_with_policy_key
|
||||
dict1[key] = value
|
||||
else:
|
||||
dict1[key] = value
|
||||
else:
|
||||
# If the key does not exist in dict1, add the key-value pair from dict2 to dict1
|
||||
dict1[key] = value
|
||||
|
||||
|
||||
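A small sketch of the merge behaviour for plain nested dictionaries (the branch for keys under a 'Source' prefix is not exercised here):

d1 = {'Software/Acme': {'Enabled': '1'}, 'List': ['a']}
d2 = {'Software/Acme': {'Level': '2'}, 'List': ['a', 'b']}
update_dict(d1, d2)
# d1 == {'Software/Acme': {'Enabled': '1', 'Level': '2'}, 'List': ['a', 'b']}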
def add_to_dict(string, username, gpo_info):
|
||||
if gpo_info:
|
||||
counter = gpo_info.counter
|
||||
display_name = gpo_info.display_name
|
||||
name = gpo_info.name
|
||||
version = gpo_info.version
|
||||
else:
|
||||
counter = 0
|
||||
display_name = 'Local Policy'
|
||||
name = None
|
||||
version = None
|
||||
|
||||
if username is None or username == 'Machine':
|
||||
machine= '{}/Machine/{}'.format(Dconf_registry._GpoPriority, counter)
|
||||
dictionary = Dconf_registry.global_registry_dict.setdefault(machine, dict())
|
||||
else:
|
||||
if name in Dconf_registry._gpo_name:
|
||||
return
|
||||
user = '{}/User/{}'.format(Dconf_registry._GpoPriority, counter)
|
||||
dictionary = Dconf_registry.global_registry_dict.setdefault(user, dict())
|
||||
Dconf_registry._gpo_name.add(name)
|
||||
|
||||
dictionary['display_name'] = display_name
|
||||
dictionary['name'] = name
|
||||
dictionary['version'] = str(version)
|
||||
dictionary['correct_path'] = string
|
||||
|
||||
def get_mod_previous_value(key_source, key_valuename):
|
||||
previous_sourc = try_dict_to_literal_eval(Dconf_registry._dconf_db
|
||||
.get(key_source, {})
|
||||
.get(key_valuename, {}))
|
||||
return previous_sourc.get('mod_previous_value') if previous_sourc else None
|
||||
|
||||
def get_previous_value(key_source, key_valuename):
|
||||
previous = key_source.replace('Source', 'Previous')
|
||||
return (Dconf_registry._dconf_db
|
||||
.get(previous, {})
|
||||
.get(key_valuename, None))
|
||||
|
||||
def load_preg_dconf(pregfile, pathfile, policy_name, username, gpo_info):
|
||||
'''
|
||||
Loads the configuration from preg registry into a dictionary
|
||||
'''
|
||||
# Prefix for storing key data
|
||||
source_pre = "Source"
|
||||
dd = dict()
|
||||
for i in pregfile.entries:
|
||||
# Skip this entry if the valuename starts with '**del'
|
||||
if i.valuename.lower().startswith('**del'):
|
||||
continue
|
||||
valuename = convert_string_dconf(i.valuename)
|
||||
data = check_data(i.data, i.type)
|
||||
if i.valuename != i.data and i.valuename:
|
||||
key_registry_source = f"{source_pre}/{i.keyname}".replace('\\', '/')
|
||||
key_registry = f"{i.keyname}".replace('\\', '/')
|
||||
key_valuename = valuename.replace('\\', '/')
|
||||
if i.keyname.replace('\\', '/') in dd:
|
||||
# If the key exists in dd, update its value with the new key-value pair
|
||||
dd[i.keyname.replace('\\', '/')].update({key_valuename:data})
|
||||
mod_previous_value = get_mod_previous_value(key_registry_source, key_valuename)
|
||||
previous_value = get_previous_value(key_registry, key_valuename)
|
||||
if previous_value != data:
|
||||
(dd[key_registry_source]
|
||||
.update({key_valuename:RegistryKeyMetadata(policy_name, i.type, mod_previous_value=previous_value)}))
|
||||
else:
|
||||
(dd[key_registry_source]
|
||||
.update({key_valuename:RegistryKeyMetadata(policy_name, i.type, mod_previous_value=mod_previous_value)}))
|
||||
else:
|
||||
# If the key does not exist in dd, create a new key-value pair
|
||||
dd[i.keyname.replace('\\', '/')] = {key_valuename:data}
|
||||
mod_previous_value = get_mod_previous_value(key_registry_source, key_valuename)
|
||||
previous_value = get_previous_value(key_registry, key_valuename)
|
||||
if previous_value != data:
|
||||
dd[key_registry_source] = {key_valuename:RegistryKeyMetadata(policy_name, i.type, mod_previous_value=previous_value)}
|
||||
else:
|
||||
dd[key_registry_source] = {key_valuename:RegistryKeyMetadata(policy_name, i.type, mod_previous_value=mod_previous_value)}
|
||||
|
||||
elif not i.valuename:
|
||||
keyname_tmp = i.keyname.replace('\\', '/').split('/')
|
||||
keyname = '/'.join(keyname_tmp[:-1])
|
||||
mod_previous_value = get_mod_previous_value(f"{source_pre}/{keyname}", keyname_tmp[-1])
|
||||
previous_value = get_previous_value(f"{keyname}", keyname_tmp[-1])
|
||||
if keyname in dd:
|
||||
# If the key exists in dd, update its value with the new key-value pair
|
||||
dd[keyname].update({keyname_tmp[-1]:data})
|
||||
if previous_value != data:
|
||||
dd[f"{source_pre}/{keyname}"].update({keyname_tmp[-1]:RegistryKeyMetadata(policy_name, i.type, mod_previous_value=previous_value)})
|
||||
else:
|
||||
dd[f"{source_pre}/{keyname}"].update({keyname_tmp[-1]:RegistryKeyMetadata(policy_name, i.type, mod_previous_value=mod_previous_value)})
|
||||
else:
|
||||
# If the key does not exist in dd, create a new key-value pair
|
||||
dd[keyname] = {keyname_tmp[-1]:data}
|
||||
if previous_value != data:
|
||||
dd[f"{source_pre}/{keyname}"] = {keyname_tmp[-1]:RegistryKeyMetadata(policy_name, i.type, mod_previous_value=previous_value)}
|
||||
else:
|
||||
dd[f"{source_pre}/{keyname}"] = {keyname_tmp[-1]:RegistryKeyMetadata(policy_name, i.type, mod_previous_value=mod_previous_value)}
|
||||
|
||||
else:
|
||||
# If the value name is the same as the data,
|
||||
# split the keyname and add the data to the appropriate location in dd.
|
||||
all_list_key = i.keyname.split('\\')
|
||||
key_d ='/'.join(all_list_key[:-1])
|
||||
dd_target = dd.setdefault(key_d,{})
|
||||
key_source = f"Source/{key_d}"
|
||||
dd_target_source = dd.setdefault(key_source, {})
|
||||
data_list = dd_target.setdefault(all_list_key[-1], []).append(data)
|
||||
mod_previous_value = get_mod_previous_value(key_source, all_list_key[-1])
|
||||
previous_value = get_previous_value(key_d, all_list_key[-1])
|
||||
if previous_value != str(data_list):
|
||||
dd_target_source[all_list_key[-1]] = RegistryKeyMetadata(policy_name, i.type, is_list=True, mod_previous_value=previous_value)
|
||||
else:
|
||||
dd_target_source[all_list_key[-1]] = RegistryKeyMetadata(policy_name, i.type, is_list=True, mod_previous_value=mod_previous_value)
|
||||
|
||||
# Update the global registry dictionary with the contents of dd
|
||||
update_dict(Dconf_registry.global_registry_dict, dd)
|
||||
|
||||
|
||||
def create_dconf_ini_file(filename, data, uid=None, nodomain=None):
|
||||
'''
|
||||
Create an ini-file based on a dictionary of dictionaries.
|
||||
Args:
|
||||
data (dict): The dictionary of dictionaries containing the data for the ini-file.
|
||||
filename (str): The filename to save the ini-file.
|
||||
Returns:
|
||||
None
|
||||
Raises:
|
||||
None
|
||||
'''
|
||||
with open(filename, 'a' if nodomain else 'w') as file:
|
||||
for section, section_data in data.items():
|
||||
if not section:
|
||||
continue
|
||||
file.write(f'[{section}]\n')
|
||||
for key, value in section_data.items():
|
||||
if not key:
|
||||
continue
|
||||
if isinstance(value, int):
|
||||
file.write(f'{key} = {value}\n')
|
||||
else:
|
||||
file.write(f'{key} = "{value}"\n')
|
||||
file.write('\n')
|
||||
logdata = {'path': filename}
|
||||
log('D209', logdata)
|
||||
create_dconf_file_locks(filename, data)
|
||||
Dconf_registry.dconf_update(uid)
|
||||
|
||||
|
||||
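For reference, the keyfile layout produced by the writer above, with an illustrative section and keys; integers are written unquoted, everything else is quoted, and the matching lock file and dconf recompile follow automatically:

data = {'org/example/settings': {'picture-uri': '/usr/share/wallpaper.png', 'delay': 5}}
# create_dconf_ini_file(path, data) would render:
#   [org/example/settings]
#   picture-uri = "/usr/share/wallpaper.png"
#   delay = 5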
def create_dconf_file_locks(filename_ini, data):
|
||||
"""
|
||||
Creates a dconf lock file based on the provided filename and data.
|
||||
|
||||
:param filename_ini: Path to the ini file (str)
|
||||
:param data: Dictionary containing configuration data
|
||||
"""
|
||||
# Extract the path parts up to the directory of the ini file
|
||||
tmp_lock = filename_ini.split('/')[:-1]
|
||||
|
||||
# Construct the path to the lock file
|
||||
file_lock = '/'.join(tmp_lock + ['locks', tmp_lock[-1][:-1] + 'pol'])
|
||||
|
||||
# Create an empty lock file
|
||||
touch_file(file_lock)
|
||||
|
||||
# Open the lock file for writing
|
||||
with open(file_lock, 'w') as file:
|
||||
# Iterate over all lock keys obtained from the data
|
||||
for key_lock in get_keys_dconf_locks(data):
|
||||
# Remove the "lock/" prefix from the key and split into parts
|
||||
key = key_lock.split('/')[1:]
|
||||
# Write the cleaned key to the lock file
|
||||
file.write(f'{key}\n')
|
||||
|
||||
def get_keys_dconf_locks(data):
|
||||
"""
|
||||
Extracts keys from the provided data that start with "Locks/"
|
||||
and have a value of 1.
|
||||
|
||||
:param data: Dictionary containing configuration data
|
||||
:return: List of lock keys (str) without the "Locks/" prefix
|
||||
"""
|
||||
result = []
|
||||
# Flatten the nested dictionary into a single-level dictionary
|
||||
flatten_data = flatten_dictionary(data)
|
||||
|
||||
# Iterate through all keys in the flattened dictionary
|
||||
for key in flatten_data:
|
||||
# Check if the key starts with "Locks/" and its value is 1
|
||||
if key.startswith('Locks/') and flatten_data[key] == 1:
|
||||
# Remove the "Locks" prefix and append to the result
|
||||
result.append(key.removeprefix('Locks'))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
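A minimal sketch of the lock-key extraction (the paths are illustrative):

data = {'Locks/org/example/desktop': {'picture-uri': 1, 'delay': 0}}
get_keys_dconf_locks(data)
# -> ['/org/example/desktop/picture-uri']   ('delay' has value 0, so it is not locked)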
def check_data(data, t_data):
|
||||
if isinstance(data, bytes):
|
||||
if t_data == 7:
|
||||
return clean_data(data.decode('utf-16').replace('\x00',''))
|
||||
else:
|
||||
return None
|
||||
elif t_data == 4:
|
||||
return data
|
||||
return clean_data(data)
|
||||
|
||||
def convert_string_dconf(input_string):
|
||||
macros = {
|
||||
'#': '%sharp%',
|
||||
';': '%semicolon%',
|
||||
'//': '%doubleslash%',
|
||||
'/': '%oneslash%'
|
||||
}
|
||||
output_string = input_string
|
||||
for key, value in macros.items():
|
||||
if key in input_string:
|
||||
output_string = input_string.replace(key, value)
|
||||
elif value in input_string:
|
||||
output_string = input_string.replace(value, key)
|
||||
|
||||
return output_string
|
||||
|
||||
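A quick sketch of the substitution used for characters dconf cannot carry in value names (the value name is illustrative); applying it twice restores the original string:

convert_string_dconf('Proxy#Server')         # -> 'Proxy%sharp%Server'
convert_string_dconf('Proxy%sharp%Server')   # -> 'Proxy#Server'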
def remove_empty_values(input_list):
|
||||
return list(filter(None, input_list))
|
||||
|
||||
def flatten_dictionary(input_dict, result=None, current_key=''):
|
||||
if result is None:
|
||||
result = {}
|
||||
|
||||
for key, value in input_dict.items():
|
||||
new_key = f"{current_key}/{key}" if current_key else key
|
||||
|
||||
if isinstance(value, dict):
|
||||
flatten_dictionary(value, result, new_key)
|
||||
else:
|
||||
result[new_key] = value
|
||||
|
||||
return result
|
||||
|
||||
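A minimal sketch of the flattening (keys are illustrative):

flatten_dictionary({'Software': {'BaseALT': {'Version': '1'}}, 'Top': 'x'})
# -> {'Software/BaseALT/Version': '1', 'Top': 'x'}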
def get_dconf_envprofile():
|
||||
dconf_envprofile = {'default': {'DCONF_PROFILE': 'default'},
|
||||
'local': {'DCONF_PROFILE': 'local'},
|
||||
'system': {'DCONF_PROFILE': 'system'}
|
||||
}
|
||||
|
||||
if Dconf_registry._envprofile:
|
||||
return dconf_envprofile.get(Dconf_registry._envprofile, dconf_envprofile['system'])
|
||||
|
||||
if not Dconf_registry._username:
|
||||
return dconf_envprofile['system']
|
||||
|
||||
profile = '/run/dconf/user/{}'.format(get_uid_by_username(Dconf_registry._username))
|
||||
return {'DCONF_PROFILE': profile}
|
||||
|
||||
|
||||
def convert_elements_to_list_dicts(elements):
|
||||
return list(map(lambda x: dict(x), elements))
|
||||
|
||||
def remove_duplicate_dicts_in_list(list_dict):
|
||||
return convert_elements_to_list_dicts(list(OrderedDict((tuple(sorted(d.items())), d) for d in list_dict).values()))
|
||||
|
||||
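A small sketch of the de-duplication (entries are illustrative); dictionaries are compared by their sorted items, so key order does not matter:

remove_duplicate_dicts_in_list([{'name': 'a', 'path': '/tmp'},
                                {'path': '/tmp', 'name': 'a'},
                                {'name': 'b'}])
# -> [{'path': '/tmp', 'name': 'a'}, {'name': 'b'}]   (the two 'a' entries collapse into one)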
def add_preferences_to_global_registry_dict(username, is_machine):
|
||||
if is_machine:
|
||||
prefix = 'Software/BaseALT/Policies/Preferences/Machine'
|
||||
else:
|
||||
prefix = f'Software/BaseALT/Policies/Preferences/{username}'
|
||||
|
||||
preferences_global = [('Shortcuts',remove_duplicate_dicts_in_list(Dconf_registry.shortcuts)),
|
||||
('Folders',remove_duplicate_dicts_in_list(Dconf_registry.folders)),
|
||||
('Files',remove_duplicate_dicts_in_list(Dconf_registry.files)),
|
||||
('Drives',remove_duplicate_dicts_in_list(Dconf_registry.drives)),
|
||||
('Scheduledtasks',remove_duplicate_dicts_in_list(Dconf_registry.scheduledtasks)),
|
||||
('Environmentvariables',remove_duplicate_dicts_in_list(Dconf_registry.environmentvariables)),
|
||||
('Inifiles',remove_duplicate_dicts_in_list(Dconf_registry.inifiles)),
|
||||
('Services',remove_duplicate_dicts_in_list(Dconf_registry.services)),
|
||||
('Printers',remove_duplicate_dicts_in_list(Dconf_registry.printers)),
|
||||
('Scripts',remove_duplicate_dicts_in_list(Dconf_registry.scripts)),
|
||||
('Networkshares',remove_duplicate_dicts_in_list(Dconf_registry.networkshares))]
|
||||
|
||||
preferences_global_dict = dict()
|
||||
preferences_global_dict[prefix] = dict()
|
||||
|
||||
for key, val in preferences_global:
|
||||
preferences_global_dict[prefix].update({key:clean_data(str(val))})
|
||||
|
||||
update_dict(Dconf_registry.global_registry_dict, preferences_global_dict)
|
||||
|
||||
def extract_display_name_version(data, username):
|
||||
policy_force = data.get('Software/BaseALT/Policies/GPUpdate', {}).get('Force', False)
|
||||
if Dconf_registry._force or policy_force:
|
||||
logdata = {'username': username}
|
||||
log('W26', logdata)
|
||||
return {}
|
||||
result = {}
|
||||
tmp = {}
|
||||
if isinstance(data, dict):
|
||||
for key in data.keys():
|
||||
if key.startswith(Dconf_registry._GpoPriority+'/'):
|
||||
tmp[key] = data[key]
|
||||
for value in tmp.values():
|
||||
if isinstance(value, dict) and value.get('version', 'None')!='None' and value.get('display_name'):
|
||||
result[value['display_name']] = {'version': value['version'], 'correct_path': value['correct_path']}
|
||||
Dconf_registry._dict_gpo_name_version_cache = result
|
||||
return result
|
||||
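A minimal sketch of the GPO name/version extraction driven by the priority keys written by add_to_dict above (the GPO data is illustrative); when the GPUpdate Force flag is set, an empty dictionary is returned instead:

priority_key = 'Software/BaseALT/Policies/GpoPriority/Machine/0'
data = {priority_key: {'display_name': 'Default Domain Policy',
                       'name': 'gpo-guid',
                       'version': '3',
                       'correct_path': 'smb://dc.example.com/SysVol/example.com/Policies/gpo-guid'}}
extract_display_name_version(data, 'user1')
# -> {'Default Domain Policy': {'version': '3',
#                               'correct_path': 'smb://dc.example.com/SysVol/example.com/Policies/gpo-guid'}}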
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2021 BaseALT Ltd. <org@basealt.ru>
|
||||
# Copyright (C) 2021-2025 BaseALT Ltd. <org@basealt.ru>
|
||||
# Copyright (C) 2021 Igor Chudov <nir@nir.org.ru>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
@@ -20,61 +20,78 @@
|
||||
import os
|
||||
import os.path
|
||||
from pathlib import Path
|
||||
import tempfile
|
||||
|
||||
import smbc
|
||||
|
||||
|
||||
from util.logging import log
|
||||
from util.paths import file_cache_dir, UNCPath
|
||||
from util.exceptions import NotUNCPathError
|
||||
from util.logging import log
|
||||
from util.paths import UNCPath, file_cache_dir, file_cache_path_home
|
||||
from util.util import get_machine_name
|
||||
|
||||
|
||||
class fs_file_cache:
|
||||
__read_blocksize = 4096
|
||||
|
||||
def __init__(self, cache_name):
|
||||
def __init__(self, cache_name, username = None):
|
||||
self.cache_name = cache_name
|
||||
self.storage_uri = file_cache_dir()
|
||||
logdata = dict({'cache_file': self.storage_uri})
|
||||
self.username = username
|
||||
if username and username != get_machine_name():
|
||||
try:
|
||||
self.storage_uri = file_cache_path_home(username)
|
||||
except:
|
||||
self.storage_uri = file_cache_dir()
|
||||
else:
|
||||
self.storage_uri = file_cache_dir()
|
||||
logdata = {'cache_file': self.storage_uri}
|
||||
log('D20', logdata)
|
||||
self.samba_context = smbc.Context(use_kerberos=1)
|
||||
#, debug=10)
|
||||
|
||||
def store(self, uri):
|
||||
destdir = uri
|
||||
def store(self, uri, destfile = None):
|
||||
try:
|
||||
uri_path = UNCPath(uri)
|
||||
file_name = os.path.basename(uri_path.get_path())
|
||||
file_path = os.path.dirname(uri_path.get_path())
|
||||
destdir = Path('{}/{}/{}'.format(self.storage_uri,
|
||||
uri_path.get_domain(),
|
||||
file_path))
|
||||
if not destfile:
|
||||
file_name = os.path.basename(uri_path.get_path())
|
||||
file_path = os.path.dirname(uri_path.get_path())
|
||||
destdir = Path('{}/{}/{}'.format(self.storage_uri,
|
||||
uri_path.get_domain(),
|
||||
file_path))
|
||||
else:
|
||||
destdir = destfile.parent
|
||||
except NotUNCPathError:
|
||||
return None
|
||||
|
||||
except Exception as exc:
|
||||
logdata = dict({'exception': str(exc)})
|
||||
logdata = {'exception': str(exc)}
|
||||
log('D144', logdata)
|
||||
raise exc
|
||||
|
||||
if not destdir.exists():
|
||||
destdir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
destfile = Path('{}/{}/{}'.format(self.storage_uri,
|
||||
uri_path.get_domain(),
|
||||
uri_path.get_path()))
|
||||
if not destfile:
|
||||
destfile = Path('{}/{}/{}'.format(self.storage_uri,
|
||||
uri_path.get_domain(),
|
||||
uri_path.get_path()))
|
||||
|
||||
with open(destfile, 'wb') as df:
|
||||
df.truncate()
|
||||
df.flush()
|
||||
try:
|
||||
file_handler = self.samba_context.open(str(uri_path), os.O_RDONLY)
|
||||
while True:
|
||||
data = file_handler.read(self.__read_blocksize)
|
||||
if not data:
|
||||
break
|
||||
df.write(data)
|
||||
df.flush()
|
||||
except Exception as exc:
|
||||
logdata = dict({'exception': str(exc)})
|
||||
log('E35', logdata)
|
||||
raise exc
|
||||
try:
|
||||
fd, tmpfile = tempfile.mkstemp('', str(destfile))
|
||||
df = os.fdopen(fd, 'wb')
|
||||
file_handler = self.samba_context.open(str(uri_path), os.O_RDONLY)
|
||||
while True:
|
||||
data = file_handler.read(self.__read_blocksize)
|
||||
if not data:
|
||||
break
|
||||
df.write(data)
|
||||
df.close()
|
||||
os.rename(tmpfile, destfile)
|
||||
os.chmod(destfile, 0o644)
|
||||
except Exception as exc:
|
||||
logdata = {'exception': str(exc)}
|
||||
log('W25', logdata)
|
||||
tmppath = Path(tmpfile)
|
||||
if tmppath.exists():
|
||||
tmppath.unlink()
|
||||
|
||||
def get(self, uri):
|
||||
destfile = uri
|
||||
@@ -86,12 +103,28 @@ class fs_file_cache:
|
||||
uri_path.get_domain(),
|
||||
uri_path.get_path()))
|
||||
except NotUNCPathError as exc:
|
||||
logdata = dict({'path': str(exc)})
|
||||
logdata = {'path': str(exc)}
|
||||
log('D62', logdata)
|
||||
except Exception as exc:
|
||||
logdata = dict({'exception': str(exc)})
|
||||
logdata = {'exception': str(exc)}
|
||||
log('E36', logdata)
|
||||
raise exc
|
||||
if Path(destfile).exists():
|
||||
return str(destfile)
|
||||
else:
|
||||
return None
|
||||
|
||||
return str(destfile)
|
||||
|
||||
def get_ls_smbdir(self, uri):
|
||||
type_file_smb = 8
|
||||
try:
|
||||
uri_path = UNCPath(uri)
|
||||
opendir = self.samba_context.opendir(str(uri_path))
|
||||
ls_obj = opendir.getdents()
|
||||
ls = [obj.name for obj in ls_obj if obj.smbc_type == type_file_smb]
|
||||
return ls
|
||||
except Exception as exc:
|
||||
if Path(uri).exists():
|
||||
return None
|
||||
logdata = {'exception': str(exc)}
|
||||
log('W12', logdata)
|
||||
return None
|
||||
|
||||
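A usage sketch of the reworked cache (the share path and user name are illustrative; a valid Kerberos ticket is assumed for the smbc context):

cache = fs_file_cache('file_cache', username='user1')
uri = '\\\\example.com\\SysVol\\example.com\\Policies\\gpo-guid\\GPT.INI'
cache.store(uri)             # download into the per-user cache via a temporary file
local_copy = cache.get(uri)  # cached path as str, or None if the file is not present

Writing to a temporary file and renaming it into place keeps a half-downloaded file from ever replacing a good cached copy.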
@@ -1,176 +0,0 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
class samba_preg(object):
|
||||
'''
|
||||
Object mapping representing HKLM entry (registry key without SID)
|
||||
'''
|
||||
def __init__(self, preg_obj, policy_name):
|
||||
self.policy_name = policy_name
|
||||
self.keyname = preg_obj.keyname
|
||||
self.valuename = preg_obj.valuename
|
||||
self.hive_key = '{}\\{}'.format(self.keyname, self.valuename)
|
||||
self.type = preg_obj.type
|
||||
self.data = preg_obj.data
|
||||
|
||||
def update_fields(self):
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['type'] = self.type
|
||||
fields['data'] = self.data
|
||||
|
||||
return fields
|
||||
|
||||
class samba_hkcu_preg(object):
|
||||
'''
|
||||
Object mapping representing HKCU entry (registry key with SID)
|
||||
'''
|
||||
def __init__(self, sid, preg_obj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.keyname = preg_obj.keyname
|
||||
self.valuename = preg_obj.valuename
|
||||
self.hive_key = '{}\\{}'.format(self.keyname, self.valuename)
|
||||
self.type = preg_obj.type
|
||||
self.data = preg_obj.data
|
||||
|
||||
def update_fields(self):
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['type'] = self.type
|
||||
fields['data'] = self.data
|
||||
|
||||
return fields
|
||||
|
||||
class ad_shortcut(object):
|
||||
'''
|
||||
Object mapping representing Windows shortcut.
|
||||
'''
|
||||
def __init__(self, sid, sc, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.path = sc.dest
|
||||
self.shortcut = sc.to_json()
|
||||
|
||||
def update_fields(self):
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['path'] = self.path
|
||||
fields['shortcut'] = self.shortcut
|
||||
|
||||
return fields
|
||||
|
||||
class info_entry(object):
|
||||
def __init__(self, name, value):
|
||||
self.name = name
|
||||
self.value = value
|
||||
|
||||
def update_fields(self):
|
||||
fields = dict()
|
||||
fields['value'] = self.value
|
||||
|
||||
return fields
|
||||
|
||||
class printer_entry(object):
|
||||
'''
|
||||
Object mapping representing Windows printer of some type.
|
||||
'''
|
||||
def __init__(self, sid, pobj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.name = pobj.name
|
||||
self.printer = pobj.to_json()
|
||||
|
||||
def update_fields(self):
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['name'] = self.name
|
||||
fields['printer'] = self.printer.to_json()
|
||||
|
||||
return fields
|
||||
|
||||
class drive_entry(object):
|
||||
'''
|
||||
Object mapping representing Samba share bound to drive letter
|
||||
'''
|
||||
def __init__(self, sid, dobj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.login = dobj.login
|
||||
self.password = dobj.password
|
||||
self.dir = dobj.dir
|
||||
self.path = dobj.path
|
||||
|
||||
def update_fields(self):
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['login'] = self.login
|
||||
fields['password'] = self.password
|
||||
fields['dir'] = self.dir
|
||||
fields['path'] = self.path
|
||||
|
||||
return fields
|
||||
|
||||
class folder_entry(object):
|
||||
'''
|
||||
Object mapping representing file system directory
|
||||
'''
|
||||
def __init__(self, sid, fobj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.path = fobj.path
|
||||
self.action = fobj.action.value
|
||||
self.delete_folder = str(fobj.delete_folder)
|
||||
self.delete_sub_folders = str(fobj.delete_sub_folders)
|
||||
self.delete_files = str(fobj.delete_files)
|
||||
|
||||
def update_fields(self):
|
||||
'''
|
||||
Return list of fields to update
|
||||
'''
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['action'] = self.action
|
||||
fields['delete_folder'] = self.delete_folder
|
||||
fields['delete_sub_folders'] = self.delete_sub_folders
|
||||
fields['delete_files'] = self.delete_files
|
||||
|
||||
return fields
|
||||
|
||||
class envvar_entry(object):
|
||||
'''
|
||||
Object mapping representing environment variables
|
||||
'''
|
||||
def __init__(self, sid, evobj, policy_name):
|
||||
self.sid = sid
|
||||
self.policy_name = policy_name
|
||||
self.name = evobj.name
|
||||
self.value = evobj.value
|
||||
self.action = evobj.action.value
|
||||
|
||||
def update_fields(self):
|
||||
'''
|
||||
Return list of fields to update
|
||||
'''
|
||||
fields = dict()
|
||||
fields['policy_name'] = self.policy_name
|
||||
fields['action'] = self.action
|
||||
fields['value'] = self.value
|
||||
|
||||
return fields
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
from abc import ABC
|
||||
|
||||
|
||||
class registry(ABC):
|
||||
def __init__(self, db_name):
|
||||
pass
|
||||
|
||||
@@ -1,101 +0,0 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .cache import cache
|
||||
|
||||
import os
|
||||
|
||||
from sqlalchemy import (
|
||||
create_engine,
|
||||
Table,
|
||||
Column,
|
||||
Integer,
|
||||
String,
|
||||
MetaData
|
||||
)
|
||||
from sqlalchemy.orm import (
|
||||
mapper,
|
||||
sessionmaker
|
||||
)
|
||||
|
||||
from util.logging import log
|
||||
from util.paths import cache_dir
|
||||
|
||||
def mapping_factory(mapper_suffix):
|
||||
exec(
|
||||
'''
|
||||
class mapped_id_{}(object):
|
||||
def __init__(self, str_id, value):
|
||||
self.str_id = str_id
|
||||
self.value = str(value)
|
||||
'''.format(mapper_suffix)
|
||||
)
|
||||
return eval('mapped_id_{}'.format(mapper_suffix))
|
||||
|
||||
class sqlite_cache(cache):
|
||||
def __init__(self, cache_name):
|
||||
self.cache_name = cache_name
|
||||
self.mapper_obj = mapping_factory(self.cache_name)
|
||||
self.storage_uri = os.path.join('sqlite:///{}/{}.sqlite'.format(cache_dir(), self.cache_name))
|
||||
logdata = dict({'cache_file': self.storage_uri})
|
||||
log('D20', logdata)
|
||||
self.db_cnt = create_engine(self.storage_uri, echo=False)
|
||||
self.__metadata = MetaData(self.db_cnt)
|
||||
self.cache_table = Table(
|
||||
self.cache_name,
|
||||
self.__metadata,
|
||||
Column('id', Integer, primary_key=True),
|
||||
Column('str_id', String(65536), unique=True),
|
||||
Column('value', String)
|
||||
)
|
||||
|
||||
self.__metadata.create_all(self.db_cnt)
|
||||
Session = sessionmaker(bind=self.db_cnt)
|
||||
self.db_session = Session()
|
||||
mapper(self.mapper_obj, self.cache_table)
|
||||
|
||||
def store(self, str_id, value):
|
||||
obj = self.mapper_obj(str_id, value)
|
||||
self._upsert(obj)
|
||||
|
||||
def get(self, obj_id):
|
||||
result = self.db_session.query(self.mapper_obj).filter(self.mapper_obj.str_id == obj_id).first()
|
||||
return result
|
||||
|
||||
def get_default(self, obj_id, default_value):
|
||||
result = self.get(obj_id)
|
||||
if result == None:
|
||||
logdata = dict()
|
||||
logdata['object'] = obj_id
|
||||
log('D43', logdata)
|
||||
self.store(obj_id, default_value)
|
||||
return str(default_value)
|
||||
return result.value
|
||||
|
||||
def _upsert(self, obj):
|
||||
try:
|
||||
self.db_session.add(obj)
|
||||
self.db_session.commit()
|
||||
except Exception as exc:
|
||||
self.db_session.rollback()
|
||||
logdata = dict()
|
||||
logdata['msg'] = str(exc)
|
||||
log('D44', logdata)
|
||||
self.db_session.query(self.mapper_obj).filter(self.mapper_obj.str_id == obj.str_id).update({ 'value': obj.value })
|
||||
self.db_session.commit()
|
||||
|
||||
@@ -1,461 +0,0 @@
|
||||
#
|
||||
# GPOA - GPO Applier for Linux
|
||||
#
|
||||
# Copyright (C) 2019-2020 BaseALT Ltd.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
|
||||
from sqlalchemy import (
|
||||
create_engine,
|
||||
Table,
|
||||
Column,
|
||||
Integer,
|
||||
String,
|
||||
MetaData,
|
||||
UniqueConstraint
|
||||
)
|
||||
from sqlalchemy.orm import (
|
||||
mapper,
|
||||
sessionmaker
|
||||
)
|
||||
|
||||
from util.logging import log
|
||||
from util.paths import cache_dir
|
||||
from .registry import registry
|
||||
from .record_types import (
|
||||
samba_preg
|
||||
, samba_hkcu_preg
|
||||
, ad_shortcut
|
||||
, info_entry
|
||||
, printer_entry
|
||||
, drive_entry
|
||||
, folder_entry
|
||||
, envvar_entry
|
||||
)
|
||||
|
||||
class sqlite_registry(registry):
|
||||
def __init__(self, db_name, registry_cache_dir=None):
|
||||
self.db_name = db_name
|
||||
cdir = registry_cache_dir
|
||||
if cdir == None:
|
||||
cdir = cache_dir()
|
||||
self.db_path = os.path.join('sqlite:///{}/{}.sqlite'.format(cdir, self.db_name))
|
||||
self.db_cnt = create_engine(self.db_path, echo=False)
|
||||
self.__metadata = MetaData(self.db_cnt)
|
||||
self.__info = Table(
|
||||
'info',
|
||||
self.__metadata,
|
||||
Column('id', Integer, primary_key=True),
|
||||
Column('name', String(65536), unique=True),
|
||||
Column('value', String(65536))
|
||||
)
|
||||
self.__hklm = Table(
|
||||
'HKLM'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('hive_key', String(65536, collation='NOCASE'),
|
||||
unique=True)
|
||||
, Column('keyname', String(collation='NOCASE'))
|
||||
, Column('valuename', String(collation='NOCASE'))
|
||||
, Column('policy_name', String)
|
||||
, Column('type', Integer)
|
||||
, Column('data', String)
|
||||
)
|
||||
self.__hkcu = Table(
|
||||
'HKCU'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('hive_key', String(65536, collation='NOCASE'))
|
||||
, Column('keyname', String(collation='NOCASE'))
|
||||
, Column('valuename', String(collation='NOCASE'))
|
||||
, Column('policy_name', String)
|
||||
, Column('type', Integer)
|
||||
, Column('data', String)
|
||||
, UniqueConstraint('sid', 'hive_key')
|
||||
)
|
||||
self.__shortcuts = Table(
|
||||
'Shortcuts'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('path', String)
|
||||
, Column('policy_name', String)
|
||||
, Column('shortcut', String)
|
||||
, UniqueConstraint('sid', 'path')
|
||||
)
|
||||
self.__printers = Table(
|
||||
'Printers'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('name', String)
|
||||
, Column('policy_name', String)
|
||||
, Column('printer', String)
|
||||
, UniqueConstraint('sid', 'name')
|
||||
)
|
||||
self.__drives = Table(
|
||||
'Drives'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('login', String)
|
||||
, Column('password', String)
|
||||
, Column('dir', String)
|
||||
, Column('policy_name', String)
|
||||
, Column('path', String)
|
||||
, UniqueConstraint('sid', 'dir')
|
||||
)
|
||||
self.__folders = Table(
|
||||
'Folders'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('path', String)
|
||||
, Column('policy_name', String)
|
||||
, Column('action', String)
|
||||
, Column('delete_folder', String)
|
||||
, Column('delete_sub_folders', String)
|
||||
, Column('delete_files', String)
|
||||
, UniqueConstraint('sid', 'path')
|
||||
)
|
||||
self.__envvars = Table(
|
||||
'Envvars'
|
||||
, self.__metadata
|
||||
, Column('id', Integer, primary_key=True)
|
||||
, Column('sid', String)
|
||||
, Column('name', String)
|
||||
, Column('policy_name', String)
|
||||
, Column('action', String)
|
||||
, Column('value', String)
|
||||
, UniqueConstraint('sid', 'name')
|
||||
)
|
||||
self.__metadata.create_all(self.db_cnt)
|
||||
Session = sessionmaker(bind=self.db_cnt)
|
||||
self.db_session = Session()
|
||||
try:
|
||||
mapper(info_entry, self.__info)
|
||||
mapper(samba_preg, self.__hklm)
|
||||
mapper(samba_hkcu_preg, self.__hkcu)
|
||||
mapper(ad_shortcut, self.__shortcuts)
|
||||
mapper(printer_entry, self.__printers)
|
||||
mapper(drive_entry, self.__drives)
|
||||
mapper(folder_entry, self.__folders)
|
||||
mapper(envvar_entry, self.__envvars)
|
||||
except:
|
||||
pass
|
||||
#logging.error('Error creating mapper')
|
||||
|
||||
    def _add(self, row):
        try:
            self.db_session.add(row)
            self.db_session.commit()
        except Exception as exc:
            self.db_session.rollback()
            raise exc

    def _info_upsert(self, row):
        try:
            self._add(row)
        except Exception:
            # INSERT hit the unique constraint: the row already exists, so UPDATE it.
            (self
                .db_session.query(info_entry)
                .filter(info_entry.name == row.name)
                .update(row.update_fields()))
            self.db_session.commit()

    def _hklm_upsert(self, row):
        try:
            self._add(row)
        except Exception:
            (self
                .db_session
                .query(samba_preg)
                .filter(samba_preg.hive_key == row.hive_key)
                .update(row.update_fields()))
            self.db_session.commit()

    def _hkcu_upsert(self, row):
        try:
            self._add(row)
        except Exception:
            (self
                .db_session
                .query(samba_hkcu_preg)
                .filter(samba_hkcu_preg.sid == row.sid)
                .filter(samba_hkcu_preg.hive_key == row.hive_key)
                .update(row.update_fields()))
            self.db_session.commit()

    def _shortcut_upsert(self, row):
        try:
            self._add(row)
        except Exception:
            (self
                .db_session
                .query(ad_shortcut)
                .filter(ad_shortcut.sid == row.sid)
                .filter(ad_shortcut.path == row.path)
                .update(row.update_fields()))
            self.db_session.commit()

    def _printer_upsert(self, row):
        try:
            self._add(row)
        except Exception:
            (self
                .db_session
                .query(printer_entry)
                .filter(printer_entry.sid == row.sid)
                .filter(printer_entry.name == row.name)
                .update(row.update_fields()))
            self.db_session.commit()

    def _drive_upsert(self, row):
        try:
            self._add(row)
        except Exception:
            (self
                .db_session
                .query(drive_entry)
                .filter(drive_entry.sid == row.sid)
                .filter(drive_entry.dir == row.dir)
                .update(row.update_fields()))
            self.db_session.commit()

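    # The upsert helpers above assume that update_fields() on each mapped entry
    # class returns a plain dict of column name -> new value (without the lookup
    # keys), which is what Query.update() consumes on the UPDATE fallback path.
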
    def set_info(self, name, value):
        ientry = info_entry(name, value)
        logdata = dict()
        logdata['varname'] = name
        logdata['value'] = value
        log('D19', logdata)
        self._info_upsert(ientry)

    def _delete_hklm_keyname(self, keyname):
        '''
        Delete PReg hive_key from HKEY_LOCAL_MACHINE
        '''
        logdata = dict({'keyname': keyname})
        try:
            (self
                .db_session
                .query(samba_preg)
                .filter(samba_preg.keyname == keyname)
                .delete(synchronize_session=False))
            self.db_session.commit()
            log('D65', logdata)
        except Exception:
            log('D63', logdata)

    def add_hklm_entry(self, preg_entry, policy_name):
        '''
        Write PReg entry to HKEY_LOCAL_MACHINE
        '''
        pentry = samba_preg(preg_entry, policy_name)
        if not pentry.valuename.startswith('**'):
            self._hklm_upsert(pentry)
        else:
            logdata = dict({'key': pentry.hive_key})
            if pentry.valuename.lower() == '**delvals.':
                # The special '**DelVals.' marker means: drop every value stored
                # under this key before new values are written.
                self._delete_hklm_keyname(pentry.keyname)
            else:
                log('D27', logdata)

    def _delete_hkcu_keyname(self, keyname, sid):
        '''
        Delete PReg hive_key from HKEY_CURRENT_USER
        '''
        logdata = dict({'sid': sid, 'keyname': keyname})
        try:
            (self
                .db_session
                .query(samba_hkcu_preg)
                .filter(samba_hkcu_preg.sid == sid)
                .filter(samba_hkcu_preg.keyname == keyname)
                .delete(synchronize_session=False))
            self.db_session.commit()
            log('D66', logdata)
        except Exception:
            log('D64', logdata)

    def add_hkcu_entry(self, preg_entry, sid, policy_name):
        '''
        Write PReg entry to HKEY_CURRENT_USER
        '''
        hkcu_pentry = samba_hkcu_preg(sid, preg_entry, policy_name)
        logdata = dict({'sid': sid, 'policy': policy_name, 'key': hkcu_pentry.hive_key})
        if not hkcu_pentry.valuename.startswith('**'):
            log('D26', logdata)
            self._hkcu_upsert(hkcu_pentry)
        else:
            if hkcu_pentry.valuename.lower() == '**delvals.':
                self._delete_hkcu_keyname(hkcu_pentry.keyname, sid)
            else:
                log('D51', logdata)

    def add_shortcut(self, sid, sc_obj, policy_name):
        '''
        Store shortcut information in the database
        '''
        sc_entry = ad_shortcut(sid, sc_obj, policy_name)
        logdata = dict()
        logdata['link'] = sc_entry.path
        logdata['sid'] = sid
        log('D41', logdata)
        self._shortcut_upsert(sc_entry)

    def add_printer(self, sid, pobj, policy_name):
        '''
        Store printer configuration in the database
        '''
        prn_entry = printer_entry(sid, pobj, policy_name)
        logdata = dict()
        logdata['printer'] = prn_entry.name
        logdata['sid'] = sid
        log('D40', logdata)
        self._printer_upsert(prn_entry)

    def add_drive(self, sid, dobj, policy_name):
        drv_entry = drive_entry(sid, dobj, policy_name)
        logdata = dict()
        logdata['uri'] = drv_entry.path
        logdata['sid'] = sid
        log('D39', logdata)
        self._drive_upsert(drv_entry)

    def add_folder(self, sid, fobj, policy_name):
        fld_entry = folder_entry(sid, fobj, policy_name)
        logdata = dict()
        logdata['folder'] = fld_entry.path
        logdata['sid'] = sid
        log('D42', logdata)
        try:
            self._add(fld_entry)
        except Exception:
            (self
                ._filter_sid_obj(folder_entry, sid)
                .filter(folder_entry.path == fld_entry.path)
                .update(fld_entry.update_fields()))
            self.db_session.commit()

    def add_envvar(self, sid, evobj, policy_name):
        ev_entry = envvar_entry(sid, evobj, policy_name)
        logdata = dict()
        logdata['envvar'] = ev_entry.name
        logdata['sid'] = sid
        log('D53', logdata)
        try:
            self._add(ev_entry)
        except Exception:
            (self
                ._filter_sid_obj(envvar_entry, sid)
                .filter(envvar_entry.name == ev_entry.name)
                .update(ev_entry.update_fields()))
            self.db_session.commit()

    def _filter_sid_obj(self, row_object, sid):
        res = (self
            .db_session
            .query(row_object)
            .filter(row_object.sid == sid))
        return res

    def _filter_sid_list(self, row_object, sid):
        res = (self
            .db_session
            .query(row_object)
            .filter(row_object.sid == sid)
            .order_by(row_object.id)
            .all())
        return res

    def get_shortcuts(self, sid):
        return self._filter_sid_list(ad_shortcut, sid)

    def get_printers(self, sid):
        return self._filter_sid_list(printer_entry, sid)

    def get_drives(self, sid):
        return self._filter_sid_list(drive_entry, sid)

    def get_folders(self, sid):
        return self._filter_sid_list(folder_entry, sid)

    def get_envvars(self, sid):
        return self._filter_sid_list(envvar_entry, sid)

    def get_hkcu_entry(self, sid, hive_key):
        res = (self
            .db_session
            .query(samba_hkcu_preg)
            .filter(samba_hkcu_preg.sid == sid)
            .filter(samba_hkcu_preg.hive_key == hive_key)
            .first())
        # Try to get the value from machine SID as a default if no option is set.
        if not res:
            machine_sid = self.get_info('machine_sid')
            res = (self
                .db_session
                .query(samba_hkcu_preg)
                .filter(samba_hkcu_preg.sid == machine_sid)
                .filter(samba_hkcu_preg.hive_key == hive_key)
                .first())
        return res

    def filter_hkcu_entries(self, sid, startswith):
        res = (self
            .db_session
            .query(samba_hkcu_preg)
            .filter(samba_hkcu_preg.sid == sid)
            .filter(samba_hkcu_preg.hive_key.like(startswith)))
        return res

    def get_info(self, name):
        res = (self
            .db_session
            .query(info_entry)
            .filter(info_entry.name == name)
            .first())
        return res.value

    def get_hklm_entry(self, hive_key):
        res = (self
            .db_session
            .query(samba_preg)
            .filter(samba_preg.hive_key == hive_key)
            .first())
        return res

    def filter_hklm_entries(self, startswith):
        res = (self
            .db_session
            .query(samba_preg)
            .filter(samba_preg.hive_key.like(startswith)))
        return res

    def wipe_user(self, sid):
        self._wipe_sid(samba_hkcu_preg, sid)
        self._wipe_sid(ad_shortcut, sid)
        self._wipe_sid(printer_entry, sid)
        self._wipe_sid(drive_entry, sid)

    def _wipe_sid(self, row_object, sid):
        (self
            .db_session
            .query(row_object)
            .filter(row_object.sid == sid)
            .delete())
        self.db_session.commit()

    def wipe_hklm(self):
        self.db_session.query(samba_preg).delete()
        self.db_session.commit()
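The storage class above leans on two SQLAlchemy idioms: classical mapping of plain entry classes onto Table objects, and an insert-first/update-on-conflict fallback. The snippet below is a minimal, self-contained sketch of both, assuming SQLAlchemy 1.x (the classical mapper() API was removed in 2.0, where registry.map_imperatively() replaces it); the demo_entry class and table are hypothetical stand-ins for the gpoa entry classes, not code from the repository.

    from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine
    from sqlalchemy.orm import mapper, sessionmaker

    class demo_entry:
        # Hypothetical stand-in for info_entry / samba_preg etc.
        def __init__(self, name, value):
            self.name = name
            self.value = value
        def update_fields(self):
            # Only the non-key columns go into the UPDATE payload.
            return {'value': self.value}

    metadata = MetaData()
    demo_table = Table('demo', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(65536), unique=True),
        Column('value', String(65536)))

    engine = create_engine('sqlite:///:memory:')
    metadata.create_all(engine)
    mapper(demo_entry, demo_table)          # classical mapping, as in __init__ above
    session = sessionmaker(bind=engine)()

    def demo_upsert(row):
        # INSERT first; if the unique constraint fires, fall back to UPDATE.
        try:
            session.add(row)
            session.commit()
        except Exception:
            session.rollback()
            (session.query(demo_entry)
                .filter(demo_entry.name == row.name)
                .update(row.update_fields()))
            session.commit()

    demo_upsert(demo_entry('machine_sid', 'S-1-5-21-0-0-0'))
    demo_upsert(demo_entry('machine_sid', 'S-1-5-21-1-1-1'))   # second call updates in place
    print(session.query(demo_entry).filter(demo_entry.name == 'machine_sid').first().value)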
gpoa/templates/47-alt_group_policy_permissions.rules.j2 (new file, 63 lines)
@@ -0,0 +1,63 @@
{#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2022 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#}

{% if No|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in No -%}
        action.id == "{{res}}"{% if No|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.NO;
    }
});
{% endif %}{% if Yes|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Yes -%}
        action.id == "{{res}}"{% if Yes|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.YES;
    }
});
{% endif %}{% if Auth_self|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_self -%}
        action.id == "{{res}}"{% if Auth_self|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.AUTH_SELF;
    }
});
{% endif %}{% if Auth_admin|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_admin -%}
        action.id == "{{res}}"{% if Auth_admin|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.AUTH_ADMIN;
    }
});
{% endif %}{% if Auth_self_keep|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_self_keep -%}
        action.id == "{{res}}"{% if Auth_self_keep|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.AUTH_SELF_KEEP;
    }
});
{% endif %}{% if Auth_admin_keep|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_admin_keep -%}
        action.id == "{{res}}"{% if Auth_admin_keep|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.AUTH_ADMIN_KEEP;
    }
});

{% endif %}
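A quick way to see what this template produces is to render it directly with Jinja2. The snippet below is an illustrative sketch only: the variable names (No, Yes, Auth_self, ...) come from the template itself, but the file path and the sample action id are assumptions.

    from jinja2 import Template

    with open('gpoa/templates/47-alt_group_policy_permissions.rules.j2') as f:
        tmpl = Template(f.read())

    # Every category the template checks must be passed; empty lists render nothing.
    print(tmpl.render(No=['org.freedesktop.udisks2.filesystem-mount'],
                      Yes=[], Auth_self=[], Auth_admin=[],
                      Auth_self_keep=[], Auth_admin_keep=[]))
    # Expected shape of the output: one polkit.addRule() block that returns
    # polkit.Result.NO for the listed action id.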
gpoa/templates/48-alt_group_policy_permissions_user.rules.j2 (new file, 63 lines)
@@ -0,0 +1,63 @@
{#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2022 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#}

{% if No|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in No -%}
        action.id == "{{res}}" {% if No|length == loop.index %}&&{% else %}||{% endif %}
        {% endfor %}subject.user == "{{User}}") {
        return polkit.Result.NO;
    }
});{% endif %}{% if Yes|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Yes -%}
        action.id == "{{res}}" {% if Yes|length == loop.index %}&&{% else %}||{% endif %}
        {% endfor %}subject.user == "{{User}}") {
        return polkit.Result.YES;
    }
});{% endif %}{% if Auth_self|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_self -%}
        action.id == "{{res}}" {% if Auth_self|length == loop.index %}&&{% else %}||{% endif %}
        {% endfor %}subject.user == "{{User}}") {
        return polkit.Result.AUTH_SELF;
    }
});{% endif %}{% if Auth_admin|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_admin -%}
        action.id == "{{res}}" {% if Auth_admin|length == loop.index %}&&{% else %}||{% endif %}
        {% endfor %}subject.user == "{{User}}") {
        return polkit.Result.AUTH_ADMIN;
    }
});{% endif %}{% if Auth_self_keep|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_self_keep -%}
        action.id == "{{res}}" {% if Auth_self_keep|length == loop.index %}&&{% else %}||{% endif %}
        {% endfor %}subject.user == "{{User}}") {
        return polkit.Result.AUTH_SELF_KEEP;
    }
});{% endif %}{% if Auth_admin_keep|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_admin_keep -%}
        action.id == "{{res}}" {% if Auth_admin_keep|length == loop.index %}&&{% else %}||{% endif %}
        {% endfor %}subject.user == "{{User}}") {
        return polkit.Result.AUTH_ADMIN_KEEP;
    }
});
{% endif %}
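The per-user variant renders the same way; the difference is the extra User variable that is combined with the action id list. A sketch under the same assumptions as the previous snippet (path and sample values are assumed):

    from jinja2 import Template

    with open('gpoa/templates/48-alt_group_policy_permissions_user.rules.j2') as f:
        user_tmpl = Template(f.read())

    print(user_tmpl.render(User='jdoe',
                           No=['org.freedesktop.udisks2.filesystem-mount'],
                           Yes=[], Auth_self=[], Auth_admin=[],
                           Auth_self_keep=[], Auth_admin_keep=[]))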
@@ -17,7 +17,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #}
 
-{% if Deny_All == '1' %}
+{% if Deny_All == 1 %}
 polkit.addRule(function (action, subject) {
     if ((action.id == "org.freedesktop.udisks2.filesystem-mount" ||
         action.id == "org.freedesktop.udisks2.filesystem-mount-system" ||
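The hunk above switches the Jinja2 comparison from the string '1' to the integer 1. Jinja2's == test does not coerce types, so the branch only fires when the value handed to render() has the matching type. A small standalone probe (hypothetical, not from the repository) shows the difference:

    from jinja2 import Template

    probe = Template("{% if Deny_All == '1' %}string-match {% endif %}"
                     "{% if Deny_All == 1 %}int-match{% endif %}")
    print(probe.render(Deny_All=1))    # -> 'int-match'
    print(probe.render(Deny_All='1'))  # -> 'string-match '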
gpoa/templates/49-alt_group_policy_permissions.rules.j2 (new file, 63 lines)
@@ -0,0 +1,63 @@
{#
# GPOA - GPO Applier for Linux
#
# Copyright (C) 2019-2022 BaseALT Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#}

{% if No|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in No -%}
        action.id == "{{res}}"{% if No|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.NO;
    }
});
{% endif %}{% if Yes|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Yes -%}
        action.id == "{{res}}"{% if Yes|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.YES;
    }
});
{% endif %}{% if Auth_self|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_self -%}
        action.id == "{{res}}"{% if Auth_self|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.AUTH_SELF;
    }
});
{% endif %}{% if Auth_admin|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_admin -%}
        action.id == "{{res}}"{% if Auth_admin|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.AUTH_ADMIN;
    }
});
{% endif %}{% if Auth_self_keep|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_self_keep -%}
        action.id == "{{res}}"{% if Auth_self_keep|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.AUTH_SELF_KEEP;
    }
});
{% endif %}{% if Auth_admin_keep|length %}
polkit.addRule(function (action, subject) {
    if ({% for res in Auth_admin_keep -%}
        action.id == "{{res}}"{% if Auth_admin_keep|length == loop.index %}){ {% else %} ||{% endif %}
        {% endfor %} return polkit.Result.AUTH_ADMIN_KEEP;
    }
});

{% endif %}
Some files were not shown because too many files have changed in this diff.