mirror of
https://github.com/samba-team/samba.git
synced 2024-12-22 13:34:15 +03:00
s4-python: Various formatting fixes.
* Trailing whitespace * use of "==" where "is" should be used * double spaces
This commit is contained in:
parent
6986f7bdda
commit
2a797f29aa
@ -132,8 +132,10 @@ class Parallel(object):
|
||||
self.frozen = []
|
||||
elif not self.count:
|
||||
(jobs, tmp) = self.manager.get_next_set()
|
||||
if jobs != None: self.maxjobs = jobs
|
||||
if tmp: self.outstanding += tmp
|
||||
if jobs is not None:
|
||||
self.maxjobs = jobs
|
||||
if tmp:
|
||||
self.outstanding += tmp
|
||||
break
|
||||
|
||||
def get_out(self):
|
||||
|
@ -154,7 +154,7 @@ def abi_process_file(fname, version, symmap):
|
||||
|
||||
def abi_write_vscript(vscript, libname, current_version, versions, symmap, abi_match):
|
||||
'''write a vscript file for a library in --version-script format
|
||||
|
||||
|
||||
:param vscript: Path to the vscript file
|
||||
:param libname: Name of the library, uppercased
|
||||
:param current_version: Current version
|
||||
|
@ -62,9 +62,9 @@ def COMPOUND_END(conf, result):
|
||||
conf.check_message_1 = conf.saved_check_message_1
|
||||
conf.check_message_2 = conf.saved_check_message_2
|
||||
p = conf.check_message_2
|
||||
if result == True:
|
||||
if result:
|
||||
p('ok ')
|
||||
elif result == False:
|
||||
elif not result:
|
||||
p('not found', 'YELLOW')
|
||||
else:
|
||||
p(result)
|
||||
@ -241,7 +241,7 @@ def CHECK_FUNC(conf, f, link=True, lib=None, headers=None):
|
||||
|
||||
conf.COMPOUND_START('Checking for %s' % f)
|
||||
|
||||
if link is None or link == True:
|
||||
if link is None or link:
|
||||
ret = CHECK_CODE(conf,
|
||||
# this is based on the autoconf strategy
|
||||
'''
|
||||
@ -284,7 +284,7 @@ def CHECK_FUNC(conf, f, link=True, lib=None, headers=None):
|
||||
headers=headers,
|
||||
msg='Checking for macro %s' % f)
|
||||
|
||||
if not ret and (link is None or link == False):
|
||||
if not ret and (link is None or not link):
|
||||
ret = CHECK_VARIABLE(conf, f,
|
||||
define=define,
|
||||
headers=headers,
|
||||
@ -470,7 +470,7 @@ def CONFIG_SET(conf, option):
|
||||
if option not in conf.env:
|
||||
return False
|
||||
v = conf.env[option]
|
||||
if v == None:
|
||||
if v is None:
|
||||
return False
|
||||
if v == []:
|
||||
return False
|
||||
|
@ -78,7 +78,7 @@ def LIB_MAY_BE_BUNDLED(conf, libname):
|
||||
|
||||
@conf
|
||||
def LIB_MUST_BE_BUNDLED(conf, libname):
|
||||
return ('ALL' in conf.env.BUNDLED_LIBS or
|
||||
return ('ALL' in conf.env.BUNDLED_LIBS or
|
||||
libname in conf.env.BUNDLED_LIBS)
|
||||
|
||||
@conf
|
||||
|
@ -136,7 +136,7 @@ def build_includes(self):
|
||||
includes = []
|
||||
|
||||
# maybe add local includes
|
||||
if getattr(self, 'local_include', True) == True and getattr(self, 'local_include_first', True):
|
||||
if getattr(self, 'local_include', True) and getattr(self, 'local_include_first', True):
|
||||
includes.append('.')
|
||||
|
||||
includes.extend(self.samba_includes_extended)
|
||||
@ -153,7 +153,7 @@ def build_includes(self):
|
||||
t = bld.name_to_obj(d, bld.env)
|
||||
bld.ASSERT(t is not None, "Unable to find dependency %s for %s" % (d, self.sname))
|
||||
inclist = getattr(t, 'samba_includes_extended', [])[:]
|
||||
if getattr(t, 'local_include', True) == True:
|
||||
if getattr(t, 'local_include', True):
|
||||
inclist.append('.')
|
||||
if inclist == []:
|
||||
continue
|
||||
@ -169,7 +169,7 @@ def build_includes(self):
|
||||
relpath = os_path_relpath(inc, mypath)
|
||||
includes.append(relpath)
|
||||
|
||||
if getattr(self, 'local_include', True) == True and not getattr(self, 'local_include_first', True):
|
||||
if getattr(self, 'local_include', True) and not getattr(self, 'local_include_first', True):
|
||||
includes.append('.')
|
||||
|
||||
# now transform the includes list to be relative to the top directory
|
||||
@ -306,7 +306,7 @@ def check_orphaned_targets(bld, tgt_list):
|
||||
debug('deps: checking for orphaned targets')
|
||||
|
||||
for t in tgt_list:
|
||||
if getattr(t, 'samba_used', False) == True:
|
||||
if getattr(t, 'samba_used', False):
|
||||
continue
|
||||
type = target_dict[t.sname]
|
||||
if not type in ['BINARY', 'LIBRARY', 'MODULE', 'ET', 'PYTHON']:
|
||||
|
@ -98,10 +98,10 @@ def vcs_dir_contents(path):
|
||||
return Utils.cmd_output(ls_files_cmd, cwd=cwd, env=env).split()
|
||||
|
||||
|
||||
def dist(appname='',version=''):
|
||||
def dist(appname='', version=''):
|
||||
|
||||
def add_files_to_tarball(tar, srcdir, srcsubdir, dstdir, dstsubdir, blacklist, files):
|
||||
if blacklist == None:
|
||||
if blacklist is None:
|
||||
blacklist = []
|
||||
for f in files:
|
||||
abspath = os.path.join(srcdir, f)
|
||||
|
@ -119,7 +119,6 @@ def public_headers_simple(bld, public_headers, header_path=None, public_headers_
|
||||
h_name = h
|
||||
inst_name = os.path.basename(h)
|
||||
bld.INSTALL_FILES('${INCLUDEDIR}', h_name, destname=inst_name)
|
||||
|
||||
|
||||
|
||||
def PUBLIC_HEADERS(bld, public_headers, header_path=None, public_headers_install=True):
|
||||
|
@ -31,7 +31,7 @@ def apply_incpaths(self):
|
||||
|
||||
for path in self.to_list(self.includes):
|
||||
if not path in lst:
|
||||
if preproc.go_absolute or path[0] != '/': #os.path.isabs(path):
|
||||
if preproc.go_absolute or path[0] != '/': # os.path.isabs(path):
|
||||
lst.append(path)
|
||||
else:
|
||||
self.env.prepend_value('CPPPATH', path)
|
||||
|
@ -21,7 +21,7 @@ def write_version_header(task):
|
||||
|
||||
def SAMBA_MKVERSION(bld, target):
|
||||
'''generate the version.h header for Samba'''
|
||||
t = bld.SAMBA_GENERATOR('VERSION',
|
||||
t = bld.SAMBA_GENERATOR('VERSION',
|
||||
rule=write_version_header,
|
||||
source= 'VERSION',
|
||||
target=target,
|
||||
|
@ -256,7 +256,7 @@ def ENFORCE_GROUP_ORDERING(bld):
|
||||
@feature('*')
|
||||
@before('exec_rule', 'apply_core', 'collect')
|
||||
def force_previous_groups(self):
|
||||
if getattr(self.bld, 'enforced_group_ordering', False) == True:
|
||||
if getattr(self.bld, 'enforced_group_ordering', False):
|
||||
return
|
||||
self.bld.enforced_group_ordering = True
|
||||
|
||||
@ -274,7 +274,7 @@ def ENFORCE_GROUP_ORDERING(bld):
|
||||
debug('group: Forcing up to group %s for target %s',
|
||||
group_name(g), self.name or self.target)
|
||||
break
|
||||
if stop != None:
|
||||
if stop is not None:
|
||||
break
|
||||
if stop is None:
|
||||
return
|
||||
@ -502,15 +502,15 @@ def CHECK_MAKEFLAGS(bld):
|
||||
if v == 'j':
|
||||
jobs_set = True
|
||||
elif v == 'k':
|
||||
Options.options.keep = True
|
||||
Options.options.keep = True
|
||||
elif opt == '-j':
|
||||
jobs_set = True
|
||||
elif opt == '-k':
|
||||
Options.options.keep = True
|
||||
Options.options.keep = True
|
||||
if not jobs_set:
|
||||
# default to one job
|
||||
Options.options.jobs = 1
|
||||
|
||||
|
||||
Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS
|
||||
|
||||
option_groups = {}
|
||||
|
@ -132,7 +132,7 @@ class SambaVersion(object):
|
||||
def __init__(self, version_dict, path, env=None, is_install=True):
|
||||
'''Determine the version number of samba
|
||||
|
||||
See VERSION for the format. Entries on that file are
|
||||
See VERSION for the format. Entries on that file are
|
||||
also accepted as dictionary entries here
|
||||
'''
|
||||
|
||||
|
@ -47,7 +47,7 @@ def replace_refill_task_list(self):
|
||||
# paranoia
|
||||
if bin_base[-4:] != '/bin':
|
||||
raise Utils.WafError("Invalid bin base: %s" % bin_base)
|
||||
|
||||
|
||||
# obtain the expected list of files
|
||||
expected = []
|
||||
for i in range(len(bld.task_manager.groups)):
|
||||
|
@ -285,7 +285,7 @@ def SAMBA_LIBRARY(bld, libname, source,
|
||||
if pc_files is not None and not private_library:
|
||||
bld.PKG_CONFIG_FILES(pc_files, vnum=vnum)
|
||||
|
||||
if (manpages is not None and 'XSLTPROC_MANPAGES' in bld.env and
|
||||
if (manpages is not None and 'XSLTPROC_MANPAGES' in bld.env and
|
||||
bld.env['XSLTPROC_MANPAGES']):
|
||||
bld.MANPAGES(manpages)
|
||||
|
||||
@ -670,7 +670,7 @@ def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None):
|
||||
bld.SET_BUILD_GROUP('build_source')
|
||||
for s in TO_LIST(source):
|
||||
iname = s
|
||||
if installname != None:
|
||||
if installname is not None:
|
||||
iname = installname
|
||||
target = os.path.join(installdir, iname)
|
||||
tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target))
|
||||
|
@ -150,11 +150,16 @@ class dnsobj(object):
|
||||
raise Exception("Received unexpected DNS reply of type %s" % self.type)
|
||||
|
||||
def __str__(self):
|
||||
if d.type == "A": return "%s %s %s" % (self.type, self.name, self.ip)
|
||||
if d.type == "AAAA": return "%s %s %s" % (self.type, self.name, self.ip)
|
||||
if d.type == "SRV": return "%s %s %s %s" % (self.type, self.name, self.dest, self.port)
|
||||
if d.type == "CNAME": return "%s %s %s" % (self.type, self.name, self.dest)
|
||||
if d.type == "NS": return "%s %s %s" % (self.type, self.name, self.dest)
|
||||
if d.type == "A":
|
||||
return "%s %s %s" % (self.type, self.name, self.ip)
|
||||
if d.type == "AAAA":
|
||||
return "%s %s %s" % (self.type, self.name, self.ip)
|
||||
if d.type == "SRV":
|
||||
return "%s %s %s %s" % (self.type, self.name, self.dest, self.port)
|
||||
if d.type == "CNAME":
|
||||
return "%s %s %s" % (self.type, self.name, self.dest)
|
||||
if d.type == "NS":
|
||||
return "%s %s %s" % (self.type, self.name, self.dest)
|
||||
|
||||
|
||||
def parse_dns_line(line, sub_vars):
|
||||
|
@ -301,10 +301,10 @@ class KCC(object):
|
||||
#
|
||||
# Bit NTDSCONN_OPT_RODC_TOPOLOGY is clear in cn!options
|
||||
if same_site:
|
||||
if cn_conn.is_generated() == False:
|
||||
if not cn_conn.is_generated():
|
||||
continue
|
||||
|
||||
if self.my_site.is_cleanup_ntdsconn_disabled() == True:
|
||||
if self.my_site.is_cleanup_ntdsconn_disabled():
|
||||
continue
|
||||
|
||||
# Loop thru connections looking for a duplicate that
|
||||
@ -328,10 +328,10 @@ class KCC(object):
|
||||
(cn_conn.whenCreated == cn2_conn.whenCreated and
|
||||
cmp(cn_conn.guid, cn2_conn.guid) < 0))
|
||||
|
||||
if lesser == True:
|
||||
if lesser:
|
||||
break
|
||||
|
||||
if lesser and cn_conn.is_rodc_topology() == False:
|
||||
if lesser and not cn_conn.is_rodc_topology():
|
||||
cn_conn.to_be_deleted = True
|
||||
|
||||
# Given an nTDSConnection object cn, if the DC with the nTDSDSA
|
||||
@ -359,29 +359,29 @@ class KCC(object):
|
||||
#
|
||||
else: # different site
|
||||
|
||||
if mydsa.is_istg() == False:
|
||||
if not mydsa.is_istg():
|
||||
continue
|
||||
|
||||
if cn_conn.is_generated() == False:
|
||||
if not cn_conn.is_generated():
|
||||
continue
|
||||
|
||||
if self.keep_connection(cn_conn) == True:
|
||||
if self.keep_connection(cn_conn):
|
||||
continue
|
||||
|
||||
# XXX - To be implemented
|
||||
|
||||
if all_connected == False:
|
||||
if not all_connected:
|
||||
continue
|
||||
|
||||
if cn_conn.is_rodc_topology() == False:
|
||||
if not cn_conn.is_rodc_topology():
|
||||
cn_conn.to_be_deleted = True
|
||||
|
||||
|
||||
if opts.readonly:
|
||||
for dnstr, connect in mydsa.connect_table.items():
|
||||
if connect.to_be_deleted == True:
|
||||
if connect.to_be_deleted:
|
||||
logger.info("TO BE DELETED:\n%s" % connect)
|
||||
if connect.to_be_added == True:
|
||||
if connect.to_be_added:
|
||||
logger.info("TO BE ADDED:\n%s" % connect)
|
||||
|
||||
# Peform deletion from our tables but perform
|
||||
@ -480,7 +480,7 @@ class KCC(object):
|
||||
drsuapi.DRSUAPI_DRS_NEVER_NOTIFY) == 0x0:
|
||||
t_repsFrom.replica_flags |= drsuapi.DRSUAPI_DRS_NEVER_NOTIFY
|
||||
|
||||
elif same_site == False:
|
||||
elif not same_site:
|
||||
|
||||
if (t_repsFrom.replica_flags &
|
||||
drsuapi.DRSUAPI_DRS_NEVER_NOTIFY) == 0x0:
|
||||
@ -491,7 +491,7 @@ class KCC(object):
|
||||
# not in the same site and the
|
||||
# NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION bit is
|
||||
# clear in cn!options
|
||||
if (same_site == False and
|
||||
if (not same_site and
|
||||
(cn_conn.options &
|
||||
dsdb.NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION) == 0x0):
|
||||
|
||||
@ -509,7 +509,7 @@ class KCC(object):
|
||||
|
||||
# Bits DRS_DISABLE_AUTO_SYNC and DRS_DISABLE_PERIODIC_SYNC are
|
||||
# set in t.replicaFlags if and only if cn!enabledConnection = false.
|
||||
if cn_conn.is_enabled() == False:
|
||||
if not cn_conn.is_enabled():
|
||||
|
||||
if (t_repsFrom.replica_flags &
|
||||
drsuapi.DRSUAPI_DRS_DISABLE_AUTO_SYNC) == 0x0:
|
||||
@ -586,10 +586,10 @@ class KCC(object):
|
||||
# We're not currently supporting SMTP replication
|
||||
# so is_smtp_replication_available() is currently
|
||||
# always returning False
|
||||
if (same_site == True or
|
||||
cn_conn.transport_dnstr == None or
|
||||
if (same_site or
|
||||
cn_conn.transport_dnstr is None or
|
||||
cn_conn.transport_dnstr.find("CN=IP") == 0 or
|
||||
is_smtp_replication_available() == False):
|
||||
not is_smtp_replication_available()):
|
||||
|
||||
if (t_repsFrom.replica_flags &
|
||||
drsuapi.DRSUAPI_DRS_MAIL_REP) != 0x0:
|
||||
@ -614,14 +614,14 @@ class KCC(object):
|
||||
t_repsFrom.dns_name2 = nastr
|
||||
|
||||
else:
|
||||
if (t_repsFrom.replica_flags & \
|
||||
if (t_repsFrom.replica_flags &
|
||||
drsuapi.DRSUAPI_DRS_MAIL_REP) == 0x0:
|
||||
t_repsFrom.replica_flags |= drsuapi.DRSUAPI_DRS_MAIL_REP
|
||||
|
||||
# We have a transport type but its not an
|
||||
# object in the database
|
||||
if cn_conn.transport_dnstr not in self.transport_table.keys():
|
||||
raise Exception("Missing inter-site transport - (%s)" % \
|
||||
raise Exception("Missing inter-site transport - (%s)" %
|
||||
cn_conn.transport_dnstr)
|
||||
|
||||
x_transport = self.transport_table[cn_conn.transport_dnstr]
|
||||
@ -656,7 +656,7 @@ class KCC(object):
|
||||
attrs=attrs)
|
||||
except ldb.ldbError, (enum, estr):
|
||||
raise Exception \
|
||||
("Unable to find attr (%s) for (%s) - (%s)" % \
|
||||
("Unable to find attr (%s) for (%s) - (%s)" %
|
||||
(x_transport.address_attr, pdnstr, estr))
|
||||
|
||||
msg = res[0]
|
||||
@ -696,15 +696,14 @@ class KCC(object):
|
||||
# cn!fromServer references an nTDSDSA object.
|
||||
s_dsa = None
|
||||
|
||||
if cn_conn.is_enabled() == True and \
|
||||
cn_conn.is_rodc_topology() == False:
|
||||
if cn_conn.is_enabled() and not cn_conn.is_rodc_topology():
|
||||
|
||||
s_dnstr = cn_conn.get_from_dnstr()
|
||||
if s_dnstr is not None:
|
||||
s_dsa = self.get_dsa(s_dnstr)
|
||||
|
||||
# No DSA matching this source DN string?
|
||||
if s_dsa == None:
|
||||
if s_dsa is None:
|
||||
return False, None
|
||||
|
||||
# To imply a repsFrom tuple is needed, each of these
|
||||
@ -718,7 +717,7 @@ class KCC(object):
|
||||
# the local DC
|
||||
s_rep = s_dsa.get_current_replica(n_rep.nc_dnstr)
|
||||
|
||||
if s_rep is None or s_rep.is_present() == False:
|
||||
if s_rep is None or not s_rep.is_present():
|
||||
return False, None
|
||||
|
||||
# To imply a repsFrom tuple is needed, each of these
|
||||
@ -733,10 +732,10 @@ class KCC(object):
|
||||
# replica, cn!transportType has no value, or
|
||||
# cn!transportType has an RDN of CN=IP.
|
||||
#
|
||||
implied = (s_rep.is_ro() == False or n_rep.is_partial() == True) and \
|
||||
(n_rep.is_domain() == False or
|
||||
n_rep.is_partial() == True or
|
||||
cn_conn.transport_dnstr == None or
|
||||
implied = (not s_rep.is_ro() or n_rep.is_partial()) and \
|
||||
(not n_rep.is_domain() or
|
||||
n_rep.is_partial() or
|
||||
cn_conn.transport_dnstr is None or
|
||||
cn_conn.transport_dnstr.find("CN=IP") == 0)
|
||||
|
||||
if implied:
|
||||
@ -815,7 +814,7 @@ class KCC(object):
|
||||
# is a child of the local DC's nTDSDSA object and
|
||||
# (cn!fromServer = s) and (cn!options) does not contain
|
||||
# NTDSCONN_OPT_RODC_TOPOLOGY or NULL if no such (cn) exists.
|
||||
if cn_conn and cn_conn.is_rodc_topology() == True:
|
||||
if cn_conn and cn_conn.is_rodc_topology():
|
||||
cn_conn = None
|
||||
|
||||
# KCC removes this repsFrom tuple if any of the following
|
||||
@ -831,8 +830,8 @@ class KCC(object):
|
||||
s_rep = s_dsa.get_current_replica(n_rep.nc_dnstr)
|
||||
|
||||
if cn_conn is None or \
|
||||
s_rep is None or s_rep.is_present() == False or \
|
||||
(n_rep.is_ro() == False and s_rep.is_partial() == True):
|
||||
s_rep is None or not s_rep.is_present() or \
|
||||
(not n_rep.is_ro() and s_rep.is_partial()):
|
||||
|
||||
t_repsFrom.to_be_deleted = True
|
||||
continue
|
||||
@ -846,7 +845,7 @@ class KCC(object):
|
||||
for cn_dnstr, cn_conn in self.my_dsa.connect_table.items():
|
||||
|
||||
implied, s_dsa = self.is_repsFrom_implied(n_rep, cn_conn)
|
||||
if implied == False:
|
||||
if not implied:
|
||||
continue
|
||||
|
||||
# Loop thru the existing repsFrom tupples (if any) and
|
||||
@ -860,7 +859,7 @@ class KCC(object):
|
||||
s_dsa = None
|
||||
break
|
||||
|
||||
if s_dsa == None:
|
||||
if s_dsa is None:
|
||||
continue
|
||||
|
||||
# Create a new RepsFromTo and proceed to modify
|
||||
@ -995,7 +994,7 @@ class KCC(object):
|
||||
# Skip dc
|
||||
if self.my_site.same_site(dsa):
|
||||
needed, ro, partial = part.should_be_present(dsa)
|
||||
if needed == False or (partial == True and partial_ok == False):
|
||||
if not needed or (partial and not partial_ok):
|
||||
continue
|
||||
|
||||
# ELSE
|
||||
@ -1005,7 +1004,7 @@ class KCC(object):
|
||||
# Skip dc
|
||||
else:
|
||||
rep = dsa.get_current_replica(part.nc_dnstr)
|
||||
if rep is None or (rep.is_partial() and partial_ok == False):
|
||||
if rep is None or (rep.is_partial() and not partial_ok):
|
||||
continue
|
||||
|
||||
# IF AmIRODC() and cr!nCName corresponds to default NC then
|
||||
@ -1013,7 +1012,7 @@ class KCC(object):
|
||||
# IF dsaobj.msDS-Behavior-Version < DS_BEHAVIOR_WIN2008
|
||||
# Skip dc
|
||||
if self.my_dsa.is_ro() and part.is_default():
|
||||
if dsa.is_minimum_behavior(DS_BEHAVIOR_WIN2008) == False:
|
||||
if not dsa.is_minimum_behavior(DS_BEHAVIOR_WIN2008):
|
||||
continue
|
||||
|
||||
# IF t!name != "IP" and the parent object of dc has no value for
|
||||
@ -1036,7 +1035,7 @@ class KCC(object):
|
||||
|
||||
# IF BridgeheadDCFailed(dc!objectGUID, detectFailedDCs) = TRUE
|
||||
# Skip dc
|
||||
if self.is_bridgehead_failed(dsa, detect_failed) == True:
|
||||
if self.is_bridgehead_failed(dsa, detect_failed):
|
||||
continue
|
||||
|
||||
logger.debug("get_all_bridgeheads: dsadn=%s" % dsa.dsa_dnstr)
|
||||
@ -1048,7 +1047,7 @@ class KCC(object):
|
||||
# servers, and otherwise by ascending objectGUID
|
||||
# ELSE
|
||||
# SORT bhs in a random order
|
||||
if site.is_random_bridgehead_disabled() == True:
|
||||
if site.is_random_bridgehead_disabled():
|
||||
bhs.sort(sort_dsa_by_gc_and_guid)
|
||||
else:
|
||||
random.shuffle(bhs)
|
||||
@ -1132,8 +1131,8 @@ class KCC(object):
|
||||
|
||||
# IF bits NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT and
|
||||
# NTDSCONN_OPT_USE_NOTIFY are set in cn
|
||||
if cn.is_override_notify_default() == True and \
|
||||
cn.is_use_notify() == True:
|
||||
if cn.is_override_notify_default() and \
|
||||
cn.is_use_notify():
|
||||
|
||||
# IF bit NTDSSITELINK_OPT_USE_NOTIFY is clear in
|
||||
# ri.Options
|
||||
@ -1142,7 +1141,7 @@ class KCC(object):
|
||||
# NTDSCONN_OPT_USE_NOTIFY in cn!options
|
||||
if (link_opt & dsdb.NTDSSITELINK_OPT_USE_NOTIFY) == 0:
|
||||
cn.options &= \
|
||||
~(dsdb.NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT | \
|
||||
~(dsdb.NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT |
|
||||
dsdb.NTDSCONN_OPT_USE_NOTIFY)
|
||||
cn.set_modified(True)
|
||||
|
||||
@ -1156,13 +1155,13 @@ class KCC(object):
|
||||
# NTDSCONN_OPT_USE_NOTIFY in cn!options
|
||||
if (link_opt & dsdb.NTDSSITELINK_OPT_USE_NOTIFY) != 0:
|
||||
cn.options |= \
|
||||
(dsdb.NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT | \
|
||||
(dsdb.NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT |
|
||||
dsdb.NTDSCONN_OPT_USE_NOTIFY)
|
||||
cn.set_modified(True)
|
||||
|
||||
|
||||
# IF bit NTDSCONN_OPT_TWOWAY_SYNC is set in cn!options
|
||||
if cn.is_twoway_sync() == True:
|
||||
if cn.is_twoway_sync():
|
||||
|
||||
# IF bit NTDSSITELINK_OPT_TWOWAY_SYNC is clear in
|
||||
# ri.Options
|
||||
@ -1186,14 +1185,14 @@ class KCC(object):
|
||||
|
||||
# IF bit NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION is set
|
||||
# in cn!options
|
||||
if cn.is_intersite_compression_disabled() == True:
|
||||
if cn.is_intersite_compression_disabled():
|
||||
|
||||
# IF bit NTDSSITELINK_OPT_DISABLE_COMPRESSION is clear
|
||||
# in ri.Options
|
||||
# Perform an originating update to clear bit
|
||||
# NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION in
|
||||
# cn!options
|
||||
if (link_opt & \
|
||||
if (link_opt &
|
||||
dsdb.NTDSSITELINK_OPT_DISABLE_COMPRESSION) == 0:
|
||||
cn.options &= \
|
||||
~dsdb.NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION
|
||||
@ -1206,7 +1205,7 @@ class KCC(object):
|
||||
# Perform an originating update to set bit
|
||||
# NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION in
|
||||
# cn!options
|
||||
if (link_opt & \
|
||||
if (link_opt &
|
||||
dsdb.NTDSSITELINK_OPT_DISABLE_COMPRESSION) != 0:
|
||||
cn.options |= \
|
||||
dsdb.NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION
|
||||
@ -1214,7 +1213,7 @@ class KCC(object):
|
||||
|
||||
# Display any modified connection
|
||||
if opts.readonly:
|
||||
if cn.to_be_modified == True:
|
||||
if cn.to_be_modified:
|
||||
logger.info("TO BE MODIFIED:\n%s" % cn)
|
||||
|
||||
ldsa.commit_connections(self.samdb, ro=True)
|
||||
@ -1295,7 +1294,7 @@ class KCC(object):
|
||||
|
||||
# Display any added connection
|
||||
if opts.readonly:
|
||||
if cn.to_be_added == True:
|
||||
if cn.to_be_added:
|
||||
logger.info("TO BE ADDED:\n%s" % cn)
|
||||
|
||||
lbh.commit_connections(self.samdb, ro=True)
|
||||
@ -1658,7 +1657,7 @@ class KCC(object):
|
||||
# If we haven't been told to turn off stale connection
|
||||
# detection and this dsa has a stale connection then
|
||||
# continue
|
||||
if detect_stale and self.is_stale_link_connection(dc_s) == True:
|
||||
if detect_stale and self.is_stale_link_connection(dc_s):
|
||||
continue
|
||||
|
||||
# Replica meets criteria. Add it to table indexed
|
||||
@ -1669,7 +1668,7 @@ class KCC(object):
|
||||
# on the local DC, append to R each partial replica (p of x)
|
||||
# such that p "is present" on a DC satisfying the same
|
||||
# criteria defined above for full replica DCs.
|
||||
if partial == True:
|
||||
if partial:
|
||||
|
||||
# Now we loop thru all the DSAs looking for
|
||||
# partial NC replicas that match the naming
|
||||
@ -1731,7 +1730,7 @@ class KCC(object):
|
||||
# If we haven't been told to turn off stale connection
|
||||
# detection and this dsa has a stale connection then
|
||||
# continue
|
||||
if detect_stale and self.is_stale_link_connection(dc_s) == True:
|
||||
if detect_stale and self.is_stale_link_connection(dc_s):
|
||||
continue
|
||||
|
||||
# Replica meets criteria. Add it to table indexed
|
||||
@ -1808,11 +1807,11 @@ class KCC(object):
|
||||
findex = rindex = random.randint(0, r_len-1)
|
||||
|
||||
# while this node doesn't have sufficient edges
|
||||
while tnode.has_sufficient_edges() == False:
|
||||
while not tnode.has_sufficient_edges():
|
||||
# If this edge can be successfully added (i.e. not
|
||||
# the same node and edge doesn't already exist) then
|
||||
# select a new random index for the next round
|
||||
if tnode.add_edge_from(graph_list[rindex].dsa_dnstr) == True:
|
||||
if tnode.add_edge_from(graph_list[rindex].dsa_dnstr):
|
||||
findex = rindex = random.randint(0, r_len-1)
|
||||
else:
|
||||
# Otherwise continue looking against each node
|
||||
@ -1850,7 +1849,7 @@ class KCC(object):
|
||||
if mysite.is_intrasite_topology_disabled():
|
||||
return
|
||||
|
||||
detect_stale = (mysite.is_detect_stale_disabled() == False)
|
||||
detect_stale = (not mysite.is_detect_stale_disabled())
|
||||
|
||||
# Loop thru all the partitions.
|
||||
for partdn, part in self.part_table.items():
|
||||
@ -1890,11 +1889,11 @@ class KCC(object):
|
||||
if opts.readonly:
|
||||
# Display any to be added or modified repsFrom
|
||||
for dnstr, connect in mydsa.connect_table.items():
|
||||
if connect.to_be_deleted == True:
|
||||
if connect.to_be_deleted:
|
||||
logger.info("TO BE DELETED:\n%s" % connect)
|
||||
if connect.to_be_modified == True:
|
||||
if connect.to_be_modified:
|
||||
logger.info("TO BE MODIFIED:\n%s" % connect)
|
||||
if connect.to_be_added == True:
|
||||
if connect.to_be_added:
|
||||
logger.info("TO BE ADDED:\n%s" % connect)
|
||||
|
||||
mydsa.commit_connections(self.samdb, ro=True)
|
||||
@ -2330,9 +2329,9 @@ def sort_replica_by_dsa_guid(rep1, rep2):
|
||||
return cmp(rep1.rep_dsa_guid, rep2.rep_dsa_guid)
|
||||
|
||||
def sort_dsa_by_gc_and_guid(dsa1, dsa2):
|
||||
if dsa1.is_gc() == True and dsa2.is_gc() == False:
|
||||
if dsa1.is_gc() and not dsa2.is_gc():
|
||||
return -1
|
||||
if dsa1.is_gc() == False and dsa2.is_gc() == True:
|
||||
if not dsa1.is_gc() and dsa2.is_gc():
|
||||
return +1
|
||||
return cmp(dsa1.dsa_guid, dsa2.dsa_guid)
|
||||
|
||||
|
@ -869,7 +869,7 @@ def checkKeepAttributeOldMtd(delta, att, reference, current,
|
||||
else:
|
||||
if hashOverwrittenAtt.get(att)&2**msgElt.flags() :
|
||||
continue
|
||||
elif hashOverwrittenAtt.get(att)==never:
|
||||
elif hashOverwrittenAtt.get(att) == never:
|
||||
delta.remove(att)
|
||||
continue
|
||||
|
||||
@ -961,8 +961,8 @@ def checkKeepAttributeWithMetadata(delta, att, message, reference, current,
|
||||
message(CHANGESD, "%ssd are not identical:\n%s" % (txt, diff))
|
||||
txt = ""
|
||||
if attrUSN == -1:
|
||||
message(CHANGESD, "But the SD has been changed by someonelse "\
|
||||
"so it's impossible to know if the difference"\
|
||||
message(CHANGESD, "But the SD has been changed by someonelse "
|
||||
"so it's impossible to know if the difference"
|
||||
" cames from the modification or from a previous bug")
|
||||
dnNotToRecalculate.append(str(dn))
|
||||
else:
|
||||
@ -1342,7 +1342,7 @@ def rebuild_sd(samdb, names):
|
||||
listKeys.sort(dn_sort)
|
||||
|
||||
if len(dnToRecalculate) != 0:
|
||||
message(CHANGESD, "%d DNs have been marked as needed to be recalculated"\
|
||||
message(CHANGESD, "%d DNs have been marked as needed to be recalculated"
|
||||
", recalculating %d due to inheritance"
|
||||
% (len(dnToRecalculate), len(listKeys)))
|
||||
|
||||
@ -1372,7 +1372,7 @@ def hasATProvision(samdb):
|
||||
scope=SCOPE_BASE,
|
||||
attrs=["dn"])
|
||||
|
||||
if entry != None and len(entry) == 1:
|
||||
if entry is not None and len(entry) == 1:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
@ -1695,10 +1695,10 @@ if __name__ == '__main__':
|
||||
v = v + 1
|
||||
|
||||
message(CHANGE,
|
||||
"Find last provision USN, %d invocation(s) for a total of %d ranges" % \
|
||||
"Find last provision USN, %d invocation(s) for a total of %d ranges" %
|
||||
(len(lastProvisionUSNs.keys()), v /2 ))
|
||||
|
||||
if lastProvisionUSNs.get("default") != None:
|
||||
if lastProvisionUSNs.get("default") is not None:
|
||||
message(CHANGE, "Old style for usn ranges used")
|
||||
lastProvisionUSNs[str(names.invocation)] = lastProvisionUSNs["default"]
|
||||
del lastProvisionUSNs["default"]
|
||||
@ -1709,7 +1709,7 @@ if __name__ == '__main__':
|
||||
minobj = 5
|
||||
(hash_id, nb_obj) = findprovisionrange(ldbs.sam, ldb.Dn(ldbs.sam, str(names.rootdn)))
|
||||
message(SIMPLE, "Here is a list of changes that modified more than %d objects in 1 minute." % minobj)
|
||||
message(SIMPLE, "Usually changes made by provision and upgradeprovision are those who affect a couple"\
|
||||
message(SIMPLE, "Usually changes made by provision and upgradeprovision are those who affect a couple"
|
||||
" of hundred of objects or more")
|
||||
message(SIMPLE, "Total number of objects: %d" % nb_obj)
|
||||
message(SIMPLE, "")
|
||||
|
@ -105,7 +105,7 @@ class Ldb(_Ldb):
|
||||
# Allow admins to force non-sync ldb for all databases
|
||||
if lp is not None:
|
||||
nosync_p = lp.get("nosync", "ldb")
|
||||
if nosync_p is not None and nosync_p == True:
|
||||
if nosync_p is not None and nosync_p:
|
||||
flags |= ldb.FLG_NOSYNC
|
||||
|
||||
self.set_create_perms(0600)
|
||||
|
@ -380,8 +380,8 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
|
||||
m = ldb.Message()
|
||||
m.dn = obj.dn
|
||||
m['lastKnownParent'] = ldb.MessageElement(str(obj.dn.parent()), ldb.FLAG_MOD_REPLACE, 'lastKnownParent')
|
||||
|
||||
if self.do_modify(m, [],
|
||||
|
||||
if self.do_modify(m, [],
|
||||
"Failed to set lastKnownParent on lostAndFound object at %s" % (new_dn + lost_and_found)):
|
||||
self.report("Set lastKnownParent on lostAndFound object at %s" % (new_dn + lost_and_found))
|
||||
keep_transaction = True
|
||||
@ -473,7 +473,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
|
||||
if is_deleted and not target_is_deleted and reverse_link_name is not None:
|
||||
revealed_dn = self.find_revealed_link(obj.dn, attrname, guid)
|
||||
rmd_flags = revealed_dn.dn.get_extended_component("RMD_FLAGS")
|
||||
if rmd_flags != None and (int(rmd_flags) & 1) == 0:
|
||||
if rmd_flags is not None and (int(rmd_flags) & 1) == 0:
|
||||
# the RMD_FLAGS for this link should be 1, as the target is deleted
|
||||
self.err_incorrect_rmd_flags(obj, attrname, revealed_dn)
|
||||
continue
|
||||
@ -551,7 +551,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
|
||||
return True
|
||||
if dn == self.rid_dn:
|
||||
return True
|
||||
|
||||
|
||||
return False
|
||||
|
||||
def calculate_instancetype(self, dn):
|
||||
|
@ -246,7 +246,7 @@ class drs_Replicate(object):
|
||||
|
||||
while True:
|
||||
(level, ctr) = self.drs.DsGetNCChanges(self.drs_handle, req_level, req)
|
||||
if ctr.first_object == None and ctr.object_count != 0:
|
||||
if ctr.first_object is None and ctr.object_count != 0:
|
||||
raise RuntimeError("DsGetNCChanges: NULL first_object with object_count=%u" % (ctr.object_count))
|
||||
self.net.replicate_chunk(self.replication_state, level, ctr,
|
||||
schema=schema, req_level=req_level, req=req)
|
||||
|
@ -73,7 +73,7 @@ class NamingContext(object):
|
||||
scope=ldb.SCOPE_BASE, attrs=attrs)
|
||||
|
||||
except ldb.LdbError, (enum, estr):
|
||||
raise Exception("Unable to find naming context (%s)" % \
|
||||
raise Exception("Unable to find naming context (%s)" %
|
||||
(self.nc_dnstr, estr))
|
||||
msg = res[0]
|
||||
if "objectGUID" in msg:
|
||||
@ -213,7 +213,7 @@ class NCReplica(NamingContext):
|
||||
|
||||
def set_instantiated_flags(self, flags=None):
|
||||
'''Set or clear NC replica instantiated flags'''
|
||||
if (flags == None):
|
||||
if flags is None:
|
||||
self.rep_instantiated_flags = 0
|
||||
else:
|
||||
self.rep_instantiated_flags = flags
|
||||
@ -337,7 +337,7 @@ class NCReplica(NamingContext):
|
||||
# replacement list. Build a list
|
||||
# of to be deleted reps which we will
|
||||
# remove from rep_repsFrom list below
|
||||
if repsFrom.to_be_deleted == True:
|
||||
if repsFrom.to_be_deleted:
|
||||
delreps.append(repsFrom)
|
||||
modify = True
|
||||
continue
|
||||
@ -361,7 +361,7 @@ class NCReplica(NamingContext):
|
||||
# need to be deleted or input option has informed
|
||||
# us to be "readonly" (ro). Leave database
|
||||
# record "as is"
|
||||
if modify == False or ro == True:
|
||||
if not modify or ro:
|
||||
return
|
||||
|
||||
m = ldb.Message()
|
||||
@ -380,7 +380,7 @@ class NCReplica(NamingContext):
|
||||
def dumpstr_to_be_deleted(self):
|
||||
text=""
|
||||
for repsFrom in self.rep_repsFrom:
|
||||
if repsFrom.to_be_deleted == True:
|
||||
if repsFrom.to_be_deleted:
|
||||
if text:
|
||||
text = text + "\n%s" % repsFrom
|
||||
else:
|
||||
@ -390,7 +390,7 @@ class NCReplica(NamingContext):
|
||||
def dumpstr_to_be_modified(self):
|
||||
text=""
|
||||
for repsFrom in self.rep_repsFrom:
|
||||
if repsFrom.is_modified() == True:
|
||||
if repsFrom.is_modified():
|
||||
if text:
|
||||
text = text + "\n%s" % repsFrom
|
||||
else:
|
||||
@ -948,13 +948,13 @@ class NTDSConnection(object):
|
||||
self.to_be_deleted = False
|
||||
|
||||
# No database modification requested
|
||||
if ro == True:
|
||||
if ro:
|
||||
return
|
||||
|
||||
try:
|
||||
samdb.delete(self.dnstr)
|
||||
except ldb.LdbError, (enum, estr):
|
||||
raise Exception("Could not delete nTDSConnection for (%s) - (%s)" % \
|
||||
raise Exception("Could not delete nTDSConnection for (%s) - (%s)" %
|
||||
(self.dnstr, estr))
|
||||
|
||||
def commit_added(self, samdb, ro=False):
|
||||
@ -966,7 +966,7 @@ class NTDSConnection(object):
|
||||
self.to_be_added = False
|
||||
|
||||
# No database modification requested
|
||||
if ro == True:
|
||||
if ro:
|
||||
return
|
||||
|
||||
# First verify we don't have this entry to ensure nothing
|
||||
@ -979,10 +979,10 @@ class NTDSConnection(object):
|
||||
|
||||
except ldb.LdbError, (enum, estr):
|
||||
if enum != ldb.ERR_NO_SUCH_OBJECT:
|
||||
raise Exception("Unable to search for (%s) - (%s)" % \
|
||||
raise Exception("Unable to search for (%s) - (%s)" %
|
||||
(self.dnstr, estr))
|
||||
if found:
|
||||
raise Exception("nTDSConnection for (%s) already exists!" % \
|
||||
raise Exception("nTDSConnection for (%s) already exists!" %
|
||||
self.dnstr)
|
||||
|
||||
if self.enabled:
|
||||
@ -995,10 +995,10 @@ class NTDSConnection(object):
|
||||
m.dn = ldb.Dn(samdb, self.dnstr)
|
||||
|
||||
m["objectClass"] = \
|
||||
ldb.MessageElement("nTDSConnection", ldb.FLAG_MOD_ADD, \
|
||||
ldb.MessageElement("nTDSConnection", ldb.FLAG_MOD_ADD,
|
||||
"objectClass")
|
||||
m["showInAdvancedViewOnly"] = \
|
||||
ldb.MessageElement("TRUE", ldb.FLAG_MOD_ADD, \
|
||||
ldb.MessageElement("TRUE", ldb.FLAG_MOD_ADD,
|
||||
"showInAdvancedViewOnly")
|
||||
m["enabledConnection"] = \
|
||||
ldb.MessageElement(enablestr, ldb.FLAG_MOD_ADD, "enabledConnection")
|
||||
@ -1007,12 +1007,12 @@ class NTDSConnection(object):
|
||||
m["options"] = \
|
||||
ldb.MessageElement(str(self.options), ldb.FLAG_MOD_ADD, "options")
|
||||
m["systemFlags"] = \
|
||||
ldb.MessageElement(str(self.system_flags), ldb.FLAG_MOD_ADD, \
|
||||
ldb.MessageElement(str(self.system_flags), ldb.FLAG_MOD_ADD,
|
||||
"systemFlags")
|
||||
|
||||
if self.transport_dnstr is not None:
|
||||
m["transportType"] = \
|
||||
ldb.MessageElement(str(self.transport_dnstr), ldb.FLAG_MOD_ADD, \
|
||||
ldb.MessageElement(str(self.transport_dnstr), ldb.FLAG_MOD_ADD,
|
||||
"transportType")
|
||||
|
||||
if self.schedule is not None:
|
||||
@ -1022,7 +1022,7 @@ class NTDSConnection(object):
|
||||
try:
|
||||
samdb.add(m)
|
||||
except ldb.LdbError, (enum, estr):
|
||||
raise Exception("Could not add nTDSConnection for (%s) - (%s)" % \
|
||||
raise Exception("Could not add nTDSConnection for (%s) - (%s)" %
|
||||
(self.dnstr, estr))
|
||||
|
||||
def commit_modified(self, samdb, ro=False):
|
||||
@ -1034,7 +1034,7 @@ class NTDSConnection(object):
|
||||
self.to_be_modified = False
|
||||
|
||||
# No database modification requested
|
||||
if ro == True:
|
||||
if ro:
|
||||
return
|
||||
|
||||
# First verify we have this entry to ensure nothing
|
||||
@ -1047,9 +1047,9 @@ class NTDSConnection(object):
|
||||
if enum == ldb.ERR_NO_SUCH_OBJECT:
|
||||
found = False
|
||||
else:
|
||||
raise Exception("Unable to search for (%s) - (%s)" % \
|
||||
raise Exception("Unable to search for (%s) - (%s)" %
|
||||
(self.dnstr, estr))
|
||||
if found == False:
|
||||
if not found:
|
||||
raise Exception("nTDSConnection for (%s) doesn't exist!" %
|
||||
self.dnstr)
|
||||
|
||||
@ -1143,7 +1143,7 @@ class NTDSConnection(object):
|
||||
sched.headerArray[i].offset:
|
||||
return False
|
||||
|
||||
for a, b in zip(self.schedule.dataArray[i].slots, \
|
||||
for a, b in zip(self.schedule.dataArray[i].slots,
|
||||
sched.dataArray[i].slots):
|
||||
if a != b:
|
||||
return False
|
||||
@ -1617,7 +1617,7 @@ class Site(object):
|
||||
|
||||
# If readonly database then do not perform a
|
||||
# persistent update
|
||||
if ro == True:
|
||||
if ro:
|
||||
return True
|
||||
|
||||
# Perform update to the samdb
|
||||
@ -1767,7 +1767,7 @@ class GraphNode(object):
|
||||
# the DC on which ri "is present".
|
||||
#
|
||||
# c.options does not contain NTDSCONN_OPT_RODC_TOPOLOGY
|
||||
if connect and connect.is_rodc_topology() == False:
|
||||
if connect and not connect.is_rodc_topology():
|
||||
exists = True
|
||||
else:
|
||||
exists = False
|
||||
@ -2157,7 +2157,7 @@ class Vertex(object):
|
||||
|
||||
# We have a full replica which is the largest
|
||||
# value so exit
|
||||
if rep.is_partial() == False:
|
||||
if not rep.is_partial():
|
||||
self.color = VertexColor.red
|
||||
break
|
||||
else:
|
||||
|
@ -28,7 +28,7 @@ bitFields = {}
|
||||
bitFields["searchflags"] = {
|
||||
'fATTINDEX': 31, # IX
|
||||
'fPDNTATTINDEX': 30, # PI
|
||||
'fANR': 29, #AR
|
||||
'fANR': 29, # AR
|
||||
'fPRESERVEONDELETE': 28, # PR
|
||||
'fCOPY': 27, # CP
|
||||
'fTUPLEINDEX': 26, # TP
|
||||
@ -85,7 +85,7 @@ multivalued_attrs = set(["auxiliaryclass","maycontain","mustcontain","posssuperi
|
||||
def __read_folded_line(f, buffer):
|
||||
""" reads a line from an LDIF file, unfolding it"""
|
||||
line = buffer
|
||||
|
||||
|
||||
while True:
|
||||
l = f.readline()
|
||||
|
||||
@ -98,7 +98,7 @@ def __read_folded_line(f, buffer):
|
||||
# preserves '\n '
|
||||
line = line + l
|
||||
else:
|
||||
# non-continued line
|
||||
# non-continued line
|
||||
if line == "":
|
||||
line = l
|
||||
|
||||
@ -111,7 +111,7 @@ def __read_folded_line(f, buffer):
|
||||
# buffer contains the start of the next possibly folded line
|
||||
buffer = l
|
||||
break
|
||||
|
||||
|
||||
return (line, buffer)
|
||||
|
||||
|
||||
@ -122,13 +122,13 @@ def __read_raw_entries(f):
|
||||
attr_type_re = re.compile("^([A-Za-z]+[A-Za-z0-9-]*):")
|
||||
|
||||
buffer = ""
|
||||
|
||||
|
||||
while True:
|
||||
entry = []
|
||||
|
||||
|
||||
while True:
|
||||
(l, buffer) = __read_folded_line(f, buffer)
|
||||
|
||||
|
||||
if l[:1] == "#":
|
||||
continue
|
||||
|
||||
@ -140,7 +140,7 @@ def __read_raw_entries(f):
|
||||
if m:
|
||||
if l[-1:] == "\n":
|
||||
l = l[:-1]
|
||||
|
||||
|
||||
entry.append(l)
|
||||
else:
|
||||
print >>sys.stderr, "Invalid line: %s" % l,
|
||||
@ -170,7 +170,7 @@ def __convert_bitfield(key, value):
|
||||
|
||||
value = value.replace("\n ", "")
|
||||
value = value.replace(" ", "")
|
||||
|
||||
|
||||
try:
|
||||
# some attributes already have numeric values
|
||||
o = int(value)
|
||||
@ -186,7 +186,7 @@ def __convert_bitfield(key, value):
|
||||
def __write_ldif_one(entry):
|
||||
"""Write out entry as LDIF"""
|
||||
out = []
|
||||
|
||||
|
||||
for l in entry:
|
||||
if isinstance(l[1], str):
|
||||
vl = [l[1]]
|
||||
@ -196,21 +196,21 @@ def __write_ldif_one(entry):
|
||||
if l[0].lower() == 'omobjectclass':
|
||||
out.append("%s:: %s" % (l[0], l[1]))
|
||||
continue
|
||||
|
||||
|
||||
for v in vl:
|
||||
out.append("%s: %s" % (l[0], v))
|
||||
|
||||
|
||||
return "\n".join(out)
|
||||
|
||||
|
||||
def __transform_entry(entry, objectClass):
|
||||
"""Perform transformations required to convert the LDIF-like schema
|
||||
file entries to LDIF, including Samba-specific stuff."""
|
||||
|
||||
|
||||
entry = [l.split(":", 1) for l in entry]
|
||||
|
||||
cn = ""
|
||||
|
||||
|
||||
for l in entry:
|
||||
key = l[0].lower()
|
||||
l[1] = l[1].lstrip()
|
||||
@ -243,7 +243,7 @@ def __transform_entry(entry, objectClass):
|
||||
entry.insert(2, ["objectGUID", str(uuid.uuid4())])
|
||||
entry.insert(2, ["adminDescription", cn])
|
||||
entry.insert(2, ["adminDisplayName", cn])
|
||||
|
||||
|
||||
for l in entry:
|
||||
key = l[0].lower()
|
||||
|
||||
@ -256,7 +256,7 @@ def __parse_schema_file(filename, objectClass):
|
||||
"""Load and transform a schema file."""
|
||||
|
||||
out = []
|
||||
|
||||
|
||||
f = open(filename, "rU")
|
||||
for entry in __read_raw_entries(f):
|
||||
out.append(__write_ldif_one(__transform_entry(entry, objectClass)))
|
||||
@ -269,7 +269,7 @@ def read_ms_schema(attr_file, classes_file, dump_attributes = True, dump_classes
|
||||
|
||||
attr_ldif = ""
|
||||
classes_ldif = ""
|
||||
|
||||
|
||||
if dump_attributes:
|
||||
attr_ldif = __parse_schema_file(attr_file, "attributeSchema")
|
||||
if dump_classes:
|
||||
|
@ -158,7 +158,7 @@ class Command(object):
|
||||
undetermined_max_args = True
|
||||
else:
|
||||
max_args += 1
|
||||
if (len(args) < min_args) or (undetermined_max_args == False and len(args) > max_args):
|
||||
if (len(args) < min_args) or (not undetermined_max_args and len(args) > max_args):
|
||||
parser.print_usage()
|
||||
return -1
|
||||
|
||||
|
@ -56,7 +56,7 @@ class cmd_delegation_show(Command):
|
||||
# to the correct domain
|
||||
(cleanedaccount, realm, domain) = _get_user_realm_domain(accountname)
|
||||
|
||||
res = sam.search(expression="sAMAccountName=%s" %
|
||||
res = sam.search(expression="sAMAccountName=%s" %
|
||||
ldb.binary_encode(cleanedaccount),
|
||||
scope=ldb.SCOPE_SUBTREE,
|
||||
attrs=["userAccountControl", "msDS-AllowedToDelegateTo"])
|
||||
@ -189,7 +189,7 @@ class cmd_delegation_add_service(Command):
|
||||
# to the correct domain
|
||||
(cleanedaccount, realm, domain) = _get_user_realm_domain(accountname)
|
||||
|
||||
res = sam.search(expression="sAMAccountName=%s" %
|
||||
res = sam.search(expression="sAMAccountName=%s" %
|
||||
ldb.binary_encode(cleanedaccount),
|
||||
scope=ldb.SCOPE_SUBTREE,
|
||||
attrs=["msDS-AllowedToDelegateTo"])
|
||||
@ -233,7 +233,7 @@ class cmd_delegation_del_service(Command):
|
||||
# to the correct domain
|
||||
(cleanedaccount, realm, domain) = _get_user_realm_domain(accountname)
|
||||
|
||||
res = sam.search(expression="sAMAccountName=%s" %
|
||||
res = sam.search(expression="sAMAccountName=%s" %
|
||||
ldb.binary_encode(cleanedaccount),
|
||||
scope=ldb.SCOPE_SUBTREE,
|
||||
attrs=["msDS-AllowedToDelegateTo"])
|
||||
|
@ -173,9 +173,9 @@ class cmd_domain_provision(Command):
|
||||
help="choose machine password (otherwise random)"),
|
||||
Option("--dns-backend", type="choice", metavar="NAMESERVER-BACKEND",
|
||||
choices=["SAMBA_INTERNAL", "BIND9_FLATFILE", "BIND9_DLZ", "NONE"],
|
||||
help="The DNS server backend. SAMBA_INTERNAL is the builtin name server (default), " \
|
||||
"BIND9_FLATFILE uses bind9 text database to store zone information, " \
|
||||
"BIND9_DLZ uses samba4 AD to store zone information, " \
|
||||
help="The DNS server backend. SAMBA_INTERNAL is the builtin name server (default), "
|
||||
"BIND9_FLATFILE uses bind9 text database to store zone information, "
|
||||
"BIND9_DLZ uses samba4 AD to store zone information, "
|
||||
"NONE skips the DNS setup entirely (not recommended)",
|
||||
default="SAMBA_INTERNAL"),
|
||||
Option("--dnspass", type="string", metavar="PASSWORD",
|
||||
@ -452,8 +452,8 @@ class cmd_domain_dcpromo(Command):
|
||||
action="store_true"),
|
||||
Option("--dns-backend", type="choice", metavar="NAMESERVER-BACKEND",
|
||||
choices=["SAMBA_INTERNAL", "BIND9_DLZ", "NONE"],
|
||||
help="The DNS server backend. SAMBA_INTERNAL is the builtin name server (default), " \
|
||||
"BIND9_DLZ uses samba4 AD to store zone information, " \
|
||||
help="The DNS server backend. SAMBA_INTERNAL is the builtin name server (default), "
|
||||
"BIND9_DLZ uses samba4 AD to store zone information, "
|
||||
"NONE skips the DNS setup entirely (this DC will not be a DNS server)",
|
||||
default="SAMBA_INTERNAL")
|
||||
]
|
||||
@ -518,8 +518,8 @@ class cmd_domain_join(Command):
|
||||
action="store_true"),
|
||||
Option("--dns-backend", type="choice", metavar="NAMESERVER-BACKEND",
|
||||
choices=["SAMBA_INTERNAL", "BIND9_DLZ", "NONE"],
|
||||
help="The DNS server backend. SAMBA_INTERNAL is the builtin name server (default), " \
|
||||
"BIND9_DLZ uses samba4 AD to store zone information, " \
|
||||
help="The DNS server backend. SAMBA_INTERNAL is the builtin name server (default), "
|
||||
"BIND9_DLZ uses samba4 AD to store zone information, "
|
||||
"NONE skips the DNS setup entirely (this DC will not be a DNS server)",
|
||||
default="SAMBA_INTERNAL")
|
||||
]
|
||||
@ -1223,9 +1223,9 @@ class cmd_domain_classicupgrade(Command):
|
||||
action="store_true"),
|
||||
Option("--dns-backend", type="choice", metavar="NAMESERVER-BACKEND",
|
||||
choices=["SAMBA_INTERNAL", "BIND9_FLATFILE", "BIND9_DLZ", "NONE"],
|
||||
help="The DNS server backend. SAMBA_INTERNAL is the builtin name server (default), " \
|
||||
"BIND9_FLATFILE uses bind9 text database to store zone information, " \
|
||||
"BIND9_DLZ uses samba4 AD to store zone information, " \
|
||||
help="The DNS server backend. SAMBA_INTERNAL is the builtin name server (default), "
|
||||
"BIND9_FLATFILE uses bind9 text database to store zone information, "
|
||||
"BIND9_DLZ uses samba4 AD to store zone information, "
|
||||
"NONE skips the DNS setup entirely (this DC will not be a DNS server)",
|
||||
default="SAMBA_INTERNAL")
|
||||
]
|
||||
|
@ -256,7 +256,7 @@ all=all of the above"""),
|
||||
creds = credopts.get_credentials(lp, fallback_machine=True)
|
||||
|
||||
samdb = SamDB(url=H, session_info=system_session(),
|
||||
credentials=creds, lp=lp)
|
||||
credentials=creds, lp=lp)
|
||||
|
||||
if role == "all":
|
||||
transfer_role(self.outf, "rid", samdb)
|
||||
|
@ -879,7 +879,7 @@ class cmd_create(Command):
|
||||
Option("--tmpdir", help="Temporary directory for copying policy files", type=str)
|
||||
]
|
||||
|
||||
def run(self, displayname, H=None, tmpdir=None, sambaopts=None, credopts=None,
|
||||
def run(self, displayname, H=None, tmpdir=None, sambaopts=None, credopts=None,
|
||||
versionopts=None):
|
||||
|
||||
self.lp = sambaopts.get_loadparm()
|
||||
@ -926,7 +926,7 @@ class cmd_create(Command):
|
||||
gpt_contents = "[General]\r\nVersion=0\r\n"
|
||||
file(os.path.join(gpodir, "GPT.INI"), "w").write(gpt_contents)
|
||||
except Exception, e:
|
||||
raise CommandError("Error Creating GPO files", e)
|
||||
raise CommandError("Error Creating GPO files", e)
|
||||
|
||||
# Connect to DC over SMB
|
||||
[dom_name, service, sharepath] = parse_unc(unc_path)
|
||||
|
@ -28,14 +28,13 @@ import sys
|
||||
import samba
|
||||
import samba.getopt as options
|
||||
from samba import Ldb
|
||||
from samba.ndr import ndr_pack, ndr_unpack
|
||||
from samba.ndr import ndr_unpack
|
||||
from samba.dcerpc import security
|
||||
from ldb import SCOPE_SUBTREE, SCOPE_ONELEVEL, SCOPE_BASE, ERR_NO_SUCH_OBJECT, LdbError
|
||||
from samba.netcmd import (
|
||||
Command,
|
||||
CommandError,
|
||||
Option,
|
||||
SuperCommand,
|
||||
)
|
||||
|
||||
global summary
|
||||
@ -103,7 +102,7 @@ class LDAPBase(object):
|
||||
def find_servers(self):
|
||||
"""
|
||||
"""
|
||||
res = self.ldb.search(base="OU=Domain Controllers,%s" % self.base_dn, \
|
||||
res = self.ldb.search(base="OU=Domain Controllers,%s" % self.base_dn,
|
||||
scope=SCOPE_SUBTREE, expression="(objectClass=computer)", attrs=["cn"])
|
||||
assert len(res) > 0
|
||||
srv = []
|
||||
@ -112,7 +111,7 @@ class LDAPBase(object):
|
||||
return srv
|
||||
|
||||
def find_netbios(self):
|
||||
res = self.ldb.search(base="CN=Partitions,%s" % self.config_dn, \
|
||||
res = self.ldb.search(base="CN=Partitions,%s" % self.config_dn,
|
||||
scope=SCOPE_SUBTREE, attrs=["nETBIOSName"])
|
||||
assert len(res) > 0
|
||||
for x in res:
|
||||
|
@ -172,7 +172,7 @@ class cmd_ntacl_sysvolreset(Command):
|
||||
netlogon = lp.get("path", "netlogon")
|
||||
sysvol = lp.get("path", "sysvol")
|
||||
try:
|
||||
samdb = SamDB(session_info=system_session(),
|
||||
samdb = SamDB(session_info=system_session(),
|
||||
lp=lp)
|
||||
except Exception, e:
|
||||
raise CommandError("Unable to open samdb:", e)
|
||||
@ -207,10 +207,10 @@ class cmd_ntacl_sysvolreset(Command):
|
||||
|
||||
if use_ntvfs:
|
||||
logger.warning("Please note that POSIX permissions have NOT been changed, only the stored NT ACL")
|
||||
|
||||
|
||||
provision.setsysvolacl(samdb, netlogon, sysvol,
|
||||
LA_uid, BA_gid, domain_sid,
|
||||
lp.get("realm").lower(), samdb.domain_dn(),
|
||||
LA_uid, BA_gid, domain_sid,
|
||||
lp.get("realm").lower(), samdb.domain_dn(),
|
||||
lp, use_ntvfs=use_ntvfs)
|
||||
|
||||
class cmd_ntacl_sysvolcheck(Command):
|
||||
@ -223,8 +223,7 @@ class cmd_ntacl_sysvolcheck(Command):
|
||||
"versionopts": options.VersionOptions,
|
||||
}
|
||||
|
||||
def run(self,
|
||||
credopts=None, sambaopts=None, versionopts=None):
|
||||
def run(self, credopts=None, sambaopts=None, versionopts=None):
|
||||
lp = sambaopts.get_loadparm()
|
||||
path = lp.private_path("secrets.ldb")
|
||||
creds = credopts.get_credentials(lp)
|
||||
@ -234,16 +233,15 @@ class cmd_ntacl_sysvolcheck(Command):
|
||||
netlogon = lp.get("path", "netlogon")
|
||||
sysvol = lp.get("path", "sysvol")
|
||||
try:
|
||||
samdb = SamDB(session_info=system_session(),
|
||||
lp=lp)
|
||||
samdb = SamDB(session_info=system_session(), lp=lp)
|
||||
except Exception, e:
|
||||
raise CommandError("Unable to open samdb:", e)
|
||||
|
||||
domain_sid = security.dom_sid(samdb.domain_sid)
|
||||
|
||||
provision.checksysvolacl(samdb, netlogon, sysvol,
|
||||
domain_sid,
|
||||
lp.get("realm").lower(), samdb.domain_dn(),
|
||||
domain_sid,
|
||||
lp.get("realm").lower(), samdb.domain_dn(),
|
||||
lp)
|
||||
|
||||
|
||||
|
@ -60,7 +60,7 @@ class cmd_spn_list(Command):
|
||||
spns = res[0].get("servicePrincipalName")
|
||||
found = False
|
||||
flag = ldb.FLAG_MOD_ADD
|
||||
if spns != None:
|
||||
if spns is not None:
|
||||
self.outf.write(
|
||||
"User %s has the following servicePrincipalName: \n" %
|
||||
res[0].dn)
|
||||
@ -84,7 +84,7 @@ class cmd_spn_add(Command):
|
||||
"versionopts": options.VersionOptions,
|
||||
}
|
||||
takes_options = [
|
||||
Option("--force", help="Force the addition of the spn"\
|
||||
Option("--force", help="Force the addition of the spn"
|
||||
" even it exists already", action="store_true"),
|
||||
]
|
||||
takes_args = ["name", "user"]
|
||||
@ -99,7 +99,7 @@ class cmd_spn_add(Command):
|
||||
res = sam.search(
|
||||
expression="servicePrincipalName=%s" % ldb.binary_encode(name),
|
||||
scope=ldb.SCOPE_SUBTREE)
|
||||
if len(res) != 0 and not force:
|
||||
if len(res) != 0 and not force:
|
||||
raise CommandError("Service principal %s already"
|
||||
" affected to another user" % name)
|
||||
|
||||
@ -114,7 +114,7 @@ class cmd_spn_add(Command):
|
||||
tab = []
|
||||
found = False
|
||||
flag = ldb.FLAG_MOD_ADD
|
||||
if spns != None:
|
||||
if spns is not None:
|
||||
for e in spns:
|
||||
if str(e) == name:
|
||||
found = True
|
||||
@ -172,8 +172,8 @@ class cmd_spn_delete(Command):
|
||||
listUser = ""
|
||||
for r in res:
|
||||
listUser = "%s\n%s" % (listUser, str(r.dn))
|
||||
raise CommandError("More than one user has the spn %s "\
|
||||
"and no specific user was specified, list of users"\
|
||||
raise CommandError("More than one user has the spn %s "
|
||||
"and no specific user was specified, list of users"
|
||||
" with this spn:%s" % (name, listUser))
|
||||
else:
|
||||
result=res[0]
|
||||
@ -182,7 +182,7 @@ class cmd_spn_delete(Command):
|
||||
msg = ldb.Message()
|
||||
spns = result.get("servicePrincipalName")
|
||||
tab = []
|
||||
if spns != None:
|
||||
if spns is not None:
|
||||
for e in spns:
|
||||
if str(e) != name:
|
||||
tab.append(str(e))
|
||||
|
@ -63,7 +63,7 @@ def getntacl(lp, file, backend=None, eadbfile=None, direct_db_access=True):
|
||||
attribute = backend_obj.wrap_getxattr(dbname, file,
|
||||
xattr.XATTR_NTACL_NAME)
|
||||
except Exception:
|
||||
# FIXME: Don't catch all exceptions, just those related to opening
|
||||
# FIXME: Don't catch all exceptions, just those related to opening
|
||||
# xattrdb
|
||||
print "Fail to open %s" % dbname
|
||||
attribute = samba.xattr_native.wrap_getxattr(file,
|
||||
@ -96,10 +96,10 @@ def setntacl(lp, file, sddl, domsid, backend=None, eadbfile=None, use_ntvfs=True
|
||||
backend_obj.wrap_setxattr(dbname,
|
||||
file, xattr.XATTR_NTACL_NAME, ndr_pack(ntacl))
|
||||
except Exception:
|
||||
# FIXME: Don't catch all exceptions, just those related to opening
|
||||
# FIXME: Don't catch all exceptions, just those related to opening
|
||||
# xattrdb
|
||||
print "Fail to open %s" % dbname
|
||||
samba.xattr_native.wrap_setxattr(file, xattr.XATTR_NTACL_NAME,
|
||||
samba.xattr_native.wrap_setxattr(file, xattr.XATTR_NTACL_NAME,
|
||||
ndr_pack(ntacl))
|
||||
else:
|
||||
samba.xattr_native.wrap_setxattr(file, xattr.XATTR_NTACL_NAME,
|
||||
@ -144,14 +144,14 @@ def ldapmask2filemask(ldm):
|
||||
filemask = ldm & STANDARD_RIGHTS_ALL
|
||||
|
||||
if (ldm & RIGHT_DS_READ_PROPERTY) and (ldm & RIGHT_DS_LIST_CONTENTS):
|
||||
filemask = filemask | (SYNCHRONIZE | FILE_LIST_DIRECTORY |\
|
||||
FILE_READ_ATTRIBUTES | FILE_READ_EA |\
|
||||
filemask = filemask | (SYNCHRONIZE | FILE_LIST_DIRECTORY |
|
||||
FILE_READ_ATTRIBUTES | FILE_READ_EA |
|
||||
FILE_READ_DATA | FILE_EXECUTE)
|
||||
|
||||
if ldm & RIGHT_DS_WRITE_PROPERTY:
|
||||
filemask = filemask | (SYNCHRONIZE | FILE_WRITE_DATA |\
|
||||
FILE_APPEND_DATA | FILE_WRITE_EA |\
|
||||
FILE_WRITE_ATTRIBUTES | FILE_ADD_FILE |\
|
||||
filemask = filemask | (SYNCHRONIZE | FILE_WRITE_DATA |
|
||||
FILE_APPEND_DATA | FILE_WRITE_EA |
|
||||
FILE_WRITE_ATTRIBUTES | FILE_ADD_FILE |
|
||||
FILE_ADD_SUBDIRECTORY)
|
||||
|
||||
if ldm & RIGHT_DS_CREATE_CHILD:
|
||||
@ -165,7 +165,7 @@ def ldapmask2filemask(ldm):
|
||||
|
||||
def dsacl2fsacl(dssddl, domsid):
|
||||
"""
|
||||
|
||||
|
||||
This function takes an the SDDL representation of a DS
|
||||
ACL and return the SDDL representation of this ACL adapted
|
||||
for files. It's used for Policy object provision
|
||||
|
@ -358,7 +358,7 @@ def get_last_provision_usn(sam):
|
||||
if (len(myids) > 0 and id not in myids):
|
||||
continue
|
||||
tab2 = p.split(tab1[0])
|
||||
if range.get(id) == None:
|
||||
if range.get(id) is None:
|
||||
range[id] = []
|
||||
range[id].append(tab2[0])
|
||||
range[id].append(tab2[1])
|
||||
@ -1743,7 +1743,7 @@ def sanitize_server_role(role):
|
||||
"active directory domain controller", "standalone server")
|
||||
"""
|
||||
try:
|
||||
return _ROLES_MAP[role]
|
||||
return _ROLES_MAP[role]
|
||||
except KeyError:
|
||||
raise ValueError(role)
|
||||
|
||||
|
@ -384,7 +384,7 @@ class OpenLDAPBackend(LDAPBackend):
|
||||
lnkattr = self.schema.linked_attributes()
|
||||
refint_attributes = ""
|
||||
memberof_config = "# Generated from Samba4 schema\n"
|
||||
for att in lnkattr.keys():
|
||||
for att in lnkattr.keys():
|
||||
if lnkattr[att] is not None:
|
||||
refint_attributes = refint_attributes + " " + att
|
||||
|
||||
|
@ -155,16 +155,16 @@ dn: @INDEXLIST
|
||||
return dsdb._dsdb_convert_schema_to_openldap(self.ldb, target, mapping)
|
||||
|
||||
|
||||
# Return a hash with the forward attribute as a key and the back as the value
|
||||
# Return a hash with the forward attribute as a key and the back as the value
|
||||
def get_linked_attributes(schemadn,schemaldb):
|
||||
attrs = ["linkID", "lDAPDisplayName"]
|
||||
res = schemaldb.search(expression="(&(linkID=*)(!(linkID:1.2.840.113556.1.4.803:=1))(objectclass=attributeSchema)(attributeSyntax=2.5.5.1))", base=schemadn, scope=SCOPE_ONELEVEL, attrs=attrs)
|
||||
attributes = {}
|
||||
for i in range (0, len(res)):
|
||||
expression = "(&(objectclass=attributeSchema)(linkID=%d)(attributeSyntax=2.5.5.1))" % (int(res[i]["linkID"][0])+1)
|
||||
target = schemaldb.searchone(basedn=schemadn,
|
||||
expression=expression,
|
||||
attribute="lDAPDisplayName",
|
||||
target = schemaldb.searchone(basedn=schemadn,
|
||||
expression=expression,
|
||||
attribute="lDAPDisplayName",
|
||||
scope=SCOPE_SUBTREE)
|
||||
if target is not None:
|
||||
attributes[str(res[i]["lDAPDisplayName"])]=str(target)
|
||||
|
@ -15,9 +15,9 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
"""Tests for the Auth Python bindings.
|
||||
"""Tests for the Auth Python bindings.
|
||||
|
||||
Note that this just tests the bindings work. It does not intend to test
|
||||
Note that this just tests the bindings work. It does not intend to test
|
||||
the functionality, that's already done in other tests.
|
||||
"""
|
||||
|
||||
|
@ -36,11 +36,11 @@ class SubstituteVarTestCase(TestCase):
|
||||
samba.substitute_var("foo ${bar}", {"bar": "bla"}))
|
||||
|
||||
def test_broken(self):
|
||||
self.assertEquals("foo ${bdkjfhsdkfh sdkfh ",
|
||||
self.assertEquals("foo ${bdkjfhsdkfh sdkfh ",
|
||||
samba.substitute_var("foo ${bdkjfhsdkfh sdkfh ", {"bar": "bla"}))
|
||||
|
||||
def test_unknown_var(self):
|
||||
self.assertEquals("foo ${bla} gsff",
|
||||
self.assertEquals("foo ${bla} gsff",
|
||||
samba.substitute_var("foo ${bla} gsff", {"bar": "bla"}))
|
||||
|
||||
def test_check_all_substituted(self):
|
||||
|
@ -15,9 +15,9 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

"""Tests for the Credentials Python bindings.

Note that this just tests the bindings work. It does not intend to test
the functionality, that's already done in other tests.
"""

@ -78,7 +78,7 @@ class CredentialsTests(samba.tests.TestCase):

def test_get_nt_hash(self):
self.creds.set_password("geheim")
self.assertEquals('\xc2\xae\x1f\xe6\xe6H\x84cRE>\x81o*\xeb\x93',
self.creds.get_nt_hash())

def test_guess(self):
@ -26,26 +26,26 @@ class BareTestCase(samba.tests.TestCase):

def test_bare(self):
# Connect to the echo pipe
x = ClientConnection("ncalrpc:localhost[DEFAULT]",
("60a15ec5-4de8-11d7-a637-005056a20182", 1),
lp_ctx=samba.tests.env_loadparm())
self.assertEquals("\x01\x00\x00\x00", x.request(0, chr(0) * 4))

def test_alter_context(self):
x = ClientConnection("ncalrpc:localhost[DEFAULT]",
("12345778-1234-abcd-ef00-0123456789ac", 1),
lp_ctx=samba.tests.env_loadparm())
y = ClientConnection("ncalrpc:localhost",
("60a15ec5-4de8-11d7-a637-005056a20182", 1),
basis_connection=x, lp_ctx=samba.tests.env_loadparm())
x.alter_context(("60a15ec5-4de8-11d7-a637-005056a20182", 1))
# FIXME: self.assertEquals("\x01\x00\x00\x00", x.request(0, chr(0) * 4))

def test_two_connections(self):
x = ClientConnection("ncalrpc:localhost[DEFAULT]",
("60a15ec5-4de8-11d7-a637-005056a20182", 1),
lp_ctx=samba.tests.env_loadparm())
y = ClientConnection("ncalrpc:localhost",
("60a15ec5-4de8-11d7-a637-005056a20182", 1),
basis_connection=x, lp_ctx=samba.tests.env_loadparm())
self.assertEquals("\x01\x00\x00\x00", y.request(0, chr(0) * 4))
@ -44,7 +44,7 @@ class WinregTests(RpcInterfaceTestCase):
self.conn.CloseKey(handle)

def test_getkeyinfo(self):
handle = self.conn.OpenHKLM(None,
winreg.KEY_QUERY_VALUE | winreg.KEY_ENUMERATE_SUB_KEYS)
x = self.conn.QueryInfoKey(handle, winreg.String())
self.assertEquals(9, len(x)) # should return a 9-tuple

@ -44,6 +44,6 @@ class UnixinfoTests(RpcInterfaceTestCase):

def test_uidtosid(self):
self.conn.UidToSid(1000)

def test_uidtosid_fail(self):
self.assertRaises(TypeError, self.conn.UidToSid, "100")
@ -47,13 +47,13 @@ class DNSTest(TestCase):
def assert_dns_rcode_equals(self, packet, rcode):
"Helper function to check return code"
p_errcode = packet.operation & 0x000F
self.assertEquals(p_errcode, rcode, "Expected RCODE %s, got %s" % \
self.assertEquals(p_errcode, rcode, "Expected RCODE %s, got %s" %
(self.errstr(rcode), self.errstr(p_errcode)))

def assert_dns_opcode_equals(self, packet, opcode):
"Helper function to check opcode"
p_opcode = packet.operation & 0x7800
self.assertEquals(p_opcode, opcode, "Expected OPCODE %s, got %s" % \
self.assertEquals(p_opcode, opcode, "Expected OPCODE %s, got %s" %
(opcode, p_opcode))

def make_name_packet(self, opcode, qid=None):

@ -17,7 +17,7 @@

"""Tests for GENSEC.

Note that this just tests the bindings work. It does not intend to test
the functionality, that's already done in other tests.
"""
@ -54,7 +54,7 @@ class GensecTests(samba.tests.TestCase):
self.gensec_client.want_feature(gensec.FEATURE_SEAL)
self.gensec_client.start_mech_by_sasl_name("GSSAPI")

self.gensec_server = gensec.Security.start_server(settings=self.settings,
auth_context=auth.AuthContext(lp_ctx=self.lp_ctx))
creds = Credentials()
creds.guess(self.lp_ctx)

@ -69,7 +69,7 @@ class GensecTests(samba.tests.TestCase):
server_to_client = ""

"""Run the actual call loop"""
while client_finished == False and server_finished == False:
while not client_finished and not server_finished:
if not client_finished:
print "running client gensec_update"
(client_finished, client_to_server) = self.gensec_client.update(server_to_client)

@ -118,7 +118,7 @@ class GensecTests(samba.tests.TestCase):

"""Run the actual call loop"""
i = 0
while client_finished == False or server_finished == False:
while not client_finished or not server_finished:
i += 1
if not client_finished:
print "running client gensec_update: %d: %r" % (len(server_to_client), server_to_client)
@ -126,7 +126,7 @@ class Disabled(object):
class SanitizeServerRoleTests(TestCase):

def test_same(self):
self.assertEquals("standalone server",
sanitize_server_role("standalone server"))
self.assertEquals("member server",
sanitize_server_role("member server"))

@ -55,8 +55,8 @@ class RegistryTestCase(TestCase):
self.assertEquals(["SOFTWARE", "SYSTEM"], self.registry.subkeys("HKLM"))

def test_values(self):
self.assertEquals({'DisplayName': (1L, 'E\x00v\x00e\x00n\x00t\x00 \x00L\x00o\x00g\x00\x00\x00'),
'ErrorControl': (4L, '\x01\x00\x00\x00')},
self.registry.values("HKLM/SYSTEM/CURRENTCONTROLSET/SERVICES/EVENTLOG"))
@ -56,7 +56,7 @@ class MapBaseTestCase(TestCaseInTempDir):
"@LIST": "rootdse,paged_results,server_sort,asq,samldb,password_hash,operational,objectguid,rdn_name,samba3sam,samba3sid,show_deleted,partition"})

ldb.add({"dn": "@PARTITION",
"partition": ["%s" % (s4.basedn_casefold),
"%s" % (s3.basedn_casefold)],
"replicateEntries": ["@ATTRIBUTES", "@INDEXLIST"],
"modules": "*:"})

@ -79,7 +79,7 @@ class MapBaseTestCase(TestCaseInTempDir):
tempdir = self.tempdir

class Target:
"""Simple helper class that contains data for a specific SAM
connection."""

def __init__(self, basedn, dn, lp):

@ -169,9 +169,9 @@ class Samba3SamTestCase(MapBaseTestCase):
"""Looking up mapped entry containing SID"""
msg = self.ldb.search(expression="(cn=Replicator)")
self.assertEquals(len(msg), 1)
self.assertEquals(str(msg[0].dn),
"cn=Replicator,ou=Groups,dc=vernstok,dc=nl")
self.assertTrue("objectSid" in msg[0])
self.assertSidEquals("S-1-5-21-4231626423-2410014848-2360679739-552",
msg[0]["objectSid"])
oc = set(msg[0]["objectClass"])
@ -180,28 +180,28 @@ class Samba3SamTestCase(MapBaseTestCase):
def test_search_by_objclass(self):
"""Looking up by objectClass"""
msg = self.ldb.search(expression="(|(objectClass=user)(cn=Administrator))")
self.assertEquals(set([str(m.dn) for m in msg]),
set(["unixName=Administrator,ou=Users,dc=vernstok,dc=nl",
"unixName=nobody,ou=Users,dc=vernstok,dc=nl"]))

def test_s3sam_modify(self):
# Adding a record that will be fallbacked
self.ldb.add({"dn": "cn=Foo",
"foo": "bar",
"blah": "Blie",
"cn": "Foo",
"showInAdvancedViewOnly": "TRUE"}
)

# Checking for existence of record (local)
# TODO: This record must be searched in the local database, which is
# currently only supported for base searches
# msg = ldb.search(expression="(cn=Foo)", ['foo','blah','cn','showInAdvancedViewOnly')]
# TODO: Actually, this version should work as well but doesn't...
#
#
msg = self.ldb.search(expression="(cn=Foo)", base="cn=Foo",
scope=SCOPE_BASE,
attrs=['foo','blah','cn','showInAdvancedViewOnly'])
self.assertEquals(len(msg), 1)
self.assertEquals(str(msg[0]["showInAdvancedViewOnly"]), "TRUE")
@ -216,14 +216,14 @@ class Samba3SamTestCase(MapBaseTestCase):
"cn": "Niemand"})

# Checking for existence of record (remote)
msg = self.ldb.search(expression="(unixName=bin)",
attrs=['unixName','cn','dn', 'sambaUnicodePwd'])
self.assertEquals(len(msg), 1)
self.assertEquals(str(msg[0]["cn"]), "Niemand")
self.assertEquals(str(msg[0]["sambaUnicodePwd"]), "geheim")

# Checking for existence of record (local && remote)
msg = self.ldb.search(expression="(&(unixName=bin)(sambaUnicodePwd=geheim))",
attrs=['unixName','cn','dn', 'sambaUnicodePwd'])
self.assertEquals(len(msg), 1) # TODO: should check with more records
self.assertEquals(str(msg[0]["cn"]), "Niemand")

@ -231,7 +231,7 @@ class Samba3SamTestCase(MapBaseTestCase):
self.assertEquals(str(msg[0]["sambaUnicodePwd"]), "geheim")

# Checking for existence of record (local || remote)
msg = self.ldb.search(expression="(|(unixName=bin)(sambaUnicodePwd=geheim))",
attrs=['unixName','cn','dn', 'sambaUnicodePwd'])
#print "got %d replies" % len(msg)
self.assertEquals(len(msg), 1) # TODO: should check with more records

@ -242,7 +242,7 @@ class Samba3SamTestCase(MapBaseTestCase):
# Checking for data in destination database
msg = self.samba3.db.search(expression="(cn=Niemand)")
self.assertTrue(len(msg) >= 1)
self.assertEquals(str(msg[0]["sambaSID"]),
"S-1-5-21-4231626423-2410014848-2360679739-2001")
self.assertEquals(str(msg[0]["displayName"]), "Niemand")
@ -286,13 +286,13 @@ delete: description
self.assertTrue(not "description" in msg[0])

# Renaming record...
self.ldb.rename("cn=Niemand,cn=Users,dc=vernstok,dc=nl",
"cn=Niemand2,cn=Users,dc=vernstok,dc=nl")

# Checking whether DN has changed...
msg = self.ldb.search(expression="(cn=Niemand2)")
self.assertEquals(len(msg), 1)
self.assertEquals(str(msg[0].dn),
"cn=Niemand2,cn=Users,dc=vernstok,dc=nl")

# Deleting record...

@ -377,7 +377,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
"objectClass": "posixAccount",
"cn": "A",
"sambaNextRid": "x",
"sambaBadPasswordCount": "x",
"sambaLogonTime": "x",
"description": "x",
"sambaSID": "S-1-5-21-4231626423-2410014848-2360679739-552",
@ -405,7 +405,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552

# Search remote record by local DN
dn = self.samba4.dn("cn=A")
res = self.ldb.search(dn, scope=SCOPE_BASE,
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn)

@ -414,7 +414,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552

# Search remote record by remote DN
dn = self.samba3.dn("cn=A")
res = self.samba3.db.search(dn, scope=SCOPE_BASE,
attrs=["dnsHostName", "lastLogon", "sambaLogonTime"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn)

@ -424,7 +424,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552

# Search split record by local DN
dn = self.samba4.dn("cn=X")
res = self.ldb.search(dn, scope=SCOPE_BASE,
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn)

@ -433,7 +433,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552

# Search split record by remote DN
dn = self.samba3.dn("cn=X")
res = self.samba3.db.search(dn, scope=SCOPE_BASE,
attrs=["dnsHostName", "lastLogon", "sambaLogonTime"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn)
@ -444,7 +444,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
# Testing search by attribute

# Search by ignored attribute
res = self.ldb.search(expression="(revision=x)", scope=SCOPE_DEFAULT,
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 2)
res = sorted(res, key=attrgetter('dn'))

@ -456,7 +456,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[1]["lastLogon"]), "y")

# Search by kept attribute
res = self.ldb.search(expression="(description=y)",
scope=SCOPE_DEFAULT, attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 2)
res = sorted(res, key=attrgetter('dn'))

@ -500,10 +500,10 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
res[0]["objectSid"])
self.assertTrue("objectSid" in res[0])

# Search by generated attribute
# In most cases, this even works when the mapping is missing
# a `convert_operator' by enumerating the remote db.
res = self.ldb.search(expression="(primaryGroupID=512)",
attrs=["dnsHostName", "lastLogon", "primaryGroupID"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), self.samba4.dn("cn=A"))
@ -520,10 +520,10 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
# print obj + ": " + res[i][obj]
# }
# print "---"
#

# Search by remote name of renamed attribute */
res = self.ldb.search(expression="(sambaBadPasswordCount=*)",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 0)

@ -562,7 +562,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
# Testing search by parse tree

# Search by conjunction of local attributes
res = self.ldb.search(expression="(&(codePage=x)(revision=x))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 2)
res = sorted(res, key=attrgetter('dn'))

@ -574,7 +574,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[1]["lastLogon"]), "y")

# Search by conjunction of remote attributes
res = self.ldb.search(expression="(&(lastLogon=x)(description=x))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 2)
res = sorted(res, key=attrgetter('dn'))
@ -584,9 +584,9 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[1].dn), self.samba4.dn("cn=X"))
self.assertEquals(str(res[1]["dnsHostName"]), "x")
self.assertEquals(str(res[1]["lastLogon"]), "x")

# Search by conjunction of local and remote attribute
res = self.ldb.search(expression="(&(codePage=x)(description=x))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 2)
res = sorted(res, key=attrgetter('dn'))

@ -599,15 +599,15 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552

# Search by conjunction of local and remote attribute w/o match
attrs = ["dnsHostName", "lastLogon"]
res = self.ldb.search(expression="(&(codePage=x)(nextRid=x))",
attrs=attrs)
self.assertEquals(len(res), 0)
res = self.ldb.search(expression="(&(revision=x)(lastLogon=z))",
attrs=attrs)
self.assertEquals(len(res), 0)

# Search by disjunction of local attributes
res = self.ldb.search(expression="(|(revision=x)(dnsHostName=x))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 2)
res = sorted(res, key=attrgetter('dn'))
@ -619,7 +619,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[1]["lastLogon"]), "y")

# Search by disjunction of remote attributes
res = self.ldb.search(expression="(|(badPwdCount=x)(lastLogon=x))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 3)
res = sorted(res, key=attrgetter('dn'))

@ -634,7 +634,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[2]["lastLogon"]), "x")

# Search by disjunction of local and remote attribute
res = self.ldb.search(expression="(|(revision=x)(lastLogon=y))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 3)
res = sorted(res, key=attrgetter('dn'))

@ -649,12 +649,12 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[2]["lastLogon"]), "y")

# Search by disjunction of local and remote attribute w/o match
res = self.ldb.search(expression="(|(codePage=y)(nextRid=z))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 0)

# Search by negated local attribute
res = self.ldb.search(expression="(!(revision=x))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 6)
res = sorted(res, key=attrgetter('dn'))
@ -672,7 +672,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[3]["lastLogon"]), "z")

# Search by negated remote attribute
res = self.ldb.search(expression="(!(description=x))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 4)
res = sorted(res, key=attrgetter('dn'))

@ -684,7 +684,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[1]["lastLogon"]), "z")

# Search by negated conjunction of local attributes
res = self.ldb.search(expression="(!(&(codePage=x)(revision=x)))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 6)
res = sorted(res, key=attrgetter('dn'))

@ -702,7 +702,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[3]["lastLogon"]), "z")

# Search by negated conjunction of remote attributes
res = self.ldb.search(expression="(!(&(lastLogon=x)(description=x)))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 6)
res = sorted(res, key=attrgetter('dn'))

@ -720,7 +720,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[3]["lastLogon"]), "z")

# Search by negated conjunction of local and remote attribute
res = self.ldb.search(expression="(!(&(codePage=x)(description=x)))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 6)
res = sorted(res, key=attrgetter('dn'))
@ -738,7 +738,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[3]["lastLogon"]), "z")

# Search by negated disjunction of local attributes
res = self.ldb.search(expression="(!(|(revision=x)(dnsHostName=x)))",
attrs=["dnsHostName", "lastLogon"])
res = sorted(res, key=attrgetter('dn'))
self.assertEquals(str(res[0].dn), self.samba4.dn("cn=A"))

@ -755,7 +755,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[3]["lastLogon"]), "z")

# Search by negated disjunction of remote attributes
res = self.ldb.search(expression="(!(|(badPwdCount=x)(lastLogon=x)))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 5)
res = sorted(res, key=attrgetter('dn'))

@ -770,7 +770,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[2]["lastLogon"]), "z")

# Search by negated disjunction of local and remote attribute
res = self.ldb.search(expression="(!(|(revision=x)(lastLogon=y)))",
attrs=["dnsHostName", "lastLogon"])
self.assertEquals(len(res), 5)
res = sorted(res, key=attrgetter('dn'))

@ -813,7 +813,7 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
"""Modification of local records."""
# Add local record
dn = "cn=test,dc=idealx,dc=org"
self.ldb.add({"dn": dn,
"cn": "test",
"foo": "bar",
"revision": "1",
@ -827,11 +827,11 @@ objectSid: S-1-5-21-4231626423-2410014848-2360679739-552
self.assertEquals(str(res[0]["revision"]), "1")
self.assertEquals(str(res[0]["description"]), "test")
# Check it's not in the local db
res = self.samba4.db.search(expression="(cn=test)",
scope=SCOPE_DEFAULT, attrs=attrs)
self.assertEquals(len(res), 0)
# Check it's not in the remote db
res = self.samba3.db.search(expression="(cn=test)",
scope=SCOPE_DEFAULT, attrs=attrs)
self.assertEquals(len(res), 0)

@ -874,13 +874,13 @@ description: foo
# Add remote record
dn = self.samba4.dn("cn=test")
dn2 = self.samba3.dn("cn=test")
self.samba3.db.add({"dn": dn2,
"cn": "test",
"description": "foo",
"sambaBadPasswordCount": "3",
"sambaNextRid": "1001"})
# Check it's there
res = self.samba3.db.search(dn2, scope=SCOPE_BASE,
attrs=["description", "sambaBadPasswordCount", "sambaNextRid"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn2)
@ -909,7 +909,7 @@ badPwdCount: 4
"""
self.ldb.modify_ldif(ldif)
# Check in mapped db
res = self.ldb.search(dn, scope=SCOPE_BASE,
attrs=["description", "badPwdCount", "nextRid"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn)

@ -917,7 +917,7 @@ badPwdCount: 4
self.assertEquals(str(res[0]["badPwdCount"]), "4")
self.assertEquals(str(res[0]["nextRid"]), "1001")
# Check in remote db
res = self.samba3.db.search(dn2, scope=SCOPE_BASE,
attrs=["description", "sambaBadPasswordCount", "sambaNextRid"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn2)

@ -930,16 +930,16 @@ badPwdCount: 4
self.ldb.rename(dn, dn2)
# Check in mapped db
dn = dn2
res = self.ldb.search(dn, scope=SCOPE_BASE,
attrs=["description", "badPwdCount", "nextRid"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn)
self.assertEquals(str(res[0]["description"]), "test")
self.assertEquals(str(res[0]["badPwdCount"]), "4")
self.assertEquals(str(res[0]["nextRid"]), "1001")
# Check in remote db
dn2 = self.samba3.dn("cn=toast")
res = self.samba3.db.search(dn2, scope=SCOPE_BASE,
attrs=["description", "sambaBadPasswordCount", "sambaNextRid"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn2)
@ -961,7 +961,7 @@ badPwdCount: 4
# Add remote record (same as before)
dn = self.samba4.dn("cn=test")
dn2 = self.samba3.dn("cn=test")
self.samba3.db.add({"dn": dn2,
"cn": "test",
"description": "foo",
"sambaBadPasswordCount": "3",

@ -1030,7 +1030,7 @@ description: test
self.assertTrue(not "nextRid" in res[0])
self.assertEquals(str(res[0]["revision"]), "1")
# Check in remote db
attrs = ["description", "sambaBadPasswordCount", "sambaNextRid",
"revision"]
res = self.samba3.db.search(dn2, scope=SCOPE_BASE, attrs=attrs)
self.assertEquals(len(res), 1)

@ -1069,7 +1069,7 @@ revision: 2
self.assertTrue(not "nextRid" in res[0])
self.assertEquals(str(res[0]["revision"]), "2")
# Check in remote db
attrs = ["description", "sambaBadPasswordCount", "sambaNextRid",
"revision"]
res = self.samba3.db.search(dn2, scope=SCOPE_BASE, attrs=attrs)
self.assertEquals(len(res), 1)

@ -1102,8 +1102,8 @@ revision: 2
self.assertEquals(str(res[0]["revision"]), "2")
# Check in remote db
dn2 = self.samba3.dn("cn=toast")
res = self.samba3.db.search(dn2, scope=SCOPE_BASE,
attrs=["description", "sambaBadPasswordCount", "sambaNextRid",
"revision"])
self.assertEquals(len(res), 1)
self.assertEquals(str(res[0].dn), dn2)
@ -29,14 +29,14 @@ class NtACLCmdSysvolTestCase(SambaToolCmdTest):

def test_ntvfs(self):
(result, out, err) = self.runsubcmd("ntacl", "sysvolreset",
"--use-ntvfs")
self.assertCmdSuccess(result)
self.assertEquals(out,"","Shouldn't be any output messages")
self.assertIn("Please note that POSIX permissions have NOT been changed, only the stored NT ACL", err)

def test_s3fs(self):
(result, out, err) = self.runsubcmd("ntacl", "sysvolreset",
"--use-s3fs")

self.assertCmdSuccess(result)

@ -44,7 +44,7 @@ class NtACLCmdSysvolTestCase(SambaToolCmdTest):
self.assertEquals(out,"","Shouldn't be any output messages")

def test_ntvfs_check(self):
(result, out, err) = self.runsubcmd("ntacl", "sysvolreset",
"--use-ntvfs")
self.assertCmdSuccess(result)
self.assertEquals(out,"","Shouldn't be any output messages")

@ -56,7 +56,7 @@ class NtACLCmdSysvolTestCase(SambaToolCmdTest):
self.assertEquals(out,"","Shouldn't be any output messages")

def test_s3fs_check(self):
(result, out, err) = self.runsubcmd("ntacl", "sysvolreset",
"--use-s3fs")

self.assertCmdSuccess(result)
@ -54,7 +54,7 @@ class SamDBTestCase(TestCaseInTempDir):

hostname="foo"
domain="EXAMPLE"
dnsdomain="example.com"
serverrole="domain controller"
policyguid_dc = DEFAULT_DC_POLICY_GUID

@ -65,10 +65,10 @@ class SamDBTestCase(TestCaseInTempDir):
self.lp = param.LoadParm()
self.lp.load(smbconf)

names = guess_names(lp=self.lp, hostname=hostname,
domain=domain, dnsdomain=dnsdomain,
serverrole=serverrole,
domaindn=self.domaindn, configdn=configdn,
schemadn=schemadn)

paths = provision_paths_from_lp(self.lp, names.dnsdomain)
@ -18,7 +18,7 @@
"""Tests for samba.upgradeprovision."""

import os
from samba.upgradehelpers import (usn_in_range, dn_sort,
get_diff_sddls, update_secrets,
construct_existor_expr)

@ -48,7 +48,7 @@ class XattrTests(TestCase):
tempf = self._tmpfilename()
open(tempf, 'w').write("empty")
try:
samba.xattr_native.wrap_setxattr(tempf, "user.unittests",
ndr_pack(ntacl))
except IOError:
raise TestSkipped("the filesystem where the tests are runned do not support XATTR")
@ -883,9 +883,9 @@ Please fix this account before attempting to upgrade again
s4_passdb.add_sam_account(userdata[username])
if username in uids:
add_ad_posix_idmap_entry(result.samdb, userdata[username].user_sid, uids[username], "ID_TYPE_UID", logger)
if (username in homes) and (homes[username] != None) and \
(username in shells) and (shells[username] != None) and \
(username in pgids) and (pgids[username] != None):
if (username in homes) and (homes[username] is not None) and \
(username in shells) and (shells[username] is not None) and \
(username in pgids) and (pgids[username] is not None):
add_posix_attrs(samdb=result.samdb, sid=userdata[username].user_sid, name=username, nisdomain=domainname.lower(), xid_type="ID_TYPE_UID", home=homes[username], shell=shells[username], pgid=pgids[username], logger=logger)

logger.info("Adding users to groups")
@ -252,9 +252,9 @@ def newprovision(names, creds, session, smbconf, provdir, logger):
invocationid=names.invocation, adminpass=names.adminpass,
krbtgtpass=None, machinepass=None, dnspass=None, root=None,
nobody=None, wheel=None, users=None,
serverrole="domain controller",
backend_type=None, ldapadminpass=None, ol_mmr_urls=None,
slapd_path=None,
dom_for_fun_level=names.domainlevel, dns_backend=dns_backend,
useeadb=True, use_ntvfs=True)
@ -864,7 +864,7 @@ def findprovisionrange(samdb, basedn):
minutestamp =_glue.nttime2unix(o.originating_change_time)/60
hash_ts = hash_id.get(str(o.originating_invocation_id))

if hash_ts == None:
if hash_ts is None:
ob = {}
ob["min"] = o.originating_usn
ob["max"] = o.originating_usn

@ -873,7 +873,7 @@ def findprovisionrange(samdb, basedn):
hash_ts = {}
else:
ob = hash_ts.get(minutestamp)
if ob == None:
if ob is None:
ob = {}
ob["min"] = o.originating_usn
ob["max"] = o.originating_usn

@ -935,12 +935,12 @@ def print_provision_ranges(dic, limit_print, dest, samdb_path, invocationid):

for k in kept_record:
obj = hash_ts[k]
if obj.get("skipped") == None:
if obj.get("skipped") is None:
ldif = "%slastProvisionUSN: %d-%d;%s\n" % (ldif, obj["min"],
obj["max"], id)

if ldif != "":
if dest == None:
if dest is None:
dest = "/tmp"

file = tempfile.mktemp(dir=dest, prefix="usnprov", suffix=".ldif")