core/various: python3 compat, prepare for python2 -> python3
see https://review.gluster.org/#/c/19788/, https://review.gluster.org/#/c/19871/, https://review.gluster.org/#/c/19952/, and https://review.gluster.org/#/c/20104/ https://review.gluster.org/#/c/20162/ This patch changes uses of map() and raise(), and a few cases of print() that were overlooked in the prior patch that fixed print. Note: Fedora packaging guidelines require explicit shebangs, so popular practices like #!/usr/bin/env python and #!/usr/bin/python are not allowed; they must be #!/usr/bin/python2 or #!/usr/bin/python3 Note: Selected small fixes from 2to3 utility. Specifically apply, basestring, funcattrs, idioms, numliterals, set_literal, types, urllib, zip, map, and raise have already been applied. Also version agnostic imports for urllib, cpickle, socketserver, _thread, queue, etc., suggested by Aravinda in https://review.gluster.org/#/c/19767/1 Note: these 2to3 fixes report no changes are necessary: asserts, buffer, exec, execfile, exitfunc, filter, getcwdu, intern, itertools, metaclass, methodattrs, ne, next, nonzero, operator, paren, raw_input, reduce, reload, renames, repr, standarderror, sys_exc, throw, tuple_params, xreadlines. Change-Id: Id62ea491e4ab5dd390075c5c6d9d889cf6f9da27 updates: #411 Signed-off-by: Kaleb S. KEITHLEY <kkeithle@redhat.com>
This commit is contained in:
parent
d788cc59b1
commit
ee75c5abc4
@ -38,7 +38,7 @@ def get_param(names, types):
|
||||
# Convert two separate tuples to one of (name, type) sub-tuples.
|
||||
as_tuples = list(zip(types, names))
|
||||
# Convert each sub-tuple into a "type name" string.
|
||||
as_strings = map(string.join, as_tuples)
|
||||
as_strings = list(map(string.join, as_tuples))
|
||||
# Join all of those into one big string.
|
||||
return string.join(as_strings, ",\n\t")
|
||||
|
||||
@ -58,7 +58,7 @@ def generate(tmpl, name, table):
|
||||
#Args are (var1, var2,...)
|
||||
sdict["@WIND_ARGS@"] = string.join(w_arg_names, ", ")
|
||||
sdict["@UNWIND_ARGS@"] = string.join(u_arg_names, ", ")
|
||||
sdict["@ERROR_ARGS@"] = string.join(map(get_error_arg, u_arg_types), ", ")
|
||||
sdict["@ERROR_ARGS@"] = string.join(list(map(get_error_arg, u_arg_types)), ", ")
|
||||
sdict["@WIND_PARAMS@"] = get_param(w_arg_names, w_arg_types)
|
||||
sdict["@UNWIND_PARAMS@"] = get_param(u_arg_names, u_arg_types)
|
||||
sdict["@FUNC_PARAMS@"] = get_param(fn_arg_names, fn_arg_types)
|
||||
|
@ -58,11 +58,11 @@ def load (path):
|
||||
continue
|
||||
if text[0] == "volume":
|
||||
if xlator:
|
||||
raise RuntimeError, "nested volume definition"
|
||||
raise RuntimeError("nested volume definition")
|
||||
xlator = Translator(text[1])
|
||||
continue
|
||||
if not xlator:
|
||||
raise RuntimeError, "text outside volume definition"
|
||||
raise RuntimeError("text outside volume definition")
|
||||
if text[0] == "type":
|
||||
xlator.xl_type = text[1]
|
||||
continue
|
||||
@ -78,9 +78,9 @@ def load (path):
|
||||
last_xlator = xlator
|
||||
xlator = None
|
||||
continue
|
||||
raise RuntimeError, "unrecognized keyword %s" % text[0]
|
||||
raise RuntimeError("unrecognized keyword %s" % text[0])
|
||||
if xlator:
|
||||
raise RuntimeError, "unclosed volume definition"
|
||||
raise RuntimeError("unclosed volume definition")
|
||||
return all_xlators, last_xlator
|
||||
|
||||
def generate (graph, last, stream=sys.stdout):
|
||||
|
@ -12,6 +12,7 @@
|
||||
later), or the GNU General Public License, version 2 (GPLv2), in all
|
||||
cases as published by the Free Software Foundation.
|
||||
'''
|
||||
from __future__ import print_function
|
||||
import os, sys, re
|
||||
from stat import *
|
||||
import subprocess
|
||||
@ -51,17 +52,17 @@ epilog_msg='''
|
||||
|
||||
def print_msg(log_type, path, xattr_dict = {}, stbuf = "", dir_size = None):
|
||||
if log_type == QUOTA_VERBOSE:
|
||||
print '%-24s %-60s\nxattr_values: %s\n%s\n' % ("Verbose", path , xattr_dict, stbuf)
|
||||
print('%-24s %-60s\nxattr_values: %s\n%s\n' % ("Verbose", path , xattr_dict, stbuf))
|
||||
elif log_type == QUOTA_META_ABSENT:
|
||||
print '%-24s %-60s\n%s\n' % ("Quota-Meta Absent", path , xattr_dict)
|
||||
print('%-24s %-60s\n%s\n' % ("Quota-Meta Absent", path , xattr_dict))
|
||||
elif log_type == QUOTA_SIZE_MISMATCH:
|
||||
print "mismatch"
|
||||
print("mismatch")
|
||||
if dir_size is not None:
|
||||
print '%24s %60s %12s %12s' % ("Size Mismatch",path , xattr_dict['contri_size'],
|
||||
dir_size)
|
||||
print('%24s %60s %12s %12s' % ("Size Mismatch", path , xattr_dict['contri_size'],
|
||||
dir_size))
|
||||
else:
|
||||
print '%-24s %-60s %-12i %-12i' % ("Size Mismatch",path , xattr_dict['contri_size'],
|
||||
stbuf.st_size)
|
||||
print('%-24s %-60s %-12i %-12i' % ("Size Mismatch", path , xattr_dict['contri_size'],
|
||||
stbuf.st_size))
|
||||
|
||||
def size_differs_lot(s1, s2):
|
||||
'''
|
||||
@ -119,12 +120,12 @@ def fix_xattr(file_name, mark_dirty):
|
||||
if mnt_path is None:
|
||||
return
|
||||
if mark_dirty:
|
||||
print "MARKING DIRTY: " + file_name
|
||||
print("MARKING DIRTY: " + file_name)
|
||||
out = subprocess.check_output (["/usr/bin/setfattr", "-n",
|
||||
"trusted.glusterfs.quota.dirty",
|
||||
"-v", IS_DIRTY, file_name])
|
||||
rel_path = os.path.relpath(file_name, brick_path)
|
||||
print "stat on " + mnt_path + "/" + rel_path
|
||||
print("stat on " + mnt_path + "/" + rel_path)
|
||||
stbuf = os.lstat(mnt_path + "/" + rel_path)
|
||||
|
||||
obj_fix_count += 1
|
||||
@ -175,7 +176,7 @@ def get_quota_xattr_brick(dpath):
|
||||
xattr_dict['version'] = xattr_version
|
||||
else:
|
||||
if xattr_version != xattr_dict['version']:
|
||||
print "Multiple xattr version found"
|
||||
print("Multiple xattr version found")
|
||||
|
||||
|
||||
cur_parent = xattr_key.split(".")[3]
|
||||
@ -280,7 +281,7 @@ def walktree(t_dir, hard_link_dict):
|
||||
if S_ISDIR(stbuf.st_mode):
|
||||
# It's a directory, recurse into it
|
||||
if entry == '.glusterfs':
|
||||
print "skipping " + pathname
|
||||
print("skipping " + pathname)
|
||||
continue
|
||||
descendent_hardlinks = {}
|
||||
subtree_size = walktree(pathname, descendent_hardlinks)
|
||||
@ -317,7 +318,7 @@ def walktree(t_dir, hard_link_dict):
|
||||
|
||||
else:
|
||||
# Unknown file type, print a message
|
||||
print 'Skipping %s, due to file mode' % pathname
|
||||
print('Skipping %s, due to file mode' % (pathname))
|
||||
|
||||
if t_dir not in aggr_size:
|
||||
aggr_size[t_dir] = 0
|
||||
@ -372,7 +373,7 @@ if __name__ == '__main__':
|
||||
else:
|
||||
walktree(brick_path, hard_link_dict)
|
||||
|
||||
print "Files verified : " + str(file_count)
|
||||
print "Directories verified : " + str(dir_count)
|
||||
print("Files verified : " + str(file_count))
|
||||
print("Directories verified : " + str(dir_count))
|
||||
if mnt_path is not None:
|
||||
print "Objects Fixed : " + str(obj_fix_count)
|
||||
print("Objects Fixed : " + str(obj_fix_count))
|
||||
|
@ -99,11 +99,11 @@ def load (path):
|
||||
continue
|
||||
if text[0] == "volume":
|
||||
if xlator:
|
||||
raise RuntimeError, "nested volume definition"
|
||||
raise RuntimeError("nested volume definition")
|
||||
xlator = Translator(text[1])
|
||||
continue
|
||||
if not xlator:
|
||||
raise RuntimeError, "text outside volume definition"
|
||||
raise RuntimeError("text outside volume definition")
|
||||
if text[0] == "type":
|
||||
xlator.type = text[1]
|
||||
continue
|
||||
@ -119,9 +119,9 @@ def load (path):
|
||||
last_xlator = xlator
|
||||
xlator = None
|
||||
continue
|
||||
raise RuntimeError, "unrecognized keyword %s" % text[0]
|
||||
raise RuntimeError("unrecognized keyword %s" % text[0])
|
||||
if xlator:
|
||||
raise RuntimeError, "unclosed volume definition"
|
||||
raise RuntimeError("unclosed volume definition")
|
||||
return all_xlators, last_xlator
|
||||
|
||||
def generate (graph, last, stream=sys.stdout):
|
||||
@ -157,7 +157,7 @@ def push_filter (graph, old_xl, filt_type, opts={}):
|
||||
|
||||
def delete (graph, victim):
|
||||
if len(victim.subvols) != 1:
|
||||
raise RuntimeError, "attempt to delete non-unary translator"
|
||||
raise RuntimeError("attempt to delete non-unary translator")
|
||||
for xl in graph.itervalues():
|
||||
while xl.subvols.count(victim):
|
||||
i = xl.subvols.index(victim)
|
||||
|
@ -1248,7 +1248,7 @@ class GMasterChangelogMixin(GMasterCommon):
|
||||
if done:
|
||||
xtl = (int(change.split('.')[-1]) - 1, 0)
|
||||
self.upd_stime(xtl)
|
||||
map(self.changelog_done_func, changes)
|
||||
list(map(self.changelog_done_func, changes))
|
||||
self.archive_and_purge_changelogs(changes)
|
||||
|
||||
# Reset Data counter after sync
|
||||
@ -1263,7 +1263,7 @@ class GMasterChangelogMixin(GMasterCommon):
|
||||
if tries == gconf.get("max-rsync-retries"):
|
||||
logging.error(lf('changelogs could not be processed '
|
||||
'completely - moving on...',
|
||||
files=map(os.path.basename, changes)))
|
||||
files=list(map(os.path.basename, changes))))
|
||||
|
||||
# Reset data counter on failure
|
||||
self.status.dec_value("data", self.files_in_batch)
|
||||
@ -1273,7 +1273,7 @@ class GMasterChangelogMixin(GMasterCommon):
|
||||
if done:
|
||||
xtl = (int(change.split('.')[-1]) - 1, 0)
|
||||
self.upd_stime(xtl)
|
||||
map(self.changelog_done_func, changes)
|
||||
list(map(self.changelog_done_func, changes))
|
||||
self.archive_and_purge_changelogs(changes)
|
||||
break
|
||||
# it's either entry_ops() or Rsync that failed to do it's
|
||||
@ -1284,7 +1284,7 @@ class GMasterChangelogMixin(GMasterCommon):
|
||||
# again.
|
||||
# TODO: remove entry retries when it's gets fixed.
|
||||
logging.warn(lf('incomplete sync, retrying changelogs',
|
||||
files=map(os.path.basename, changes)))
|
||||
files=list(map(os.path.basename, changes))))
|
||||
|
||||
# Reset the Data counter before Retry
|
||||
self.status.dec_value("data", self.files_in_batch)
|
||||
|
@ -720,14 +720,14 @@ def get_subs (names, types, cbktypes=None):
|
||||
# Convert two separate tuples to one of (name, type) sub-tuples.
|
||||
as_tuples = list(zip(types,names))
|
||||
# Convert each sub-tuple into a "type name" string.
|
||||
as_strings = map(string.join,as_tuples)
|
||||
as_strings = list(map(string.join,as_tuples))
|
||||
# Join all of those into one big string.
|
||||
sdict["@LONG_ARGS@"] = string.join(as_strings,",\n\t")
|
||||
# So much more readable than string.join(map(string.join,zip(...))))
|
||||
sdict["@ERROR_ARGS@"] = string.join(map(get_error_arg,types),", ")
|
||||
sdict["@ERROR_ARGS@"] = string.join(list(map(get_error_arg,types)),", ")
|
||||
if cbktypes is not None:
|
||||
sdict["@CBK_ERROR_ARGS@"] = string.join(map(
|
||||
get_error_arg,cbktypes),", ")
|
||||
sdict["@CBK_ERROR_ARGS@"] = string.join(list(map(
|
||||
get_error_arg,cbktypes)),", ")
|
||||
return sdict
|
||||
|
||||
def generate (tmpl, name, subs):
|
||||
|
@ -1,5 +1,6 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
|
||||
@ -97,22 +98,22 @@ utime_setattr_ops = ['setattr', 'fsetattr']
|
||||
def gen_defaults():
|
||||
for name in ops:
|
||||
if name in utime_ops:
|
||||
print generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs)
|
||||
print generate(FOPS_COMMON_TEMPLATE, name, fop_subs)
|
||||
print(generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs))
|
||||
print(generate(FOPS_COMMON_TEMPLATE, name, fop_subs))
|
||||
if name in utime_read_op:
|
||||
print generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs)
|
||||
print generate(FOPS_READ_TEMPLATE, name, fop_subs)
|
||||
print(generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs))
|
||||
print(generate(FOPS_READ_TEMPLATE, name, fop_subs))
|
||||
if name in utime_write_op:
|
||||
print generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs)
|
||||
print generate(FOPS_WRITE_TEMPLATE, name, fop_subs)
|
||||
print(generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs))
|
||||
print(generate(FOPS_WRITE_TEMPLATE, name, fop_subs))
|
||||
if name in utime_setattr_ops:
|
||||
print generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs)
|
||||
print generate(FOPS_SETATTR_TEMPLATE, name, fop_subs)
|
||||
print(generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs))
|
||||
print(generate(FOPS_SETATTR_TEMPLATE, name, fop_subs))
|
||||
|
||||
for l in open(sys.argv[1], 'r').readlines():
|
||||
if l.find('#pragma generate') != -1:
|
||||
print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
|
||||
print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
|
||||
gen_defaults()
|
||||
print "/* END GENERATED CODE */"
|
||||
print("/* END GENERATED CODE */")
|
||||
else:
|
||||
print l[:-1]
|
||||
print(l[:-1])
|
||||
|
@ -1,5 +1,6 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
|
||||
@ -22,13 +23,13 @@ utime_ops = ['fallocate', 'zerofill', 'opendir', 'readlink', 'mknod', 'mkdir',
|
||||
def gen_defaults():
|
||||
for name, value in ops.iteritems():
|
||||
if name in utime_ops:
|
||||
print generate(OP_FOP_TEMPLATE, name, fop_subs)
|
||||
print(generate(OP_FOP_TEMPLATE, name, fop_subs))
|
||||
|
||||
|
||||
for l in open(sys.argv[1], 'r').readlines():
|
||||
if l.find('#pragma generate') != -1:
|
||||
print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
|
||||
print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
|
||||
gen_defaults()
|
||||
print "/* END GENERATED CODE */"
|
||||
print("/* END GENERATED CODE */")
|
||||
else:
|
||||
print l[:-1]
|
||||
print(l[:-1])
|
||||
|
Loading…
Reference in New Issue
Block a user