Clean up the failed-builds.py script
* Use the Jenkins API instead of looking up every job.
* Unify the output function with Pystache
* Mostly follow pep8 guidelines and make the script more pythonic

Change-Id: Ia5e0061638732e293e94b3a8ad9e25a7db65cfa5
Signed-off-by: Nigel Babu <nigelb@redhat.com>
Reviewed-on: http://review.gluster.org/15034
Smoke: Gluster Build System <jenkins@build.gluster.org>
NetBSD-regression: NetBSD Build System <jenkins@build.gluster.org>
CentOS-regression: Gluster Build System <jenkins@build.gluster.org>
Reviewed-by: Jeff Darcy <jdarcy@redhat.com>
commit 8bb8e3d394
parent ddcf0183ad
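The main change, per the commit message, is that the script now asks the Jenkins JSON API for a batch of recent builds instead of fetching every job's console page. A minimal standalone sketch of that query pattern follows; it reuses the endpoint shape and field names that appear in the diff below, while the job path is only an example and error handling is omitted.

# Sketch only: fetch metadata for the first 100 builds of one Jenkins job.
import requests

BASE = 'https://build.gluster.org'
JOB = '/job/rackspace-regression-2GB-triggered/'  # example job path

# tree= restricts the reply to the fields the script needs;
# {0,100} selects the first 100 entries of allBuilds.
url = ''.join([
    BASE,
    JOB,
    'api/json?depth=1&tree=allBuilds'
    '[url,result,timestamp,builtOn]{0,100}'
])
builds = requests.get(url, verify=False).json().get('allBuilds', [])

for build in builds:
    # result is None while a build is still running
    print '{0} {1} {2}'.format(build['result'], build['builtOn'], build['url'])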
@@ -1,146 +1,179 @@
#!/usr/bin/python

import blessings
import HTMLParser
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import sys
import re
import argparse
from collections import defaultdict
from datetime import date, timedelta, datetime
from dateutil.parser import parse
from datetime import timedelta, datetime
from pystache import render

# This tool goes though the Gluster regression links and checks for failures

BASE='https://build.gluster.org'
TERM=blessings.Terminal()
MAX_BUILDS=1000
summary=defaultdict(list)
VERBOSE=None
total_builds=0
failed_builds=0
BASE = 'https://build.gluster.org'
TERM = blessings.Terminal()
MAX_BUILDS = 1000
summary = defaultdict(list)
VERBOSE = None

def process_failure (url, cut_off_date):
    global failed_builds
    text = requests.get(url,verify=False).text

def process_failure(url, node):
    text = requests.get(url, verify=False).text
    accum = []
    for t in text.split('\n'):
        if t.find("BUILD_TIMESTAMP=") != -1 and cut_off_date != None:
            build_date = parse (t, fuzzy=True)
            if build_date.date() < cut_off_date:
                return 1
        elif t.find("Result: FAIL") != -1:
            failed_builds=failed_builds+1
            if VERBOSE == True: print TERM.red + ('FAILURE on %s' % BASE+url) + TERM.normal
        if t.find("Result: FAIL") != -1:
            for t2 in accum:
                if VERBOSE == True: print t2.encode('utf-8')
                if VERBOSE:
                    print t2.encode('utf-8')
                if t2.find("Wstat") != -1:
                    test_case = re.search('\./tests/.*\.t',t2)
                    if test_case:
                        summary[test_case.group()].append(url)
            accum = []
        elif t.find("Result: PASS") != -1:
                    test_case = re.search('\./tests/.*\.t', t2)
                    if test_case:
                        summary[test_case.group()].append((url, node))
            accum = []
        elif t.find("cur_cores=/") != -1:
            summary["core"].append([t.split("/")[1]])
            summary["core"].append(url)
        else:
            accum.append(t)
    return 0

class FailureFinder (HTMLParser.HTMLParser):
    def __init__ (*args):
        apply(HTMLParser.HTMLParser.__init__,args)
        self = args[0]
        self.last_href = None
    def handle_starttag (self, tag, attrs):
        if tag == 'a':
            return self.is_a_tag (attrs)
        if tag == 'img':
            return self.is_img_tag (attrs)
    def is_a_tag (self, attrs):
        attrs_dict = dict(attrs)
        try:
            if attrs_dict['class'] != 'build-status-link':
                return
        except KeyError:
            return
        self.last_href = attrs_dict['href']
    def is_img_tag (self, attrs):
        if self.last_href == None:
            return
        attrs_dict = dict(attrs)
        try:
            if attrs_dict['alt'].find('Failed') == -1:
                return
        except KeyError:
            return
        process_failure(BASE+self.last_href, None)
        self.last_href = None

def main (url):
    parser = FailureFinder()
    text = requests.get(url,verify=False).text
    parser.feed(text)
def print_summary(failed_builds, total_builds, html=False):
    # All the templates
    count = [
        '{{failed}} of {{total}} regressions failed',
        '<p><b>{{failed}}</b> of <b>{{total}}</b> regressions failed</p>'
    ]
    regression_link = [
        '\tRegression Link: {{link}}\n'
        '\tNode: {{node}}',
        '<p> Regression Link: {{link}}</p>'
        '<p> Node: {{node}}</p>'
    ]
    component = [
        '\tComponent: {{comp}}',
        '<p> Component: {{comp}}</p>'
    ]
    failure_count = [
        ''.join([
            TERM.red,
            '{{test}} ; Failed {{count}} times',
            TERM.normal
        ]),
        (
            '<p><font color="red"><b>{{test}};</b> Failed <b>{{count}}'
            '</b> times</font></p>'
        )
    ]

def print_summary_html():
    print "<p><b>%d</b> of <b>%d</b> regressions failed</p>" % (failed_builds, total_builds)
    for k,v in summary.iteritems():
    template = 0
    if html:
        template = 1
    print render(
        count[template],
        {'failed': failed_builds, 'total': total_builds}
    )
    for k, v in summary.iteritems():
        if k == 'core':
            print "<p><font color='red'><b> Found cores :</b></font></p>"
            for cmp,lnk in zip(v[::2], v[1::2]):
                print "<p> Component: %s</p>" % (cmp)
                print "<p> Regression Link: %s</p>" % (lnk)
            print ''.join([TERM.red, "Found cores:", TERM.normal])
            for comp, link in zip(v[::2], v[1::2]):
                print render(component[template], {'comp': comp})
                print render(
                    regression_link[template],
                    {'link': link[0], 'node': link[1]}
                )
        else:
            print "<p><font color='red'><b> %s ;</b> Failed <b>%d</b> times</font></p>" % (k, len(v))
            for lnk in v:
                print "<p> Regression Link: <a href=\"%s\">%s</a></p>" % (lnk, lnk)
            print render(failure_count[template], {'test': k, 'count': len(v)})
            for link in v:
                print render(
                    regression_link[template],
                    {'link': link[0], 'node': link[1]}
                )

def print_summary():
    print "%d of %d regressions failed" % (failed_builds, total_builds)
    for k,v in summary.iteritems():
        if k == 'core':
            print TERM.red + "Found cores:" + TERM.normal
            for cmp,lnk in zip(v[::2], v[1::2]):
                print "\tComponent: %s" % (cmp)
                print "\tRegression Link: %s" % (lnk)
        else:
            print TERM.red + "%s ; Failed %d times" % (k, len(v)) + TERM.normal
            for lnk in v:
                print "\tRegression Link: %s" % (lnk)

def get_summary (build_id, cut_off_date, reg_link):
    global total_builds
    for i in xrange(build_id, build_id-MAX_BUILDS, -1):
        url=BASE+reg_link+str(i)+"/consoleFull"
        ret = process_failure(url, cut_off_date)
        if ret == 1:
            total_builds = build_id - i
            return
def get_summary(cut_off_date, reg_link):
    '''
    Get links to the failed jobs
    '''
    success_count = 0
    failure_count = 0
    for page in xrange(0, MAX_BUILDS, 100):
        build_info = requests.get(''.join([
            BASE,
            reg_link,
            'api/json?depth=1&tree=allBuilds'
            '[url,result,timestamp,builtOn]',
            '{{{0},{1}}}'.format(page, page+100)
        ]), verify=False).json()
        for build in build_info.get('allBuilds'):
            if datetime.fromtimestamp(build['timestamp']/1000) < cut_off_date:
                # stop when timestamp older than cut off date
                return failure_count, failure_count + success_count
            if build['result'] in [None, 'SUCCESS']:
                # pass when build is a success or ongoing
                success_count += 1
                continue
            if VERBOSE:
                print ''.join([
                    TERM.red,
                    'FAILURE on {0}'.format(build['url']),
                    TERM.normal
                ])
            url = ''.join([build['url'], 'consoleText'])
            failure_count += 1
            process_failure(url, build['builtOn'])
    return failure_count, failure_count + success_count

if __name__ == '__main__':
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    parser = argparse.ArgumentParser()
    parser.add_argument("get-summary")
    parser.add_argument("last_no_of_days", default=1, type=int, help="Regression summary of last number of days")
    parser.add_argument("regression_link", default="centos", nargs='+', help="\"centos\" | \"netbsd\" | any other regression link")
    parser.add_argument("--verbose", default="false", action="store_true", help="Print a detailed report of each test case that is failed")
    parser.add_argument("--html_report", default="false", action="store_true", help="Print a brief report of failed regressions in html format")
    args = parser.parse_args()
    num_days=args.last_no_of_days
    cut_off_date=date.today() - timedelta(days=num_days)
    VERBOSE = args.verbose
    for reg in args.regression_link:

def main(num_days, regression_link, html_report):
    cut_off_date = datetime.today() - timedelta(days=num_days)
    failure = 0
    total = 0
    for reg in regression_link:
        if reg == 'centos':
            reg_link = '/job/rackspace-regression-2GB-triggered/'
        elif reg == 'netbsd':
            reg_link = '/job/rackspace-netbsd7-regression-triggered/'
        else:
            reg_link = reg
        build_id = int(requests.get(BASE+reg_link+"lastBuild/buildNumber", verify=False).text)
        get_summary(build_id, cut_off_date, reg_link)
    if args.html_report == True:
        print_summary_html()
    else:
        print_summary()
        counts = get_summary(cut_off_date, reg_link)
        failure += counts[0]
        total += counts[1]
    print_summary(failure, total, html_report)


if __name__ == '__main__':
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    parser = argparse.ArgumentParser()
    parser.add_argument("get-summary")
    parser.add_argument(
        "last_no_of_days",
        default=1,
        type=int,
        help="Regression summary of last number of days"
    )
    parser.add_argument(
        "regression_link",
        default="centos",
        nargs='+',
        help="\"centos\" | \"netbsd\" | any other regression link"
    )
    parser.add_argument(
        "--verbose",
        default=False,
        action="store_true",
        help="Print a detailed report of each test case that is failed"
    )
    parser.add_argument(
        "--html-report",
        default=False,
        action="store_true",
        help="Print a brief report of failed regressions in html format"
    )
    args = parser.parse_args()
    VERBOSE = args.verbose
    main(
        num_days=args.last_no_of_days,
        regression_link=args.regression_link,
        html_report=args.html_report
    )
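For completeness, here is a small sketch of the Pystache-based output unification the commit message mentions: one data dictionary rendered through either a console template or an HTML template, with the index chosen by the --html-report flag. The two template strings are taken from the diff; the flag value and the counts are made-up examples.

from pystache import render

count = [
    '{{failed}} of {{total}} regressions failed',                       # console
    '<p><b>{{failed}}</b> of <b>{{total}}</b> regressions failed</p>',  # HTML
]

html = False                  # would come from args.html_report
template = 1 if html else 0   # index 0 = console, 1 = HTML
print render(count[template], {'failed': 3, 'total': 42})
# prints: 3 of 42 regressions failed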