port-compare/port_stats/interactive.py
Ivan A. Melnikov 8af9e1d7b1 Make sure NONE packages are properly colorized

Previously, BY_NAME did not include packages that were absent from both
repositories. This meant that functions like `gspi` ignored them, which is
inconvenient because it hides packages that were never built (like new
kernel flavors) or were already deleted.

This commit addresses that as follows:
- colorize.colorize() now takes an additional argument, an explicit list
  of package names to colorize;
- for the interactive console, this list is formed from both repositories
  plus all the package names found in PACKAGE_TASKS.

2019-12-04 11:10:07 +04:00

"""Interactive python shell with port_stats
What's in the box:
* loading:
- load() -- (re)loads the task and packages information
- use(repo) -- switch to using <repo>
- load(repo) -- load stuff and start using the given repo instead of default
* display packages:
- pt(name) -- prints information about the package tasks
- spt(name) -- prints information about the package tasks, short version
- spi(name) -- prints the short package information
- gspi(pattern), g -- prints the short package information for
all the packages with name matching pattern ('g' for 'grep')
- list_spi(pkgs_list) -- same as gspi, but for python lists
of strings instead of patterns
* display tasks:
- ti(num) -- prints information about the task #num
- fti(num) -- prints information about the task #num, full version (a dict)
- fresh(num) -- display subtasks that would update packages
* display logs:
- logs(num, [idx]), l -- print logs (logs/events.X.Y.log) for task #num
- build_log(num, [subtask_id], [arch], [kind]) -- print the build
logs for a subtask of given task
* reports:
- stats() -- packages statistics
- update_days() -- all the packages that need to be rebuild,
sorted by days pending.
- display_tasks([infos], [num], [min_id], [include], [exclude]) --
display tasks (all from current repo by default)
- next_tasks([min_id], [include], [exclude]) -- display
recycler's tasks that would update packages
* to= targets:
- dump
- append_to
- pager
* other:
- doc(): print this message.
You can also enjoy autocompletion with <TAB>.
"""
from __future__ import print_function
import atexit
import json
import logging
import os
import re
import readline
import rlcompleter # noqa
import sys
import time
from pydoc import pager

from port_stats import colorize
from port_stats import lists
from port_stats import logs as m_logs
from port_stats import reports
from port_stats import tasks
from port_stats import utils

LOG = logging.getLogger('port_stats.interactive')

def get_task(taskid, task_dict):
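    """Return task <taskid> from task_dict; raise ValueError if not found."""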
    try:
        return task_dict[int(taskid)]
    except Exception:
        raise ValueError('Task not found: %s' % taskid)


# {{{ to= targets

def dump(string):
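    """to= target: just print the string."""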
    print(string)

def append_to(filename):
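    """to= target factory: append the string to <filename>."""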
    def _write(string):
        if os.path.exists(filename):
            LOG.info('File %s already exists, appending', filename)
        with open(filename, 'a') as f:
            f.write(string)
            f.write('\n')
    return _write

# }}}

# The Interactive Part
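# Module-level state shared by the helpers below:
#   CONFIG        -- parsed JSON configuration (see interactive_setup())
#   CURRENT       -- the active entry of CONFIG['colorize']
#   REPOS         -- source package lists per repository
#   TASKS         -- task info keyed by task id
#   PACKAGE_TASKS -- tasks of the current repo, grouped by package name
#   UPDATE_TIMES  -- last update time per package
#   BY_NAME       -- package name -> (color, base version, new version)
#   BY_COLOR      -- packages grouped by color (used by stats())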
CONFIG = None
CURRENT = None
REPOS = {}
TASKS = {}
PACKAGE_TASKS = {}
UPDATE_TIMES = {}
BY_NAME = {}
BY_COLOR = {}

def repo_tasks(task_dict=None):
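    """Iterate over tasks of the current 'new' repository, sorted by id."""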
    for _, t in sorted(TASKS.iteritems()):
        if t['repo'] == CURRENT['new']:
            yield t

def use(repo=None):
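    """Switch the interactive session to <repo> (default: keep/first entry).

    Rebuilds PACKAGE_TASKS, UPDATE_TIMES, BY_NAME and BY_COLOR.  The
    package list passed to colorize.colorize() is taken from both
    repositories plus everything in PACKAGE_TASKS, so packages missing
    from both repos (NONE) are colorized as well.
    """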
    global PACKAGE_TASKS, UPDATE_TIMES, BY_NAME, BY_COLOR, CURRENT
    if repo is not None:
        try:
            CURRENT = (c for c in CONFIG['colorize']
                       if c['name'] == repo).next()
        except StopIteration:
            raise ValueError('Unknown repo: ' + repo)
    elif CURRENT is None:
        CURRENT = CONFIG['colorize'][0]
    LOG.info("Updating data structures for %s...", CURRENT['name'])
    sys.ps1 = CURRENT['name'] + '>>> '
    PACKAGE_TASKS = tasks.tasks_by_package(repo_tasks())
    UPDATE_TIMES = tasks.last_update_times(repo_tasks())
    packages = (frozenset(PACKAGE_TASKS)
                .union(p.name for p in REPOS[CURRENT['base']])
                .union(p.name for p in REPOS[CURRENT['new']]))
    BY_NAME, BY_COLOR = colorize.colorize(REPOS[CURRENT['base']],
                                          REPOS[CURRENT['new']],
                                          packages)
    LOG.info("DONE")

def load(repo=None):
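    """(Re)load tasks and repository source lists, then use(repo)."""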
    global TASKS, REPOS
    TASKS = dict((t['taskid'], t)
                 for t in tasks.load_tasks(CONFIG['tasks'], cache=TASKS))
    REPOS = lists.read_all_srclists(CONFIG['repos'])
    LOG.info("Got %s tasks, %s repositories", len(TASKS), len(REPOS))
    for name in sorted(REPOS):
        LOG.info(" %s: %s srpms", name, len(REPOS[name]))
    use(repo)

def pt(pkg, to=print):
    # ts = sorted(PACKAGE_TASKS[pkg], key=lambda t: int(t['taskid']))
    display_tasks(PACKAGE_TASKS[pkg], to=to)

def spt(pkg, to=print):
    to(tasks.format_tasks_short(PACKAGE_TASKS.get(pkg)))

def spi(pkg, to=print):
    to(reports.package_one_line(pkg, BY_NAME, PACKAGE_TASKS))

def update_days(to=pager):
    to(reports.update_days(BY_NAME, PACKAGE_TASKS, UPDATE_TIMES, time.time()))

def _spi_by_predicate(pred, colors):
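    """One-line reports for package names matching pred() and colors."""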
    colors = colors or colorize.COLORS
    return '\n'.join(
        reports.package_one_line(name, BY_NAME, PACKAGE_TASKS)
        for name in sorted(BY_NAME)
        if pred(name) and BY_NAME[name][0] in colors)

def gspi(pattern, colors=None, to=print):
    p = re.compile(pattern)
    to(_spi_by_predicate(p.search, colors))


g = gspi  # noqa

def list_spi(pkgs, colors=None, to=print):
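    """Like gspi(), but for a list (or whitespace-separated string) of names.

    Names not found in BY_NAME are listed separately.
    """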
    if isinstance(pkgs, basestring):
        pkgs = pkgs.split()
    pset = frozenset(pkgs)
    lines = _spi_by_predicate(pset.__contains__, colors)
    unknown = pset - frozenset(BY_NAME)
    if unknown:
        lines += "\nNot in repos:\n\t" + "\n\t".join(sorted(unknown))
    to(lines)

def _colorizer():
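    """Package colorizer built from BY_NAME (colorize.package_colorizer)."""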
    return colorize.package_colorizer(BY_NAME, 'NONE')

def ti(num, to=print):
    to(tasks.format_task(get_task(num, TASKS), _colorizer()))

def display_tasks(infos=None, num=None, min_id=None,
                  include=None, exclude=None, to=pager):
    infos = infos or repo_tasks()
    if min_id is not None:
        infos = (t for t in infos if t['taskid'] >= min_id)
    czr = _colorizer()
    infos = (tasks.format_task(t, czr) for t in infos)
    if include:
        pi = re.compile(include)
        infos = (t for t in infos if pi.search(t))
    if exclude:
        pe = re.compile(exclude)
        infos = (t for t in infos if not pe.search(t))
    infos = list(infos)
    total = len(infos)
    if num is not None:
        infos = infos[-num:]
    to("%s of %s infos\n\n%s" % (len(infos), total, '\n\n'.join(infos)))

def _fresh_subtasks(info):
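    """Select subtasks of <info> that would still update a package.

    Subtasks for packages with no version in the new repo are skipped;
    EXTRA packages are always interesting; the rest are checked with
    tasks.subtask_is_updating() (see the inline comments below).
    """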
    result = {}
    for subtaskid, subtask in info['subtasks'].items():
        p = tasks.subtask_package(subtask)
        color, bp, np = BY_NAME.get(p, ('MISSING', None, None))
        if not np:
            continue
        if color == 'EXTRA':
            result[subtaskid] = subtask
            continue
        # for outdated packages, check for newer versions
        # for SLATE packages, check for base repo version rebuilds
        rebuild_nevr = np if color in ('ORANGE', 'YELLOW') else bp
        strict = info['task_ctime'] < UPDATE_TIMES.get(p, 0)
        if tasks.subtask_is_updating(subtask, rebuild_nevr, strict):
            # subtask is interesting, let's save it
            result[subtaskid] = subtask
    return result

def fresh(num, color=True, to=print):
    subtasks = _fresh_subtasks(get_task(num, TASKS))
    if subtasks:
        clr = _colorizer() if color else None
        items = sorted((int(k), tasks.format_subtask(s, clr))
                       for k, s in subtasks.iteritems())
        to('\n'.join('%6d %s' % item for item in items))
    else:
        to('Nothing interesting in task %s' % num)

def next_tasks(min_id=None, include=None, exclude=None, to=pager):
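    """Display recycler tasks that are not DONE and have fresh subtasks."""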
    infos = (t for t in repo_tasks()
             if (t['state'] != 'DONE'
                 and t['owner'] == 'recycler'
                 and _fresh_subtasks(t)))
    display_tasks(infos, None, min_id, include, exclude, to)

def fti(num, to=print):
    to(utils.format_dict(get_task(num, TASKS), indent=True))

def _page_file(log_file, to):
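    """Read a log file (utf-8, errors replaced) and send it to <to>."""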
    with open(log_file, 'r') as f:
        log = f.read().decode('utf-8', errors='replace')
    to(log_file + ':\n\n' + log)

def logs(num, idx=-1, to=pager):
    log_file = m_logs.task_event_logs(get_task(num, TASKS))[idx]
    _page_file(log_file, to)


l = logs  # noqa

def _default_arch():
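    """First architecture configured for the current 'new' repository."""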
    return CONFIG['repos'][CURRENT['new']]['arch'][0]

def build_log(task_id, subtask_id=None, arch=None, kind='log', to=pager):
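    """Page a subtask build log.

    Defaults: the lowest subtask id, the first configured arch, kind 'log'.
    """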
    task_id = int(task_id)
    arch = (arch or _default_arch()).lower()
    info = get_task(task_id, TASKS)
    if subtask_id is None:
        subtask_id = min(utils.maybe_int(x) for x in info['subtasks'].keys())
    log_file = os.path.join(info['task_path'], 'build',
                            str(subtask_id), arch, kind)
    _page_file(log_file, to)


bl = build_log  # noqa

def stats(names=None, to=dump):
    total = len(names) if names else len(REPOS[CURRENT['new']])
    to(reports.color_totals(BY_COLOR, names, total, summary=True))

def doc(to=dump):
    to(__doc__)

def interactive_setup():
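    """Set up readline completion and history, logging, and load CONFIG."""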
    # Bind TAB to complete
    readline.parse_and_bind('tab:complete')
    # Set history file ~/.pythonhistory
    histfile = os.path.join(os.environ['HOME'], '.pythonhistory')
    # Attempt read of histfile
    try:
        readline.read_history_file(histfile)
    except IOError:
        pass
    # Write history file at shell exit
    atexit.register(readline.write_history_file, histfile)
    # Configure logging
    logging.basicConfig(
        format='%(asctime)s %(levelname)-5s %(name)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
        stream=sys.stderr, level=logging.INFO)
    config = sys.argv[1]
    LOG.info("Loading configuration file: %s", config)
    with open(config, 'r') as f:
        global CONFIG
        CONFIG = json.load(f)

if __name__ == '__main__':
    interactive_setup()
    doc()