# repos-cmp/repos_cmp/repos.py

# This module does not use str: use bytes everywhere.
import collections
import graphlib
import itertools
import logging
import re
import time

import rpm

from repos_cmp import colors
from repos_cmp import lists
from repos_cmp import rpm_ffi

LOG = logging.getLogger(__name__)


def _src_name(source):
    if source.endswith(b'.src.rpm'):
        return source.rsplit(b'-', 2)[0]
    return source
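
# Illustrative (not executed): with the usual <name>-<version>-<release>
# source naming,
#   _src_name(b'foo-1.2-alt1.src.rpm') -> b'foo'
#   _src_name(b'foo') -> b'foo'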


class Dependency(collections.namedtuple(
        'Dependency', ['name', 'flags', 'version'])):

    HEADER_TRIPLETS = {
        'require': (rpm.RPMTAG_REQUIRENAME,
                    rpm.RPMTAG_REQUIREFLAGS,
                    rpm.RPMTAG_REQUIREVERSION),
        'provide': (rpm.RPMTAG_PROVIDENAME,
                    rpm.RPMTAG_PROVIDEFLAGS,
                    rpm.RPMTAG_PROVIDEVERSION)
    }

    RPMSENSE_COMPARE = (rpm.RPMSENSE_EQUAL
                        | rpm.RPMSENSE_GREATER
                        | rpm.RPMSENSE_LESS)

    @classmethod
    def from_names(cls, names):
        for name in names:
            yield cls(name, 0, None)

    @classmethod
    def from_header(cls, header, kind):
        tp, tf, tv = cls.HEADER_TRIPLETS[kind]
        triplets = zip(header[tp], header[tf], header[tv])
        for name, flags, version in triplets:
            if not name.startswith(b'rpmlib('):
                yield cls(name, flags, version or None)
        if kind == 'provide':
            for name in header[rpm.RPMTAG_FILENAMES]:
                yield cls(name, 0, None)
            yield cls(header[rpm.RPMTAG_NAME],
                      rpm.RPMSENSE_EQUAL,
                      header.format('%{EVR}:%{DISTTAG}').encode('utf-8'))

    @classmethod
    def wrap(cls, other):
        if isinstance(other, cls):
            return other
        if isinstance(other, bytes):
            return cls(other, 0, None)
        raise ValueError("Don't know how to make %s from %s"
                         % (cls, repr(other)))

    def is_provide_for(self, other):
        if self.name != other.name:
            return False
        if self.version is None or other.version is None:
            return True
        return bool(rpm_ffi.ranges_overlap(
            self.name, self.version, self.flags,
            other.name, other.version, other.flags))

    def is_setversion(self):
        return self.version and self.version.startswith(b'set:')

    def pretty_str(self):
        if not self.flags and not self.version:
            return '{}[ANY]'.format(self.name.decode())
        if self.is_setversion():
            return '{}[set:<>]'.format(self.name.decode())
        return '{}[{} {}]'.format(
            self.name.decode(), self.flags,
            self.version.decode() if self.version else None)
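
# Illustrative (not executed): flags == 0 and version is None mean "any
# version", so such a dependency matches every provide with the same name:
#   Dependency(b'foo', 0, None).pretty_str() == 'foo[ANY]'
#   Dependency.wrap(b'foo').is_provide_for(Dependency(b'foo', 0, None)) is True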


class Source:

    def __init__(self, name, epoch, version, release):
        self.name = name
        self.epoch = epoch
        self.version = version
        self.release = release
        self.requires = set()
        self.bin_names = set()

    @classmethod
    def from_header(cls, header):
        '''Initialize Source from rpm.header corresponding to SRPM'''
        src = cls(header[rpm.RPMTAG_NAME],
                  header[rpm.RPMTAG_EPOCH],
                  header[rpm.RPMTAG_VERSION],
                  header[rpm.RPMTAG_RELEASE])
        src.requires.update(Dependency.from_header(header, 'require'))
        return src

    @property
    def source_rpm(self):
        return b'%s-%s-%s.src.rpm' % (
            self.name, self.version, self.release)

    def __repr__(self):
        return 'Source[{} {} {} {}]'.format(
            self.name, self.epoch, self.version, self.release)


class Binary:

    def __init__(self, name, epoch, version, release, source_rpm):
        self.name = name
        self.epoch = epoch
        self.version = version
        self.release = release
        self.source_rpm = source_rpm
        self.source_name = source_rpm.rsplit(b'-', 2)[0]
        self.requires = set()
        self.provides = set()

    @classmethod
    def from_header(cls, header):
        '''Initialize Binary from rpm.header corresponding to a binary RPM'''
        pkg = cls(header[rpm.RPMTAG_NAME],
                  header[rpm.RPMTAG_EPOCH],
                  header[rpm.RPMTAG_VERSION],
                  header[rpm.RPMTAG_RELEASE],
                  header[rpm.RPMTAG_SOURCERPM])
        pkg.requires.update(Dependency.from_header(header, 'require'))
        pkg.provides.update(Dependency.from_header(header, 'provide'))
        return pkg

    def __repr__(self):
        return 'Binary[{} {} {} {}]'.format(
            self.name, self.epoch, self.version, self.release)


class Chroot:
    '''Set of binary packages'''

    def __init__(self):
        self.provides = collections.defaultdict(set)
        self.binaries = dict()

    def add(self, binary):
        name = binary.name
        if name in self.binaries:
            LOG.error("Duplicate binaries: %s %s",
                      binary, self.binaries[name])
        self.binaries[name] = binary
        for prov in binary.provides:
            self.provides[prov.name].add(prov)

    def unmets(self):
        for b in self.binaries.values():
            for dep in b.requires:
                if not self.is_provided(dep):
                    yield dep

    def is_provided(self, dep):
        return any(p.is_provide_for(dep)
                   for p in self.provides.get(dep.name, ()))

    def copy(self):
        copy = Chroot()
        # copy the provide sets too: add() mutates them in place,
        # so a shallow dict copy would let a derived chroot pollute this one
        copy.provides = collections.defaultdict(
            set, ((name, provs.copy())
                  for name, provs in self.provides.items()))
        copy.binaries = self.binaries.copy()
        return copy
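
# Illustrative (not executed): a Chroot is a provides-indexed set of binaries:
#   ch = Chroot()
#   ch.add(some_binary)           # 'some_binary' is a hypothetical Binary
#   ch.is_provided(dep)           # checks the provides index
#   unmet = list(ch.unmets())     # requires that nothing in the set provides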


# repository components considered by default
DEFAULT_COMPONENTS = ('classic', 'checkinstall')

PKG_INIT_LIST = tuple(Dependency.from_names(
    (b'setup', b'filesystem', b'rpm', b'fakeroot')))
PKG_BUILD_LIST = tuple(Dependency.from_names(
    (b'basesystem', b'rpm-build', b'kernel-headers-common',
     b'sisyphus_check', b'time')))
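
# PKG_INIT_LIST seeds the minimal base chroot and PKG_BUILD_LIST the build
# chroot (see Repository.update_indexes below); presumably they mirror the
# hasher-style install/build base package sets.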


class Repository:

    def __init__(self, repo_name, sources, binaries, bits):
        self._config = None
        self.name = repo_name
        self.sources = sources
        self.binaries = binaries
        self.bits = bits
        self.reverse_prov = {}  # name -> [(provide, binary)]
        self.addon = None
        self.reverse_prov_addon = {}
        self.base_chroot = None
        self.build_chroot = None
        self.timestamp = None
        self.update_indexes()

    def copy(self, new_name):
        return Repository(new_name,
                          dict(self.sources),
                          dict(self.binaries),
                          self.bits)

    def copy_with_addon(self, new_name, binaries):
        extended = self.copy(new_name)
        extended.addon = binaries
        extended.update_indexes()
        return extended

    @classmethod
    def _reverse_provides_index(cls, binaries):
        if not binaries:
            return {}
        rprov = collections.defaultdict(list)
        for b in binaries.values():
            for p in b.provides:
                rprov[p.name].append((p, b))
        return dict(rprov)

    def update_indexes(self):
        self.reverse_prov = self._reverse_provides_index(self.binaries)
        self.reverse_prov_addon = self._reverse_provides_index(self.addon)
        self.base_chroot = None
        self.build_chroot = None
        self.base_chroot = self.chroot_for(PKG_INIT_LIST, build=False)
        self.build_chroot = self.chroot_for(PKG_BUILD_LIST, build=False)
        self.timestamp = time.time()
    @classmethod
    def load(cls, repo_name, path, arch, components=DEFAULT_COMPONENTS):
        src_list, bin_list = lists.read_pkglist_heders_for_repo(
            path, arch, components)
        # XXX: not the best heuristic
        bits = 64 if any('64' in a for a in arch) else 32
        sources = {}
        for header in src_list:
            name = header[rpm.RPMTAG_NAME]
            if name not in sources:
                sources[name] = Source.from_header(header)
        binaries = {}
        for header in bin_list:
            name = header[rpm.RPMTAG_NAME]
            found = binaries.get(name)
            if not found:
                binaries[name] = Binary.from_header(header)
            else:
                LOG.warning('Duplicate binaries: %s %s', found,
                            header.format('%{NAME}-%{EVR}:%{DISTTAG}'))
        return cls(repo_name, sources, binaries, bits=bits)
    @classmethod
    def load_from_config(cls, repo_name, config):
        return cls.load(repo_name, config['path'], config['arch'])

    def binaries_from(self, *source_names):
        '''Return the binary packages built from the given sources'''
        sources = set(source_names)
        for b in self.binaries.values():
            if b.source_name in sources:
                yield b

    def _providers(self, dependency, index):
        for item in index.get(dependency.name, ()):
            if item[0].is_provide_for(dependency):
                yield item

    def providers(self, dependency):
        yield from self._providers(dependency, self.reverse_prov)

    def addon_providers(self, dependency):
        if self.addon:
            yield from self._providers(dependency, self.reverse_prov_addon)

    def source_for_dependency(self, dependency):
        # the reverse index is keyed by provide name, not by the Dependency
        return [binary.source_rpm
                for _prov, binary in self.reverse_prov.get(dependency.name, ())]
    def _unmets(self, packages):
        result = []
        for pkg in packages:
            for dep in pkg.requires:
                if not any(self.providers(dep)):
                    result.append((pkg, dep))
        return result

    def unmets(self):
        return self._unmets(self.binaries.values())

    def build_unmets(self):
        return self._unmets(self.sources.values())

    def delete_sources(self, *source_names):
        bin_names = [b.name for b in self.binaries_from(*source_names)]
        for source in source_names:
            del self.sources[source]
        for name in bin_names:
            del self.binaries[name]
        self.update_indexes()
    def chroot_for(self, requires, prefer=(), build=True):
        prefer = list(prefer)
        # rudimentary pkgpriorities
        prefer.append(b'altlinux-release-sisyphus')
        base_chroot = self.build_chroot if build else self.base_chroot
        if base_chroot is not None:
            chroot = base_chroot.copy()
        else:
            chroot = Chroot()
        stack = list(reversed(list(requires)))
        while stack:
            dep = stack.pop()
            if chroot.is_provided(dep):
                continue
            LOG.debug("looking up %s", dep)
            providers = set(item[1] for item in self.providers(dep))
            if not providers:
                providers = set(item[1] for item in self.addon_providers(dep))
            if not providers:
                LOG.debug("Unmet dependency: %s", dep)
                continue
            if len(providers) > 1:
                preferred = [p for p in providers
                             if p.source_name in prefer]
                if preferred and len(preferred) < len(providers):
                    providers = preferred
                if len(providers) > 1:
                    LOG.debug('Ambiguous provide: %s (%s)', dep, providers)
            # choose an (almost) random provider
            p = providers.pop()
            chroot.add(p)
            stack.extend(p.requires)
        return chroot
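
# Illustrative use of Repository (path and arch values are hypothetical):
#   repo = Repository.load('sisyphus', '/space/repo', ('x86_64', 'noarch'))
#   chroot = repo.chroot_for(Dependency.from_names((b'gcc-c++',)))
# gives a Chroot holding gcc-c++, its dependency closure, and the build base.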


class TripletReport:

    def __init__(self, bases, triplets):
        '''Make a report.

        Arguments:
            bases: iterable over repositories
                (used later for validity check)
            triplets: iterable over triplets
                (kind, dependency, binary)
        '''
        self._bases = dict((b.name, b.timestamp) for b in bases)
        self.triplets = list(triplets)

    def is_valid(self, current_bases):
        '''Check if this report is outdated'''
        for b in current_bases:
            our_ts = self._bases.get(b.name)
            if our_ts and our_ts < b.timestamp:
                return False
        return True

    def sources_for_deps(self):
        return set(p.source_name for k, d, p in self.triplets)

    def sources_for_build(self):
        return set(p.source_name
                   for k, d, p in self.triplets
                   if _is_build_kind(k))
    def basic_format(self, title=None, to=None):
        """Format a report

        Arguments:
            title: the title of the report, if any
            to: optional callable applied to the formatted text
                (e.g. pydoc.pager)
        """
        lines = []
        if title:
            lines.append('\n== %s ==\n' % title)
        by_source = collections.defaultdict(list)
        for item in self.triplets:
            by_source[item[2].source_rpm].append(item)
        for that_source in sorted(by_source):
            lines.append(that_source.decode())
            by_knp = collections.defaultdict(list)
            for kind, dep, provider in by_source[that_source]:
                by_knp[(kind, provider.name.decode())].append(dep.pretty_str())
            for (kind, pname), pdep in sorted(by_knp.items()):
                lines.append(f"\t {kind} {pname} {' '.join(pdep)}")
        result = '\n'.join(lines)
        if to:
            return to(result)
        else:
            return result
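
# Illustrative basic_format() output (shape only; names are hypothetical):
#
#   == title ==
#
#   foo-1.0-alt1.src.rpm
#        0000-BR libbar libbar.so.1[ANY]
#
# i.e. one "<kind> <provider-name> <deps...>" line per provider under each
# source RPM.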


_SPECIAL_DEPS = (
    # ABI parts:
    b'(GCC_', b'(CXXABI_', b'(GLIBC_',
    # elf loaders:
    b'/lib64/ld', b'/lib/ld', b'ld-linux')

_GHC_HASHDEP = re.compile(rb'ghc[0-9\.]+\(.+-[^-]+-[^-]+\)')
_GHC_HASHLIB = re.compile(rb'^lib.*-ghc[0-9.]+.so')


def _from_64bit_dep(dep):
    if dep.name.endswith(b'()(64bit)'):
        new_name = dep.name[:-9]
    elif dep.name.endswith(b'(64bit)'):
        new_name = dep.name[:-7]
    elif b'/lib64/' in dep.name:
        new_name = dep.name.replace(b'/lib64/', b'/lib/')
    else:
        return dep
    return Dependency(new_name, dep.flags, dep.version)
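
# Illustrative (not executed):
#   _from_64bit_dep(Dependency(b'libfoo.so.1()(64bit)', 0, None)).name
#       -> b'libfoo.so.1'
#   _from_64bit_dep(Dependency(b'/lib64/ld-linux.so.2', 0, None)).name
#       -> b'/lib/ld-linux.so.2'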


_BUILDREQ = '0000-BR'
_BUILD_UNMET = '0001-UN'


def _is_build_kind(kind):
    return kind in (_BUILDREQ, _BUILD_UNMET)


class BuildReporter:

    def __init__(self, from_repo, to_repo,
                 ignore=(), prefer=(),
                 caching=True, ignore_file_deps=False):
        self.from_repo = from_repo
        self.to_repo = to_repo
        self.ignore = ignore
        self.prefer = prefer
        self._cache = {} if caching else None
        self.ignore_file_deps = ignore_file_deps
        self._warned_on = set()

    def _warn(self, text, subject):
        if subject in self._warned_on:
            return
        LOG.warning(text, subject)
        self._warned_on.add(subject)

    def report(self, source_name):
        if self._cache is not None:
            cached_result = self._cache.get(source_name)
            if cached_result:
                return cached_result
        report = self._raw_report(source_name)
        if self._cache is not None:
            self._cache[source_name] = report
        return report

    def _same_source(self, source_name):
        source_a = self.from_repo.sources.get(source_name)
        source_b = self.to_repo.sources.get(source_name)
        return (source_a and source_b
                and source_a.epoch == source_b.epoch
                and source_a.version == source_b.version
                and source_a.release == source_b.release)
    def _raw_report(self, source_name):
        """Build a report for one source, by name

        Returns an iterable over tuples (kind, dep, provider).
        """
        LOG.debug("Building build report for %s", source_name)
        # XXX: assumes from_repo is x86_64
        assert self.from_repo.bits == 64
        translate = (self.to_repo.bits != 64)
        from_source = self.from_repo.sources[source_name]
        # find what's missing for the build chroot in to_repo
        chroot = self.to_repo.chroot_for(
            from_source.requires, prefer=self.prefer)
        # distinguish unmets from build requirements that are missing
        buildreqs = set(from_source.requires)
        missing_reqs = ((_BUILDREQ, dep) for dep in buildreqs
                        if not chroot.is_provided(dep))
        chroot_unmets = ((_BUILD_UNMET, dep) for dep in chroot.unmets()
                         if dep not in buildreqs)
        # add requirements of the binary packages this source package produces
        # in the from_repo; if needed, translate them from 64 to 32 bits
        binreqs = (
            (b.name.decode(), (_from_64bit_dep(dep) if translate else dep))
            for b in self.from_repo.binaries_from(source_name)
            for dep in b.requires)
        result = set()
        for kind, dep in itertools.chain(missing_reqs, chroot_unmets, binreqs):
            # skip some platform-specific stuff
            if any(x in dep.name for x in _SPECIAL_DEPS):
                continue
            # ignore file dependencies if we're asked to
            if self.ignore_file_deps and not _is_build_kind(kind):
                if dep.name.startswith(b'/'):
                    continue
            # skip dependencies already present in to_repo
            if any(self.to_repo.providers(dep)):
                continue
            # skip inter-sub-package dependencies
            if any(p.source_name == source_name
                   for _d, p in self.from_repo.providers(dep)):
                continue
            # set-versions may be platform-dependent:
            # double-check them only when both repos have the same source
            if (dep.is_setversion()
                    or _GHC_HASHLIB.match(dep.name)
                    or _GHC_HASHDEP.fullmatch(dep.name)):
                if any(self._same_source(p.source_name)
                       for _d, p in self.from_repo.providers(dep)):
                    continue
            # OK, it's a genuinely missing dependency:
            # look up its providers
            providers = set(provider
                            for _dep, provider in self.from_repo.providers(dep)
                            if provider.source_name not in self.ignore)
            if not providers:
                self._warn("No providers for %s", dep)
            elif len(providers) > 1:
                preferred = [p for p in providers
                             if p.source_name in self.prefer]
                if preferred and len(preferred) < len(providers):
                    providers = preferred
                if len(providers) > 1:
                    self._warn("Multiple providers for %s", dep)
            result.update((kind, dep, provider) for provider in providers)
        return TripletReport([self.from_repo, self.to_repo], result)
    def recursive_closure(self, source_names, stop_names=()):
        seen = list(source_names)
        queue = list(source_names)
        stop_names = set(stop_names)
        while queue:
            cur = queue.pop()
            missing = self.report(cur).sources_for_deps()
            unseen = missing.difference(seen)
            seen.extend(unseen)
            # don't recur into packages from stop_names
            queue.extend(unseen - stop_names)
        return seen
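
    # Illustrative: recursive_closure([b'foo']) returns [b'foo'] plus the
    # source names of everything that would have to be built first, in
    # discovery order ('foo' is hypothetical).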
    def format_reports(self, source_names, stop_names=(), to=None):
        stop_names = set(stop_names)
        result = []
        for name in source_names:
            result.append(f"\n== {name.decode()} ==")
            in_to = self.to_repo.sources.get(name)
            srpm = in_to.source_rpm.decode() if in_to else 'NONE'
            result.append(f"{self.to_repo.name} has {srpm}\n")
            if name in stop_names:
                result.append('not going deeper (stop_names)')
            else:
                result.append(self.report(name).basic_format().strip())
        return to('\n'.join(result)) if to else '\n'.join(result)


def missing_rating(build_reporter, ignore=None):
    result = collections.defaultdict(set)
    ignore = re.compile(ignore).search if ignore else lambda x: False
    sources = (s for s in build_reporter.from_repo.sources
               if s not in build_reporter.to_repo.sources
               and b'gost' not in s
               and not s.startswith(b'kernel-')
               and not ignore(s))
    for name in sources:
        try:
            rbr = build_reporter.recursive_closure([name])
        except Exception:
            LOG.error("Failed to build recursive build report "
                      "for %s", name.decode(), exc_info=True)
        else:
            for other_name in rbr:
                if other_name != name and not ignore(name):
                    result[other_name].add(name)
    return dict(result)


def format_missing_rating(rating, to=None):
    rr = sorted(rating.items(), key=lambda x: (-len(x[1]), x[0]))
    result = []
    for name, where in rr:
        result.append(f"{name.decode()}: {len(where)}")
        result.append(" " + b' '.join(sorted(where)).decode())
    return to('\n'.join(result)) if to else '\n'.join(result)


def recursive_build_report(from_repo, to_repo, *source_names,
                           ignore=(), ignore_sort=(), summary=True):
    requested_source_names = set(_src_name(s) for s in source_names)
    reporter = BuildReporter(from_repo, to_repo, ignore=ignore,
                             prefer=requested_source_names)
    names = reporter.recursive_closure(requested_source_names)
    reports = [reporter.format_reports(names), '']
    if not summary:
        return '\n'.join(reports)
    # expand the build requires with the pkg reqs of dependencies:
    full_req = {}
    for source in names:
        chroot = from_repo.chroot_for(from_repo.sources[source].requires,
                                      prefer=names)
        dep_sources = set()
        for b in chroot.binaries.values():
            if b.source_name not in names:
                continue
            if (b.source_name, source) in ignore_sort:
                continue
            dep_sources.add(b.source_name)
        full_req[source] = dep_sources
    cycles = []
    while True:
        try:
            order = list(graphlib.TopologicalSorter(full_req).static_order())
        except graphlib.CycleError as ex:
            LOG.debug("Cycle detected: %s", ex)
            cycle = ex.args[1]
            cycles.append(cycle)
            # break the cycle and retry
            full_req[cycle[1]].remove(cycle[0])
        else:
            break
    def letter(source):
        if source in requested_source_names:
            return ' '
        to_source = to_repo.sources.get(source)
        if not to_source:
            return '+'  # new
        from_source = from_repo.sources.get(source)
        if not from_source:
            return '?'
        return 'u'

    reports.append('\n== SUMMARY ==\n')
    for source in order:
        br = reporter.report(source)
        before = br.sources_for_build()
        after = br.sources_for_deps() - before
        reports.append('{} {:60} {} :: {}'.format(
            letter(source),
            from_repo.sources[source].source_rpm.decode(),
            b' '.join(sorted(before)).decode(),
            b' '.join(sorted(after)).decode()))
    if cycles:
        reports.append('\nCycles:')
        reports.extend('\t%s' % ' '.join(source.decode() for source in c)
                       for c in cycles)
    special = set()
    for source_name in order:
        source = to_repo.sources.get(source_name)
        if not source:
            continue
        if b'.mips' in source.release or b'.rv64' in source.release:
            special.add(source.source_rpm)
    if special:
        reports.append('\nThe following packages are special:')
        reports.extend('\t%s' % source.decode()
                       for source in sorted(special))
    return '\n'.join(reports)


def _unmets_without(repo, kind, *source_names):
    next_repo = repo.copy('removal_test_' + repo.name)
    next_repo.delete_sources(*source_names)
    if kind in ('bin', 'full'):
        old_unmets = set(repo.unmets())
        bin_unmets = (u for u in next_repo.unmets() if u not in old_unmets)
    else:
        bin_unmets = ()
    if kind in ('build', 'full'):
        old_build_unmets = set(repo.build_unmets())
        build_unmets = (u for u in next_repo.build_unmets()
                        if u not in old_build_unmets)
    else:
        build_unmets = ()
    return itertools.chain(
        (('unmet', d, b) for b, d in bin_unmets),
        (('BUILD', d, b) for b, d in build_unmets))


def unmets_report(repo):
    return TripletReport([repo], itertools.chain(
        (('unmet', d, b) for b, d in repo.unmets()),
        (('BUILD', d, b) for b, d in repo.build_unmets())))


def who_cares(repo, *source_names):
    '''What new unmets would deleting the given packages produce?'''
    return TripletReport([repo], _unmets_without(repo, 'full', *source_names))


def diff_deps(base_repo, repo, *source_names):
    base_unmets = set(x[2].source_name for x in
                      _unmets_without(base_repo, 'bin', *source_names)
                      if x[2].source_name in repo.sources)
    repo_unmets = set(x[2].source_name for x in
                      _unmets_without(repo, 'bin', *source_names))
    return b'\n'.join(itertools.chain(
        (b'== ' + base_repo.name.encode() + b' ==',),
        sorted(base_unmets - repo_unmets),
        (b'\n== ' + repo.name.encode() + b' ==',),
        sorted(repo_unmets - base_unmets),  # don't ignore extras
    )).decode()


_GOOD_EXTRAS_PREFIXES = (b'kernel-', b'u-boot', b'riscv', b'fu540', b'opensbi')


def extras_to_remove(base_repo, repo, ignore=()):
    extras = set(repo.sources) - set(base_repo.sources) - set(ignore)
    # filter out certain packages that must be left alone
    filtered_extras = set(
        name for name in extras
        if (not any(name.startswith(p) for p in _GOOD_EXTRAS_PREFIXES)
            and b'jpp' not in repo.sources[name].release))
    while True:
        LOG.info('Checking %d filtered extras', len(filtered_extras))
        leave_alone = set()
        for kind, req, pkg in _unmets_without(repo, 'full', *filtered_extras):
            for b in repo.binaries_from(*filtered_extras):
                if b.source_name in leave_alone:
                    continue
                if any(p.is_provide_for(req) for p in b.provides):
                    leave_alone.add(b.source_name)
        if leave_alone:
            filtered_extras -= leave_alone
        else:
            break
    for name in sorted(filtered_extras):
        print(name.decode())


def read_list(filename):
    '''Read a list of names from a file'''
    with open(filename, 'rb') as f:
        return f.read().split()


def write_list(data, filename):
    '''Write a list of names to a file'''
    with open(filename, 'wb') as f:
        f.write(b'\n'.join(data))
        f.write(b'\n')


def candidates_to_build(build_reporter,
                        ignore=(),
                        ignore_jpp=False):
    '''Find the sources from from_repo that can probably be built
    for to_repo immediately.
    '''
    ignore = set(ignore)
    ignore.update(build_reporter.to_repo.sources)
    for name, src in build_reporter.from_repo.sources.items():
        if name in ignore:
            continue
        if ignore_jpp and b'jpp' in src.release:
            continue
        if build_reporter.report(name).triplets:
            continue
        yield name
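
# Illustrative (repository objects are hypothetical): list the sources that
# exist only in from_repo and whose build report is empty, i.e. everything
# they need is already present in to_repo:
#   todo = sorted(candidates_to_build(BuildReporter(sisyphus, riscv)))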


def _format_evr(pkg):
    if not pkg:
        return 'MISSING'
    if pkg.epoch is None:
        return (b'%s-%s' % (pkg.version, pkg.release)).decode()
    return '%s:%s-%s' % (pkg.epoch,
                         pkg.version.decode(),
                         pkg.release.decode())


def colorize_diff(base_repo, target_repo, include=None, exclude=None):
    names = set(base_repo.sources).union(target_repo.sources)
    result = []
    for name in sorted(names):
        base_pkg = base_repo.sources.get(name)
        target_pkg = target_repo.sources.get(name)
        color = colors.colorize_pair(base_pkg, target_pkg)
        result.append('\t'.join((color.name, name.decode(),
                                 _format_evr(base_pkg),
                                 _format_evr(target_pkg))))
    if include:
        pi = re.compile(include)
        result = (t for t in result if pi.search(t))
    if exclude:
        pe = re.compile(exclude)
        result = (t for t in result if not pe.search(t))
    return '\n'.join(result)


def colorize_stats(base_repo, target_repo):
    names = set(base_repo.sources).union(target_repo.sources)
    cnt = collections.Counter()
    for name in names:
        base_pkg = base_repo.sources.get(name)
        target_pkg = target_repo.sources.get(name)
        color = colors.colorize_pair(base_pkg, target_pkg)
        cnt[color] += 1
    result = []
    for color in colors.Color:
        result.append('%10s: %5s // %s' % (
            color.name, cnt[color], color.help))
    result.append('%10s: %5s' % ('[TOTAL]', len(target_repo.sources)))
    return '\n'.join(result)


def load_repos(config):
    repos = {}
    for name, cfg in config['repos'].items():
        LOG.info('Loading %s', name)
        repos[name] = Repository.load_from_config(name, cfg)
    for name, cfg in config.get('addons', {}).items():
        LOG.info('Loading %s', name)
        base = repos[cfg['base']]
        addon = Repository.load_from_config(name, cfg)
        repos[name] = base.copy_with_addon(name, addon.binaries)
        del addon
    LOG.info("Loading DONE")
    return repos
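
# Illustrative config shape expected by load_repos (all values hypothetical):
#   {'repos': {'sisyphus': {'path': '/ALT/Sisyphus',
#                           'arch': ('x86_64', 'noarch')}},
#    'addons': {'sisyphus+ext': {'base': 'sisyphus',
#                                'path': '/ALT/ext',
#                                'arch': ('x86_64', 'noarch')}}}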


if __name__ == '__main__':
    from repos_cmp.utils import interactive_setup
    from pydoc import pager  # noqa

    CONFIG = interactive_setup()
    globals().update(load_repos(CONFIG))