0.5.5cnc1-alt3
- Introduced APT::Ignore-dpkg support and set this flag by default, to address #0002119.
parent e3ad0ef694
commit 3212342525
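The changelog entry above only names the new boolean option. As an illustration (the value shown is a placeholder and is not part of this commit), the default could be overridden in /etc/apt/apt.conf or via apt-get -o:

    // illustrative apt.conf fragment; only the option name comes from the changelog entry
    APT::Ignore-dpkg "false";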
@@ -1,4 +1,7 @@
copy: *.8
copy: *.conf
copy: *.patch
copy: *.po
copy: *.rsync
tar.bz2: apt
copy: rpmpriorities
copy: genbasedir
59 README.rsync Normal file
@@ -0,0 +1,59 @@
The apt-rsync package provides the 'rsync' method for APT.
This means that apt-get can download packages over the rsync protocol.
Operation through an HTTP proxy is supported.
The 'rsync' method requires the rsync package to be installed,
built with the patch that adds support for the "--apt-support" option.
In ALT Linux Sisyphus this is supported starting with rsync-2.5.5-alt3.

To use the 'rsync' method, add lines of the following form to
/etc/apt/sources.list:

rpm [alt] rsync://rsync.altlinux.ru/ALTLinux/Sisyphus i586 classic
rpm-src [alt] rsync://rsync.altlinux.ru/ALTLinux/Sisyphus i586 classic

The following parameters can be used for configuration
(in /etc/apt/apt.conf or on the command line):

Debug::rsync = "true|false"
	Enables debugging output.
	Default: false

Acquire::rsync::program
	Specifies the path to the rsync program.
	Default: /usr/bin/rsync

Acquire::rsync::Timeout
	Passes the "--timeout" option with the given value to rsync.
	Default: the option is not passed to rsync.

Acquire::rsync::proxy
Acquire::rsync::proxy::hostname
	Specifies that an HTTP proxy must be used. In that case the
	RSYNC_PROXY environment variable is set when rsync is invoked.
	When repositories live on different hosts, a proxy can be
	configured both globally for all hosts and per individual host.
	A per-host value overrides the global one.
	The special value "none" selects a direct connection.
	Format: hostname:port or "none"
	Example:

	Acquire::rsync::proxy="192.168.1.1:3128"
	Acquire::rsync::proxy::mirror.localdomain="none"

	The HTTP proxy "192.168.1.1:3128" will be used for all hosts
	except mirror.localdomain, which will use a direct connection.

Acquire::rsync::options::
	Allows passing any additional options to the rsync program.
	Examples:

	Acquire::rsync::options:: "-z";
	Acquire::rsync::options { "-z"; "--some-other-option"; };

	The first form can be used when all required options fit into a
	single string; when several separate options are needed, the
	second form must be used.
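As a convenience, the settings documented above can be collected into a single /etc/apt/apt.conf fragment. The sketch below is only an illustration: the option names come from this README, while every value is a placeholder.

    // hypothetical apt.conf fragment for the rsync method; adjust values to taste
    Debug::rsync "false";
    Acquire::rsync::program "/usr/bin/rsync";
    Acquire::rsync::Timeout "30";
    Acquire::rsync::proxy "192.168.1.1:3128";
    Acquire::rsync::proxy::mirror.localdomain "none";
    Acquire::rsync::options { "-z"; };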
@@ -1,21 +0,0 @@
|
||||
diff -k.orig -urN apt-0.3.19cnc32/cmdline/apt-cdrom.cc.orig apt-0.3.19cnc32/cmdline/apt-cdrom.cc
|
||||
--- apt-0.3.19cnc32/cmdline/apt-cdrom.cc.orig Thu Jan 4 23:26:14 2001
|
||||
+++ apt-0.3.19cnc32/cmdline/apt-cdrom.cc Mon Jan 22 17:22:32 2001
|
||||
@@ -723,7 +723,7 @@
|
||||
if (0)
|
||||
return _error->Error(_("Unable to locate any package files, perhaps this is not a Debian Disc"));
|
||||
else
|
||||
- return _error->Error(_("Unable to locate any package files, perhaps this is not a Conectiva Disc"));
|
||||
+ return _error->Error(_("Unable to locate any package files, perhaps this is not a ALT Linux Disc"));
|
||||
}
|
||||
// Check if the CD is in the database
|
||||
string Name;
|
||||
@@ -748,7 +748,7 @@
|
||||
if (_config->FindB("APT::CDROM::Rename",false) == true ||
|
||||
Name.empty() == true)
|
||||
{
|
||||
- cout << _("Please provide a name for this Disc, such as 'MyDistro 6.0 Disk 1'");
|
||||
+ cout << _("Please provide a name for this Disc, such as 'Spring 2001 Disk 1'");
|
||||
|
||||
while (1)
|
||||
{
|
@@ -1,11 +0,0 @@
|
||||
--- apt-0.3.19cnc52/buildlib/environment.mak.in.orig Thu Aug 2 01:31:58 2001
|
||||
+++ apt-0.3.19cnc52/buildlib/environment.mak.in Sun Aug 5 13:49:41 2001
|
||||
@@ -22,7 +22,7 @@
|
||||
|
||||
# Dep generation - this only works for gnu stuff
|
||||
GCC3DEP = @GCC3DEP@
|
||||
-INLINEDEPFLAG = -MD
|
||||
+#INLINEDEPFLAG = -MD
|
||||
|
||||
# Debian doc stuff
|
||||
DEBIANDOC_HTML = @DEBIANDOC_HTML@
|
@@ -1,11 +0,0 @@
|
||||
--- apt-0.3.19cnc53/apt-pkg/rpm/rpmpm.cc~ Tue Nov 13 20:32:08 2001
|
||||
+++ apt-0.3.19cnc53/apt-pkg/rpm/rpmpm.cc Fri Nov 16 19:10:19 2001
|
||||
@@ -402,7 +402,7 @@
|
||||
if (WIFEXITED(Status) == 0 || WEXITSTATUS(Status) != 0)
|
||||
{
|
||||
if (WIFSIGNALED(Status) != 0)
|
||||
- return _error->Error(_("Sub-process %s terminated by signal (%i)") ,Args[0], WTERMSIG(Status) );
|
||||
+ return _error->Error(_("Sub-process %s terminated by signal (%s)") ,Args[0], strsignal(WTERMSIG(Status)) );
|
||||
|
||||
if (WIFEXITED(Status) != 0)
|
||||
return _error->Error(_("Sub-process %s returned an error code (%u)"),Args[0],
|
@@ -25,9 +25,9 @@ Comments ?
|
||||
Stelian.
|
||||
|
||||
|
||||
--- apt-0.3.19cnc53/apt-pkg/algorithms.cc.orig Wed Nov 21 17:45:34 2001
|
||||
+++ apt-0.3.19cnc53/apt-pkg/algorithms.cc Wed Nov 21 17:46:12 2001
|
||||
@@ -454,6 +454,8 @@
|
||||
--- apt-0.5.4cnc9/apt-pkg/algorithms.cc.orig Wed Nov 21 17:45:34 2001
|
||||
+++ apt-0.5.4cnc9/apt-pkg/algorithms.cc Wed Nov 21 17:46:12 2001
|
||||
@@ -566,6 +566,8 @@
|
||||
{
|
||||
if (D->Type == pkgCache::Dep::Depends || D->Type == pkgCache::Dep::PreDepends)
|
||||
Scores[D.TargetPkg()->ID]++;
|
||||
|
@@ -1,18 +0,0 @@
|
||||
diff -ur apt-0.3.19cnc55~/apt-pkg/init.cc apt-0.3.19cnc55/apt-pkg/init.cc
|
||||
--- apt-0.3.19cnc55~/apt-pkg/init.cc Thu Aug 2 01:35:12 2001
|
||||
+++ apt-0.3.19cnc55/apt-pkg/init.cc Thu Mar 21 18:47:24 2002
|
||||
@@ -38,12 +38,9 @@
|
||||
Cnf.Set("Dir::State::status","/var/lib/dpkg/status");
|
||||
} else {
|
||||
Cnf.Set("Acquire::cdrom::mount", "/mnt/cdrom");
|
||||
- Cnf.Set("RPM::AllowedDupPkgs::","^kernel$");
|
||||
- Cnf.Set("RPM::AllowedDupPkgs::", "kernel-smp");
|
||||
- Cnf.Set("RPM::AllowedDupPkgs::", "kernel-enterprise");
|
||||
+ Cnf.Set("RPM::AllowedDupPkgs::", "^(NVIDIA_)?(kernel|alsa)[0-9]*($|-up|-smp|-secure|-custom|-enterprise|-BOOT|-tape|-aureal)");
|
||||
|
||||
- Cnf.Set("RPM::HoldPkgs::", "kernel-source");
|
||||
- Cnf.Set("RPM::HoldPkgs::", "kernel-headers");
|
||||
+ Cnf.Set("RPM::HoldPkgs::", "^(kernel|alsa)[0-9]*-source");
|
||||
|
||||
Cnf.Set("Dir::State::status","/var/lib/rpm/status");
|
||||
}
|
@@ -1,56 +0,0 @@
|
||||
--- apt-0.3.19cnc55~/apt-pkg/rpm/rpmpm.cc Wed Mar 6 20:17:13 2002
|
||||
+++ apt-0.3.19cnc55/apt-pkg/rpm/rpmpm.cc Thu Mar 21 22:35:05 2002
|
||||
@@ -287,14 +287,16 @@
|
||||
case OInstall:
|
||||
options = "-i";
|
||||
|
||||
- Args[n++] = "-i";
|
||||
+ Args[n++] = "-iv";
|
||||
|
||||
Args[n++] = "--replacepkgs";
|
||||
|
||||
if (noninteractive)
|
||||
Args[n++] = "--percent";
|
||||
- else
|
||||
+ else {
|
||||
Args[n++] = "-h";
|
||||
+ Args[n++] = "--fancypercent";
|
||||
+ }
|
||||
|
||||
if (_config->FindB("RPM::Force", false) == true)
|
||||
Args[n++] = "--force";
|
||||
@@ -309,8 +311,10 @@
|
||||
|
||||
if (noninteractive)
|
||||
Args[n++] = "--percent";
|
||||
- else
|
||||
+ else {
|
||||
Args[n++] = "-h";
|
||||
+ Args[n++] = "--fancypercent";
|
||||
+ }
|
||||
|
||||
if (_config->FindB("RPM::Force", false) == true)
|
||||
Args[n++] = "--force";
|
||||
@@ -330,6 +334,13 @@
|
||||
if (nodeps)
|
||||
Args[n++] = "--nodeps";
|
||||
|
||||
+ string cmd;
|
||||
+ for (unsigned i = 0; i < n; ++i)
|
||||
+ {
|
||||
+ if (cmd.length())
|
||||
+ cmd += ' ';
|
||||
+ cmd += Args[i];
|
||||
+ }
|
||||
|
||||
for (slist<char*>::iterator i = files->begin();
|
||||
i != files->end() && n < sizeof(Args);
|
||||
@@ -349,7 +360,7 @@
|
||||
return true;
|
||||
}
|
||||
|
||||
- cout << _("Executing RPM (")<<options<<")..." << endl;
|
||||
+ cout << _("Executing RPM (") << cmd << ")..." << endl;
|
||||
|
||||
cout << flush;
|
||||
clog << flush;
|
@@ -1,95 +0,0 @@
|
||||
diff -ur apt-0.3.19cnc55/methods/makefile apt-0.3.19cnc53/methods/makefile
|
||||
--- apt-0.3.19cnc55/methods/makefile Fri Aug 10 18:03:50 2001
|
||||
+++ apt-0.3.19cnc53/methods/makefile Thu Mar 21 19:36:03 2002
|
||||
@@ -58,8 +58,8 @@
|
||||
include $(PROGRAM_H)
|
||||
|
||||
# The rsh method
|
||||
-#PROGRAM=rsh
|
||||
-#SLIBS = -lapt-pkg $(SOCKETLIBS) $(RPMLIBS)
|
||||
-#LIB_MAKES = apt-pkg/makefile
|
||||
-#SOURCE = rsh.cc
|
||||
-#include $(PROGRAM_H)
|
||||
+PROGRAM=rsh
|
||||
+SLIBS = -lapt-pkg $(SOCKETLIBS) $(RPMLIBS)
|
||||
+LIB_MAKES = apt-pkg/makefile
|
||||
+SOURCE = rsh.cc
|
||||
+include $(PROGRAM_H)
|
||||
diff -ur apt-0.3.19cnc55/methods/rsh.cc apt-0.3.19cnc53/methods/rsh.cc
|
||||
--- apt-0.3.19cnc55/methods/rsh.cc Fri Nov 30 23:34:13 2001
|
||||
+++ apt-0.3.19cnc53/methods/rsh.cc Thu Mar 21 20:05:08 2002
|
||||
@@ -271,7 +271,7 @@
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
bool RSHConn::Get(const char *Path,FileFd &To,unsigned long Resume,
|
||||
- Hashes &Hash,bool &Missing, unsigned long Size)
|
||||
+ MD5Summation &MD5,bool &Missing, unsigned long Size)
|
||||
{
|
||||
Missing = false;
|
||||
|
||||
@@ -284,7 +284,7 @@
|
||||
return false;
|
||||
|
||||
if (Resume != 0) {
|
||||
- if (Hash.AddFD(To.Fd(),Resume) == false) {
|
||||
+ if (MD5.AddFD(To.Fd(),Resume) == false) {
|
||||
_error->Errno("read","Problem hashing file");
|
||||
return false;
|
||||
}
|
||||
@@ -323,7 +323,7 @@
|
||||
}
|
||||
MyLen += Res;
|
||||
|
||||
- Hash.Add(Buffer,Res);
|
||||
+ MD5.Add(Buffer,Res);
|
||||
if (To.Write(Buffer,Res) == false)
|
||||
{
|
||||
Close();
|
||||
@@ -428,7 +428,7 @@
|
||||
}
|
||||
|
||||
// Open the file
|
||||
- Hashes Hash;
|
||||
+ MD5Summation MD5;
|
||||
{
|
||||
FileFd Fd(Itm->DestFile,FileFd::WriteAny);
|
||||
if (_error->PendingError() == true)
|
||||
@@ -441,7 +441,7 @@
|
||||
FailFd = Fd.Fd();
|
||||
|
||||
bool Missing;
|
||||
- if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Res.Size) == false)
|
||||
+ if (Server->Get(File,Fd,Res.ResumePoint,MD5,Missing,Res.Size) == false)
|
||||
{
|
||||
Fd.Close();
|
||||
|
||||
@@ -462,7 +462,7 @@
|
||||
}
|
||||
|
||||
Res.LastModified = FailTime;
|
||||
- Res.TakeHashes(Hash);
|
||||
+ Res.MD5Sum = MD5.Result();
|
||||
|
||||
// Timestamp
|
||||
struct utimbuf UBuf;
|
||||
diff -ur apt-0.3.19cnc55/methods/rsh.h apt-0.3.19cnc53/methods/rsh.h
|
||||
--- apt-0.3.19cnc55/methods/rsh.h Fri Nov 30 23:34:13 2001
|
||||
+++ apt-0.3.19cnc53/methods/rsh.h Thu Mar 21 20:05:08 2002
|
||||
@@ -12,7 +12,7 @@
|
||||
|
||||
#include <string>
|
||||
#include <apt-pkg/strutl.h>
|
||||
-#include <apt-pkg/hashes.h>
|
||||
+#include <apt-pkg/md5.h>
|
||||
#include <apt-pkg/acquire-method.h>
|
||||
#include <apt-pkg/fileutl.h>
|
||||
|
||||
@@ -44,7 +44,7 @@
|
||||
bool Size(const char *Path,unsigned long &Size);
|
||||
bool ModTime(const char *Path, time_t &Time);
|
||||
bool Get(const char *Path,FileFd &To,unsigned long Resume,
|
||||
- Hashes &Hash,bool &Missing, unsigned long Size);
|
||||
+ MD5Summation &MD5,bool &Missing, unsigned long Size);
|
||||
|
||||
RSHConn(URI Srv);
|
||||
~RSHConn();
|
@@ -1,489 +0,0 @@
|
||||
--- apt-0.3.19cnc55~/tools/genbasedir Wed Mar 6 20:17:13 2002
|
||||
+++ apt-0.3.19cnc55/tools/genbasedir Thu Mar 21 22:15:32 2002
|
||||
@@ -69,99 +69,103 @@
|
||||
# Language setting to generate a consistent pkglist.
|
||||
#
|
||||
|
||||
-usage="\
|
||||
-Usage: genbasedir [<options>] <distribution> <comp1> [<comp2> ... <compN>]\n\
|
||||
-Options:\n\
|
||||
- -s, --sign Generate and sign hashfile\n\
|
||||
- --hashonly Do hashfile stuff only\n\
|
||||
- --listonly Generate pkglists/srclists and quit\n\
|
||||
- --bz2only Generate only compressed lists\n\
|
||||
- --topdir=dir Top directory of repository\n\
|
||||
- --progress Show progress bars for genpkglist/gensrclist\n\
|
||||
- --updateinfo=file Update information file\n"
|
||||
+PROG="${0##*/}"
|
||||
|
||||
basedir=.
|
||||
signature=0
|
||||
listonly=0
|
||||
hashonly=0
|
||||
-updateinfo=""
|
||||
-mapi=0
|
||||
-bz2only=0
|
||||
+updateinfo=
|
||||
+mapi=
|
||||
+gpguid=
|
||||
+topdir=
|
||||
+bz2only=k
|
||||
progress=
|
||||
|
||||
# bloat is necessary for non-Conectiva distros, at least RH,
|
||||
# because they use file dependencies with a non-predictable
|
||||
# heuristic. So we can't strip-off paths that will probably
|
||||
# never appear in dependencies.
|
||||
-bloat=""
|
||||
-
|
||||
+bloat=
|
||||
|
||||
# flat is for repositories where RPMS and SRPMS are kept in the
|
||||
# same directory level.
|
||||
flat=""
|
||||
|
||||
-while test $# -gt 0 ; do
|
||||
- case "${1}" in
|
||||
- -h | --help)
|
||||
- echo -e "${usage}"
|
||||
- exit 0
|
||||
- ;;
|
||||
- --mapi)
|
||||
- # hee hee hee..
|
||||
- mapi=1
|
||||
- ;;
|
||||
- --listonly)
|
||||
- listonly=1
|
||||
- ;;
|
||||
- --hashonly)
|
||||
- hashonly=1
|
||||
- ;;
|
||||
- --bz2only)
|
||||
- bz2only=1
|
||||
- ;;
|
||||
- --updateinfo=*)
|
||||
- updateinfo=${1}
|
||||
- ;;
|
||||
- --bloat)
|
||||
- bloat="--bloat"
|
||||
- ;;
|
||||
- --flat)
|
||||
- flat="--flat"
|
||||
- ;;
|
||||
- --topdir=*)
|
||||
- topdir="`echo \"${1}\" | sed -e 's/^[^=]*=//'`"
|
||||
- if [ ! -d $topdir ]; then
|
||||
- echo "Invalid top directory for distribution ${topdir}" 1>&2
|
||||
- exit 1
|
||||
- fi
|
||||
- ;;
|
||||
- --progress)
|
||||
- progress="--progress"
|
||||
- ;;
|
||||
- -s | --sign)
|
||||
- signature=1;
|
||||
- ;;
|
||||
- -*)
|
||||
- echo -e "${usage}" 1>&2
|
||||
- exit 1
|
||||
- ;;
|
||||
- *)
|
||||
- break
|
||||
- ;;
|
||||
- esac
|
||||
- shift
|
||||
+USAGE()
|
||||
+{
|
||||
+ cat >&2 <<EOF
|
||||
+Usage: genbasedir [<options>] <distribution> <comp1> [<comp2> ... <compN>]
|
||||
+Options:
|
||||
+ -s, --sign Generate and sign hashfile
|
||||
+ --hashonly Do hashfile stuff only
|
||||
+ --listonly Generate pkglists/srclists and quit
|
||||
+ --bz2only Generate only compressed lists
|
||||
+ --topdir=dir Top directory of repository
|
||||
+ --updateinfo=file Update information file
|
||||
+ --bloat Do not strip the package file list. Needed for some
|
||||
+ distributions that use non-automatically generated file dependencies
|
||||
+ --uid=uid Pass different GPG user ID for signing
|
||||
+ --progress Show progress bars for genpkglist/gensrclist
|
||||
+EOF
|
||||
+ [ -n "$1" ] && exit "$1" || exit
|
||||
+}
|
||||
+
|
||||
+TEMP=`getopt -n $PROG -o hs -l help,mapi,listonly,bz2only,hashonly,updateinfo:,bloat,topdir:,sign,uid:,progress -- "$@"` || USAGE
|
||||
+eval set -- "$TEMP"
|
||||
+
|
||||
+while :; do
|
||||
+ case "$1" in
|
||||
+ --listonly) shift; listonly=1
|
||||
+ ;;
|
||||
+ --bz2only) shift; bz2only=
|
||||
+ ;;
|
||||
+ --hashonly) shift; hashonly=1
|
||||
+ ;;
|
||||
+ -s|--sign) shift; signature=1
|
||||
+ ;;
|
||||
+ --bloat) shift; bloat="--bloat"
|
||||
+ ;;
|
||||
+ --mapi) shift; mapi="--mapi"
|
||||
+ ;;
|
||||
+ --updateinfo) shift; updateinfo="$1"; shift
|
||||
+ ;;
|
||||
+ --uid) shift; gpguid="$1"; shift
|
||||
+ ;;
|
||||
+ --topdir) shift; topdir="$1"; shift
|
||||
+ ;;
|
||||
+ --flat) shift; float="--float"
|
||||
+ ;;
|
||||
+ --progress) shift; progress="--progress"
|
||||
+ ;;
|
||||
+ -h|--help) USAGE 0
|
||||
+ ;;
|
||||
+ --) shift; break
|
||||
+ ;;
|
||||
+ *) echo "$PROG: unrecognized option: $1" >&2; exit 1
|
||||
+ ;;
|
||||
+ esac
|
||||
done
|
||||
|
||||
-distro=${1}
|
||||
-shift
|
||||
+topdir="$(echo "$topdir" |sed '
|
||||
+s:/\(\./\)\+:/:g
|
||||
+s:/\+:/:g
|
||||
+s:/$::
|
||||
+')"
|
||||
|
||||
-components=$*
|
||||
+[ -n "$topdir" ] || USAGE 1
|
||||
|
||||
-if [ -z "$components" ]; then
|
||||
- echo -e "${usage}"
|
||||
- exit 0
|
||||
+if [ ! -d "$topdir" ]; then
|
||||
+ echo "Invalid top directory for distribution: $topdir" >&2
|
||||
+ exit 1
|
||||
fi
|
||||
|
||||
+distro=${1}
|
||||
+shift
|
||||
+
|
||||
+components="$*"
|
||||
+
|
||||
+[ -n "$components" ] || USAGE 1
|
||||
|
||||
getsize() {
|
||||
tmp=`wc -c $1`
|
||||
@@ -186,75 +190,88 @@
|
||||
basedir_=`echo ${distro}/base|tr -s /`
|
||||
basedir=${topdir}/$basedir_
|
||||
|
||||
+WORKDIR=
|
||||
+
|
||||
+Exit()
|
||||
+{
|
||||
+ RETVAL=$?
|
||||
+ trap '' EXIT
|
||||
+ [ -z "$WORKDIR" ] || rm -rf "$WORKDIR"
|
||||
+ exit $RETVAL
|
||||
+}
|
||||
+
|
||||
+trap 'Exit ' SIGHUP SIGPIPE SIGINT SIGQUIT SIGTERM EXIT
|
||||
+
|
||||
+WORKDIR="$(mktemp -dt "$PROG.XXXXXXXXXX")"
|
||||
+
|
||||
+SRCIDX_COMP="$WORKDIR/comp"
|
||||
+SRCIDX="$WORKDIR/total"
|
||||
+
|
||||
+saved_list=
|
||||
+
|
||||
+save_file()
|
||||
+{
|
||||
+ saved_list="$1"
|
||||
+
|
||||
+ if [ -f "$saved_list" ]; then
|
||||
+ mv -f "$saved_list" "$saved_list.old"
|
||||
+ else
|
||||
+ saved_list=
|
||||
+ fi
|
||||
+}
|
||||
|
||||
-# release file
|
||||
-# ------------
|
||||
+compare_file()
|
||||
+{
|
||||
+ if [ -n "$saved_list" -a -f "$saved_list.old" ]; then
|
||||
+ if cmp -s "$saved_list.old" "$saved_list"; then
|
||||
+ mv -f "$saved_list.old" "$saved_list"
|
||||
+ else
|
||||
+ rm -f "$saved_list.old"
|
||||
+ fi
|
||||
+ fi
|
||||
+}
|
||||
|
||||
-#for comp in ${components}; do
|
||||
-# true > ${basedir}/release.$comp
|
||||
-#
|
||||
-#done
|
||||
+for comp in ${components}; do
|
||||
+ [ -f "$basedir/release.$comp" ] || touch "$basedir/release.$comp"
|
||||
+done
|
||||
|
||||
|
||||
-if [ $hashonly -ne 1 ]; then
|
||||
+if [ "$hashonly" -ne 1 ]; then
|
||||
# package lists
|
||||
# -------------
|
||||
|
||||
-true > /tmp/srcidx.$$
|
||||
-
|
||||
for comp in ${components}; do
|
||||
echo -n "${comp}: "
|
||||
|
||||
echo -n "pkglist "
|
||||
|
||||
+ newlist="$basedir/pkglist.$comp"
|
||||
+
|
||||
# Save older pkglist
|
||||
- if [ -f $basedir/pkglist.$comp ]; then
|
||||
- mv -f $basedir/pkglist.$comp $basedir/pkglist.$comp.old
|
||||
- fi
|
||||
+ save_file "$newlist"
|
||||
|
||||
- if test x$updateinfo = x; then
|
||||
- (cd $basedir; genpkglist $progress $bloat --index /tmp/srcidx.$comp.$$ $topdir/${distro} $comp)
|
||||
+ :>"$SRCIDX_COMP"
|
||||
+ if [ -z "$updateinfo" ]; then
|
||||
+ (cd "$basedir"; genpkglist $progress $bloat --index "$SRCIDX_COMP" "$topdir/$distro" "$comp")
|
||||
else
|
||||
- (cd $basedir; genpkglist $progress $bloat --index /tmp/srcidx.$comp.$$ --info $updateinfo $topdir/${distro} $comp)
|
||||
+ (cd "$basedir"; genpkglist $progress $bloat --index "$SRCIDX_COMP" --info "$updateinfo" "$topdir/$distro" "$comp")
|
||||
fi
|
||||
+
|
||||
if [ $? -ne 0 ]; then
|
||||
echo
|
||||
echo "Error executing genpkglist."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- if [ -f $basedir/pkglist.$comp ]; then
|
||||
-
|
||||
- # Compare with older pkglist.
|
||||
- if [ -f $basedir/pkglist.$comp.old ]; then
|
||||
- if cmp -s $basedir/pkglist.$comp.old $basedir/pkglist.$comp; then
|
||||
- mv -f $basedir/pkglist.$comp.old $basedir/pkglist.$comp
|
||||
- fi
|
||||
- fi
|
||||
-
|
||||
- # Save older compressed pkglist
|
||||
- if [ -f $basedir/pkglist.$comp.bz2 ]; then
|
||||
- mv -f $basedir/pkglist.$comp.bz2 $basedir/pkglist.$comp.bz2.old
|
||||
- fi
|
||||
-
|
||||
- bzip2 -c $basedir/pkglist.$comp > $basedir/pkglist.$comp.bz2
|
||||
-
|
||||
- # Compare with older compressed pkglist.
|
||||
- if [ -f $basedir/pkglist.$comp.bz2.old ]; then
|
||||
- if cmp -s $basedir/pkglist.$comp.bz2.old $basedir/pkglist.$comp.bz2; then
|
||||
- mv -f $basedir/pkglist.$comp.bz2.old $basedir/pkglist.$comp.bz2
|
||||
- fi
|
||||
- fi
|
||||
-
|
||||
- if [ $bz2only -eq 1 ]; then
|
||||
- rm -f $basedir/pkglist.$comp
|
||||
- fi
|
||||
+ # Compare with older pkglist.
|
||||
+ compare_file
|
||||
|
||||
- rm -f $basedir/pkglist.$comp.old
|
||||
- rm -f $basedir/pkglist.$comp.bz2.old
|
||||
+ if [ -f "$newlist" ]; then
|
||||
+ rm -f "$newlist.bz2"
|
||||
+ bzip2 -9$bz2only "$newlist"
|
||||
fi
|
||||
|
||||
- cat /tmp/srcidx.$comp.$$ >> /tmp/srcidx.$$
|
||||
+ cat "$SRCIDX_COMP" >> "$SRCIDX"
|
||||
|
||||
echo "done"
|
||||
done
|
||||
@@ -264,77 +281,38 @@
|
||||
|
||||
echo -n "srclist "
|
||||
|
||||
- # Save older srclist
|
||||
- if [ -f $basedir/srclist.$comp ]; then
|
||||
- mv -f $basedir/srclist.$comp $basedir/srclist.$comp.old
|
||||
- fi
|
||||
+ newlist="$basedir/srclist.$comp"
|
||||
|
||||
+ # Save older srclist
|
||||
+ save_file "$newlist"
|
||||
|
||||
- sfix="/.."
|
||||
- if test x$flat != x; then
|
||||
- sfix=""
|
||||
- fi
|
||||
+ :>"$SRCIDX_COMP"
|
||||
+ (cd "$basedir"; gensrclist $progress $flat $mapi "$topdir/$distro/.." "$comp" "$SRCIDX_COMP")
|
||||
|
||||
- if [ $mapi -ne 0 ]; then
|
||||
- (cd $basedir; gensrclist $progress $flat --mapi $topdir/${distro}${sfix} $comp /tmp/srcidx.$comp.$$)
|
||||
- else
|
||||
- (cd $basedir; gensrclist $progress $flat $topdir/${distro}${sfix} $comp /tmp/srcidx.$$)
|
||||
- fi
|
||||
if [ $? -ne 0 ]; then
|
||||
echo
|
||||
echo "Error executing gensrclist."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
+ # Compare with older srclist.
|
||||
+ compare_file
|
||||
|
||||
-
|
||||
- if [ -f $basedir/srclist.$comp ]; then
|
||||
-
|
||||
- # Compare with older srclist.
|
||||
- if [ -f $basedir/srclist.$comp.old ]; then
|
||||
- if cmp -s $basedir/srclist.$comp.old $basedir/srclist.$comp; then
|
||||
- mv -f $basedir/srclist.$comp.old $basedir/srclist.$comp
|
||||
- fi
|
||||
- fi
|
||||
-
|
||||
- # Save older compressed srclist
|
||||
- if [ -f $basedir/srclist.$comp.bz2 ]; then
|
||||
- mv -f $basedir/srclist.$comp.bz2 $basedir/srclist.$comp.bz2.old
|
||||
- fi
|
||||
-
|
||||
- bzip2 -c $basedir/srclist.$comp > $basedir/srclist.$comp.bz2
|
||||
-
|
||||
- # Compare with older compressed srclist.
|
||||
- if [ -f $basedir/srclist.$comp.bz2.old ]; then
|
||||
- if cmp -s $basedir/srclist.$comp.bz2.old $basedir/srclist.$comp.bz2; then
|
||||
- mv -f $basedir/srclist.$comp.bz2.old $basedir/srclist.$comp.bz2
|
||||
- fi
|
||||
- fi
|
||||
-
|
||||
- if [ $bz2only -eq 1 ]; then
|
||||
- rm -f $basedir/srclist.$comp
|
||||
- fi
|
||||
-
|
||||
- rm -f $basedir/srclist.$comp.old
|
||||
- rm -f $basedir/srclist.$comp.bz2.old
|
||||
+ if [ -f "$newlist" ]; then
|
||||
+ rm -f "$newlist.bz2"
|
||||
+ bzip2 -9$bz2only "$newlist"
|
||||
fi
|
||||
|
||||
- rm -f /tmp/srcidx.$comp.$$
|
||||
-
|
||||
echo "done"
|
||||
done
|
||||
|
||||
fi
|
||||
|
||||
-rm -f /tmp/srcidx.$$
|
||||
-
|
||||
-if [ $listonly -eq 0 ]; then
|
||||
+if [ "$listonly" -eq 0 ]; then
|
||||
# Save older hashfile
|
||||
- if [ -f $basedir/hashfile ]; then
|
||||
- mv -f $basedir/hashfile $basedir/hashfile.old
|
||||
- fi
|
||||
+ save_file "$basedir/hashfile"
|
||||
hf=${basedir}/hashfile
|
||||
- true > $hf
|
||||
+ : > $hf
|
||||
else
|
||||
hf=/dev/null
|
||||
fi
|
||||
@@ -348,27 +326,21 @@
|
||||
srclist=${basedir}/srclist
|
||||
release=${basedir}/release
|
||||
|
||||
-for comp in ${components}; do
|
||||
- echo -n "${comp}: "
|
||||
-
|
||||
- echo -n "hashfile "
|
||||
- if [ -f ${pkglist}.$comp ]; then
|
||||
- phashstuff ${pkglist}.$comp ${pkglist_}.$comp >> $hf
|
||||
- fi
|
||||
- if [ -f ${srclist}.$comp ]; then
|
||||
- phashstuff ${srclist}.$comp ${srclist_}.$comp >> $hf
|
||||
- fi
|
||||
+phash()
|
||||
+{
|
||||
+ if [ -f "$1" ]; then
|
||||
+ phashstuff "$1" "$2" >> "$3"
|
||||
+ fi
|
||||
+}
|
||||
|
||||
- if [ -f ${pkglist}.$comp.bz2 ]; then
|
||||
- phashstuff ${pkglist}.$comp.bz2 ${pkglist_}.$comp.bz2 >> $hf
|
||||
- fi
|
||||
- if [ -f ${srclist}.$comp.bz2 ]; then
|
||||
- phashstuff ${srclist}.$comp.bz2 ${srclist_}.$comp.bz2 >> $hf
|
||||
- fi
|
||||
+for comp in ${components}; do
|
||||
+ echo -n "$comp: hashfile "
|
||||
|
||||
- if [ -f ${release}.$comp ]; then
|
||||
- phashstuff ${release}.$comp ${release_}.$comp >> $hf
|
||||
- fi
|
||||
+ phash "$pkglist.$comp" "$pkglist_.$comp" "$hf"
|
||||
+ phash "$srclist.$comp" "$srclist_.$comp" "$hf"
|
||||
+ phash "$pkglist.$comp.bz2" "$pkglist_.$comp.bz2" "$hf"
|
||||
+ phash "$srclist.$comp.bz2" "$srclist_.$comp.bz2" "$hf"
|
||||
+ phash "$release.$comp" "$release_.$comp" "$hf"
|
||||
|
||||
echo "done"
|
||||
done
|
||||
@@ -377,34 +349,26 @@
|
||||
|
||||
if [ $listonly -eq 0 ]; then
|
||||
# Compare with older hashfile.
|
||||
- if [ -f $basedir/hashfile.old ]; then
|
||||
- if cmp -s $basedir/hashfile.old $basedir/hashfile; then
|
||||
- mv -f $basedir/hashfile.old $basedir/hashfile
|
||||
- fi
|
||||
- fi
|
||||
+ compare_file
|
||||
fi
|
||||
|
||||
-if [ $signature -ne 0 -a $listonly -eq 0 ]; then
|
||||
+if [ "$signature" -ne 0 -a "$listonly" -eq 0 ]; then
|
||||
+ if [ -n "$gpguid" ]; then
|
||||
+ DEFAULTKEY="--default-key $gpguid"
|
||||
+ else
|
||||
+ DEFAULTKEY=
|
||||
+ fi
|
||||
|
||||
# Save older hashfile.gpg
|
||||
- if [ -f $basedir/hashfile.gpg ]; then
|
||||
- mv -f $basedir/hashfile.gpg $basedir/hashfile.gpg.old
|
||||
- fi
|
||||
+ save_file "$basedir/hashfile.gpg"
|
||||
|
||||
- gpg -armour -qs --yes $basedir/hashfile
|
||||
- mv -f $basedir/hashfile.asc $basedir/hashfile.gpg
|
||||
- rm -f $basedir/hashfile
|
||||
+ gpg -armour $DEFAULTKEY -qs --yes $basedir/hashfile
|
||||
+ mv -f "$basedir/hashfile.asc" "$basedir/hashfile.gpg"
|
||||
+ rm -f "$basedir/hashfile"
|
||||
|
||||
# Compare with older hashfile.gpg
|
||||
- if [ -f $basedir/hashfile.gpg.old ]; then
|
||||
- if cmp -s $basedir/hashfile.gpg.old $basedir/hashfile.gpg; then
|
||||
- mv -f $basedir/hashfile.gpg.old $basedir/hashfile.gpg
|
||||
- fi
|
||||
- fi
|
||||
+ compare_file
|
||||
fi
|
||||
-
|
||||
-rm -f $basedir/hashfile.old
|
||||
-rm -f $basedir/hashfile.gpg.old
|
||||
|
||||
echo "All your base are belong to us !!"
|
||||
|
@@ -1,20 +1,14 @@
|
||||
diff -ur apt-0.3.19cnc55~/configure.in apt-0.3.19cnc55/configure.in
|
||||
--- apt-0.3.19cnc55~/configure.in Wed Mar 6 20:17:10 2002
|
||||
+++ apt-0.3.19cnc55/configure.in Tue Mar 26 21:14:21 2002
|
||||
@@ -64,20 +64,12 @@
|
||||
--- apt-0.5.4cnc1.orig/configure.in Tue Jul 9 12:28:05 2002
|
||||
+++ apt-0.5.4cnc1/configure.in Fri Jul 19 00:11:07 2002
|
||||
@@ -64,6 +64,7 @@
|
||||
dnl AC_MSG_ERROR(failed: I need posix threads, pthread)
|
||||
dnl fi
|
||||
|
||||
-dnl Check for DB2
|
||||
-AC_CHECK_HEADER(db2/db.h,
|
||||
- [AC_CHECK_LIB(db2,db_open,
|
||||
- [AC_DEFINE(HAVE_DB2) DB2LIB="-ldb2"])])
|
||||
-AC_SUBST(DB2LIB)
|
||||
-
|
||||
-
|
||||
dnl Check for rpm version --akk
|
||||
rpm_version="none"
|
||||
SAVE_LIBS="$LIBS"
|
||||
+dnl ALT:We need this to build apt-ftparchive
|
||||
dnl Check for DB2
|
||||
AC_CHECK_HEADER(db2/db.h,
|
||||
[AC_CHECK_LIB(db2,db_open,
|
||||
@@ -77,7 +78,6 @@
|
||||
SAVE_CPPFLAGS="$CPPFLAGS"
|
||||
|
||||
CPPFLAGS="$SAVE_CPPFLAGS -I/usr/include/rpm"
|
||||
@@ -22,7 +16,7 @@ diff -ur apt-0.3.19cnc55~/configure.in apt-0.3.19cnc55/configure.in
|
||||
AC_CHECK_HEADER(rpm/rpmlib.h, rpm_header_ok=1, rpm_header_ok=0)
|
||||
|
||||
if test $rpm_header_ok = 1; then
|
||||
@@ -85,7 +77,7 @@
|
||||
@@ -85,7 +85,7 @@
|
||||
LIBS="$SAVE_LIBS -lrpm -lrpmio -lz -lbz2 -lpopt"
|
||||
AC_CHECK_LIB(rpmdb,rpmdbOpen,
|
||||
[RPMDBLIBS="-lrpmdb"],
|
||||
@@ -31,17 +25,16 @@ diff -ur apt-0.3.19cnc55~/configure.in apt-0.3.19cnc55/configure.in
|
||||
|
||||
LIBS="$SAVE_LIBS $RPMDBLIBS -lrpmio -lz -lbz2 -lpopt"
|
||||
AC_CHECK_LIB(rpm,rpmdbGetIteratorOffset,
|
||||
@@ -100,6 +92,7 @@
|
||||
@@ -100,6 +100,7 @@
|
||||
[AC_DEFINE_UNQUOTED(HAVE_RPM, 1)
|
||||
RPMLIBS="-lrpm -ldb1 -lz -lbz2 -lpopt"
|
||||
SAVE_CPPFLAGS="$SAVE_CPPFLAGS -I/usr/include/rpm"
|
||||
+ AC_CHECK_HEADERS(db1/db.h)
|
||||
rpm_version="3"])
|
||||
fi
|
||||
fi
|
||||
diff -ur apt-0.3.19cnc55~/tools/cached_md5.cc apt-0.3.19cnc55/tools/cached_md5.cc
|
||||
--- apt-0.3.19cnc55~/tools/cached_md5.cc Sun Feb 17 01:46:11 2002
|
||||
+++ apt-0.3.19cnc55/tools/cached_md5.cc Sun Feb 17 01:46:25 2002
|
||||
if test $rpm_version != "none"; then
|
||||
--- apt-0.5.4cnc1.orig/tools/cached_md5.cc Tue Jul 9 02:41:48 2002
|
||||
+++ apt-0.5.4cnc1/tools/cached_md5.cc Thu Jul 18 22:47:39 2002
|
||||
@@ -39,6 +39,7 @@
|
||||
#include <errno.h>
|
||||
#include <fcntl.h>
|
||||
@@ -50,9 +43,8 @@ diff -ur apt-0.3.19cnc55~/tools/cached_md5.cc apt-0.3.19cnc55/tools/cached_md5.c
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <sys/stat.h>
|
||||
diff -ur apt-0.3.19cnc55~/tools/hdlist2pkglist.cc apt-0.3.19cnc55/tools/hdlist2pkglist.cc
|
||||
--- apt-0.3.19cnc55~/tools/hdlist2pkglist.cc Sun Feb 17 01:46:11 2002
|
||||
+++ apt-0.3.19cnc55/tools/hdlist2pkglist.cc Sun Feb 17 01:46:46 2002
|
||||
--- apt-0.5.4cnc1.orig/tools/hdlist2pkglist.cc Tue Jul 9 02:42:13 2002
|
||||
+++ apt-0.5.4cnc1/tools/hdlist2pkglist.cc Thu Jul 18 22:48:22 2002
|
||||
@@ -37,6 +37,7 @@
|
||||
#include <errno.h>
|
||||
#include <fcntl.h>
|
36 apt-0.5.4cnc3-alt-md5hash-debug.patch Normal file
@@ -0,0 +1,36 @@
|
||||
--- apt-0.5.4cnc3.orig/apt-pkg/acquire-item.cc Sat Jul 27 13:02:53 2002
|
||||
+++ apt-0.5.4cnc3/apt-pkg/acquire-item.cc Sat Jul 27 13:34:34 2002
|
||||
@@ -786,7 +786,10 @@
|
||||
{
|
||||
if (Md5Hash != MD5)
|
||||
{
|
||||
- Status = StatError;
|
||||
+ if (_config->FindB("Debug::pkgAcquire::Auth", false)) {
|
||||
+ cerr << "md5 mismatch: " << Md5Hash << "!=" << MD5 << endl;
|
||||
+ }
|
||||
+ Status = StatError;
|
||||
ErrorText = _("MD5Sum mismatch");
|
||||
Rename(DestFile,DestFile + ".FAILED");
|
||||
return;
|
||||
@@ -916,6 +919,9 @@
|
||||
{
|
||||
if (Md5Hash != MD5)
|
||||
{
|
||||
+ if (_config->FindB("Debug::pkgAcquire::Auth", false)) {
|
||||
+ cerr << "md5 mismatch: " << Md5Hash << "!=" << MD5 << endl;
|
||||
+ }
|
||||
Status = StatError;
|
||||
ErrorText = "MD5Sum mismatch";
|
||||
Rename(DestFile,DestFile + ".FAILED");
|
||||
--- apt-0.5.4cnc3.orig/cmdline/apt-get.cc Sat Jul 27 13:02:53 2002
|
||||
+++ apt-0.5.4cnc3/cmdline/apt-get.cc Sat Jul 27 13:41:35 2002
|
||||
@@ -1709,6 +1709,9 @@
|
||||
I->Type != "tar")
|
||||
continue;
|
||||
|
||||
+ if (_config->FindB("Debug::pkgAcquire::Auth",false) == true)
|
||||
+ cerr << "I->Path = " << I->Path << ", I->MD5Hash = " << I->MD5Hash << endl;
|
||||
+
|
||||
new pkgAcqFile(&Fetcher,Last->Index().ArchiveURI(I->Path),
|
||||
I->MD5Hash,I->Size,
|
||||
Last->Index().SourceInfo(*Last,*I),Src);
|
36 apt-0.5.4cnc3-alt-select-genlist.patch Normal file
@@ -0,0 +1,36 @@
|
||||
diff -ur apt-0.5.4cnc3.orig/tools/genbasedir apt-0.5.4cnc3/tools/genbasedir
|
||||
--- apt-0.5.4cnc3.orig/tools/genbasedir Sat Jul 27 13:02:53 2002
|
||||
+++ apt-0.5.4cnc3/tools/genbasedir Sat Jul 27 14:05:42 2002
|
||||
@@ -26,6 +26,8 @@
|
||||
genbasedir /home/ftp/pub/conectiva\n\
|
||||
genbasedir /home/ftp/pub/conectiva main extra devel\n\
|
||||
"
|
||||
+GENPKGLIST=genpkglist-0.5
|
||||
+GENSRCLIST=gensrclist-0.5
|
||||
|
||||
getsize()
|
||||
{
|
||||
@@ -178,9 +180,9 @@
|
||||
fi
|
||||
|
||||
if test x$updateinfo = x; then
|
||||
- (cd $basedir; genpkglist $progress $bloat --index /tmp/srcidx.$comp.$$ $topdir $comp)
|
||||
+ (cd $basedir; "$GENPKGLIST" $progress $bloat --index /tmp/srcidx.$comp.$$ $topdir $comp)
|
||||
else
|
||||
- (cd $basedir; genpkglist $progress $bloat --index /tmp/srcidx.$comp.$$ --info $updateinfo $topdir $comp)
|
||||
+ (cd $basedir; "$GENPKGLIST" $progress $bloat --index /tmp/srcidx.$comp.$$ --info $updateinfo $topdir $comp)
|
||||
fi
|
||||
|
||||
if [ -f $basedir/pkglist.$comp ]; then
|
||||
@@ -230,9 +232,9 @@
|
||||
srctopdir=`cd $topdir; pwd`
|
||||
fi
|
||||
if [ $mapi -ne 0 ]; then
|
||||
- (cd $basedir; gensrclist $progress $flat --mapi $srctopdir $comp /tmp/srcidx.$comp.$$)
|
||||
+ (cd $basedir; "$GENSRCLIST" $progress $flat --mapi $srctopdir $comp /tmp/srcidx.$comp.$$)
|
||||
else
|
||||
- (cd $basedir; gensrclist $progress $flat $srctopdir $comp /tmp/srcidx.$$)
|
||||
+ (cd $basedir; "$GENSRCLIST" $progress $flat $srctopdir $comp /tmp/srcidx.$$)
|
||||
fi
|
||||
|
||||
if [ -f $basedir/srclist.$comp ]; then
|
16 apt-0.5.4cnc8-alt-rpm-fancypercent.patch Normal file
@@ -0,0 +1,16 @@
2002-10-23  Dmitry V. Levin <ldv@altlinux.org>

	* apt-pkg/rpm/rpmpm.cc(pkgRPMPM::ExecRPM):
	Pass "fancypercent" option if interactive.

--- apt-0.5.4cnc8/apt-pkg/rpm/rpmpm.cc.orig	2002-08-05 17:46:44 +0300
+++ apt-0.5.4cnc8/apt-pkg/rpm/rpmpm.cc	2002-10-23 11:16:13 +0300
@@ -266,6 +266,8 @@

    if (Interactive == false && op != Item::RPMErase)
       Args[n++] = "--percent";
+   if (Interactive)
+      Args[n++] = "--fancypercent";

    string rootdir = _config->Find("RPM::RootDir", "");
    if (!rootdir.empty())
31 apt-0.5.4cnc8-alt-rpm_cmd.patch Normal file
@@ -0,0 +1,31 @@
|
||||
2002-10-23 Dmitry V. Levin <ldv@altlinux.org>
|
||||
|
||||
* apt-pkg/rpm/rpmpm.cc(pkgRPMPM::ExecRPM):
|
||||
Report actual options being passed to rpm.
|
||||
|
||||
--- apt-0.5.4cnc8/apt-pkg/rpm/rpmpm.cc.orig 2002-08-05 17:46:44 +0300
|
||||
+++ apt-0.5.4cnc8/apt-pkg/rpm/rpmpm.cc 2002-10-23 11:16:13 +0300
|
||||
@@ -326,6 +326,14 @@
|
||||
if (_config->FindB("RPM::Order",false) == false)
|
||||
Args[n++] = "--noorder";
|
||||
|
||||
+ string cmd;
|
||||
+ for (unsigned i = 0; i < n; ++i)
|
||||
+ {
|
||||
+ if (!cmd.empty())
|
||||
+ cmd += ' ';
|
||||
+ cmd += Args[i];
|
||||
+ }
|
||||
+
|
||||
for (list<const char*>::iterator I = files.begin(); I != files.end(); I++)
|
||||
Args[n++] = *I;
|
||||
|
||||
@@ -339,7 +347,7 @@
|
||||
return true;
|
||||
}
|
||||
|
||||
- cout << _("Executing RPM (")<<operation<<")..." << endl;
|
||||
+ cout << _("Executing RPM (")<<cmd<<")..." << endl;
|
||||
|
||||
cout << flush;
|
||||
clog << flush;
|
41 apt-0.5.4cnc9-alt-bz2.patch Normal file
@@ -0,0 +1,41 @@
|
||||
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/apt-pkg/acquire-item.cc apt-0.5.4cnc9.bz2/apt-pkg/acquire-item.cc
|
||||
--- apt-0.5.4cnc9.orig/apt-pkg/acquire-item.cc 2002-12-17 12:00:20 +0300
|
||||
+++ apt-0.5.4cnc9.bz2/apt-pkg/acquire-item.cc 2002-12-19 17:17:41 +0300
|
||||
@@ -188,7 +188,7 @@
|
||||
|
||||
// Create the item
|
||||
// CNC:2002-07-03
|
||||
- Desc.URI = URI + ".bz2";
|
||||
+ Desc.URI = URI + _config->Find("Acquire::ComprExtension", ".bz2");
|
||||
Desc.Description = URIDesc;
|
||||
Desc.Owner = this;
|
||||
Desc.ShortDesc = ShortDesc;
|
||||
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/apt-pkg/init.cc apt-0.5.4cnc9.bz2/apt-pkg/init.cc
|
||||
--- apt-0.5.4cnc9.orig/apt-pkg/init.cc 2002-12-17 12:00:54 +0300
|
||||
+++ apt-0.5.4cnc9.bz2/apt-pkg/init.cc 2002-12-19 17:28:15 +0300
|
||||
@@ -71,6 +71,7 @@
|
||||
Cnf.Set("Dir::Etc::parts","apt.conf.d");
|
||||
Cnf.Set("Dir::Etc::preferences","preferences");
|
||||
Cnf.Set("Dir::Bin::methods","/usr/lib/apt/methods");
|
||||
+ Cnf.Set("Acquire::ComprExtension", ".bz2");
|
||||
|
||||
bool Res = true;
|
||||
|
||||
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/methods/file.cc apt-0.5.4cnc9.bz2/methods/file.cc
|
||||
--- apt-0.5.4cnc9.orig/methods/file.cc 2002-07-23 21:54:53 +0400
|
||||
+++ apt-0.5.4cnc9.bz2/methods/file.cc 2002-12-19 17:24:32 +0300
|
||||
@@ -52,9 +52,11 @@
|
||||
Res.IMSHit = true;
|
||||
}
|
||||
|
||||
- // See if we can compute a file without a .gz exentsion
|
||||
- string::size_type Pos = File.rfind(".gz");
|
||||
- if (Pos + 3 == File.length())
|
||||
+ // See if we can compute a file without a .gz or .bz2 exentsion
|
||||
+ // Should we check here both .gz and .bz2 extensions?
|
||||
+ string ComprExtension = _config->Find("Acquire::ComprExtension", ".bz2");
|
||||
+ string::size_type Pos = File.rfind(ComprExtension);
|
||||
+ if (Pos + ComprExtension.length() == File.length())
|
||||
{
|
||||
File = string(File,0,Pos);
|
||||
if (stat(File.c_str(),&Buf) == 0)
|
17 apt-0.5.4cnc9-alt-getsrc-debug.patch Normal file
@@ -0,0 +1,17 @@
|
||||
helps to determine which of the branches is really used.
|
||||
Look at apt-0.5.4cnc9-alt-getsrc.patch for more info.
|
||||
|
||||
imz@altlinux.ru, 28 Sep 2002.
|
||||
|
||||
--- apt-0.5.4cnc9/cmdline/apt-get.cc.getsrc 2002-10-28 22:41:04 +0300
|
||||
+++ apt-0.5.4cnc9/cmdline/apt-get.cc 2002-10-28 23:44:30 +0300
|
||||
@@ -1184,6 +1184,9 @@
|
||||
}
|
||||
|
||||
// No source package name..
|
||||
+ // use the "matching against Binaries()" Find()'s feature.
|
||||
+ // DEBUG:
|
||||
+ printf("Src1 %s\n",Src.c_str());
|
||||
if (Src.empty() == true)
|
||||
Src = TmpSrc;
|
||||
|
131 apt-0.5.4cnc9-alt-getsrc.patch Normal file
@@ -0,0 +1,131 @@
|
||||
This patch is the result of work initiated by Anton Denisov's
|
||||
<fire@kgpu.kamchatka.ru> feedback (and debug-patches).
|
||||
|
||||
There are three problems with apt-get source <pkgname>, this patch fixes two of them:
|
||||
|
||||
- rpmrecords.cc: SourcePackage() returns empty string because of incorrect
|
||||
rpmlib's return code interpretation (fixed, now it return in most cases
|
||||
the source package filename);
|
||||
- srcrecords.cc: Find() called from apt-get's DoSource() doesn't match anything
|
||||
because it matches against source package name, but gets a filename as an
|
||||
argument (in the case of the first branch in DoSource() when the return value
|
||||
of SourcePackage() is used) (fixed: now matching is done for corresponding
|
||||
fields).
|
||||
|
||||
This makes the first branch in DoSource() work which is almost all one may want.
|
||||
|
||||
9 July 2002
|
||||
imz@altlinux.ru
|
||||
|
||||
A bad side of this patch may be that in the first branch (matching against
|
||||
src.rpm name), it takes the full filenmame (including version-release) into
|
||||
account while the second branch does only matching against the name (version
|
||||
insensitive).
|
||||
|
||||
Ported to apt-0.5.4cnc9. (There is a separate apt-0.5.4cnc9-alt-getsrc-debug.patch
|
||||
which helps to determine which of the branches is really used.)
|
||||
|
||||
imz@altlinux.ru, 28 Sep 2002.
|
||||
|
||||
--- apt-0.5.4cnc9/apt-pkg/deb/debsrcrecords.h.getsrc 2002-07-25 22:07:18 +0400
|
||||
+++ apt-0.5.4cnc9/apt-pkg/deb/debsrcrecords.h 2002-10-29 00:45:07 +0300
|
||||
@@ -34,6 +34,9 @@
|
||||
virtual bool Step() {iOffset = Tags.Offset(); return Tags.Step(Sect);};
|
||||
virtual bool Jump(unsigned long Off) {iOffset = Off; return Tags.Jump(Sect,Off);};
|
||||
|
||||
+ // These refer to the archive file for the Version
|
||||
+ virtual string FileName() const {return Sect.FindS("Filename");};
|
||||
+
|
||||
virtual string Package() const {return Sect.FindS("Package");};
|
||||
virtual string Version() const {return Sect.FindS("Version");};
|
||||
virtual string Maintainer() const {return Sect.FindS("Maintainer");};
|
||||
--- apt-0.5.4cnc9/apt-pkg/rpm/rpmsrcrecords.h.getsrc 2002-08-09 00:07:33 +0400
|
||||
+++ apt-0.5.4cnc9/apt-pkg/rpm/rpmsrcrecords.h 2002-10-29 00:45:07 +0300
|
||||
@@ -44,6 +44,8 @@
|
||||
virtual bool Step();
|
||||
virtual bool Jump(unsigned long Off);
|
||||
|
||||
+ virtual string FileName() const;
|
||||
+
|
||||
virtual string Package() const;
|
||||
virtual string Version() const;
|
||||
virtual string Maintainer() const;
|
||||
--- apt-0.5.4cnc9/apt-pkg/rpm/rpmrecords.cc.getsrc 2002-08-19 20:37:48 +0400
|
||||
+++ apt-0.5.4cnc9/apt-pkg/rpm/rpmrecords.cc 2002-10-29 01:02:58 +0300
|
||||
@@ -174,7 +174,12 @@
|
||||
/* */
|
||||
string rpmRecordParser::SourcePkg()
|
||||
{
|
||||
- return "";
|
||||
+ char *str;
|
||||
+ int_32 count, type;
|
||||
+ assert(HeaderP != NULL);
|
||||
+ int rc = headerGetEntry(HeaderP, RPMTAG_SOURCERPM,
|
||||
+ &type, (void**)&str, &count);
|
||||
+ return string(rc?str:"");
|
||||
}
|
||||
/*}}}*/
|
||||
|
||||
--- apt-0.5.4cnc9/apt-pkg/rpm/rpmsrcrecords.cc.getsrc 2002-08-09 00:07:33 +0400
|
||||
+++ apt-0.5.4cnc9/apt-pkg/rpm/rpmsrcrecords.cc 2002-10-29 00:57:41 +0300
|
||||
@@ -143,6 +143,20 @@
|
||||
return true;
|
||||
}
|
||||
|
||||
+// RecordParser::FileName - Return the archive filename on the site /*{{{*/
|
||||
+// ---------------------------------------------------------------------
|
||||
+/* */
|
||||
+string rpmSrcRecordParser::FileName() const
|
||||
+{
|
||||
+ char *str;
|
||||
+ int_32 count, type;
|
||||
+ assert(HeaderP != NULL);
|
||||
+ int rc = headerGetEntry(HeaderP, CRPMTAG_FILENAME,
|
||||
+ &type, (void**)&str, &count);
|
||||
+ return string(rc?str:"");
|
||||
+}
|
||||
+ /*}}}*/
|
||||
+
|
||||
string rpmSrcRecordParser::Package() const
|
||||
{
|
||||
char *str;
|
||||
--- apt-0.5.4cnc9/apt-pkg/srcrecords.cc.getsrc 2002-07-23 21:54:50 +0400
|
||||
+++ apt-0.5.4cnc9/apt-pkg/srcrecords.cc 2002-10-29 00:45:07 +0300
|
||||
@@ -82,7 +82,7 @@
|
||||
/*}}}*/
|
||||
// SrcRecords::Find - Find the first source package with the given name /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
-/* This searches on both source package names and output binary names and
|
||||
+/* This searches on both source package filenames and output binary names and
|
||||
returns the first found. A 'cursor' like system is used to allow this
|
||||
function to be called multiple times to get successive entries */
|
||||
pkgSrcRecords::Parser *pkgSrcRecords::Find(const char *Package,bool SrcOnly)
|
||||
@@ -92,6 +92,8 @@
|
||||
|
||||
while (true)
|
||||
{
|
||||
+ // DEBUG:
|
||||
+ //std::cerr << "start loop" << std::endl;
|
||||
// Step to the next record, possibly switching files
|
||||
while ((*Current)->Step() == false)
|
||||
{
|
||||
@@ -107,7 +109,7 @@
|
||||
return 0;
|
||||
|
||||
// Source name hit
|
||||
- if ((*Current)->Package() == Package)
|
||||
+ if ((*Current)->FileName() == Package)
|
||||
return *Current;
|
||||
|
||||
if (SrcOnly == true)
|
||||
--- apt-0.5.4cnc9/apt-pkg/srcrecords.h.getsrc 2002-07-25 22:07:18 +0400
|
||||
+++ apt-0.5.4cnc9/apt-pkg/srcrecords.h 2002-10-29 00:45:07 +0300
|
||||
@@ -66,6 +66,8 @@
|
||||
virtual unsigned long Offset() = 0;
|
||||
virtual string AsStr() = 0;
|
||||
|
||||
+ virtual string FileName() const = 0;
|
||||
+
|
||||
virtual string Package() const = 0;
|
||||
virtual string Version() const = 0;
|
||||
virtual string Maintainer() const = 0;
|
18 apt-0.5.4cnc9-alt-install_virtual.patch Normal file
@@ -0,0 +1,18 @@
2003-01-17  Anton Kachalov <mouse@altlinux.org>,
	    Dmitry V. Levin <ldv@altlinux.org>

	* cmdline/apt-get.cc (TryToInstall):
	Add APT::Install::Virtual support.

--- apt-0.5.4cnc9/cmdline/apt-get.cc.orig	2003-01-17 17:11:11 +0300
+++ apt-0.5.4cnc9/cmdline/apt-get.cc	2003-01-17 17:19:10 +0300
@@ -988,7 +988,8 @@ bool TryToInstall(pkgCache::PkgIterator
    /* This is a pure virtual package and there is a single available
       provides */
    if (Cache[Pkg].CandidateVer == 0 && Pkg->ProvidesList != 0 &&
-       Pkg.ProvidesList()->NextProvides == 0)
+       (Pkg.ProvidesList()->NextProvides == 0 ||
+        _config->FindB("APT::Install::Virtual", false)))
    {
       pkgCache::PkgIterator Tmp = Pkg.ProvidesList().OwnerPkg();
      ioprintf(c1out,_("Note, selecting %s instead of %s\n"),
35 apt-0.5.4cnc9-alt-install_virtual_version.patch Normal file
@@ -0,0 +1,35 @@
|
||||
2003-01-21 Anton Kachalov <mouse@altlinux.org>
|
||||
|
||||
* apt-pkg/versionmatch.cc (pkgVersionMatch::Find):
|
||||
Add APT::Install::VirtualVersion support.
|
||||
|
||||
--- apt-0.5.4cnc9/apt-pkg/versionmatch.cc.orig 2002-07-23 21:54:50 +0400
|
||||
+++ apt-0.5.4cnc9/apt-pkg/versionmatch.cc 2003-01-21 16:42:55 +0300
|
||||
@@ -15,6 +15,7 @@
|
||||
#pragma implementation "apt-pkg/versionmatch.h"
|
||||
#endif
|
||||
#include <apt-pkg/versionmatch.h>
|
||||
+#include <apt-pkg/configuration.h>
|
||||
|
||||
#include <apt-pkg/strutl.h>
|
||||
#include <apt-pkg/error.h>
|
||||
@@ -158,6 +159,19 @@ pkgCache::VerIterator pkgVersionMatch::Find
|
||||
return Ver;
|
||||
}
|
||||
|
||||
+ if (_config->FindB("APT::Install::VirtualVersion",false) == false)
|
||||
+ return Ver;
|
||||
+
|
||||
+ pkgCache::PrvIterator Prv = Pkg.ProvidesList();
|
||||
+ for (; Prv.end() == false; Prv++)
|
||||
+ {
|
||||
+ if (Type == Version)
|
||||
+ {
|
||||
+ if (MatchVer(Prv.ProvideVersion(),VerStr,VerPrefixMatch) == true)
|
||||
+ return Prv.OwnerVer();
|
||||
+ continue;
|
||||
+ }
|
||||
+ }
|
||||
// This will be Ended by now.
|
||||
return Ver;
|
||||
}
|
16 apt-0.5.4cnc9-alt-packagemanager-CheckRConflicts.patch Normal file
@@ -0,0 +1,16 @@
2002-12-26  Anton Kachalov <mouse@altlinux.org>

	* apt-pkg/packagemanager.cc (pkgPackageManager::CheckRConflicts):
	Ignore versionless reverse dependencies.

--- apt-0.5.4cnc9/apt-pkg/packagemanager.cc.orig	2002-12-26 21:29:50 +0300
+++ apt-0.5.4cnc9/apt-pkg/packagemanager.cc	2002-12-26 21:30:48 +0300
@@ -216,7 +216,7 @@ bool pkgPackageManager::CheckRConflicts(
       if (D.ParentPkg() == Pkg || D.ParentVer() != D.ParentPkg().CurrentVer())
	 continue;

-      if (Cache.VS().CheckDep(Ver,D) == false) // CNC:2002-07-10
+      if (D.TargetVer() == 0 || Cache.VS().CheckDep(Ver,D) == false) // CNC:2002-07-10
	 continue;

      if (EarlyRemove(D.ParentPkg()) == false)
35 apt-0.5.4cnc9-alt-pkgorderlist_score.patch Normal file
@@ -0,0 +1,35 @@
|
||||
--- apt-0.5.4cnc9/apt-pkg/orderlist.cc.orig 2002-07-23 21:54:50 +0400
|
||||
+++ apt-0.5.4cnc9/apt-pkg/orderlist.cc 2003-01-21 18:18:22 +0300
|
||||
@@ -293,30 +293,30 @@ int pkgOrderList::Score(PkgIterator Pkg)
|
||||
{
|
||||
// Removal is always done first
|
||||
if (Cache[Pkg].Delete() == true)
|
||||
return 200;
|
||||
|
||||
// This should never happen..
|
||||
if (Cache[Pkg].InstVerIter(Cache).end() == true)
|
||||
return -1;
|
||||
|
||||
- int Score = 0;
|
||||
+ int Score = 1;
|
||||
if ((Pkg->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
|
||||
Score += 100;
|
||||
|
||||
if (IsFlag(Pkg,Immediate) == true)
|
||||
Score += 10;
|
||||
|
||||
for (DepIterator D = Cache[Pkg].InstVerIter(Cache).DependsList();
|
||||
D.end() == false; D++)
|
||||
if (D->Type == pkgCache::Dep::PreDepends)
|
||||
{
|
||||
- Score += 50;
|
||||
+ Score -= 1;
|
||||
break;
|
||||
}
|
||||
|
||||
// Important Required Standard Optional Extra
|
||||
signed short PrioMap[] = {0,5,4,3,1,0};
|
||||
if (Cache[Pkg].InstVerIter(Cache)->Priority <= 5)
|
||||
Score += PrioMap[Cache[Pkg].InstVerIter(Cache)->Priority];
|
||||
return Score;
|
||||
}
|
11 apt-0.5.4cnc9-alt-rpmlistparser-kernel.patch Normal file
@@ -0,0 +1,11 @@
--- apt-0.5.4cnc9/apt-pkg/rpm/rpmlistparser.cc.orig	2002-08-05 18:55:56 +0400
+++ apt-0.5.4cnc9/apt-pkg/rpm/rpmlistparser.cc	2002-12-23 17:21:35 +0300
@@ -123,8 +123,6 @@ string rpmListParser::Package()
    bool DupOk = false;
    string Name = str;

-   if (strncmp(str,"kernel", 6)==0)
-      DupOk=true;
    for (vector<regex_t*>::iterator I = AllowedDupPackages.begin();
         I != AllowedDupPackages.end(); I++)
    {
666 apt-0.5.4cnc9-alt-rsync.patch Normal file
@@ -0,0 +1,666 @@
|
||||
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/apt-pkg/acquire-item.h apt-0.5.4cnc9.rsync/apt-pkg/acquire-item.h
|
||||
--- apt-0.5.4cnc9.orig/apt-pkg/acquire-item.h 2002-07-23 21:54:50 +0400
|
||||
+++ apt-0.5.4cnc9.rsync/apt-pkg/acquire-item.h 2002-12-17 11:34:33 +0300
|
||||
@@ -59,6 +59,9 @@
|
||||
|
||||
// File to write the fetch into
|
||||
string DestFile;
|
||||
+ // Alternative temporary destination
|
||||
+ // Used if method (e.g. rsync) can't use directly DestFile
|
||||
+ string TmpFile;
|
||||
|
||||
// Action members invoked by the worker
|
||||
virtual void Failed(string Message,pkgAcquire::MethodConfig *Cnf);
|
||||
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/apt-pkg/acquire-method.cc apt-0.5.4cnc9.rsync/apt-pkg/acquire-method.cc
|
||||
--- apt-0.5.4cnc9.orig/apt-pkg/acquire-method.cc 2002-07-23 21:54:50 +0400
|
||||
+++ apt-0.5.4cnc9.rsync/apt-pkg/acquire-method.cc 2002-12-17 11:34:33 +0300
|
||||
@@ -135,7 +135,7 @@
|
||||
if (Queue == 0)
|
||||
abort();
|
||||
|
||||
- char S[1024] = "";
|
||||
+ char S[2048] = "";
|
||||
char *End = S;
|
||||
|
||||
End += snprintf(S,sizeof(S),"200 URI Start\nURI: %s\n",Queue->Uri.c_str());
|
||||
@@ -150,6 +150,10 @@
|
||||
End += snprintf(End,sizeof(S)-4 - (End - S),"Resume-Point: %lu\n",
|
||||
Res.ResumePoint);
|
||||
|
||||
+ if (!Res.TmpFilename.empty())
|
||||
+ End += snprintf(End,sizeof(S)-4 - (End - S),"Tmp-Filename: %s\n",
|
||||
+ Res.TmpFilename.c_str());
|
||||
+
|
||||
strcat(End,"\n");
|
||||
if (write(STDOUT_FILENO,S,strlen(S)) != (signed)strlen(S))
|
||||
exit(100);
|
||||
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/apt-pkg/acquire-method.h apt-0.5.4cnc9.rsync/apt-pkg/acquire-method.h
|
||||
--- apt-0.5.4cnc9.orig/apt-pkg/acquire-method.h 2002-07-23 21:54:50 +0400
|
||||
+++ apt-0.5.4cnc9.rsync/apt-pkg/acquire-method.h 2002-12-17 11:34:33 +0300
|
||||
@@ -47,6 +47,7 @@
|
||||
time_t LastModified;
|
||||
bool IMSHit;
|
||||
string Filename;
|
||||
+ string TmpFilename;
|
||||
unsigned long Size;
|
||||
unsigned long ResumePoint;
|
||||
|
||||
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/apt-pkg/acquire-worker.cc apt-0.5.4cnc9.rsync/apt-pkg/acquire-worker.cc
|
||||
--- apt-0.5.4cnc9.orig/apt-pkg/acquire-worker.cc 2002-07-23 21:54:50 +0400
|
||||
+++ apt-0.5.4cnc9.rsync/apt-pkg/acquire-worker.cc 2002-12-17 11:34:33 +0300
|
||||
@@ -235,6 +235,7 @@
|
||||
CurrentSize = 0;
|
||||
TotalSize = atoi(LookupTag(Message,"Size","0").c_str());
|
||||
ResumePoint = atoi(LookupTag(Message,"Resume-Point","0").c_str());
|
||||
+ Itm->Owner->TmpFile = LookupTag(Message,"Tmp-Filename");
|
||||
Itm->Owner->Start(Message,atoi(LookupTag(Message,"Size","0").c_str()));
|
||||
|
||||
// Display update before completion
|
||||
@@ -526,7 +527,11 @@
|
||||
return;
|
||||
|
||||
struct stat Buf;
|
||||
- if (stat(CurrentItem->Owner->DestFile.c_str(),&Buf) != 0)
|
||||
+ int res = 1;
|
||||
+ if (CurrentItem->Owner->TmpFile.empty()==false)
|
||||
+ res = stat(CurrentItem->Owner->TmpFile.c_str(),&Buf);
|
||||
+
|
||||
+ if (res!=0 && stat(CurrentItem->Owner->DestFile.c_str(),&Buf) != 0)
|
||||
return;
|
||||
CurrentSize = Buf.st_size;
|
||||
|
||||
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/methods/makefile apt-0.5.4cnc9.rsync/methods/makefile
|
||||
--- apt-0.5.4cnc9.orig/methods/makefile 2002-07-25 22:07:19 +0400
|
||||
+++ apt-0.5.4cnc9.rsync/methods/makefile 2002-12-17 11:34:33 +0300
|
||||
@@ -63,6 +63,13 @@
|
||||
SOURCE = gpg.cc
|
||||
include $(PROGRAM_H)
|
||||
|
||||
+# The rsync method
|
||||
+PROGRAM=rsync
|
||||
+SLIBS = -lapt-pkg $(SOCKETLIBS) $(RPMLIBS)
|
||||
+LIB_MAKES = apt-pkg/makefile
|
||||
+SOURCE = rsync.cc
|
||||
+include $(PROGRAM_H)
|
||||
+
|
||||
# SSH and vzip2 method symlink
|
||||
all: $(BIN)/ssh $(BIN)/bzip2
|
||||
veryclean: clean-$(BIN)/ssh clean-$(BIN)/bzip2
|
||||
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/methods/rsync.cc apt-0.5.4cnc9.rsync/methods/rsync.cc
|
||||
--- apt-0.5.4cnc9.orig/methods/rsync.cc 1970-01-01 03:00:00 +0300
|
||||
+++ apt-0.5.4cnc9.rsync/methods/rsync.cc 2002-12-17 13:25:56 +0300
|
||||
@@ -0,0 +1,468 @@
|
||||
+// -*- mode: cpp; mode: fold -*-
|
||||
+// Description /*{{{*/
|
||||
+// $Id$
|
||||
+/* ######################################################################
|
||||
+
|
||||
+RSYNC Aquire Method - This is the RSYNC aquire method for APT.
|
||||
+
|
||||
+##################################################################### */
|
||||
+/*}}}*/
|
||||
+// Include Files /*{{{*/
|
||||
+#include <apt-pkg/fileutl.h>
|
||||
+#include <apt-pkg/acquire-method.h>
|
||||
+#include <apt-pkg/error.h>
|
||||
+#include <apt-pkg/md5.h>
|
||||
+
|
||||
+#include <fcntl.h>
|
||||
+#include <sys/stat.h>
|
||||
+#include <sys/time.h>
|
||||
+#include <utime.h>
|
||||
+#include <unistd.h>
|
||||
+#include <stdlib.h>
|
||||
+#include <signal.h>
|
||||
+#include <stdio.h>
|
||||
+#include <errno.h>
|
||||
+#include <stdarg.h>
|
||||
+#include <sys/wait.h>
|
||||
+#include <iostream>
|
||||
+
|
||||
+// Internet stuff
|
||||
+#include <netinet/in.h>
|
||||
+#include <sys/socket.h>
|
||||
+#include <arpa/inet.h>
|
||||
+#include <netdb.h>
|
||||
+
|
||||
+#include "rfc2553emu.h"
|
||||
+#include "connect.h"
|
||||
+#include "rsync-method.h"
|
||||
+/*}}}*/
|
||||
+
|
||||
+RsyncMethod::RsyncConn *RsyncMethod::server = 0;
|
||||
+RsyncMethod::ConnType RsyncMethod::connType = RsyncMethod::ConnTypeExec;
|
||||
+bool RsyncMethod::Debug = false;
|
||||
+unsigned int RsyncMethod::Timeout = 0;
|
||||
+
|
||||
+/* Argv implementation */
|
||||
+Argv::Argv(int msize): max_size(msize), size(0)
|
||||
+{
|
||||
+ args = new char*[max_size];
|
||||
+ memset(args,0, max_size * sizeof(char*));
|
||||
+}
|
||||
+
|
||||
+Argv::~Argv()
|
||||
+{
|
||||
+ for (int i=0; i<size; i++)
|
||||
+ delete args[i];
|
||||
+ delete [] args;
|
||||
+}
|
||||
+
|
||||
+bool Argv::add(const char *arg)
|
||||
+{
|
||||
+ if (arg==0)
|
||||
+ return false;
|
||||
+ if ( size+1 >= max_size && !resize() ) {
|
||||
+ cerr << "Failed to resize" << endl;
|
||||
+ return false;
|
||||
+ }
|
||||
+ int len = strlen(arg);
|
||||
+ args[size] = new char[len+1];
|
||||
+ strncpy(args[size], arg, len+1);
|
||||
+ ++size;
|
||||
+ return true;
|
||||
+}
|
||||
+
|
||||
+bool Argv::resize()
|
||||
+{
|
||||
+ static const int increment = 5;
|
||||
+ char **new_args = new char *[max_size+increment];
|
||||
+ memcpy(new_args,args,size*sizeof(char*));
|
||||
+ memset(new_args+size,0, (max_size+increment-size) * sizeof(char*));
|
||||
+ args = new_args;
|
||||
+ max_size += increment;
|
||||
+ return true;
|
||||
+}
|
||||
+
|
||||
+Argv::operator string()
|
||||
+{
|
||||
+ string res;
|
||||
+ for (char **p=args; *p; p++)
|
||||
+ res += *p, res += " ";
|
||||
+ return res;
|
||||
+}
|
||||
+
|
||||
+/* RsyncConn implementation */
|
||||
+
|
||||
+/** Static buffer for RSYNC_PROXY variable */
|
||||
+char RsyncMethod::RsyncConn::proxy_value[1024];
|
||||
+
|
||||
+bool RsyncMethod::RsyncConn::initProxy()
|
||||
+{
|
||||
+ if ( proxy.empty() )
|
||||
+ return true;
|
||||
+ if ( proxy == "none" ) {
|
||||
+ unsetenv("RSYNC_PROXY");
|
||||
+ return true;
|
||||
+ }
|
||||
+ bool res = true;
|
||||
+ string var("RSYNC_PROXY=");
|
||||
+ var += proxy;
|
||||
+ strncpy(proxy_value, var.c_str(), sizeof(proxy_value) );
|
||||
+ if ( proxy_value[sizeof(proxy_value)-1]==0 ) {
|
||||
+ if ( putenv(proxy_value)!=0 ) {
|
||||
+ res = false;
|
||||
+ _error->Error("Failed to set RSYNC_PROXY: %s", proxy_value);
|
||||
+ }
|
||||
+ } else {
|
||||
+ res = false;
|
||||
+ _error->Error("Failed to set RSYNC_PROXY: not enough space in buffer");
|
||||
+ }
|
||||
+ return res;
|
||||
+}
|
||||
+
|
||||
+/* RsyncConnExec implementation */
|
||||
+
|
||||
+RsyncMethod::RsyncConnExec::RsyncConnExec(URI u, const string &_proxy, const string &prog)
|
||||
+ : RsyncConn(u,_proxy), ChildPid(-1), ChildFd(-1)
|
||||
+{
|
||||
+ program = prog.empty() ? RSYNC_PROGRAM : prog;
|
||||
+}
|
||||
+
|
||||
+RsyncMethod::RsyncConnExec::~RsyncConnExec()
|
||||
+{
|
||||
+ if ( ChildPid>0 ) {
|
||||
+ kill(ChildPid, SIGTERM);
|
||||
+ waitpid(ChildPid, 0, 0);
|
||||
+ ChildPid = -1;
|
||||
+ }
|
||||
+ if ( ChildFd>=0 )
|
||||
+ close(ChildFd);
|
||||
+}
|
||||
+
|
||||
+bool RsyncMethod::RsyncConnExec::WaitChild(pkgAcqMethod *Owner, FetchResult &FRes, const char *To)
|
||||
+{
|
||||
+ static const int buflen = 1024;
|
||||
+ static char buf[buflen+1];
|
||||
+ int saved = 0;
|
||||
+ int status = 0, res = 0;
|
||||
+ fd_set readfd;
|
||||
+ struct timeval tv;
|
||||
+ if ( RsyncMethod::Debug )
|
||||
+ cerr << "RSYNC: WaitChild: fd=" << ChildFd << endl;
|
||||
+
|
||||
+ while (1) {
|
||||
+ FD_ZERO(&readfd);
|
||||
+ FD_SET(ChildFd,&readfd);
|
||||
+ FD_SET(0,&readfd);
|
||||
+
|
||||
+ tv.tv_sec = 1;
|
||||
+ tv.tv_usec = 0;
|
||||
+ res = select(ChildFd+1, &readfd, 0, 0, &tv);
|
||||
+
|
||||
+ if (res>0) {
|
||||
+ if ( FD_ISSET(ChildFd,&readfd) ) {
|
||||
+ int len = read(ChildFd,buf+saved, buflen-saved);
|
||||
+ if (len>0) {
|
||||
+ // Split buffer into single-line strings
|
||||
+ // and pass each string to ParseOutput.
|
||||
+ // Strings, that are not terminated with '\n' will
|
||||
+ // be stored in buffer for later completion.
|
||||
+ buf[saved+len] = 0;
|
||||
+ int start = 0;
|
||||
+ for (int off=saved; off<len; off++) {
|
||||
+ if ( buf[off]=='\n' ) {
|
||||
+ buf[off] = 0;
|
||||
+ ParseOutput(Owner,FRes,buf+start);
|
||||
+ start = off+1;
|
||||
+ }
|
||||
+ }
|
||||
+ saved = saved+len-start;
|
||||
+ if ( saved==buflen ) {
|
||||
+ // Parse process output even if it was not terminated with '\n'
|
||||
+ // in case of full buffer (we can't read anything if there is
|
||||
+ // no free space in buffer).
|
||||
+ ParseOutput(Owner,FRes,buf);
|
||||
+ saved = 0;
|
||||
+ } else if ( saved>0 ) {
|
||||
+ if ( RsyncMethod::Debug )
|
||||
+ cerr << "RSYNC: Saved " << saved << " byted in buffer:"
|
||||
+ << endl << start << endl;
|
||||
+ // Move saved data to the beginning of the buffer
|
||||
+ // including trailing zero
|
||||
+ memmove(buf,buf+start,saved+1);
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ res = waitpid(ChildPid, &status, WNOHANG);
|
||||
+ if ((res>0 && WIFEXITED(status)) || res<0) {
|
||||
+ ChildPid = -1;
|
||||
+ if ( RsyncMethod::Debug )
|
||||
+ cerr << endl << "RSYNC: Closing ChildFd: " << ChildFd << endl;
|
||||
+ close(ChildFd);
|
||||
+ ChildFd = -1;
|
||||
+ // Parse end of process output if it was not terminated with '\n'
|
||||
+ if (saved>0)
|
||||
+ ParseOutput(Owner,FRes,buf);
|
||||
+ if (res < 0) {
|
||||
+ if ( RsyncMethod::Debug )
|
||||
+ cerr << endl << "RSYNC: Unknown status of child process " << ChildFd << endl;
|
||||
+ if (State == Done)
|
||||
+ return true;
|
||||
+ return false;
|
||||
+ }
|
||||
+ switch (WEXITSTATUS(status)) {
|
||||
+ case 0:
|
||||
+ return true;
|
||||
+ break;
|
||||
+ default:
|
||||
+ if ( State != Failed ) {
|
||||
+ State = Failed;
|
||||
+ _error->Error("rsync process terminated with exit code %d", WEXITSTATUS(status));
|
||||
+ }
|
||||
+ return false;
|
||||
+ break;
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ return false;
|
||||
+}
|
||||
+
|
||||
+void RsyncMethod::RsyncConnExec::ParseOutput(pkgAcqMethod *Owner, FetchResult &FRes, const char *buf)
|
||||
+{
|
||||
+ static const char * TMPFN = "Tmp-Filename: ";
|
||||
+ static const char * SIZE = "Size: ";
|
||||
+ static const char * START = "Start: ";
|
||||
+ static const char * DONE = "Done: ";
|
||||
+ static const char * FAILED= "Failed: ";
|
||||
+ const char * ptr;
|
||||
+
|
||||
+ //if ( RsyncMethod::Debug )
|
||||
+ // cerr << "ParseOutput: " << buf << endl;
|
||||
+
|
||||
+ ptr = strstr(buf,TMPFN);
|
||||
+ if (ptr) {
|
||||
+ ptr += strlen(TMPFN);
|
||||
+ const char *ptr2 = ptr;
|
||||
+ while (*ptr2!=0 && !isspace(*ptr2))
|
||||
+ ++ptr2;
|
||||
+ if (ptr!=ptr2) {
|
||||
+ char *tmpfn = new char[ptr2-ptr+1];
|
||||
+ bzero(tmpfn, ptr2-ptr+1);
|
||||
+ strncpy(tmpfn, ptr, ptr2-ptr);
|
||||
+ if (RsyncMethod::Debug)
|
||||
+ cerr << endl << "RSYNC: " << TMPFN << tmpfn << endl;
|
||||
+ FRes.TmpFilename = string(tmpfn);
|
||||
+ delete [] tmpfn;
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ ptr = strstr(buf,SIZE);
|
||||
+ if (ptr) {
|
||||
+ ptr += strlen(SIZE);
|
||||
+ unsigned long size = atol(ptr);
|
||||
+ if (RsyncMethod::Debug)
|
||||
+ cerr << "RSYNC: " << SIZE << size << endl;
|
||||
+ FRes.Size = size;
|
||||
+ }
|
||||
+
|
||||
+ ptr = strstr(buf,START);
|
||||
+ if (ptr) {
|
||||
+ State = Fetching;
|
||||
+ dynamic_cast<RsyncMethod*>(Owner)->Start(FRes);
|
||||
+ }
|
||||
+
|
||||
+ ptr = strstr(buf,DONE);
|
||||
+ if (ptr)
|
||||
+ State = Done;
|
||||
+
|
||||
+ ptr = strstr(buf,FAILED);
|
||||
+ if (ptr) {
|
||||
+ State = Failed;
|
||||
+ ptr += strlen(FAILED);
|
||||
+ const char *ptr2 = ptr;
|
||||
+ while (*ptr2!=0 && *ptr2!='\n')
|
||||
+ ++ptr2;
|
||||
+ if (ptr!=ptr2) {
|
||||
+ char *tmp = new char[ptr2-ptr+1];
|
||||
+ bzero(tmp, ptr2-ptr+1);
|
||||
+ strncpy(tmp, ptr, ptr2-ptr);
|
||||
+ _error->Error("%s",tmp);
|
||||
+ if (RsyncMethod::Debug)
|
||||
+ cerr << endl << FAILED << tmp << endl;
|
||||
+ delete [] tmp;
|
||||
+ } else {
|
||||
+ _error->Error("Child process failed (no description)");
|
||||
+ }
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+bool RsyncMethod::RsyncConnExec::Get(pkgAcqMethod *Owner, FetchResult &FRes, const char *From, const char *To)
|
||||
+{
|
||||
+ int p[2];
|
||||
+ int res = 0;
|
||||
+ Argv argv(10);
|
||||
+
|
||||
+ State = Starting;
|
||||
+ if ( RsyncMethod::Debug )
|
||||
+ cerr << "RSYNC: Get: " << From << endl;
|
||||
+
|
||||
+ argv.add(program.c_str());
|
||||
+ argv.add("-Lpt");
|
||||
+ argv.add("--partial");
|
||||
+ argv.add("--apt-support");
|
||||
+ if (RsyncMethod::Timeout>0) {
|
||||
+ argv.add("--timeout");
|
||||
+ char S[12];
+ snprintf(S,sizeof(S),"%u",RsyncMethod::Timeout);
|
||||
+ argv.add(S);
|
||||
+ }
|
||||
+ // Add optional user-defined options to command line
|
||||
+ Configuration::Item const *Itm = _config->Tree("Acquire::rsync::options");
|
||||
+ if (Itm != 0 && Itm->Child != 0) {
|
||||
+ Itm = Itm->Child;
|
||||
+ while (Itm != 0) {
|
||||
+ if (Itm->Value.empty() == false)
|
||||
+ argv.add(Itm->Value.c_str());
|
||||
+ Itm = Itm->Next;
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ char port[12];
|
||||
+ if (srv.Port!=0)
|
||||
+ snprintf(port, sizeof(port), ":%u", srv.Port);
|
||||
+ else port[0] = 0;
|
||||
+ argv.add( "rsync://" + srv.Host + port + From);
|
||||
+ argv.add(To);
|
||||
+
|
||||
+ if ( pipe(p) ) {
|
||||
+ _error->Error("RSYNC: RsyncConnExec: Can't create pipe");
|
||||
+ return false;
|
||||
+ }
|
||||
+ if ( RsyncMethod::Debug )
|
||||
+ cerr << "RSYNC: Created pipe [" << p[0] << ',' << p[1] << ']' << endl;
|
||||
+ if ( RsyncMethod::Debug )
|
||||
+ cerr << "RSYNC: Starting: " << string(argv) << endl;
|
||||
+
|
||||
+ switch ( ChildPid = fork() ) {
|
||||
+ case -1:
|
||||
+ _error->Error("RsyncConnExec: Can't fork");
|
||||
+ return false;
|
||||
+ break;
|
||||
+ case 0:
|
||||
+ // Child process
|
||||
+ initProxy();
|
||||
+ //if ( RsyncMethod::Debug )
|
||||
+ // cerr << endl << "RSYNC_PROXY(" << srv.Host << "): " << getenv("RSYNC_PROXY") << endl;
|
||||
+ close(p[0]);
|
||||
+ res = dup2(p[1], STDOUT_FILENO);
|
||||
+ if (res==-1) {
|
||||
+ cout << "Failed: " << "Can't dup2(p[1], STDOUT_FILENO)" << endl;
|
||||
+ exit(100);
|
||||
+ }
|
||||
+ res = dup2(p[1], STDERR_FILENO);
|
||||
+ if (res==-1) {
|
||||
+ cout << "Failed: " << "Can't dup2(p[1], STDERR_FILENO)" << endl;
|
||||
+ exit(100);
|
||||
+ }
|
||||
+
|
||||
+ close(p[1]);
|
||||
+ execve(program.c_str(), argv, environ);
|
||||
+ cout << "Failed: " << "Can not execute " << program << endl;
|
||||
+ exit(100);
|
||||
+ break;
|
||||
+ default:
|
||||
+ // Parent process
|
||||
+ close(p[1]);
|
||||
+ ChildFd = p[0];
|
||||
+ return WaitChild(Owner,FRes,To);
|
||||
+ }
|
||||
+ return false;
|
||||
+}
|
||||
+
|
||||
+// RsyncMethod::RsyncMethod - Constructor /*{{{*/
|
||||
+// ---------------------------------------------------------------------
|
||||
+/* */
|
||||
+RsyncMethod::RsyncMethod() : pkgAcqMethod("1.0",SendConfig),
|
||||
+ RsyncProg(RSYNC_PROGRAM)
|
||||
+{
|
||||
+ signal(SIGTERM,SigTerm);
|
||||
+ signal(SIGINT,SigTerm);
|
||||
+}
|
||||
+/*}}}*/
|
||||
+// RsyncMethod::SigTerm - Handle a fatal signal /*{{{*/
|
||||
+// ---------------------------------------------------------------------
|
||||
+/* Delete existing server connection */
|
||||
+void RsyncMethod::SigTerm(int)
|
||||
+{
|
||||
+ delete server;
|
||||
+ _exit(100);
|
||||
+}
|
||||
+/*}}}*/
|
||||
+// RsyncMethod::Configuration - Handle a configuration message /*{{{*/
|
||||
+// ---------------------------------------------------------------------
|
||||
+/* We stash the desired pipeline depth */
|
||||
+bool RsyncMethod::Configuration(string Message)
|
||||
+{
|
||||
+ if (pkgAcqMethod::Configuration(Message) == false)
|
||||
+ return false;
|
||||
+
|
||||
+ Debug = _config->FindB("Debug::rsync",false);
|
||||
+ Timeout = _config->FindI("Acquire::rsync::Timeout",0);
|
||||
+ RsyncProg = _config->Find("Acquire::rsync::program",RSYNC_PROGRAM);
|
||||
+ return true;
|
||||
+}
|
||||
+/*}}}*/
|
||||
+// RsyncMethod::Fetch - Fetch a file /*{{{*/
|
||||
+// ---------------------------------------------------------------------
|
||||
+/* Fetch a single file, called by the base class.. */
|
||||
+bool RsyncMethod::Fetch(FetchItem *Itm)
|
||||
+{
|
||||
+ URI Get = Itm->Uri;
|
||||
+ const char *File = Get.Path.c_str();
|
||||
+ FetchResult Res;
|
||||
+ Res.Filename = Itm->DestFile;
|
||||
+ Res.IMSHit = false;
|
||||
+
|
||||
+ struct stat st;
|
||||
+ if ( stat(Itm->DestFile.c_str(), &st)==0 ) {
|
||||
+ Res.ResumePoint = st.st_size;
|
||||
+ }
|
||||
+
|
||||
+ string proxy = _config->Find(string("Acquire::rsync::proxy::")+Get.Host);
|
||||
+ if ( proxy.empty() )
|
||||
+ proxy = _config->Find("Acquire::rsync::proxy");
|
||||
+
|
||||
+ if (Debug)
|
||||
+ cerr << endl << "RSYNC: Proxy(" << Get.Host << "): " << proxy << endl;
|
||||
+
|
||||
+ // No connection caching yet: recreate the connection even if the server URI is unchanged
|
||||
+ delete server;
|
||||
+ server = new RsyncConnExec(Get,proxy,RsyncProg);
|
||||
+
|
||||
+ if ( server->Get(this,Res,File,Itm->DestFile.c_str()) ) {
|
||||
+ if ( stat(Itm->DestFile.c_str(), &st)==0 ) {
|
||||
+ Res.Size = st.st_size;
|
||||
+ // Calculating MD5
|
||||
+ //
|
||||
+ int fd = open(Itm->DestFile.c_str(), O_RDONLY);
|
||||
+ if (fd>=0) {
|
||||
+ MD5Summation md5;
|
||||
+ md5.AddFD(fd,st.st_size);
|
||||
+ Res.MD5Sum = md5.Result();
|
||||
+ close(fd);
|
||||
+ }
|
||||
+ }
|
||||
+ URIDone(Res);
|
||||
+ return true;
|
||||
+ }
|
||||
+ Fail(true);
|
||||
+ return false;
|
||||
+}
|
||||
+/*}}}*/
|
||||
+
|
||||
+int main(int argc,const char *argv[])
|
||||
+{
|
||||
+ RsyncMethod Mth;
|
||||
+
|
||||
+ return Mth.Run();
|
||||
+}
|
||||
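Note on WaitChild() above: rsync's output arrives over the pipe in arbitrary chunks, so the method buffers it and hands only complete, '\n'-terminated lines to ParseOutput(), carrying any partial tail over to the next read. The fragment below is a stand-alone illustration of that buffering idea only; it is not part of the patch and the names are made up.

#include <unistd.h>
#include <cstring>

// Read everything from 'fd' and call 'handle' once per complete line.
// A partial line is kept at the front of 'buf' until more data arrives,
// mirroring the 'saved' bookkeeping in RsyncConnExec::WaitChild().
static void ReadLines(int fd, void (*handle)(const char *line))
{
   char buf[1024 + 1];
   size_t saved = 0;
   ssize_t len;
   while ((len = read(fd, buf + saved, sizeof(buf) - 1 - saved)) > 0)
   {
      buf[saved + len] = '\0';
      size_t start = 0;
      for (size_t off = saved; off < saved + (size_t)len; ++off)
         if (buf[off] == '\n')
         {
            buf[off] = '\0';
            handle(buf + start);
            start = off + 1;
         }
      saved = saved + len - start;
      if (saved == sizeof(buf) - 1)
      {
         handle(buf);              // buffer full without a newline: flush as-is
         saved = 0;
      }
      else if (saved > 0)
         memmove(buf, buf + start, saved + 1);   // keep the partial line (and its NUL)
   }
   if (saved > 0)
      handle(buf);                 // trailing data without a final '\n'
}

Flushing a full buffer even without a newline matches the behaviour of the loop above, which otherwise could not make progress once the buffer fills up.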
diff -urN -X apt.excludes apt-0.5.4cnc9.orig/methods/rsync-method.h apt-0.5.4cnc9.rsync/methods/rsync-method.h
|
||||
--- apt-0.5.4cnc9.orig/methods/rsync-method.h 1970-01-01 03:00:00 +0300
|
||||
+++ apt-0.5.4cnc9.rsync/methods/rsync-method.h 2002-12-17 11:34:33 +0300
|
||||
@@ -0,0 +1,102 @@
|
||||
+// -*- mode: cpp; mode: fold -*-
|
||||
+// Description /*{{{*/
|
||||
+// $Id$
|
||||
+/* ######################################################################
|
||||
+
|
||||
+RSYNC Acquire Method - This is the RSYNC acquire method for APT.
|
||||
+
|
||||
+##################################################################### */
|
||||
+/*}}}*/
|
||||
+#ifndef APT_RSYNC_H
|
||||
+#define APT_RSYNC_H
|
||||
+
|
||||
+using namespace std;
|
||||
+
|
||||
+static const char * RSYNC_PROGRAM = "/usr/bin/rsync";
|
||||
+
|
||||
+class Argv
|
||||
+{
|
||||
+ int max_size;
|
||||
+ int size;
|
||||
+ char **args;
|
||||
+
|
||||
+ public:
|
||||
+ Argv(int msize);
|
||||
+ ~Argv();
|
||||
+
|
||||
+ bool add(const char *arg);
|
||||
+ bool add(const string &arg) { return add( arg.c_str()); }
|
||||
+ bool resize();
|
||||
+ int getSize() { return size; }
|
||||
+ operator char**() { return args; }
|
||||
+ operator string();
|
||||
+};
|
||||
+
|
||||
+
|
||||
+class RsyncMethod : public pkgAcqMethod
|
||||
+{
|
||||
+ protected:
|
||||
+ enum ConnType {ConnTypeExec, ConnTypeProto};
|
||||
+
|
||||
+ class RsyncConn
|
||||
+ {
|
||||
+ public:
|
||||
+ enum ConnState {Idle,Starting,Connecting,Fetching,Failed,Done};
|
||||
+
|
||||
+ protected:
|
||||
+ URI srv;
|
||||
+ const string proxy;
|
||||
+ ConnState State;
|
||||
+
|
||||
+ static char proxy_value[1024];
|
||||
+ bool initProxy();
|
||||
+
|
||||
+ public:
|
||||
+ RsyncConn(URI u, const string &_proxy = ""): srv(u), proxy(_proxy) {}
|
||||
+ virtual ~RsyncConn() {}
|
||||
+
|
||||
+ virtual bool Get(pkgAcqMethod *Owner, FetchResult &FRes, const char *From, const char *To) = 0;
|
||||
+ };
|
||||
+
|
||||
+ class RsyncConnExec: public RsyncConn
|
||||
+ {
|
||||
+ // pid of child process
|
||||
+ pid_t ChildPid;
|
||||
+ // output of child process (stdout&stderr)
|
||||
+ int ChildFd;
|
||||
+ // Program to execute
|
||||
+ string program;
|
||||
+
|
||||
+ protected:
|
||||
+ bool WaitChild(pkgAcqMethod *Owner, FetchResult &FRes, const char *To);
|
||||
+ void ParseOutput(pkgAcqMethod *Owner, FetchResult &FRes, const char *buf);
|
||||
+
|
||||
+ public:
|
||||
+ RsyncConnExec(URI u, const string &_proxy, const string &prog);
|
||||
+ virtual ~RsyncConnExec();
|
||||
+
|
||||
+ virtual bool Get(pkgAcqMethod *Owner, FetchResult &FRes, const char *From, const char *To);
|
||||
+ };
|
||||
+
|
||||
+ static RsyncConn *server;
|
||||
+ static ConnType connType;
|
||||
+ static bool Debug;
|
||||
+ static unsigned int Timeout;
|
||||
+
|
||||
+ string RsyncProg;
|
||||
+
|
||||
+ static void SigTerm(int);
|
||||
+
|
||||
+ protected:
|
||||
+ virtual bool Fetch(FetchItem *Itm);
|
||||
+ virtual bool Configuration(string Message);
|
||||
+
|
||||
+ void Start(FetchResult &FRes)
|
||||
+ { URIStart(FRes); }
|
||||
+
|
||||
+ public:
|
||||
+
|
||||
+ RsyncMethod();
|
||||
+};
|
||||
+
|
||||
+#endif
|
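A note on the static proxy_value[] buffer declared above: putenv(3) keeps a pointer to the string it is given, which is why initProxy() in rsync-method.cc copies the value into static storage first. A shorter equivalent is sketched below for illustration only (it is not what the patch does); setenv(3) makes its own copy, so no buffer management is needed.

#include <cstdlib>
#include <string>

// Hypothetical alternative to RsyncConn::initProxy(): setenv(3) copies
// its argument, so no static buffer is required.
static bool SetRsyncProxy(const std::string &proxy)
{
   if (proxy.empty())
      return true;                              // leave the environment untouched
   if (proxy == "none")
      return unsetenv("RSYNC_PROXY") == 0;      // direct connection, as in initProxy()
   return setenv("RSYNC_PROXY", proxy.c_str(), 1) == 0;
}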
apt-0.5.4cnc9-alt-specialchars.patch (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
2002-12-20 Anton V. Denisov <avd@altlinux.org>
|
||||
|
||||
* cmdline/apt-cdrom.cc (DoAdd):
|
||||
Simple workaround for ParseQuoteWord(Buffer,URI) == false
if the URI contains the '#' character.
ParseQuoteWord() needs to be tested.
|
||||
|
||||
--- apt-0.5.4cnc9.orig/cmdline/apt-cdrom.cc 2002-12-14 14:35:00 +1200
|
||||
+++ apt-0.5.4cnc9/cmdline/apt-cdrom.cc 2002-12-20 00:17:51 +1200
|
||||
@@ -631,7 +631,7 @@ bool DoAdd(CommandLine &)
|
||||
// Escape special characters
|
||||
string::iterator J = Name.begin();
|
||||
for (; J != Name.end(); J++)
|
||||
- if (*J == '"' || *J == ']' || *J == '[')
|
||||
+ if (*J == '"' || *J == ']' || *J == '[' || *J == '#')
|
||||
*J = '_';
|
||||
|
||||
cout << "Found label '" << Name << "'" << endl;
|
||||
@@ -650,7 +650,8 @@ bool DoAdd(CommandLine &)
|
||||
if (Name.empty() == false &&
|
||||
Name.find('"') == string::npos &&
|
||||
Name.find('[') == string::npos &&
|
||||
- Name.find(']') == string::npos)
|
||||
+ Name.find(']') == string::npos &&
|
||||
+ Name.find('#') == string::npos)
|
||||
break;
|
||||
cout << "That is not a valid name, try again " << endl;
|
||||
}
|
||||
@@ -662,7 +663,7 @@ bool DoAdd(CommandLine &)
|
||||
// Escape special characters
|
||||
string::iterator J = Name.begin();
|
||||
for (; J != Name.end(); J++)
|
||||
- if (*J == '"' || *J == ']' || *J == '[')
|
||||
+ if (*J == '"' || *J == ']' || *J == '[' || *J == '#')
|
||||
*J = '_';
|
||||
|
||||
Database.Set("CD::" + ID,Name);
|
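Both hunks above apply the same one-character change to the two places where apt-cdrom escapes a disc label. A stand-alone equivalent of that escaping loop (the helper name is made up):

#include <string>

// Replace the characters that confuse ParseQuoteWord(), now including '#',
// with '_', which is the substitution the patched loops perform.
static std::string EscapeLabel(std::string Name)
{
   for (std::string::size_type I = 0; I != Name.size(); ++I)
      if (Name[I] == '"' || Name[I] == '[' || Name[I] == ']' || Name[I] == '#')
         Name[I] = '_';
   return Name;
}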
apt-0.5.5cnc1-alt-APT_DOMAIN.patch (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
--- apt-0.5.5cnc1/apt-pkg/init.cc.orig 2003-02-07 19:14:18 +0300
|
||||
+++ apt-0.5.5cnc1/apt-pkg/init.cc 2003-02-07 19:26:23 +0300
|
||||
@@ -98,7 +98,9 @@
|
||||
|
||||
if (Cnf.Exists("Dir::Locale"))
|
||||
{
|
||||
+#ifdef APT_DOMAIN
|
||||
bindtextdomain(APT_DOMAIN,Cnf.FindDir("Dir::Locale").c_str());
|
||||
+#endif
|
||||
bindtextdomain(textdomain(0),Cnf.FindDir("Dir::Locale").c_str());
|
||||
}
|
||||
|
apt-0.5.5cnc1-alt-debsystem.patch (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
--- apt-0.5.5cnc1/apt-pkg/deb/debsystem.cc.orig 2002-07-23 21:54:51 +0400
|
||||
+++ apt-0.5.5cnc1/apt-pkg/deb/debsystem.cc 2003-02-13 16:05:56 +0300
|
||||
@@ -185,6 +185,8 @@
|
||||
signed debSystem::Score(Configuration const &Cnf)
|
||||
{
|
||||
signed Score = 0;
|
||||
+ if (Cnf.FindB("APT::Ignore-dpkg",true) == true)
|
||||
+ return Score;
|
||||
if (FileExists(Cnf.FindFile("Dir::State::status","/var/lib/dpkg/status")) == true)
|
||||
Score += 10;
|
||||
if (FileExists(Cnf.FindFile("Dir::Bin::dpkg","/usr/bin/dpkg")) == true)
|
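The hunk above makes debSystem::Score() bail out with 0 whenever APT::Ignore-dpkg is set, so neither /var/lib/dpkg/status nor the dpkg binary can promote the dpkg backend any more. Since the guard reads the runtime configuration, the behaviour can be toggled without rebuilding; a sketch of doing so programmatically (the helper name is hypothetical):

#include <apt-pkg/configuration.h>

// Re-enable (or disable) dpkg detection at run time.  FindB() in the hunk
// above defaults to 'true', i.e. dpkg scoring is skipped unless the flag
// is explicitly cleared.
void AllowDpkgDetection(bool Allow)
{
   _config->Set("APT::Ignore-dpkg", Allow ? "false" : "true");
}

From the command line the same effect is available through the usual -o override, e.g. apt-get -o APT::Ignore-dpkg=false.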
apt-0.5.5cnc1-alt-distro.patch (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
--- apt-0.5.5cnc1/apt-pkg/rpm/rpmsystem.cc.orig 2002-11-25 21:25:28 +0300
|
||||
+++ apt-0.5.5cnc1/apt-pkg/rpm/rpmsystem.cc 2003-02-07 18:13:09 +0300
|
||||
@@ -125,7 +125,7 @@
|
||||
Cnf.CndSet("Dir::Etc::translateparts", "translate.list.d");
|
||||
Cnf.CndSet("Dir::State::prefetch", "prefetch");
|
||||
Cnf.CndSet("Dir::Locale","/usr/share/locale");
|
||||
- Cnf.CndSet("Acquire::DistroID","Conectiva"); // hee hee
|
||||
+ Cnf.CndSet("Acquire::DistroID","ALT Linux"); // hee hee
|
||||
Cnf.CndSet("Acquire::CDROM::Mount", "/mnt/cdrom");
|
||||
Cnf.CndSet("Acquire::CDROM::Copy-All", "true");
|
||||
|
||||
--- apt-0.5.5cnc1/cmdline/apt-cdrom.cc.orig 2003-01-29 18:11:14 +0300
|
||||
+++ apt-0.5.5cnc1/cmdline/apt-cdrom.cc 2003-02-07 18:11:58 +0300
|
||||
@@ -668,7 +668,7 @@
|
||||
Name.empty() == true)
|
||||
{
|
||||
// CNC:2002-07-11
|
||||
- cout << "Please provide a name for this Disc, such as 'Conectiva Disk 1'";
|
||||
+ cout << "Please provide a name for this Disc, such as 'ALT Linux Disk 1'";
|
||||
while (1)
|
||||
{
|
||||
Name = PromptLine("");
|
||||
--- apt-0.5.5cnc1/methods/ftp.cc.orig 2002-07-25 22:07:19 +0400
|
||||
+++ apt-0.5.5cnc1/methods/ftp.cc 2003-01-29 15:46:54 +0300
|
||||
@@ -185,7 +185,7 @@ bool FTPConn::Login()
|
||||
|
||||
// Setup the variables needed for authentication
|
||||
string User = "anonymous";
|
||||
- string Pass = "apt_get_ftp_2.1@debian.linux.user";
|
||||
+ string Pass = "apt_get_ftp_2.1@alt.linux.user";
|
||||
|
||||
// Fill in the user/pass
|
||||
if (ServerName.User.empty() == false)
|
apt-0.5.5cnc1-alt-methods_gpg_homedir.patch (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
--- apt-0.5.5cnc1/methods/gpg.cc.orig 2003-01-31 16:58:32 +0300
|
||||
+++ apt-0.5.5cnc1/methods/gpg.cc 2003-02-07 19:09:05 +0300
|
||||
@@ -195,7 +195,7 @@ char *getFileSigner(const char *file, co
|
||||
else if (pid == 0)
|
||||
{
|
||||
string path = _config->Find("Dir::Bin::gpg", "/usr/bin/gpg");
|
||||
- string pubring = "";
|
||||
+ string homedir = "";
|
||||
const char *argv[16];
|
||||
int argc = 0;
|
||||
|
||||
@@ -206,17 +206,16 @@ char *getFileSigner(const char *file, co
|
||||
dup2(fd[1], STDERR_FILENO);
|
||||
|
||||
unsetenv("LANG");
|
||||
+ unsetenv("LANGUAGE");
|
||||
unsetenv("LC_ALL");
|
||||
unsetenv("LC_MESSAGES");
|
||||
+ unsetenv("LC_CTYPE");
|
||||
|
||||
argv[argc++] = "gpg";
|
||||
argv[argc++] = "--batch";
|
||||
argv[argc++] = "--no-secmem-warning";
|
||||
- pubring = _config->Find("APT::GPG::Pubring");
|
||||
- if (pubring.empty() == false)
|
||||
- {
|
||||
- argv[argc++] = "--keyring"; argv[argc++] = pubring.c_str();
|
||||
- }
|
||||
+ homedir = _config->Find("APT::GPG::Homedir", "/usr/lib/alt-gpgkeys");
|
||||
+ argv[argc++] = "--homedir"; argv[argc++] = homedir.c_str();
|
||||
argv[argc++] = "--status-fd"; argv[argc++] = "2";
|
||||
|
||||
if (outfile != NULL)
|
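With this change the signature check no longer takes a single keyring via APT::GPG::Pubring; gpg is pointed at a whole GnuPG home directory instead. Pieced together from the hunk above, the argument vector getFileSigner() now builds starts roughly like this (the trailing file arguments are added later by getFileSigner() and are not shown):

const char *Argv[] = {
   "gpg", "--batch", "--no-secmem-warning",
   "--homedir", "/usr/lib/alt-gpgkeys",   // default for APT::GPG::Homedir
   "--status-fd", "2",
   /* ...file arguments appended by getFileSigner()... */
   0
};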
apt-0.5.5cnc1-alt-pkgpriorities.patch (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
--- apt-0.5.5cnc1/apt-pkg/rpm/rpmpackagedata.cc.orig 2003-01-29 16:52:32 +0300
|
||||
+++ apt-0.5.5cnc1/apt-pkg/rpm/rpmpackagedata.cc 2003-02-07 18:31:11 +0300
|
||||
@@ -16,7 +16,7 @@
|
||||
RPMPackageData::RPMPackageData()
|
||||
{
|
||||
// Populate priorities
|
||||
- string FileName = _config->FindFile("Dir::Etc::rpmpriorities");
|
||||
+ string FileName = _config->FindFile("Dir::Etc") + "pkgpriorities";
|
||||
FileFd F(FileName, FileFd::ReadOnly);
|
||||
if (_error->PendingError())
|
||||
{
|
apt-0.5.5cnc1-mattdm-manbuild.patch (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
--- apt-0.5.5cnc1_rc2/buildlib/environment.mak.in.orig 2003-02-02 12:34:15.000000000 -0500
|
||||
+++ apt-0.5.5cnc1_rc2/buildlib/environment.mak.in 2003-02-02 12:35:17.000000000 -0500
|
||||
@@ -35,7 +35,7 @@
|
||||
# SGML for the man pages
|
||||
NSGMLS = @NSGMLS@
|
||||
SGMLSPL = @SGMLSPL@
|
||||
-DOCBOOK2MAN := $(wildcard /usr/lib/perl5/sgmlspl-specs/docbook2man-spec.pl)
|
||||
+DOCBOOK2MAN := $(wildcard /usr/share/sgml/docbook/utils-*/helpers/docbook2man-spec.pl)
|
||||
|
||||
# Gettext settings
|
||||
GMSGFMT = @GMSGFMT@
|
@ -1,7 +1,5 @@
|
||||
.\" This manpage is copyright (C) 1998 Branden Robinson <branden@debian.org>.
|
||||
.\"
|
||||
.\" Updated for Conectiva by Alfredo K. Kojima <kojima@conectiva.com.br>.
|
||||
.\"
|
||||
.\" This is free software; you may redistribute it and/or modify
|
||||
.\" it under the terms of the GNU General Public License as
|
||||
.\" published by the Free Software Foundation; either version 2,
|
||||
@ -16,7 +14,7 @@
|
||||
.\" License along with APT; if not, write to the Free Software
|
||||
.\" Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
|
||||
.\" 02111-1307 USA
|
||||
.TH apt 8 "25 Oct 2000" "Debian GNU/Linux"
|
||||
.TH apt 8 "16 June 1998" "Debian GNU/Linux"
|
||||
.SH NAME
|
||||
apt \- Advanced Package Tool
|
||||
.SH SYNOPSIS
|
||||
@ -33,23 +31,20 @@ None.
|
||||
.SH SEE ALSO
|
||||
.BR apt-cache (8),
|
||||
.BR apt-get (8),
|
||||
.BR apt.conf (5),
|
||||
.BR sources.list (5)
|
||||
.SH DIAGNOSTICS
|
||||
apt returns zero on normal operation, decimal 100 on error.
|
||||
.SH BUGS
|
||||
This manpage isn't even started.
|
||||
.PP
|
||||
See <http://www.debian.org/Bugs/db/pa/lapt.html>. If you wish to report a
|
||||
See <http://bugs.debian.org/apt>. If you wish to report a
|
||||
bug in
|
||||
.BR apt ,
|
||||
please see
|
||||
.I /usr/doc/debian/bug-reporting.txt
|
||||
.I /usr/share/doc/debian/bug-reporting.txt
|
||||
or the
|
||||
.BR bug (1)
|
||||
command. If you are using apt on a RPM based
|
||||
system, please use http://distro\&.conectiva\&.com\&.br/bugzilla/\&.
|
||||
|
||||
command.
|
||||
.SH AUTHOR
|
||||
apt was written by the APT team <apt@packages\&.debian\&.org>
|
||||
and updated for RPM based systems by
|
||||
Conectiva S.A. <kojima@conectiva\&.com\&.br>
|
||||
apt was written by the APT team <apt@packages.debian.org>.
|
apt.conf (38 lines)
@@ -3,34 +3,50 @@ Dir
|
||||
Bin
|
||||
{
|
||||
Methods "/usr/lib/apt";
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
APT
|
||||
{
|
||||
Get
|
||||
{
|
||||
Show-Upgraded "true";
|
||||
}
|
||||
};
|
||||
|
||||
GPG
|
||||
{
|
||||
PubringPath "/usr/lib/rpm/gnupg";
|
||||
}
|
||||
Pubring "/usr/lib/alt-gpgkeys/pubring.gpg";
|
||||
};
|
||||
|
||||
Architecture "i586";
|
||||
}
|
||||
};
|
||||
|
||||
Debug
|
||||
{
|
||||
Acquire::cdrom "false";
|
||||
Acquire::Ftp "false";
|
||||
Acquire::http "false";
|
||||
aptcdrom "false";
|
||||
NoLocking "false";
|
||||
pkgRPMPM "false";
|
||||
}
|
||||
pkgAcquire::Auth "false";
|
||||
pkgAcquire::Worker "false";
|
||||
pkgAcquire "false";
|
||||
pkgProblemResolver "false";
|
||||
pkgInitConfig "false";
|
||||
pkgOrderList "false";
|
||||
pkgPackageManager "false";
|
||||
pkgPolicy "false";
|
||||
identcdrom "false";
|
||||
};
|
||||
|
||||
Acquire::CDROM::Copy "true";
|
||||
Acquire::CDROM::mount "/mnt/cdrom";
|
||||
|
||||
RPM
|
||||
{
|
||||
Options "-vv";
|
||||
}
|
||||
|
||||
|
||||
Allow-Duplicated {"^(NVIDIA_)?(kernel|alsa)[0-9]*(-adv|-linus)?($|-up|-smp|-secure|-custom|-enterprise|-BOOT|-tape|-aureal)";};
|
||||
Hold {"^(kernel|alsa)[0-9]*-source";};
|
||||
Interactive "true";
|
||||
Order "true";
|
||||
};
|
||||
|
apt/.cvsignore (new file, 1 line)
@@ -0,0 +1 @@
|
||||
build
|
apt/AUTHORS (19 lines)
@@ -1,19 +1,16 @@
|
||||
The project hierachy stands at:
|
||||
|
||||
CVS:jgg Jason Gunthorpe <jgg@debian.org>
|
||||
- Project leader
|
||||
|
||||
CVS:srivasta Manoj Srivastava <srivasta@datasync.com>
|
||||
- Dependency Expert
|
||||
- The Mad Cow incarnate
|
||||
|
||||
CVS:che Ben Gertzfield <che@debian.org>
|
||||
- Packaging and Releases
|
||||
|
||||
CVS:branden Branden Robinson <branden@purdue.edu>
|
||||
- Man Page Documentation
|
||||
CVS:bod Brendan O'Dea <bod@debian.org>
|
||||
- Perl Bindings
|
||||
|
||||
CVS:doogie Adam Heath <doogie@debian.org>
|
||||
- FTP method author
|
||||
CVS:tausq Randolph Chung <tausq@debian.org>
|
||||
- Patches, Fixes, Debugging, GUIs and Releases
|
||||
|
||||
Past Contributures:
|
||||
|
||||
@ -21,8 +18,12 @@ Brian White <bcwhite@verisim.com> - Project originator
|
||||
Tom Lees <tom@lpsg.demon.co.uk> - DPKG documentation and ideas
|
||||
Behan Webster <behanw@verisim.com> - Original GUI design
|
||||
Scott Ellis <storm@gate.net> - Original packaging and beta releases
|
||||
Branden Robinson <branden@purdue.edu> - Man Page Documentation
|
||||
Manoj Srivastava <srivasta@datasync.com> - 1st Generation FTP method and
|
||||
dselect setup script
|
||||
Adam Heath <doogie@debian.org> - 2nd Generation FTP method author
|
||||
Ben Collins <bcollins@debian.org> - Initial RSH method
|
||||
Many other bug reports through the Debian Bug system
|
||||
|
||||
NOTE: The ChangeLog generator will parse for names and email addresses. The
|
||||
'CVS:<name>' tag should indicate who this pair refers to.
|
||||
|
||||
|
@ -1,15 +1,20 @@
|
||||
|
||||
RPM port brought to you by
|
||||
0.5 RPM port brought to you by
|
||||
Gustavo Niemeyer <niemeyer@conectiva.com>
|
||||
Alfredo K. Kojima <kojima@conectiva.com.br>
|
||||
|
||||
0.3 RPM port brought to you by
|
||||
Alfredo K. Kojima <kojima@conectiva.com.br>
|
||||
|
||||
with consulting help from:
|
||||
Packaging: Ruda Moura <ruda@conectiva.com.br>
|
||||
Security: Andreas Hasenack <andreas@conectiva.com.br>
|
||||
Misc.: Claudio Matsuoka <claudio@conectiva.com.br>
|
||||
|
||||
|
||||
added user specified public keyring option for gpg, fixed a bug
|
||||
in the file method with authentication
|
||||
Alexander Bokovoy <ab@avilink.net>
|
||||
|
||||
solaris portability fixes
|
||||
AUSTIN MURPHY <amurphy@nbcs.rutgers.edu>
|
||||
|
||||
|
@ -8,10 +8,19 @@ To compile this you need a couple things
|
||||
- A working ANSI C++ compiler, this is not g++ 2.7.*
|
||||
g++ 2.8 works OK and newer egcs work well also. Nobody has tried it
|
||||
on other compilers :< You will need a properly working STL as well.
|
||||
g++ 3 does not presently work because they made the STL headers
|
||||
use namespaces.
|
||||
- A C library with the usual POSIX functions and a BSD socket layer.
|
||||
If you OS conforms to the Single User Spec then you are fine:
|
||||
If you OS conforms to the Single Unix Spec then you are fine:
|
||||
http://www.opengroup.org/onlinepubs/7908799/index.html
|
||||
|
||||
** NOTICE **
|
||||
The C++ global constructors do not link correctly when using non-shared
|
||||
libaries. This is probably the correct behavior of the linker, but I have
|
||||
not yet had time to devise a work around for it. The correct thing to
|
||||
do is add a reference to debSystem in apt-pkg/init.cc,
|
||||
assert(&debSystem == 0) would be fine for instance.
|
||||
|
||||
Guidelines
|
||||
~~~~~~~~~~
|
||||
I am not interested in making 'ultra portable code'. I will accept patches
|
||||
@ -27,12 +36,12 @@ functionality. Patches to make autoconf detect these cases and generate the
|
||||
required shims are OK.
|
||||
|
||||
Current shims:
|
||||
* C9x integer types 'inttypes.h'
|
||||
* sys/statvfs.h to convert from BSD/Linux statfs to SUS statvfs
|
||||
* C99 integer types 'inttypes.h'
|
||||
* sys/statvfs.h to convert from BSD/old-glibc statfs to SUS statvfs
|
||||
* rfc2553 hostname resolution (methods/rfc*), shims to normal gethostbyname.
|
||||
The more adventerous could steal the KAME IPv6 enabled resolvers for those
|
||||
OS's with IPv6 support but no rfc2553 (why?)
|
||||
* define _XOPEN_EXTENDES_SOURCE to bring in h_errno on HP-UX
|
||||
* define _XOPEN_EXTENDED_SOURCE to bring in h_errno on HP-UX
|
||||
* socklen_t shim in netdb.h if the OS does not have socklen_t
|
||||
|
||||
The only completely non-shimed OS is Linux with glibc2.1, glibc2.0 requires
|
||||
@ -42,23 +51,28 @@ Platform Notes
|
||||
~~~~~~~~~~~~~~
|
||||
Debian GNU Linux 2.1 'slink'
|
||||
Debian GNU Linux 'potato'
|
||||
Debian GNU Linux 'woody'
|
||||
* All Archs
|
||||
- Works flawlessly
|
||||
- You will want to have debiandoc-sgml and yodl installed to get
|
||||
- You will want to have debiandoc-sgml and docbook2man installed to get
|
||||
best results.
|
||||
- No IPv6 Support in glibc's < 2.1.
|
||||
|
||||
Sun Solaris
|
||||
SunOS cab101 5.7 Generic_106541-04 sun4u sparc
|
||||
SunOS csu201 5.8 Generic_108528-04 sun4u sparc
|
||||
- Works fine
|
||||
- Note, no IPv6 Support, OS lacks RFC 2553 hostname resolution
|
||||
|
||||
OpenBSD
|
||||
OpenBSD gsb086 2.5 CMPUT#0 i386 unknown
|
||||
- Works fine
|
||||
OpenBSD csu101 2.7 CMPUT#1 i386 unknown
|
||||
- OS needs 'ranlib' to generate the symbol table after 'ar'.. (not using
|
||||
GNU ar with the gnu tool chain :<)
|
||||
- Note, no IPv6 Support, OS lacks RFC 2553 hostname resolution
|
||||
- '2.5' does not have RFC 2553 hostname resolution, but '2.7' does
|
||||
- Testing on '2.7' suggests the OS has a bug in its handling of
|
||||
ftruncate on files that have been written via mmap. It fills the page
|
||||
that crosses the truncation boundary with 0's.
|
||||
|
||||
HP-UX
|
||||
HP-UX nyquist B.10.20 C 9000/780 2016574337 32-user license
|
||||
|
@ -1,30 +0,0 @@
|
||||
/AUTHORS.RPM/1.4/Wed Aug 1 21:35:12 2001//
|
||||
/Makefile/1.4/Wed Aug 1 22:05:11 2001//
|
||||
D/apt-pkg////
|
||||
D/buildlib////
|
||||
D/cmdline////
|
||||
D/debian////
|
||||
D/deity////
|
||||
D/doc////
|
||||
D/dselect////
|
||||
D/gui////
|
||||
D/intl////
|
||||
D/methods////
|
||||
D/po////
|
||||
D/test////
|
||||
D/tools////
|
||||
/AUTHORS/1.1.1.1/Fri Aug 10 13:57:00 2001//
|
||||
/COMPILING/1.1.1.1/Fri Aug 10 13:57:00 2001//
|
||||
/COPYING/1.1.1.1/Fri Aug 10 13:57:01 2001//
|
||||
/COPYING.GPL/1.1.1.1/Fri Aug 10 13:57:02 2001//
|
||||
/ChangeLog/1.2/Fri Aug 10 13:57:23 2001//
|
||||
/README.RPM/1.12/Fri Aug 10 13:57:23 2001//
|
||||
/README.make/1.1.1.1/Fri Aug 10 13:57:24 2001//
|
||||
/REPOSITORIO-APT-HOWTO/1.2/Fri Aug 10 13:57:24 2001//
|
||||
/TODO/1.2/Fri Aug 10 13:57:24 2001//
|
||||
/apt.dia/1.3/Fri Aug 10 13:57:26 2001//
|
||||
/docs.tar.gz/1.1/Fri Aug 10 13:58:18 2001//
|
||||
/mkinstalldirs/1.1/Fri Aug 10 13:58:18 2001//
|
||||
/rpmpriorities/1.2/Fri Aug 10 13:58:19 2001//
|
||||
/configure.in/1.75/Wed Mar 6 17:17:10 2002//
|
||||
/release/1.73/Wed Mar 6 17:17:10 2002//
|
@ -1 +0,0 @@
|
||||
rapt
|
@ -1 +0,0 @@
|
||||
:pserver:anonymous@cvs.conectiva.com.br:/home/cvs
|
apt/ChangeLog (1780 lines; diff suppressed because it is too large)
apt/Makefile (11 lines)
@@ -6,14 +6,20 @@ ifndef NOISY
|
||||
.SILENT:
|
||||
endif
|
||||
|
||||
.PHONY: default
|
||||
default: startup all
|
||||
|
||||
.PHONY: headers library clean veryclean all binary program doc
|
||||
all headers library clean veryclean binary program doc dirs:
|
||||
$(MAKE) -C apt-pkg $@
|
||||
$(MAKE) -C apt-inst $@
|
||||
$(MAKE) -C methods $@
|
||||
$(MAKE) -C cmdline $@
|
||||
$(MAKE) -C ftparchive $@
|
||||
$(MAKE) -C dselect $@
|
||||
$(MAKE) -C tools $@
|
||||
$(MAKE) -C doc $@
|
||||
$(MAKE) -C po $@
|
||||
$(MAKE) -C tools $@
|
||||
|
||||
# Some very common aliases
|
||||
.PHONY: maintainer-clean dist-clean distclean pristine sanity
|
||||
@ -21,8 +27,9 @@ maintainer-clean dist-clean distclean pristine sanity: veryclean
|
||||
|
||||
# The startup target builds the necessary configure scripts. It should
|
||||
# be used after a CVS checkout.
|
||||
CONVERTED=environment.mak include/config.h makefile
|
||||
CONVERTED=environment.mak include/config.h include/apti18n.h makefile
|
||||
include buildlib/configure.mak
|
||||
$(BUILDDIR)/include/config.h: buildlib/config.h.in
|
||||
$(BUILDDIR)/include/apti18n.h: buildlib/apti18n.h.in
|
||||
$(BUILDDIR)/environment.mak: buildlib/environment.mak.in
|
||||
$(BUILDDIR)/makefile: buildlib/makefile.in
|
||||
|
@ -1,5 +1,4 @@
|
||||
|
||||
|
||||
RPM enabled APT
|
||||
|
||||
WARNING
|
||||
@ -12,7 +11,7 @@ DO NOT TRY TO COMPILE THIS ON A DEBIAN SYSTEM.
|
||||
THIS IS A WORK IN PROGRESS AND WILL NOT WORK ON DEBIAN
|
||||
RIGHT NOW.
|
||||
|
||||
If you find a problem, contact kojima@conectiva.com.br,
|
||||
If you find a problem, contact niemeyer@conectiva.com,
|
||||
not the original developers.
|
||||
|
||||
|
||||
@ -37,5 +36,3 @@ want simpler/friendlier documentation, you can
|
||||
get the APT+RPM HOWTO at:
|
||||
http://bazar.conectiva.com.br/~godoy/apt-howto/
|
||||
|
||||
|
||||
|
||||
|
@ -26,12 +26,12 @@ and configure substitutions across build makefiles is not used at all.
|
||||
|
||||
Furthermore, the make system runs with a current directory equal to the
|
||||
source directory irregardless of the destination directory. This means
|
||||
#include "" and #include <> work as epected and more importantly
|
||||
#include "" and #include <> work as expected and more importantly
|
||||
running 'make' in the source directory will work as expected. The
|
||||
environment variable or make parameter 'BUILD' set the build directory.
|
||||
environment variable or make parameter 'BUILD' sets the build directory.
|
||||
It may be an absolute path or a path relative to the top level directory.
|
||||
By default build/ will be used with a fall back to ./ This means
|
||||
you can get all the advantages of a build directory without having to
|
||||
By default build-arch/ then build/ will be used with a fall back to ./ This
|
||||
means you can get all the advantages of a build directory without having to
|
||||
cd into it to edit your source code!
|
||||
|
||||
The make system also performs dependency generation on the fly as the
|
||||
@ -49,10 +49,8 @@ the source directory but is logically divided in the following manner
|
||||
examples/
|
||||
include/
|
||||
apt-pkg/
|
||||
deity/
|
||||
obj/
|
||||
apt-pkg/
|
||||
deity/
|
||||
cmndline/
|
||||
[...]
|
||||
Only .o and .d files are placed in the obj/ subdirectory. The final compiled
|
||||
|
@ -1,117 +0,0 @@
|
||||
|
||||
|
||||
Repositório de APT HOWTO
|
||||
|
||||
|
||||
** Ingredientes
|
||||
|
||||
- 1 máquina com acesso rápido à rede e bastante largura de banda
|
||||
- 1 servidor de ftp (anônimo) ou http rodando
|
||||
|
||||
** Modo de Preparo
|
||||
|
||||
1) Vá ao diretório raiz do servidor de ftp/http
|
||||
2) Crie a seguinte estrutura de diretórios nele:
|
||||
|
||||
<versao>/SRPMS/
|
||||
<versao>/conectiva/RPMS.<comp1>/
|
||||
<versao>/conectiva/RPMS.<comp2>/
|
||||
...
|
||||
<versao>/conectiva/RPMS.<compn>/
|
||||
<versao>/conectiva/base/
|
||||
|
||||
Você pode substituir <versao> pela versão da distribuição
|
||||
que será disponibilizada ou se você estiver disponibilizando
|
||||
algum outro software, a versão da distribuição a que se destinam
|
||||
os pacotes.
|
||||
|
||||
<comp1>...<compn> são os diretórios dos componentes da distribuição
|
||||
e podem ser qualquer string.
|
||||
|
||||
No diretório SRPMS devem estar contidos os srpms dos pacotes
|
||||
do repositório.
|
||||
|
||||
3) Crie os arquivos de índice do apt (os pkglists) no diretório base.
|
||||
Para isso, vá ao diretório base e execute o seguinte comando para
|
||||
cada componente:
|
||||
|
||||
genpkglist <path>/<versao> <comp>
|
||||
|
||||
Onde:
|
||||
|
||||
<path>/<versao> é o caminho completo até o topo do diretório onde
|
||||
está o repositório
|
||||
|
||||
<comp> é o nome do componente.
|
||||
|
||||
Repita a operação para cada um dos componentes que você criou.
|
||||
|
||||
4) Comprima os arquivos de índice com gzip.
|
||||
|
||||
5) Distribua a linha do sources.list para o seu repositório. Ela
|
||||
tem o seguinte formato:
|
||||
|
||||
rpm URL/<path> <versao>/conectiva <comp1> <comp2> ... <compn>
|
||||
^ ^ ^ ^ ^ ^ ^
|
||||
| | | +------+------+-----+- Nomes dos
|
||||
| | | componentes
|
||||
| | |
|
||||
| | versao da distribuição e nome da distribuição
|
||||
| |
|
||||
| URL para o diretório raiz do repositório
|
||||
|
|
||||
Tipo de distribuição. No caso, rpm
|
||||
|
||||
** Exemplos:
|
||||
|
||||
## Quer se fazer um repositório de pacotes para o Conectiva 6.0, na máquina
|
||||
repo.conectiva.com.br, usando http:
|
||||
|
||||
cd /home/httpd
|
||||
mkdir -p coisas/6.0
|
||||
cd coisas/6.0
|
||||
mkdir SRPMS
|
||||
mkdir -p conectiva/RPMS.bla
|
||||
mkdir -p conectiva/base
|
||||
cp /meus/pacotes/*src.rpm SRPMS
|
||||
cp /meus/pacotes/*i386.rpm conectiva/RPMS.bla
|
||||
cd conectiva/base
|
||||
genpkglist /home/httpd/coisas/6.0/conectiva bla
|
||||
gzip pkglist.bla
|
||||
|
||||
Em sources.list deve se adicionar:
|
||||
|
||||
rpm http://repo.conectiva.com.br/coisas 6.0/conectiva bla
|
||||
|
||||
|
||||
## Quer se fazer um repositório de pacotes para a distribuição RedRat 20.0,
|
||||
em bla.redrat.com, usando ftp:
|
||||
|
||||
cd /home/ftp/pub
|
||||
mkdir -p stuff/20.0
|
||||
cd stuff/20.0
|
||||
mkdir SRPMS
|
||||
mkdir -p redrat/RPMS.1
|
||||
mkdir -p redrat/RPMS.2
|
||||
mkdir -p redrat/base
|
||||
cp /tmp/pacotes*src.rpm SRPMS
|
||||
cp <bla bla bla> redrat/RPMS.1
|
||||
cp <bla bla bla> redrat/RPMS.2
|
||||
cd redrat/base
|
||||
genpkglist /home/ftp/stuff/20.0/redrat 1
|
||||
genpkglist /home/ftp/stuff/20.0/redrat 2
|
||||
gzip pkglist.1
|
||||
gzip pkglist.2
|
||||
|
||||
Em sources.list deve se adicionar:
|
||||
|
||||
rpm ftp://bla.redrat.com/pub/stuff 20.0/redrat 1 2
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
apt/TODO (7 lines)
@@ -1,6 +1 @@
|
||||
- check support for installation of packages scattered across multiple CDs
|
||||
- port the authentication stuff to the aliencode branch of APT
|
||||
- port the RPM support to the aliencode branch of APT
|
||||
- rewrite rpm repository maintenance tools
|
||||
- package "hold"ing
|
||||
- package ignoring (exclude from apt-get check)
|
||||
- Implement hashing in file method.
|
||||
|
apt/aclocal.m4 (vendored, new file, 2860 lines; diff suppressed because it is too large)
apt/apt-inst/contrib/arfile.cc (new file, 154 lines)
@@ -0,0 +1,154 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: arfile.cc,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
AR File - Handle an 'AR' archive
|
||||
|
||||
AR Archives have plain text headers at the start of each file
|
||||
section. The headers are aligned on a 2 byte boundry.
|
||||
|
||||
Information about the structure of AR files can be found in ar(5)
|
||||
on a BSD system, or in the binutils source.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Include Files /*{{{*/
|
||||
#ifdef __GNUG__
|
||||
#pragma implementation "apt-pkg/arfile.h"
|
||||
#endif
|
||||
#include <apt-pkg/arfile.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
#include <apt-pkg/error.h>
|
||||
|
||||
#include <stdlib.h>
|
||||
/*}}}*/
|
||||
|
||||
struct ARArchive::MemberHeader
|
||||
{
|
||||
char Name[16];
|
||||
char MTime[12];
|
||||
char UID[6];
|
||||
char GID[6];
|
||||
char Mode[8];
|
||||
char Size[10];
|
||||
char Magic[2];
|
||||
};
|
||||
|
||||
// ARArchive::ARArchive - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
ARArchive::ARArchive(FileFd &File) : List(0), File(File)
|
||||
{
|
||||
LoadHeaders();
|
||||
}
|
||||
/*}}}*/
|
||||
// ARArchive::~ARArchive - Destructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
ARArchive::~ARArchive()
|
||||
{
|
||||
while (List != 0)
|
||||
{
|
||||
Member *Tmp = List;
|
||||
List = List->Next;
|
||||
delete Tmp;
|
||||
}
|
||||
}
|
||||
/*}}}*/
|
||||
// ARArchive::LoadHeaders - Load the headers from each file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* AR files are structured with a 8 byte magic string followed by a 60
|
||||
byte plain text header then the file data, another header, data, etc */
|
||||
bool ARArchive::LoadHeaders()
|
||||
{
|
||||
signed long Left = File.Size();
|
||||
|
||||
// Check the magic byte
|
||||
char Magic[8];
|
||||
if (File.Read(Magic,sizeof(Magic)) == false)
|
||||
return false;
|
||||
if (memcmp(Magic,"!<arch>\012",sizeof(Magic)) != 0)
|
||||
return _error->Error("Invalid archive signature");
|
||||
Left -= sizeof(Magic);
|
||||
|
||||
// Read the member list
|
||||
while (Left > 0)
|
||||
{
|
||||
MemberHeader Head;
|
||||
if (File.Read(&Head,sizeof(Head)) == false)
|
||||
return _error->Error("Error reading archive member header");
|
||||
Left -= sizeof(Head);
|
||||
|
||||
// Convert all of the integer members
|
||||
Member *Memb = new Member();
|
||||
if (StrToNum(Head.MTime,Memb->MTime,sizeof(Head.MTime)) == false ||
|
||||
StrToNum(Head.UID,Memb->UID,sizeof(Head.UID)) == false ||
|
||||
StrToNum(Head.GID,Memb->GID,sizeof(Head.GID)) == false ||
|
||||
StrToNum(Head.Mode,Memb->Mode,sizeof(Head.Mode),8) == false ||
|
||||
StrToNum(Head.Size,Memb->Size,sizeof(Head.Size)) == false)
|
||||
{
|
||||
delete Memb;
|
||||
return _error->Error("Invalid archive member header");
|
||||
}
|
||||
|
||||
// Check for an extra long name string
|
||||
if (memcmp(Head.Name,"#1/",3) == 0)
|
||||
{
|
||||
char S[300];
|
||||
unsigned long Len;
|
||||
if (StrToNum(Head.Name+3,Len,sizeof(Head.Size)-3) == false ||
|
||||
Len >= strlen(S))
|
||||
{
|
||||
delete Memb;
|
||||
return _error->Error("Invalid archive member header");
|
||||
}
|
||||
if (File.Read(S,Len) == false)
|
||||
return false;
|
||||
S[Len] = 0;
|
||||
Memb->Name = S;
|
||||
Memb->Size -= Len;
|
||||
Left -= Len;
|
||||
}
|
||||
else
|
||||
{
|
||||
unsigned int I = sizeof(Head.Name) - 1;
|
||||
for (; Head.Name[I] == ' '; I--);
|
||||
Memb->Name = string(Head.Name,0,I+1);
|
||||
}
|
||||
|
||||
// Account for the AR header alignment
|
||||
unsigned Skip = Memb->Size % 2;
|
||||
|
||||
// Add it to the list
|
||||
Memb->Next = List;
|
||||
List = Memb;
|
||||
Memb->Start = File.Tell();
|
||||
if (File.Skip(Memb->Size + Skip) == false)
|
||||
return false;
|
||||
if (Left < (signed)(Memb->Size + Skip))
|
||||
return _error->Error("Archive is too short");
|
||||
Left -= Memb->Size + Skip;
|
||||
}
|
||||
if (Left != 0)
|
||||
return _error->Error("Failed to read the archive headers");
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// ARArchive::FindMember - Find a name in the member list /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Find a member with the given name */
|
||||
const ARArchive::Member *ARArchive::FindMember(const char *Name) const
|
||||
{
|
||||
const Member *Res = List;
|
||||
while (Res != 0)
|
||||
{
|
||||
if (Res->Name == Name)
|
||||
return Res;
|
||||
Res = Res->Next;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
/*}}}*/
|
apt/apt-inst/contrib/arfile.h (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: arfile.h,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
AR File - Handle an 'AR' archive
|
||||
|
||||
This is a reader for the usual 4.4 BSD AR format. It allows raw
|
||||
stream access to a single member at a time. Basically all this class
|
||||
provides is header parsing and verification. It is up to the client
|
||||
to correctly make use of the stream start/stop points.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
#ifndef PKGLIB_ARFILE_H
|
||||
#define PKGLIB_ARFILE_H
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/arfile.h"
|
||||
#endif
|
||||
|
||||
#include <string>
|
||||
#include <apt-pkg/fileutl.h>
|
||||
|
||||
class ARArchive
|
||||
{
|
||||
struct MemberHeader;
|
||||
public:
|
||||
struct Member;
|
||||
|
||||
protected:
|
||||
|
||||
// Linked list of members
|
||||
Member *List;
|
||||
|
||||
bool LoadHeaders();
|
||||
|
||||
public:
|
||||
|
||||
// The stream file
|
||||
FileFd &File;
|
||||
|
||||
// Locate a member by name
|
||||
const Member *FindMember(const char *Name) const;
|
||||
|
||||
ARArchive(FileFd &File);
|
||||
~ARArchive();
|
||||
};
|
||||
|
||||
// A member of the archive
|
||||
struct ARArchive::Member
|
||||
{
|
||||
// Fields from the header
|
||||
string Name;
|
||||
unsigned long MTime;
|
||||
unsigned long UID;
|
||||
unsigned long GID;
|
||||
unsigned long Mode;
|
||||
unsigned long Size;
|
||||
|
||||
// Location of the data.
|
||||
unsigned long Start;
|
||||
Member *Next;
|
||||
|
||||
Member() : Start(0), Next(0) {};
|
||||
};
|
||||
|
||||
#endif
|
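arfile.h above deliberately stops at header parsing: reading a member's data is left to the caller, which seeks to Member::Start and reads Member::Size bytes. A minimal usage sketch under that assumption (illustrative, not code from the tree):

#include <apt-pkg/arfile.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/error.h>

// Locate a member by name and position the stream at its data.
bool SeekToMember(FileFd &Fd, const char *Name, unsigned long &Size)
{
   ARArchive AR(Fd);
   if (_error->PendingError() == true)
      return false;
   const ARArchive::Member *M = AR.FindMember(Name);
   if (M == 0)
      return _error->Error("Member %s not found", Name);
   Size = M->Size;
   return Fd.Seek(M->Start);    // the caller may now read exactly Size bytes
}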
apt/apt-inst/contrib/extracttar.cc (new file, 343 lines)
@@ -0,0 +1,343 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: extracttar.cc,v 1.4 2003/01/29 18:43:47 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Extract a Tar - Tar Extractor
|
||||
|
||||
Some performance measurements showed that zlib performed quite poorly
|
||||
in comparision to a forked gzip process. This tar extractor makes use
|
||||
of the fact that dup'd file descriptors have the same seek pointer
|
||||
and that gzip will not read past the end of a compressed stream,
|
||||
even if there is more data. We use the dup property to track extraction
|
||||
progress and the gzip feature to just feed gzip a fd in the middle
|
||||
of an AR file.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Include Files /*{{{*/
|
||||
#ifdef __GNUG__
|
||||
#pragma implementation "apt-pkg/extracttar.h"
|
||||
#endif
|
||||
#include <apt-pkg/extracttar.h>
|
||||
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
#include <apt-pkg/configuration.h>
|
||||
#include <system.h>
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <unistd.h>
|
||||
#include <signal.h>
|
||||
#include <fcntl.h>
|
||||
#include <iostream>
|
||||
/*}}}*/
|
||||
using namespace std;
|
||||
|
||||
// The on disk header for a tar file.
|
||||
struct ExtractTar::TarHeader
|
||||
{
|
||||
char Name[100];
|
||||
char Mode[8];
|
||||
char UserID[8];
|
||||
char GroupID[8];
|
||||
char Size[12];
|
||||
char MTime[12];
|
||||
char Checksum[8];
|
||||
char LinkFlag;
|
||||
char LinkName[100];
|
||||
char MagicNumber[8];
|
||||
char UserName[32];
|
||||
char GroupName[32];
|
||||
char Major[8];
|
||||
char Minor[8];
|
||||
};
|
||||
|
||||
// ExtractTar::ExtractTar - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
ExtractTar::ExtractTar(FileFd &Fd,unsigned long Max) : File(Fd),
|
||||
MaxInSize(Max)
|
||||
|
||||
{
|
||||
GZPid = -1;
|
||||
InFd = -1;
|
||||
Eof = false;
|
||||
}
|
||||
/*}}}*/
|
||||
// ExtractTar::ExtractTar - Destructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
ExtractTar::~ExtractTar()
|
||||
{
|
||||
// Error close
|
||||
Done(true);
|
||||
}
|
||||
/*}}}*/
|
||||
// ExtractTar::Done - Reap the gzip sub process /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* If the force flag is given then error messages are suppressed - this
|
||||
means we hit the end of the tar file but there was still gzip data. */
|
||||
bool ExtractTar::Done(bool Force)
|
||||
{
|
||||
InFd.Close();
|
||||
if (GZPid <= 0)
|
||||
return true;
|
||||
|
||||
/* If there is a pending error then we are cleaning up gzip and are
|
||||
not interested in it's failures */
|
||||
if (_error->PendingError() == true)
|
||||
Force = true;
|
||||
|
||||
// Make sure we clean it up!
|
||||
kill(GZPid,SIGINT);
|
||||
if (ExecWait(GZPid,_config->Find("dir::bin::gzip","/bin/gzip").c_str(),
|
||||
Force) == false)
|
||||
{
|
||||
GZPid = -1;
|
||||
return Force;
|
||||
}
|
||||
|
||||
GZPid = -1;
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// ExtractTar::StartGzip - Startup gzip /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This creates a gzip sub process that has its input as the file itself.
|
||||
If this tar file is embedded into something like an ar file then
|
||||
gzip will efficiently ignore the extra bits. */
|
||||
bool ExtractTar::StartGzip()
|
||||
{
|
||||
int Pipes[2];
|
||||
if (pipe(Pipes) != 0)
|
||||
return _error->Errno("pipe","Failed to create pipes");
|
||||
|
||||
// Fork off the process
|
||||
GZPid = ExecFork();
|
||||
|
||||
// Spawn the subprocess
|
||||
if (GZPid == 0)
|
||||
{
|
||||
// Setup the FDs
|
||||
dup2(Pipes[1],STDOUT_FILENO);
|
||||
dup2(File.Fd(),STDIN_FILENO);
|
||||
int Fd = open("/dev/null",O_RDWR);
|
||||
if (Fd == -1)
|
||||
_exit(101);
|
||||
dup2(Fd,STDERR_FILENO);
|
||||
close(Fd);
|
||||
SetCloseExec(STDOUT_FILENO,false);
|
||||
SetCloseExec(STDIN_FILENO,false);
|
||||
SetCloseExec(STDERR_FILENO,false);
|
||||
|
||||
const char *Args[3];
|
||||
Args[0] = _config->Find("dir::bin::gzip","/bin/gzip").c_str();
|
||||
Args[1] = "-d";
|
||||
Args[2] = 0;
|
||||
execv(Args[0],(char **)Args);
|
||||
cerr << "Failed to exec gzip " << Args[0] << endl;
|
||||
_exit(100);
|
||||
}
|
||||
|
||||
// Fix up our FDs
|
||||
InFd.Fd(Pipes[0]);
|
||||
close(Pipes[1]);
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// ExtractTar::Go - Perform extraction /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This reads each 512 byte block from the archive and extracts the header
|
||||
information into the Item structure. Then it resolves the UID/GID and
|
||||
invokes the correct processing function. */
|
||||
bool ExtractTar::Go(pkgDirStream &Stream)
|
||||
{
|
||||
if (StartGzip() == false)
|
||||
return false;
|
||||
|
||||
// Loop over all blocks
|
||||
string LastLongLink;
|
||||
string LastLongName;
|
||||
while (1)
|
||||
{
|
||||
bool BadRecord = false;
|
||||
unsigned char Block[512];
|
||||
if (InFd.Read(Block,sizeof(Block),true) == false)
|
||||
return false;
|
||||
|
||||
if (InFd.Eof() == true)
|
||||
break;
|
||||
|
||||
// Get the checksum
|
||||
TarHeader *Tar = (TarHeader *)Block;
|
||||
unsigned long CheckSum;
|
||||
if (StrToNum(Tar->Checksum,CheckSum,sizeof(Tar->Checksum),8) == false)
|
||||
return _error->Error("Corrupted archive");
|
||||
|
||||
/* Compute the checksum field. The actual checksum is blanked out
|
||||
with spaces so it is not included in the computation */
|
||||
unsigned long NewSum = 0;
|
||||
memset(Tar->Checksum,' ',sizeof(Tar->Checksum));
|
||||
for (int I = 0; I != sizeof(Block); I++)
|
||||
NewSum += Block[I];
|
||||
|
||||
/* Check for a block of nulls - in this case we kill gzip, GNU tar
|
||||
does this.. */
|
||||
if (NewSum == ' '*sizeof(Tar->Checksum))
|
||||
return Done(true);
|
||||
|
||||
if (NewSum != CheckSum)
|
||||
return _error->Error("Tar Checksum failed, archive corrupted");
|
||||
|
||||
// Decode all of the fields
|
||||
pkgDirStream::Item Itm;
|
||||
if (StrToNum(Tar->Mode,Itm.Mode,sizeof(Tar->Mode),8) == false ||
|
||||
StrToNum(Tar->UserID,Itm.UID,sizeof(Tar->UserID),8) == false ||
|
||||
StrToNum(Tar->GroupID,Itm.GID,sizeof(Tar->GroupID),8) == false ||
|
||||
StrToNum(Tar->Size,Itm.Size,sizeof(Tar->Size),8) == false ||
|
||||
StrToNum(Tar->MTime,Itm.MTime,sizeof(Tar->MTime),8) == false ||
|
||||
StrToNum(Tar->Major,Itm.Major,sizeof(Tar->Major),8) == false ||
|
||||
StrToNum(Tar->Minor,Itm.Minor,sizeof(Tar->Minor),8) == false)
|
||||
return _error->Error("Corrupted archive");
|
||||
|
||||
// Grab the filename
|
||||
if (LastLongName.empty() == false)
|
||||
Itm.Name = (char *)LastLongName.c_str();
|
||||
else
|
||||
{
|
||||
Tar->Name[sizeof(Tar->Name)] = 0;
|
||||
Itm.Name = Tar->Name;
|
||||
}
|
||||
if (Itm.Name[0] == '.' && Itm.Name[1] == '/' && Itm.Name[2] != 0)
|
||||
Itm.Name += 2;
|
||||
|
||||
// Grab the link target
|
||||
Tar->Name[sizeof(Tar->LinkName)] = 0;
|
||||
Itm.LinkTarget = Tar->LinkName;
|
||||
|
||||
if (LastLongLink.empty() == false)
|
||||
Itm.LinkTarget = (char *)LastLongLink.c_str();
|
||||
|
||||
// Convert the type over
|
||||
switch (Tar->LinkFlag)
|
||||
{
|
||||
case NormalFile0:
|
||||
case NormalFile:
|
||||
Itm.Type = pkgDirStream::Item::File;
|
||||
break;
|
||||
|
||||
case HardLink:
|
||||
Itm.Type = pkgDirStream::Item::HardLink;
|
||||
break;
|
||||
|
||||
case SymbolicLink:
|
||||
Itm.Type = pkgDirStream::Item::SymbolicLink;
|
||||
break;
|
||||
|
||||
case CharacterDevice:
|
||||
Itm.Type = pkgDirStream::Item::CharDevice;
|
||||
break;
|
||||
|
||||
case BlockDevice:
|
||||
Itm.Type = pkgDirStream::Item::BlockDevice;
|
||||
break;
|
||||
|
||||
case Directory:
|
||||
Itm.Type = pkgDirStream::Item::Directory;
|
||||
break;
|
||||
|
||||
case FIFO:
|
||||
Itm.Type = pkgDirStream::Item::FIFO;
|
||||
break;
|
||||
|
||||
case GNU_LongLink:
|
||||
{
|
||||
unsigned long Length = Itm.Size;
|
||||
unsigned char Block[512];
|
||||
while (Length > 0)
|
||||
{
|
||||
if (InFd.Read(Block,sizeof(Block),true) == false)
|
||||
return false;
|
||||
if (Length <= sizeof(Block))
|
||||
{
|
||||
LastLongLink.append(Block,Block+sizeof(Block));
|
||||
break;
|
||||
}
|
||||
LastLongLink.append(Block,Block+sizeof(Block));
|
||||
Length -= sizeof(Block);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
case GNU_LongName:
|
||||
{
|
||||
unsigned long Length = Itm.Size;
|
||||
unsigned char Block[512];
|
||||
while (Length > 0)
|
||||
{
|
||||
if (InFd.Read(Block,sizeof(Block),true) == false)
|
||||
return false;
|
||||
if (Length < sizeof(Block))
|
||||
{
|
||||
LastLongName.append(Block,Block+sizeof(Block));
|
||||
break;
|
||||
}
|
||||
LastLongName.append(Block,Block+sizeof(Block));
|
||||
Length -= sizeof(Block);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
default:
|
||||
BadRecord = true;
|
||||
_error->Warning("Unkown TAR header type %u, member %s",(unsigned)Tar->LinkFlag,Tar->Name);
|
||||
break;
|
||||
}
|
||||
|
||||
int Fd = -1;
|
||||
if (BadRecord == false)
|
||||
if (Stream.DoItem(Itm,Fd) == false)
|
||||
return false;
|
||||
|
||||
// Copy the file over the FD
|
||||
unsigned long Size = Itm.Size;
|
||||
while (Size != 0)
|
||||
{
|
||||
unsigned char Junk[32*1024];
|
||||
unsigned long Read = MIN(Size,sizeof(Junk));
|
||||
if (InFd.Read(Junk,((Read+511)/512)*512) == false)
|
||||
return false;
|
||||
|
||||
if (BadRecord == false)
|
||||
{
|
||||
if (Fd > 0)
|
||||
{
|
||||
if (write(Fd,Junk,Read) != (signed)Read)
|
||||
return Stream.Fail(Itm,Fd);
|
||||
}
|
||||
else
|
||||
{
|
||||
/* An Fd of -2 means to send to a special processing
|
||||
function */
|
||||
if (Fd == -2)
|
||||
if (Stream.Process(Itm,Junk,Read,Itm.Size - Size) == false)
|
||||
return Stream.Fail(Itm,Fd);
|
||||
}
|
||||
}
|
||||
|
||||
Size -= Read;
|
||||
}
|
||||
|
||||
// And finish up
|
||||
if (Itm.Size != 0 && BadRecord == false)
|
||||
if (Stream.FinishedFile(Itm,Fd) == false)
|
||||
return false;
|
||||
|
||||
LastLongName.erase();
|
||||
LastLongLink.erase();
|
||||
}
|
||||
|
||||
return Done(false);
|
||||
}
|
||||
/*}}}*/
|
apt/apt-inst/contrib/extracttar.h (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: extracttar.h,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Extract a Tar - Tar Extractor
|
||||
|
||||
The tar extractor takes an ordinary gzip compressed tar stream from
|
||||
the given file and explodes it, passing the individual items to the
|
||||
given Directory Stream for processing.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
#ifndef PKGLIB_EXTRACTTAR_H
|
||||
#define PKGLIB_EXTRACTTAR_H
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/extracttar.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/fileutl.h>
|
||||
#include <apt-pkg/dirstream.h>
|
||||
|
||||
class ExtractTar
|
||||
{
|
||||
protected:
|
||||
|
||||
struct TarHeader;
|
||||
|
||||
// The varios types items can be
|
||||
enum ItemType {NormalFile0 = '\0',NormalFile = '0',HardLink = '1',
|
||||
SymbolicLink = '2',CharacterDevice = '3',
|
||||
BlockDevice = '4',Directory = '5',FIFO = '6',
|
||||
GNU_LongLink = 'K',GNU_LongName = 'L'};
|
||||
|
||||
FileFd &File;
|
||||
unsigned long MaxInSize;
|
||||
int GZPid;
|
||||
FileFd InFd;
|
||||
bool Eof;
|
||||
|
||||
// Fork and reap gzip
|
||||
bool StartGzip();
|
||||
bool Done(bool Force);
|
||||
|
||||
public:
|
||||
|
||||
bool Go(pkgDirStream &Stream);
|
||||
|
||||
ExtractTar(FileFd &Fd,unsigned long Max);
|
||||
virtual ~ExtractTar();
|
||||
};
|
||||
|
||||
#endif
|
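extracttar.h above is the whole public surface: construct ExtractTar over a gzip-compressed tar stream and call Go() with a pkgDirStream that decides what happens to each member. The sketch below only lists member names; it assumes that leaving the descriptor at -1 makes Go() discard the file data and that the default FinishedFile() is a no-op in that case (both are assumptions, not guarantees from the headers shown):

#include <apt-pkg/extracttar.h>
#include <apt-pkg/dirstream.h>
#include <apt-pkg/fileutl.h>
#include <iostream>

// List the member names of a gzip-compressed tar without extracting data.
class ListStream : public pkgDirStream
{
   public:
   virtual bool DoItem(Item &Itm, int &Fd)
   {
      std::cout << Itm.Name << std::endl;
      Fd = -1;                  // leave -1 so Go() skips the member contents
      return true;
   }
};

bool ListTarGz(const char *Path)
{
   FileFd Fd(Path, FileFd::ReadOnly);
   ExtractTar Tar(Fd, Fd.Size());       // Max = the whole file
   ListStream List;
   return Tar.Go(List);
}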
apt/apt-inst/database.cc (new file, 30 lines)
@@ -0,0 +1,30 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
// $Id: database.cc,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
/* ######################################################################

   Data Base Abstraction

   ##################################################################### */
/*}}}*/
// Include Files /*{{{*/
#ifdef __GNUG__
#pragma implementation "apt-pkg/database.h"
#endif

#include <apt-pkg/database.h>
/*}}}*/

// DataBase::GetMetaTmp - Get the temp dir /*{{{*/
// ---------------------------------------------------------------------
/* This re-initializes the meta temporary directory if it hasn't yet
   been inited for this cycle. The flag is the emptiness of MetaDir */
bool pkgDataBase::GetMetaTmp(string &Dir)
{
   if (MetaDir.empty() == true)
      if (InitMetaTmp(MetaDir) == false)
         return false;
   Dir = MetaDir;
   return true;
}
/*}}}*/
56
apt/apt-inst/database.h
Normal file
@ -0,0 +1,56 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: database.h,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Data Base Abstraction
|
||||
|
||||
This class provides a simple interface to an abstract notion of a
|
||||
database directory for storing state information about the system.
|
||||
|
||||
The 'Meta' information for a package is the control information and
|
||||
setup scripts stored inside the archive. GetMetaTmp returns the name of
|
||||
a directory that is used to store named files containing the control
|
||||
information.
|
||||
|
||||
The File Listing is the database of installed files. It is loaded
|
||||
into the memory/persistent cache structure by the ReadFileList method.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
#ifndef PKGLIB_DATABASE_H
|
||||
#define PKGLIB_DATABASE_H
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/database.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/filelist.h>
|
||||
#include <apt-pkg/pkgcachegen.h>
|
||||
|
||||
class pkgDataBase
|
||||
{
|
||||
protected:
|
||||
|
||||
pkgCacheGenerator *Cache;
|
||||
pkgFLCache *FList;
|
||||
string MetaDir;
|
||||
virtual bool InitMetaTmp(string &Dir) = 0;
|
||||
|
||||
public:
|
||||
|
||||
// Some manipulators for the cache and generator
|
||||
inline pkgCache &GetCache() {return Cache->GetCache();};
|
||||
inline pkgFLCache &GetFLCache() {return *FList;};
|
||||
inline pkgCacheGenerator &GetGenerator() {return *Cache;};
|
||||
|
||||
bool GetMetaTmp(string &Dir);
|
||||
virtual bool ReadyFileList(OpProgress &Progress) = 0;
|
||||
virtual bool ReadyPkgCache(OpProgress &Progress) = 0;
|
||||
virtual bool LoadChanges() = 0;
|
||||
|
||||
pkgDataBase() : Cache(0), FList(0) {};
|
||||
virtual ~pkgDataBase() {delete Cache; delete FList;};
|
||||
};
|
||||
|
||||
#endif
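
As a rough illustration of how this abstract interface is meant to be consumed (a hedged sketch; the helper name is invented and a concrete subclass such as the debDpkgDB added later in this commit is assumed):

// Hypothetical helper that prepares any pkgDataBase implementation for use.
bool PrepareDataBase(pkgDataBase &DB,OpProgress &Progress)
{
   // The package cache has to be ready before the file list can be built.
   if (DB.ReadyPkgCache(Progress) == false)
      return false;
   if (DB.ReadyFileList(Progress) == false)
      return false;

   // Control members of an archive get unpacked into this per-cycle directory.
   string MetaDir;
   return DB.GetMetaTmp(MetaDir);
}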
|
262
apt/apt-inst/deb/debfile.cc
Normal file
@ -0,0 +1,262 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: debfile.cc,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Debian Archive File (.deb)
|
||||
|
||||
.DEB archives are AR files containing two tars and an empty marker
|
||||
member called 'debian-binary'. The two tars contain the meta data and
|
||||
the actual archive contents. Thus this class is a very simple wrapper
|
||||
around ar/tar to simply extract the right tar files.
|
||||
|
||||
It also uses the deb package list parser to parse the control file
|
||||
into the cache.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Include Files /*{{{*/
|
||||
#ifdef __GNUG__
|
||||
#pragma implementation "apt-pkg/debfile.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/debfile.h>
|
||||
#include <apt-pkg/extracttar.h>
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/deblistparser.h>
|
||||
|
||||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
/*}}}*/
|
||||
|
||||
// DebFile::debDebFile - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Open the AR file and check for consistency */
|
||||
debDebFile::debDebFile(FileFd &File) : File(File), AR(File)
|
||||
{
|
||||
if (_error->PendingError() == true)
|
||||
return;
|
||||
|
||||
// Check the members for validity
|
||||
if (CheckMember("debian-binary") == false ||
|
||||
CheckMember("control.tar.gz") == false ||
|
||||
CheckMember("data.tar.gz") == false)
|
||||
return;
|
||||
}
|
||||
/*}}}*/
|
||||
// DebFile::CheckMember - Check if a named member is in the archive /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is used to check for a correct deb and to give nicer error messages
|
||||
for people playing around. */
|
||||
bool debDebFile::CheckMember(const char *Name)
|
||||
{
|
||||
if (AR.FindMember(Name) == 0)
|
||||
return _error->Error("This is not a valid DEB archive, missing '%s' member",Name);
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// DebFile::GotoMember - Jump to a Member /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Jump in the file to the start of a named member and return the information
|
||||
about that member. The caller can then read from the file up to the
|
||||
returned size. Note, since this relies on the file position this is
|
||||
a destructive operation, it also changes the last returned Member
|
||||
structure - so don't nest them! */
|
||||
const ARArchive::Member *debDebFile::GotoMember(const char *Name)
|
||||
{
|
||||
// Get the archive member and position the file
|
||||
const ARArchive::Member *Member = AR.FindMember(Name);
|
||||
if (Member == 0)
|
||||
{
|
||||
_error->Error("Internal Error, could not locate member %s",Name);
|
||||
return 0;
|
||||
}
|
||||
if (File.Seek(Member->Start) == false)
|
||||
return 0;
|
||||
|
||||
return Member;
|
||||
}
|
||||
/*}}}*/
|
||||
// DebFile::ExtractControl - Extract Control information /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Extract the control information into the Database's temporary
|
||||
directory. */
|
||||
bool debDebFile::ExtractControl(pkgDataBase &DB)
|
||||
{
|
||||
// Get the archive member and position the file
|
||||
const ARArchive::Member *Member = GotoMember("control.tar.gz");
|
||||
if (Member == 0)
|
||||
return false;
|
||||
|
||||
// Prepare Tar
|
||||
ControlExtract Extract;
|
||||
ExtractTar Tar(File,Member->Size);
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
|
||||
// Get into the temporary directory
|
||||
string Cwd = SafeGetCWD();
|
||||
string Tmp;
|
||||
if (DB.GetMetaTmp(Tmp) == false)
|
||||
return false;
|
||||
if (chdir(Tmp.c_str()) != 0)
|
||||
return _error->Errno("chdir","Couldn't change to %s",Tmp.c_str());
|
||||
|
||||
// Do extraction
|
||||
if (Tar.Go(Extract) == false)
|
||||
return false;
|
||||
|
||||
// Switch out of the tmp directory.
|
||||
if (chdir(Cwd.c_str()) != 0)
|
||||
chdir("/");
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// DebFile::ExtractArchive - Extract the archive data itself /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Simple wrapper around tar.. */
|
||||
bool debDebFile::ExtractArchive(pkgDirStream &Stream)
|
||||
{
|
||||
// Get the archive member and position the file
|
||||
const ARArchive::Member *Member = AR.FindMember("data.tar.gz");
|
||||
if (Member == 0)
|
||||
return _error->Error("Internal Error, could not locate member");
|
||||
if (File.Seek(Member->Start) == false)
|
||||
return false;
|
||||
|
||||
// Prepare Tar
|
||||
ExtractTar Tar(File,Member->Size);
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
return Tar.Go(Stream);
|
||||
}
|
||||
/*}}}*/
|
||||
// DebFile::MergeControl - Merge the control information /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This reads the extracted control file into the cache and returns the
|
||||
version that was parsed. All this really does is select the correct
|
||||
parser and correct file to parse. */
|
||||
pkgCache::VerIterator debDebFile::MergeControl(pkgDataBase &DB)
|
||||
{
|
||||
// Open the control file
|
||||
string Tmp;
|
||||
if (DB.GetMetaTmp(Tmp) == false)
|
||||
return pkgCache::VerIterator(DB.GetCache());
|
||||
FileFd Fd(Tmp + "control",FileFd::ReadOnly);
|
||||
if (_error->PendingError() == true)
|
||||
return pkgCache::VerIterator(DB.GetCache());
|
||||
|
||||
// Parse it
|
||||
debListParser Parse(&Fd);
|
||||
pkgCache::VerIterator Ver(DB.GetCache());
|
||||
if (DB.GetGenerator().MergeList(Parse,&Ver) == false)
|
||||
return pkgCache::VerIterator(DB.GetCache());
|
||||
|
||||
if (Ver.end() == true)
|
||||
_error->Error("Failed to locate a valid control file");
|
||||
return Ver;
|
||||
}
|
||||
/*}}}*/
|
||||
|
||||
// DebFile::ControlExtract::DoItem - Control Tar Extraction /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This directory stream handler for the control tar handles extracting
|
||||
it into the temporary meta directory. It only extracts files, it does
|
||||
not create directories, links or anything else. */
|
||||
bool debDebFile::ControlExtract::DoItem(Item &Itm,int &Fd)
|
||||
{
|
||||
if (Itm.Type != Item::File)
|
||||
return true;
|
||||
|
||||
/* Cleanse the file name, prevent people from trying to unpack into
|
||||
absolute paths, .., etc */
|
||||
for (char *I = Itm.Name; *I != 0; I++)
|
||||
if (*I == '/')
|
||||
*I = '_';
|
||||
|
||||
/* Force the ownership to be root and ensure correct permissions,
|
||||
go-w, the rest are left untouched */
|
||||
Itm.UID = 0;
|
||||
Itm.GID = 0;
|
||||
Itm.Mode &= ~(S_IWGRP | S_IWOTH);
|
||||
|
||||
return pkgDirStream::DoItem(Itm,Fd);
|
||||
}
|
||||
/*}}}*/
|
||||
|
||||
// MemControlExtract::DoItem - Check if it is the control file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This sets up to extract the control block member file into a memory
|
||||
block of just the right size. All other files go into the bit bucket. */
|
||||
bool debDebFile::MemControlExtract::DoItem(Item &Itm,int &Fd)
|
||||
{
|
||||
// At the control file, allocate buffer memory.
|
||||
if (Member == Itm.Name)
|
||||
{
|
||||
delete [] Control;
|
||||
Control = new char[Itm.Size+2];
|
||||
IsControl = true;
|
||||
Fd = -2; // Signal to pass to Process
|
||||
Length = Itm.Size;
|
||||
}
|
||||
else
|
||||
IsControl = false;
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// MemControlExtract::Process - Process extracting the control file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Just memcopy the block from the tar extractor and put it in the right
|
||||
place in the pre-allocated memory block. */
|
||||
bool debDebFile::MemControlExtract::Process(Item &Itm,const unsigned char *Data,
|
||||
unsigned long Size,unsigned long Pos)
|
||||
{
|
||||
memcpy(Control + Pos, Data,Size);
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// MemControlExtract::Read - Read the control information from the deb /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This uses the internal tar extractor to fetch the control file, and then
|
||||
it parses it into a tag section parser. */
|
||||
bool debDebFile::MemControlExtract::Read(debDebFile &Deb)
|
||||
{
|
||||
// Get the archive member and position the file
|
||||
const ARArchive::Member *Member = Deb.GotoMember("control.tar.gz");
|
||||
if (Member == 0)
|
||||
return false;
|
||||
|
||||
// Extract it.
|
||||
ExtractTar Tar(Deb.GetFile(),Member->Size);
|
||||
if (Tar.Go(*this) == false)
|
||||
return false;
|
||||
|
||||
if (Control == 0)
|
||||
return true;
|
||||
|
||||
Control[Length] = '\n';
|
||||
Control[Length+1] = '\n';
|
||||
if (Section.Scan(Control,Length+2) == false)
|
||||
return _error->Error("Unparsible control file");
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// MemControlExtract::TakeControl - Parse a memory block /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* The given memory block is loaded into the parser and parsed as a control
|
||||
record. */
|
||||
bool debDebFile::MemControlExtract::TakeControl(const void *Data,unsigned long Size)
|
||||
{
|
||||
delete [] Control;
|
||||
Control = new char[Size+2];
|
||||
Length = Size;
|
||||
memcpy(Control,Data,Size);
|
||||
|
||||
Control[Length] = '\n';
|
||||
Control[Length+1] = '\n';
|
||||
return Section.Scan(Control,Length+2);
|
||||
}
|
||||
/*}}}*/
|
||||
|
92
apt/apt-inst/deb/debfile.h
Normal file
@ -0,0 +1,92 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: debfile.h,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Debian Archive File (.deb)
|
||||
|
||||
This Class handles all the operations performed directly on .deb
|
||||
files. It makes use of the AR and TAR classes to give the necessary
|
||||
external interface.
|
||||
|
||||
There are only two things that can be done with a raw package,
|
||||
extract its control information and extract the contents itself.
|
||||
|
||||
This should probably subclass an as-yet unwritten super class to
|
||||
produce a generic archive mechanism.
|
||||
|
||||
The memory control file extractor is useful to extract a single file
|
||||
into memory from the control.tar.gz
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
#ifndef PKGLIB_DEBFILE_H
|
||||
#define PKGLIB_DEBFILE_H
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/debfile.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/arfile.h>
|
||||
#include <apt-pkg/database.h>
|
||||
#include <apt-pkg/dirstream.h>
|
||||
#include <apt-pkg/tagfile.h>
|
||||
|
||||
class debDebFile
|
||||
{
|
||||
protected:
|
||||
|
||||
FileFd &File;
|
||||
ARArchive AR;
|
||||
|
||||
bool CheckMember(const char *Name);
|
||||
|
||||
public:
|
||||
|
||||
class ControlExtract;
|
||||
class MemControlExtract;
|
||||
|
||||
bool ExtractControl(pkgDataBase &DB);
|
||||
bool ExtractArchive(pkgDirStream &Stream);
|
||||
pkgCache::VerIterator MergeControl(pkgDataBase &DB);
|
||||
const ARArchive::Member *GotoMember(const char *Name);
|
||||
inline FileFd &GetFile() {return File;};
|
||||
|
||||
debDebFile(FileFd &File);
|
||||
};
|
||||
|
||||
class debDebFile::ControlExtract : public pkgDirStream
|
||||
{
|
||||
public:
|
||||
|
||||
virtual bool DoItem(Item &Itm,int &Fd);
|
||||
};
|
||||
|
||||
class debDebFile::MemControlExtract : public pkgDirStream
|
||||
{
|
||||
bool IsControl;
|
||||
|
||||
public:
|
||||
|
||||
char *Control;
|
||||
pkgTagSection Section;
|
||||
unsigned long Length;
|
||||
string Member;
|
||||
|
||||
// Members from DirStream
|
||||
virtual bool DoItem(Item &Itm,int &Fd);
|
||||
virtual bool Process(Item &Itm,const unsigned char *Data,
|
||||
unsigned long Size,unsigned long Pos);
|
||||
|
||||
|
||||
// Helpers
|
||||
bool Read(debDebFile &Deb);
|
||||
bool TakeControl(const void *Data,unsigned long Size);
|
||||
|
||||
MemControlExtract() : IsControl(false), Control(0), Length(0), Member("control") {};
|
||||
MemControlExtract(string Member) : IsControl(false), Control(0), Length(0), Member(Member) {};
|
||||
~MemControlExtract() {delete [] Control;};
|
||||
};
|
||||
/*}}}*/
|
||||
|
||||
#endif
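
For example, reading a package's control record straight into memory goes through MemControlExtract; the following hypothetical snippet (function name and error handling invented for illustration) shows the intended call sequence:

// Hypothetical: fetch the Package: field of a .deb without touching a database.
#include <apt-pkg/debfile.h>
#include <apt-pkg/error.h>

bool ReadPackageName(FileFd &Fd,string &Name)
{
   debDebFile Deb(Fd);                        // verifies the three AR members
   if (_error->PendingError() == true)
      return false;

   debDebFile::MemControlExtract Extract;     // defaults to the "control" member
   if (Extract.Read(Deb) == false)            // runs the tar extractor internally
      return false;

   const char *Start, *Stop;
   if (Extract.Section.Find("Package",Start,Stop) == false)
      return false;
   Name.assign(Start,Stop - Start);
   return true;
}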
|
493
apt/apt-inst/deb/dpkgdb.cc
Normal file
@ -0,0 +1,493 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: dpkgdb.cc,v 1.4 2003/01/29 18:43:47 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
DPKGv1 Database Implementation
|
||||
|
||||
This class provides parsers and other implementations for the DPKGv1
|
||||
database. It reads the diversion file, the list files and the status
|
||||
file to build both the list of currently installed files and the
|
||||
currently installed package list.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Include Files /*{{{*/
|
||||
#ifdef __GNUG__
|
||||
#pragma implementation "apt-pkg/dpkgdb.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/dpkgdb.h>
|
||||
#include <apt-pkg/configuration.h>
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/progress.h>
|
||||
#include <apt-pkg/tagfile.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
|
||||
#include <stdio.h>
|
||||
#include <errno.h>
|
||||
#include <sys/stat.h>
|
||||
#include <sys/mman.h>
|
||||
#include <fcntl.h>
|
||||
#include <unistd.h>
|
||||
#include <ctype.h>
|
||||
#include <iostream>
|
||||
/*}}}*/
|
||||
using namespace std;
|
||||
|
||||
// EraseDir - Erase A Directory /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is necessary to create a new empty sub directory. The caller should
|
||||
invoke mkdir after this with the proper permissions and check for
|
||||
error. Maybe stick this in fileutils */
|
||||
static bool EraseDir(const char *Dir)
|
||||
{
|
||||
// First we try a simple RM
|
||||
if (rmdir(Dir) == 0 ||
|
||||
errno == ENOENT)
|
||||
return true;
|
||||
|
||||
// A file? Easy enough..
|
||||
if (errno == ENOTDIR)
|
||||
{
|
||||
if (unlink(Dir) != 0)
|
||||
return _error->Errno("unlink","Failed to remove %s",Dir);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Should not happen
|
||||
if (errno != ENOTEMPTY)
|
||||
return _error->Errno("rmdir","Failed to remove %s",Dir);
|
||||
|
||||
// Purge it using rm
|
||||
int Pid = ExecFork();
|
||||
|
||||
// Spawn the subprocess
|
||||
if (Pid == 0)
|
||||
{
|
||||
execlp(_config->Find("Dir::Bin::rm","/bin/rm").c_str(),
|
||||
"rm","-rf","--",Dir,0);
|
||||
_exit(100);
|
||||
}
|
||||
return ExecWait(Pid,_config->Find("dir::bin::rm","/bin/rm").c_str());
|
||||
}
|
||||
/*}}}*/
|
||||
// DpkgDB::debDpkgDB - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
debDpkgDB::debDpkgDB() : CacheMap(0), FileMap(0)
|
||||
{
|
||||
AdminDir = flNotFile(_config->Find("Dir::State::status"));
|
||||
DiverInode = 0;
|
||||
DiverTime = 0;
|
||||
}
|
||||
/*}}}*/
|
||||
// DpkgDB::~debDpkgDB - Destructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
debDpkgDB::~debDpkgDB()
|
||||
{
|
||||
delete Cache;
|
||||
Cache = 0;
|
||||
delete CacheMap;
|
||||
CacheMap = 0;
|
||||
|
||||
delete FList;
|
||||
FList = 0;
|
||||
delete FileMap;
|
||||
FileMap = 0;
|
||||
}
|
||||
/*}}}*/
|
||||
// DpkgDB::InitMetaTmp - Get the temp dir for meta information /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This creates+empties the meta temporary directory /var/lib/dpkg/tmp.ci
|
||||
Only one package at a time can be using the returned meta directory. */
|
||||
bool debDpkgDB::InitMetaTmp(string &Dir)
|
||||
{
|
||||
string Tmp = AdminDir + "tmp.ci/";
|
||||
if (EraseDir(Tmp.c_str()) == false)
|
||||
return _error->Error("Unable to create %s",Tmp.c_str());
|
||||
if (mkdir(Tmp.c_str(),0755) != 0)
|
||||
return _error->Errno("mkdir","Unable to create %s",Tmp.c_str());
|
||||
|
||||
// Verify it is on the same filesystem as the main info directory
|
||||
dev_t Dev;
|
||||
struct stat St;
|
||||
if (stat((AdminDir + "info").c_str(),&St) != 0)
|
||||
return _error->Errno("stat","Failed to stat %sinfo",AdminDir.c_str());
|
||||
Dev = St.st_dev;
|
||||
if (stat(Tmp.c_str(),&St) != 0)
|
||||
return _error->Errno("stat","Failed to stat %s",Tmp.c_str());
|
||||
if (Dev != St.st_dev)
|
||||
return _error->Error("The info and temp directories need to be on the same filesystem");
|
||||
|
||||
// Done
|
||||
Dir = Tmp;
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// DpkgDB::ReadyPkgCache - Prepare the cache with the current status /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This reads in the status file into an empty cache. This really needs
|
||||
to be somehow unified with the high level APT notion of the Database
|
||||
directory, but there is no clear way on how to do that yet. */
|
||||
bool debDpkgDB::ReadyPkgCache(OpProgress &Progress)
|
||||
{
|
||||
if (Cache != 0)
|
||||
{
|
||||
Progress.OverallProgress(1,1,1,"Reading Package Lists");
|
||||
return true;
|
||||
}
|
||||
|
||||
if (CacheMap != 0)
|
||||
{
|
||||
delete CacheMap;
|
||||
CacheMap = 0;
|
||||
}
|
||||
|
||||
if (pkgMakeOnlyStatusCache(Progress,&CacheMap) == false)
|
||||
return false;
|
||||
Cache->DropProgress();
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// DpkgDB::ReadFList - Read the File Listings in /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This reads the file listing in from the state directory. This is a
|
||||
performance critical routine, as it needs to parse about 50k lines of
|
||||
text spread over a hundred or more files. For an initial cold start
|
||||
most of the time is spent in reading file inodes and so on, not
|
||||
actually parsing. */
|
||||
bool debDpkgDB::ReadFList(OpProgress &Progress)
|
||||
{
|
||||
// Count the number of packages we need to read information for
|
||||
unsigned long Total = 0;
|
||||
pkgCache &Cache = this->Cache->GetCache();
|
||||
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
|
||||
{
|
||||
// Only not installed packages have no files.
|
||||
if (I->CurrentState == pkgCache::State::NotInstalled)
|
||||
continue;
|
||||
Total++;
|
||||
}
|
||||
|
||||
/* Switch into the admin dir, this prevents useless lookups for the
|
||||
path components */
|
||||
string Cwd = SafeGetCWD();
|
||||
if (chdir((AdminDir + "info/").c_str()) != 0)
|
||||
return _error->Errno("chdir","Failed to change to the admin dir %sinfo",AdminDir.c_str());
|
||||
|
||||
// Allocate a buffer. Anything larger than this buffer will be mmaped
|
||||
unsigned long BufSize = 32*1024;
|
||||
char *Buffer = new char[BufSize];
|
||||
|
||||
// Begin Loading them
|
||||
unsigned long Count = 0;
|
||||
char Name[300];
|
||||
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
|
||||
{
|
||||
/* Only not installed packages have no files. ConfFile packages have
|
||||
file lists but we don't want to read them in */
|
||||
if (I->CurrentState == pkgCache::State::NotInstalled ||
|
||||
I->CurrentState == pkgCache::State::ConfigFiles)
|
||||
continue;
|
||||
|
||||
// Fetch a package handle to associate with the file
|
||||
pkgFLCache::PkgIterator FlPkg = FList->GetPkg(I.Name(),0,true);
|
||||
if (FlPkg.end() == true)
|
||||
{
|
||||
_error->Error("Internal Error getting a Package Name");
|
||||
break;
|
||||
}
|
||||
|
||||
Progress.OverallProgress(Count,Total,1,"Reading File Listing");
|
||||
|
||||
// Open the list file
|
||||
snprintf(Name,sizeof(Name),"%s.list",I.Name());
|
||||
int Fd = open(Name,O_RDONLY);
|
||||
|
||||
/* Okay this is very strange and bad.. Best thing is to bail and
|
||||
instruct the user to look into it. */
|
||||
struct stat Stat;
|
||||
if (Fd == -1 || fstat(Fd,&Stat) != 0)
|
||||
{
|
||||
_error->Errno("open","Failed to open the list file '%sinfo/%s'. If you "
|
||||
"cannot restore this file then make it empty "
|
||||
"and immediately re-install the same version of the package!",
|
||||
AdminDir.c_str(),Name);
|
||||
break;
|
||||
}
|
||||
|
||||
// Set File to be a memory buffer containing the whole file
|
||||
char *File;
|
||||
if ((unsigned)Stat.st_size < BufSize)
|
||||
{
|
||||
if (read(Fd,Buffer,Stat.st_size) != Stat.st_size)
|
||||
{
|
||||
_error->Errno("read","Failed reading the list file %sinfo/%s",
|
||||
AdminDir.c_str(),Name);
|
||||
close(Fd);
|
||||
break;
|
||||
}
|
||||
File = Buffer;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Use mmap
|
||||
File = (char *)mmap(0,Stat.st_size,PROT_READ,MAP_PRIVATE,Fd,0);
|
||||
if (File == (char *)(-1))
|
||||
{
|
||||
_error->Errno("mmap","Failed reading the list file %sinfo/%s",
|
||||
AdminDir.c_str(),Name);
|
||||
close(Fd);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Parse it
|
||||
const char *Start = File;
|
||||
const char *End = File;
|
||||
const char *Finish = File + Stat.st_size;
|
||||
for (; End < Finish; End++)
|
||||
{
|
||||
// Not an end of line
|
||||
if (*End != '\n' && End + 1 < Finish)
|
||||
continue;
|
||||
|
||||
// Skip blank lines
|
||||
if (End - Start > 1)
|
||||
{
|
||||
pkgFLCache::NodeIterator Node = FList->GetNode(Start,End,
|
||||
FlPkg.Offset(),true,false);
|
||||
if (Node.end() == true)
|
||||
{
|
||||
_error->Error("Internal Error getting a Node");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Skip past the end of line
|
||||
for (; *End == '\n' && End < Finish; End++);
|
||||
Start = End;
|
||||
}
|
||||
|
||||
close(Fd);
|
||||
if ((unsigned)Stat.st_size >= BufSize)
|
||||
munmap((caddr_t)File,Stat.st_size);
|
||||
|
||||
// Failed
|
||||
if (End < Finish)
|
||||
break;
|
||||
|
||||
Count++;
|
||||
}
|
||||
|
||||
delete [] Buffer;
|
||||
if (chdir(Cwd.c_str()) != 0)
|
||||
chdir("/");
|
||||
|
||||
return !_error->PendingError();
|
||||
}
|
||||
/*}}}*/
|
||||
// DpkgDB::ReadDiversions - Load the diversions file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Read the diversion file in from disk. This is usually invoked by
|
||||
LoadChanges before performing an operation that uses the FLCache. */
|
||||
bool debDpkgDB::ReadDiversions()
|
||||
{
|
||||
struct stat Stat;
|
||||
if (stat((AdminDir + "diversions").c_str(),&Stat) != 0)
|
||||
return true;
|
||||
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
|
||||
FILE *Fd = fopen((AdminDir + "diversions").c_str(),"r");
|
||||
if (Fd == 0)
|
||||
return _error->Errno("fopen","Failed to open the diversions file %sdiversions",AdminDir.c_str());
|
||||
|
||||
FList->BeginDiverLoad();
|
||||
while (1)
|
||||
{
|
||||
char From[300];
|
||||
char To[300];
|
||||
char Package[100];
|
||||
|
||||
// Read the three lines in
|
||||
if (fgets(From,sizeof(From),Fd) == 0)
|
||||
break;
|
||||
if (fgets(To,sizeof(To),Fd) == 0 ||
|
||||
fgets(Package,sizeof(Package),Fd) == 0)
|
||||
{
|
||||
_error->Error("The diversion file is corrupted");
|
||||
break;
|
||||
}
|
||||
|
||||
// Strip the \ns
|
||||
unsigned long Len = strlen(From);
|
||||
if (Len < 2 || From[Len-1] != '\n')
|
||||
_error->Error("Invalid line in the diversion file: %s",From);
|
||||
else
|
||||
From[Len-1] = 0;
|
||||
Len = strlen(To);
|
||||
if (Len < 2 || To[Len-1] != '\n')
|
||||
_error->Error("Invalid line in the diversion file: %s",To);
|
||||
else
|
||||
To[Len-1] = 0;
|
||||
Len = strlen(Package);
|
||||
if (Len < 2 || Package[Len-1] != '\n')
|
||||
_error->Error("Invalid line in the diversion file: %s",Package);
|
||||
else
|
||||
Package[Len-1] = 0;
|
||||
|
||||
// Make sure the lines were parsed OK
|
||||
if (_error->PendingError() == true)
|
||||
break;
|
||||
|
||||
// Fetch a package
|
||||
if (strcmp(Package,":") == 0)
|
||||
Package[0] = 0;
|
||||
pkgFLCache::PkgIterator FlPkg = FList->GetPkg(Package,0,true);
|
||||
if (FlPkg.end() == true)
|
||||
{
|
||||
_error->Error("Internal Error getting a Package Name");
|
||||
break;
|
||||
}
|
||||
|
||||
// Install the diversion
|
||||
if (FList->AddDiversion(FlPkg,From,To) == false)
|
||||
{
|
||||
_error->Error("Internal Error adding a diversion");
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (_error->PendingError() == false)
|
||||
FList->FinishDiverLoad();
|
||||
|
||||
DiverInode = Stat.st_ino;
|
||||
DiverTime = Stat.st_mtime;
|
||||
|
||||
fclose(Fd);
|
||||
return !_error->PendingError();
|
||||
}
|
||||
/*}}}*/
|
||||
// DpkgDB::ReadFileList - Read the file listing /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Read in the file listing. The file listing is created from three
|
||||
sources, *.list, Conffile sections and the Diversion table. */
|
||||
bool debDpkgDB::ReadyFileList(OpProgress &Progress)
|
||||
{
|
||||
if (Cache == 0)
|
||||
return _error->Error("The pkg cache must be initialize first");
|
||||
if (FList != 0)
|
||||
{
|
||||
Progress.OverallProgress(1,1,1,"Reading File List");
|
||||
return true;
|
||||
}
|
||||
|
||||
// Create the cache and read in the file listing
|
||||
FileMap = new DynamicMMap(MMap::Public);
|
||||
FList = new pkgFLCache(*FileMap);
|
||||
if (_error->PendingError() == true ||
|
||||
ReadFList(Progress) == false ||
|
||||
ReadConfFiles() == false ||
|
||||
ReadDiversions() == false)
|
||||
{
|
||||
delete FList;
|
||||
delete FileMap;
|
||||
FileMap = 0;
|
||||
FList = 0;
|
||||
return false;
|
||||
}
|
||||
|
||||
cout << "Node: " << FList->HeaderP->NodeCount << ',' << FList->HeaderP->UniqNodes << endl;
|
||||
cout << "Dir: " << FList->HeaderP->DirCount << endl;
|
||||
cout << "Package: " << FList->HeaderP->PackageCount << endl;
|
||||
cout << "HashSize: " << FList->HeaderP->HashSize << endl;
|
||||
cout << "Size: " << FileMap->Size() << endl;
|
||||
cout << endl;
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// DpkgDB::ReadConfFiles - Read the conf file sections from the s-file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Reading the conf files is done by reparsing the status file. This is
|
||||
actually rather fast so it is no big deal. */
|
||||
bool debDpkgDB::ReadConfFiles()
|
||||
{
|
||||
FileFd File(_config->FindFile("Dir::State::status"),FileFd::ReadOnly);
|
||||
pkgTagFile Tags(&File);
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
|
||||
pkgTagSection Section;
|
||||
while (1)
|
||||
{
|
||||
// Skip to the next section
|
||||
unsigned long Offset = Tags.Offset();
|
||||
if (Tags.Step(Section) == false)
|
||||
break;
|
||||
|
||||
// Parse the line
|
||||
const char *Start;
|
||||
const char *Stop;
|
||||
if (Section.Find("Conffiles",Start,Stop) == false)
|
||||
continue;
|
||||
|
||||
const char *PkgStart;
|
||||
const char *PkgEnd;
|
||||
if (Section.Find("Package",PkgStart,PkgEnd) == false)
|
||||
return _error->Error("Failed to find a Package: Header, offset %lu",Offset);
|
||||
|
||||
// Snag a package record for it
|
||||
pkgFLCache::PkgIterator FlPkg = FList->GetPkg(PkgStart,PkgEnd,true);
|
||||
if (FlPkg.end() == true)
|
||||
return _error->Error("Internal Error getting a Package Name");
|
||||
|
||||
// Parse the conf file lines
|
||||
while (1)
|
||||
{
|
||||
for (; isspace(*Start) != 0 && Start < Stop; Start++);
|
||||
if (Start == Stop)
|
||||
break;
|
||||
|
||||
// Split it into words
|
||||
const char *End = Start;
|
||||
for (; isspace(*End) == 0 && End < Stop; End++);
|
||||
const char *StartMd5 = End;
|
||||
for (; isspace(*StartMd5) != 0 && StartMd5 < Stop; StartMd5++);
|
||||
const char *EndMd5 = StartMd5;
|
||||
for (; isspace(*EndMd5) == 0 && EndMd5 < Stop; EndMd5++);
|
||||
if (StartMd5 == EndMd5 || Start == End)
|
||||
return _error->Error("Bad ConfFile section in the status file. Offset %lu",Offset);
|
||||
|
||||
// Insert a new entry
|
||||
unsigned char MD5[16];
|
||||
if (Hex2Num(string(StartMd5,EndMd5-StartMd5),MD5,16) == false)
|
||||
return _error->Error("Error parsing MD5. Offset %lu",Offset);
|
||||
|
||||
if (FList->AddConfFile(Start,End,FlPkg,MD5) == false)
|
||||
return false;
|
||||
Start = EndMd5;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// DpkgDB::LoadChanges - Read in any changed state files /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* The only file in the dpkg system that can change while packages are
|
||||
unpacking is the diversions file. */
|
||||
bool debDpkgDB::LoadChanges()
|
||||
{
|
||||
struct stat Stat;
|
||||
if (stat((AdminDir + "diversions").c_str(),&Stat) != 0)
|
||||
return true;
|
||||
if (DiverInode == Stat.st_ino && DiverTime == Stat.st_mtime)
|
||||
return true;
|
||||
return ReadDiversions();
|
||||
}
|
||||
/*}}}*/
|
53
apt/apt-inst/deb/dpkgdb.h
Normal file
@ -0,0 +1,53 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: dpkgdb.h,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
DPKGv1 Data Base Implementation
|
||||
|
||||
The DPKGv1 database is typically stored in /var/lib/dpkg/. For
|
||||
DPKGv1 the 'meta' information is the contents of the .deb control.tar.gz
|
||||
member prepended by the package name. The meta information is unpacked
|
||||
in its temporary directory and then migrated into the main list dir
|
||||
at a checkpoint.
|
||||
|
||||
Journaling is provided by synchronized file writes to the updates sub
|
||||
directory.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
#ifndef PKGLIB_DPKGDB_H
|
||||
#define PKGLIB_DPKGDB_H
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/dpkgdb.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/database.h>
|
||||
|
||||
class debDpkgDB : public pkgDataBase
|
||||
{
|
||||
protected:
|
||||
|
||||
string AdminDir;
|
||||
DynamicMMap *CacheMap;
|
||||
DynamicMMap *FileMap;
|
||||
unsigned long DiverInode;
|
||||
signed long DiverTime;
|
||||
|
||||
virtual bool InitMetaTmp(string &Dir);
|
||||
bool ReadFList(OpProgress &Progress);
|
||||
bool ReadDiversions();
|
||||
bool ReadConfFiles();
|
||||
|
||||
public:
|
||||
|
||||
virtual bool ReadyFileList(OpProgress &Progress);
|
||||
virtual bool ReadyPkgCache(OpProgress &Progress);
|
||||
virtual bool LoadChanges();
|
||||
|
||||
debDpkgDB();
|
||||
virtual ~debDpkgDB();
|
||||
};
|
||||
|
||||
#endif
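
A hedged sketch of pulling in the dpkg state with this class (hypothetical function, assuming Dir::State::status points at a dpkg status file):

// Hypothetical: load the dpkg status database and the installed file lists.
#include <apt-pkg/dpkgdb.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/error.h>

bool LoadDpkgState()
{
   debDpkgDB DB;                     // admin dir derived from Dir::State::status
   OpTextProgress Progress;
   if (DB.ReadyPkgCache(Progress) == false)
      return false;
   // Builds the FL cache from *.list files, Conffiles sections and diversions.
   if (DB.ReadyFileList(Progress) == false)
      return false;
   return true;
}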
|
103
apt/apt-inst/dirstream.cc
Normal file
@ -0,0 +1,103 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: dirstream.cc,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Directory Stream
|
||||
|
||||
This class provides a simple basic extractor that can be used for
|
||||
a number of purposes.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Include Files /*{{{*/
|
||||
#ifdef __GNUG__
|
||||
#pragma implementation "apt-pkg/dirstream.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/dirstream.h>
|
||||
#include <apt-pkg/error.h>
|
||||
|
||||
#include <fcntl.h>
|
||||
#include <sys/stat.h>
|
||||
#include <sys/types.h>
|
||||
#include <errno.h>
|
||||
#include <utime.h>
|
||||
#include <unistd.h>
|
||||
/*}}}*/
|
||||
|
||||
// DirStream::DoItem - Process an item /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is a very simple extractor, it does not deal with things like
|
||||
overwriting directories with files and so on. */
|
||||
bool pkgDirStream::DoItem(Item &Itm,int &Fd)
|
||||
{
|
||||
switch (Itm.Type)
|
||||
{
|
||||
case Item::File:
|
||||
{
|
||||
/* Open the output file, NDELAY is used to prevent this from
|
||||
blowing up on device special files.. */
|
||||
int iFd = open(Itm.Name,O_NDELAY|O_WRONLY|O_CREAT|O_TRUNC|O_APPEND,
|
||||
Itm.Mode);
|
||||
if (iFd < 0)
|
||||
return _error->Errno("open","Failed write file %s",
|
||||
Itm.Name);
|
||||
|
||||
// fchmod deals with umask and fchown sets the ownership
|
||||
if (fchmod(iFd,Itm.Mode) != 0)
|
||||
return _error->Errno("fchmod","Failed write file %s",
|
||||
Itm.Name);
|
||||
if (fchown(iFd,Itm.UID,Itm.GID) != 0 && errno != EPERM)
|
||||
return _error->Errno("fchown","Failed write file %s",
|
||||
Itm.Name);
|
||||
Fd = iFd;
|
||||
return true;
|
||||
}
|
||||
|
||||
case Item::HardLink:
|
||||
case Item::SymbolicLink:
|
||||
case Item::CharDevice:
|
||||
case Item::BlockDevice:
|
||||
case Item::Directory:
|
||||
case Item::FIFO:
|
||||
break;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// DirStream::FinishedFile - Finished processing a file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
bool pkgDirStream::FinishedFile(Item &Itm,int Fd)
|
||||
{
|
||||
if (Fd < 0)
|
||||
return true;
|
||||
|
||||
if (close(Fd) != 0)
|
||||
return _error->Errno("close","Failed to close file %s",Itm.Name);
|
||||
|
||||
/* Set the modification times. The only way it can fail is if someone
|
||||
has futzed with our file, which is intolerable :> */
|
||||
struct utimbuf Time;
|
||||
Time.actime = Itm.MTime;
|
||||
Time.modtime = Itm.MTime;
|
||||
if (utime(Itm.Name,&Time) != 0)
|
||||
_error->Errno("utime","Failed to close file %s",Itm.Name);
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// DirStream::Fail - Failed processing a file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
bool pkgDirStream::Fail(Item &Itm,int Fd)
|
||||
{
|
||||
if (Fd < 0)
|
||||
return true;
|
||||
|
||||
close(Fd);
|
||||
return false;
|
||||
}
|
||||
/*}}}*/
|
61
apt/apt-inst/dirstream.h
Normal file
@ -0,0 +1,61 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: dirstream.h,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Directory Stream
|
||||
|
||||
When unpacking the contents of the archive are passed into a directory
|
||||
stream class for analysis and processing. The class controls all aspects
|
||||
of actually writing the directory stream to disk. The low level
|
||||
archive handlers are only responsible for decoding the archive format
|
||||
and sending events (via method calls) to the specified directory
|
||||
stream.
|
||||
|
||||
When unpacking a real file the archive handler is passed back a file
|
||||
handle to write the data to, this is to support strange
|
||||
archives+unpacking methods. If that fd is -1 then the file data is
|
||||
simply ignored.
|
||||
|
||||
The provided defaults do the 'Right Thing' for a normal unpacking
|
||||
process (ie 'tar')
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
#ifndef PKGLIB_DIRSTREAM_H
|
||||
#define PKGLIB_DIRSTREAM_H
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/dirstream.h"
|
||||
#endif
|
||||
|
||||
class pkgDirStream
|
||||
{
|
||||
public:
|
||||
|
||||
// All possible information about a component
|
||||
struct Item
|
||||
{
|
||||
enum Type_t {File, HardLink, SymbolicLink, CharDevice, BlockDevice,
|
||||
Directory, FIFO} Type;
|
||||
char *Name;
|
||||
char *LinkTarget;
|
||||
unsigned long Mode;
|
||||
unsigned long UID;
|
||||
unsigned long GID;
|
||||
unsigned long Size;
|
||||
unsigned long MTime;
|
||||
unsigned long Major;
|
||||
unsigned long Minor;
|
||||
};
|
||||
|
||||
virtual bool DoItem(Item &Itm,int &Fd);
|
||||
virtual bool Fail(Item &Itm,int Fd);
|
||||
virtual bool FinishedFile(Item &Itm,int Fd);
|
||||
virtual bool Process(Item &Itm,const unsigned char *Data,
|
||||
unsigned long Size,unsigned long Pos) {return true;};
|
||||
|
||||
virtual ~pkgDirStream() {};
|
||||
};
|
||||
|
||||
#endif
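
Because the archive handlers only emit events, a stream does not have to write anything to disk at all; as a small hypothetical example (not part of this commit), a stream that merely lists member names can override DoItem and leave Fd at -1 so the file data is discarded:

// Hypothetical stream: print each archive member instead of unpacking it.
#include <apt-pkg/dirstream.h>
#include <iostream>

class pkgListStream : public pkgDirStream
{
   public:

   virtual bool DoItem(Item &Itm,int &Fd)
   {
      std::cout << Itm.Name << std::endl;
      Fd = -1;      // -1 tells the extractor to throw the file contents away
      return true;
   }
};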
|
5
apt/apt-inst/dpkg-diffs.txt
Normal file
@ -0,0 +1,5 @@
- Replacing directories with files
  dpkg permits this with the weak condition that the directory is owned only
  by the package. APT requires that the directory have no files that are not
  owned by the package. Replaces are specifically not checked to prevent
  file list corruption.
511
apt/apt-inst/extract.cc
Normal file
@ -0,0 +1,511 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: extract.cc,v 1.4 2003/01/29 18:43:47 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Archive Extraction Directory Stream
|
||||
|
||||
Extraction for each file is a bit of an involved process. Each object
|
||||
undergoes an atomic backup, overwrite, erase sequence. First the
|
||||
object is unpacked to '.dpkg.new' then the original is hardlinked to
|
||||
'.dpkg.tmp' and finally the new object is renamed to overwrite the old
|
||||
one. From an external perspective the file never ceased to exist.
|
||||
After the archive has been successfully unpacked the .dpkg.tmp files
|
||||
are erased. A failure causes all the .dpkg.tmp files to be restored.
|
||||
|
||||
Decisions about unpacking go like this:
|
||||
- Store the original filename in the file listing
|
||||
- Resolve any diversions that would affect this file, all checks
|
||||
below apply to the diverted name, not the real one.
|
||||
- Resolve any symlinked configuration files.
|
||||
- If the existing file does not exist then .dpkg-tmp is checked for.
|
||||
[Note, this is reduced to only check if a file was expected to be
|
||||
there]
|
||||
- If the existing link/file is not a directory then it is replaced
|
||||
regardless
|
||||
- If the existing link/directory is being replaced by a directory then
|
||||
absolutely nothing happens.
|
||||
- If the existing link/directory is being replaced by a link then
|
||||
absolutely nothing happens.
|
||||
- If the existing link/directory is being replaced by a non-directory
|
||||
then this will abort if the package is not the sole owner of the
|
||||
directory. [Note, this is changed to not happen if the directory
|
||||
is non-empty - that is, it only includes files that are part of this
|
||||
package - prevents removing user files accidentally.]
|
||||
- If the non-directory exists in the listing database and it
|
||||
does not belong to the current package then an overwrite condition
|
||||
is invoked.
|
||||
|
||||
As we unpack we record the file list differences in the FL cache. If
|
||||
we need to unroll then the FL cache knows which files have been unpacked
|
||||
and can undo. When we need to erase then it knows which files have not
|
||||
been unpacked.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Include Files /*{{{*/
|
||||
#ifdef __GNUG__
|
||||
#pragma implementation "apt-pkg/extract.h"
|
||||
#endif
|
||||
#include <apt-pkg/extract.h>
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/debversion.h>
|
||||
|
||||
#include <sys/stat.h>
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <errno.h>
|
||||
#include <dirent.h>
|
||||
#include <iostream>
|
||||
/*}}}*/
|
||||
using namespace std;
|
||||
|
||||
static const char *TempExt = "dpkg-tmp";
|
||||
//static const char *NewExt = "dpkg-new";
|
||||
|
||||
// Extract::pkgExtract - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
pkgExtract::pkgExtract(pkgFLCache &FLCache,pkgCache::VerIterator Ver) :
|
||||
FLCache(FLCache), Ver(Ver)
|
||||
{
|
||||
FLPkg = FLCache.GetPkg(Ver.ParentPkg().Name(),true);
|
||||
if (FLPkg.end() == true)
|
||||
return;
|
||||
Debug = true;
|
||||
}
|
||||
/*}}}*/
|
||||
// Extract::DoItem - Handle a single item from the stream /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This performs the setup for the extraction.. */
|
||||
bool pkgExtract::DoItem(Item &Itm,int &Fd)
|
||||
{
|
||||
char Temp[sizeof(FileName)];
|
||||
|
||||
/* Strip any leading/trailing /s from the filename, then copy it to the
|
||||
temp buffer and re-apply the leading / We use a class variable
|
||||
to store the new filename for use by the three extraction funcs */
|
||||
char *End = FileName+1;
|
||||
const char *I = Itm.Name;
|
||||
for (; *I != 0 && *I == '/'; I++);
|
||||
*FileName = '/';
|
||||
for (; *I != 0 && End < FileName + sizeof(FileName); I++, End++)
|
||||
*End = *I;
|
||||
if (End + 20 >= FileName + sizeof(FileName))
|
||||
return _error->Error("The path %s is too long",Itm.Name);
|
||||
for (; End > FileName && End[-1] == '/'; End--);
|
||||
*End = 0;
|
||||
Itm.Name = FileName;
|
||||
|
||||
/* Lookup the file. Nde is the file [group] we are going to write to and
|
||||
RealNde is the actual node we are manipulating. Due to diversions
|
||||
they may be entirely different. */
|
||||
pkgFLCache::NodeIterator Nde = FLCache.GetNode(Itm.Name,End,0,false,false);
|
||||
pkgFLCache::NodeIterator RealNde = Nde;
|
||||
|
||||
// See if the file is already in the file listing
|
||||
unsigned long FileGroup = RealNde->File;
|
||||
for (; RealNde.end() == false && FileGroup == RealNde->File; RealNde++)
|
||||
if (RealNde.RealPackage() == FLPkg)
|
||||
break;
|
||||
|
||||
// Nope, create an entry
|
||||
if (RealNde.end() == true)
|
||||
{
|
||||
RealNde = FLCache.GetNode(Itm.Name,End,FLPkg.Offset(),true,false);
|
||||
if (RealNde.end() == true)
|
||||
return false;
|
||||
RealNde->Flags |= pkgFLCache::Node::NewFile;
|
||||
}
|
||||
|
||||
/* Check if this entry already was unpacked. The only time this should
|
||||
ever happen is if someone has hacked tar to support capabilities, in
|
||||
which case this needs to be modified anyhow.. */
|
||||
if ((RealNde->Flags & pkgFLCache::Node::Unpacked) ==
|
||||
pkgFLCache::Node::Unpacked)
|
||||
return _error->Error("Unpacking %s more than once",Itm.Name);
|
||||
|
||||
if (Nde.end() == true)
|
||||
Nde = RealNde;
|
||||
|
||||
/* Consider a diverted file - We are not permitted to divert directories,
|
||||
but everything else is fair game (including conf files!) */
|
||||
if ((Nde->Flags & pkgFLCache::Node::Diversion) != 0)
|
||||
{
|
||||
if (Itm.Type == Item::Directory)
|
||||
return _error->Error("The directory %s is diverted",Itm.Name);
|
||||
|
||||
/* A package overwriting a diversion target is just the same as
|
||||
overwriting a normally owned file and is checked for below in
|
||||
the overwrites mechanism */
|
||||
|
||||
/* If this package is trying to overwrite the target of a diversion,
|
||||
that is never, ever permitted */
|
||||
pkgFLCache::DiverIterator Div = Nde.Diversion();
|
||||
if (Div.DivertTo() == Nde)
|
||||
return _error->Error("The package is trying to write to the "
|
||||
"diversion target %s/%s",Nde.DirN(),Nde.File());
|
||||
|
||||
// See if it is us and we are following it in the right direction
|
||||
if (Div->OwnerPkg != FLPkg.Offset() && Div.DivertFrom() == Nde)
|
||||
{
|
||||
Nde = Div.DivertTo();
|
||||
End = FileName + snprintf(FileName,sizeof(FileName)-20,"%s/%s",
|
||||
Nde.DirN(),Nde.File());
|
||||
if (End <= FileName)
|
||||
return _error->Error("The diversion path is too long");
|
||||
}
|
||||
}
|
||||
|
||||
// Deal with symlinks and conf files
|
||||
if ((RealNde->Flags & pkgFLCache::Node::NewConfFile) ==
|
||||
pkgFLCache::Node::NewConfFile)
|
||||
{
|
||||
string Res = flNoLink(Itm.Name);
|
||||
if (Res.length() > sizeof(FileName))
|
||||
return _error->Error("The path %s is too long",Res.c_str());
|
||||
if (Debug == true)
|
||||
clog << "Followed conf file from " << FileName << " to " << Res << endl;
|
||||
Itm.Name = strcpy(FileName,Res.c_str());
|
||||
}
|
||||
|
||||
/* Get information about the existing file, and attempt to restore
|
||||
a backup if it does not exist */
|
||||
struct stat LExisting;
|
||||
bool EValid = false;
|
||||
if (lstat(Itm.Name,&LExisting) != 0)
|
||||
{
|
||||
// This is bad news.
|
||||
if (errno != ENOENT)
|
||||
return _error->Errno("stat","Failed to stat %s",Itm.Name);
|
||||
|
||||
// See if we can recover the backup file
|
||||
if (Nde.end() == false)
|
||||
{
|
||||
snprintf(Temp,sizeof(Temp),"%s.%s",Itm.Name,TempExt);
|
||||
if (rename(Temp,Itm.Name) != 0 && errno != ENOENT)
|
||||
return _error->Errno("rename","Failed to rename %s to %s",
|
||||
Temp,Itm.Name);
|
||||
if (stat(Itm.Name,&LExisting) != 0)
|
||||
{
|
||||
if (errno != ENOENT)
|
||||
return _error->Errno("stat","Failed to stat %s",Itm.Name);
|
||||
}
|
||||
else
|
||||
EValid = true;
|
||||
}
|
||||
}
|
||||
else
|
||||
EValid = true;
|
||||
|
||||
/* If the file is a link we need to stat its destination, get the
|
||||
existing file modes */
|
||||
struct stat Existing = LExisting;
|
||||
if (EValid == true && S_ISLNK(Existing.st_mode))
|
||||
{
|
||||
if (stat(Itm.Name,&Existing) != 0)
|
||||
{
|
||||
if (errno != ENOENT)
|
||||
return _error->Errno("stat","Failed to stat %s",Itm.Name);
|
||||
Existing = LExisting;
|
||||
}
|
||||
}
|
||||
|
||||
// We pretend a non-existing file looks like it is a normal file
|
||||
if (EValid == false)
|
||||
Existing.st_mode = S_IFREG;
|
||||
|
||||
/* Okay, at this point 'Existing' is the stat information for the
|
||||
real non-link file */
|
||||
|
||||
/* The only way this can be a no-op is if a directory is being
|
||||
replaced by a directory or by a link */
|
||||
if (S_ISDIR(Existing.st_mode) != 0 &&
|
||||
(Itm.Type == Item::Directory || Itm.Type == Item::SymbolicLink))
|
||||
return true;
|
||||
|
||||
/* Non-Directory being replaced by non-directory. We check for over
|
||||
writes here. */
|
||||
if (Nde.end() == false)
|
||||
{
|
||||
if (HandleOverwrites(Nde) == false)
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Directory being replaced by a non-directory - this needs to see if
|
||||
the package is the owner and then see if the directory would be
|
||||
empty after the package is removed [ie no user files will be
|
||||
erased] */
|
||||
if (S_ISDIR(Existing.st_mode) != 0)
|
||||
{
|
||||
if (CheckDirReplace(Itm.Name) == false)
|
||||
return _error->Error("The directory %s is being replaced by a non-directory",Itm.Name);
|
||||
}
|
||||
|
||||
if (Debug == true)
|
||||
clog << "Extract " << string(Itm.Name,End) << endl;
|
||||
/* if (Count != 0)
|
||||
return _error->Error("Done");*/
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// Extract::Finished - Sequence finished, erase the temp files /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
bool pkgExtract::Finished()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// Extract::Aborted - Sequence aborted, undo all our unpacking /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This undoes everything that was done by all calls to the DoItem method
|
||||
and restores the File Listing cache to its original form. It bases its
|
||||
actions on the flags value for each node in the cache. */
|
||||
bool pkgExtract::Aborted()
|
||||
{
|
||||
if (Debug == true)
|
||||
clog << "Aborted, backing out" << endl;
|
||||
|
||||
pkgFLCache::NodeIterator Files = FLPkg.Files();
|
||||
map_ptrloc *Last = &FLPkg->Files;
|
||||
|
||||
/* Loop over all files, restore those that have been unpacked from their
|
||||
dpkg-tmp entries */
|
||||
while (Files.end() == false)
|
||||
{
|
||||
// Locate the hash bucket for the node and locate its group head
|
||||
pkgFLCache::NodeIterator Nde(FLCache,FLCache.HashNode(Files));
|
||||
for (; Nde.end() == false && Files->File != Nde->File; Nde++);
|
||||
if (Nde.end() == true)
|
||||
return _error->Error("Failed to locate node in its hash bucket");
|
||||
|
||||
if (snprintf(FileName,sizeof(FileName)-20,"%s/%s",
|
||||
Nde.DirN(),Nde.File()) <= 0)
|
||||
return _error->Error("The path is too long");
|
||||
|
||||
// Deal with diversions
|
||||
if ((Nde->Flags & pkgFLCache::Node::Diversion) != 0)
|
||||
{
|
||||
pkgFLCache::DiverIterator Div = Nde.Diversion();
|
||||
|
||||
// See if it is us and we are following it in the right direction
|
||||
if (Div->OwnerPkg != FLPkg.Offset() && Div.DivertFrom() == Nde)
|
||||
{
|
||||
Nde = Div.DivertTo();
|
||||
if (snprintf(FileName,sizeof(FileName)-20,"%s/%s",
|
||||
Nde.DirN(),Nde.File()) <= 0)
|
||||
return _error->Error("The diversion path is too long");
|
||||
}
|
||||
}
|
||||
|
||||
// Deal with overwrites+replaces
|
||||
for (; Nde.end() == false && Files->File == Nde->File; Nde++)
|
||||
{
|
||||
if ((Nde->Flags & pkgFLCache::Node::Replaced) ==
|
||||
pkgFLCache::Node::Replaced)
|
||||
{
|
||||
if (Debug == true)
|
||||
clog << "De-replaced " << FileName << " from " << Nde.RealPackage()->Name << endl;
|
||||
Nde->Flags &= ~pkgFLCache::Node::Replaced;
|
||||
}
|
||||
}
|
||||
|
||||
// Undo the change in the filesystem
|
||||
if (Debug == true)
|
||||
clog << "Backing out " << FileName;
|
||||
|
||||
// Remove a new node
|
||||
if ((Files->Flags & pkgFLCache::Node::NewFile) ==
|
||||
pkgFLCache::Node::NewFile)
|
||||
{
|
||||
if (Debug == true)
|
||||
clog << " [new node]" << endl;
|
||||
pkgFLCache::Node *Tmp = Files;
|
||||
Files++;
|
||||
*Last = Tmp->NextPkg;
|
||||
Tmp->NextPkg = 0;
|
||||
|
||||
FLCache.DropNode(Tmp - FLCache.NodeP);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (Debug == true)
|
||||
clog << endl;
|
||||
|
||||
Last = &Files->NextPkg;
|
||||
Files++;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// Extract::Fail - Extraction of a file Failed /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
bool pkgExtract::Fail(Item &Itm,int Fd)
|
||||
{
|
||||
return pkgDirStream::Fail(Itm,Fd);
|
||||
}
|
||||
/*}}}*/
|
||||
// Extract::FinishedFile - Finished a file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
bool pkgExtract::FinishedFile(Item &Itm,int Fd)
|
||||
{
|
||||
return pkgDirStream::FinishedFile(Itm,Fd);
|
||||
}
|
||||
/*}}}*/
|
||||
// Extract::HandleOverwrites - See if a replaces covers this overwrite /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Check if the file is in a package that is being replaced by this
|
||||
package or if the file is being overwritten. Note that if the file
|
||||
is really a directory but it has been erased from the filesystem
|
||||
this will fail with an overwrite message. This is a limitation of the
|
||||
dpkg file information format.
|
||||
|
||||
XX If a new package installs and another package replaces files in this
|
||||
package what should we do? */
|
||||
bool pkgExtract::HandleOverwrites(pkgFLCache::NodeIterator Nde,
|
||||
bool DiverCheck)
|
||||
{
|
||||
pkgFLCache::NodeIterator TmpNde = Nde;
|
||||
unsigned long DiverOwner = 0;
|
||||
unsigned long FileGroup = Nde->File;
|
||||
const char *FirstOwner = 0;
|
||||
for (; Nde.end() == false && FileGroup == Nde->File; Nde++)
|
||||
{
|
||||
if ((Nde->Flags & pkgFLCache::Node::Diversion) != 0)
|
||||
{
|
||||
/* Store the diversion owner if this is the forward direction
|
||||
of the diversion */
|
||||
if (DiverCheck == true)
|
||||
DiverOwner = Nde.Diversion()->OwnerPkg;
|
||||
continue;
|
||||
}
|
||||
|
||||
pkgFLCache::PkgIterator FPkg(FLCache,Nde.RealPackage());
|
||||
if (FPkg.end() == true || FPkg == FLPkg)
|
||||
continue;
|
||||
|
||||
/* This test trips when we are checking a diversion to see
|
||||
if something has already been diverted by this diversion */
|
||||
if (FPkg.Offset() == DiverOwner)
|
||||
continue;
|
||||
FirstOwner = FPkg.Name();
|
||||
|
||||
// Now see if this package matches one in a replace depends
|
||||
pkgCache::DepIterator Dep = Ver.DependsList();
|
||||
bool Ok = false;
|
||||
for (; Dep.end() == false; Dep++)
|
||||
{
|
||||
if (Dep->Type != pkgCache::Dep::Replaces)
|
||||
continue;
|
||||
|
||||
// Does the replaces apply to this package?
|
||||
if (strcmp(Dep.TargetPkg().Name(),FPkg.Name()) != 0)
|
||||
continue;
|
||||
|
||||
/* Check the version for match. I do not think CurrentVer can be
|
||||
0 if we are here.. */
|
||||
pkgCache::PkgIterator Pkg = Dep.TargetPkg();
|
||||
if (Pkg->CurrentVer == 0)
|
||||
{
|
||||
_error->Warning("Overwrite package match with no version for %s",Pkg.Name());
|
||||
continue;
|
||||
}
|
||||
|
||||
// Replaces is met
|
||||
if (debVS.CheckDep(Pkg.CurrentVer().VerStr(),Dep->CompareOp,Dep.TargetVer()) == true)
|
||||
{
|
||||
if (Debug == true)
|
||||
clog << "Replaced file " << Nde.DirN() << '/' << Nde.File() << " from " << Pkg.Name() << endl;
|
||||
Nde->Flags |= pkgFLCache::Node::Replaced;
|
||||
Ok = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Negative Hit
|
||||
if (Ok == false)
|
||||
return _error->Error("File %s/%s overwrites the one in the package %s",
|
||||
Nde.DirN(),Nde.File(),FPkg.Name());
|
||||
}
|
||||
|
||||
/* If this is a diversion we might have to recurse to process
|
||||
the other side of it */
|
||||
if ((TmpNde->Flags & pkgFLCache::Node::Diversion) != 0)
|
||||
{
|
||||
pkgFLCache::DiverIterator Div = TmpNde.Diversion();
|
||||
if (Div.DivertTo() == TmpNde)
|
||||
return HandleOverwrites(Div.DivertFrom(),true);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// Extract::CheckDirReplace - See if this directory can be erased /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* If this directory is owned by a single package and that package is
|
||||
replacing it with something non-directoryish then dpkg allows this.
|
||||
We increase the requirement to be that the directory is non-empty after
|
||||
the package is removed */
|
||||
bool pkgExtract::CheckDirReplace(string Dir,unsigned int Depth)
|
||||
{
|
||||
// Looping?
|
||||
if (Depth > 40)
|
||||
return false;
|
||||
|
||||
if (Dir[Dir.size() - 1] != '/')
|
||||
Dir += '/';
|
||||
|
||||
DIR *D = opendir(Dir.c_str());
|
||||
if (D == 0)
|
||||
return _error->Errno("opendir","Unable to read %s",Dir.c_str());
|
||||
|
||||
string File;
|
||||
for (struct dirent *Dent = readdir(D); Dent != 0; Dent = readdir(D))
|
||||
{
|
||||
// Skip some files
|
||||
if (strcmp(Dent->d_name,".") == 0 ||
|
||||
strcmp(Dent->d_name,"..") == 0)
|
||||
continue;
|
||||
|
||||
// Look up the node
|
||||
File = Dir + Dent->d_name;
|
||||
pkgFLCache::NodeIterator Nde = FLCache.GetNode(File.c_str(),
|
||||
File.c_str() + File.length(),0,false,false);
|
||||
|
||||
// The file is not owned by this package
|
||||
if (Nde.end() != false || Nde.RealPackage() != FLPkg)
|
||||
{
|
||||
closedir(D);
|
||||
return false;
|
||||
}
|
||||
|
||||
// See if it is a directory
|
||||
struct stat St;
|
||||
if (lstat(File.c_str(),&St) != 0)
|
||||
{
|
||||
closedir(D);
|
||||
return _error->Errno("lstat","Unable to stat %s",File.c_str());
|
||||
}
|
||||
|
||||
// Recurse down directories
|
||||
if (S_ISDIR(St.st_mode) != 0)
|
||||
{
|
||||
if (CheckDirReplace(File,Depth + 1) == false)
|
||||
{
|
||||
closedir(D);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No conflicts
|
||||
closedir(D);
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
52
apt/apt-inst/extract.h
Normal file
@ -0,0 +1,52 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
// $Id: extract.h,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
/* ######################################################################

Archive Extraction Directory Stream

This Directory Stream implements extraction of an archive into the
filesystem. It makes the choices on what files should be unpacked and
replaces as well as guiding the actual unpacking.

When the unpacking sequence is completed one of the two functions,
Finished or Aborted must be called.

##################################################################### */
|
||||
/*}}}*/
|
||||
#ifndef PKGLIB_EXTRACT_H
|
||||
#define PKGLIB_EXTRACT_H
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/extract.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/dirstream.h>
|
||||
#include <apt-pkg/filelist.h>
|
||||
#include <apt-pkg/pkgcache.h>
|
||||
|
||||
class pkgExtract : public pkgDirStream
|
||||
{
|
||||
pkgFLCache &FLCache;
|
||||
pkgCache::VerIterator Ver;
|
||||
pkgFLCache::PkgIterator FLPkg;
|
||||
char FileName[1024];
|
||||
bool Debug;
|
||||
|
||||
bool HandleOverwrites(pkgFLCache::NodeIterator Nde,
|
||||
bool DiverCheck = false);
|
||||
bool CheckDirReplace(string Dir,unsigned int Depth = 0);
|
||||
|
||||
public:
|
||||
|
||||
virtual bool DoItem(Item &Itm,int &Fd);
|
||||
virtual bool Fail(Item &Itm,int Fd);
|
||||
virtual bool FinishedFile(Item &Itm,int Fd);
|
||||
|
||||
bool Finished();
|
||||
bool Aborted();
|
||||
|
||||
pkgExtract(pkgFLCache &FLCache,pkgCache::VerIterator Ver);
|
||||
};
|
||||
|
||||
#endif
|
590
apt/apt-inst/filelist.cc
Normal file
@ -0,0 +1,590 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: filelist.cc,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
File Listing - Manages a Cache of File -> Package names.

Diversions add some significant complexity to the system. To keep
storage space down in the very special case of a diverted file no
extra bytes are allocated in the Node structure. Instead a diversion
is inserted directly into the hash table and its flag bit set. Every
lookup for that filename will always return the diversion.

The hash buckets are stored in sorted form, with diversions having
the highest sort order. Identical files are assigned the same file
pointer, thus after a search all of the nodes owning that file can be
found by iterating down the bucket.

Re-updates of diversions (another extremely special case) are done by
marking all diversions as untouched, then loading the entire diversion
list again, touching each diversion and then finally going back and
releasing all untouched diversions. It is assumed that the diversion
table will always be quite small and be a very irregular case.

Diversions that are user-installed are represented by a package with
an empty name string.

Conf files are handled like diversions by changing the meaning of the
Pointer field to point to a conf file entry - again to reduce overhead
for a special case.

##################################################################### */
|
||||
/*}}}*/
|
||||
// Include Files /*{{{*/
|
||||
#ifdef __GNUG__
|
||||
#pragma implementation "apt-pkg/filelist.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/filelist.h>
|
||||
#include <apt-pkg/mmap.h>
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <iostream>
|
||||
/*}}}*/
|
||||
|
||||
using namespace std;
|
||||
|
||||
// FlCache::Header::Header - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Initialize the header variables. These are the defaults used when
|
||||
creating new caches */
|
||||
pkgFLCache::Header::Header()
|
||||
{
|
||||
Signature = 0xEA3F1295;
|
||||
|
||||
/* Whenever the structures change the major version should be bumped,
|
||||
whenever the generator changes the minor version should be bumped. */
|
||||
MajorVersion = 1;
|
||||
MinorVersion = 0;
|
||||
Dirty = true;
|
||||
|
||||
HeaderSz = sizeof(pkgFLCache::Header);
|
||||
NodeSz = sizeof(pkgFLCache::Node);
|
||||
DirSz = sizeof(pkgFLCache::Directory);
|
||||
PackageSz = sizeof(pkgFLCache::Package);
|
||||
DiversionSz = sizeof(pkgFLCache::Diversion);
|
||||
ConfFileSz = sizeof(pkgFLCache::ConfFile);
|
||||
|
||||
NodeCount = 0;
|
||||
DirCount = 0;
|
||||
PackageCount = 0;
|
||||
DiversionCount = 0;
|
||||
ConfFileCount = 0;
|
||||
HashSize = 1 << 14;
|
||||
|
||||
FileHash = 0;
|
||||
DirTree = 0;
|
||||
Packages = 0;
|
||||
Diversions = 0;
|
||||
UniqNodes = 0;
|
||||
memset(Pools,0,sizeof(Pools));
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::Header::CheckSizes - Check if the two headers have same *sz /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Compare to make sure we are matching versions */
|
||||
bool pkgFLCache::Header::CheckSizes(Header &Against) const
|
||||
{
|
||||
if (HeaderSz == Against.HeaderSz &&
|
||||
NodeSz == Against.NodeSz &&
|
||||
DirSz == Against.DirSz &&
|
||||
DiversionSz == Against.DiversionSz &&
|
||||
PackageSz == Against.PackageSz &&
|
||||
ConfFileSz == Against.ConfFileSz)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
/*}}}*/
|
||||
|
||||
// FLCache::pkgFLCache - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* If this is a new cache then a new header and hash table are instantiated,
otherwise the existing ones are merely attached */
|
||||
pkgFLCache::pkgFLCache(DynamicMMap &Map) : Map(Map)
|
||||
{
|
||||
if (_error->PendingError() == true)
|
||||
return;
|
||||
|
||||
LastTreeLookup = 0;
|
||||
LastLookupSize = 0;
|
||||
|
||||
// Apply the typecasts
|
||||
HeaderP = (Header *)Map.Data();
|
||||
NodeP = (Node *)Map.Data();
|
||||
DirP = (Directory *)Map.Data();
|
||||
DiverP = (Diversion *)Map.Data();
|
||||
PkgP = (Package *)Map.Data();
|
||||
ConfP = (ConfFile *)Map.Data();
|
||||
StrP = (char *)Map.Data();
|
||||
AnyP = (unsigned char *)Map.Data();
|
||||
|
||||
// New mapping, create the basic cache structures
|
||||
if (Map.Size() == 0)
|
||||
{
|
||||
Map.RawAllocate(sizeof(pkgFLCache::Header));
|
||||
*HeaderP = pkgFLCache::Header();
|
||||
HeaderP->FileHash = Map.RawAllocate(sizeof(pkgFLCache::Node)*HeaderP->HashSize,
|
||||
sizeof(pkgFLCache::Node))/sizeof(pkgFLCache::Node);
|
||||
}
|
||||
|
||||
FileHash = NodeP + HeaderP->FileHash;
|
||||
|
||||
// Setup the dynamic map manager
|
||||
HeaderP->Dirty = true;
|
||||
Map.Sync(0,sizeof(pkgFLCache::Header));
|
||||
Map.UsePools(*HeaderP->Pools,sizeof(HeaderP->Pools)/sizeof(HeaderP->Pools[0]));
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::TreeLookup - Perform a lookup in a generic tree /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is a simple generic tree lookup. The first three entries of
|
||||
the Directory structure are used as a template, but any other similar
|
||||
structure could be used in its place. */
|
||||
map_ptrloc pkgFLCache::TreeLookup(map_ptrloc *Base,const char *Text,
|
||||
const char *TextEnd,unsigned long Size,
|
||||
unsigned int *Count,bool Insert)
|
||||
{
|
||||
pkgFLCache::Directory *Dir;
|
||||
|
||||
// Check our last entry cache
|
||||
if (LastTreeLookup != 0 && LastLookupSize == Size)
|
||||
{
|
||||
Dir = (pkgFLCache::Directory *)(AnyP + LastTreeLookup*Size);
|
||||
if (stringcmp(Text,TextEnd,StrP + Dir->Name) == 0)
|
||||
return LastTreeLookup;
|
||||
}
|
||||
|
||||
while (1)
|
||||
{
|
||||
// Allocate a new one
|
||||
if (*Base == 0)
|
||||
{
|
||||
if (Insert == false)
|
||||
return 0;
|
||||
|
||||
*Base = Map.Allocate(Size);
|
||||
if (*Base == 0)
|
||||
return 0;
|
||||
|
||||
(*Count)++;
|
||||
Dir = (pkgFLCache::Directory *)(AnyP + *Base*Size);
|
||||
Dir->Name = Map.WriteString(Text,TextEnd - Text);
|
||||
LastTreeLookup = *Base;
|
||||
LastLookupSize = Size;
|
||||
return *Base;
|
||||
}
|
||||
|
||||
// Compare this node
|
||||
Dir = (pkgFLCache::Directory *)(AnyP + *Base*Size);
|
||||
int Res = stringcmp(Text,TextEnd,StrP + Dir->Name);
|
||||
if (Res == 0)
|
||||
{
|
||||
LastTreeLookup = *Base;
|
||||
LastLookupSize = Size;
|
||||
return *Base;
|
||||
}
|
||||
|
||||
if (Res > 0)
|
||||
Base = &Dir->Left;
|
||||
if (Res < 0)
|
||||
Base = &Dir->Right;
|
||||
}
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::PrintTree - Print out a tree /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is a simple generic tree dumper, meant for debugging. */
|
||||
void pkgFLCache::PrintTree(map_ptrloc Base,unsigned long Size)
|
||||
{
|
||||
if (Base == 0)
|
||||
return;
|
||||
|
||||
pkgFLCache::Directory *Dir = (pkgFLCache::Directory *)(AnyP + Base*Size);
|
||||
PrintTree(Dir->Left,Size);
|
||||
cout << (StrP + Dir->Name) << endl;
|
||||
PrintTree(Dir->Right,Size);
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::GetPkg - Get a package pointer /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Locate a package by name in its tree; this is just a wrapper for
|
||||
TreeLookup */
|
||||
pkgFLCache::PkgIterator pkgFLCache::GetPkg(const char *Name,const char *NameEnd,
|
||||
bool Insert)
|
||||
{
|
||||
if (NameEnd == 0)
|
||||
NameEnd = Name + strlen(Name);
|
||||
|
||||
map_ptrloc Pos = TreeLookup(&HeaderP->Packages,Name,NameEnd,
|
||||
sizeof(pkgFLCache::Package),
|
||||
&HeaderP->PackageCount,Insert);
|
||||
if (Pos == 0)
|
||||
return pkgFLCache::PkgIterator();
|
||||
return pkgFLCache::PkgIterator(*this,PkgP + Pos);
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::GetNode - Get the node associated with the filename /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Lookup a node in the hash table. If Insert is true then a new node is
|
||||
always inserted. The hash table can have multiple instances of a
|
||||
single name available. A search returns the first. It is important
|
||||
that additions for the same name insert after the first entry of
|
||||
the name group. */
|
||||
pkgFLCache::NodeIterator pkgFLCache::GetNode(const char *Name,
|
||||
const char *NameEnd,
|
||||
map_ptrloc Loc,
|
||||
bool Insert,bool Divert)
|
||||
{
|
||||
// Split the name into file and directory, hashing as it is copied
|
||||
const char *File = Name;
|
||||
unsigned long HashPos = 0;
|
||||
for (const char *I = Name; I < NameEnd; I++)
|
||||
{
|
||||
HashPos = 1637*HashPos + *I;
|
||||
if (*I == '/')
|
||||
File = I;
|
||||
}
|
||||
|
||||
// Search for it
|
||||
Node *Hash = NodeP + HeaderP->FileHash + (HashPos % HeaderP->HashSize);
|
||||
int Res = 0;
|
||||
map_ptrloc FilePtr = 0;
|
||||
while (Hash->Pointer != 0)
|
||||
{
|
||||
// Compare
|
||||
Res = stringcmp(File+1,NameEnd,StrP + Hash->File);
|
||||
if (Res == 0)
|
||||
Res = stringcmp(Name,File,StrP + DirP[Hash->Dir].Name);
|
||||
|
||||
// Diversion?
|
||||
if (Res == 0 && Insert == true)
|
||||
{
|
||||
/* Dir and File match exactly, we need to reuse the file name
|
||||
when we link it in */
|
||||
FilePtr = Hash->File;
|
||||
Res = Divert - ((Hash->Flags & Node::Diversion) == Node::Diversion);
|
||||
}
|
||||
|
||||
// Is a match
|
||||
if (Res == 0)
|
||||
{
|
||||
if (Insert == false)
|
||||
return NodeIterator(*this,Hash);
|
||||
|
||||
// Only one diversion per name!
|
||||
if (Divert == true)
|
||||
return NodeIterator(*this,Hash);
|
||||
break;
|
||||
}
|
||||
|
||||
// Out of sort order
|
||||
if (Res > 0)
|
||||
break;
|
||||
|
||||
if (Hash->Next != 0)
|
||||
Hash = NodeP + Hash->Next;
|
||||
else
|
||||
break;
|
||||
}
|
||||
|
||||
// Fail, not found
|
||||
if (Insert == false)
|
||||
return NodeIterator(*this);
|
||||
|
||||
// Find a directory node
|
||||
map_ptrloc Dir = TreeLookup(&HeaderP->DirTree,Name,File,
|
||||
sizeof(pkgFLCache::Directory),
|
||||
&HeaderP->DirCount,true);
|
||||
if (Dir == 0)
|
||||
return NodeIterator(*this);
|
||||
|
||||
// Allocate a new node
|
||||
if (Hash->Pointer != 0)
|
||||
{
|
||||
// Overwrite or append
|
||||
if (Res > 0)
|
||||
{
|
||||
Node *Next = NodeP + Map.Allocate(sizeof(*Hash));
|
||||
if (Next == NodeP)
|
||||
return NodeIterator(*this);
|
||||
*Next = *Hash;
|
||||
Hash->Next = Next - NodeP;
|
||||
}
|
||||
else
|
||||
{
|
||||
unsigned long NewNext = Map.Allocate(sizeof(*Hash));
|
||||
if (NewNext == 0)
|
||||
return NodeIterator(*this);
|
||||
NodeP[NewNext].Next = Hash->Next;
|
||||
Hash->Next = NewNext;
|
||||
Hash = NodeP + Hash->Next;
|
||||
}
|
||||
}
|
||||
|
||||
// Insert into the new item
|
||||
Hash->Dir = Dir;
|
||||
Hash->Pointer = Loc;
|
||||
Hash->Flags = 0;
|
||||
if (Divert == true)
|
||||
Hash->Flags |= Node::Diversion;
|
||||
|
||||
if (FilePtr != 0)
|
||||
Hash->File = FilePtr;
|
||||
else
|
||||
{
|
||||
HeaderP->UniqNodes++;
|
||||
Hash->File = Map.WriteString(File+1,NameEnd - File-1);
|
||||
}
|
||||
|
||||
// Link the node to the package list
|
||||
if (Divert == false && Loc == 0)
|
||||
{
|
||||
Hash->Next = PkgP[Loc].Files;
|
||||
PkgP[Loc].Files = Hash - NodeP;
|
||||
}
|
||||
|
||||
HeaderP->NodeCount++;
|
||||
return NodeIterator(*this,Hash);
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::HashNode - Return the hash bucket for the node /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is one of two hashing functions. The other is inlined into the
|
||||
GetNode routine. */
|
||||
pkgFLCache::Node *pkgFLCache::HashNode(NodeIterator const &Nde)
|
||||
{
|
||||
// Hash the node
|
||||
unsigned long HashPos = 0;
|
||||
for (const char *I = Nde.DirN(); *I != 0; I++)
|
||||
HashPos = 1637*HashPos + *I;
|
||||
HashPos = 1637*HashPos + '/';
|
||||
for (const char *I = Nde.File(); *I != 0; I++)
|
||||
HashPos = 1637*HashPos + *I;
|
||||
return NodeP + HeaderP->FileHash + (HashPos % HeaderP->HashSize);
|
||||
}
|
||||
/*}}}*/
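// A minimal restatement, for illustration only, of the string hash that
// GetNode computes inline and HashNode recomputes above; the helper name
// FLHashSketch is hypothetical and not part of this file. A path's bucket
// is this value modulo Header::HashSize, which Header::Header() defaults
// to 1 << 14.
static unsigned long FLHashSketch(const char *Begin,const char *End)
{
   unsigned long HashPos = 0;
   for (const char *I = Begin; I != End; I++)
      HashPos = 1637*HashPos + *I;
   return HashPos;
}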
|
||||
// FLCache::DropNode - Drop a node from the hash table /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This erases a node from the hash table. Note that this does not unlink
|
||||
the node from the package linked list. */
|
||||
void pkgFLCache::DropNode(map_ptrloc N)
|
||||
{
|
||||
if (N == 0)
|
||||
return;
|
||||
|
||||
NodeIterator Nde(*this,NodeP + N);
|
||||
|
||||
if (Nde->NextPkg != 0)
|
||||
_error->Warning("DropNode called on still linked node");
|
||||
|
||||
// Locate it in the hash table
|
||||
Node *Last = 0;
|
||||
Node *Hash = HashNode(Nde);
|
||||
while (Hash->Pointer != 0)
|
||||
{
|
||||
// Got it
|
||||
if (Hash == Nde)
|
||||
{
|
||||
// Top of the bucket..
|
||||
if (Last == 0)
|
||||
{
|
||||
Hash->Pointer = 0;
|
||||
if (Hash->Next == 0)
|
||||
return;
|
||||
*Hash = NodeP[Hash->Next];
|
||||
// Release Hash->Next
|
||||
return;
|
||||
}
|
||||
Last->Next = Hash->Next;
|
||||
// Release Hash
|
||||
return;
|
||||
}
|
||||
|
||||
Last = Hash;
|
||||
if (Hash->Next != 0)
|
||||
Hash = NodeP + Hash->Next;
|
||||
else
|
||||
break;
|
||||
}
|
||||
|
||||
_error->Error("Failed to locate the hash element!");
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::BeginDiverLoad - Start reading new diversions /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Tag all the diversions as untouched */
|
||||
void pkgFLCache::BeginDiverLoad()
|
||||
{
|
||||
for (DiverIterator I = DiverBegin(); I.end() == false; I++)
|
||||
I->Flags = 0;
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::FinishDiverLoad - Finish up a new diversion load /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This drops any untouched diversions, in effect removing any diversions
that were not loaded (i.e. missing from the diversion file) */
|
||||
void pkgFLCache::FinishDiverLoad()
|
||||
{
|
||||
map_ptrloc *Cur = &HeaderP->Diversions;
|
||||
while (*Cur != 0)
|
||||
{
|
||||
Diversion *Div = DiverP + *Cur;
|
||||
if ((Div->Flags & Diversion::Touched) == Diversion::Touched)
|
||||
{
|
||||
Cur = &Div->Next;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Purge!
|
||||
DropNode(Div->DivertTo);
|
||||
DropNode(Div->DivertFrom);
|
||||
*Cur = Div->Next;
|
||||
}
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::AddDiversion - Add a new diversion /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Add a new diversion to the diversion tables and make sure that it is
|
||||
unique and non-chaining. */
|
||||
bool pkgFLCache::AddDiversion(PkgIterator const &Owner,
|
||||
const char *From,const char *To)
|
||||
{
|
||||
/* Locate the two hash nodes we are going to manipulate. If there
|
||||
are pre-existing diversions then they will be returned */
|
||||
NodeIterator FromN = GetNode(From,From+strlen(From),0,true,true);
|
||||
NodeIterator ToN = GetNode(To,To+strlen(To),0,true,true);
|
||||
if (FromN.end() == true || ToN.end() == true)
|
||||
return _error->Error("Failed to allocate diversion");
|
||||
|
||||
// Should never happen
|
||||
if ((FromN->Flags & Node::Diversion) != Node::Diversion ||
|
||||
(ToN->Flags & Node::Diversion) != Node::Diversion)
|
||||
return _error->Error("Internal Error in AddDiversion");
|
||||
|
||||
// Now, try to reclaim an existing diversion..
|
||||
map_ptrloc Diver = 0;
|
||||
if (FromN->Pointer != 0)
|
||||
Diver = FromN->Pointer;
|
||||
|
||||
/* Make sure from and to point to the same diversion; if they don't
|
||||
then we are trying to intermix diversions - very bad */
|
||||
if (ToN->Pointer != 0 && ToN->Pointer != Diver)
|
||||
{
|
||||
// It could be that the other diversion is no longer in use
|
||||
if ((DiverP[ToN->Pointer].Flags & Diversion::Touched) == Diversion::Touched)
|
||||
return _error->Error("Trying to overwrite a diversion, %s -> %s and %s/%s",
|
||||
From,To,ToN.File(),ToN.Dir().Name());
|
||||
|
||||
// We can erase it.
|
||||
Diversion *Div = DiverP + ToN->Pointer;
|
||||
ToN->Pointer = 0;
|
||||
|
||||
if (Div->DivertTo == ToN.Offset())
|
||||
Div->DivertTo = 0;
|
||||
if (Div->DivertFrom == ToN.Offset())
|
||||
Div->DivertFrom = 0;
|
||||
|
||||
// This diversion will be cleaned up by FinishDiverLoad
|
||||
}
|
||||
|
||||
// Allocate a new diversion
|
||||
if (Diver == 0)
|
||||
{
|
||||
Diver = Map.Allocate(sizeof(Diversion));
|
||||
if (Diver == 0)
|
||||
return false;
|
||||
DiverP[Diver].Next = HeaderP->Diversions;
|
||||
HeaderP->Diversions = Diver;
|
||||
HeaderP->DiversionCount++;
|
||||
}
|
||||
|
||||
// Can only have one diversion of the same files
|
||||
Diversion *Div = DiverP + Diver;
|
||||
if ((Div->Flags & Diversion::Touched) == Diversion::Touched)
|
||||
return _error->Error("Double add of diversion %s -> %s",From,To);
|
||||
|
||||
// Setup the From/To links
|
||||
if (Div->DivertFrom != FromN.Offset() && Div->DivertFrom != ToN.Offset())
|
||||
DropNode(Div->DivertFrom);
|
||||
Div->DivertFrom = FromN.Offset();
|
||||
if (Div->DivertTo != FromN.Offset() && Div->DivertTo != ToN.Offset())
|
||||
DropNode(Div->DivertTo);
|
||||
Div->DivertTo = ToN.Offset();
|
||||
|
||||
// Link it to the two nodes
|
||||
FromN->Pointer = Diver;
|
||||
ToN->Pointer = Diver;
|
||||
|
||||
// And the package
|
||||
Div->OwnerPkg = Owner.Offset();
|
||||
Div->Flags |= Diversion::Touched;
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// FLCache::AddConfFile - Add a new configuration file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This simply adds a new conf file node to the hash table. This is only
|
||||
used by the status file reader. It associates a hash with each conf
|
||||
file entry that exists in the status file and the list file for
|
||||
the proper package. Duplicate conf files (across packages) are left
|
||||
up to other routines to deal with. */
|
||||
bool pkgFLCache::AddConfFile(const char *Name,const char *NameEnd,
|
||||
PkgIterator const &Owner,
|
||||
const unsigned char *Sum)
|
||||
{
|
||||
NodeIterator Nde = GetNode(Name,NameEnd,0,false,false);
|
||||
if (Nde.end() == true)
|
||||
return true;
|
||||
|
||||
unsigned long File = Nde->File;
|
||||
for (; Nde->File == File && Nde.end() == false; Nde++)
|
||||
{
|
||||
if (Nde.RealPackage() != Owner)
|
||||
continue;
|
||||
|
||||
if ((Nde->Flags & Node::ConfFile) == Node::ConfFile)
|
||||
return _error->Error("Duplicate conf file %s/%s",Nde.DirN(),Nde.File());
|
||||
|
||||
// Allocate a new conf file structure
|
||||
map_ptrloc Conf = Map.Allocate(sizeof(ConfFile));
|
||||
if (Conf == 0)
|
||||
return false;
|
||||
ConfP[Conf].OwnerPkg = Owner.Offset();
|
||||
memcpy(ConfP[Conf].MD5,Sum,sizeof(ConfP[Conf].MD5));
|
||||
|
||||
Nde->Pointer = Conf;
|
||||
Nde->Flags |= Node::ConfFile;
|
||||
return true;
|
||||
}
|
||||
|
||||
/* This means the conf file has been replaced, but the entry in the
|
||||
status file was not updated */
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
|
||||
// NodeIterator::RealPackage - Return the package for this node /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Since the package pointer is indirected in all sorts of interesting ways
|
||||
this is used to get a pointer to the owning package */
|
||||
pkgFLCache::Package *pkgFLCache::NodeIterator::RealPackage() const
|
||||
{
|
||||
if (Nde->Pointer == 0)
|
||||
return 0;
|
||||
|
||||
if ((Nde->Flags & Node::ConfFile) == Node::ConfFile)
|
||||
return Owner->PkgP + Owner->ConfP[Nde->Pointer].OwnerPkg;
|
||||
|
||||
// Diversions are ignored
|
||||
if ((Nde->Flags & Node::Diversion) == Node::Diversion)
|
||||
return 0;
|
||||
|
||||
return Owner->PkgP + Nde->Pointer;
|
||||
}
|
||||
/*}}}*/
|
314
apt/apt-inst/filelist.h
Normal file
@ -0,0 +1,314 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: filelist.h,v 1.2 2002/07/25 18:07:17 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
File Listing - Manages a Cache of File -> Package names.

This is identical to the Package cache, except that the generator
(which is much simpler) is integrated directly into the main class,
and it has been designed to handle live updates.

The storage content of the class is maintained in a memory map and is
written directly to the file system. Performance is traded against
space to give something that performs well and remains small.
The average per-file usage is 32 bytes, which yields about a meg every
36k files. Directory paths are collected into a binary tree and stored
only once; this offsets the cost of the hash nodes enough to keep
memory usage slightly less than the sum of the filenames.

The file names are stored into a fixed-size chained hash table that is
linked to the package name and to the directory component.

Each file node has a set of associated flags that indicate the current
state of the file.

##################################################################### */
|
||||
/*}}}*/
|
||||
#ifndef PKGLIB_FILELIST_H
|
||||
#define PKGLIB_FILELIST_H
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/filelist.h"
|
||||
#endif
|
||||
|
||||
#include <apt-pkg/mmap.h>
|
||||
|
||||
class pkgFLCache
|
||||
{
|
||||
public:
|
||||
struct Header;
|
||||
struct Node;
|
||||
struct Directory;
|
||||
struct Package;
|
||||
struct Diversion;
|
||||
struct ConfFile;
|
||||
|
||||
class NodeIterator;
|
||||
class DirIterator;
|
||||
class PkgIterator;
|
||||
class DiverIterator;
|
||||
|
||||
protected:
|
||||
string CacheFile;
|
||||
DynamicMMap ⤅
|
||||
map_ptrloc LastTreeLookup;
|
||||
unsigned long LastLookupSize;
|
||||
|
||||
// Helpers for the addition algorithms
|
||||
map_ptrloc TreeLookup(map_ptrloc *Base,const char *Text,const char *TextEnd,
|
||||
unsigned long Size,unsigned int *Count = 0,
|
||||
bool Insert = false);
|
||||
|
||||
public:
|
||||
|
||||
// Pointers to the arrays of items
|
||||
Header *HeaderP;
|
||||
Node *NodeP;
|
||||
Directory *DirP;
|
||||
Package *PkgP;
|
||||
Diversion *DiverP;
|
||||
ConfFile *ConfP;
|
||||
char *StrP;
|
||||
unsigned char *AnyP;
|
||||
|
||||
// Quick accessors
|
||||
Node *FileHash;
|
||||
|
||||
// Accessors
|
||||
Header &Head() {return *HeaderP;};
|
||||
void PrintTree(map_ptrloc Base,unsigned long Size);
|
||||
|
||||
// Add/Find things
|
||||
PkgIterator GetPkg(const char *Name,const char *End,bool Insert);
|
||||
inline PkgIterator GetPkg(const char *Name,bool Insert);
|
||||
NodeIterator GetNode(const char *Name,
|
||||
const char *NameEnd,
|
||||
map_ptrloc Loc,
|
||||
bool Insert,bool Divert);
|
||||
Node *HashNode(NodeIterator const &N);
|
||||
void DropNode(map_ptrloc Node);
|
||||
|
||||
inline DiverIterator DiverBegin();
|
||||
|
||||
// Diversion control
|
||||
void BeginDiverLoad();
|
||||
void FinishDiverLoad();
|
||||
bool AddDiversion(PkgIterator const &Owner,const char *From,
|
||||
const char *To);
|
||||
bool AddConfFile(const char *Name,const char *NameEnd,
|
||||
PkgIterator const &Owner,const unsigned char *Sum);
|
||||
|
||||
pkgFLCache(DynamicMMap &Map);
|
||||
// ~pkgFLCache();
|
||||
};
|
||||
|
||||
struct pkgFLCache::Header
|
||||
{
|
||||
// Signature information
|
||||
unsigned long Signature;
|
||||
short MajorVersion;
|
||||
short MinorVersion;
|
||||
bool Dirty;
|
||||
|
||||
// Size of structure values
|
||||
unsigned HeaderSz;
|
||||
unsigned NodeSz;
|
||||
unsigned DirSz;
|
||||
unsigned PackageSz;
|
||||
unsigned DiversionSz;
|
||||
unsigned ConfFileSz;
|
||||
|
||||
// Structure Counts;
|
||||
unsigned int NodeCount;
|
||||
unsigned int DirCount;
|
||||
unsigned int PackageCount;
|
||||
unsigned int DiversionCount;
|
||||
unsigned int ConfFileCount;
|
||||
unsigned int HashSize;
|
||||
unsigned long UniqNodes;
|
||||
|
||||
// Offsets
|
||||
map_ptrloc FileHash;
|
||||
map_ptrloc DirTree;
|
||||
map_ptrloc Packages;
|
||||
map_ptrloc Diversions;
|
||||
|
||||
/* Allocation pools, there should be one of these for each structure
|
||||
excluding the header */
|
||||
DynamicMMap::Pool Pools[5];
|
||||
|
||||
bool CheckSizes(Header &Against) const;
|
||||
Header();
|
||||
};
|
||||
|
||||
/* The bit field is used to avoid incurring an extra 4 bytes x 40000,
|
||||
Pointer is the most infrequently used member of the structure */
|
||||
struct pkgFLCache::Node
|
||||
{
|
||||
map_ptrloc Dir; // Dir
|
||||
map_ptrloc File; // String
|
||||
unsigned Pointer:24; // Package/Diversion/ConfFile
|
||||
unsigned Flags:8; // Package
|
||||
map_ptrloc Next; // Node
|
||||
map_ptrloc NextPkg; // Node
|
||||
|
||||
enum Flags {Diversion = (1<<0),ConfFile = (1<<1),
|
||||
NewConfFile = (1<<2),NewFile = (1<<3),
|
||||
Unpacked = (1<<4),Replaced = (1<<5)};
|
||||
};
|
||||
|
||||
struct pkgFLCache::Directory
|
||||
{
|
||||
map_ptrloc Left; // Directory
|
||||
map_ptrloc Right; // Directory
|
||||
map_ptrloc Name; // String
|
||||
};
|
||||
|
||||
struct pkgFLCache::Package
|
||||
{
|
||||
map_ptrloc Left; // Package
|
||||
map_ptrloc Right; // Package
|
||||
map_ptrloc Name; // String
|
||||
map_ptrloc Files; // Node
|
||||
};
|
||||
|
||||
struct pkgFLCache::Diversion
|
||||
{
|
||||
map_ptrloc OwnerPkg; // Package
|
||||
map_ptrloc DivertFrom; // Node
|
||||
map_ptrloc DivertTo; // String
|
||||
|
||||
map_ptrloc Next; // Diversion
|
||||
unsigned long Flags;
|
||||
|
||||
enum Flags {Touched = (1<<0)};
|
||||
};
|
||||
|
||||
struct pkgFLCache::ConfFile
|
||||
{
|
||||
map_ptrloc OwnerPkg; // Package
|
||||
unsigned char MD5[16];
|
||||
};
|
||||
|
||||
class pkgFLCache::PkgIterator
|
||||
{
|
||||
Package *Pkg;
|
||||
pkgFLCache *Owner;
|
||||
|
||||
public:
|
||||
|
||||
inline bool end() const {return Owner == 0 || Pkg == Owner->PkgP?true:false;}
|
||||
|
||||
// Accessors
|
||||
inline Package *operator ->() {return Pkg;};
|
||||
inline Package const *operator ->() const {return Pkg;};
|
||||
inline Package const &operator *() const {return *Pkg;};
|
||||
inline operator Package *() {return Pkg == Owner->PkgP?0:Pkg;};
|
||||
inline operator Package const *() const {return Pkg == Owner->PkgP?0:Pkg;};
|
||||
|
||||
inline unsigned long Offset() const {return Pkg - Owner->PkgP;};
|
||||
inline const char *Name() const {return Pkg->Name == 0?0:Owner->StrP + Pkg->Name;};
|
||||
inline pkgFLCache::NodeIterator Files() const;
|
||||
|
||||
PkgIterator() : Pkg(0), Owner(0) {};
|
||||
PkgIterator(pkgFLCache &Owner,Package *Trg) : Pkg(Trg), Owner(&Owner) {};
|
||||
};
|
||||
|
||||
class pkgFLCache::DirIterator
|
||||
{
|
||||
Directory *Dir;
|
||||
pkgFLCache *Owner;
|
||||
|
||||
public:
|
||||
|
||||
// Accessors
|
||||
inline Directory *operator ->() {return Dir;};
|
||||
inline Directory const *operator ->() const {return Dir;};
|
||||
inline Directory const &operator *() const {return *Dir;};
|
||||
inline operator Directory *() {return Dir == Owner->DirP?0:Dir;};
|
||||
inline operator Directory const *() const {return Dir == Owner->DirP?0:Dir;};
|
||||
|
||||
inline const char *Name() const {return Dir->Name == 0?0:Owner->StrP + Dir->Name;};
|
||||
|
||||
DirIterator() : Dir(0), Owner(0) {};
|
||||
DirIterator(pkgFLCache &Owner,Directory *Trg) : Dir(Trg), Owner(&Owner) {};
|
||||
};
|
||||
|
||||
class pkgFLCache::DiverIterator
|
||||
{
|
||||
Diversion *Diver;
|
||||
pkgFLCache *Owner;
|
||||
|
||||
public:
|
||||
|
||||
// Iteration
|
||||
void operator ++(int) {if (Diver != Owner->DiverP) Diver = Owner->DiverP + Diver->Next;};
|
||||
inline void operator ++() {operator ++(0);};
|
||||
inline bool end() const {return Owner == 0 || Diver == Owner->DiverP;};
|
||||
|
||||
// Accessors
|
||||
inline Diversion *operator ->() {return Diver;};
|
||||
inline Diversion const *operator ->() const {return Diver;};
|
||||
inline Diversion const &operator *() const {return *Diver;};
|
||||
inline operator Diversion *() {return Diver == Owner->DiverP?0:Diver;};
|
||||
inline operator Diversion const *() const {return Diver == Owner->DiverP?0:Diver;};
|
||||
|
||||
inline PkgIterator OwnerPkg() const {return PkgIterator(*Owner,Owner->PkgP + Diver->OwnerPkg);};
|
||||
inline NodeIterator DivertFrom() const;
|
||||
inline NodeIterator DivertTo() const;
|
||||
|
||||
DiverIterator() : Diver(0), Owner(0) {};
|
||||
DiverIterator(pkgFLCache &Owner,Diversion *Trg) : Diver(Trg), Owner(&Owner) {};
|
||||
};
|
||||
|
||||
class pkgFLCache::NodeIterator
|
||||
{
|
||||
Node *Nde;
|
||||
enum {NdePkg, NdeHash} Type;
|
||||
pkgFLCache *Owner;
|
||||
|
||||
public:
|
||||
|
||||
// Iteration
|
||||
void operator ++(int) {if (Nde != Owner->NodeP) Nde = Owner->NodeP +
|
||||
(Type == NdePkg?Nde->NextPkg:Nde->Next);};
|
||||
inline void operator ++() {operator ++(0);};
|
||||
inline bool end() const {return Owner == 0 || Nde == Owner->NodeP;};
|
||||
|
||||
// Accessors
|
||||
inline Node *operator ->() {return Nde;};
|
||||
inline Node const *operator ->() const {return Nde;};
|
||||
inline Node const &operator *() const {return *Nde;};
|
||||
inline operator Node *() {return Nde == Owner->NodeP?0:Nde;};
|
||||
inline operator Node const *() const {return Nde == Owner->NodeP?0:Nde;};
|
||||
inline unsigned long Offset() const {return Nde - Owner->NodeP;};
|
||||
inline DirIterator Dir() const {return DirIterator(*Owner,Owner->DirP + Nde->Dir);};
|
||||
inline DiverIterator Diversion() const {return DiverIterator(*Owner,Owner->DiverP + Nde->Pointer);};
|
||||
inline const char *File() const {return Nde->File == 0?0:Owner->StrP + Nde->File;};
|
||||
inline const char *DirN() const {return Owner->StrP + Owner->DirP[Nde->Dir].Name;};
|
||||
Package *RealPackage() const;
|
||||
|
||||
NodeIterator() : Nde(0), Type(NdeHash), Owner(0) {};
|
||||
NodeIterator(pkgFLCache &Owner) : Nde(Owner.NodeP), Type(NdeHash), Owner(&Owner) {};
|
||||
NodeIterator(pkgFLCache &Owner,Node *Trg) : Nde(Trg), Type(NdeHash), Owner(&Owner) {};
|
||||
NodeIterator(pkgFLCache &Owner,Node *Trg,Package *) : Nde(Trg), Type(NdePkg), Owner(&Owner) {};
|
||||
};
|
||||
|
||||
/* Inlines with forward references that cannot be included directly in their
|
||||
respective classes */
|
||||
inline pkgFLCache::NodeIterator pkgFLCache::DiverIterator::DivertFrom() const
|
||||
{return NodeIterator(*Owner,Owner->NodeP + Diver->DivertFrom);};
|
||||
inline pkgFLCache::NodeIterator pkgFLCache::DiverIterator::DivertTo() const
|
||||
{return NodeIterator(*Owner,Owner->NodeP + Diver->DivertTo);};
|
||||
|
||||
inline pkgFLCache::NodeIterator pkgFLCache::PkgIterator::Files() const
|
||||
{return NodeIterator(*Owner,Owner->NodeP + Pkg->Files,Pkg);};
|
||||
|
||||
inline pkgFLCache::DiverIterator pkgFLCache::DiverBegin()
|
||||
{return DiverIterator(*this,DiverP + HeaderP->Diversions);};
|
||||
|
||||
inline pkgFLCache::PkgIterator pkgFLCache::GetPkg(const char *Name,bool Insert)
|
||||
{return GetPkg(Name,Name+strlen(Name),Insert);};
|
||||
|
||||
#endif
|
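The pkgFLCache interface declared above can be exercised on its own. The
following is a minimal sketch, not part of the commit, assuming the anonymous
(Flags, WorkSpace) DynamicMMap constructor from apt-pkg/mmap.h; the package
name "example" and the path are made up for illustration. It records one file
for one package and then looks the same path up again:

#include <apt-pkg/filelist.h>
#include <apt-pkg/mmap.h>
#include <cstring>
#include <iostream>

int main()
{
   // Anonymous 1 MB work space backing the cache
   DynamicMMap Map(0,1024*1024);
   pkgFLCache FLCache(Map);

   // Register package "example" and one file it owns
   pkgFLCache::PkgIterator Pkg = FLCache.GetPkg("example",true);
   const char *Path = "/usr/bin/example";
   FLCache.GetNode(Path,Path+strlen(Path),Pkg.Offset(),true,false);

   // Look the same path up again; DirN() and File() return the two components
   pkgFLCache::NodeIterator Nde = FLCache.GetNode(Path,Path+strlen(Path),
                                                  0,false,false);
   if (Nde.end() == false)
      std::cout << Nde.DirN() << '/' << Nde.File() << std::endl;
   return 0;
}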
31
apt/apt-inst/makefile
Normal file
@ -0,0 +1,31 @@
|
||||
# -*- make -*-
|
||||
BASE=..
|
||||
SUBDIR=apt-inst
|
||||
|
||||
# Header location
|
||||
SUBDIRS = contrib deb
|
||||
HEADER_TARGETDIRS = apt-pkg
|
||||
|
||||
# Bring in the default rules
|
||||
include ../buildlib/defaults.mak
|
||||
|
||||
# The library name
|
||||
LIBRARY=apt-inst
|
||||
LIBEXT=$(GLIBC_VER)$(LIBSTDCPP_VER)
|
||||
MAJOR=1.0
|
||||
MINOR=0
|
||||
SLIBS=$(PTHREADLIB) -lapt-pkg
|
||||
|
||||
# Source code for the contributed non-core things
|
||||
SOURCE = contrib/extracttar.cc contrib/arfile.cc
|
||||
|
||||
# Source code for the main library
|
||||
SOURCE+= filelist.cc database.cc dirstream.cc extract.cc \
|
||||
deb/dpkgdb.cc deb/debfile.cc
|
||||
|
||||
# Public header files
|
||||
HEADERS = extracttar.h arfile.h filelist.h database.h extract.h \
|
||||
dpkgdb.h dirstream.h debfile.h
|
||||
|
||||
HEADERS := $(addprefix apt-pkg/,$(HEADERS))
|
||||
include $(LIBRARY_H)
|
@ -1,43 +0,0 @@
|
||||
/init.cc/1.16/Wed Aug 1 21:35:12 2001//
|
||||
D/cnc////
|
||||
D/contrib////
|
||||
D/deb////
|
||||
D/rpm////
|
||||
/acquire-method.cc/1.4/Fri Aug 10 13:58:21 2001//
|
||||
/acquire-method.h/1.2/Fri Aug 10 13:58:24 2001//
|
||||
/acquire-worker.cc/1.2/Fri Aug 10 13:58:24 2001//
|
||||
/acquire-worker.h/1.1.1.1/Fri Aug 10 13:58:24 2001//
|
||||
/acquire.cc/1.3/Fri Aug 10 13:58:24 2001//
|
||||
/acquire.h/1.2/Fri Aug 10 13:58:26 2001//
|
||||
/algorithms.h/1.1.1.1/Fri Aug 10 13:58:29 2001//
|
||||
/cachefile.cc/1.8/Fri Aug 10 13:58:29 2001//
|
||||
/cachefile.h/1.4/Fri Aug 10 13:58:29 2001//
|
||||
/cacheiterators.h/1.2/Fri Aug 10 13:58:31 2001//
|
||||
/clean.cc/1.2/Fri Aug 10 13:58:31 2001//
|
||||
/clean.h/1.1.1.1/Fri Aug 10 13:58:31 2001//
|
||||
/init.h/1.1.1.1/Fri Aug 10 13:58:33 2001//
|
||||
/makefile/1.12/Fri Aug 10 13:58:33 2001//
|
||||
/orderlist.cc/1.4/Fri Aug 10 13:58:34 2001//
|
||||
/orderlist.h/1.1.1.1/Fri Aug 10 13:58:34 2001//
|
||||
/packagemanager.h/1.2/Fri Aug 10 13:59:57 2001//
|
||||
/pkgcache.cc/1.11/Fri Aug 10 14:00:19 2001//
|
||||
/pkgcache.h/1.2/Fri Aug 10 14:00:19 2001//
|
||||
/pkgcachegen.h/1.5/Fri Aug 10 14:00:20 2001//
|
||||
/pkgrecords.cc/1.5/Fri Aug 10 14:00:20 2001//
|
||||
/pkgrecords.h/1.3/Fri Aug 10 14:00:20 2001//
|
||||
/srcrecords.cc/1.4/Fri Aug 10 14:00:21 2001//
|
||||
/srcrecords.h/1.2/Fri Aug 10 14:00:21 2001//
|
||||
/systemfactory.cc/1.8/Fri Aug 10 14:00:23 2001//
|
||||
/systemfactory.h/1.5/Fri Aug 10 14:00:23 2001//
|
||||
/tagfile.cc/1.2/Fri Aug 10 14:00:23 2001//
|
||||
/tagfile.h/1.1.1.1/Fri Aug 10 14:00:23 2001//
|
||||
/version.cc/1.4/Fri Aug 10 14:00:23 2001//
|
||||
/acquire-item.cc/1.20/Tue Nov 13 14:24:16 2001//
|
||||
/acquire-item.h/1.6/Tue Nov 13 14:24:16 2001//
|
||||
/sourcelist.cc/1.25/Tue Nov 13 14:24:16 2001//
|
||||
/sourcelist.h/1.9/Tue Nov 13 14:24:16 2001//
|
||||
/algorithms.cc/1.11/Fri Nov 16 01:13:06 2001//
|
||||
/depcache.cc/1.20/Fri Nov 16 01:13:06 2001//
|
||||
/depcache.h/1.3/Fri Nov 16 01:13:06 2001//
|
||||
/packagemanager.cc/1.15/Fri Nov 16 01:13:06 2001//
|
||||
/pkgcachegen.cc/1.24/Wed Mar 6 17:17:10 2002//
|
@ -1 +0,0 @@
|
||||
rapt/apt-pkg
|
@ -1 +0,0 @@
|
||||
:pserver:anonymous@cvs.conectiva.com.br:/home/cvs
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: acquire-item.cc,v 1.20 2001/11/12 16:34:00 kojima Exp $
|
||||
// $Id: acquire-item.cc,v 1.6 2003/01/29 14:03:40 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Acquire Item - Item to acquire
|
||||
@ -10,8 +10,7 @@
|
||||
time. The pkgAcqIndex class creates a second class upon instantiation
|
||||
to fetch the other index files because of this.
|
||||
|
||||
#####################################################################
|
||||
*/
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Include Files /*{{{*/
|
||||
#ifdef __GNUG__
|
||||
@ -19,23 +18,64 @@
|
||||
#endif
|
||||
#include <apt-pkg/acquire-item.h>
|
||||
#include <apt-pkg/configuration.h>
|
||||
#include <apt-pkg/sourcelist.h>
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
#include <apt-pkg/fileutl.h>
|
||||
#include <apt-pkg/systemfactory.h>
|
||||
#include <apt-pkg/packagemanager.h>
|
||||
|
||||
// CNC:2002-07-03
|
||||
#include <apt-pkg/repository.h>
|
||||
#include <apt-pkg/md5.h>
|
||||
#include <iostream>
|
||||
using namespace std;
|
||||
|
||||
#include <apti18n.h>
|
||||
|
||||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
#include <errno.h>
|
||||
#include <string.h>
|
||||
#include <string>
|
||||
#include <stdio.h>
|
||||
|
||||
#include <i18n.h>
|
||||
|
||||
/*}}}*/
|
||||
|
||||
using std::string;
|
||||
|
||||
// CNC:2002-07-03
|
||||
// VerifyChecksums - Check MD5 and SHA-1 checksums of a file /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Returns false only if the checksums fail (the file not existing is not
|
||||
a checksum mismatch) */
|
||||
bool VerifyChecksums(string File,unsigned long Size,string MD5)
|
||||
{
|
||||
struct stat Buf;
|
||||
|
||||
if (stat(File.c_str(),&Buf) != 0)
|
||||
return true;
|
||||
|
||||
if (Buf.st_size != Size)
|
||||
{
|
||||
if (_config->FindB("Acquire::Verbose", false) == true)
|
||||
cout << "Size of "<<File<<" did not match what's in the checksum list and was redownloaded."<<endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
if (MD5.empty() == false)
|
||||
{
|
||||
MD5Summation md5sum = MD5Summation();
|
||||
FileFd F(File, FileFd::ReadOnly);
|
||||
|
||||
md5sum.AddFD(F.Fd(), F.Size());
|
||||
if (md5sum.Result().Value() != MD5)
|
||||
{
|
||||
if (_config->FindB("Acquire::Verbose", false) == true)
|
||||
cout << "MD5Sum of "<<File<<" did not match what's in the checksum list and was redownloaded."<<endl;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
// Acquire::Item::Item - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
@ -123,92 +163,78 @@ void pkgAcquire::Item::Rename(string From,string To)
|
||||
if (rename(From.c_str(),To.c_str()) != 0)
|
||||
{
|
||||
char S[300];
|
||||
snprintf(S,sizeof(S),"rename failed, %s (%s -> %s).",strerror(errno),
|
||||
// CNC:2003-01-29 - Fix potential buffer overflow bug. */
|
||||
snprintf(S,sizeof(S),_("rename failed, %s (%s -> %s)."),strerror(errno),
|
||||
From.c_str(),To.c_str());
|
||||
Status = StatError;
|
||||
ErrorText = S;
|
||||
}
|
||||
}
|
||||
/*}}}*/
|
||||
bool pkgAcquire::Item::RecheckFile(string path, string MD5, unsigned long Size)
|
||||
{
|
||||
struct stat Buf;
|
||||
|
||||
if (stat(path.c_str(),&Buf) == 0)
|
||||
{
|
||||
if (Buf.st_size != Size)
|
||||
{
|
||||
if (_config->FindB("Acquire::Verbose", false) == true)
|
||||
_error->Warning(_("Size of %s did not match what's in the hashfile and was redownloaded."),
|
||||
path.c_str());
|
||||
return false;
|
||||
}
|
||||
|
||||
MD5Summation md5sum = MD5Summation();
|
||||
FileFd file = FileFd(path, FileFd::ReadOnly);
|
||||
|
||||
md5sum.AddFD(file.Fd(), file.Size());
|
||||
if (md5sum.Result().Value() != MD5)
|
||||
{
|
||||
if (_config->FindB("Acquire::Verbose", false) == true)
|
||||
_error->Warning(_("MD5 of %s did not match what's int the hashfile and was redownloaded."),
|
||||
path.c_str());
|
||||
return false;
|
||||
}
|
||||
file.Close();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
// AcqIndex::AcqIndex - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* The package file is added to the queue and a second class is
|
||||
instantiated to fetch the revision file */
|
||||
pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,const pkgSourceList::Item *Location) :
|
||||
Item(Owner), Location(Location)
|
||||
// CNC:2002-07-03
|
||||
pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,pkgRepository *Repository,
|
||||
string URI,string URIDesc,string ShortDesc) :
|
||||
Item(Owner), RealURI(URI), Repository(Repository)
|
||||
{
|
||||
Decompression = false;
|
||||
Erase = false;
|
||||
|
||||
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
|
||||
DestFile += URItoFileName(Location->PackagesURI());
|
||||
DestFile += URItoFileName(URI);
|
||||
|
||||
// Create the item
|
||||
Desc.URI = Location->PackagesURI()+_config->Find("Acquire::ComprExtension");
|
||||
Desc.Description = Location->PackagesInfo();
|
||||
// CNC:2002-07-03
|
||||
Desc.URI = URI + ".bz2";
|
||||
Desc.Description = URIDesc;
|
||||
Desc.Owner = this;
|
||||
Desc.ShortDesc = ShortDesc;
|
||||
|
||||
// CNC:2002-07-03
|
||||
// If we're verifying authentication, check whether the size and
|
||||
// MD5 matches, if not, delete the cached files and force redownload
|
||||
string fname = Location->PackagesURI(true);
|
||||
string hash;
|
||||
unsigned int size;
|
||||
// checksums match, if not, delete the cached files and force redownload
|
||||
string MD5Hash;
|
||||
unsigned long Size;
|
||||
|
||||
if (Location->Repository->MD5HashForFile(fname, hash, size)
|
||||
&& !hash.empty() && size != 0)
|
||||
if (Repository != NULL)
|
||||
{
|
||||
string FinalFile = _config->FindDir("Dir::State::lists");
|
||||
FinalFile += URItoFileName(Location->PackagesURI());
|
||||
if (Repository->HasRelease() == true)
|
||||
{
|
||||
if (Repository->FindChecksums(RealURI,Size,MD5Hash) == false)
|
||||
{
|
||||
if (Repository->IsAuthenticated() == true)
|
||||
{
|
||||
_error->Error(_("%s is not listed in the checksum list for its repository"),
|
||||
RealURI.c_str());
|
||||
return;
|
||||
}
|
||||
else
|
||||
_error->Warning("Release file did not contain checksum information for %s",
|
||||
RealURI.c_str());
|
||||
}
|
||||
|
||||
if (!RecheckFile(FinalFile, hash, size))
|
||||
string FinalFile = _config->FindDir("Dir::State::lists");
|
||||
FinalFile += URItoFileName(RealURI);
|
||||
|
||||
if (VerifyChecksums(FinalFile,Size,MD5Hash) == false)
|
||||
{
|
||||
unlink(FinalFile.c_str());
|
||||
unlink(DestFile.c_str());
|
||||
}
|
||||
}
|
||||
|
||||
// Set the short description to the archive component
|
||||
if (Location->Dist[Location->Dist.size() - 1] == '/')
|
||||
Desc.ShortDesc = Location->Dist;
|
||||
else
|
||||
Desc.ShortDesc = Location->Dist + '/' + Location->Section;
|
||||
else if (Repository->IsAuthenticated() == true)
|
||||
{
|
||||
_error->Error(_("Release information not available for %s"),
|
||||
URI.c_str());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
QueueURI(Desc);
|
||||
|
||||
// Create the Release fetch class
|
||||
new pkgAcqIndexRel(Owner,Location);
|
||||
}
|
||||
/*}}}*/
|
||||
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
|
||||
@ -217,7 +243,7 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,const pkgSourceList::Item *Location)
|
||||
string pkgAcqIndex::Custom600Headers()
|
||||
{
|
||||
string Final = _config->FindDir("Dir::State::lists");
|
||||
Final += URItoFileName(Location->PackagesURI());
|
||||
Final += URItoFileName(RealURI);
|
||||
|
||||
struct stat Buf;
|
||||
if (stat(Final.c_str(),&Buf) != 0)
|
||||
@ -240,54 +266,57 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
|
||||
|
||||
if (Decompression == true)
|
||||
{
|
||||
unsigned int size;
|
||||
string hash;
|
||||
// CNC:2002-07-03
|
||||
unsigned long FSize;
|
||||
string MD5Hash;
|
||||
|
||||
string fname = Location->PackagesURI(true);
|
||||
if (Repository != NULL && Repository->HasRelease() == true &&
|
||||
Repository->FindChecksums(RealURI,FSize,MD5Hash) == true)
|
||||
{
|
||||
// We must always get here if the repository is authenticated
|
||||
|
||||
if (!Location->Repository->MD5HashForFile(fname, hash, size))
|
||||
if (FSize != Size)
|
||||
{
|
||||
Status = StatAuthError;
|
||||
ErrorText = "Unauthenticated file";
|
||||
return;
|
||||
}
|
||||
// Check the size
|
||||
if (size != 0 && Size != size)
|
||||
{
|
||||
Status = StatAuthError;
|
||||
if (_config->FindB("Debug::pkgAcquire::Auth", false)) {
|
||||
cout << "size mismatch: " << size << "!=" <<Size<<endl;
|
||||
}
|
||||
ErrorText = "Size mismatch";
|
||||
return;
|
||||
}
|
||||
// Check the md5
|
||||
if (!MD5.empty() //akk needs to make gzip method return a MD5
|
||||
&& !hash.empty() && hash != MD5)
|
||||
{
|
||||
Status = StatAuthError;
|
||||
ErrorText = "MD5Sum mismatch";
|
||||
if (_config->FindB("Debug::pkgAcquire::Auth", false)) {
|
||||
cout << "md5 mismatch: " << hash << "!=" << MD5 << endl;
|
||||
}
|
||||
Status = StatError;
|
||||
ErrorText = _("Size mismatch");
|
||||
Rename(DestFile,DestFile + ".FAILED");
|
||||
if (_config->FindB("Acquire::Verbose",false) == true)
|
||||
_error->Warning("Size mismatch of index file %s: %ul was supposed to be %ul",
|
||||
RealURI.c_str(), Size, FSize);
|
||||
return;
|
||||
}
|
||||
|
||||
if (MD5.empty() == false && MD5Hash != MD5)
|
||||
{
|
||||
Status = StatError;
|
||||
ErrorText = _("MD5Sum mismatch");
|
||||
Rename(DestFile,DestFile + ".FAILED");
|
||||
if (_config->FindB("Acquire::Verbose",false) == true)
|
||||
_error->Warning("MD5Sum mismatch of index file %s: %s was supposed to be %s",
|
||||
RealURI.c_str(), MD5.c_str(), MD5Hash.c_str());
|
||||
return;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Redundant security check
|
||||
assert(Repository == NULL || Repository->IsAuthenticated() == false);
|
||||
}
|
||||
|
||||
// Done, move it into position
|
||||
string FinalFile = _config->FindDir("Dir::State::lists");
|
||||
FinalFile += URItoFileName(Location->PackagesURI());
|
||||
FinalFile += URItoFileName(RealURI);
|
||||
Rename(DestFile,FinalFile);
|
||||
chmod(FinalFile.c_str(),0644);
|
||||
|
||||
/* We restore the original name to DestFile so that the clean operation
|
||||
will work OK */
|
||||
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
|
||||
DestFile += URItoFileName(Location->PackagesURI());
|
||||
DestFile += URItoFileName(RealURI);
|
||||
|
||||
// Remove the compressed version.
|
||||
if (Erase == true)
|
||||
unlink(DestFile.c_str());
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
@ -322,41 +351,6 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
|
||||
if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
|
||||
return;
|
||||
|
||||
unsigned int size;
|
||||
string hash;
|
||||
|
||||
string fname = Location->PackagesURI(true)
|
||||
+ _config->Find("Acquire::ComprExtension");
|
||||
|
||||
if (!Location->Repository->MD5HashForFile(fname, hash, size))
|
||||
{
|
||||
Status = StatAuthError;
|
||||
ErrorText = "Unauthenticated file";
|
||||
return;
|
||||
}
|
||||
|
||||
// Check the size
|
||||
if (size != 0 && Size != size)
|
||||
{
|
||||
Status = StatAuthError;
|
||||
if (_config->FindB("Debug::pkgAcquire::Auth", false)) {
|
||||
cout << "compressed size: " << size << "!=" <<Size<<endl;
|
||||
}
|
||||
ErrorText = "Size mismatch";
|
||||
return;
|
||||
}
|
||||
// Check the md5
|
||||
if (!hash.empty() && hash != MD5)
|
||||
{
|
||||
Status = StatAuthError;
|
||||
ErrorText = "MD5Sum mismatch";
|
||||
if (_config->FindB("Debug::pkgAcquire::Auth", false)) {
|
||||
cout << "compressed md5: " << hash << "!=" << MD5 << endl;
|
||||
}
|
||||
Rename(DestFile,DestFile + ".FAILED");
|
||||
return;
|
||||
}
|
||||
|
||||
if (FileName == DestFile)
|
||||
Erase = true;
|
||||
else
|
||||
@ -364,162 +358,74 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
|
||||
|
||||
Decompression = true;
|
||||
DestFile += ".decomp";
|
||||
Desc.URI = "gzip:" + FileName,Location->PackagesInfo();
|
||||
// CNC:2002-07-03
|
||||
Desc.URI = "bzip2:" + FileName;
|
||||
QueueURI(Desc);
|
||||
Mode = "gzip";
|
||||
// CNC:2002-07-03
|
||||
Mode = "bzip2";
|
||||
}
|
||||
/*}}}*/
|
||||
|
||||
// AcqHashes::AcqHashes - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* The package file is added to the queue and a second class is
|
||||
instantiated to fetch the revision file */
|
||||
pkgAcqHashes::pkgAcqHashes(pkgAcquire *Owner,
|
||||
pkgSourceList::RepositoryItem *Location) :
|
||||
Item(Owner), Location(Location)
|
||||
{
|
||||
Retries = _config->FindI("Acquire::Retries",0);
|
||||
|
||||
Authentication = false;
|
||||
|
||||
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
|
||||
DestFile += URItoFileName(Location->HashesURI());
|
||||
|
||||
// Remove the file, it must be always downloaded
|
||||
unlink(DestFile.c_str());
|
||||
string OldFile = _config->FindDir("Dir::State::lists");
|
||||
OldFile += URItoFileName(Location->HashesURI());
|
||||
unlink(OldFile.c_str());
|
||||
|
||||
// Create the item
|
||||
Desc.URI = Location->HashesURI() + ".gpg";
|
||||
Desc.Description = Location->HashesInfo();
|
||||
Desc.Owner = this;
|
||||
|
||||
Desc.ShortDesc = Location->Dist;
|
||||
|
||||
QueueURI(Desc);
|
||||
}
|
||||
/*}}}*/
|
||||
// AcqHashes::Done - Finished a fetch /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
void pkgAcqHashes::Done(string Message,unsigned long Size,string MD5,
|
||||
pkgAcquire::MethodConfig *Cfg)
|
||||
{
|
||||
Item::Done(Message,Size,MD5,Cfg);
|
||||
|
||||
if (Authentication == true)
|
||||
{
|
||||
// Done, move it into position
|
||||
string FinalFile = _config->FindDir("Dir::State::lists");
|
||||
FinalFile += URItoFileName(Location->HashesURI());
|
||||
Rename(DestFile,FinalFile);
|
||||
|
||||
/* We restore the original name to DestFile so that the clean operation
|
||||
will work OK */
|
||||
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
|
||||
DestFile += URItoFileName(Location->HashesURI());
|
||||
|
||||
string SignerFingerprint = LookupTag(Message,"Signature-Key");
|
||||
|
||||
if (SignerFingerprint != Location->Vendor->Fingerprint)
|
||||
{
|
||||
Status = StatError;
|
||||
ErrorText = _("Hashfile signer is not who it's supposed to be "
|
||||
"(expected ")+Location->Vendor->Fingerprint
|
||||
+_(", got ")+SignerFingerprint+")";
|
||||
return;
|
||||
}
|
||||
|
||||
// Update the hashes and file sizes for this repository
|
||||
if (!Location->UpdateHashes(FinalFile)) {
|
||||
Status = StatError;
|
||||
ErrorText = "Could not stash MD5 hashes to index files";
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
Complete = true;
|
||||
|
||||
string FileName = LookupTag(Message,"Filename");
|
||||
if (FileName.empty() == true)
|
||||
{
|
||||
Status = StatError;
|
||||
ErrorText = "Method gave a blank filename";
|
||||
}
|
||||
|
||||
if (FileName != DestFile)
|
||||
Local = true;
|
||||
|
||||
Authentication = true;
|
||||
DestFile += ".extracted";
|
||||
Desc.URI = "gpg:" + FileName,Location->HashesInfo();
|
||||
QueueURI(Desc);
|
||||
Mode = "gpg";
|
||||
}
|
||||
/*}}}*/
|
||||
// AcqHashes::Failed - Failure handler /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Here we try other sources */
|
||||
void pkgAcqHashes::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
|
||||
{
|
||||
ErrorText = LookupTag(Message,"Message");
|
||||
|
||||
// This is the retry counter
|
||||
if (Retries != 0 &&
|
||||
Cnf->LocalOnly == false &&
|
||||
StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
|
||||
{
|
||||
Retries--;
|
||||
// wait a little before retrying
|
||||
sleep(1);
|
||||
QueueURI(Desc);
|
||||
return;
|
||||
}
|
||||
|
||||
Item::Failed(Message,Cnf);
|
||||
}
|
||||
/*}}}*/
|
// AcqIndexRel::pkgAcqIndexRel - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The Release file is added to the queue */
pkgAcqIndexRel::pkgAcqIndexRel(pkgAcquire *Owner,
const pkgSourceList::Item *Location) :
Item(Owner), Location(Location)
// CNC:2002-07-03
pkgAcqIndexRel::pkgAcqIndexRel(pkgAcquire *Owner,pkgRepository *Repository,
string URI,string URIDesc,string ShortDesc,
bool Master) :
Item(Owner), RealURI(URI), Master(Master),
Repository(Repository)
{
Retries = _config->FindI("Acquire::Retries",0);
// CNC:2002-07-09
assert(Master == false || Repository != NULL);
Authentication = false;
Erase = false;

DestFile = _config->FindDir("Dir::State::lists") + "partial/";
DestFile += URItoFileName(Location->ReleaseURI());
DestFile += URItoFileName(URI);

// Create the item
Desc.URI = Location->ReleaseURI();
Desc.Description = Location->ReleaseInfo();
Desc.URI = URI;
Desc.Description = URIDesc;
Desc.ShortDesc = ShortDesc;
Desc.Owner = this;

// If we're verifying authentication, check whether the size and
// MD5 matches, if not, delete the cached files and force redownload
string hash;
unsigned int size;
if (Location->Repository->MD5HashForFile(Location->ReleaseURI(true), hash, size)
&& !hash.empty() && size != 0)
// CNC:2002-07-09
string MD5Hash;
unsigned long Size;
if (Master == false && Repository != NULL)
{
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(Location->ReleaseURI());
if (Repository->HasRelease() == true)
{
if (Repository->FindChecksums(RealURI,Size,MD5Hash) == false)
{
if (Repository->IsAuthenticated() == true)
{
_error->Error(_("%s is not listed in the checksum list for its repository"),
RealURI.c_str());
return;
}
else
_error->Warning("Release file did not contain checksum information for %s",
RealURI.c_str());
}

if (!RecheckFile(FinalFile, hash, size))
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);

if (VerifyChecksums(FinalFile,Size,MD5Hash) == false)
{
unlink(FinalFile.c_str());
unlink(DestFile.c_str());
unlink(DestFile.c_str()); // Necessary?
}
}
else if (Repository->IsAuthenticated() == true)
{
_error->Error(_("Release information not available for %s"),
URI.c_str());
return;
}
}

// Set the short description to the archive component
if (Location->Dist[Location->Dist.size() - 1] == '/')
Desc.ShortDesc = Location->Dist;
else
Desc.ShortDesc = Location->Dist + '/' + Location->Section;

QueueURI(Desc);
}
@ -530,13 +436,17 @@ pkgAcqIndexRel::pkgAcqIndexRel(pkgAcquire *Owner,
string pkgAcqIndexRel::Custom600Headers()
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(Location->ReleaseURI());
Final += URItoFileName(RealURI);

struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
return "\nIndex-File: true";

return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
// CNC:2002-07-11
string LOI = "";
if (Master == true)
LOI = "\nLocal-Only-IMS: true";
return LOI + "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
// AcqIndexRel::Done - Item downloaded OK /*{{{*/
@ -549,6 +459,56 @@ void pkgAcqIndexRel::Done(string Message,unsigned long Size,string MD5,
{
Item::Done(Message,Size,MD5,Cfg);

// CNC:2002-07-03
if (Authentication == true)
{
if (Repository->IsAuthenticated() == true)
{
// Do the fingerprint matching magic
string FingerPrint = LookupTag(Message,"Signature-Fingerprint");

if (FingerPrint.empty() == true)
{
Status = StatError;
ErrorText = _("No valid signatures found in Release file");
return;
}

// Match fingerprint of Release file
if (Repository->Vendor->FingerPrint != FingerPrint)
{
Status = StatError;
ErrorText = _("Signature fingerprint of Release file does not match (expected ")
+Repository->Vendor->FingerPrint+_(", got ")+FingerPrint+")";
return;
}
}

// Done, move it into position
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);
Rename(DestFile,FinalFile);
chmod(FinalFile.c_str(),0644);

/* We restore the original name to DestFile so that the clean operation
will work OK */
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
DestFile += URItoFileName(RealURI);

// Remove the compressed version.
if (Erase == true)
unlink(DestFile.c_str());

// Update the hashes and file sizes for this repository
if (Repository->ParseRelease(FinalFile) == false &&
Repository->IsAuthenticated() == true)
{
Status = StatError;
ErrorText = _("Could not read checksum list from Release file");
}
return;
}

string FileName = LookupTag(Message,"Filename");
if (FileName.empty() == true)
{
@ -557,47 +517,23 @@ void pkgAcqIndexRel::Done(string Message,unsigned long Size,string MD5,
return;
}

unsigned int size;
string hash;

string fname = Location->ReleaseURI(true);

if (!Location->Repository->MD5HashForFile(fname, hash, size))
{
Status = StatAuthError;
ErrorText = "Unauthenticated file";
return;
}

// Check the size
if (size != 0 && Size != 0 && Size != size)
{
if (_config->FindB("Debug::pkgAcquire::Auth", false)) {
cout << "size: " << size << "!=" <<Size<<endl;
}
Status = StatError;
ErrorText = "Size mismatch";
return;
}
// Check the md5
if (!hash.empty() && !MD5.empty() && hash != MD5)
{
if (_config->FindB("Debug::pkgAcquire::Auth", false)) {
cout << "md5: " << hash << "!=" <<MD5<<endl;
}
Status = StatError;
ErrorText = "MD5Sum mismatch";
Rename(DestFile,DestFile + ".FAILED");
return;
}

// CNC:2002-07-11
Erase = false;
Complete = true;

// The files timestamp matches
if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
{
// CNC:2002-07-11
if (Master == true)
{
// We've got a LocalOnly IMS
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);
Repository->ParseRelease(FinalFile);
}
return;
}

// We have to copy it into place
if (FileName != DestFile)
@ -608,10 +544,61 @@ void pkgAcqIndexRel::Done(string Message,unsigned long Size,string MD5,
return;
}

// CNC:2002-07-03
unsigned long FSize;
string MD5Hash;
if (Master == false && Repository != NULL
&& Repository->HasRelease() == true
&& Repository->FindChecksums(RealURI,FSize,MD5Hash) == true)
{
if (FSize != Size)
{
Status = StatError;
ErrorText = _("Size mismatch");
Rename(DestFile,DestFile + ".FAILED");
if (_config->FindB("Acquire::Verbose",false) == true)
_error->Warning("Size mismatch of index file %s: %ul was supposed to be %ul",
RealURI.c_str(), Size, FSize);
return;
}
if (MD5.empty() == false && MD5Hash != MD5)
{
Status = StatError;
ErrorText = _("MD5Sum mismatch");
Rename(DestFile,DestFile + ".FAILED");
if (_config->FindB("Acquire::Verbose",false) == true)
_error->Warning("MD5Sum mismatch of index file %s: %s was supposed to be %s",
RealURI.c_str(), MD5.c_str(), MD5Hash.c_str());
return;
}
}

if (Master == false || Repository->IsAuthenticated() == false)
{
// Done, move it into position
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(Location->ReleaseURI());
FinalFile += URItoFileName(RealURI);
Rename(DestFile,FinalFile);
chmod(FinalFile.c_str(),0644);

// extract checksums from the Release file
if (Master == true)
Repository->ParseRelease(FinalFile);
}
else
{
if (FileName == DestFile)
Erase = true;
else
Local = true;

// Still have the authentication phase
Authentication = true;
DestFile += ".auth";
Desc.URI = "gpg:" + FileName;
QueueURI(Desc);
Mode = "gpg";
}
}
/*}}}*/
// AcqIndexRel::Failed - Silence failure messages for missing rel files /*{{{*/
@ -619,24 +606,11 @@ void pkgAcqIndexRel::Done(string Message,unsigned long Size,string MD5,
/* */
void pkgAcqIndexRel::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{

ErrorText = LookupTag(Message,"Message");

// This is the retry counter
if (Retries != 0 &&
Cnf->LocalOnly == false &&
StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
{
Retries--;
// wait a little before retrying
sleep(1);
QueueURI(Desc);
return;
}

if (Cnf->LocalOnly == true ||
StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
{
// CNC:2002-07-03
if (Master == false || Repository->IsAuthenticated() == false)
{
// Ignore this
Status = StatDone;
@ -644,6 +618,7 @@ void pkgAcqIndexRel::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
Dequeue();
return;
}
}

Item::Failed(Message,Cnf);
}
@ -663,8 +638,9 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,

if (Version.Arch() == 0)
{
_error->Error("I wasn't able to locate file for the %s package. "
"This might mean you need to manually fix this package. (due to missing arch)",
_error->Error(_("I wasn't able to locate a file for the %s package. "
"This might mean you need to manually fix this package. "
"(due to missing arch)"),
Version.ParentPkg().Name());
return;
}
@ -697,8 +673,8 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,

// Select a source
if (QueueNext() == false && _error->PendingError() == false)
_error->Error("I wasn't able to locate file for the %s package. "
"This might mean you need to manually fix this package.",
_error->Error(_("I wasn't able to locate file for the %s package. "
"This might mean you need to manually fix this package."),
Version.ParentPkg().Name());
}
/*}}}*/
@ -716,13 +692,8 @@ bool pkgAcqArchive::QueueNext()
continue;

// Try to cross match against the source list
string PkgFile = flNotDir(Vf.File().FileName());
pkgSourceList::const_iterator Location;
for (Location = Sources->begin(); Location != Sources->end(); Location++)
if (PkgFile == URItoFileName(Location->PackagesURI()))
break;

if (Location == Sources->end())
pkgIndexFile *Index;
if (Sources->FindIndex(Vf.File(),Index) == false)
continue;

// Grab the text package record
@ -730,12 +701,12 @@ bool pkgAcqArchive::QueueNext()
if (_error->PendingError() == true)
return false;

PkgFile = Parse.FileName();
string PkgFile = Parse.FileName();
MD5 = Parse.MD5Hash();
if (PkgFile.empty() == true)
return _error->Error("The package index files are corrupted. No Filename: "
"field for package %s."
,Version.ParentPkg().Name());
return _error->Error(_("The package index files are corrupted. No Filename: "
"field for package %s."),
Version.ParentPkg().Name());

// See if we already have the file. (Legacy filenames)
FileSize = Version->Size;
@ -790,8 +761,9 @@ bool pkgAcqArchive::QueueNext()
}

// Create the item
Desc.URI = Location->ArchiveURI(PkgFile);
Desc.Description = Location->ArchiveInfo(Version);
Local = false;
Desc.URI = Index->ArchiveURI(PkgFile);
Desc.Description = Index->ArchiveInfo(Version);
Desc.Owner = this;
Desc.ShortDesc = Version.ParentPkg().Name();
QueueURI(Desc);
@ -814,7 +786,7 @@ void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash,
if (Size != Version->Size)
{
Status = StatError;
ErrorText = "Size mismatch";
ErrorText = _("Size mismatch");
return;
}

@ -824,7 +796,7 @@ void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash,
if (Md5Hash != MD5)
{
Status = StatError;
ErrorText = "MD5Sum mismatch";
ErrorText = _("MD5Sum mismatch");
Rename(DestFile,DestFile + ".FAILED");
return;
}
@ -864,6 +836,20 @@ void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash,
void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{
ErrorText = LookupTag(Message,"Message");

/* We don't really want to retry on failed media swaps, this prevents
that. An interesting observation is that permanent failures are not
recorded. */
if (Cnf->Removable == true &&
StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
{
// Vf = Version.FileList();
while (Vf.end() == false) Vf++;
StoreFilename = string();
Item::Failed(Message,Cnf);
return;
}

if (QueueNext() == false)
{
// This is the retry counter
@ -872,12 +858,11 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
{
Retries--;
// wait a little before retrying
sleep(1);
Vf = Version.FileList();
if (QueueNext() == true)
return;
}

StoreFilename = string();
Item::Failed(Message,Cnf);
}
@ -894,6 +879,7 @@ void pkgAcqArchive::Finished()
StoreFilename = string();
}
/*}}}*/

// AcqFile::pkgAcqFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The file is added to the queue */
@ -1005,8 +991,6 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
{
Retries--;
// wait a little before retrying
sleep(1);
QueueURI(Desc);
return;
}
@ -1014,3 +998,4 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
Item::Failed(Message,Cnf);
}
/*}}}*/
// vim:sts=3:sw=3
@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
// $Id: acquire-item.h,v 1.6 2001/11/12 16:34:00 kojima Exp $
// $Id: acquire-item.h,v 1.2 2003/01/29 13:04:48 niemeyer Exp $
/* ######################################################################

Acquire Item - Item to acquire
@ -21,7 +21,7 @@
#define PKGLIB_ACQUIRE_ITEM_H

#include <apt-pkg/acquire.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/indexfile.h>
#include <apt-pkg/pkgrecords.h>

#ifdef __GNUG__
@ -32,7 +32,6 @@
class pkgAcquire::Item
{
protected:
bool RecheckFile(string path, string MD5, unsigned long Size);

// Some private helper methods for registering URIs
pkgAcquire *Owner;
@ -46,11 +45,13 @@ class pkgAcquire::Item
public:

// State of the item
enum {StatIdle, StatFetching, StatDone, StatError, StatAuthError} Status;
/* CNC:2002-11-22
* Do not use anonyomus enums, as this breaks swig in some cases */
enum StatusFlags {StatIdle, StatFetching, StatDone, StatError} Status;
string ErrorText;
unsigned long FileSize;
unsigned long PartialSize;
char *Mode;
const char *Mode;
unsigned long ID;
bool Complete;
bool Local;
@ -78,16 +79,21 @@ class pkgAcquire::Item
virtual ~Item();
};

// CNC:2002-07-03
class pkgRepository;

// Item class for index files
class pkgAcqIndex : public pkgAcquire::Item
{
protected:

const pkgSourceList::Item *Location;
bool Decompression;
bool Erase;
pkgAcquire::ItemDesc Desc;
unsigned int Retries;
string RealURI;

// CNC:2002-07-03
pkgRepository *Repository;

public:

@ -95,9 +101,11 @@ class pkgAcqIndex : public pkgAcquire::Item
virtual void Done(string Message,unsigned long Size,string Md5Hash,
pkgAcquire::MethodConfig *Cnf);
virtual string Custom600Headers();
virtual string DescURI() {return Location->PackagesURI();};
virtual string DescURI() {return RealURI;};

pkgAcqIndex(pkgAcquire *Owner,const pkgSourceList::Item *Location);
// CNC:2002-07-03
pkgAcqIndex(pkgAcquire *Owner,pkgRepository *Repository,string URI,
string URIDesc,string ShortDesct);
};

// Item class for index files
@ -105,9 +113,14 @@ class pkgAcqIndexRel : public pkgAcquire::Item
{
protected:

const pkgSourceList::Item *Location;
pkgAcquire::ItemDesc Desc;
unsigned int Retries;
string RealURI;

// CNC:2002-07-03
bool Authentication;
bool Master;
bool Erase;
pkgRepository *Repository;

public:

@ -116,9 +129,11 @@ class pkgAcqIndexRel : public pkgAcquire::Item
virtual void Done(string Message,unsigned long Size,string Md5Hash,
pkgAcquire::MethodConfig *Cnf);
virtual string Custom600Headers();
virtual string DescURI() {return Location->ReleaseURI();};
virtual string DescURI() {return RealURI;};

pkgAcqIndexRel(pkgAcquire *Owner,const pkgSourceList::Item *Location);
// CNC:2002-07-03
pkgAcqIndexRel(pkgAcquire *Owner,pkgRepository *Repository,string URI,
string URIDesc,string ShortDesc,bool Master=false);
};

// Item class for archive files
@ -154,29 +169,6 @@ class pkgAcqArchive : public pkgAcquire::Item
string &StoreFilename);
};

// Item class for index files
class pkgAcqHashes : public pkgAcquire::Item
{
protected:

bool Authentication;
pkgSourceList::RepositoryItem *Location;
pkgAcquire::ItemDesc Desc;
unsigned int Retries;

public:

// Specialized action members
virtual void Done(string Message,unsigned long Size,string Md5Hash,
pkgAcquire::MethodConfig *Cnf);
virtual string DescURI() {return Location->HashesURI();};
virtual void Failed(string Message,pkgAcquire::MethodConfig *Cnf);

pkgAcqHashes(pkgAcquire *Owner,
pkgSourceList::RepositoryItem *Location);
};

// Fetch a generic file to the current directory
class pkgAcqFile : public pkgAcquire::Item
{
@ -1,12 +1,12 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: acquire-method.cc,v 1.4 2001/06/16 01:50:22 kojima Exp $
|
||||
// $Id: acquire-method.cc,v 1.1 2002/07/23 17:54:50 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Acquire Method
|
||||
|
||||
This is a skeleton class that implements most of the functionality
|
||||
of a method and some usefull functions to make method implementation
|
||||
of a method and some useful functions to make method implementation
|
||||
simpler. The methods all derive this and specialize it. The most
|
||||
complex implementation is the http method which needs to provide
|
||||
pipelining, it runs the message engine at the same time it is
|
||||
@ -23,21 +23,26 @@
|
||||
#include <apt-pkg/configuration.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
#include <apt-pkg/fileutl.h>
|
||||
#include <apt-pkg/hashes.h>
|
||||
|
||||
#include <iostream>
|
||||
#include <stdarg.h>
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
/*}}}*/
|
||||
|
||||
using namespace std;
|
||||
|
||||
// AcqMethod::pkgAcqMethod - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This constructs the initialization text */
|
||||
pkgAcqMethod::pkgAcqMethod(const char *Ver,unsigned long Flags)
|
||||
: Flags(Flags) // CNC:2002-07-11
|
||||
{
|
||||
char S[300] = "";
|
||||
char *End = S;
|
||||
strcat(End,"100 Capabilities\n");
|
||||
snprintf(End+strlen(End),sizeof(S),"Version: %s\n",Ver);
|
||||
sprintf(End+strlen(End),"Version: %s\n",Ver);
|
||||
|
||||
if ((Flags & SingleInstance) == SingleInstance)
|
||||
strcat(End,"Single-Instance: true\n");
|
||||
@ -85,7 +90,7 @@ void pkgAcqMethod::Fail(bool Transient)
|
||||
void pkgAcqMethod::Fail(string Err,bool Transient)
|
||||
{
|
||||
// Strip out junk from the error messages
|
||||
for (char *I = Err.begin(); I != Err.end(); I++)
|
||||
for (string::iterator I = Err.begin(); I != Err.end(); I++)
|
||||
{
|
||||
if (*I == '\r')
|
||||
*I = ' ';
|
||||
@ -97,7 +102,8 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
|
||||
if (Queue != 0)
|
||||
{
|
||||
snprintf(S,sizeof(S)-50,"400 URI Failure\nURI: %s\n"
|
||||
"Message: %s\n",Queue->Uri.c_str(),Err.c_str());
|
||||
"Message: %s %s\n",Queue->Uri.c_str(),Err.c_str(),
|
||||
FailExtra.c_str());
|
||||
|
||||
// Dequeue
|
||||
FetchItem *Tmp = Queue;
|
||||
@ -108,7 +114,8 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
|
||||
}
|
||||
else
|
||||
snprintf(S,sizeof(S)-50,"400 URI Failure\nURI: <UNKNOWN>\n"
|
||||
"Message: %s\n",Err.c_str());
|
||||
"Message: %s %s\n",Err.c_str(),
|
||||
FailExtra.c_str());
|
||||
|
||||
// Set the transient flag
|
||||
if (Transient == true)
|
||||
@ -173,10 +180,12 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
|
||||
|
||||
if (Res.MD5Sum.empty() == false)
|
||||
End += snprintf(End,sizeof(S)-50 - (End - S),"MD5-Hash: %s\n",Res.MD5Sum.c_str());
|
||||
if (Res.SHA1Sum.empty() == false)
|
||||
End += snprintf(End,sizeof(S)-50 - (End - S),"SHA1-Hash: %s\n",Res.SHA1Sum.c_str());
|
||||
|
||||
if (Res.SignatureKeyID.empty() == false)
|
||||
End += snprintf(End,sizeof(S)-80 - (End - S),"Signature-Key: %s\n",
|
||||
Res.SignatureKeyID.c_str());
|
||||
// CNC:2002-07-04
|
||||
if (Res.SignatureFP.empty() == false)
|
||||
End += snprintf(End,sizeof(S)-50 - (End - S),"Signature-Fingerprint: %s\n",Res.SignatureFP.c_str());
|
||||
|
||||
if (Res.ResumePoint != 0)
|
||||
End += snprintf(End,sizeof(S)-50 - (End - S),"Resume-Point: %lu\n",
|
||||
@ -201,10 +210,9 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
|
||||
if (Alt->MD5Sum.empty() == false)
|
||||
End += snprintf(End,sizeof(S)-50 - (End - S),"Alt-MD5-Hash: %s\n",
|
||||
Alt->MD5Sum.c_str());
|
||||
|
||||
if (Alt->SignatureKeyID.empty() == false)
|
||||
End += snprintf(End,sizeof(S)-80 - (End - S),"Alt-Signature-Key: %s\n",
|
||||
Alt->SignatureKeyID.c_str());
|
||||
if (Alt->SHA1Sum.empty() == false)
|
||||
End += snprintf(End,sizeof(S)-50 - (End - S),"Alt-SHA1-Hash: %s\n",
|
||||
Alt->SHA1Sum.c_str());
|
||||
|
||||
if (Alt->IMSHit == true)
|
||||
strcat(End,"Alt-IMS-Hit: true\n");
|
||||
@ -268,7 +276,7 @@ bool pkgAcqMethod::MediaFail(string Required,string Drive)
|
||||
MyMessages.erase(MyMessages.begin());
|
||||
}
|
||||
|
||||
return !StringToBool(LookupTag(Message,"Failed"),false);
|
||||
return !StringToBool(LookupTag(Message,"Fail"),false);
|
||||
}
|
||||
|
||||
Messages.push_back(Message);
|
||||
@ -283,10 +291,11 @@ bool pkgAcqMethod::Configuration(string Message)
|
||||
{
|
||||
::Configuration &Cnf = *_config;
|
||||
|
||||
const char *I = Message.begin();
|
||||
const char *I = Message.c_str();
|
||||
const char *MsgEnd = I + Message.length();
|
||||
|
||||
unsigned int Length = strlen("Config-Item");
|
||||
for (; I + Length < Message.end(); I++)
|
||||
for (; I + Length < MsgEnd; I++)
|
||||
{
|
||||
// Not a config item
|
||||
if (I[Length] != ':' || stringcasecmp(I,I+Length,"Config-Item") != 0)
|
||||
@ -294,11 +303,11 @@ bool pkgAcqMethod::Configuration(string Message)
|
||||
|
||||
I += Length + 1;
|
||||
|
||||
for (; I < Message.end() && *I == ' '; I++);
|
||||
for (; I < MsgEnd && *I == ' '; I++);
|
||||
const char *Equals = I;
|
||||
for (; Equals < Message.end() && *Equals != '='; Equals++);
|
||||
for (; Equals < MsgEnd && *Equals != '='; Equals++);
|
||||
const char *End = Equals;
|
||||
for (; End < Message.end() && *End != '\n'; End++);
|
||||
for (; End < MsgEnd && *End != '\n'; End++);
|
||||
if (End == Equals)
|
||||
return false;
|
||||
|
||||
@ -363,6 +372,11 @@ int pkgAcqMethod::Run(bool Single)
|
||||
Tmp->IndexFile = StringToBool(LookupTag(Message,"Index-File"),false);
|
||||
Tmp->Next = 0;
|
||||
|
||||
// CNC:2002-07-11
|
||||
if (StringToBool(LookupTag(Message,"Local-Only-IMS"),false) == true
|
||||
&& (Flags & LocalOnly) == 0)
|
||||
Tmp->LastModified = 0;
|
||||
|
||||
// Append it to the list
|
||||
FetchItem **I = &Queue;
|
||||
for (; *I != 0; I = &(*I)->Next);
|
||||
@ -440,3 +454,14 @@ pkgAcqMethod::FetchResult::FetchResult() : LastModified(0),
|
||||
{
|
||||
}
|
||||
/*}}}*/
|
||||
// AcqMethod::FetchResult::TakeHashes - Load hashes /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This hides the number of hashes we are supporting from the caller.
|
||||
It just deals with the hash class. */
|
||||
void pkgAcqMethod::FetchResult::TakeHashes(Hashes &Hash)
|
||||
{
|
||||
MD5Sum = Hash.MD5.Result();
|
||||
SHA1Sum = Hash.SHA1.Result();
|
||||
}
|
||||
/*}}}*/
|
||||
// vim:sts=3:sw=3
|
||||
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: acquire-method.h,v 1.2 2000/09/26 14:22:14 kojima Exp $
|
||||
// $Id: acquire-method.h,v 1.1 2002/07/23 17:54:50 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Acquire Method - Method helper class + functions
|
||||
@ -20,10 +20,14 @@
|
||||
#pragma interface "apt-pkg/acquire-method.h"
|
||||
#endif
|
||||
|
||||
class Hashes;
|
||||
class pkgAcqMethod
|
||||
{
|
||||
protected:
|
||||
|
||||
// CNC:2002-07-11
|
||||
unsigned long Flags;
|
||||
|
||||
struct FetchItem
|
||||
{
|
||||
FetchItem *Next;
|
||||
@ -37,12 +41,16 @@ class pkgAcqMethod
|
||||
struct FetchResult
|
||||
{
|
||||
string MD5Sum;
|
||||
string SignatureKeyID;
|
||||
string SHA1Sum;
|
||||
// CNC:2002-07-03
|
||||
string SignatureFP;
|
||||
time_t LastModified;
|
||||
bool IMSHit;
|
||||
string Filename;
|
||||
unsigned long Size;
|
||||
unsigned long ResumePoint;
|
||||
|
||||
void TakeHashes(Hashes &Hash);
|
||||
FetchResult();
|
||||
};
|
||||
|
||||
@ -50,6 +58,7 @@ class pkgAcqMethod
|
||||
vector<string> Messages;
|
||||
FetchItem *Queue;
|
||||
FetchItem *QueueBack;
|
||||
string FailExtra;
|
||||
|
||||
// Handlers for messages
|
||||
virtual bool Configuration(string Message);
|
||||
@ -75,6 +84,7 @@ class pkgAcqMethod
|
||||
void Status(const char *Format,...);
|
||||
|
||||
int Run(bool Single = false);
|
||||
inline void SetFailExtraMsg(string Msg) {FailExtra = Msg;};
|
||||
|
||||
pkgAcqMethod(const char *Ver,unsigned long Flags = 0);
|
||||
virtual ~pkgAcqMethod() {};
|
||||
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: acquire-worker.cc,v 1.2 2001/01/11 02:03:26 kojima Exp $
|
||||
// $Id: acquire-worker.cc,v 1.1 2002/07/23 17:54:50 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Acquire Worker
|
||||
@ -22,17 +22,21 @@
|
||||
#include <apt-pkg/fileutl.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
|
||||
#include <apti18n.h>
|
||||
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
|
||||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
#include <fcntl.h>
|
||||
#include <signal.h>
|
||||
#include <stdio.h>
|
||||
#include <errno.h>
|
||||
|
||||
#include <i18n.h>
|
||||
|
||||
/*}}}*/
|
||||
|
||||
using namespace std;
|
||||
|
||||
// Worker::Worker - Constructor for Queue startup /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
@ -114,7 +118,7 @@ bool pkgAcquire::Worker::Start()
|
||||
int Pipes[4] = {-1,-1,-1,-1};
|
||||
if (pipe(Pipes) != 0 || pipe(Pipes+2) != 0)
|
||||
{
|
||||
_error->Errno("pipe",_("Failed to create IPC pipe to subprocess"));
|
||||
_error->Errno("pipe","Failed to create IPC pipe to subprocess");
|
||||
for (int I = 0; I != 4; I++)
|
||||
close(Pipes[I]);
|
||||
return false;
|
||||
@ -124,14 +128,11 @@ bool pkgAcquire::Worker::Start()
|
||||
|
||||
// Fork off the process
|
||||
Process = ExecFork();
|
||||
|
||||
// Spawn the subprocess
|
||||
if (Process == 0)
|
||||
{
|
||||
// Setup the FDs
|
||||
dup2(Pipes[1],STDOUT_FILENO);
|
||||
dup2(Pipes[2],STDIN_FILENO);
|
||||
dup2(((filebuf *)clog.rdbuf())->fd(),STDERR_FILENO);
|
||||
SetCloseExec(STDOUT_FILENO,false);
|
||||
SetCloseExec(STDIN_FILENO,false);
|
||||
SetCloseExec(STDERR_FILENO,false);
|
||||
@ -194,7 +195,7 @@ bool pkgAcquire::Worker::RunMessages()
|
||||
char *End;
|
||||
int Number = strtol(Message.c_str(),&End,10);
|
||||
if (End == Message.c_str())
|
||||
return _error->Error(_("Invalid message from method %s: %s"),Access.c_str(),Message.c_str());
|
||||
return _error->Error("Invalid message from method %s: %s",Access.c_str(),Message.c_str());
|
||||
|
||||
string URI = LookupTag(Message,"URI");
|
||||
pkgAcquire::Queue::QItem *Itm = 0;
|
||||
@ -207,7 +208,7 @@ bool pkgAcquire::Worker::RunMessages()
|
||||
// 100 Capabilities
|
||||
case 100:
|
||||
if (Capabilities(Message) == false)
|
||||
return _error->Error(_("Unable to process Capabilities message from %s"),Access.c_str());
|
||||
return _error->Error("Unable to process Capabilities message from %s",Access.c_str());
|
||||
break;
|
||||
|
||||
// 101 Log
|
||||
@ -226,7 +227,7 @@ bool pkgAcquire::Worker::RunMessages()
|
||||
{
|
||||
if (Itm == 0)
|
||||
{
|
||||
_error->Error(_("Method gave invalid 200 URI Start message"));
|
||||
_error->Error("Method gave invalid 200 URI Start message");
|
||||
break;
|
||||
}
|
||||
|
||||
@ -265,7 +266,7 @@ bool pkgAcquire::Worker::RunMessages()
|
||||
OwnerQ->ItemDone(Itm);
|
||||
if (TotalSize != 0 &&
|
||||
(unsigned)atoi(LookupTag(Message,"Size","0").c_str()) != TotalSize)
|
||||
_error->Warning(_("Bizzar Error - File size is not what the server reported %s %u"),
|
||||
_error->Warning("Bizarre Error - File size is not what the server reported %s %lu",
|
||||
LookupTag(Message,"Size","0").c_str(),TotalSize);
|
||||
|
||||
Owner->Done(Message,atoi(LookupTag(Message,"Size","0").c_str()),
|
||||
@ -294,7 +295,7 @@ bool pkgAcquire::Worker::RunMessages()
|
||||
{
|
||||
if (Itm == 0)
|
||||
{
|
||||
_error->Error(_("Method gave invalid 400 URI Failure message"));
|
||||
_error->Error("Method gave invalid 400 URI Failure message");
|
||||
break;
|
||||
}
|
||||
|
||||
@ -316,7 +317,7 @@ bool pkgAcquire::Worker::RunMessages()
|
||||
|
||||
// 401 General Failure
|
||||
case 401:
|
||||
_error->Error(_("Method %s General failure: %s"),LookupTag(Message,"Message").c_str());
|
||||
_error->Error("Method %s General failure: %s",Access.c_str(),LookupTag(Message,"Message").c_str());
|
||||
break;
|
||||
|
||||
// 403 Media Change
|
||||
@ -370,7 +371,7 @@ bool pkgAcquire::Worker::MediaChange(string Message)
|
||||
LookupTag(Message,"Drive")) == false)
|
||||
{
|
||||
char S[300];
|
||||
sprintf(S,"603 Media Changed\nFailed: true\n\n");
|
||||
snprintf(S,sizeof(S),"603 Media Changed\nFailed: true\n\n");
|
||||
if (Debug == true)
|
||||
clog << " -> " << Access << ':' << QuoteString(S,"\n") << endl;
|
||||
OutQueue += S;
|
||||
@ -379,7 +380,7 @@ bool pkgAcquire::Worker::MediaChange(string Message)
|
||||
}
|
||||
|
||||
char S[300];
|
||||
sprintf(S,"603 Media Changed\n\n");
|
||||
snprintf(S,sizeof(S),"603 Media Changed\n\n");
|
||||
if (Debug == true)
|
||||
clog << " -> " << Access << ':' << QuoteString(S,"\n") << endl;
|
||||
OutQueue += S;
|
||||
@ -408,7 +409,7 @@ bool pkgAcquire::Worker::SendConfiguration()
|
||||
{
|
||||
if (Top->Value.empty() == false)
|
||||
{
|
||||
string Line = "Config-Item: " + Top->FullTag() + "=";
|
||||
string Line = "Config-Item: " + QuoteString(Top->FullTag(),"=\"\n") + "=";
|
||||
Line += QuoteString(Top->Value,"\n") + '\n';
|
||||
Message += Line;
|
||||
}
|
||||
@ -465,7 +466,7 @@ bool pkgAcquire::Worker::OutFdReady()
|
||||
int Res;
|
||||
do
|
||||
{
|
||||
Res = write(OutFd,OutQueue.begin(),OutQueue.length());
|
||||
Res = write(OutFd,OutQueue.c_str(),OutQueue.length());
|
||||
}
|
||||
while (Res < 0 && errno == EINTR);
|
||||
|
||||
@ -500,7 +501,7 @@ bool pkgAcquire::Worker::InFdReady()
|
||||
read returned -1. */
|
||||
bool pkgAcquire::Worker::MethodFailure()
|
||||
{
|
||||
_error->Error(_("Method %s has died unexpectedly!"),Access.c_str());
|
||||
_error->Error("Method %s has died unexpectedly!",Access.c_str());
|
||||
|
||||
ExecWait(Process,Access.c_str(),true);
|
||||
Process = -1;
|
||||
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: acquire-worker.h,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
|
||||
// $Id: acquire-worker.h,v 1.1 2002/07/23 17:54:50 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Acquire Worker - Worker process manager
|
||||
@ -21,10 +21,10 @@
|
||||
// Interfacing to the method process
|
||||
class pkgAcquire::Worker
|
||||
{
|
||||
friend pkgAcquire;
|
||||
friend class pkgAcquire;
|
||||
|
||||
protected:
|
||||
friend Queue;
|
||||
friend class Queue;
|
||||
|
||||
/* Linked list starting at a Queue and a linked list starting
|
||||
at Acquire */
|
||||
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: acquire.cc,v 1.3 2001/06/16 01:50:22 kojima Exp $
|
||||
// $Id: acquire.cc,v 1.2 2002/07/25 18:07:18 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Acquire - File Acquiration
|
||||
@ -23,15 +23,18 @@
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
|
||||
#include <apti18n.h>
|
||||
|
||||
#include <iostream>
|
||||
|
||||
#include <dirent.h>
|
||||
#include <sys/time.h>
|
||||
#include <errno.h>
|
||||
#include <sys/stat.h>
|
||||
|
||||
#include <i18n.h>
|
||||
|
||||
/*}}}*/
|
||||
|
||||
using namespace std;
|
||||
|
||||
// Acquire::pkgAcquire - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* We grab some runtime state from the configuration space */
|
||||
@ -110,10 +113,15 @@ void pkgAcquire::Remove(Item *Itm)
|
||||
{
|
||||
Dequeue(Itm);
|
||||
|
||||
for (vector<Item *>::iterator I = Items.begin(); I < Items.end(); I++)
|
||||
for (ItemIterator I = Items.begin(); I != Items.end();)
|
||||
{
|
||||
if (*I == Itm)
|
||||
{
|
||||
Items.erase(I);
|
||||
I = Items.begin();
|
||||
}
|
||||
else
|
||||
I++;
|
||||
}
|
||||
}
|
||||
/*}}}*/
|
||||
@ -365,7 +373,7 @@ pkgAcquire::RunResult pkgAcquire::Run()
|
||||
I->Shutdown(false);
|
||||
|
||||
// Shut down the items
|
||||
for (Item **I = Items.begin(); I != Items.end(); I++)
|
||||
for (ItemIterator I = Items.begin(); I != Items.end(); I++)
|
||||
(*I)->Finished();
|
||||
|
||||
if (_error->PendingError())
|
||||
@ -401,13 +409,13 @@ bool pkgAcquire::Clean(string Dir)
|
||||
{
|
||||
DIR *D = opendir(Dir.c_str());
|
||||
if (D == 0)
|
||||
return _error->Errno("opendir","Unable to read %s",Dir.c_str());
|
||||
return _error->Errno("opendir",_("Unable to read %s"),Dir.c_str());
|
||||
|
||||
string StartDir = SafeGetCWD();
|
||||
if (chdir(Dir.c_str()) != 0)
|
||||
{
|
||||
closedir(D);
|
||||
return _error->Errno("chdir","Unable to change to ",Dir.c_str());
|
||||
return _error->Errno("chdir",_("Unable to change to %s"),Dir.c_str());
|
||||
}
|
||||
|
||||
for (struct dirent *Dir = readdir(D); Dir != 0; Dir = readdir(D))
|
||||
@ -420,7 +428,7 @@ bool pkgAcquire::Clean(string Dir)
|
||||
continue;
|
||||
|
||||
// Look in the get list
|
||||
vector<Item *>::iterator I = Items.begin();
|
||||
ItemCIterator I = Items.begin();
|
||||
for (; I != Items.end(); I++)
|
||||
if (flNotDir((*I)->DestFile) == Dir->d_name)
|
||||
break;
|
||||
@ -438,10 +446,10 @@ bool pkgAcquire::Clean(string Dir)
|
||||
// Acquire::TotalNeeded - Number of bytes to fetch /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is the total number of bytes needed */
|
||||
unsigned long pkgAcquire::TotalNeeded()
|
||||
double pkgAcquire::TotalNeeded()
|
||||
{
|
||||
unsigned long Total = 0;
|
||||
for (pkgAcquire::Item **I = ItemsBegin(); I != ItemsEnd(); I++)
|
||||
double Total = 0;
|
||||
for (ItemCIterator I = ItemsBegin(); I != ItemsEnd(); I++)
|
||||
Total += (*I)->FileSize;
|
||||
return Total;
|
||||
}
|
||||
@ -449,10 +457,10 @@ unsigned long pkgAcquire::TotalNeeded()
|
||||
// Acquire::FetchNeeded - Number of bytes needed to get /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is the number of bytes that is not local */
|
||||
unsigned long pkgAcquire::FetchNeeded()
|
||||
double pkgAcquire::FetchNeeded()
|
||||
{
|
||||
unsigned long Total = 0;
|
||||
for (pkgAcquire::Item **I = ItemsBegin(); I != ItemsEnd(); I++)
|
||||
double Total = 0;
|
||||
for (ItemCIterator I = ItemsBegin(); I != ItemsEnd(); I++)
|
||||
if ((*I)->Local == false)
|
||||
Total += (*I)->FileSize;
|
||||
return Total;
|
||||
@ -461,10 +469,10 @@ unsigned long pkgAcquire::FetchNeeded()
|
||||
// Acquire::PartialPresent - Number of partial bytes we already have /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is the number of bytes that is not local */
|
||||
unsigned long pkgAcquire::PartialPresent()
|
||||
double pkgAcquire::PartialPresent()
|
||||
{
|
||||
unsigned long Total = 0;
|
||||
for (pkgAcquire::Item **I = ItemsBegin(); I != ItemsEnd(); I++)
|
||||
double Total = 0;
|
||||
for (ItemCIterator I = ItemsBegin(); I != ItemsEnd(); I++)
|
||||
if ((*I)->Local == false)
|
||||
Total += (*I)->PartialSize;
|
||||
return Total;
|
||||
@ -729,7 +737,7 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
|
||||
// Compute the total number of bytes to fetch
|
||||
unsigned int Unknown = 0;
|
||||
unsigned int Count = 0;
|
||||
for (pkgAcquire::Item **I = Owner->ItemsBegin(); I != Owner->ItemsEnd();
|
||||
for (pkgAcquire::ItemCIterator I = Owner->ItemsBegin(); I != Owner->ItemsEnd();
|
||||
I++, Count++)
|
||||
{
|
||||
TotalItems++;
|
||||
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: acquire.h,v 1.2 2000/11/06 12:53:49 kojima Exp $
|
||||
// $Id: acquire.h,v 1.2 2003/01/29 13:04:48 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Acquire - File Acquiration
|
||||
@ -35,6 +35,9 @@
|
||||
#include <vector>
|
||||
#include <string>
|
||||
|
||||
using std::vector;
|
||||
using std::string;
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/acquire.h"
|
||||
#endif
|
||||
@ -52,8 +55,11 @@ class pkgAcquire
|
||||
class Worker;
|
||||
struct MethodConfig;
|
||||
struct ItemDesc;
|
||||
friend Item;
|
||||
friend Queue;
|
||||
friend class Item;
|
||||
friend class Queue;
|
||||
|
||||
typedef vector<Item *>::iterator ItemIterator;
|
||||
typedef vector<Item *>::const_iterator ItemCIterator;
|
||||
|
||||
protected:
|
||||
|
||||
@ -100,8 +106,8 @@ class pkgAcquire
|
||||
// Simple iteration mechanism
|
||||
inline Worker *WorkersBegin() {return Workers;};
|
||||
Worker *WorkerStep(Worker *I);
|
||||
inline Item **ItemsBegin() {return Items.begin();};
|
||||
inline Item **ItemsEnd() {return Items.end();};
|
||||
inline ItemIterator ItemsBegin() {return Items.begin();};
|
||||
inline ItemIterator ItemsEnd() {return Items.end();};
|
||||
|
||||
// Iterate over queued Item URIs
|
||||
class UriIterator;
|
||||
@ -112,9 +118,9 @@ class pkgAcquire
|
||||
bool Clean(string Dir);
|
||||
|
||||
// Returns the size of the total download set
|
||||
unsigned long TotalNeeded();
|
||||
unsigned long FetchNeeded();
|
||||
unsigned long PartialPresent();
|
||||
double TotalNeeded();
|
||||
double FetchNeeded();
|
||||
double PartialPresent();
|
||||
|
||||
pkgAcquire(pkgAcquireStatus *Log = 0);
|
||||
virtual ~pkgAcquire();
|
||||
@ -132,11 +138,14 @@ struct pkgAcquire::ItemDesc
|
||||
// List of possible items queued for download.
|
||||
class pkgAcquire::Queue
|
||||
{
|
||||
friend pkgAcquire;
|
||||
friend pkgAcquire::UriIterator;
|
||||
friend class pkgAcquire;
|
||||
friend class pkgAcquire::UriIterator;
|
||||
friend class pkgAcquire::Worker;
|
||||
Queue *Next;
|
||||
|
||||
public:
|
||||
protected:
|
||||
|
||||
#ifndef SWIG
|
||||
// Queued item
|
||||
struct QItem : pkgAcquire::ItemDesc
|
||||
{
|
||||
@ -151,7 +160,8 @@ class pkgAcquire::Queue
|
||||
Owner = I.Owner;
|
||||
};
|
||||
};
|
||||
protected:
|
||||
#endif
|
||||
|
||||
// Name of the queue
|
||||
string Name;
|
||||
|
||||
@ -240,11 +250,11 @@ class pkgAcquireStatus
|
||||
|
||||
struct timeval Time;
|
||||
struct timeval StartTime;
|
||||
unsigned long LastBytes;
|
||||
double LastBytes;
|
||||
double CurrentCPS;
|
||||
unsigned long CurrentBytes;
|
||||
unsigned long TotalBytes;
|
||||
unsigned long FetchedBytes;
|
||||
double CurrentBytes;
|
||||
double TotalBytes;
|
||||
double FetchedBytes;
|
||||
unsigned long ElapsedTime;
|
||||
unsigned long TotalItems;
|
||||
unsigned long CurrentItems;
|
||||
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: algorithms.cc,v 1.11 2001/11/13 17:32:07 kojima Exp $
|
||||
// $Id: algorithms.cc,v 1.11 2003/01/29 18:43:48 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Algorithms - A set of misc algorithms
|
||||
@ -20,27 +20,62 @@
|
||||
#include <apt-pkg/algorithms.h>
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/configuration.h>
|
||||
#include <iostream.h>
|
||||
#include <apt-pkg/sptr.h>
|
||||
|
||||
// CNC:2002-07-04
|
||||
#include <apt-pkg/pkgsystem.h>
|
||||
#include <apt-pkg/version.h>
|
||||
|
||||
#include <apti18n.h>
|
||||
|
||||
#include <iostream>
|
||||
/*}}}*/
|
||||
#include <i18n.h>
|
||||
using namespace std;
|
||||
|
||||
pkgProblemResolver *pkgProblemResolver::This = 0;
|
||||
|
||||
// Simulate::Simulate - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
pkgSimulate::pkgSimulate(pkgDepCache &Cache) : pkgPackageManager(Cache),
|
||||
Sim(Cache.GetMap())
|
||||
/* The legacy translations here of input Pkg iterators is obsolete,
|
||||
this is not necessary since the pkgCaches are fully shared now. */
|
||||
pkgSimulate::pkgSimulate(pkgDepCache *Cache) : pkgPackageManager(Cache),
|
||||
iPolicy(Cache),
|
||||
Sim(&Cache->GetCache(),&iPolicy)
|
||||
{
|
||||
Flags = new unsigned char[Cache.HeaderP->PackageCount];
|
||||
memset(Flags,0,sizeof(*Flags)*Cache.HeaderP->PackageCount);
|
||||
Sim.Init(0);
|
||||
Flags = new unsigned char[Cache->Head().PackageCount];
|
||||
memset(Flags,0,sizeof(*Flags)*Cache->Head().PackageCount);
|
||||
|
||||
// Fake a filename so as not to activate the media swapping
|
||||
string Jnk = "SIMULATE";
|
||||
for (unsigned int I = 0; I != Cache.Head().PackageCount; I++)
|
||||
for (unsigned int I = 0; I != Cache->Head().PackageCount; I++)
|
||||
FileNames[I] = Jnk;
|
||||
}
|
||||
/*}}}*/
|
||||
// Simulate::Describe - Describe a package /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Parameter Now == true gives both current and available varsion,
|
||||
Parameter Now == false gives only the available package version */
|
||||
void pkgSimulate::Describe(PkgIterator Pkg,ostream &out,bool Now)
|
||||
{
|
||||
VerIterator Ver(Sim);
|
||||
|
||||
out << Pkg.Name();
|
||||
|
||||
if (Now == true)
|
||||
{
|
||||
Ver = Pkg.CurrentVer();
|
||||
if (Ver.end() == false)
|
||||
out << " [" << Ver.VerStr() << ']';
|
||||
}
|
||||
|
||||
Ver = Sim[Pkg].CandidateVerIter(Sim);
|
||||
if (Ver.end() == true)
|
||||
return;
|
||||
|
||||
out << " (" << Ver.VerStr() << ' ' << Ver.RelStr() << ')';
|
||||
}
|
||||
/*}}}*/
|
||||
// Simulate::Install - Simulate unpacking of a package /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
@ -50,7 +85,8 @@ bool pkgSimulate::Install(PkgIterator iPkg,string /*File*/)
|
||||
PkgIterator Pkg = Sim.FindPkg(iPkg.Name());
|
||||
Flags[Pkg->ID] = 1;
|
||||
|
||||
cout << "Inst " << Pkg.Name();
|
||||
cout << "Inst ";
|
||||
Describe(Pkg,cout,true);
|
||||
Sim.MarkInstall(Pkg,false);
|
||||
|
||||
// Look for broken conflicts+predepends.
|
||||
@ -59,19 +95,24 @@ bool pkgSimulate::Install(PkgIterator iPkg,string /*File*/)
|
||||
if (Sim[I].InstallVer == 0)
|
||||
continue;
|
||||
|
||||
for (DepIterator D = Sim[I].InstVerIter(Sim).DependsList(); D.end() == false; D++)
|
||||
if (D->Type == pkgCache::Dep::Conflicts
|
||||
|| D->Type == pkgCache::Dep::Obsoletes
|
||||
|| D->Type == pkgCache::Dep::PreDepends)
|
||||
for (DepIterator D = Sim[I].InstVerIter(Sim).DependsList(); D.end() == false;)
|
||||
{
|
||||
if ((Sim[D] & pkgDepCache::DepInstall) == 0)
|
||||
DepIterator Start;
|
||||
DepIterator End;
|
||||
D.GlobOr(Start,End);
|
||||
if (Start->Type == pkgCache::Dep::Conflicts ||
|
||||
Start->Type == pkgCache::Dep::Obsoletes ||
|
||||
End->Type == pkgCache::Dep::PreDepends)
|
||||
{
|
||||
cout << " [" << I.Name() << " on " << D.TargetPkg().Name() << ']';
|
||||
if (D->Type == pkgCache::Dep::Conflicts)
|
||||
if ((Sim[End] & pkgDepCache::DepGInstall) == 0)
|
||||
{
|
||||
cout << " [" << I.Name() << " on " << Start.TargetPkg().Name() << ']';
|
||||
if (Start->Type == pkgCache::Dep::Conflicts)
|
||||
_error->Error("Fatal, conflicts violated %s",I.Name());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Sim.BrokenCount() != 0)
|
||||
ShortBreaks();
|
||||
@ -117,7 +158,10 @@ bool pkgSimulate::Configure(PkgIterator iPkg)
|
||||
_error->Error("Conf Broken %s",Pkg.Name());
|
||||
}
|
||||
else
|
||||
cout << "Conf " << Pkg.Name();
|
||||
{
|
||||
cout << "Conf ";
|
||||
Describe(Pkg,cout,false);
|
||||
}
|
||||
|
||||
if (Sim.BrokenCount() != 0)
|
||||
ShortBreaks();
|
||||
@ -138,9 +182,10 @@ bool pkgSimulate::Remove(PkgIterator iPkg,bool Purge)
|
||||
Flags[Pkg->ID] = 3;
|
||||
Sim.MarkDelete(Pkg);
|
||||
if (Purge == true)
|
||||
cout << "Purg " << Pkg.Name();
|
||||
cout << "Purg ";
|
||||
else
|
||||
cout << "Remv " << Pkg.Name();
|
||||
cout << "Remv ";
|
||||
Describe(Pkg,cout,false);
|
||||
|
||||
if (Sim.BrokenCount() != 0)
|
||||
ShortBreaks();
|
||||
@ -178,16 +223,20 @@ bool pkgApplyStatus(pkgDepCache &Cache)
|
||||
{
|
||||
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
|
||||
{
|
||||
if (I->VersionList == 0)
|
||||
continue;
|
||||
|
||||
// Only choice for a ReInstReq package is to reinstall
|
||||
if (I->InstState == pkgCache::State::ReInstReq ||
|
||||
I->InstState == pkgCache::State::HoldReInstReq)
|
||||
{
|
||||
if (I.CurrentVer().Downloadable() == true)
|
||||
if (I->CurrentVer != 0 && I.CurrentVer().Downloadable() == true)
|
||||
Cache.MarkKeep(I);
|
||||
else
|
||||
{
|
||||
// Is this right? Will dpkg choke on an upgrade?
|
||||
if (Cache[I].CandidateVerIter(Cache).Downloadable() == true)
|
||||
if (Cache[I].CandidateVer != 0 &&
|
||||
Cache[I].CandidateVerIter(Cache).Downloadable() == true)
|
||||
Cache.MarkInstall(I);
|
||||
else
|
||||
return _error->Error(_("The package %s needs to be reinstalled, "
|
||||
@ -203,12 +252,13 @@ bool pkgApplyStatus(pkgDepCache &Cache)
|
||||
re-unpacked (probably) */
|
||||
case pkgCache::State::UnPacked:
|
||||
case pkgCache::State::HalfConfigured:
|
||||
if (I.CurrentVer().Downloadable() == true ||
|
||||
if ((I->CurrentVer != 0 && I.CurrentVer().Downloadable() == true) ||
|
||||
I.State() != pkgCache::PkgIterator::NeedsUnpack)
|
||||
Cache.MarkKeep(I);
|
||||
else
|
||||
{
|
||||
if (Cache[I].CandidateVerIter(Cache).Downloadable() == true)
|
||||
if (Cache[I].CandidateVer != 0 &&
|
||||
Cache[I].CandidateVerIter(Cache).Downloadable() == true)
|
||||
Cache.MarkInstall(I);
|
||||
else
|
||||
Cache.MarkDelete(I);
|
||||
@ -222,8 +272,8 @@ bool pkgApplyStatus(pkgDepCache &Cache)
|
||||
|
||||
default:
|
||||
if (I->InstState != pkgCache::State::Ok)
|
||||
return _error->Error(_("The package %s is not ok and I "
|
||||
"don't know how to fix it!"),I.Name());
|
||||
return _error->Error("The package %s is not ok and I "
|
||||
"don't know how to fix it!",I.Name());
|
||||
}
|
||||
}
|
||||
return true;
|
||||
@ -254,11 +304,18 @@ bool pkgFixBroken(pkgDepCache &Cache)
|
||||
Cache.MarkInstall(I,true);
|
||||
}
|
||||
|
||||
pkgProblemResolver Fix(Cache);
|
||||
pkgProblemResolver Fix(&Cache);
|
||||
|
||||
// CNC:2002-07-04
|
||||
_system->ProcessCache(Cache,Fix);
|
||||
|
||||
// CNC:2002-08-08
|
||||
if (_config->FindB("APT::Remove-Depends",false) == true)
|
||||
Fix.RemoveDepends();
|
||||
|
||||
return Fix.Resolve(true);
|
||||
}
|
||||
/*}}}*/
|
||||
|
||||
// DistUpgrade - Distribution upgrade /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This autoinstalls every package and then force installs every
|
||||
@ -272,8 +329,29 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
|
||||
/* Auto upgrade all installed packages, this provides the basis
|
||||
for the installation */
|
||||
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
|
||||
{
|
||||
// CNC:2002-07-23
|
||||
if (I->CurrentVer != 0)
|
||||
{
|
||||
// Was it obsoleted?
|
||||
bool Obsoleted = false;
|
||||
for (pkgCache::DepIterator D = I.RevDependsList(); D.end() == false; D++)
|
||||
{
|
||||
if (D->Type == pkgCache::Dep::Obsoletes &&
|
||||
Cache[D.ParentPkg()].CandidateVer != 0 &&
|
||||
Cache[D.ParentPkg()].CandidateVerIter(Cache).Downloadable() == true &&
|
||||
(pkgCache::Version*)D.ParentVer() == Cache[D.ParentPkg()].CandidateVer &&
|
||||
Cache.VS().CheckDep(I.CurrentVer().VerStr(), D) == true)
|
||||
{
|
||||
Cache.MarkInstall(D.ParentPkg(),true);
|
||||
Obsoleted = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (Obsoleted == false)
|
||||
Cache.MarkInstall(I,true);
|
||||
}
|
||||
}
|
||||
|
||||
/* Now, auto upgrade all essential packages - this ensures that
|
||||
the essential packages are present and working */
|
||||
@ -284,10 +362,34 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
|
||||
/* We do it again over all previously installed packages to force
|
||||
conflict resolution on them all. */
|
||||
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
|
||||
{
|
||||
// CNC:2002-07-23
|
||||
if (I->CurrentVer != 0)
|
||||
{
|
||||
// Was it obsoleted?
|
||||
bool Obsoleted = false;
|
||||
for (pkgCache::DepIterator D = I.RevDependsList(); D.end() == false; D++)
|
||||
{
|
||||
if (D->Type == pkgCache::Dep::Obsoletes &&
|
||||
Cache[D.ParentPkg()].CandidateVer != 0 &&
|
||||
Cache[D.ParentPkg()].CandidateVerIter(Cache).Downloadable() == true &&
|
||||
(pkgCache::Version*)D.ParentVer() == Cache[D.ParentPkg()].CandidateVer &&
|
||||
Cache.VS().CheckDep(I.CurrentVer().VerStr(), D) == true)
|
||||
{
|
||||
Cache.MarkInstall(D.ParentPkg(),false);
|
||||
Obsoleted = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (Obsoleted == false)
|
||||
Cache.MarkInstall(I,false);
|
||||
}
|
||||
}
|
||||
|
||||
pkgProblemResolver Fix(Cache);
|
||||
pkgProblemResolver Fix(&Cache);
|
||||
|
||||
// CNC:2002-07-04
|
||||
_system->ProcessCache(Cache,Fix);
|
||||
|
||||
// Hold back held packages.
|
||||
if (_config->FindB("APT::Ignore-Hold",false) == false)
|
||||
@ -302,6 +404,10 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
|
||||
}
|
||||
}
|
||||
|
||||
// CNC:2002-08-08
|
||||
if (_config->FindB("APT::Remove-Depends",false) == true)
|
||||
Fix.RemoveDepends();
|
||||
|
||||
return Fix.Resolve();
|
||||
}
|
||||
/*}}}*/
|
||||
@ -312,7 +418,7 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
|
||||
to install packages not marked for install */
|
||||
bool pkgAllUpgrade(pkgDepCache &Cache)
|
||||
{
|
||||
pkgProblemResolver Fix(Cache);
|
||||
pkgProblemResolver Fix(&Cache);
|
||||
|
||||
if (Cache.BrokenCount() != 0)
|
||||
return false;
|
||||
@ -323,15 +429,16 @@ bool pkgAllUpgrade(pkgDepCache &Cache)
|
||||
if (Cache[I].Install() == true)
|
||||
Fix.Protect(I);
|
||||
|
||||
if (_config->FindB("APT::Ignore-Hold",false) == false) {
|
||||
if (_config->FindB("APT::Ignore-Hold",false) == false)
|
||||
if (I->SelectedState == pkgCache::State::Hold)
|
||||
continue;
|
||||
}
|
||||
|
||||
if (I->CurrentVer != 0 && Cache[I].InstallVer != 0) {
|
||||
if (I->CurrentVer != 0 && Cache[I].InstallVer != 0)
|
||||
Cache.MarkInstall(I,false);
|
||||
}
|
||||
}
|
||||
|
||||
// CNC:2002-07-04
|
||||
_system->ProcessCache(Cache,Fix);
|
||||
|
||||
return Fix.ResolveByKeep();
|
||||
}
|
||||
@ -346,7 +453,7 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache)
|
||||
if (Cache.BrokenCount() != 0)
|
||||
return false;
|
||||
|
||||
// We loop indefinately to get the minimal set size.
|
||||
// We loop for 10 tries to get the minimal set size.
|
||||
bool Change = false;
|
||||
unsigned int Count = 0;
|
||||
do
|
||||
@ -374,7 +481,7 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache)
|
||||
while (Change == true && Count < 10);
|
||||
|
||||
if (Cache.BrokenCount() != 0)
|
||||
return _error->Error(_("Internal Error in pkgMinimizeUpgrade"));
|
||||
return _error->Error("Internal Error in pkgMinimizeUpgrade");
|
||||
|
||||
return true;
|
||||
}
|
||||
@ -383,10 +490,10 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache)
|
||||
// ProblemResolver::pkgProblemResolver - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
pkgProblemResolver::pkgProblemResolver(pkgDepCache &Cache) : Cache(Cache)
|
||||
pkgProblemResolver::pkgProblemResolver(pkgDepCache *pCache) : Cache(*pCache)
|
||||
{
|
||||
// Allocate memory
|
||||
unsigned long Size = Cache.HeaderP->PackageCount;
|
||||
unsigned long Size = Cache.Head().PackageCount;
|
||||
Scores = new signed short[Size];
|
||||
Flags = new unsigned char[Size];
|
||||
memset(Flags,0,sizeof(*Flags)*Size);
|
||||
@ -395,6 +502,15 @@ pkgProblemResolver::pkgProblemResolver(pkgDepCache &Cache) : Cache(Cache)
|
||||
Debug = _config->FindB("Debug::pkgProblemResolver",false);
|
||||
}
|
||||
/*}}}*/
|
||||
// ProblemResolver::~pkgProblemResolver - Destructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
pkgProblemResolver::~pkgProblemResolver()
|
||||
{
|
||||
delete [] Scores;
|
||||
delete [] Flags;
|
||||
}
|
||||
/*}}}*/
|
||||
// ProblemResolver::ScoreSort - Sort the list by score /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
@ -414,7 +530,7 @@ int pkgProblemResolver::ScoreSort(const void *a,const void *b)
|
||||
/* */
|
||||
void pkgProblemResolver::MakeScores()
|
||||
{
|
||||
unsigned long Size = Cache.HeaderP->PackageCount;
|
||||
unsigned long Size = Cache.Head().PackageCount;
|
||||
memset(Scores,0,sizeof(*Scores)*Size);
|
||||
|
||||
// Generate the base scores for a package based on its properties
|
||||
@ -458,7 +574,7 @@ void pkgProblemResolver::MakeScores()
|
||||
}
|
||||
|
||||
// Copy the scores to advoid additive looping
|
||||
signed short *OldScores = new signed short[Size];
|
||||
SPtrArray<signed short> OldScores = new signed short[Size];
|
||||
memcpy(OldScores,Scores,sizeof(*Scores)*Size);
|
||||
|
||||
/* Now we cause 1 level of dependency inheritance, that is we add the
|
||||
@ -502,8 +618,6 @@ void pkgProblemResolver::MakeScores()
|
||||
if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
|
||||
Scores[I->ID] += 5000;
|
||||
}
|
||||
|
||||
delete [] OldScores;
|
||||
}
|
||||
/*}}}*/
|
||||
// ProblemResolver::DoUpgrade - Attempt to upgrade this package /*{{{*/
|
||||
@ -514,6 +628,8 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
|
||||
{
|
||||
if ((Flags[Pkg->ID] & Upgradable) == 0 || Cache[Pkg].Upgradable() == false)
|
||||
return false;
|
||||
if ((Flags[Pkg->ID] & Protected) == Protected)
|
||||
return false;
|
||||
|
||||
Flags[Pkg->ID] &= ~Upgradable;
|
||||
|
||||
@ -581,8 +697,8 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
|
||||
{
|
||||
/* We let the algorithm deal with conflicts on its next iteration,
|
||||
it is much smarter than us */
|
||||
if (Start->Type == pkgCache::Dep::Conflicts
|
||||
|| Start->Type == pkgCache::Dep::Obsoletes)
|
||||
if (Start->Type == pkgCache::Dep::Conflicts ||
|
||||
Start->Type == pkgCache::Dep::Obsoletes)
|
||||
break;
|
||||
|
||||
if (Debug == true)
|
||||
@ -630,7 +746,7 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
|
||||
upgrade packages to advoid problems. */
|
||||
bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
{
|
||||
unsigned long Size = Cache.HeaderP->PackageCount;
|
||||
unsigned long Size = Cache.Head().PackageCount;
|
||||
|
||||
// Record which packages are marked for install
|
||||
bool Again = false;
|
||||
@ -666,7 +782,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
operates from highest score to lowest. This prevents problems when
|
||||
high score packages cause the removal of lower score packages that
|
||||
would cause the removal of even lower score packages. */
|
||||
pkgCache::Package **PList = new pkgCache::Package *[Size];
|
||||
SPtrArray<pkgCache::Package *> PList = new pkgCache::Package *[Size];
|
||||
pkgCache::Package **PEnd = PList;
|
||||
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
|
||||
*PEnd++ = I;
|
||||
@ -728,13 +844,16 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
if (Cache[I].InstallVer == 0 || Cache[I].InstBroken() == false)
|
||||
continue;
|
||||
|
||||
if (Debug == true)
|
||||
cout << "Investigating " << I.Name() << endl;
|
||||
|
||||
// Isolate the problem dependency
|
||||
PackageKill KillList[100];
|
||||
PackageKill *LEnd = KillList;
|
||||
bool InOr = false;
|
||||
pkgCache::DepIterator Start;
|
||||
pkgCache::DepIterator End;
|
||||
PackageKill *OldEnd = 0;
|
||||
PackageKill *OldEnd = LEnd;
|
||||
|
||||
enum {OrRemove,OrKeep} OrOp = OrRemove;
|
||||
for (pkgCache::DepIterator D = Cache[I].InstVerIter(Cache).DependsList();
|
||||
@ -749,27 +868,46 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
if (OldEnd == LEnd && OrOp == OrRemove)
|
||||
{
|
||||
if ((Flags[I->ID] & Protected) != Protected)
|
||||
{
|
||||
if (Debug == true)
|
||||
clog << " Or group remove for " << I.Name() << endl;
|
||||
Cache.MarkDelete(I);
|
||||
Change = true;
|
||||
}
|
||||
}
|
||||
if (OldEnd == LEnd && OrOp == OrKeep)
|
||||
{
|
||||
if (Debug == true)
|
||||
clog << " Or group keep for " << I.Name() << endl;
|
||||
Cache.MarkKeep(I);
|
||||
Change = true;
|
||||
}
|
||||
}
|
||||
|
||||
/* We do an extra loop (as above) to finalize the or group
|
||||
processing */
|
||||
InOr = false;
|
||||
OrOp = OrRemove;
|
||||
D.GlobOr(Start,End);
|
||||
if (Start.end() == true)
|
||||
break;
|
||||
|
||||
// We only worry about critical deps.
|
||||
if (End.IsCritical() != true)
|
||||
continue;
|
||||
|
||||
InOr = Start != End;
|
||||
OldEnd = LEnd;
|
||||
}
|
||||
else
|
||||
Start++;
|
||||
|
||||
// We only worry about critical deps.
|
||||
if (End.IsCritical() != true)
|
||||
continue;
|
||||
|
||||
// Dep is ok
|
||||
if ((Cache[End] & pkgDepCache::DepGInstall) == pkgDepCache::DepGInstall)
|
||||
{
|
||||
InOr = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Debug == true)
|
||||
clog << "Package " << I.Name() << " has broken dep on " << Start.TargetPkg().Name() << endl;
|
||||
@ -777,8 +915,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
/* Look across the version list. If there are no possible
|
||||
targets then we keep the package and bail. This is necessary
|
||||
if a package has a dep on another package that cant be found */
|
||||
pkgCache::Version **VList = Start.AllTargets();
|
||||
|
||||
SPtrArray<pkgCache::Version *> VList = Start.AllTargets();
|
||||
if (*VList == 0 && (Flags[I->ID] & Protected) != Protected &&
|
||||
Start->Type != pkgCache::Dep::Conflicts &&
|
||||
Start->Type != pkgCache::Dep::Obsoletes &&
|
||||
@ -805,6 +942,9 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
if (Debug == true)
|
||||
clog << " Considering " << Pkg.Name() << ' ' << (int)Scores[Pkg->ID] <<
|
||||
" as a solution to " << I.Name() << ' ' << (int)Scores[I->ID] << endl;
|
||||
|
||||
/* Try to fix the package under consideration rather than
|
||||
fiddle with the VList package */
|
||||
if (Scores[I->ID] <= Scores[Pkg->ID] ||
|
||||
((Cache[Start] & pkgDepCache::DepNow) == 0 &&
|
||||
End->Type != pkgCache::Dep::Conflicts &&
|
||||
@ -813,9 +953,9 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
// Try a little harder to fix protected packages..
|
||||
if ((Flags[I->ID] & Protected) == Protected)
|
||||
{
|
||||
|
||||
if (DoUpgrade(Pkg) == true)
|
||||
{
|
||||
if (Scores[Pkg->ID] > Scores[I->ID])
|
||||
Scores[Pkg->ID] = Scores[I->ID];
|
||||
break;
|
||||
}
|
||||
@ -847,20 +987,17 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
// Consider other options
|
||||
if (InOr == false)
|
||||
{
|
||||
pkgCache::Version **VV = V;
|
||||
VV++;
|
||||
if (*VV!=0) {
|
||||
// Consider other options that might not be in a Or
|
||||
continue;
|
||||
}
|
||||
if (Debug == true)
|
||||
clog << " Removing " << I.Name() << " rather than change " << Start.TargetPkg().Name() << endl;
|
||||
Cache.MarkDelete(I);
|
||||
if (Counter > 1)
|
||||
{
|
||||
if (Scores[Pkg->ID] > Scores[I->ID])
|
||||
Scores[I->ID] = Scores[Pkg->ID];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Change = true;
|
||||
Done = true;
|
||||
@ -868,25 +1005,39 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
}
|
||||
else
|
||||
{
|
||||
/* This is a conflicts, and the version we are looking
|
||||
at is not the currently selected version of the
|
||||
package, which means it is not necessary to
|
||||
remove/keep */
|
||||
if (Cache[Pkg].InstallVer != Ver &&
|
||||
(Start->Type == pkgCache::Dep::Conflicts ||
|
||||
Start->Type == pkgCache::Dep::Obsoletes))
|
||||
continue;
|
||||
|
||||
// Skip adding to the kill list if it is protected
|
||||
if ((Flags[Pkg->ID] & Protected) != 0)
|
||||
continue;
|
||||
|
||||
if (*(V+1) != 0)//xxx look for other solutions?
|
||||
if (Debug == true)
|
||||
clog << " Added " << Pkg.Name() << " to the remove list" << endl;
|
||||
|
||||
// CNC:2002-07-09
|
||||
if (*(V+1) != 0) //XXX Look for other solutions?
|
||||
continue;
|
||||
|
||||
LEnd->Pkg = Pkg;
|
||||
LEnd->Dep = End;
|
||||
LEnd++;
|
||||
|
||||
if (Start->Type != pkgCache::Dep::Conflicts
|
||||
|| Start->Type != pkgCache::Dep::Obsoletes)
|
||||
if (Start->Type != pkgCache::Dep::Conflicts &&
|
||||
Start->Type != pkgCache::Dep::Obsoletes)
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Hm, nothing can possibly satisify this dep. Nuke it.
|
||||
if (VList[0] == 0 && Start->Type != pkgCache::Dep::Conflicts &&
|
||||
if (VList[0] == 0 &&
|
||||
Start->Type != pkgCache::Dep::Conflicts &&
|
||||
Start->Type != pkgCache::Dep::Obsoletes &&
|
||||
(Flags[I->ID] & Protected) != Protected)
|
||||
{
|
||||
@ -917,8 +1068,6 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
Done = true;
|
||||
}
|
||||
|
||||
delete [] VList;
|
||||
|
||||
// Try some more
|
||||
if (InOr == true)
|
||||
continue;
|
||||
@ -935,40 +1084,34 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
Change = true;
|
||||
if ((Cache[J->Dep] & pkgDepCache::DepGNow) == 0)
|
||||
{
|
||||
if (J->Dep->Type == pkgCache::Dep::Conflicts
|
||||
|| J->Dep->Type == pkgCache::Dep::Obsoletes)
|
||||
if (J->Dep->Type == pkgCache::Dep::Conflicts ||
|
||||
J->Dep->Type == pkgCache::Dep::Obsoletes)
|
||||
{
|
||||
if (Debug == true)
|
||||
clog << " Fixing " << I.Name() << " via remove of " << J->Pkg.Name() << endl;
|
||||
Cache.MarkDelete(J->Pkg, J->Dep->Type == pkgCache::Dep::Obsoletes);
|
||||
Cache.MarkDelete(J->Pkg);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (Cache[J->Pkg].Install()) {
|
||||
if (Debug == true)
|
||||
clog << " Fixing " << I.Name() << " via keep of " << J->Pkg.Name() << endl;
|
||||
Cache.MarkKeep(J->Pkg);
|
||||
} else {
|
||||
if (Debug == true)
|
||||
clog << " Fixing " << I.Name() << " via install of " << J->Pkg.Name() << endl;
|
||||
Cache.MarkInstall(J->Pkg, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (Counter > 1)
|
||||
{
|
||||
if (Scores[I->ID] > Scores[J->Pkg->ID])
|
||||
Scores[J->Pkg->ID] = Scores[I->ID];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Debug == true)
|
||||
clog << "Done" << endl;
|
||||
|
||||
delete [] Scores;
|
||||
delete [] PList;
|
||||
|
||||
if (Cache.BrokenCount() != 0)
|
||||
{
|
||||
// See if this is the result of a hold
|
||||
@ -993,7 +1136,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
|
||||
system was non-broken previously. */
|
||||
bool pkgProblemResolver::ResolveByKeep()
|
||||
{
|
||||
unsigned long Size = Cache.HeaderP->PackageCount;
|
||||
unsigned long Size = Cache.Head().PackageCount;
|
||||
|
||||
if (Debug == true)
|
||||
clog << "Entering ResolveByKeep" << endl;
|
||||
@ -1029,7 +1172,7 @@ bool pkgProblemResolver::ResolveByKeep()
|
||||
Cache.MarkKeep(I);
|
||||
if (Cache[I].InstBroken() == false)
|
||||
{
|
||||
K = PList;
|
||||
K = PList - 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@ -1037,17 +1180,9 @@ bool pkgProblemResolver::ResolveByKeep()
|
||||
// Isolate the problem dependencies
|
||||
for (pkgCache::DepIterator D = Cache[I].InstVerIter(Cache).DependsList(); D.end() == false;)
|
||||
{
|
||||
// Compute a single dependency element (glob or)
|
||||
pkgCache::DepIterator Start = D;
|
||||
pkgCache::DepIterator End = D;
|
||||
unsigned char State = 0;
|
||||
for (bool LastOR = true; D.end() == false && LastOR == true; D++)
|
||||
{
|
||||
State |= Cache[D];
|
||||
LastOR = (D->CompareOp & pkgCache::Dep::Or) == pkgCache::Dep::Or;
|
||||
if (LastOR == true)
|
||||
End = D;
|
||||
}
|
||||
DepIterator Start;
|
||||
DepIterator End;
|
||||
D.GlobOr(Start,End);
|
||||
|
||||
// We only worry about critical deps.
|
||||
if (End.IsCritical() != true)
|
||||
@ -1057,20 +1192,17 @@ bool pkgProblemResolver::ResolveByKeep()
|
||||
if ((Cache[End] & pkgDepCache::DepGInstall) == pkgDepCache::DepGInstall)
|
||||
continue;
|
||||
|
||||
// Hm, the group is broken.. I have no idea how to handle this
|
||||
if (Start != End)
|
||||
/* Hm, the group is broken.. I suppose the best thing to do is to
|
||||
is to try every combination of keep/not-keep for the set, but thats
|
||||
slow, and this never happens, just be conservative and assume the
|
||||
list of ors is in preference and keep till it starts to work. */
|
||||
while (true)
|
||||
{
|
||||
clog << _("Note, a broken package or group was found in ") << I.Name() << "." << endl;
|
||||
if ((Flags[I->ID] & Protected) == 0)
|
||||
Cache.MarkKeep(I);
|
||||
break;
|
||||
}
|
||||
|
||||
if (Debug == true)
|
||||
clog << "Package " << I.Name() << " has broken dep on " << End.TargetPkg().Name() << endl;
|
||||
clog << "Package " << I.Name() << " has broken dep on " << Start.TargetPkg().Name() << endl;
|
||||
|
||||
// Look at all the possible provides on this package
|
||||
pkgCache::Version **VList = End.AllTargets();
|
||||
SPtrArray<pkgCache::Version *> VList = Start.AllTargets();
|
||||
for (pkgCache::Version **V = VList; *V != 0; V++)
|
||||
{
|
||||
pkgCache::VerIterator Ver(Cache,*V);
|
||||
@ -1081,7 +1213,8 @@ bool pkgProblemResolver::ResolveByKeep()
|
||||
Pkg->CurrentVer == 0)
|
||||
continue;
|
||||
|
||||
if ((Flags[I->ID] & Protected) == 0)
|
||||
// CNC:2002-08-05
|
||||
if ((Flags[Pkg->ID] & Protected) == 0)
|
||||
{
|
||||
if (Debug == true)
|
||||
clog << " Keeping Package " << Pkg.Name() << " due to dep" << endl;
|
||||
@ -1092,6 +1225,14 @@ bool pkgProblemResolver::ResolveByKeep()
|
||||
break;
|
||||
}
|
||||
|
||||
if (Cache[I].InstBroken() == false)
|
||||
break;
|
||||
|
||||
if (Start == End)
|
||||
break;
|
||||
Start++;
|
||||
}
|
||||
|
||||
if (Cache[I].InstBroken() == false)
|
||||
break;
|
||||
}
|
||||
@ -1103,11 +1244,12 @@ bool pkgProblemResolver::ResolveByKeep()
|
||||
if (K == LastStop)
|
||||
return _error->Error("Internal Error, pkgProblemResolver::ResolveByKeep is looping on package %s.",I.Name());
|
||||
LastStop = K;
|
||||
K = PList;
|
||||
K = PList - 1;
|
||||
}
|
||||
|
||||
return true;
|
||||
} /*}}}*/
|
||||
}
|
||||
/*}}}*/
|
||||
// ProblemResolver::InstallProtect - Install all protected packages /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is used to make sure protected packages are installed */
|
||||
@ -1125,53 +1267,143 @@ void pkgProblemResolver::InstallProtect()
|
||||
}
|
||||
}
|
||||
/*}}}*/
|
||||
#if 0
|
||||
|
||||
class pkgMyUpgrader {
|
||||
pkgCache::Package *packL;
|
||||
|
||||
void topoSort();
|
||||
|
||||
public:
|
||||
pkgMyUpgrader(pkgDepCache &Cache);
|
||||
~pkgMyUpgrader();
|
||||
};
|
||||
|
||||
|
||||
pkgMyUpgrader::pkgMyUpgrader(pkgDepCache &Cache)
|
||||
// CNC:2002-08-01
|
||||
// ProblemSolver::RemoveDepends - Remove dependencies selectively /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
// This will remove every dependency which is required only by packages
|
||||
// already being removed. This will allow one to reverse the effect a
|
||||
// task package, for example.
|
||||
bool pkgProblemResolver::RemoveDepends()
|
||||
{
|
||||
unsigned long size = Cache.HeaderP->PackageCount;
|
||||
int i;
|
||||
bool Debug = _config->FindB("Debug::pkgRemoveDepends",false);
|
||||
bool MoreSteps = true;
|
||||
while (MoreSteps == true)
|
||||
{
|
||||
MoreSteps = false;
|
||||
for (pkgCache::PkgIterator Pkg = Cache.PkgBegin();
|
||||
Pkg.end() == false; Pkg++)
|
||||
{
|
||||
if (Cache[Pkg].Delete() == false)
|
||||
continue;
|
||||
for (pkgCache::DepIterator D = Pkg.CurrentVer().DependsList();
|
||||
D.end() == false; D++)
|
||||
{
|
||||
if (D->Type != pkgCache::Dep::Depends &&
|
||||
D->Type != pkgCache::Dep::PreDepends)
|
||||
continue;
|
||||
|
||||
packL = new PackInfo[size];
|
||||
pkgCache::PkgIterator DPkg = D.TargetPkg();
|
||||
if (DPkg->CurrentVer == 0 || Cache[DPkg].Delete() == true)
|
||||
continue;
|
||||
if ((Flags[DPkg->ID] & Protected) == Protected)
|
||||
continue;
|
||||
|
||||
// topologically sort the packages, so that if A depends on B,
|
||||
// then index of B is < index of A
|
||||
topoSort();
|
||||
bool Remove = true;
|
||||
|
||||
// Check if another package not being removed or being
|
||||
// installed requires this dependency.
|
||||
for (pkgCache::DepIterator R = DPkg.RevDependsList();
|
||||
R.end() == false; R++)
|
||||
{
|
||||
pkgCache::PkgIterator RPkg = R.ParentPkg();
|
||||
|
||||
if (R->Type != pkgCache::Dep::Depends &&
|
||||
R->Type != pkgCache::Dep::PreDepends)
|
||||
continue;
|
||||
|
||||
if ((Cache[RPkg].Install() &&
|
||||
(pkgCache::Version*)R.ParentVer() == Cache[RPkg].InstallVer &&
|
||||
Cache.VS().CheckDep(DPkg.CurrentVer().VerStr(), R) == true) ||
|
||||
(RPkg->CurrentVer != 0 &&
|
||||
Cache[RPkg].Install() == false &&
|
||||
Cache[RPkg].Delete() == false &&
|
||||
Cache.VS().CheckDep(DPkg.CurrentVer().VerStr(), R) == true))
|
||||
{
|
||||
Remove = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (Remove == false)
|
||||
continue;
|
||||
|
||||
|
||||
pkgMyUpgrader::~pkgMyUpgrader()
|
||||
// Also check every virtual package provided by this
|
||||
// dependency is required by packages not being removed,
|
||||
// or being installed.
|
||||
for (pkgCache::PrvIterator P = DPkg.CurrentVer().ProvidesList();
|
||||
P.end() == false; P++)
|
||||
{
|
||||
free(packL);
|
||||
pkgCache::PkgIterator PPkg = P.ParentPkg();
|
||||
for (pkgCache::DepIterator R = PPkg.RevDependsList();
|
||||
R.end() == false; R++)
|
||||
{
|
||||
pkgCache::PkgIterator RPkg = R.ParentPkg();
|
||||
|
||||
if (R->Type != pkgCache::Dep::Depends &&
|
||||
R->Type != pkgCache::Dep::PreDepends)
|
||||
continue;
|
||||
|
||||
if ((Cache[RPkg].Install() &&
|
||||
(pkgCache::Version*)R.ParentVer() == Cache[RPkg].InstallVer &&
|
||||
Cache.VS().CheckDep(P.ProvideVersion(), R) == true) ||
|
||||
(RPkg->CurrentVer != 0 &&
|
||||
Cache[RPkg].Install() == false &&
|
||||
Cache[RPkg].Delete() == false &&
|
||||
Cache.VS().CheckDep(P.ProvideVersion(), R) == true))
|
||||
{
|
||||
Remove = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Remove == false)
|
||||
continue;
|
||||
|
||||
if (Debug == true)
|
||||
clog << "Marking " << DPkg.Name() << " as a removable dependency of " << Pkg.Name() << endl;
|
||||
|
||||
void pkgMyUpgrader::topoSort()
|
||||
{
|
||||
char *colours = new char[Size];
|
||||
Cache.MarkDelete(DPkg);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
delete [] colours;
|
||||
// Do at least one more step, to ensure that packages which
|
||||
// were being hold because of this one also get removed.
|
||||
MoreSteps = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
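A short usage sketch for the new RemoveDepends() hook (illustrative only; the package name is hypothetical and the pkgCacheFile is assumed to be open already):

   pkgProblemResolver Fix(Cache);                 // new pointer-taking constructor
   pkgCache &PkgCache = Cache;                    // pkgCacheFile converts to pkgCache &
   pkgCache::PkgIterator Task = PkgCache.FindPkg("task-office");
   if (Task.end() == false)
   {
      Cache->MarkDelete(Task);                    // remove the task package itself
      Fix.RemoveDepends();                        // ...and whatever only it depended on
   }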
// PrioSortList - Sort a list of versions by priority /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* This is ment to be used in conjunction with AllTargets to get a list
|
||||
of versions ordered by preference. */
|
||||
static pkgCache *PrioCache;
|
||||
static int PrioComp(const void *A,const void *B)
|
||||
{
|
||||
pkgCache::VerIterator L(*PrioCache,*(pkgCache::Version **)A);
|
||||
pkgCache::VerIterator R(*PrioCache,*(pkgCache::Version **)B);
|
||||
|
||||
// CNC:2002-11-27
|
||||
if ((R.ParentPkg()->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential &&
|
||||
(L.ParentPkg()->Flags & pkgCache::Flag::Essential) != pkgCache::Flag::Essential)
|
||||
return 1;
|
||||
if ((R.ParentPkg()->Flags & pkgCache::Flag::Essential) != pkgCache::Flag::Essential &&
|
||||
(L.ParentPkg()->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
|
||||
return -1;
|
||||
|
||||
|
||||
|
||||
#endif
|
||||
if (L->Priority != R->Priority)
|
||||
return R->Priority - L->Priority;
|
||||
return strcmp(L.ParentPkg().Name(),R.ParentPkg().Name());
|
||||
}
|
||||
void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List)
|
||||
{
|
||||
unsigned long Count = 0;
|
||||
PrioCache = &Cache;
|
||||
for (pkgCache::Version **I = List; *I != 0; I++)
|
||||
Count++;
|
||||
qsort(List,Count,sizeof(*List),PrioComp);
|
||||
}
|
||||
/*}}}*/
|
||||
// vim:sts=3:sw=3
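An illustrative pairing of AllTargets() with the sorter above, as the comment suggests (assumes a pkgCache &Cache and a DepIterator D taken from it; not part of the diff):

   SPtrArray<pkgCache::Version *> VList = D.AllTargets();
   pkgPrioSortList(Cache, VList);                 // essential/high-priority targets first
   for (pkgCache::Version **V = VList; *V != 0; V++)
   {
      pkgCache::VerIterator Ver(Cache, *V);
      // first acceptable Ver wins
   }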
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: algorithms.h,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
|
||||
// $Id: algorithms.h,v 1.4 2003/01/29 13:04:48 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Algorithms - A set of misc algorithms
|
||||
@ -27,7 +27,6 @@
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Header section: pkglib
|
||||
#ifndef PKGLIB_ALGORITHMS_H
|
||||
#define PKGLIB_ALGORITHMS_H
|
||||
|
||||
@ -38,12 +37,31 @@
|
||||
#include <apt-pkg/packagemanager.h>
|
||||
#include <apt-pkg/depcache.h>
|
||||
|
||||
#include <iostream>
|
||||
|
||||
using std::ostream;
|
||||
|
||||
#ifndef SWIG
|
||||
class pkgSimulate : public pkgPackageManager
|
||||
{
|
||||
protected:
|
||||
|
||||
class Policy : public pkgDepCache::Policy
|
||||
{
|
||||
pkgDepCache *Cache;
|
||||
public:
|
||||
|
||||
virtual VerIterator GetCandidateVer(PkgIterator Pkg)
|
||||
{
|
||||
return (*Cache)[Pkg].CandidateVerIter(*Cache);
|
||||
}
|
||||
|
||||
Policy(pkgDepCache *Cache) : Cache(Cache) {};
|
||||
};
|
||||
|
||||
unsigned char *Flags;
|
||||
|
||||
Policy iPolicy;
|
||||
pkgDepCache Sim;
|
||||
|
||||
// The Actuall installation implementation
|
||||
@ -51,11 +69,13 @@ class pkgSimulate : public pkgPackageManager
|
||||
virtual bool Configure(PkgIterator Pkg);
|
||||
virtual bool Remove(PkgIterator Pkg,bool Purge);
|
||||
void ShortBreaks();
|
||||
void Describe(PkgIterator iPkg,ostream &out,bool Now);
|
||||
|
||||
public:
|
||||
|
||||
pkgSimulate(pkgDepCache &Cache);
|
||||
pkgSimulate(pkgDepCache *Cache);
|
||||
};
|
||||
#endif
|
||||
|
||||
class pkgProblemResolver
|
||||
{
|
||||
@ -101,7 +121,10 @@ class pkgProblemResolver
|
||||
|
||||
void InstallProtect();
|
||||
|
||||
pkgProblemResolver(pkgDepCache &Cache);
|
||||
bool RemoveDepends(); // CNC:2002-08-01
|
||||
|
||||
pkgProblemResolver(pkgDepCache *Cache);
|
||||
~pkgProblemResolver();
|
||||
};
|
||||
|
||||
bool pkgDistUpgrade(pkgDepCache &Cache);
|
||||
@ -110,4 +133,6 @@ bool pkgFixBroken(pkgDepCache &Cache);
|
||||
bool pkgAllUpgrade(pkgDepCache &Cache);
|
||||
bool pkgMinimizeUpgrade(pkgDepCache &Cache);
|
||||
|
||||
void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List);
|
||||
|
||||
#endif
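Both resolver classes now take a pkgDepCache pointer, so anything convertible to it, such as the pkgCacheFile wrapper further down, can be handed over directly. A minimal sketch, not from the commit:

   pkgCacheFile Cache;                    // converts to pkgDepCache *
   // ... Cache.Open(Progress) ...
   pkgSimulate Sim(Cache);                // was pkgSimulate(pkgDepCache &)
   pkgProblemResolver Fix(Cache);         // was pkgProblemResolver(pkgDepCache &)
   Fix.InstallProtect();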
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: cachefile.cc,v 1.8 2001/07/12 21:47:32 kojima Exp $
|
||||
// $Id: cachefile.cc,v 1.2 2002/07/25 18:07:18 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
CacheFile - Simple wrapper class for opening, generating and whatnot
|
||||
@ -16,26 +16,21 @@
|
||||
#pragma implementation "apt-pkg/cachefile.h"
|
||||
#endif
|
||||
|
||||
|
||||
#include <apt-pkg/cachefile.h>
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/sourcelist.h>
|
||||
#include <apt-pkg/pkgcachegen.h>
|
||||
#include <apt-pkg/configuration.h>
|
||||
#include <apt-pkg/systemfactory.h>
|
||||
|
||||
#include <i18n.h>
|
||||
#include <apt-pkg/policy.h>
|
||||
#include <apt-pkg/pkgsystem.h>
|
||||
|
||||
#include <apti18n.h>
|
||||
/*}}}*/
|
||||
|
||||
// CacheFile::CacheFile - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
pkgCacheFile::pkgCacheFile() : Map(0), Cache(0),
|
||||
#if 0//akk
|
||||
Lock(0),
|
||||
#endif
|
||||
RPM(0)
|
||||
pkgCacheFile::pkgCacheFile() : Map(0), Cache(0), DCache(0), Policy(0)
|
||||
{
|
||||
}
|
||||
/*}}}*/
|
||||
@ -44,36 +39,28 @@ RPM(0)
|
||||
/* */
|
||||
pkgCacheFile::~pkgCacheFile()
|
||||
{
|
||||
delete DCache;
|
||||
delete Policy;
|
||||
delete Cache;
|
||||
delete Map;
|
||||
#if 0//akk
|
||||
if (Lock)
|
||||
delete Lock;
|
||||
#endif
|
||||
if (RPM)
|
||||
delete RPM;
|
||||
_system->UnLock(true);
|
||||
}
|
||||
/*}}}*/
|
||||
// CacheFile::Open - Open the cache files, creating if necessary /*{{{*/
|
||||
// CacheFile::BuildCaches - Open and build the cache files /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
bool pkgCacheFile::Open(OpProgress &Progress,bool WithLock)
|
||||
bool pkgCacheFile::BuildCaches(OpProgress &Progress,bool WithLock)
|
||||
{
|
||||
if (WithLock == true)
|
||||
{
|
||||
#if 0 //akk
|
||||
if (0)//akk
|
||||
{
|
||||
Lock = new pkgDpkgLock;
|
||||
} else
|
||||
{
|
||||
if (_system->Lock() == false)
|
||||
return false;
|
||||
|
||||
}
|
||||
#endif
|
||||
}
|
||||
if (1) {
|
||||
RPM = new pkgRpmLock(WithLock);
|
||||
}
|
||||
// CNC:2002-07-06
|
||||
if (WithLock == false)
|
||||
_system->LockRead();
|
||||
|
||||
if (_config->FindB("Debug::NoLocking",false) == true)
|
||||
WithLock = false;
|
||||
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
@ -83,37 +70,43 @@ bool pkgCacheFile::Open(OpProgress &Progress,bool WithLock)
|
||||
if (List.ReadMainList() == false)
|
||||
return _error->Error(_("The list of sources could not be read."));
|
||||
|
||||
/* Build all of the caches, using the cache files if we are locking
|
||||
(ie as root) */
|
||||
if (WithLock == true)
|
||||
{
|
||||
_system->makeStatusCache(List, Progress);
|
||||
|
||||
// Read the caches
|
||||
bool Res = pkgMakeStatusCache(List,Progress,&Map,!WithLock);
|
||||
Progress.Done();
|
||||
if (_error->PendingError() == true)
|
||||
if (Res == false)
|
||||
return _error->Error(_("The package lists or status file could not be parsed or opened."));
|
||||
|
||||
/* This sux, remove it someday */
|
||||
if (_error->empty() == false)
|
||||
_error->Warning(_("You may want to run apt-get update to correct these missing files"));
|
||||
_error->Warning(_("You may want to run apt-get update to correct these problems"));
|
||||
|
||||
// Open the cache file
|
||||
FileFd File(_config->FindFile("Dir::Cache::pkgcache"),FileFd::ReadOnly);
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
|
||||
Map = new MMap(File,MMap::Public | MMap::ReadOnly);
|
||||
Cache = new pkgCache(Map);
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
else
|
||||
/*}}}*/
|
||||
// CacheFile::Open - Open the cache files, creating if necessary /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
bool pkgCacheFile::Open(OpProgress &Progress,bool WithLock)
|
||||
{
|
||||
Map = _system->makeStatusCacheMem(List,Progress);
|
||||
Progress.Done();
|
||||
if (Map == 0)
|
||||
if (BuildCaches(Progress,WithLock) == false)
|
||||
return false;
|
||||
|
||||
// The policy engine
|
||||
Policy = new pkgPolicy(Cache);
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
if (ReadPinFile(*Policy) == false)
|
||||
return false;
|
||||
}
|
||||
|
||||
// Create the dependency cache
|
||||
Cache = new pkgDepCache(*Map,Progress);
|
||||
DCache = new pkgDepCache(Cache,Policy);
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
|
||||
DCache->Init(&Progress);
|
||||
Progress.Done();
|
||||
if (_error->PendingError() == true)
|
||||
return false;
|
||||
@ -121,3 +114,21 @@ bool pkgCacheFile::Open(OpProgress &Progress,bool WithLock)
|
||||
return true;
|
||||
}
|
||||
/*}}}*/
|
||||
|
||||
// CacheFile::Close - close the cache files /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* */
|
||||
void pkgCacheFile::Close()
|
||||
{
|
||||
delete DCache;
|
||||
delete Policy;
|
||||
delete Cache;
|
||||
delete Map;
|
||||
_system->UnLock(true);
|
||||
|
||||
Map = 0;
|
||||
DCache = 0;
|
||||
Policy = 0;
|
||||
Cache = 0;
|
||||
}
|
||||
/*}}}*/
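A rough sketch of the split entry points (illustrative, not from the commit): BuildCaches() stops once the pkgCache exists, Open() goes on to build the policy and the dependency cache, and Close() tears it all down again.

   OpProgress Prog;
   pkgCacheFile CacheFile;
   if (CacheFile.BuildCaches(Prog, false) == false)   // read-only, no write lock
      return false;
   pkgCache &Cache = CacheFile;                       // the bare cache is usable here
   cout << Cache.Head().PackageCount << " packages" << endl;
   CacheFile.Close();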
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: cachefile.h,v 1.4 2001/07/12 21:47:32 kojima Exp $
|
||||
// $Id: cachefile.h,v 1.2 2002/07/25 18:07:18 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
CacheFile - Simple wrapper class for opening, generating and whatnot
|
||||
@ -9,6 +9,9 @@
|
||||
of caches. It can operate as root, as not root, show progress and so on,
|
||||
it transparently handles everything necessary.
|
||||
|
||||
This means it can rebuild caches from the source list and instantiates
|
||||
and prepares the standard policy mechanism.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
#ifndef PKGLIB_CACHEFILE_H
|
||||
@ -18,43 +21,34 @@
|
||||
#pragma interface "apt-pkg/cachefile.h"
|
||||
#endif
|
||||
|
||||
|
||||
#include <apt-pkg/depcache.h>
|
||||
#include <apt-pkg/dpkginit.h>
|
||||
#include <apt-pkg/rpminit.h>
|
||||
|
||||
class pkgPolicy;
|
||||
class pkgCacheFile
|
||||
{
|
||||
protected:
|
||||
|
||||
MMap *Map;
|
||||
pkgDepCache *Cache;
|
||||
#if 0//akk
|
||||
pkgDpkgLock *Lock;
|
||||
#endif
|
||||
pkgRpmLock *RPM;
|
||||
pkgCache *Cache;
|
||||
pkgDepCache *DCache;
|
||||
|
||||
public:
|
||||
|
||||
pkgPolicy *Policy;
|
||||
|
||||
// We look pretty much exactly like a pointer to a dep cache
|
||||
inline operator pkgDepCache &() {return *Cache;};
|
||||
inline operator pkgDepCache *() {return Cache;};
|
||||
inline pkgDepCache *operator ->() {return Cache;};
|
||||
inline pkgDepCache &operator *() {return *Cache;};
|
||||
inline pkgDepCache::StateCache &operator [](pkgCache::PkgIterator const &I) {return (*Cache)[I];};
|
||||
inline unsigned char &operator [](pkgCache::DepIterator const &I) {return (*Cache)[I];};
|
||||
|
||||
// Release the dpkg status lock
|
||||
inline void ReleaseLock() {
|
||||
#if 0//akk
|
||||
if (0)
|
||||
Lock->Close();
|
||||
else
|
||||
#endif
|
||||
RPM->Close();
|
||||
};//akk
|
||||
inline operator pkgCache &() {return *Cache;};
|
||||
inline operator pkgCache *() {return Cache;};
|
||||
inline operator pkgDepCache &() {return *DCache;};
|
||||
inline operator pkgDepCache *() {return DCache;};
|
||||
inline pkgDepCache *operator ->() {return DCache;};
|
||||
inline pkgDepCache &operator *() {return *DCache;};
|
||||
inline pkgDepCache::StateCache &operator [](pkgCache::PkgIterator const &I) {return (*DCache)[I];};
|
||||
inline unsigned char &operator [](pkgCache::DepIterator const &I) {return (*DCache)[I];};
|
||||
|
||||
bool BuildCaches(OpProgress &Progress,bool WithLock = true);
|
||||
bool Open(OpProgress &Progress,bool WithLock = true);
|
||||
void Close();
|
||||
|
||||
pkgCacheFile();
|
||||
~pkgCacheFile();
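The operator set lets the wrapper stand in for whichever cache a caller needs; an illustrative fragment (not part of the diff):

   pkgCacheFile Cache;
   OpProgress Prog;
   if (Cache.Open(Prog, true) == false)               // with the write lock
      return false;
   for (pkgCache::PkgIterator I = Cache->PkgBegin(); I.end() == false; I++)
      if (Cache[I].Upgradable() == true)              // StateCache via operator []
         Cache->MarkInstall(I, true);                 // pkgDepCache via operator ->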
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: cacheiterators.h,v 1.2 2000/09/20 15:20:06 kojima Exp $
|
||||
// $Id: cacheiterators.h,v 1.1 2002/07/23 17:54:50 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Cache Iterators - Iterators for navigating the cache structure
|
||||
@ -28,7 +28,6 @@
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Header section: pkglib
|
||||
#ifndef PKGLIB_CACHEITERATORS_H
|
||||
#define PKGLIB_CACHEITERATORS_H
|
||||
|
||||
@ -39,10 +38,20 @@
|
||||
// Package Iterator
|
||||
class pkgCache::PkgIterator
|
||||
{
|
||||
friend class pkgCache;
|
||||
Package *Pkg;
|
||||
pkgCache *Owner;
|
||||
long HashIndex;
|
||||
|
||||
protected:
|
||||
|
||||
// This constructor is the 'begin' constructor, never use it.
|
||||
inline PkgIterator(pkgCache &Owner) : Owner(&Owner), HashIndex(-1)
|
||||
{
|
||||
Pkg = Owner.PkgP;
|
||||
operator ++(0);
|
||||
};
|
||||
|
||||
public:
|
||||
|
||||
enum OkState {NeedsNothing,NeedsUnpack,NeedsConfigure};
|
||||
@ -62,10 +71,10 @@ class pkgCache::PkgIterator
|
||||
inline Package const &operator *() const {return *Pkg;};
|
||||
inline operator Package *() {return Pkg == Owner->PkgP?0:Pkg;};
|
||||
inline operator Package const *() const {return Pkg == Owner->PkgP?0:Pkg;};
|
||||
inline pkgCache *Cache() {return Owner;};
|
||||
|
||||
inline const char *Name() const {return Pkg->Name == 0?0:Owner->StrP + Pkg->Name;};
|
||||
inline const char *Section() const {return Pkg->Section == 0?0:Owner->StrP + Pkg->Section;};
|
||||
inline const char *TargetDist() const {return Pkg->TargetDist == 0?0:Owner->StrP + Pkg->TargetDist;};
|
||||
inline bool Purge() const {return Pkg->CurrentState == pkgCache::State::Purge ||
|
||||
(Pkg->CurrentVer == 0 && Pkg->CurrentState == pkgCache::State::NotInstalled);};
|
||||
inline VerIterator VersionList() const;
|
||||
@ -77,11 +86,6 @@ class pkgCache::PkgIterator
|
||||
OkState State() const;
|
||||
|
||||
// Constructors
|
||||
inline PkgIterator(pkgCache &Owner) : Owner(&Owner), HashIndex(-1)
|
||||
{
|
||||
Pkg = Owner.PkgP;
|
||||
operator ++(0);
|
||||
};
|
||||
inline PkgIterator(pkgCache &Owner,Package *Trg) : Pkg(Trg), Owner(&Owner),
|
||||
HashIndex(0)
|
||||
{
|
||||
@ -119,6 +123,7 @@ class pkgCache::VerIterator
|
||||
inline Version const &operator *() const {return *Ver;};
|
||||
inline operator Version *() {return Ver == Owner->VerP?0:Ver;};
|
||||
inline operator Version const *() const {return Ver == Owner->VerP?0:Ver;};
|
||||
inline pkgCache *Cache() {return Owner;};
|
||||
|
||||
inline const char *VerStr() const {return Ver->VerStr == 0?0:Owner->StrP + Ver->VerStr;};
|
||||
inline const char *Section() const {return Ver->Section == 0?0:Owner->StrP + Ver->Section;};
|
||||
@ -129,7 +134,8 @@ class pkgCache::VerIterator
|
||||
inline VerFileIterator FileList() const;
|
||||
inline unsigned long Index() const {return Ver - Owner->VerP;};
|
||||
bool Downloadable() const;
|
||||
const char *PriorityType();
|
||||
inline const char *PriorityType() {return Owner->Priority(Ver->Priority);};
|
||||
string RelStr();
|
||||
|
||||
bool Automatic() const;
|
||||
VerFileIterator NewestFile() const;
|
||||
@ -171,10 +177,11 @@ class pkgCache::DepIterator
|
||||
inline Dependency const &operator *() const {return *Dep;};
|
||||
inline operator Dependency *() {return Dep == Owner->DepP?0:Dep;};
|
||||
inline operator Dependency const *() const {return Dep == Owner->DepP?0:Dep;};
|
||||
inline pkgCache *Cache() {return Owner;};
|
||||
|
||||
inline const char *TargetVer() const {return Dep->Version == 0?0:Owner->StrP + Dep->Version;};
|
||||
inline PkgIterator TargetPkg() {return PkgIterator(*Owner,Owner->PkgP + Dep->Package);};
|
||||
inline PkgIterator SmartTargetPkg() {PkgIterator R(*Owner);SmartTargetPkg(R);return R;};
|
||||
inline PkgIterator SmartTargetPkg() {PkgIterator R(*Owner,0);SmartTargetPkg(R);return R;};
|
||||
inline VerIterator ParentVer() {return VerIterator(*Owner,Owner->VerP + Dep->ParentVer);};
|
||||
inline PkgIterator ParentPkg() {return PkgIterator(*Owner,Owner->PkgP + Owner->VerP[Dep->ParentVer].ParentPkg);};
|
||||
inline bool Reverse() {return Type == DepRev;};
|
||||
@ -183,8 +190,8 @@ class pkgCache::DepIterator
|
||||
void GlobOr(DepIterator &Start,DepIterator &End);
|
||||
Version **AllTargets();
|
||||
bool SmartTargetPkg(PkgIterator &Result);
|
||||
const char *CompType();
|
||||
const char *DepType();
|
||||
inline const char *CompType() {return Owner->CompType(Dep->CompareOp);};
|
||||
inline const char *DepType() {return Owner->DepType(Dep->Type);};
|
||||
|
||||
inline DepIterator(pkgCache &Owner,Dependency *Trg,Version * = 0) :
|
||||
Dep(Trg), Type(DepVer), Owner(&Owner)
|
||||
@ -229,6 +236,7 @@ class pkgCache::PrvIterator
|
||||
inline Provides const &operator *() const {return *Prv;};
|
||||
inline operator Provides *() {return Prv == Owner->ProvideP?0:Prv;};
|
||||
inline operator Provides const *() const {return Prv == Owner->ProvideP?0:Prv;};
|
||||
inline pkgCache *Cache() {return Owner;};
|
||||
|
||||
inline const char *Name() const {return Owner->StrP + Owner->PkgP[Prv->ParentPkg].Name;};
|
||||
inline const char *ProvideVersion() const {return Prv->ProvideVersion == 0?0:Owner->StrP + Prv->ProvideVersion;};
|
||||
@ -274,6 +282,7 @@ class pkgCache::PkgFileIterator
|
||||
inline PackageFile const &operator *() const {return *File;};
|
||||
inline operator PackageFile *() {return File == Owner->PkgFileP?0:File;};
|
||||
inline operator PackageFile const *() const {return File == Owner->PkgFileP?0:File;};
|
||||
inline pkgCache *Cache() {return Owner;};
|
||||
|
||||
inline const char *FileName() const {return File->FileName == 0?0:Owner->StrP + File->FileName;};
|
||||
inline const char *Archive() const {return File->Archive == 0?0:Owner->StrP + File->Archive;};
|
||||
@ -281,14 +290,18 @@ class pkgCache::PkgFileIterator
|
||||
inline const char *Version() const {return File->Version == 0?0:Owner->StrP + File->Version;};
|
||||
inline const char *Origin() const {return File->Origin == 0?0:Owner->StrP + File->Origin;};
|
||||
inline const char *Label() const {return File->Origin == 0?0:Owner->StrP + File->Label;};
|
||||
inline const char *Architecture() const {return File->Origin == 0?0:Owner->StrP + File->Architecture;};
|
||||
inline const char *Site() const {return File->Site == 0?0:Owner->StrP + File->Site;};
|
||||
inline const char *Architecture() const {return File->Architecture == 0?0:Owner->StrP + File->Architecture;};
|
||||
inline const char *IndexType() const {return File->IndexType == 0?0:Owner->StrP + File->IndexType;};
|
||||
|
||||
inline unsigned long Index() const {return File - Owner->PkgFileP;};
|
||||
|
||||
bool IsOk();
|
||||
string RelStr();
|
||||
|
||||
// Constructors
|
||||
inline PkgFileIterator(pkgCache &Owner) : Owner(&Owner), File(Owner.PkgFileP + Owner.Head().FileList) {};
|
||||
inline PkgFileIterator() : Owner(0), File(0) {};
|
||||
inline PkgFileIterator(pkgCache &Owner) : Owner(&Owner), File(Owner.PkgFileP) {};
|
||||
inline PkgFileIterator(pkgCache &Owner,PackageFile *Trg) : Owner(&Owner), File(Trg) {};
|
||||
};
|
||||
|
||||
@ -315,35 +328,28 @@ class pkgCache::VerFileIterator
|
||||
inline VerFile const &operator *() const {return *FileP;};
|
||||
inline operator VerFile *() {return FileP == Owner->VerFileP?0:FileP;};
|
||||
inline operator VerFile const *() const {return FileP == Owner->VerFileP?0:FileP;};
|
||||
inline pkgCache *Cache() {return Owner;};
|
||||
|
||||
inline PkgFileIterator File() const {return PkgFileIterator(*Owner,FileP->File + Owner->PkgFileP);};
|
||||
inline unsigned long Index() const {return FileP - Owner->VerFileP;};
|
||||
|
||||
inline VerFileIterator() : Owner(0), FileP(0) {};
|
||||
inline VerFileIterator(pkgCache &Owner,VerFile *Trg) : Owner(&Owner), FileP(Trg) {};
|
||||
};
|
||||
|
||||
// Inlined Begin functions cant be in the class because of order problems
|
||||
inline pkgCache::VerIterator pkgCache::PkgIterator::VersionList() const
|
||||
{return VerIterator(*Owner,Owner->VerP + Pkg->VersionList);};
|
||||
|
||||
inline pkgCache::VerIterator pkgCache::PkgIterator::CurrentVer() const
|
||||
{return VerIterator(*Owner,Owner->VerP + Pkg->CurrentVer);};
|
||||
|
||||
inline pkgCache::VerIterator pkgCache::PkgIterator::TargetVer() const
|
||||
{return VerIterator(*Owner,Owner->VerP + Pkg->TargetVer);};
|
||||
|
||||
inline pkgCache::DepIterator pkgCache::PkgIterator::RevDependsList() const
|
||||
{return DepIterator(*Owner,Owner->DepP + Pkg->RevDepends,Pkg);};
|
||||
|
||||
inline pkgCache::PrvIterator pkgCache::PkgIterator::ProvidesList() const
|
||||
{return PrvIterator(*Owner,Owner->ProvideP + Pkg->ProvidesList,Pkg);};
|
||||
|
||||
inline pkgCache::PrvIterator pkgCache::VerIterator::ProvidesList() const
|
||||
{return PrvIterator(*Owner,Owner->ProvideP + Ver->ProvidesList,Ver);};
|
||||
|
||||
inline pkgCache::DepIterator pkgCache::VerIterator::DependsList() const
|
||||
{return DepIterator(*Owner,Owner->DepP + Ver->DependsList,Ver);};
|
||||
|
||||
inline pkgCache::VerFileIterator pkgCache::VerIterator::FileList() const
|
||||
{return VerFileIterator(*Owner,Owner->VerFileP + Ver->FileList);};
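Illustrative use of GlobOr(), the same pattern the reworked ResolveByKeep() above now relies on (assumes a VerIterator Ver from this cache; not part of the diff):

   for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false;)
   {
      pkgCache::DepIterator Start, End;
      D.GlobOr(Start, End);                 // D is advanced past the whole or-group
      if (End.IsCritical() == false)
         continue;
      // Start..End is one or-group of critical dependencies
   }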
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: clean.cc,v 1.2 2001/01/11 02:03:26 kojima Exp $
|
||||
// $Id: clean.cc,v 1.1 2002/07/23 17:54:50 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Clean - Clean out downloaded directories
|
||||
@ -17,12 +17,11 @@
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/configuration.h>
|
||||
|
||||
#include <apti18n.h>
|
||||
|
||||
#include <dirent.h>
|
||||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include <i18n.h>
|
||||
|
||||
/*}}}*/
|
||||
|
||||
// ArchiveCleaner::Go - Perform smart cleanup of the archive /*{{{*/
|
||||
@ -32,6 +31,7 @@
|
||||
bool pkgArchiveCleaner::Go(string Dir,pkgCache &Cache)
|
||||
{
|
||||
bool CleanInstalled = _config->FindB("APT::Clean-Installed",true);
|
||||
string MyArch = _config->Find("APT::Architecture");
|
||||
|
||||
DIR *D = opendir(Dir.c_str());
|
||||
if (D == 0)
|
||||
@ -41,7 +41,7 @@ bool pkgArchiveCleaner::Go(string Dir,pkgCache &Cache)
|
||||
if (chdir(Dir.c_str()) != 0)
|
||||
{
|
||||
closedir(D);
|
||||
return _error->Errno("chdir",_("Unable to change to "),Dir.c_str());
|
||||
return _error->Errno("chdir",_("Unable to change to %s"),Dir.c_str());
|
||||
}
|
||||
|
||||
for (struct dirent *Dir = readdir(D); Dir != 0; Dir = readdir(D))
|
||||
@ -55,7 +55,11 @@ bool pkgArchiveCleaner::Go(string Dir,pkgCache &Cache)
|
||||
|
||||
struct stat St;
|
||||
if (stat(Dir->d_name,&St) != 0)
|
||||
{
|
||||
chdir(StartDir.c_str());
|
||||
closedir(D);
|
||||
return _error->Errno("stat",_("Unable to stat %s."),Dir->d_name);
|
||||
}
|
||||
|
||||
// Grab the package name
|
||||
const char *I = Dir->d_name;
|
||||
@ -78,6 +82,9 @@ bool pkgArchiveCleaner::Go(string Dir,pkgCache &Cache)
|
||||
continue;
|
||||
string Arch = DeQuoteString(string(Start,I-Start));
|
||||
|
||||
if (Arch != "all" && Arch != MyArch)
|
||||
continue;
|
||||
|
||||
// Lookup the package
|
||||
pkgCache::PkgIterator P = Cache.FindPkg(Pkg);
|
||||
if (P.end() != true)
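Go() only walks the archive directory; the Erase() hook does the actual deleting. A minimal cleaner subclass might look like this (illustrative; the Erase() signature is assumed from stock apt and is not shown in this diff):

   class LogCleaner : public pkgArchiveCleaner
   {
      protected:
      virtual void Erase(const char *File,string Pkg,string Ver,struct stat &St)
      {
         cout << "Del " << File << " [" << SizeToStr(St.st_size) << "B]" << endl;
         unlink(File);
      };
   };
   // LogCleaner Cleaner; Cleaner.Go(_config->FindDir("Dir::Cache::archives"), Cache);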
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: clean.h,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
|
||||
// $Id: clean.h,v 1.1 2002/07/23 17:54:50 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Clean - Clean out downloaded directories
|
@ -1 +0,0 @@
|
||||
D
|
@ -1 +0,0 @@
|
||||
rapt/apt-pkg/cnc
|
@ -1 +0,0 @@
|
||||
:pserver:anonymous@cvs.conectiva.com.br:/home/cvs
|
@ -1,24 +0,0 @@
|
||||
/cdromutl.cc/1.1.1.1/Fri Aug 10 14:00:24 2001//
|
||||
/cdromutl.h/1.1.1.1/Fri Aug 10 14:00:24 2001//
|
||||
/cmndline.cc/1.1.1.1/Fri Aug 10 14:00:24 2001//
|
||||
/cmndline.h/1.1.1.1/Fri Aug 10 14:00:28 2001//
|
||||
/configuration.cc/1.2/Fri Aug 10 14:00:30 2001//
|
||||
/configuration.h/1.2/Fri Aug 10 14:00:30 2001//
|
||||
/crc-16.cc/1.1.1.1/Fri Aug 10 14:00:30 2001//
|
||||
/crc-16.h/1.1.1.1/Fri Aug 10 14:00:30 2001//
|
||||
/error.h/1.1.1.1/Fri Aug 10 14:00:30 2001//
|
||||
/fileutl.cc/1.3/Fri Aug 10 14:00:30 2001//
|
||||
/fileutl.h/1.2/Fri Aug 10 14:00:30 2001//
|
||||
/md5.cc/1.1.1.1/Fri Aug 10 14:00:32 2001//
|
||||
/md5.h/1.1.1.1/Fri Aug 10 14:00:32 2001//
|
||||
/mmap.cc/1.1.1.1/Fri Aug 10 14:00:32 2001//
|
||||
/mmap.h/1.1.1.1/Fri Aug 10 14:00:32 2001//
|
||||
/progress.h/1.1.1.1/Fri Aug 10 14:00:32 2001//
|
||||
/sptr.h/1.1/Fri Aug 10 14:00:34 2001//
|
||||
/strutl.cc/1.3/Fri Aug 10 14:00:34 2001//
|
||||
/strutl.h/1.3/Fri Aug 10 14:00:34 2001//
|
||||
/system.h/1.1.1.1/Fri Aug 10 14:00:34 2001//
|
||||
/error.cc/1.4/Tue Nov 13 14:24:16 2001//
|
||||
/i18n.h/1.2/Fri Nov 16 01:13:06 2001//
|
||||
/progress.cc/1.3/Fri Nov 16 01:13:06 2001//
|
||||
D
|
@ -1 +0,0 @@
|
||||
rapt/apt-pkg/contrib
|
@ -1 +0,0 @@
|
||||
:pserver:anonymous@cvs.conectiva.com.br:/home/cvs
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: cdromutl.cc,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
|
||||
// $Id: cdromutl.cc,v 1.1 2002/07/23 17:54:51 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
CDROM Utilities - Some functions to manipulate CDROM mounts.
|
||||
@ -19,6 +19,8 @@
|
||||
#include <apt-pkg/fileutl.h>
|
||||
#include <apt-pkg/configuration.h>
|
||||
|
||||
#include <apti18n.h>
|
||||
|
||||
#include <sys/wait.h>
|
||||
#include <sys/errno.h>
|
||||
#include <sys/statvfs.h>
|
||||
@ -50,7 +52,7 @@ bool IsMounted(string &Path)
|
||||
struct stat Buf,Buf2;
|
||||
if (stat(Path.c_str(),&Buf) != 0 ||
|
||||
stat((Path + "../").c_str(),&Buf2) != 0)
|
||||
return _error->Errno("stat","Unable to stat the mount point %s",Path.c_str());
|
||||
return _error->Errno("stat",_("Unable to stat the mount point %s"),Path.c_str());
|
||||
|
||||
if (Buf.st_dev == Buf2.st_dev)
|
||||
return false;
|
||||
@ -93,7 +95,7 @@ bool UnmountCdrom(string Path)
|
||||
}
|
||||
|
||||
// Wait for mount
|
||||
return ExecWait(Child,"mount",true);
|
||||
return ExecWait(Child,"umount",true);
|
||||
}
|
||||
/*}}}*/
|
||||
// MountCdrom - Mount a cdrom /*{{{*/
|
||||
@ -144,11 +146,11 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version)
|
||||
|
||||
string StartDir = SafeGetCWD();
|
||||
if (chdir(CD.c_str()) != 0)
|
||||
return _error->Errno("chdir","Unable to change to %s",CD.c_str());
|
||||
return _error->Errno("chdir",_("Unable to change to %s"),CD.c_str());
|
||||
|
||||
DIR *D = opendir(".");
|
||||
if (D == 0)
|
||||
return _error->Errno("opendir","Unable to read %s",CD.c_str());
|
||||
return _error->Errno("opendir",_("Unable to read %s"),CD.c_str());
|
||||
|
||||
/* Run over the directory, we assume that the reader order will never
|
||||
change as the media is read-only. In theory if the kernel did
|
||||
@ -185,7 +187,7 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version)
|
||||
{
|
||||
struct statvfs Buf;
|
||||
if (statvfs(CD.c_str(),&Buf) != 0)
|
||||
return _error->Errno("statfs","Failed to stat the cdrom");
|
||||
return _error->Errno("statfs",_("Failed to stat the cdrom"));
|
||||
|
||||
// We use a kilobyte block size to advoid overflow
|
||||
sprintf(S,"%lu %lu",(long)(Buf.f_blocks*(Buf.f_bsize/1024)),
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: cdromutl.h,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
|
||||
// $Id: cdromutl.h,v 1.1 2002/07/23 17:54:51 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
CDROM Utilities - Some functions to manipulate CDROM mounts.
|
||||
@ -12,6 +12,8 @@
|
||||
|
||||
#include <string>
|
||||
|
||||
using std::string;
|
||||
|
||||
#ifdef __GNUG__
|
||||
#pragma interface "apt-pkg/cdromutl.h"
|
||||
#endif
|
@ -1,10 +1,13 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: cmndline.cc,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
|
||||
// $Id: cmndline.cc,v 1.3 2003/01/29 18:43:48 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Command Line Class - Sophisticated command line parser
|
||||
|
||||
This source is placed in the Public Domain, do with it what you will
|
||||
It was originally written by Jason Gunthorpe <jgg@debian.org>.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Include files /*{{{*/
|
||||
@ -14,7 +17,10 @@
|
||||
#include <apt-pkg/cmndline.h>
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
|
||||
#include <apti18n.h>
|
||||
/*}}}*/
|
||||
using namespace std;
|
||||
|
||||
// CommandLine::CommandLine - Constructor /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
@ -56,7 +62,10 @@ bool CommandLine::Parse(int argc,const char **argv)
|
||||
|
||||
// Double dash signifies the end of option processing
|
||||
if (*Opt == '-' && Opt[1] == 0)
|
||||
{
|
||||
I++;
|
||||
break;
|
||||
}
|
||||
|
||||
// Single dash is a short option
|
||||
if (*Opt != '-')
|
||||
@ -68,7 +77,7 @@ bool CommandLine::Parse(int argc,const char **argv)
|
||||
Args *A;
|
||||
for (A = ArgList; A->end() == false && A->ShortOpt != *Opt; A++);
|
||||
if (A->end() == true)
|
||||
return _error->Error("Command line option '%c' [from %s] is not known.",*Opt,argv[I]);
|
||||
return _error->Error(_("Command line option '%c' [from %s] is not known."),*Opt,argv[I]);
|
||||
|
||||
if (HandleOpt(I,argc,argv,Opt,A) == false)
|
||||
return false;
|
||||
@ -94,7 +103,7 @@ bool CommandLine::Parse(int argc,const char **argv)
|
||||
for (; Opt != OptEnd && *Opt != '-'; Opt++);
|
||||
|
||||
if (Opt == OptEnd)
|
||||
return _error->Error("Command line option %s is not understood",argv[I]);
|
||||
return _error->Error(_("Command line option %s is not understood"),argv[I]);
|
||||
Opt++;
|
||||
|
||||
for (A = ArgList; A->end() == false &&
|
||||
@ -102,7 +111,7 @@ bool CommandLine::Parse(int argc,const char **argv)
|
||||
|
||||
// Failed again..
|
||||
if (A->end() == true && OptEnd - Opt != 1)
|
||||
return _error->Error("Command line option %s is not understood",argv[I]);
|
||||
return _error->Error(_("Command line option %s is not understood"),argv[I]);
|
||||
|
||||
// The option could be a single letter option prefixed by a no-..
|
||||
if (A->end() == true)
|
||||
@ -110,12 +119,12 @@ bool CommandLine::Parse(int argc,const char **argv)
|
||||
for (A = ArgList; A->end() == false && A->ShortOpt != *Opt; A++);
|
||||
|
||||
if (A->end() == true)
|
||||
return _error->Error("Command line option %s is not understood",argv[I]);
|
||||
return _error->Error(_("Command line option %s is not understood"),argv[I]);
|
||||
}
|
||||
|
||||
// The option is not boolean
|
||||
if (A->IsBoolean() == false)
|
||||
return _error->Error("Command line option %s is not boolean",argv[I]);
|
||||
return _error->Error(_("Command line option %s is not boolean"),argv[I]);
|
||||
PreceedMatch = true;
|
||||
}
|
||||
|
||||
@ -154,7 +163,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
|
||||
|
||||
// Equals was specified but we fell off the end!
|
||||
if (Opt[1] == '=' && Argument == 0)
|
||||
return _error->Error("Option %s requires an argument.",argv[I]);
|
||||
return _error->Error(_("Option %s requires an argument."),argv[I]);
|
||||
if (Opt[1] == '=')
|
||||
CertainArg = true;
|
||||
|
||||
@ -175,7 +184,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
|
||||
if ((A->Flags & HasArg) == HasArg)
|
||||
{
|
||||
if (Argument == 0)
|
||||
return _error->Error("Option %s requires an argument.",argv[I]);
|
||||
return _error->Error(_("Option %s requires an argument."),argv[I]);
|
||||
Opt += strlen(Opt);
|
||||
I += IncI;
|
||||
|
||||
@ -189,13 +198,13 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
|
||||
const char *J;
|
||||
for (J = Argument; *J != 0 && *J != '='; J++);
|
||||
if (*J == 0)
|
||||
return _error->Error("Option %s: Configuration item sepecification must have an =<val>.",argv[I]);
|
||||
return _error->Error(_("Option %s: Configuration item sepecification must have an =<val>."),argv[I]);
|
||||
|
||||
// = is trailing
|
||||
if (J[1] == 0)
|
||||
{
|
||||
if (I+1 >= argc)
|
||||
return _error->Error("Option %s: Configuration item sepecification must have an =<val>.",argv[I]);
|
||||
return _error->Error(_("Option %s: Configuration item sepecification must have an =<val>."),argv[I]);
|
||||
Conf->Set(string(Argument,J-Argument),string(argv[I++ +1]));
|
||||
}
|
||||
else
|
||||
@ -225,7 +234,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
|
||||
|
||||
// Conversion failed and the argument was specified with an =s
|
||||
if (EndPtr == Argument && CertainArg == true)
|
||||
return _error->Error("Option %s requires an integer argument, not '%s'",argv[I],Argument);
|
||||
return _error->Error(_("Option %s requires an integer argument, not '%s'"),argv[I],Argument);
|
||||
|
||||
// Conversion was ok, set the value and return
|
||||
if (EndPtr != 0 && EndPtr != Argument && *EndPtr == 0)
|
||||
@ -256,7 +265,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
|
||||
break;
|
||||
|
||||
if (strlen(argv[I]) >= sizeof(Buffer))
|
||||
return _error->Error("Option '%s' is too long",argv[I]);
|
||||
return _error->Error(_("Option '%s' is too long"),argv[I]);
|
||||
|
||||
// Skip the leading dash
|
||||
const char *J = argv[I];
|
||||
@ -289,7 +298,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
|
||||
}
|
||||
|
||||
if (CertainArg == true)
|
||||
return _error->Error("Sense %s is not understood, try true or false.",Argument);
|
||||
return _error->Error(_("Sense %s is not understood, try true or false."),Argument);
|
||||
|
||||
Argument = 0;
|
||||
}
|
||||
@ -339,7 +348,7 @@ bool CommandLine::DispatchArg(Dispatch *Map,bool NoMatch)
|
||||
if (Map[I].Match == 0)
|
||||
{
|
||||
if (NoMatch == true)
|
||||
_error->Error("Invalid operation %s",FileList[0]);
|
||||
_error->Error(_("Invalid operation %s"),FileList[0]);
|
||||
}
|
||||
|
||||
return false;
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: cmndline.h,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
|
||||
// $Id: cmndline.h,v 1.1 2002/07/23 17:54:51 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Command Line Class - Sophisticated command line parser
|
@ -1,6 +1,6 @@
|
||||
// -*- mode: cpp; mode: fold -*-
|
||||
// Description /*{{{*/
|
||||
// $Id: configuration.cc,v 1.2 2000/10/30 18:49:49 kojima Exp $
|
||||
// $Id: configuration.cc,v 1.4 2003/01/29 18:43:48 niemeyer Exp $
|
||||
/* ######################################################################
|
||||
|
||||
Configuration Class
|
||||
@ -9,6 +9,9 @@
|
||||
for a tree-oriented configuration environment. All runtime configuration
|
||||
is stored in here.
|
||||
|
||||
This source is placed in the Public Domain, do with it what you will
|
||||
It was originally written by Jason Gunthorpe <jgg@debian.org>.
|
||||
|
||||
##################################################################### */
|
||||
/*}}}*/
|
||||
// Include files /*{{{*/
|
||||
@ -18,9 +21,20 @@
|
||||
#include <apt-pkg/configuration.h>
|
||||
#include <apt-pkg/error.h>
|
||||
#include <apt-pkg/strutl.h>
|
||||
#include <apt-pkg/fileutl.h>
|
||||
#include <apti18n.h>
|
||||
|
||||
#include <vector>
|
||||
#include <algorithm>
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
|
||||
#include <stdio.h>
|
||||
#include <fstream.h>
|
||||
#include <dirent.h>
|
||||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
|
||||
using namespace std;
|
||||
/*}}}*/
|
||||
|
||||
Configuration *_config = new Configuration;
|
||||
@ -84,7 +98,7 @@ Configuration::Item *Configuration::Lookup(Item *Head,const char *S,
|
||||
if (Len != 0)
|
||||
{
|
||||
for (; I != 0; Last = &I->Next, I = I->Next)
|
||||
if ((Res = stringcasecmp(I->Tag.begin(),I->Tag.end(),S,S + Len)) == 0)
|
||||
if ((Res = stringcasecmp(I->Tag,S,S + Len)) == 0)
|
||||
break;
|
||||
}
|
||||
else
|
||||
@ -175,15 +189,15 @@ string Configuration::FindFile(const char *Name,const char *Default) const
|
||||
while (Itm->Parent != 0 && Itm->Parent->Value.empty() == false)
|
||||
{
|
||||
// Absolute
|
||||
if (val[0] == '/')
|
||||
if (val.length() >= 1 && val[0] == '/')
|
||||
break;
|
||||
|
||||
// ~/foo or ./foo
|
||||
if ((val[0] == '~' || val[0] == '.') && val[1] == '/')
|
||||
if (val.length() >= 2 && (val[0] == '~' || val[0] == '.') && val[1] == '/')
|
||||
break;
|
||||
|
||||
// ../foo
|
||||
if (val[0] == '.' && val[1] == '.' && val[2] == '/')
|
||||
if (val.length() >= 3 && val[0] == '.' && val[1] == '.' && val[2] == '/')
|
||||
break;
|
||||
|
||||
if (Itm->Parent->Value.end()[-1] != '/')
|
||||
@ -268,7 +282,7 @@ string Configuration::FindAny(const char *Name,const char *Default) const
|
||||
case 'i':
|
||||
{
|
||||
char buf[16];
|
||||
snprintf(buf, sizeof(buf)-1, "%d", FindI(key, Default));
|
||||
snprintf(buf, sizeof(buf)-1, "%d", FindI(key, Default ? atoi(Default) : 0 ));
|
||||
return buf;
|
||||
}
|
||||
}
|
||||
@ -336,11 +350,12 @@ void Configuration::Clear(string Name)
|
||||
|
||||
while (Top != 0 && Top->Next == 0)
|
||||
{
|
||||
if (Top == Stop)
|
||||
return;
|
||||
Item *Tmp = Top;
|
||||
Top = Top->Parent;
|
||||
delete Tmp;
|
||||
|
||||
if (Top == Stop)
|
||||
return;
|
||||
}
|
||||
|
||||
Item *Tmp = Top;
|
||||
@ -382,14 +397,14 @@ bool Configuration::ExistsAny(const char *Name) const
|
||||
// Configuration::Dump - Dump the config /*{{{*/
|
||||
// ---------------------------------------------------------------------
|
||||
/* Dump the entire configuration space */
|
||||
void Configuration::Dump()
|
||||
void Configuration::Dump(ostream& str)
|
||||
{
|
||||
/* Write out all of the configuration directives by walking the
|
||||
configuration tree */
|
||||
const Configuration::Item *Top = Tree(0);
|
||||
for (; Top != 0;)
|
||||
{
|
||||
clog << Top->FullTag() << " \"" << Top->Value << "\";" << endl;
|
||||
str << Top->FullTag() << " \"" << Top->Value << "\";" << endl;
|
||||
|
||||
if (Top->Child != 0)
|
||||
{
|
||||
@ -424,14 +439,14 @@ string Configuration::Item::FullTag(const Item *Stop) const
Sectional config files are like bind's named.conf where there are
sections like 'zone "foo.org" { .. };' This causes each section to be
added in with a tag like "zone::foo.org" instead of being split
tag/value. */
tag/value. AsSectional enables Sectional parsing.*/
bool ReadConfigFile(Configuration &Conf,string FName,bool AsSectional,
unsigned Depth)
{
// Open the stream for reading
ifstream F(FName.c_str(),ios::in | ios::nocreate);
ifstream F(FName.c_str(),ios::in);
if (!F != 0)
return _error->Errno("ifstream::ifstream","Opening configuration file %s",FName.c_str());
return _error->Errno("ifstream::ifstream",_("Opening configuration file %s"),FName.c_str());

char Buffer[300];
string LineBuffer;
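The comment in the hunk above describes the sectional mode that the new AsSectional flag switches on; a hedged, named.conf-style illustration (the zone name is an example only):

    zone "foo.org" {
       type master;
    };

With AsSectional parsing enabled, such a block is stored under a combined tag like zone::foo.org instead of being split into a separate tag and value.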
@ -541,7 +556,7 @@ bool ReadConfigFile(Configuration &Conf,string FName,bool AsSectional,
// Syntax Error
if (TermChar == '{' && LineBuffer.empty() == true)
return _error->Error("Syntax error %s:%u: Block starts with no name.",FName.c_str(),CurLine);
return _error->Error(_("Syntax error %s:%u: Block starts with no name."),FName.c_str(),CurLine);

// No string on this line
if (LineBuffer.empty() == true)
@ -560,10 +575,11 @@ bool ReadConfigFile(Configuration &Conf,string FName,bool AsSectional,
string Tag;
const char *Pos = LineBuffer.c_str();
if (ParseQuoteWord(Pos,Tag) == false)
return _error->Error("Syntax error %s:%u: Malformed Tag",FName.c_str(),CurLine);
return _error->Error(_("Syntax error %s:%u: Malformed Tag"),FName.c_str(),CurLine);

// Parse off the word
string Word;
bool NoWord = false;
if (ParseCWord(Pos,Word) == false &&
ParseQuoteWord(Pos,Word) == false)
{
@ -572,9 +588,11 @@ bool ReadConfigFile(Configuration &Conf,string FName,bool AsSectional,
Word = Tag;
Tag = "";
}
else
NoWord = true;
}
if (strlen(Pos) != 0)
return _error->Error("Syntax error %s:%u: Extra junk after value",FName.c_str(),CurLine);
return _error->Error(_("Syntax error %s:%u: Extra junk after value"),FName.c_str(),CurLine);

// Go down a level
if (TermChar == '{')
@ -611,26 +629,35 @@ bool ReadConfigFile(Configuration &Conf,string FName,bool AsSectional,
}

// Specials
if (Tag[0] == '#')
if (Tag.length() >= 1 && Tag[0] == '#')
{
if (ParentTag.empty() == false)
return _error->Error("Syntax error %s:%u: Directives can only be done at the top level",FName.c_str(),CurLine);
return _error->Error(_("Syntax error %s:%u: Directives can only be done at the top level"),FName.c_str(),CurLine);
Tag.erase(Tag.begin());
if (Tag == "clear")
Conf.Clear(Word);
else if (Tag == "include")
{
if (Depth > 10)
return _error->Error("Syntax error %s:%u: Too many nested includes",FName.c_str(),CurLine);
if (ReadConfigFile(Conf,Word,AsSectional,Depth+1) == false)
return _error->Error("Syntax error %s:%u: Included from here",FName.c_str(),CurLine);
return _error->Error(_("Syntax error %s:%u: Too many nested includes"),FName.c_str(),CurLine);
if (Word.length() > 2 && Word.end()[-1] == '/')
{
if (ReadConfigDir(Conf,Word,AsSectional,Depth+1) == false)
return _error->Error(_("Syntax error %s:%u: Included from here"),FName.c_str(),CurLine);
}
else
return _error->Error("Syntax error %s:%u: Unsupported directive '%s'",FName.c_str(),CurLine,Tag.c_str());
{
if (ReadConfigFile(Conf,Word,AsSectional,Depth+1) == false)
return _error->Error(_("Syntax error %s:%u: Included from here"),FName.c_str(),CurLine);
}
}
else
return _error->Error(_("Syntax error %s:%u: Unsupported directive '%s'"),FName.c_str(),CurLine,Tag.c_str());
}
else
{
// Set the item in the configuration class
if (NoWord == false)
Conf.Set(Item,Word);
}
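With the new branch above, the #include directive accepts a directory as well as a single file: a value longer than two characters that ends in '/' is handed to ReadConfigDir(), anything else still goes through ReadConfigFile(), and #clear keeps working at the top level. A hedged apt.conf-style illustration (paths and option names are examples only):

    #clear APT::Some-List;              // top-level directive, unchanged
    #include "/etc/apt/apt.conf.d/";    // trailing '/': read the whole directory (new)
    #include "/etc/apt/extra.conf";     // no trailing '/': single file, as before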
@ -659,7 +686,53 @@ bool ReadConfigFile(Configuration &Conf,string FName,bool AsSectional,
}

if (LineBuffer.empty() == false)
return _error->Error("Syntax error %s:%u: Extra junk at end of file",FName.c_str(),CurLine);
return _error->Error(_("Syntax error %s:%u: Extra junk at end of file"),FName.c_str(),CurLine);
return true;
}
/*}}}*/
// ReadConfigDir - Read a directory of config files /*{{{*/
// ---------------------------------------------------------------------
/* */
bool ReadConfigDir(Configuration &Conf,string Dir,bool AsSectional,
unsigned Depth)
{
DIR *D = opendir(Dir.c_str());
if (D == 0)
return _error->Errno("opendir",_("Unable to read %s"),Dir.c_str());

vector<string> List;

for (struct dirent *Ent = readdir(D); Ent != 0; Ent = readdir(D))
{
if (Ent->d_name[0] == '.')
continue;

// Skip bad file names ala run-parts
const char *C = Ent->d_name;
for (; *C != 0; C++)
// CNC:2002-11-25
if (isalpha(*C) == 0 && isdigit(*C) == 0
&& *C != '_' && *C != '-' && *C != '.')
break;
if (*C != 0)
continue;

// Make sure it is a file and not something else
string File = flCombine(Dir,Ent->d_name);
struct stat St;
if (stat(File.c_str(),&St) != 0 || S_ISREG(St.st_mode) == 0)
continue;

List.push_back(File);
}
closedir(D);

sort(List.begin(),List.end());

// Read the files
for (vector<string>::const_iterator I = List.begin(); I != List.end(); I++)
if (ReadConfigFile(Conf,*I,AsSectional,Depth) == false)
return false;
return true;
}
/*}}}*/
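The name filter in ReadConfigDir() above works run-parts style: entries starting with '.' are ignored, only names built from letters, digits, '_', '-' and '.' are considered, and only regular files are read, in sorted order. Illustrative file names (the directory is an example only):

    /etc/apt/apt.conf.d/10rsync         read
    /etc/apt/apt.conf.d/20proxy.conf    read
    /etc/apt/apt.conf.d/.hidden         skipped (leading dot)
    /etc/apt/apt.conf.d/apt.conf~       skipped ('~' is not in the allowed set)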
@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
// $Id: configuration.h,v 1.2 2000/10/30 18:49:49 kojima Exp $
// $Id: configuration.h,v 1.2 2003/01/29 18:43:48 niemeyer Exp $
/* ######################################################################

Configuration Class
@ -33,6 +33,9 @@
#endif

#include <string>
#include <iostream>

using std::string;

class Configuration
{
@ -70,7 +73,7 @@ class Configuration
string FindFile(const char *Name,const char *Default = 0) const;
string FindDir(const char *Name,const char *Default = 0) const;
int FindI(const char *Name,int Default = 0) const;
int FindI(string Name,bool Default = 0) const {return FindI(Name.c_str(),Default);};
int FindI(string Name,int Default = 0) const {return FindI(Name.c_str(),Default);};
bool FindB(const char *Name,bool Default = false) const;
bool FindB(string Name,bool Default = false) const {return FindB(Name.c_str(),Default);};
string FindAny(const char *Name,const char *Default = 0) const;
@ -88,7 +91,8 @@ class Configuration
inline const Item *Tree(const char *Name) const {return Lookup(Name);};

void Dump();
inline void Dump() { Dump(std::clog); };
void Dump(std::ostream& str);

Configuration(const Item *Root);
Configuration();
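With the declarations above, Dump() becomes a thin wrapper over the new ostream overload, so callers can send the dump anywhere. A minimal sketch, assuming apt-pkg/configuration.h is available and using an illustrative output path:

    #include <fstream>
    #include <apt-pkg/configuration.h>

    void DumpToFile()
    {
       std::ofstream out("/tmp/apt-config.dump");  // illustrative path
       _config->Dump(out);                         // new ostream overload
       _config->Dump();                            // unchanged call: forwards to std::clog
    }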
@ -100,4 +104,7 @@ extern Configuration *_config;
bool ReadConfigFile(Configuration &Conf,string FName,bool AsSectional = false,
unsigned Depth = 0);

bool ReadConfigDir(Configuration &Conf,string Dir,bool AsSectional = false,
unsigned Depth = 0);

#endif
@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
// $Id: crc-16.cc,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
// $Id: crc-16.cc,v 1.1 2002/07/23 17:54:51 niemeyer Exp $
/* ######################################################################

CRC16 - Compute a 16bit crc very quickly
@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
// $Id: crc-16.h,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
// $Id: crc-16.h,v 1.1 2002/07/23 17:54:51 niemeyer Exp $
/* ######################################################################

CRC16 - Compute a 16bit crc very quickly
@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
// $Id: error.cc,v 1.4 2001/11/12 16:04:37 kojima Exp $
// $Id: error.cc,v 1.2 2002/07/25 18:07:18 niemeyer Exp $
/* ######################################################################

Global Erorr Class - Global error mechanism
@ -20,21 +20,24 @@
#include <apt-pkg/error.h>

#include <iostream>
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <string>
#include <stdarg.h>
#include <unistd.h>

#include "config.h"
/*}}}*/

using namespace std;

// Global Error Object /*{{{*/
/* If the implementation supports posix threads then the accessor function
is compiled to be thread safe otherwise a non-safe version is used. A
Per-Thread error object is maintained in much the same manner as libc
manages errno */
#if _POSIX_THREADS == 1 && defined(HAVE_PTHREAD)
#if defined(_POSIX_THREADS) && defined(HAVE_PTHREAD)
#include <pthread.h>

static pthread_key_t ErrorKey;
@ -225,11 +228,6 @@ void GlobalError::Discard()
/* */
void GlobalError::Insert(Item *Itm)
{
if (0) {//akk don't leave this here or it will have evil side effects
// on acquire methods
cerr << Itm->Text.c_str() << endl;
return;
}
Item **End = &List;
for (Item *I = List; I != 0; I = I->Next)
End = &I->Next;
@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
// $Id: error.h,v 1.1.1.1 2000/08/10 12:42:39 kojima Exp $
// $Id: error.h,v 1.2 2003/01/29 13:04:48 niemeyer Exp $
/* ######################################################################

Global Erorr Class - Global error mechanism
@ -44,8 +44,19 @@
#pragma interface "apt-pkg/error.h"
#endif

#ifdef __GNUG__
// Methods have a hidden this parameter that is visible to this attribute
#define APT_MFORMAT1 __attribute__ ((format (printf, 2, 3)))
#define APT_MFORMAT2 __attribute__ ((format (printf, 3, 4)))
#else
#define APT_MFORMAT1
#define APT_MFORMAT2
#endif

#include <string>

using std::string;

class GlobalError
{
struct Item
@ -61,14 +72,25 @@ class GlobalError
public:

#ifndef SWIG
// Call to generate an error from a library call.
bool Errno(const char *Function,const char *Description,...);
bool WarningE(const char *Function,const char *Description,...);
bool Errno(const char *Function,const char *Description,...) APT_MFORMAT2;
bool WarningE(const char *Function,const char *Description,...) APT_MFORMAT2;

/* A warning should be considered less severe than an error, and may be
ignored by the client. */
bool Error(const char *Description,...);
bool Warning(const char *Description,...);
bool Error(const char *Description,...) APT_MFORMAT1;
bool Warning(const char *Description,...) APT_MFORMAT1;
#else
// Call to generate an error from a library call.
bool Errno(const char *Function,const char *Description) APT_MFORMAT2;
bool WarningE(const char *Function,const char *Description) APT_MFORMAT2;

/* A warning should be considered less severe than an error, and may be
ignored by the client. */
bool Error(const char *Description) APT_MFORMAT1;
bool Warning(const char *Description) APT_MFORMAT1;
#endif

// Simple accessors
inline bool PendingError() {return PendingFlag;};
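The APT_MFORMAT attributes above let GCC check Error()/Errno() arguments the way it checks printf(); the format string is argument 2 or 3 because the hidden this parameter counts as argument 1. A minimal sketch of what a caller gains (the message and path are examples only):

    #include <apt-pkg/error.h>

    void Report(const char *Path)
    {
       _error->Error("Could not open %s",Path);   // checked like printf
       // _error->Error("Count: %s",42);          // would now draw a -Wformat warning
    }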
@ -86,4 +108,7 @@ class GlobalError
GlobalError *_GetErrorObj();
#define _error _GetErrorObj()

#undef APT_MFORMAT1
#undef APT_MFORMAT2

#endif
Some files were not shown because too many files have changed in this diff.