Browse Source

fallback to well-known URI if by-hash fails

We use a small trick to implement the fallback: we make it so that
by-hash is a special compression algorithm and apt already knows how to
deal with fallback between compression algorithms.

The drawback with implementing this fallback is that a) we are guessing
again and more importantly b) by-hash is only tried for the first
compression algorithm we want to acquire, not for all as before – but
flipping between by-hash and well-known for each compression algorithm
seems to be not really worth it as it seems unlikely that there will
actually be mirrors that only mirror a subset of compressed files, but
have by-hash enabled.

The user-experience is the usual fallback one: You see "Ign" lines in
the apt update output. The fallback is implemented as a transition
feature, so a (potentially huge) mirror network doesn't need a flagday.
It is not meant as a "someday we might" or "we don't, but some of our
mirrors might" option – we want to cut down on the 'Ign' lines so
that they become meaningful – if we wanted to spam everyone with them, we
could enable by-hash by default for all repositories…
sources.list and config options are better suited for this.

Closes: 798919
tags/debian/1.1.exp13
David Kalnischkies 6 years ago
parent
commit
af81ab9030
2 changed files with 65 additions and 47 deletions
  1. +50
    -36
      apt-pkg/acquire-item.cc
  2. +15
    -11
      test/integration/test-apt-by-hash-update

+ 50
- 36
apt-pkg/acquire-item.cc View File

@@ -999,6 +999,10 @@ void pkgAcqMetaBase::QueueIndexes(bool const verify) /*{{{*/
// at this point the real Items are loaded in the fetcher
ExpectedAdditionalItems = 0;

bool metaBaseSupportsByHash = false;
if (TransactionManager != NULL && TransactionManager->MetaIndexParser != NULL)
metaBaseSupportsByHash = TransactionManager->MetaIndexParser->GetSupportsAcquireByHash();

for (std::vector <IndexTarget>::iterator Target = IndexTargets.begin();
Target != IndexTargets.end();
++Target)
@@ -1028,6 +1032,15 @@ void pkgAcqMetaBase::QueueIndexes(bool const verify) /*{{{*/
if (types.empty() == false)
{
std::ostringstream os;
// add the special compressiontype byhash first if supported
std::string const useByHashConf = Target->Option(IndexTarget::BY_HASH);
bool useByHash = false;
if(useByHashConf == "force")
useByHash = true;
else
useByHash = StringToBool(useByHashConf) == true && metaBaseSupportsByHash;
if (useByHash == true)
os << "by-hash ";
std::copy(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " "));
os << *types.rbegin();
Target->Options["COMPRESSIONTYPES"] = os.str();
@@ -2437,29 +2450,58 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner,
}
/*}}}*/
// AcqIndex::Init - defered Constructor /*{{{*/
void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
string const &ShortDesc)
// NextCompressionExtension - pop (or peek at) the next compression type /*{{{*/
// CompressionExtensions is a space-separated list of compression types
// (e.g. "by-hash xz gz uncompressed"). The first entry is copied into
// CurrentCompressionExtension; unless preview is true, it is also removed
// from the list so the next call yields the following entry. With an empty
// list, CurrentCompressionExtension ends up empty as well.
static void NextCompressionExtension(std::string &CurrentCompressionExtension, std::string &CompressionExtensions, bool const preview)
{
   size_t const nextExt = CompressionExtensions.find(' ');
   if (nextExt == std::string::npos)
   {
      // last (or only) entry: take the whole remainder
      CurrentCompressionExtension = CompressionExtensions;
      if (preview == false)
	 CompressionExtensions.clear();
   }
   else
   {
      CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt);
      if (preview == false)
	 CompressionExtensions = CompressionExtensions.substr(nextExt+1);
   }
}
void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
string const &ShortDesc)
{
Stage = STAGE_DOWNLOAD;

DestFile = GetPartialFileNameFromURI(URI);
NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false);

if (CurrentCompressionExtension == "uncompressed")
{
Desc.URI = URI;
}
else if (CurrentCompressionExtension == "by-hash")
{
NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, true);
if(unlikely(TransactionManager->MetaIndexParser == NULL || CurrentCompressionExtension.empty()))
return;
if (CurrentCompressionExtension != "uncompressed")
{
Desc.URI = URI + '.' + CurrentCompressionExtension;
DestFile = DestFile + '.' + CurrentCompressionExtension;
}

HashStringList const Hashes = GetExpectedHashes();
HashString const * const TargetHash = Hashes.find(NULL);
if (unlikely(TargetHash == nullptr))
return;
std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
size_t const trailing_slash = Desc.URI.find_last_of("/");
if (unlikely(trailing_slash == std::string::npos))
return;
Desc.URI = Desc.URI.replace(
trailing_slash,
Desc.URI.substr(trailing_slash+1).size()+1,
ByHash);
}
else if (unlikely(CurrentCompressionExtension.empty()))
return;
else
@@ -2468,8 +2510,6 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
DestFile = DestFile + '.' + CurrentCompressionExtension;
}

if(TransactionManager->MetaIndexParser != NULL)
InitByHashIfNeeded();

Desc.Description = URIDesc;
Desc.Owner = this;
@@ -2478,32 +2518,6 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
QueueURI(Desc);
}
/*}}}*/
// AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/
// Rewrites Desc.URI so the last path component (the filename) is replaced
// by "by-hash/<HashType>/<HashValue>". Triggered when the target sets the
// by-hash option to "force", or to a true value while the Release file
// advertises Acquire-By-Hash support.
void pkgAcqIndex::InitByHashIfNeeded()
{
std::string const useByHash = Target.Option(IndexTarget::BY_HASH);
if(useByHash == "force" || (StringToBool(useByHash) == true &&
TransactionManager->MetaIndexParser->GetSupportsAcquireByHash()))
{
HashStringList const Hashes = GetExpectedHashes();
if(Hashes.usable())
{
// FIXME: should we really use the best hash here? or a fixed one?
// NOTE(review): the result of find("") is dereferenced without a null
// check — presumably usable() guarantees a best hash exists; confirm.
HashString const * const TargetHash = Hashes.find("");
std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
// swap the trailing "/<filename>" for the "/by-hash/…" path
size_t const trailing_slash = Desc.URI.find_last_of("/");
Desc.URI = Desc.URI.replace(
trailing_slash,
Desc.URI.substr(trailing_slash+1).size()+1,
ByHash);
} else {
// by-hash was requested but we have no hash record to build the URI from
_error->Warning(
"Fetching ByHash requested but can not find record for %s",
GetMetaKey().c_str());
}
}
}
/*}}}*/
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */


+ 15
- 11
test/integration/test-apt-by-hash-update View File

@@ -14,16 +14,15 @@ insertpackage 'unstable' 'foo' 'all' '1.0'
setupaptarchive --no-update

# make Packages *only* accessible by-hash for this test
mkdir -p aptarchive/dists/unstable/main/binary-i386/by-hash/SHA512
(cd aptarchive/dists/unstable/main/binary-i386/by-hash/SHA512 &&
mv ../../Packages* . &&
ln -s Packages.gz $(sha512sum Packages.gz|cut -f1 -d' ') )

# add sources
mkdir -p aptarchive/dists/unstable/main/source/by-hash/SHA512
(cd aptarchive/dists/unstable/main/source/by-hash/SHA512 &&
ln -s ../../Sources.gz $(sha512sum ../../Sources.gz|cut -f1 -d' ')
)
makebyhashonly() {
	# Make the ${2}* index files of component ${1} reachable *only* via
	# by-hash: move them into a by-hash/SHA512 directory and create the
	# SHA512-named symlink that apt will request.
	local plaindir hashdir checksum
	plaindir="$(readlink -f "./aptarchive/dists/unstable/main/${1}")"
	hashdir="${plaindir}/by-hash/SHA512"
	mkdir -p "${hashdir}"
	find "${plaindir}/" -maxdepth 1 -name "${2}*" -exec mv '{}' "${hashdir}" \;
	checksum="$(sha512sum "${hashdir}/${2}.gz" | cut -f1 -d' ')"
	ln -s "${hashdir}/${2}.gz" "${hashdir}/${checksum}"
}
makebyhashonly 'binary-i386' 'Packages'
makebyhashonly 'source' 'Sources'

ensureitsbroken() {
rm -rf rootdir/var/lib/apt/lists
@@ -39,7 +38,12 @@ ensureitsbroken -o Acquire::By-Hash=1

ensureitworks() {
	# With by-hash active the update must be clean: no 'Ign' fallback lines.
	rm -rf rootdir/var/lib/apt/lists
	testsuccess aptget update "$@" -o Acquire::Languages=none
	testfailure grep '^Ign' rootdir/tmp/testsuccess.output
	# Without Languages=none the (unavailable) translations are tried and
	# must fall back, so 'Ign' lines are expected here.
	rm -rf rootdir/var/lib/apt/lists
	testsuccess aptget update "$@"
	cp -f rootdir/tmp/testsuccess.output rootdir/tmp/aptupdate.output
	testsuccess grep '^Ign' rootdir/tmp/aptupdate.output
	# The indexes fetched via by-hash must be usable for installing.
	testsuccessequal "Inst foo (1.0 unstable [all])
Conf foo (1.0 unstable [all])" aptget install -qq -s foo
}


Loading…
Cancel
Save