Browse Source

calculate hashes while downloading in https

We do this in HTTP already to give the CPU some exercise while the disk
is heavily spinning (or flashing?) to store the data, avoiding the need
to reread the entire file later on to calculate the hashes – which
happens outside of the eyes of progress reporting, so you might have
ended up with a bunch of https workers 'stuck' at 100% while they were
busy calculating hashes.

This is a bummer for everyone using apt as a connection speed test, as
the https method appears slower now (not really – it just isn't
reporting "done" too early anymore).
tags/debian/1.1.exp9
David Kalnischkies 6 years ago
parent
commit
34faa8f7ae
7 changed files with 47 additions and 24 deletions
  1. +3
    -5
      methods/http.cc
  2. +1
    -1
      methods/http.h
  3. +26
    -12
      methods/https.cc
  4. +4
    -2
      methods/https.h
  5. +7
    -1
      methods/server.cc
  6. +2
    -1
      methods/server.h
  7. +4
    -2
      test/integration/test-apt-download-progress

+ 3
- 5
methods/http.cc View File

@@ -484,16 +484,14 @@ APT_PURE bool HttpServerState::IsOpen() /*{{{*/
return (ServerFd != -1);
}
/*}}}*/
bool HttpServerState::InitHashes(FileFd &File, HashStringList const &ExpectedHashes)/*{{{*/
bool HttpServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/
{
delete In.Hash;
In.Hash = new Hashes(ExpectedHashes);

// Set the expected size and read file for the hashes
File.Truncate(StartPos);
return In.Hash->AddFD(File, StartPos);
return true;
}
/*}}}*/

APT_PURE Hashes * HttpServerState::GetHashes() /*{{{*/
{
return In.Hash;


+ 1
- 1
methods/http.h View File

@@ -111,7 +111,7 @@ struct HttpServerState: public ServerState
virtual bool Open();
virtual bool IsOpen();
virtual bool Close();
virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes);
virtual bool InitHashes(HashStringList const &ExpectedHashes);
virtual Hashes * GetHashes();
virtual bool Die(FileFd &File);
virtual bool Flush(FileFd * const File);


+ 26
- 12
methods/https.cc View File

@@ -72,18 +72,18 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
else
me->https->Server->StartPos = 0;

me->https->File->Truncate(me->https->Server->StartPos);
me->https->File->Seek(me->https->Server->StartPos);

me->Res->LastModified = me->https->Server->Date;
me->Res->Size = me->https->Server->Size;
me->Res->ResumePoint = me->https->Server->StartPos;

// we expect valid data, so tell our caller we get the file now
if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 &&
me->https->Server->JunkSize == 0 &&
me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
me->https->URIStart(*me->Res);
if (me->https->Server->Result >= 200 && me->https->Server->Result < 300)
{
if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
me->https->URIStart(*me->Res);
if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false)
return 0;
}
}
else if (me->https->Server->HeaderLine(line) == false)
return 0;
@@ -116,16 +116,31 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
}
}

if (me->Server->GetHashes()->Add((unsigned char const * const)buffer, buffer_size) == false)
return 0;

return buffer_size;
}

// HttpsServerState::HttpsServerState - Constructor /*{{{*/
HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner)
HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner), Hash(NULL)
{
TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
Reset();
}
/*}}}*/
bool HttpsServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/
{
delete Hash;
Hash = new Hashes(ExpectedHashes);
return true;
}
/*}}}*/
APT_PURE Hashes * HttpsServerState::GetHashes() /*{{{*/
{
return Hash;
}
/*}}}*/

void HttpsMethod::SetupProxy() /*{{{*/
{
@@ -365,6 +380,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// go for it - if the file exists, append on it
File = new FileFd(Itm->DestFile, FileFd::WriteAny);
Server = CreateServerState(Itm->Uri);
if (Server->InitHashes(Itm->ExpectedHashes) == false)
return false;

// keep apt updated
Res.Filename = Itm->DestFile;
@@ -443,10 +460,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
Res.LastModified = resultStat.st_mtime;

// take hashes
Hashes Hash(Itm->ExpectedHashes);
FileFd Fd(Res.Filename, FileFd::ReadOnly);
Hash.AddFD(Fd);
Res.TakeHashes(Hash);
Res.TakeHashes(*(Server->GetHashes()));

// keep apt updated
URIDone(Res);


+ 4
- 2
methods/https.h View File

@@ -29,6 +29,8 @@ class FileFd;

class HttpsServerState : public ServerState
{
Hashes * Hash;

protected:
virtual bool ReadHeaderLines(std::string &/*Data*/) { return false; }
virtual bool LoadNextResponse(bool const /*ToFile*/, FileFd * const /*File*/) { return false; }
@@ -42,8 +44,8 @@ class HttpsServerState : public ServerState
virtual bool Open() { return false; }
virtual bool IsOpen() { return false; }
virtual bool Close() { return false; }
virtual bool InitHashes(FileFd &/*File*/, HashStringList const &/*ExpectedHashes*/) { return false; }
virtual Hashes * GetHashes() { return NULL; }
virtual bool InitHashes(HashStringList const &ExpectedHashes);
virtual Hashes * GetHashes();
virtual bool Die(FileFd &/*File*/) { return false; }
virtual bool Flush(FileFd * const /*File*/) { return false; }
virtual bool Go(bool /*ToFile*/, FileFd * const /*File*/) { return false; }


+ 7
- 1
methods/server.cc View File

@@ -235,6 +235,12 @@ ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOu
Reset();
}
/*}}}*/
bool ServerState::AddPartialFileToHashes(FileFd &File) /*{{{*/
{
File.Truncate(StartPos);
return GetHashes()->AddFD(File, StartPos);
}
/*}}}*/

bool ServerMethod::Configuration(string Message) /*{{{*/
{
@@ -357,7 +363,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
FailFd = File->Fd();
FailTime = Server->Date;

if (Server->InitHashes(*File, Queue->ExpectedHashes) == false)
if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false)
{
_error->Errno("read",_("Problem hashing file"));
return ERROR_NOT_FROM_SERVER;


+ 2
- 1
methods/server.h View File

@@ -72,6 +72,7 @@ struct ServerState
};
/** \brief Get the headers before the data */
RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri);
bool AddPartialFileToHashes(FileFd &File);

bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0;
@@ -85,7 +86,7 @@ struct ServerState
virtual bool Open() = 0;
virtual bool IsOpen() = 0;
virtual bool Close() = 0;
virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes) = 0;
virtual bool InitHashes(HashStringList const &ExpectedHashes) = 0;
virtual Hashes * GetHashes() = 0;
virtual bool Die(FileFd &File) = 0;
virtual bool Flush(FileFd * const File) = 0;


+ 4
- 2
test/integration/test-apt-download-progress View File

@@ -26,14 +26,16 @@ assertprogress() {
TESTFILE=testfile.big
testsuccess dd if=/dev/zero of=./aptarchive/$TESTFILE bs=800k count=1

OPT='-o APT::Status-Fd=3 -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 -o Debug::Acquire::https=1'

msgtest 'download progress works via' 'http'
exec 3> apt-progress.log
testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE -o APT::Status-Fd=3 -o Acquire::http::Dl-Limit=800
testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE $OPT -o Acquire::http::Dl-Limit=800
assertprogress apt-progress.log

msgtest 'download progress works via' 'https'
exec 3> apt-progress.log
testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE -o APT::Status-Fd=3 -o Acquire::https::Dl-Limit=800
testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE $OPT -o Acquire::https::Dl-Limit=800
assertprogress apt-progress.log

# cleanup


Loading…
Cancel
Save