try to avoid direct usage of .Fd() if possible and do read()s and co
on the FileFd instead

David Kalnischkies, 9 years ago (tags/debian/0.9.0)
commit 109eb1511d
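
The pattern the commit converts is the same everywhere: instead of reaching through FileFd with .Fd() and passing the size by hand, the hashing classes now take the FileFd itself and do their read()s through it. A minimal sketch of the before/after call (not part of the commit; the helper name and path handling are made up for illustration):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>
#include <string>

// Hypothetical helper mirroring the acquire-item.cc hunks below.
std::string HashIndexFile(std::string const &Path)
{
   FileFd fd(Path, FileFd::ReadOnly);
   SHA1Summation SHA1;

   // Old style (what the commit removes): raw descriptor plus explicit size.
   //   SHA1.AddFD(fd.Fd(), fd.Size());

   // New style: pass the FileFd itself; the overload added in
   // hashes.cc/hashsum.cc reads via FileFd::Read() until EOF.
   SHA1.AddFD(fd);

   return std::string(SHA1.Result());
}
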
15 changed files with 85 additions and 38 deletions
  1. apt-pkg/acquire-item.cc (+2, -2)
  2. apt-pkg/contrib/hashes.cc (+35, -5)
  3. apt-pkg/contrib/hashes.h (+5, -0)
  4. apt-pkg/contrib/hashsum.cc (+22, -0)
  5. apt-pkg/contrib/hashsum_template.h (+3, -0)
  6. apt-pkg/deb/debindexfile.cc (+0, -3)
  7. ftparchive/cachedb.cc (+4, -4)
  8. ftparchive/writer.cc (+1, -1)
  9. methods/cdrom.cc (+1, -1)
 10. methods/copy.cc (+1, -1)
 11. methods/file.cc (+1, -1)
 12. methods/ftp.cc (+1, -1)
 13. methods/http.cc (+7, -17)
 14. methods/https.cc (+1, -1)
 15. methods/rsh.cc (+1, -1)

apt-pkg/acquire-item.cc (+2, -2)

@@ -438,7 +438,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/

FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
SHA1Summation SHA1;
-SHA1.AddFD(fd.Fd(), fd.Size());
+SHA1.AddFD(fd);
string const local_sha1 = SHA1.Result();

if(local_sha1 == ServerSha1)
@@ -669,7 +669,7 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/

FileFd fd(FinalFile, FileFd::ReadOnly);
SHA1Summation SHA1;
-SHA1.AddFD(fd.Fd(), fd.Size());
+SHA1.AddFD(fd);
string local_sha1 = string(SHA1.Result());
if(Debug)
std::clog << "QueueNextDiff: "


apt-pkg/contrib/hashes.cc (+35, -5)

@@ -61,25 +61,25 @@ bool HashString::VerifyFile(std::string filename) const /*{{{*/
if(Type == "MD5Sum")
{
MD5Summation MD5;
-MD5.AddFD(Fd.Fd(), Fd.Size());
+MD5.AddFD(Fd);
fileHash = (std::string)MD5.Result();
}
else if (Type == "SHA1")
{
SHA1Summation SHA1;
-SHA1.AddFD(Fd.Fd(), Fd.Size());
+SHA1.AddFD(Fd);
fileHash = (std::string)SHA1.Result();
}
else if (Type == "SHA256")
{
SHA256Summation SHA256;
-SHA256.AddFD(Fd.Fd(), Fd.Size());
+SHA256.AddFD(Fd);
fileHash = (std::string)SHA256.Result();
}
else if (Type == "SHA512")
{
SHA512Summation SHA512;
-SHA512.AddFD(Fd.Fd(), Fd.Size());
+SHA512.AddFD(Fd);
fileHash = (std::string)SHA512.Result();
}
Fd.Close();
@@ -134,6 +134,36 @@ bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5,
SHA512.Add(Buf,Res);
}
return true;
}
+bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, bool const addMD5,
+                   bool const addSHA1, bool const addSHA256, bool const addSHA512)
+{
+   unsigned char Buf[64*64];
+   bool const ToEOF = (Size == 0);
+   while (Size != 0 || ToEOF)
+   {
+      unsigned long long n = sizeof(Buf);
+      if (!ToEOF) n = std::min(Size, n);
+      unsigned long long a = 0;
+      if (Fd.Read(Buf, n, &a) == false) // error
+         return false;
+      if (ToEOF == false)
+      {
+         if (a != n) // short read
+            return false;
+      }
+      else if (a == 0) // EOF
+         break;
+      Size -= a;
+      if (addMD5 == true)
+         MD5.Add(Buf, a);
+      if (addSHA1 == true)
+         SHA1.Add(Buf, a);
+      if (addSHA256 == true)
+         SHA256.Add(Buf, a);
+      if (addSHA512 == true)
+         SHA512.Add(Buf, a);
+   }
+   return true;
+}
/*}}}*/
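
The new overload's Size handling carries the behaviour the callers rely on: Size == 0 means read until EOF, while a non-zero Size demands exactly that many bytes and fails on a short read. A condensed usage sketch of the Hashes front end (illustrative only, not taken verbatim from any of the methods changed below):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>
#include <string>

// Illustrative helper: hash a fetched file the way the cdrom/copy/file/https
// methods below do after this commit.
std::string MD5OfFile(std::string const &Filename)
{
   Hashes Hash;
   FileFd Fd(Filename, FileFd::ReadOnly);
   Hash.AddFD(Fd);              // Size defaults to 0: hash until EOF
   // Hash.AddFD(Fd, Resume);   // ftp/rsh style: hash exactly Resume bytes
   return (std::string)Hash.MD5.Result();
}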


apt-pkg/contrib/hashes.h (+5, -0)

@@ -17,6 +17,7 @@
#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/sha2.h>
+#include <apt-pkg/fileutl.h>

#include <algorithm>
#include <vector>
@@ -74,6 +75,10 @@ class Hashes
{ return AddFD(Fd, Size, true, true, true, true); };
bool AddFD(int const Fd, unsigned long long Size, bool const addMD5,
bool const addSHA1, bool const addSHA256, bool const addSHA512);
+inline bool AddFD(FileFd &Fd,unsigned long long Size = 0)
+{ return AddFD(Fd, Size, true, true, true, true); };
+bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5,
+bool const addSHA1, bool const addSHA256, bool const addSHA512);
inline bool Add(const unsigned char *Beg,const unsigned char *End)
{return Add(Beg,End-Beg);};
};


apt-pkg/contrib/hashsum.cc (+22, -0)

@@ -24,5 +24,27 @@ bool SummationImplementation::AddFD(int const Fd, unsigned long long Size) {
Add(Buf,Res);
}
return true;
}
+bool SummationImplementation::AddFD(FileFd &Fd, unsigned long long Size) {
+   unsigned char Buf[64 * 64];
+   bool ToEOF = (Size == 0);
+   while (Size != 0 || ToEOF)
+   {
+      unsigned long long n = sizeof(Buf);
+      if (!ToEOF) n = std::min(Size, n);
+      unsigned long long a = 0;
+      if (Fd.Read(Buf, n, &a) == false) // error
+         return false;
+      if (ToEOF == false)
+      {
+         if (a != n) // short read
+            return false;
+      }
+      else if (a == 0) // EOF
+         break;
+      Size -= a;
+      Add(Buf, a);
+   }
+   return true;
+}
/*}}}*/

apt-pkg/contrib/hashsum_template.h (+3, -0)

@@ -10,6 +10,8 @@
#ifndef APTPKG_HASHSUM_TEMPLATE_H
#define APTPKG_HASHSUM_TEMPLATE_H

+#include <apt-pkg/fileutl.h>

#include <string>
#include <cstring>
#include <algorithm>
@@ -108,6 +110,7 @@ class SummationImplementation
{ return Add((const unsigned char *)Beg, End - Beg); };

bool AddFD(int Fd, unsigned long long Size = 0);
+bool AddFD(FileFd &Fd, unsigned long long Size = 0);
};

#endif

apt-pkg/deb/debindexfile.cc (+0, -3)

@@ -600,9 +600,6 @@ bool debStatusIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const

// Store the IMS information
pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
-struct stat St;
-if (fstat(Pkg.Fd(),&St) != 0)
-return _error->Errno("fstat","Failed to stat");
CFile->Size = Pkg.FileSize();
CFile->mtime = Pkg.ModificationTime();
CFile->Archive = Gen.WriteUniqString("now");


ftparchive/cachedb.cc (+4, -4)

@@ -351,7 +351,7 @@ bool CacheDB::GetMD5(bool const &GenOnly)
return false;
}
MD5Summation MD5;
-if (Fd->Seek(0) == false || MD5.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+if (Fd->Seek(0) == false || MD5.AddFD(*Fd, CurStat.FileSize) == false)
return false;
MD5Res = MD5.Result();
@@ -382,7 +382,7 @@ bool CacheDB::GetSHA1(bool const &GenOnly)
return false;
}
SHA1Summation SHA1;
-if (Fd->Seek(0) == false || SHA1.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+if (Fd->Seek(0) == false || SHA1.AddFD(*Fd, CurStat.FileSize) == false)
return false;
SHA1Res = SHA1.Result();
@@ -413,7 +413,7 @@ bool CacheDB::GetSHA256(bool const &GenOnly)
return false;
}
SHA256Summation SHA256;
-if (Fd->Seek(0) == false || SHA256.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+if (Fd->Seek(0) == false || SHA256.AddFD(*Fd, CurStat.FileSize) == false)
return false;
SHA256Res = SHA256.Result();
@@ -444,7 +444,7 @@ bool CacheDB::GetSHA512(bool const &GenOnly)
return false;
}
SHA512Summation SHA512;
-if (Fd->Seek(0) == false || SHA512.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+if (Fd->Seek(0) == false || SHA512.AddFD(*Fd, CurStat.FileSize) == false)
return false;
SHA512Res = SHA512.Result();


ftparchive/writer.cc (+1, -1)

@@ -1035,7 +1035,7 @@ bool ReleaseWriter::DoPackage(string FileName)
CheckSums[NewFileName].size = fd.Size();

Hashes hs;
-hs.AddFD(fd.Fd(), 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
+hs.AddFD(fd, 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
if (DoMD5 == true)
CheckSums[NewFileName].MD5 = hs.MD5.Result();
if (DoSHA1 == true)


methods/cdrom.cc (+1, -1)

@@ -268,7 +268,7 @@ bool CDROMMethod::Fetch(FetchItem *Itm)

Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
-Hash.AddFD(Fd.Fd(), Fd.Size());
+Hash.AddFD(Fd);
Res.TakeHashes(Hash);

URIDone(Res);


methods/copy.cc (+1, -1)

@@ -85,7 +85,7 @@ bool CopyMethod::Fetch(FetchItem *Itm)
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
-Hash.AddFD(Fd.Fd(), Fd.Size());
+Hash.AddFD(Fd);
Res.TakeHashes(Hash);

URIDone(Res);


methods/file.cc (+1, -1)

@@ -83,7 +83,7 @@ bool FileMethod::Fetch(FetchItem *Itm)

Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
-Hash.AddFD(Fd.Fd(), Fd.Size());
+Hash.AddFD(Fd);
Res.TakeHashes(Hash);
URIDone(Res);
return true;


methods/ftp.cc (+1, -1)

@@ -868,7 +868,7 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
if (Resume != 0)
{
-if (Hash.AddFD(To.Fd(),Resume) == false)
+if (Hash.AddFD(To,Resume) == false)
{
_error->Errno("read",_("Problem hashing file"));
return false;


methods/http.cc (+7, -17)

@@ -1007,31 +1007,21 @@ HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv)
FailFile.c_str(); // Make sure we dont do a malloc in the signal handler
FailFd = File->Fd();
FailTime = Srv->Date;
-// Set the expected size
-if (Srv->StartPos >= 0)
-{
-Res.ResumePoint = Srv->StartPos;
-if (ftruncate(File->Fd(),Srv->StartPos) < 0)
-_error->Errno("ftruncate", _("Failed to truncate file"));
-}
-// Set the start point
-lseek(File->Fd(),0,SEEK_END);

delete Srv->In.Hash;
Srv->In.Hash = new Hashes;
-// Fill the Hash if the file is non-empty (resume)
-if (Srv->StartPos > 0)

+// Set the expected size and read file for the hashes
+if (Srv->StartPos >= 0)
{
-lseek(File->Fd(),0,SEEK_SET);
-if (Srv->In.Hash->AddFD(File->Fd(),Srv->StartPos) == false)
+Res.ResumePoint = Srv->StartPos;
+File->Truncate(Srv->StartPos);
+
+if (Srv->In.Hash->AddFD(*File,Srv->StartPos) == false)
{
_error->Errno("read",_("Problem hashing file"));
return ERROR_NOT_FROM_SERVER;
}
-lseek(File->Fd(),0,SEEK_END);
}
SetNonBlock(File->Fd(),true);


methods/https.cc (+1, -1)

@@ -314,7 +314,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// take hashes
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
-Hash.AddFD(Fd.Fd(), Fd.Size());
+Hash.AddFD(Fd);
Res.TakeHashes(Hash);
// keep apt updated


methods/rsh.cc (+1, -1)

@@ -305,7 +305,7 @@ bool RSHConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
return false;

if (Resume != 0) {
-if (Hash.AddFD(To.Fd(),Resume) == false) {
+if (Hash.AddFD(To,Resume) == false) {
_error->Errno("read",_("Problem hashing file"));
return false;
}

