
Implement CacheDB for source packages in apt-ftparchive

tags/debian/1.0.4
Michael Vogt · 7 years ago · commit ce928105d7
12 changed files with 459 additions and 108 deletions
  1. apt-pkg/contrib/hashes.cc  +1 -1
  2. apt-pkg/contrib/hashes.h  +2 -0
  3. ftparchive/apt-ftparchive.cc  +27 -3
  4. ftparchive/cachedb.cc  +101 -45
  5. ftparchive/cachedb.h  +25 -3
  6. ftparchive/makefile  +1 -1
  7. ftparchive/sources.cc  +47 -0
  8. ftparchive/sources.h  +32 -0
  9. ftparchive/writer.cc  +54 -53
  10. ftparchive/writer.h  +1 -1
  11. test/integration/framework  +4 -1
  12. test/integration/test-apt-ftparchive  +164 -0
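
The net effect: apt-ftparchive can now keep source packages in a CacheDB (the new SrcCacheDB setting), so SourcesWriter pulls the parsed .dsc and its hashes out of the database instead of re-reading and re-hashing every file on each run. A minimal sketch of the new call path, assuming the usual apt-ftparchive headers and a CacheDB already opened on the sources database (the helper name and its output are illustrative, not part of the commit):

#include <iostream>
#include <string>
#include <apt-pkg/tagfile.h>
#include "cachedb.h"

// Ask the cache for the .dsc record plus hashes; the new DoSource flag makes
// CacheDB::LoadSource() store the .dsc under the "cs" key, so a second run
// is answered from the database instead of the filesystem.
static bool ShowSourcePackage(CacheDB &Db, std::string const &DscPath)
{
   if (Db.GetFileInfo(DscPath,
                      false, /* DoControl */
                      false, /* DoContents */
                      false, /* GenContentsOnly */
                      true,  /* DoSource */
                      true, true, true, true, /* MD5/SHA1/SHA256/SHA512 */
                      false  /* checkMtime */) == false)
      return false;

   // Parse the cached .dsc; clearsigned files may lack the trailing newlines
   // pkgTagSection expects, so append them as SourcesWriter::DoPackage does.
   char *Start = Db.Dsc.Data;
   char *BlkEnd = Db.Dsc.Data + Db.Dsc.Length;
   *BlkEnd++ = '\n';
   *BlkEnd++ = '\n';
   pkgTagSection Tags;
   if (Tags.Scan(Start, BlkEnd - Start) == false || Tags.Exists("Source") == false)
      return false;

   std::cout << Tags.FindS("Source") << " " << Tags.FindS("Version") << std::endl;
   return Db.Finish();  // write the stat/hash record back to the database
}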

apt-pkg/contrib/hashes.cc  +1 -1

@@ -133,7 +133,7 @@ bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5,
bool const addSHA1, bool const addSHA256, bool const addSHA512)
{
unsigned char Buf[64*64];
bool const ToEOF = (Size == 0);
bool const ToEOF = (Size == UntilEOF);
while (Size != 0 || ToEOF)
{
unsigned long long n = sizeof(Buf);


apt-pkg/contrib/hashes.h  +2 -0

@@ -78,6 +78,8 @@ class Hashes
SHA256Summation SHA256;
SHA512Summation SHA512;
static const int UntilEOF = 0;

inline bool Add(const unsigned char *Data,unsigned long long Size)
{
return MD5.Add(Data,Size) && SHA1.Add(Data,Size) && SHA256.Add(Data,Size) && SHA512.Add(Data,Size);
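
The hashes.cc/hashes.h change only gives the "read until EOF" size argument a name. A hedged usage sketch, assuming a readable path (the helper itself is illustrative):

#include <string>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>

// Hash an entire file; Hashes::UntilEOF (== 0) now documents the
// "no size limit, read to end of file" convention of AddFD().
static bool HashWholeFile(std::string const &Path, std::string &MD5Out)
{
   FileFd Fd(Path, FileFd::ReadOnly);
   if (Fd.IsOpen() == false)
      return false;
   Hashes Sums;
   if (Sums.AddFD(Fd.Fd(), Hashes::UntilEOF, true, true, true, true) == false)
      return false;
   MD5Out = std::string(Sums.MD5.Result());
   return true;
}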


ftparchive/apt-ftparchive.cc  +27 -3

@@ -62,6 +62,7 @@ struct PackageMap
// Stuff for the Package File
string PkgFile;
string BinCacheDB;
string SrcCacheDB;
string BinOverride;
string ExtraOverride;

@@ -106,6 +107,12 @@ struct PackageMap
inline bool operator() (const PackageMap &x,const PackageMap &y)
{return x.BinCacheDB < y.BinCacheDB;};
};

struct SrcDBCompare : public binary_function<PackageMap,PackageMap,bool>
{
inline bool operator() (const PackageMap &x,const PackageMap &y)
{return x.SrcCacheDB < y.SrcCacheDB;};
};
void GetGeneral(Configuration &Setup,Configuration &Block);
bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
@@ -232,11 +239,14 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
gettimeofday(&NewTime,0);
double Delta = NewTime.tv_sec - StartTime.tv_sec +
(NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
c0out << Packages.Stats.Packages << " files " <<
/* SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */
SizeToStr(Packages.Stats.Bytes) << "B " <<
TimeToStr((long)Delta) << endl;

if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
Stats.Add(Packages.Stats);
Stats.DeLinkBytes = Packages.Stats.DeLinkBytes;
@@ -263,7 +273,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
SrcDone = true;
// Create a package writer object.
SourcesWriter Sources(_config->Find("APT::FTPArchive::DB"),
SourcesWriter Sources(flCombine(CacheDir, SrcCacheDB),
flCombine(OverrideDir,BinOverride),
flCombine(OverrideDir,SrcOverride),
flCombine(OverrideDir,SrcExtraOverride));
@@ -323,6 +333,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
c0out << Sources.Stats.Packages << " pkgs in " <<
TimeToStr((long)Delta) << endl;

if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
c0out << " Misses in Cache: " << Sources.Stats.Misses << endl;

Stats.Add(Sources.Stats);
Stats.DeLinkBytes = Sources.Stats.DeLinkBytes;
@@ -435,6 +448,9 @@ bool PackageMap::GenContents(Configuration &Setup,
double Delta = NewTime.tv_sec - StartTime.tv_sec +
(NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
c0out << " Misses in Cache: " << Contents.Stats.Misses<< endl;

c0out << Contents.Stats.Packages << " files " <<
SizeToStr(Contents.Stats.Bytes) << "B " <<
TimeToStr((long)Delta) << endl;
@@ -465,6 +481,8 @@ static void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
string DBCache = Setup.Find("TreeDefault::BinCacheDB",
"packages-$(ARCH).db");
string SrcDBCache = Setup.Find("TreeDefault::SrcCacheDB",
"sources-$(SECTION).db");
string DSources = Setup.Find("TreeDefault::Sources",
"$(DIST)/$(SECTION)/source/Sources");
string DFLFile = Setup.Find("TreeDefault::FileList", "");
@@ -524,6 +542,7 @@ static void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars);
Itm.SrcCacheDB = SubstVar(Block.Find("SrcCacheDB",SrcDBCache.c_str()),Vars);
}
else
{
@@ -573,6 +592,7 @@ static void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
Itm.PkgFile = Block.Find("Packages");
Itm.SrcFile = Block.Find("Sources");
Itm.BinCacheDB = Block.Find("BinCacheDB");
Itm.SrcCacheDB = Block.Find("SrcCacheDB");
Itm.BinOverride = Block.Find("BinOverride");
Itm.ExtraOverride = Block.Find("ExtraOverride");
Itm.SrcExtraOverride = Block.Find("SrcExtraOverride");
@@ -777,6 +797,7 @@ static bool Generate(CommandLine &CmdL)

// Sort by cache DB to improve IO locality.
stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
// Generate packages
if (CmdL.FileSize() <= 2)
@@ -947,8 +968,11 @@ static bool Clean(CommandLine &CmdL)
_error->DumpErrors();
string CacheDB = I->BinCacheDB;
for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I);
for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I)
;
}

// FIXME: clean for the SourcesDB
return true;
}


ftparchive/cachedb.cc  +101 -45

@@ -20,6 +20,7 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/debfile.h>
#include <apt-pkg/gpgv.h>

#include <netinet/in.h> // htonl, etc
#include <ctype.h>
@@ -99,13 +100,32 @@ bool CacheDB::OpenFile()
Fd = new FileFd(FileName,FileFd::ReadOnly);
if (_error->PendingError() == true)
{
delete Fd;
Fd = NULL;
return false;
CloseFile();
return false;
}
return true;
}
/*}}}*/
void CacheDB::CloseFile()
{
delete Fd;
Fd = NULL;
}

bool CacheDB::OpenDebFile()
{
DebFile = new debDebFile(*Fd);
if (_error->PendingError() == true)
return false;
return true;
}

void CacheDB::CloseDebFile()
{
delete DebFile;
DebFile = NULL;
}

// CacheDB::GetFileStat - Get stats from the file /*{{{*/
// ---------------------------------------------------------------------
/* This gets the size from the database if it's there. If we need
@@ -168,56 +188,94 @@ bool CacheDB::GetCurStat()
/*}}}*/
// CacheDB::GetFileInfo - Get all the info about the file /*{{{*/
// ---------------------------------------------------------------------
bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents,
bool const &GenContentsOnly, bool const &DoMD5, bool const &DoSHA1,
bool const &DoSHA256, bool const &DoSHA512,
bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl,
bool const &DoContents,
bool const &GenContentsOnly,
bool const &DoSource,
bool const &DoMD5, bool const &DoSHA1,
bool const &DoSHA256, bool const &DoSHA512,
bool const &checkMtime)
{
this->FileName = FileName;
bool result = true;
this->FileName = FileName;

if (GetCurStat() == false)
if (GetCurStat() == false)
{
return false;
return false;
}
OldStat = CurStat;

if (GetFileStat(checkMtime) == false)
{
delete Fd;
Fd = NULL;
return false;
}
if (GetFileStat(checkMtime) == false)
{
CloseFile();
return false;
}

/* if mtime changed, update CurStat from disk */
if (checkMtime == true && OldStat.mtime != CurStat.mtime)
CurStat.Flags = FlSize;

Stats.Bytes += CurStat.FileSize;
Stats.Packages++;

if ((DoControl && LoadControl() == false)
|| (DoContents && LoadContents(GenContentsOnly) == false)
|| (DoMD5 && GetMD5(false) == false)
|| (DoSHA1 && GetSHA1(false) == false)
|| (DoSHA256 && GetSHA256(false) == false)
|| (DoSHA512 && GetSHA512(false) == false)
)
{
delete Fd;
Fd = NULL;
delete DebFile;
DebFile = NULL;
return false;
}
CurStat.Flags = FlSize;

delete Fd;
Fd = NULL;
delete DebFile;
DebFile = NULL;
Stats.Bytes += CurStat.FileSize;
Stats.Packages++;

return true;
if ((DoControl && LoadControl() == false)
|| (DoContents && LoadContents(GenContentsOnly) == false)
|| (DoSource && LoadSource() == false)
|| (DoMD5 && GetMD5(false) == false)
|| (DoSHA1 && GetSHA1(false) == false)
|| (DoSHA256 && GetSHA256(false) == false)
|| (DoSHA512 && GetSHA512(false) == false)
)
{
result = false;
}
CloseFile();
CloseDebFile();
return result;
}
/*}}}*/

bool CacheDB::LoadSource()
{
// Try to read the control information out of the DB.
if ((CurStat.Flags & FlSource) == FlSource)
{
// Lookup the control information
InitQuery("cs");
if (Get() == true && Dsc.TakeDsc(Data.data, Data.size) == true)
return true;
CurStat.Flags &= ~FlSource;
}
if (Fd == NULL && OpenFile() == false)
{
return false;
}

// Read the .dsc file
if (Fd == NULL)
{
if(OpenFile() == false)
return false;
}
Stats.Misses++;
if (Dsc.Read(FileName) == false)
return false;

if (Dsc.Data == 0)
return _error->Error(_("Failed to read .dsc"));
// Write back the control information
InitQuery("cs");
if (Put(Dsc.Data, Dsc.Length) == true)
CurStat.Flags |= FlSource;

return true;
}

// CacheDB::LoadControl - Load Control information /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -238,11 +296,10 @@ bool CacheDB::LoadControl()
return false;
}
// Create a deb instance to read the archive
if (DebFile == 0)
if (DebFile == NULL)
{
DebFile = new debDebFile(*Fd);
if (_error->PendingError() == true)
return false;
if(OpenDebFile() == false)
return false;
}
Stats.Misses++;
@@ -288,8 +345,7 @@ bool CacheDB::LoadContents(bool const &GenOnly)
// Create a deb instance to read the archive
if (DebFile == 0)
{
DebFile = new debDebFile(*Fd);
if (_error->PendingError() == true)
if(OpenDebFile() == false)
return false;
}



ftparchive/cachedb.h  +25 -3

@@ -22,9 +22,11 @@
#include <stdio.h>

#include "contents.h"
#include "sources.h"

class FileFd;


class CacheDB
{
protected:
@@ -65,10 +67,16 @@ class CacheDB
return true;
}
bool OpenFile();
void CloseFile();

bool OpenDebFile();
void CloseDebFile();

bool GetFileStat(bool const &doStat = false);
bool GetCurStat();
bool LoadControl();
bool LoadContents(bool const &GenOnly);
bool LoadSource();
bool GetMD5(bool const &GenOnly);
bool GetSHA1(bool const &GenOnly);
bool GetSHA256(bool const &GenOnly);
@@ -77,7 +85,8 @@ class CacheDB
// Stat info stored in the DB, Fixed types since it is written to disk.
enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2),
FlSize=(1<<3), FlSHA1=(1<<4), FlSHA256=(1<<5),
FlSHA512=(1<<6)};
FlSHA512=(1<<6), FlSource=(1<<7),
};

struct StatStore
{
@@ -101,6 +110,8 @@ class CacheDB
// Data collection helpers
debDebFile::MemControlExtract Control;
ContentsExtract Contents;
DscExtract Dsc;

std::string MD5Res;
std::string SHA1Res;
std::string SHA256Res;
@@ -139,8 +150,19 @@ class CacheDB
inline unsigned long long GetFileSize(void) {return CurStat.FileSize;}
bool SetFile(std::string const &FileName,struct stat St,FileFd *Fd);
bool GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents, bool const &GenContentsOnly,
bool const &DoMD5, bool const &DoSHA1, bool const &DoSHA256, bool const &DoSHA512, bool const &checkMtime = false);

// terrible old overloaded interface
bool GetFileInfo(std::string const &FileName,
bool const &DoControl,
bool const &DoContents,
bool const &GenContentsOnly,
bool const &DoSource,
bool const &DoMD5,
bool const &DoSHA1,
bool const &DoSHA256,
bool const &DoSHA512,
bool const &checkMtime = false);

bool Finish();
bool Clean();


ftparchive/makefile  +1 -1

@@ -12,7 +12,7 @@ PROGRAM=apt-ftparchive
SLIBS = -lapt-pkg -lapt-inst $(BDBLIB) $(INTLLIBS)
LIB_MAKES = apt-pkg/makefile apt-inst/makefile
SOURCE = apt-ftparchive.cc cachedb.cc writer.cc contents.cc override.cc \
multicompress.cc
multicompress.cc sources.cc
include $(PROGRAM_H)
else
PROGRAM=apt-ftparchive


ftparchive/sources.cc  +47 -0

@@ -0,0 +1,47 @@
#include <string>
#include <iostream>

// for memcpy
#include <cstring>

#include <apt-pkg/error.h>
#include <apt-pkg/gpgv.h>

#include "sources.h"

bool DscExtract::TakeDsc(const void *newData, unsigned long newSize)
{
if(newSize > maxSize)
return _error->Error("DSC data is too large %lu!", newSize);

if (newSize == 0)
{
Length = 0;
return true;
}
memcpy(Data, newData, newSize);
Length = newSize;

return true;
}

bool DscExtract::Read(std::string FileName)
{
FileFd F;
if (OpenMaybeClearSignedFile(FileName, F) == false)
return false;
unsigned long long const FSize = F.FileSize();
if(FSize > maxSize)
return _error->Error("DSC file '%s' is too large!",FileName.c_str());

if (F.Read(Data, FSize) == false)
return false;
Length = FSize;

IsClearSigned = (FileName != F.Name());

return true;
}



ftparchive/sources.h  +32 -0

@@ -0,0 +1,32 @@
#ifndef SOURCES_H
#define SOURCES_H

#include <apt-pkg/tagfile.h>

class DscExtract
{
public:
//FIXME: do we really need to enforce a maximum size of the dsc file?
static const int maxSize = 128*1024;

char *Data;
pkgTagSection Section;
unsigned long Length;
bool IsClearSigned;

bool TakeDsc(const void *Data, unsigned long Size);
bool Read(std::string FileName);
DscExtract() : Data(0), Length(0) {
Data = new char[maxSize];
};
~DscExtract() {
if(Data != NULL) {
delete [] Data;
Data = NULL;
}
};
};


#endif
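
A quick sketch of the new DscExtract helper in isolation, assuming a .dsc path on disk (the inspection function is illustrative): Read() goes through OpenMaybeClearSignedFile(), so Data/Length describe the payload with any clearsign wrapper stripped, and IsClearSigned records whether one was present.

#include <iostream>
#include <string>
#include "sources.h"

static bool InspectDsc(std::string const &Path)
{
   DscExtract Dsc;               // constructor allocates the fixed 128 KiB buffer
   if (Dsc.Read(Path) == false)  // fails for files larger than maxSize
      return false;
   std::cout << Path << ": " << Dsc.Length << " bytes"
             << (Dsc.IsClearSigned ? " (clearsigned)" : "") << std::endl;
   return true;
}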

ftparchive/writer.cc  +54 -53

@@ -385,10 +385,14 @@ bool FTWScanner::SetExts(string const &Vals)
bool PackagesWriter::DoPackage(string FileName)
{
// Pull all the data we need form the DB
if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
== false)
if (Db.GetFileInfo(FileName,
true, /* DoControl */
DoContents,
true, /* GenContentsOnly */
false, /* DoSource */
DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) == false)
{
return false;
return false;
}

unsigned long long FileSize = Db.GetFileSize();
@@ -614,59 +618,36 @@ SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string c
/* */
bool SourcesWriter::DoPackage(string FileName)
{
// Open the archive
FileFd F;
if (OpenMaybeClearSignedFile(FileName, F) == false)
return false;

unsigned long long const FSize = F.FileSize();
//FIXME: do we really need to enforce a maximum size of the dsc file?
if (FSize > 128*1024)
return _error->Error("DSC file '%s' is too large!",FileName.c_str());

if (BufSize < FSize + 2)
// Pull all the data we need form the DB
if (Db.GetFileInfo(FileName,
false, /* DoControl */
false, /* DoContents */
false, /* GenContentsOnly */
true, /* DoSource */
DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) == false)
{
BufSize = FSize + 2;
Buffer = (char *)realloc(Buffer , BufSize);
}

if (F.Read(Buffer, FSize) == false)
return false;
}

// Stat the file for later (F might be clearsigned, so not F.FileSize())
struct stat St;
if (stat(FileName.c_str(), &St) != 0)
return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
// we need to perform a "write" here (this is what finish is doing)
// because the call to Db.GetFileInfo() in the loop will change
// the "db cursor"
Db.Finish();

// Hash the file
char *Start = Buffer;
char *BlkEnd = Buffer + FSize;

Hashes DscHashes;
if (FSize == (unsigned long long) St.st_size)
{
if (DoMD5 == true)
DscHashes.MD5.Add((unsigned char *)Start,BlkEnd - Start);
if (DoSHA1 == true)
DscHashes.SHA1.Add((unsigned char *)Start,BlkEnd - Start);
if (DoSHA256 == true)
DscHashes.SHA256.Add((unsigned char *)Start,BlkEnd - Start);
if (DoSHA512 == true)
DscHashes.SHA512.Add((unsigned char *)Start,BlkEnd - Start);
}
else
{
FileFd DscFile(FileName, FileFd::ReadOnly);
DscHashes.AddFD(DscFile, St.st_size, DoMD5, DoSHA1, DoSHA256, DoSHA512);
}
// read stuff
char *Start = Db.Dsc.Data;
char *BlkEnd = Db.Dsc.Data + Db.Dsc.Length;

// Add extra \n to the end, just in case (as in clearsigned they are missing)
*BlkEnd++ = '\n';
*BlkEnd++ = '\n';

pkgTagSection Tags;
if (Tags.Scan(Start,BlkEnd - Start) == false || Tags.Exists("Source") == false)
if (Tags.Scan(Start,BlkEnd - Start) == false)
return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
if (Tags.Exists("Source") == false)
return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
Tags.Trim();

// Lookup the overide information, finding first the best priority.
@@ -714,6 +695,10 @@ bool SourcesWriter::DoPackage(string FileName)
OverItem = auto_ptr<Override::Item>(new Override::Item);
}
struct stat St;
if (stat(FileName.c_str(), &St) != 0)
return _error->Errno("fstat","Failed to stat %s",FileName.c_str());

auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
// const auto_ptr<Override::Item> autoSOverItem(SOverItem);
if (SOverItem.get() == 0)
@@ -732,23 +717,23 @@ bool SourcesWriter::DoPackage(string FileName)
string const strippedName = flNotDir(FileName);
std::ostringstream ostreamFiles;
if (DoMD5 == true && Tags.Exists("Files"))
ostreamFiles << "\n " << string(DscHashes.MD5.Result()) << " " << St.st_size << " "
ostreamFiles << "\n " << Db.MD5Res.c_str() << " " << St.st_size << " "
<< strippedName << "\n " << Tags.FindS("Files");
string const Files = ostreamFiles.str();

std::ostringstream ostreamSha1;
if (DoSHA1 == true && Tags.Exists("Checksums-Sha1"))
ostreamSha1 << "\n " << string(DscHashes.SHA1.Result()) << " " << St.st_size << " "
ostreamSha1 << "\n " << string(Db.SHA1Res.c_str()) << " " << St.st_size << " "
<< strippedName << "\n " << Tags.FindS("Checksums-Sha1");

std::ostringstream ostreamSha256;
if (DoSHA256 == true && Tags.Exists("Checksums-Sha256"))
ostreamSha256 << "\n " << string(DscHashes.SHA256.Result()) << " " << St.st_size << " "
ostreamSha256 << "\n " << string(Db.SHA256Res.c_str()) << " " << St.st_size << " "
<< strippedName << "\n " << Tags.FindS("Checksums-Sha256");

std::ostringstream ostreamSha512;
if (DoSHA512 == true && Tags.Exists("Checksums-Sha512"))
ostreamSha512 << "\n " << string(DscHashes.SHA512.Result()) << " " << St.st_size << " "
ostreamSha512 << "\n " << string(Db.SHA512Res.c_str()) << " " << St.st_size << " "
<< strippedName << "\n " << Tags.FindS("Checksums-Sha512");

// Strip the DirStrip prefix from the FileName and add the PathPrefix
@@ -785,8 +770,13 @@ bool SourcesWriter::DoPackage(string FileName)
(DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) ||
(DoSHA512 == true && !Tags.Exists("Checksums-Sha512")))
{
if (Db.GetFileInfo(OriginalPath, false, false, false, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
== false)
if (Db.GetFileInfo(OriginalPath,
false, /* DoControl */
false, /* DoContents */
false, /* GenContentsOnly */
false, /* DoSource */
DoMD5, DoSHA1, DoSHA256, DoSHA512,
DoAlwaysStat) == false)
{
return _error->Error("Error getting file info");
}
@@ -802,6 +792,9 @@ bool SourcesWriter::DoPackage(string FileName)
if (DoSHA512 == true && !Tags.Exists("Checksums-Sha512"))
ostreamSha512 << "\n " << string(Db.SHA512Res) << " "
<< Db.GetFileSize() << " " << ParseJnk;

// write back the GetFileInfo() stats data
Db.Finish();
}

// Perform the delinking operation
@@ -872,7 +865,7 @@ bool SourcesWriter::DoPackage(string FileName)

Stats.Packages++;
return Db.Finish();
return true;
}
/*}}}*/

@@ -893,7 +886,15 @@ ContentsWriter::ContentsWriter(string const &DB, string const &Arch) :
determine what the package name is. */
bool ContentsWriter::DoPackage(string FileName, string Package)
{
if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false))
if (!Db.GetFileInfo(FileName,
Package.empty(), /* DoControl */
true, /* DoContents */
false, /* GenContentsOnly */
false, /* DoSource */
false, /* DoMD5 */
false, /* DoSHA1 */
false, /* DoSHA256 */
false)) /* DoSHA512 */
{
return false;
}


ftparchive/writer.h  +1 -1

@@ -174,7 +174,7 @@ class SourcesWriter : public FTWScanner
string PathPrefix;
string DirStrip;
FILE *Output;
struct CacheDB::Stats Stats;
struct CacheDB::Stats &Stats;

virtual bool DoPackage(string FileName);



test/integration/framework  +4 -1

@@ -128,7 +128,10 @@ dpkgcheckbuilddeps() {
}
gdb() {
echo "gdb: run »$*«"
APT_CONFIG=aptconfig.conf LD_LIBRARY_PATH=${LIBRARYPATH} command gdb ${BUILDDIRECTORY}/$1 --args "$@"
CMD="$1"
shift

APT_CONFIG=aptconfig.conf LD_LIBRARY_PATH=${LIBRARYPATH} command gdb ${BUILDDIRECTORY}/$CMD --args ${BUILDDIRECTORY}/$CMD "$@"
}
gpg() {
# see apt-key for the whole trickery. Setup is done in setupenvironment


test/integration/test-apt-ftparchive  +164 -0

@@ -0,0 +1,164 @@
#!/bin/sh
set -e

assert_correct_sources_file() {
testequal "Package: bar
Binary: bar
Version: 1.0
Architecture: all
Format: 3.0 (native)
Directory: pool/main
Files:
7b57dd065e51de5905288a5104d4bef5 406 bar_1.0.dsc
d41d8cd98f00b204e9800998ecf8427e 0 bar_1.0.tar.gz
Package-List:
bar deb admin extra
Checksums-Sha1:
17a40b76715f393ab7fd6485c9392a02f1adf903 406 bar_1.0.dsc
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 bar_1.0.tar.gz
Checksums-Sha256:
d9d7507f66a89258b6920aca47747d7a30e0e64b09ecabbf02b2efbdabf840a9 406 bar_1.0.dsc
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 bar_1.0.tar.gz
Checksums-Sha512:
ee0a9bfb6614159b45203fc29487d4f37387993ca0e6d6f27b80010498f3731d75753188ece307508ae9af0259bd11a6af15a1a38f0b87dbd5ea1273b7a7d53e 406 bar_1.0.dsc
cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e 0 bar_1.0.tar.gz

Package: foo
Binary: foo
Version: 1.0
Architecture: all
Format: 3.0 (native)
Directory: pool/main
Files:
d144826e6f02831c1933e910c92cd7e0 171 foo_1.0.dsc
d41d8cd98f00b204e9800998ecf8427e 0 foo_1.0.tar.gz
Package-List:
foo deb admin extra
Checksums-Sha1:
979306aa3ccff3d61bba062bb6977e2493c6f907 171 foo_1.0.dsc
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 foo_1.0.tar.gz
Checksums-Sha256:
8c780af8b5a6d5b3c2e2f9518940beebea52ac6d6ad7b52c082dc925cfe5b532 171 foo_1.0.dsc
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 foo_1.0.tar.gz
Checksums-Sha512:
3da0240fd764657c2f3661b4d750578a9a99b0580591b133756379d48117ebda87a5ed2467f513200d6e7eaf51422cbe91c15720eef7fb4bba2cc8ff81ebc547 171 foo_1.0.dsc
cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e 0 foo_1.0.tar.gz
" cat ./aptarchive/dists/test/main/source/Sources
}

create_source_files() {
NAME="$1"
REQUEST_CLEARSIGN="$2"
TARFILE="aptarchive/pool/main/${NAME}_1.0.tar.gz"
DSC_FILE="aptarchive/pool/main/${NAME}_1.0.dsc"
touch $TARFILE
if [ "$REQUEST_CLEARSIGN" = "CLEARSIGN" ]; then
printf -- "-----BEGIN PGP SIGNED MESSAGE-----\n\n" > $DSC_FILE
fi
cat >> $DSC_FILE << EOF
Format: 3.0 (native)
Source: $NAME
Binary: $NAME
Architecture: all
Version: 1.0
Package-List:
$NAME deb admin extra
Files:
$(md5sum $TARFILE|cut -f1 -d' ') $(stat --print="%s" $TARFILE) ${NAME}_1.0.tar.gz
EOF
if [ "$REQUEST_CLEARSIGN" = "CLEARSIGN" ]; then
cat >> $DSC_FILE <<EOF

-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.11 (GNU/Linux)

iEYEARECAAYFAk3k/VoACgkQliSD4VZixzQxlgCgpav7j68z48qNTDFuT9fLqwT5
DFwAoIXatJFENEC371bMKTkUKlwZxQEk
=iI9V
-----END PGP SIGNATURE-----
EOF
fi
}

create_clearsigned_source_files() {
NAME="$1"
create_source_files "$NAME" "CLEARSIGN"
}

#
# main()
#
TESTDIR=$(readlink -f $(dirname $0))
. $TESTDIR/framework
setupenvironment
configarchitecture "i386"

msgtest 'Test apt-ftparchive source with missing hashes in .dsc'

mkdir -p aptarchive/pool/main
create_source_files foo
create_clearsigned_source_files bar

mkdir -p aptarchive/dists/test/main/i18n/
mkdir -p aptarchive/dists/test/main/source/

mkdir aptarchive-overrides
mkdir aptarchive-cache
cat > apt-ftparchive.conf <<"EOF"
Dir {
ArchiveDir "./aptarchive";
OverrideDir "./aptarchive-overrides";
CacheDir "./aptarchive-cache";
};

Default {
Packages::Compress ". gzip bzip2";
Contents::Compress ". gzip bzip2";
LongDescription "false";
};

TreeDefault {
BinCacheDB "packages-$(SECTION)-$(ARCH).db";
SrcCacheDB "sources-$(SECTION).db";

Directory "pool/$(SECTION)";
SrcDirectory "pool/$(SECTION)";

Packages "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages";
Sources "$(DIST)/$(SECTION)/source/Sources";
Contents "$(DIST)/Contents-$(ARCH)";
};

Tree "dists/test" {
Sections "main";
Architectures "source";

};
EOF


# generate (no cachedb)
aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt
testequal " Misses in Cache: 2" grep Misses stats-out.txt
assert_correct_sources_file


# generate again out of the cache
rm -f ./aptarchive/dists/test/main/source/Sources
aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt
testequal " Misses in Cache: 0" grep Misses stats-out.txt
assert_correct_sources_file

# generate invalid files
mkdir aptarchive/pool/invalid
printf "meep" > aptarchive/pool/invalid/invalid_1.0.dsc
testequal "
E: Could not find a Source entry in the DSC 'aptarchive/pool/invalid/invalid_1.0.dsc'" aptftparchive sources aptarchive/pool/invalid
rm -f aptarchive/pool/invalid/invalid_1.0.dsc

dd if=/dev/zero of="aptarchive/pool/invalid/toobig_1.0.dsc" bs=1k count=129 2>/dev/null
testequal "
E: DSC file 'aptarchive/pool/invalid/toobig_1.0.dsc' is too large!" aptftparchive sources aptarchive/pool/invalid


