@@ -5,6 +5,7 @@ local now = os.time()
local APT = require 'apt-panopticommon'
local D = APT.D
local I = APT.I
local T = APT.T
local W = APT.W
local E = APT.E
local C = APT.C
@@ -23,14 +24,14 @@ local releases = {"jessie", "ascii", "beowulf", "ceres"}
local releaseFiles =
{
-- Release file.
"/Release",	-- 3.7 MB
"/Release.gpg",	--
-- "/InRelease",	-- 3.7 MB
-- "/main/binary-all/Packages.xz",	-- 2.6 GB for all that changed recently.
"Release",	-- 3.7 MB
"Release.gpg",	--
-- "InRelease",	-- 3.7 MB
-- "main/binary-all/Packages.xz",	-- 2.6 GB for all that changed recently.
-- Contents files.	-- 3.3 GB
-- "/main/Contents-all.xz",
-- "/main/Contents-amd64.xz",
-- "/main/Contents-arm64.xz",
-- "main/Contents-all.xz",
-- "main/Contents-amd64.xz",
-- "main/Contents-arm64.xz",
-- "-security/main/Contents-all.xz",
-- "-security/main/Contents-amd64.xz",
-- "-security/main/Contents-arm64.xz",
@@ -160,6 +161,7 @@ local url = require 'socket.url'
local ip = ""
local cor = nil
local repoExists = function (r)
@@ -197,6 +199,7 @@ gatherIPs = function (host)
end
until nil == IPs
end
return IP[host]
end
-- Returns FTP directory listing
@@ -229,7 +232,7 @@ checkHEAD = function (host, URL, r, retry, sanity)
E("too many timeouts! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
return
end
if 4 <= (totalTimeouts) then
if APT.options.timeouts.value <= (totalTimeouts) then
E("Way too many timeouts!", PU.scheme, "", host)
return
end
@@ -237,7 +240,7 @@ checkHEAD = function (host, URL, r, retry, sanity)
E("too many redirects! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
return
end
if 4 <= retry then
if APT.options.retries.value <= retry then
E("too many retries! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
return
end
@@ -343,6 +346,8 @@ checkHEAD = function (host, URL, r, retry, sanity)
Still duplicates a tiny bit, but much less than the previous find based method.
TODO - maybe we can switch to using flock like we do with the other curl calls?
]]
C('Commented out code being called in checkHEAD() for redirects that should no longer happen!')
--[[
local file = pu.host .. "://" .. pu.path
local f = io.popen(string.format('if [ ! -f results/%s.check ] ; then touch results/%s.check; echo -n "check"; fi', file:gsub("/", "_"), file:gsub("/", "_"))):read("*a")
if (nil == f) or ("check" == f) then
@@ -351,28 +356,27 @@ TODO - maybe we can switch to using flock like we do with the other curl calls?
else
D("Already checking " .. file)
end
]]
end
end
end
end
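--[[ A minimal sketch of the flock based de-duplication that the TODO above suggests,
reusing the same "flock -n" convention as the curl download locks further down.
The lockedCheck name and the .lock file naming are assumptions for illustration,
not existing code.

local lockedCheck = function (file)
  local lock = "results/" .. file:gsub("/", "_") .. ".lock"
  -- flock -n exits non zero if another process already holds the lock.
  local ok = os.execute("flock -n " .. lock .. " true")
  return (0 == ok) or (true == ok)  -- Lua 5.1 returns a number, 5.2+ returns a boolean.
end
]]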
local checkTimeouts = function (host, scheme, URL)
if APT.testing(scheme) then
totalTimeouts = totalTimeouts + timeouts; timeouts = 0
checkHEAD(host, scheme .. "://" .. URL)
if 4 <= (totalTimeouts) then
E("Way too many timeouts!", scheme, "", host)
return true
end
end
totalTimeouts = totalTimeouts + timeouts; timeouts = 0
checkHEAD(host, scheme .. "://" .. URL)
if APT.testing("URLSanity") then
URL = URL:gsub("merged/", "merged///")
totalTimeouts = totalTimeouts + timeouts; timeouts = 0
checkHEAD(host, scheme .. "://" .. URL, 0, 0, true)
if 4 <= (totalTimeouts) then
E("Way too many timeouts!", scheme, "URLSanity", host)
return true
end
end
if nil ~= cor then
D('*>* About to resume coroutine after checkHEAD(' .. host .. ', ' .. scheme .. '://' .. URL .. ')')
local ok, message = coroutine.resume(cor)
if not ok then cor = nil; print(message) end
end
if APT.options.timeouts.value <= (totalTimeouts) then
E("Way too many timeouts!", scheme, "URLSanity", host)
return true
end
return false
end
@@ -387,19 +391,18 @@ local checkFiles = function (host, ip, path, file)
if checkTimeouts(host, "https", ip .. path .. "/" .. file) then return end
else
I("Checking IP " .. host .. " -> " .. ip .. " " .. path)
for i, s in pairs(referenceDevs) do
if checkTimeouts(host, "http", ip .. path .. "/" .. s) then return end
if checkTimeouts(host, "https", ip .. path .. "/" .. s) then return end
end
for i, s in pairs(releases) do
for j, k in pairs(releaseFiles) do
if repoExists(s .. k) then
if checkTimeouts(host, "http", ip .. path .. "/merged/dists/" .. s .. k) then return end
if checkTimeouts(host, "https", ip .. path .. "/merged/dists/" .. s .. k) then return end
if checkTimeouts(host, "http", ip .. path .. "/merged/dists/" .. s .. '/' .. k) then return end
if checkTimeouts(host, "https", ip .. path .. "/merged/dists/" .. s .. '/' .. k) then return end
end
end
end
for i, s in pairs(referenceDevs) do
if checkTimeouts(host, "http", ip .. path .. "/" .. s) then return end
if checkTimeouts(host, "https", ip .. path .. "/" .. s) then return end
end
end
end
@@ -421,127 +424,51 @@ checkHost = function (orig, host, path, ip, file)
D("checkHost " .. orig .. " " .. file)
if APT.testing("IPv4") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " -o " .. orig .. path .. " " .. file) end
else D("checkHost " .. orig .. " -> " .. host) end
local h = APT.mirrors [ ph.host ]
if nil == h then return end
for k , v in pairs ( h.IPs ) do
if " table " == type ( v ) then
for k1 , v1 in pairs ( v ) do
if v1 == " A " then
if APT.testing ( " IPv4 " ) then APT.fork ( " ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k1 .. " " .. file ) end
elseif v1 == " AAAA " then
if APT.testing ( " IPv6 " ) then APT.fork ( " ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k1 .. " " .. file ) end
end
end
else
if v == " A " then
if APT.testing ( " IPv4 " ) then APT.fork ( " ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k .. " " .. file ) end
elseif v == " AAAA " then
if APT.testing ( " IPv6 " ) then APT.fork ( " ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k .. " " .. file ) end
end
end
end
end
end
local addDownload = function (host, URL, f, r, k)
local file = k:match(".*/([%w%.%+%-_]*)$")	-- Get the filename.
if APT.checkFile("results/" .. host .. "/merged/dists/" .. r .. k) then
if APT.checkFile("results/" .. host .. "/merged/dists/" .. r .. '/' .. k) then
-- Curl's "check timestamp and overwrite file" stuff sucks.
--   -R means the destination file gets the timestamp of the remote file.
--   Can only do ONE timestamp check per command.
-- This doesn't work either.  All downloads get all these headers.  Pffft.
-- local status, ts = APT.execute('TZ="GMT" ls -l --time-style="+%a, %d %b %Y %T %Z" results/' .. host .. "/merged/dists/" .. r .. k .. ' | cut -d " " -f 6-11')
-- local status, ts = APT.execute('TZ="GMT" ls -l --time-style="+%a, %d %b %Y %T %Z" results/' .. host .. "/merged/dists/" .. r .. '/' .. k .. ' | cut -d " " -f 6-11')
-- f:write('header "If-Modified-Since: ' .. ts:sub(2, -2) .. '"\n')
-- Curl will DELETE the existing file if the timestamp check fails to download a new one, unless we change directory first,
--   which won't work with multiple files in multiple directories.  WTF?
os.execute("mv results/" .. host .. "/merged/dists/" .. r .. k ..
" results/" .. host .. "/merged/dists/" .. r .. k .. ".old")
os.execute("mv results/" .. host .. "/merged/dists/" .. r .. '/' .. k ..
" results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. ".old")
end
D('Downloading http://' .. host .. URL .. '/merged/dists/' .. r .. k)
f:write('url "' .. 'http://' .. host .. URL .. '/merged/dists/' .. r .. k .. '"\n')
f:write('output "results/' .. host .. '/merged/dists/' .. r .. k .. '"\n')
D('Downloading http://' .. host .. URL .. '/merged/dists/' .. r .. '/' .. k)
f:write('url "' .. 'http://' .. host .. URL .. '/merged/dists/' .. r .. '/' .. k .. '"\n')
f:write('output "results/' .. host .. '/merged/dists/' .. r .. '/' .. k .. '"\n')
end
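--[[ addDownload() appends one url/output pair per file to the curl config file that
downloads() later hands to curl with -K.  For a hypothetical mirror
"mirror.example.org" and the beowulf Release file, the generated entries would look
roughly like:

url "http://mirror.example.org/merged/dists/beowulf/Release"
output "results/mirror.example.org/merged/dists/beowulf/Release"
]]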
local postDownload = function (host, r, k)
local file = k:match(".*/([%w%.%+%-_]*)$")	-- Get the filename.
local dir = k:sub(1, 0 - (#file + 1))
os.execute("if [ -f results/" .. host .. "/merged/dists/" .. r .. k .. ".old ] " ..
"&& [ ! -f results/" .. host .. "/merged/dists/" .. r .. k .. " ]; then cp -a " ..
"results/" .. host .. "/merged/dists/" .. r .. k .. ".old " ..
"results/" .. host .. "/merged/dists/" .. r .. k .. "; fi")
if ".gz" == k:sub(-3, -1) then APT.execute("ionice -c3 nice -n 19 gzip -dfk results/" .. host .. "/merged/dists/" .. r .. k) end
if ".xz" == k:sub(-3, -1) then APT.execute("ionice -c3 nice -n 19 xz -dfk results/" .. host .. "/merged/dists/" .. r .. k .. " 2>/dev/null") end
if APT.testing("Integrity") then
if ".gpg" == k:sub(-4, -1) then
local status, out = APT.execute("gpgv --keyring /usr/share/keyrings/devuan-keyring.gpg results/" .. host .. "/merged/dists/" .. r .. k ..
" results/" .. host .. "/merged/dists/" .. r .. k:sub(1, -5) .. " 2>/dev/null")
if "0" ~= status then E("GPG check failed - " .. host .. "/merged/dists/" .. r .. k, "http", "Integrity", host) end
os.execute('rm results/' .. host .. '/merged/dists/' .. r .. k)
end
-- TODO - should check the PGP sig of InRelease as well.
end
if APT.testing ( " Integrity " ) or APT.testing ( " Updated " ) then
if " Packages. " == file : sub ( 1 , 9 ) then
-- TODO - compare the SHA256 sums in pkgmaster's Release for both the packed and unpacked versions.
-- Also note that this might get only a partial download due to maxtime.
if APT.options . referenceSite.value == host then
local Pp , e = io.open ( ' results/ ' .. host .. ' /merged/dists/ ' .. r .. dir .. ' Packages.parsed ' , " w+ " )
if nil == Pp then W ( ' opening results/ ' .. host .. ' /merged/dists/ ' .. r .. dir .. ' Packages.parsed ' .. ' file - ' .. e ) else
local pp = { }
for l in io.lines ( ' results/ ' .. host .. ' /merged/dists/ ' .. r .. dir .. ' Packages ' ) do
if " Package: " == l : sub ( 1 , 9 ) then
if 0 ~= # pp then
for i = 1 , 5 do
if nil == pp [ i ] then print ( host .. " " .. r .. " " .. dir .. " " .. i ) else Pp : write ( pp [ i ] .. " | " ) end
end
Pp : write ( " \n " )
end
pp = { }
pp [ 1 ] = l : sub ( 10 , - 1 )
elseif " Version: " == l : sub ( 1 , 9 ) then
pp [ 2 ] = l : sub ( 10 , - 1 )
elseif " Filename: " == l : sub ( 1 , 10 ) then
pp [ 3 ] = l : sub ( 11 , - 1 )
elseif " Size: " == l : sub ( 1 , 6 ) then
pp [ 4 ] = l : sub ( 7 , - 1 )
elseif " SHA256: " == l : sub ( 1 , 8 ) then
pp [ 5 ] = l : sub ( 9 , - 1 )
end
end
Pp : close ( )
os.execute ( ' sort results/ ' .. host .. ' /merged/dists/ ' .. r .. dir .. ' Packages.parsed >results/ ' .. host .. ' /merged/dists/ ' .. r .. dir .. ' Packages_parsed-sorted ' )
if APT.checkFile ( ' Packages/ ' .. r .. dir .. ' Packages_parsed-sorted ' ) then
os.execute ( ' diff -U 0 Packages/ ' .. r .. dir .. ' Packages_parsed-sorted ' ..
' results/pkgmaster.devuan.org/merged/dists/ ' .. r .. dir .. ' Packages_parsed-sorted ' ..
' | grep -E "^-" | grep -Ev "^ \\ + \\ + \\ +|^---" >>results/OLD_PACKAGES_ ' .. r .. ' .txt ' )
os.execute ( ' diff -U 0 Packages/ ' .. r .. dir .. ' Packages_parsed-sorted ' ..
' results/pkgmaster.devuan.org/merged/dists/ ' .. r .. dir .. ' Packages_parsed-sorted ' ..
' | grep -E "^ \\ +" | grep -Ev "^ \\ + \\ + \\ +|^---" >>results/NEW_Packages_ ' .. r .. ' .txt ' )
-- Find the smallest new package for each release.
os.execute ( ' sort -b -k 9,9 -n results/NEW_Packages_ ' .. r .. ' .txt >results/NEW_Packages_ ' .. r .. ' .sorted.txt ' )
os.execute ( ' grep -s " | pool/DEBIAN/" results/NEW_Packages_ ' .. r .. ' .sorted.txt 2>/dev/null | head -n 1 >results/NEW_Packages_ ' .. r .. ' .test.tmp ' )
os.execute ( ' grep -s " | pool/DEBIAN-SECURITY/" results/NEW_Packages_ ' .. r .. ' .sorted.txt 2>/dev/null | head -n 1 >>results/NEW_Packages_ ' .. r .. ' .test.tmp ' )
os.execute ( ' grep -s " | pool/DEVUAN/" results/NEW_Packages_ ' .. r .. ' .sorted.txt 2>/dev/null | head -n 1 >>results/NEW_Packages_ ' .. r .. ' .test.tmp ' )
os.execute ( ' mv results/NEW_Packages_ ' .. r .. ' .test.tmp results/NEW_Packages_ ' .. r .. ' .test.txt ' )
else
C ( " Can't find file Packages/ " .. r .. dir .. " Packages_parsed-sorted " )
end
os.execute ( ' mkdir -p Packages/ ' .. r .. dir )
os.execute ( ' mv -f results/pkgmaster.devuan.org/merged/dists/ ' .. r .. dir .. ' Packages_parsed-sorted Packages/ ' .. r .. dir .. ' Packages_parsed-sorted ' )
end
else
while not APT.checkFile ( ' results/NEW_Packages_ ' .. r .. ' .test.txt ' ) do os.execute ( ' sleep 10 ' ) end
end
os.execute ( ' rm -f results/ ' .. host .. ' /merged/dists/ ' .. r .. dir .. ' Packages 2>/dev/null ' )
os.execute ( ' rm -f results/ ' .. host .. ' /merged/dists/ ' .. r .. dir .. ' Packages.* 2>/dev/null ' )
end
if nil == file then file = k end
os.execute("if [ -f results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. ".old ] " ..
"&& [ ! -f results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. " ]; then cp -a " ..
"results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. ".old " ..
"results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. "; fi")
if APT.checkFile('results/' .. host .. '/merged/dists/' .. r .. '/' .. k) then
if ".gz" == k:sub(-3, -1) then APT.execute("ionice -c3 nice -n 19 gzip -dfk results/" .. host .. "/merged/dists/" .. r .. '/' .. k) end
if ".xz" == k:sub(-3, -1) then APT.execute("ionice -c3 nice -n 19 xz -dfk results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. " 2>/dev/null") end
end
end
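--[[ The Integrity gpgv check used in postDownload() (and again in parseRelease()
below) verifies the detached Release signature against the Devuan keyring.  With a
hypothetical host and the beowulf release the expanded command is roughly:

gpgv --keyring /usr/share/keyrings/devuan-keyring.gpg results/mirror.example.org/merged/dists/beowulf/Release.gpg results/mirror.example.org/merged/dists/beowulf/Release 2>/dev/null

A non-zero exit status is logged as a GPG failure for that mirror, and the .gpg
file is removed afterwards.
]]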
local downloadLock = "flock -n results/curl-"
local download = "curl --connect-timeout " .. APT.options.timeout.value .. " --create-dirs -f -L --max-time " .. APT.options.maxtime.value .. " -z 'results/stamp.old' -v -R "
local download = "curl " ..
" --connect-timeout " .. APT.options.timeout.value ..
" --create-dirs -f -L " ..
" --fail-early " ..
" --max-time " .. APT.options.maxtime.value ..
" --retry " .. APT.options.retries.value ..
" -R -v -z 'results/stamp.old' "
local downloads = function (host, URL, meta, release, list)
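--[[ downloads() batches the url/output pairs written by addDownload() into a
results/curl-<meta>-<host>.files config file and runs the "download" command defined
above under flock.  Judging from the log excerpt quoted in doDownloads(), the
assembled command line ends up roughly like this (the host name here is
illustrative; 5, 300 and 3 are just the option values seen in that log):

ionice -c3 nice -n 19 flock -n results/curl-debs-mirror.example.org.log curl --connect-timeout 5 --create-dirs -f -L --fail-early --max-time 300 --retry 3 -R -v -z 'results/stamp.old' --stderr results/curl-debs-mirror.example.org.log -K results/curl-debs-mirror.example.org.files

-R keeps the remote timestamp on each saved file, -z only fetches files newer than
results/stamp.old, and --fail-early aborts the whole batch on the first failed
transfer.
]]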
if nil == URL then URL = "" end
local lock = meta .. "-" .. host .. ".log"
@@ -600,6 +527,7 @@ local validateURL = function(m)
m.BaseURL = p.authority .. p.path
return m
end
local getMirrors = function ()
local mirrors = {}
local host = ""
@@ -621,8 +549,6 @@ local getMirrors = function ()
end
host = d
m[t] = d
gatherIPs(host)
m["IPs"] = IP[host]
elseif "Protocols" == t then
local prot = {}
for w in d:gmatch("(%w+)") do
@@ -643,19 +569,305 @@ local getMirrors = function ()
end
end
if APT.testing("DNSRR") then
mirrors[APT.options.roundRobin.value] = {["Protocols"] = {["http"] = true; ["https"] = true;}; ["FQDN"] = 'deb.devuan.org'; ["Active"] = 'yes'; ["BaseURL"] = 'deb.devuan.org';}
gatherIPs(APT.options.roundRobin.value)
mirrors[APT.options.roundRobin.value].IPs = IP[APT.options.roundRobin.value]
end
local file, e = io.open("results/mirrors.lua", "w+")
if nil == file then C("opening mirrors file - " .. e) else
file:write(APT.dumpTable(mirrors, "", "mirrors") .. "\nreturn mirrors\n")
file:close()
mirrors[APT.options.roundRobin.value] = {["Protocols"] = {["http"] = true; ["https"] = true;};
["FQDN"] = APT.options.roundRobin.value; ["Active"] = 'yes'; ["BaseURL"] = APT.options.roundRobin.value;}
end
return mirrors
end
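--[[ A rough sketch of one entry in the resulting mirrors table; the field values
here are illustrative only.  The per-mirror IPs are no longer gathered here - as the
tail of the script suggests, each per-mirror run records its own IPs and they are
merged back in before results/mirrors.lua is written out at the end.

mirrors["mirror.example.org"] =
{
  ["FQDN"] = "mirror.example.org",
  ["BaseURL"] = "mirror.example.org/devuan",
  ["Active"] = "yes",
  ["Protocols"] = {["http"] = true, ["https"] = true},
  ["IPs"] = {["203.0.113.7"] = "A", ["2001:db8::7"] = "AAAA"},
}
]]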
local postParse = function (host, list)
if APT.options.referenceSite.value == host then
if nil ~= list then
local sem = 'results/NEW_' .. list.out .. '_%s.txt'
for i, n in pairs(releases) do
local f = sem:format(n)
if APT.checkFile(f .. '.tmp') then
os.execute('mv ' .. f .. '.tmp ' .. f)
else
os.execute('touch ' .. f)
end
end
end
end
end
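--[[ postParse() is the reference mirror's "publish" step: each per-release list is
written as a .tmp file and only renamed once complete, so the final file name itself
acts as a completion flag.  For example (the release name is illustrative):

local sem = 'results/NEW_Packages_%s.txt'
local f = sem:format('beowulf')    --> results/NEW_Packages_beowulf.txt

The other mirrors' coroutines wait in doDownloads() until that final name exists
before parsing the corresponding stage.
]]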
local parseDebs = function (host)
for i, n in pairs(releases) do
local inFile = 'results/NEW_debs_' .. n .. '.txt'
local nfile, e = io.open(inFile, "r")
if nil == nfile then W("opening " .. inFile .. " file - " .. e) else
for l in nfile:lines() do
local v, p, sz, sha = l:match(' | (.+) | (pool/.+%.deb) | (%d.+) | (%x.+) | ')
if nil ~= p then
if APT.checkFile('results/' .. host .. "/merged/" .. p) then
local status, fsz = APT.execute('ls -l results/' .. host .. "/merged/" .. p .. ' | cut -d " " -f 5-5')
if APT.testing("Integrity") then
if sz ~= fsz:sub(2, -2) then	-- The sub bit is to slice off the EOLs at each end.
E('Package size mismatch - results/' .. host .. "/merged/" .. p .. ' should be ' .. sz .. ', but is ' .. fsz:sub(2, -2) .. '.', 'http', 'Integrity', host)
else
local status, fsha = APT.execute('sha256sum results/' .. host .. "/merged/" .. p .. ' | cut -d " " -f 1')
if sha ~= fsha:sub(2, -2) then E('Package SHA256 sum mismatch - results/' .. host .. "/merged/" .. p, 'http', 'Integrity', host) end
-- TODO - maybe check the PGP key, though packages are mostly not signed.
end
end
if APT.testing("Updated") then
if sz ~= fsz:sub(2, -2) then
E('Package size mismatch - results/' .. host .. "/merged/" .. p, 'http', 'Updated', host)
end
end
os.execute('rm -f results/' .. host .. "/merged/" .. p)
else
E('Failed to download - results/' .. host .. "/merged/" .. p, 'http', 'Updated', host)
end
end
end
end
end
return nil
end
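--[[ Each line of results/NEW_debs_<release>.txt is a pipe separated record built
from the parsed Packages data (plus the leading "+" from the diff that selected it).
A made-up example and what the match captures:

local l = '+midori | 7.0-1 | pool/DEVUAN/main/m/midori/midori_7.0-1_amd64.deb | 123456 | 0123abcd... | '
local v, p, sz, sha = l:match(' | (.+) | (pool/.+%.deb) | (%d.+) | (%x.+) | ')
-- v   -> "7.0-1"
-- p   -> "pool/DEVUAN/main/m/midori/midori_7.0-1_amd64.deb"
-- sz  -> "123456"
-- sha -> "0123abcd..."
]]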
local parsePackages = function ( host )
local list = { inf = ' Packages ' , parser = parseDebs , out = ' debs ' , files = { } , nextf = ' ' }
for i , n in pairs ( releases ) do
local inFile = ' results/NEW_ ' .. list.inf .. ' _ ' .. n .. ' .txt '
local outFile = ' results/NEW_ ' .. list.out .. ' _ ' .. n .. ' .txt '
if APT.options . referenceSite.value == host then
outFile = outFile .. ' .tmp '
end
local dFile , e = io.open ( inFile , " r " )
if nil == dFile then W ( " opening " .. inFile .. " file - " .. e ) else
for l in dFile : lines ( ) do
postDownload ( host , n , l )
l = ' / ' .. l
local file = l : match ( " .*/([%w%.%+%-_]*)$ " ) -- Get the filename.
local dir = l : sub ( 1 , 0 - ( # file + 1 ) )
if " Packages. " == file : sub ( 1 , 9 ) then
-- TODO - compare the SHA256 sums in pkgmaster's Release for both the packed and unpacked versions.
-- Also note that this might get only a partial download due to maxtime.
if APT.options . referenceSite.value == host then
local Pp , e = io.open ( ' results/ ' .. host .. ' /merged/dists/ ' .. n .. dir .. ' Packages.parsed ' , " w+ " )
if nil == Pp then W ( ' opening results/ ' .. host .. ' /merged/dists/ ' .. n .. dir .. ' Packages.parsed ' .. ' file - ' .. e ) else
local pp = { }
for l in io.lines ( ' results/ ' .. host .. ' /merged/dists/ ' .. n .. dir .. ' Packages ' ) do
if " Package: " == l : sub ( 1 , 9 ) then
if 0 ~= # pp then
for i = 1 , 5 do
if nil == pp [ i ] then print ( host .. " " .. n .. " " .. dir .. " " .. i ) else Pp : write ( pp [ i ] .. " | " ) end
end
Pp : write ( " \n " )
end
pp = { }
pp [ 1 ] = l : sub ( 10 , - 1 )
elseif " Version: " == l : sub ( 1 , 9 ) then
pp [ 2 ] = l : sub ( 10 , - 1 )
elseif " Filename: " == l : sub ( 1 , 10 ) then
pp [ 3 ] = l : sub ( 11 , - 1 )
elseif " Size: " == l : sub ( 1 , 6 ) then
pp [ 4 ] = l : sub ( 7 , - 1 )
elseif " SHA256: " == l : sub ( 1 , 8 ) then
pp [ 5 ] = l : sub ( 9 , - 1 )
end
end
Pp : close ( )
os.execute ( ' sort results/ ' .. host .. ' /merged/dists/ ' .. n .. dir .. ' Packages.parsed >results/ ' .. host .. ' /merged/dists/ ' .. n .. dir .. ' Packages_parsed-sorted ' )
if APT.checkFile ( ' Packages/ ' .. n .. dir .. ' Packages_parsed-sorted ' ) then
os.execute ( ' diff -U 0 Packages/ ' .. n .. dir .. ' Packages_parsed-sorted ' ..
' results/ ' .. APT.options . referenceSite.value .. ' /merged/dists/ ' .. n .. dir .. ' Packages_parsed-sorted ' ..
' | grep -E "^-" | grep -Ev "^ \\ + \\ + \\ +|^---" >>results/OLD_ ' .. list.out .. ' _ ' .. n .. ' .txt ' )
os.execute ( ' diff -U 0 Packages/ ' .. n .. dir .. ' Packages_parsed-sorted ' ..
' results/ ' .. APT.options . referenceSite.value .. ' /merged/dists/ ' .. n .. dir .. ' Packages_parsed-sorted ' ..
' | grep -E "^ \\ +" | grep -Ev "^ \\ + \\ + \\ +|^---" >>results/NEW_ ' .. list.out .. ' _TMP_ ' .. n .. ' .txt ' )
else
C ( " Can't find file Packages/ " .. n .. dir .. " Packages_parsed-sorted " )
end
os.execute ( ' mkdir -p Packages/ ' .. n .. dir )
os.execute ( ' mv -f results/ ' .. APT.options . referenceSite.value .. ' /merged/dists/ ' .. n .. dir .. ' Packages_parsed-sorted Packages/ ' .. n .. dir .. ' Packages_parsed-sorted ' )
end
else
end
os.execute ( ' rm -fr results/ ' .. host .. ' /merged/dists/ ' .. n .. dir .. ' 2>/dev/null ' )
end
end
if APT.checkFile ( ' results/NEW_ ' .. list.out .. ' _TMP_ ' .. n .. ' .txt ' ) then
-- Sort by size.
os.execute ( ' sort -b -k 9,9 -n results/NEW_ ' .. list.out .. ' _TMP_ ' .. n .. ' .txt >results/NEW_ ' .. list.out .. ' _ ' .. n .. ' .sorted.txt ' )
os.execute ( ' grep -s " | pool/DEBIAN/" results/NEW_ ' .. list.out .. ' _ ' .. n .. ' .sorted.txt 2>/dev/null | head -n 1 > ' .. outFile )
os.execute ( ' grep -s " | pool/DEBIAN-SECURITY/" results/NEW_ ' .. list.out .. ' _ ' .. n .. ' .sorted.txt 2>/dev/null | head -n 1 >> ' .. outFile )
os.execute ( ' grep -s " | pool/DEVUAN/" results/NEW_ ' .. list.out .. ' _ ' .. n .. ' .sorted.txt 2>/dev/null | head -n 1 >> ' .. outFile )
os.execute ( ' rm -f results/NEW_ ' .. list.out .. ' _TMP_ ' .. n .. ' .txt ' )
end
end
local nfile , e = io.open ( outFile , " r " )
if nil ~= nfile then
-- for l in nfile:lines() do
local l = nfile : read ( ' *l ' )
if nil ~= l then
local p = l : match ( ' (pool/.*%.deb) ' )
if nil ~= p then
table.insert ( list.files , p )
end
end
-- end
end
end
postParse ( host , list )
return list
end
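--[[ parsePackages() flattens each "Package:" stanza of a Packages file down to one
pipe separated line - Package | Version | Filename | Size | SHA256 - so the result
can be sorted and diffed against the copy kept in Packages/ from the previous run.
A made-up example:

Package: midori
Version: 7.0-1
Filename: pool/DEVUAN/main/m/midori/midori_7.0-1_amd64.deb
Size: 123456
SHA256: 0123abcd...

becomes the single line

midori | 7.0-1 | pool/DEVUAN/main/m/midori/midori_7.0-1_amd64.deb | 123456 | 0123abcd... | 
]]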
local parseRelease = function (host)
local list = {inf = 'Release', parser = parsePackages, out = 'Packages', files = {}, nextf = 'debs'}
for i, n in pairs(releases) do
for l, o in pairs(releaseFiles) do
if repoExists(i .. o) then
postDownload(host, n, o)
if (".gpg" == o:sub(-4, -1)) and (APT.checkFile('results/' .. host .. '/merged/dists/' .. n .. '/' .. o)) then
if APT.testing("Integrity") then
local status, out = APT.execute("gpgv --keyring /usr/share/keyrings/devuan-keyring.gpg results/" .. host .. "/merged/dists/" .. n .. '/' .. o ..
" results/" .. host .. "/merged/dists/" .. n .. '/' .. o:sub(1, -5) .. " 2>/dev/null")
if "0" ~= status then E("GPG check failed - " .. host .. "/merged/dists/" .. n .. '/' .. o, "http", "Integrity", host) end
-- TODO - should check the PGP sig of InRelease as well.
end
os.execute('rm results/' .. host .. '/merged/dists/' .. n .. '/' .. o)
end
end
end
if APT.checkFile('results/' .. host .. '/merged/dists/' .. n .. '/Release') then
os.execute('sort -k 3 results/' .. host .. '/merged/dists/' .. n .. '/Release >results/' .. host .. '/merged/dists/' .. n .. '/Release.SORTED')
local outFile = 'results/NEW_' .. list.out .. '_' .. n .. '.txt'
if APT.checkFile('results_old/' .. APT.options.referenceSite.value .. '/merged/dists/' .. n .. '/Release.SORTED') then
if APT.options.referenceSite.value == host then
outFile = outFile .. '.tmp'
os.execute('diff -U 0 results_old/' .. APT.options.referenceSite.value .. '/merged/dists/' .. n .. '/Release.SORTED ' ..
'results/' .. APT.options.referenceSite.value .. '/merged/dists/' .. n .. '/Release.SORTED ' ..
'| grep -v "@@" | grep "^+" | grep "Packages.xz$" | cut -c 77- >' .. outFile)
-- TODO - Maybe check the date in Release, though since they are updated daily, is there any point?  Perhaps it's for checking that amprolla got run?
--   Also check if that date is in the future; apt recently got a check for that, though not sure why.
os.execute('rm -f results/' .. host .. '/merged/dists/' .. n .. '/Release 2>/dev/null; ')
else
-- TODO - compare to the pkgmaster copy.
end
-- TODO - if it's not Integrity and not reference, then just do a HEAD check and compare file times?
if APT.testing("Integrity") or (APT.options.referenceSite.value == host) then
local dfile, e = io.open(outFile, "r")
if nil == dfile then W("opening " .. outFile .. " file - " .. e) else
for l in dfile:lines() do
table.insert(list.files, 'dists/' .. n .. '/' .. l)
end
end
end
end
end
end
postParse(host, list)
return list
end
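--[[ parseRelease() sorts the Release file on its third (file name) column and diffs
it against the previous run's copy from the reference site, keeping only freshly
changed Packages.xz entries.  Assuming the usual fixed width layout, a checksum line
looks roughly like (hash shortened here):

 0123abcd...cdef           294228 main/binary-amd64/Packages.xz

The "cut -c 77-" then drops the checksum and size columns, leaving just
"main/binary-amd64/Packages.xz", which is queued for download relative to
dists/<release>/.
]]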
local parseStart = function (host)
local list = {inf = '', parser = parseRelease, out = 'Release', files = {}, nextf = 'Packages'}
for i, n in pairs(releases) do
local outFile = 'results/NEW_' .. list.out .. '_' .. n .. '.txt'
for l, o in pairs(releaseFiles) do
if repoExists(n .. o) then
if APT.options.referenceSite.value == host then
local dfile, e = io.open(outFile .. '.tmp', "a+")
if nil == dfile then W("opening " .. outFile .. ".tmp file - " .. e) else
dfile:write(o .. '\n')
end
end
table.insert(list.files, 'dists/' .. n .. '/' .. o)
end
end
end
postParse(host, list)
return list
end
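--[[ The parse*() functions form the download/parse pipeline that doDownloads()
walks, each stage returning a list table that names its output stage, the files to
fetch, and the parser for the next stage:

parseStart    -> { out = 'Release',  files = dists/<release>/Release*,       parser = parseRelease  }
parseRelease  -> { out = 'Packages', files = freshly changed Packages.xz,    parser = parsePackages }
parsePackages -> { out = 'debs',     files = a few sample .deb pool paths,   parser = parseDebs     }
parseDebs     -> nil  (ends the chain)

Each iteration downloads list.files, waits for the curl batch to finish, then calls
list.parser() to build the next stage's list.
]]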
local doDownloads = function (host, path, list)
while nil ~= list do
if 0 ~= #(list.files) then
for j, f in pairs(list.files) do
downloads(host, path, list.out, nil, f)
end
downloads(host, path, list.out, nil, '')
--[[ I've seen flock & curl act oddly.  Perhaps flock didn't have time to start up?
/var/www/html/apt-panopticon/apt-panopticon/results_2019-12-22-15-00
Mon Dec 23 01:02:54 2019 DEBUG: forking
ionice -c3 nice -n 19 flock -n results/curl-debs-pkgmaster.devuan.org.log curl --connect-timeout 5 --create-dirs -f -L --fail-early --max-time 300 --retry 3 -R -v -z 'results/stamp.old' --stderr results/curl-debs-pkgmaster.devuan.org.log -K results/curl-debs-pkgmaster.devuan.org.files
Mon Dec 23 01:02:54 2019 DEBUG: 0 flock -n results/curl-debs-pkgmaster.devuan.org.log commands still running.
Mon Dec 23 01:02:54 2019 DEBUG: *>* Resumed coroutine NO LONGER waiting on - 0 < APT.checkExes(flock -n results/curl-debs-pkgmaster.devuan.org.log
Mon Dec 23 01:02:54 2019 DEBUG: *** Doing list.parser() for debs
Mon Dec 23 01:02:54 2019 ERROR (http Updated pkgmaster.devuan.org): Failed to download - results/pkgmaster.devuan.org/merged/pool/DEBIAN/main/a/aptly/aptly_1.3.0+ds1-4_amd64.deb
drwxr-x--- 2 www-data www-data 4096 2019-12-23 01:02:57.000000000 +1000 aptly
-rw-r--r-- 1 www-data www-data 7129 2019-12-23 01:03:54.000000000 +1000 curl-debs-pkgmaster.devuan.org.log
]]
os.execute('sleep 2')
while 0 < APT.checkExes(downloadLock .. list.out .. "-" .. host .. ".log") do
D('*<* About to yield coroutine while waiting on - 0 < APT.checkExes(' .. downloadLock .. list.out .. '-' .. host .. '.log')
coroutine.yield()
D('*>* Resumed coroutine while waiting on - 0 < APT.checkExes(' .. downloadLock .. list.out .. '-' .. host .. '.log')
end
D('*>* Resumed coroutine NO LONGER waiting on - 0 < APT.checkExes(' .. downloadLock .. list.out .. '-' .. host .. '.log')
local min, max, spd = 999999999999, 0
if APT.checkFile("results/curl-" .. list.out .. "-" .. host .. ".log") then
for l in io.lines("results/curl-" .. list.out .. "-" .. host .. ".log") do
local speed, crrnt = l:match('^%c *%d+ +%d+k? +%d+ +%d+k? +%d+ +%d+ +(%d+k?) +%d+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +(%d+k?)')
if nil ~= speed then
if 'k' == speed:sub(-1, -1) then speed = speed:sub(1, -2) .. '000' end
if 'k' == crrnt:sub(-1, -1) then crrnt = crrnt:sub(1, -2) .. '000' end
speed = tonumber(speed)
crrnt = tonumber(crrnt)
if speed < min and speed ~= 0 then min = speed end
if speed > max then max = speed end
if crrnt < min and crrnt ~= 0 then min = crrnt end
if crrnt > max then max = crrnt end
end
if l:find('timed out') ~= nil then
E("TIMEOUT " .. timeouts + 1 .. ', details in curl-' .. list.out .. '-' .. host .. '.log', 'http', '', host)
timeouts = timeouts + 1
APT.results["timeout"] = true
end
end
end
APT.results["speed"] = {min = min, max = max}
end
if (APT.options.referenceSite.value ~= host) and ('' ~= list.nextf) then
local sem = 'results/NEW_' .. list.nextf .. '_%s.txt'
for i, n in pairs(releases) do
local f = sem:format(n)
while not APT.checkFile(f) do
D('*<* About to yield coroutine while waiting on - not APT.checkFile(' .. f .. ')')
coroutine.yield()
D('*>* Resumed coroutine while waiting on - not APT.checkFile(' .. f .. ')')
end
end
end
D('*** Doing list.parser() for ' .. list.out)
list = list.parser(host)
if APT.options.timeouts.value <= (totalTimeouts) then break end
end
D('*<<* About to end coroutine.')
cor = nil
end
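--[[ doDownloads() scrapes the verbose curl log for the progress meter lines to
estimate the mirror's transfer speed and to spot timeouts.  In the log each such
line starts with a carriage return, hence the leading %c in the pattern.  A typical
progress line (values here are illustrative) and the two captured fields:

100  3979k  100  3979k    0     0   633k      0  0:00:06  0:00:06 --:--:--  634k

speed -> "633k" (average download speed) and crrnt -> "634k" (current speed); a
trailing "k" is rewritten as "000" before the values are compared against min/max.
]]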
if 0 < #arg then
if "/" == arg[1]:sub(-1, -1) then
W("slash at end of path! " .. arg[1])
@@ -673,6 +885,8 @@ if 0 < #arg then
end
end
if APT.origin then APT.results["IPs"] = gatherIPs(pu.host) end
if nil ~= arg[2] then
APT.logFile, e = io.open("results/LOG_" .. pu.host .. "_" .. arg[2] .. ".html", "a+")
else
@@ -681,165 +895,57 @@ if 0 < #arg then
if nil == APT.logFile then C("opening log file - " .. e); return end
APT.logPre()
I("Starting tests for " .. arg[1] .. " with these tests - " .. table.concat(APT.options.tests.value, ", "))
APT.mirrors = loadfile("results/mirrors.lua")()
if nil ~= arg[2] then I("Using IP " .. arg[2]); ip = arg[2] end
if nil ~= arg[3] then I("Using file " .. arg[3]); end
APT.results = APT.padResults(APT.results)
if APT.origin then
local file = arg[3]
if nil == file then file = '' end
local path = pu.path
if nil == path then path = '' end
local ips = APT.results [ " IPs " ]
for k , v in pairs ( ips ) do
if " table " == type ( v ) then
for k1 , v1 in pairs ( v ) do
if v1 == " A " then
if APT.testing ( " IPv4 " ) then APT.fork ( " ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. pu.host .. path .. " " .. k1 .. " " .. file ) end
elseif v1 == " AAAA " then
if APT.testing ( " IPv6 " ) then APT.fork ( " ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. pu.host .. path .. " " .. k1 .. " " .. file ) end
end
end
else
if v == " A " then
if APT.testing ( " IPv4 " ) then APT.fork ( " ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. pu.host .. path .. " " .. k .. " " .. file ) end
elseif v == " AAAA " then
if APT.testing ( " IPv6 " ) then APT.fork ( " ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. pu.host .. path .. " " .. k .. " " .. file ) end
end
end
end
if APT.testing("Integrity") or APT.testing("Updated") then
if APT.origin and (APT.options.roundRobin.value ~= pu.host) then
I("Starting file downloads for " .. pu.host)
downloads(pu.host, pu.path, 'Release')
D('*>* About to create coroutine.')
cor = coroutine.create(doDownloads)
local ok, message = coroutine.resume(cor, pu.host, pu.path, parseStart(pu.host))
if not ok then cor = nil; print(message) end
end
end
checkFiles(pu.host, pu.host, pu.path);
else
checkHost(pu.host, pu.host, pu.path, arg[2], arg[3])
end
if APT.testing ( " Integrity " ) or APT.testing ( " Updated " ) then
if 4 > ( totalTimeouts ) then
if APT.origin and ( APT.options . roundRobin.value ~= pu.host ) then
while 0 < APT.checkExes ( downloadLock .. " Release- " .. pu.host .. " .log " ) do os.execute ( " sleep 10 " ) end
local fcount = 0
for i , n in pairs ( releases ) do
for l , o in pairs ( releaseFiles ) do
if repoExists ( i .. o ) then
postDownload ( pu.host , n , o )
end
end
if APT.checkFile ( ' results/ ' .. pu.host .. ' /merged/dists/ ' .. n .. ' /Release ' ) then
os.execute ( ' sort -k 3 results/ ' .. pu.host .. ' /merged/dists/ ' .. n .. ' /Release >results/ ' .. pu.host .. ' /merged/dists/ ' .. n .. ' /Release.SORTED ' )
if APT.checkFile ( ' results_old/pkgmaster.devuan.org/merged/dists/ ' .. n .. ' /Release.SORTED ' ) then
if APT.options . referenceSite.value == pu.host then
os.execute ( ' diff -U 0 results_old/pkgmaster.devuan.org/merged/dists/ ' .. n .. ' /Release.SORTED ' ..
' results/pkgmaster.devuan.org/merged/dists/ ' .. n .. ' /Release.SORTED ' ..
' | grep -v "@@" | grep "^+" | grep "Packages.xz$" | cut -c 77- >results/NEW_Release_ ' .. n .. ' .tmp ' )
-- TODO - Maybe check the date in Release, though since they are updated daily, is there any point? Perhaps it's for checking amprolla got run?
os.execute ( ' rm -f results/ ' .. pu.host .. ' /merged/dists/ ' .. n .. ' /Release 2>/dev/null; ' ..
' mv results/NEW_Release_ ' .. n .. ' .tmp results/NEW_Release_ ' .. n .. ' .txt ' )
else
-- TODO - compare to the pkgmaster copy.
while not APT.checkFile ( ' results/NEW_Release_ ' .. n .. ' .txt ' ) do os.execute ( ' sleep 10 ' ) end
end
local dfile , e = io.open ( ' results/NEW_Release_ ' .. n .. ' .txt ' , " r " )
if nil == dfile then W ( " opening results/NEW_Release_ " .. n .. " file - " .. e ) else
local diff = dfile : read ( " *a " )
if " " ~= diff then
downloads ( pu.host , pu.path , ' Packages ' , n , diff )
fcount = fcount + 1
end
end
end
end
end
if 0 ~= fcount then
downloads ( pu.host , pu.path , ' Packages ' , " " , " " )
while 0 < APT.checkExes ( downloadLock .. " Packages- " .. pu.host .. " .log " ) do os.execute ( " sleep 10 " ) end
for i , n in pairs ( releases ) do
local dfile , e = io.open ( ' results/NEW_Release_ ' .. n .. ' .txt ' , " r " )
if nil == dfile then W ( " opening results/NEW_Release_ " .. n .. " .txt file - " .. e ) else
local diff = dfile : read ( " *a " )
for l in diff : gmatch ( " \n *([^ \n ]+) \n * " ) do
postDownload ( pu.host , n , " / " .. l )
end
end
if APT.options . referenceSite.value == pu.host then
-- In case it wasn't dealt with already.
os.execute ( ' touch results/NEW_Packages_ ' .. n .. ' .test.txt ' )
end
end
fcount = 0
for i , n in pairs ( releases ) do
local nfile , e = io.open ( ' results/NEW_Packages_ ' .. n .. ' .test.txt ' , " r " )
if nil == nfile then W ( " opening results/NEW_Packages_ " .. n .. " .test.txt file - " .. e ) else
for l in nfile : lines ( ) do
local p = l : match ( ' (pool/.*%.deb) ' )
if nil ~= p then
downloads ( pu.host , pu.path , ' package ' , nil , p )
fcount = fcount + 1
end
end
end
end
if 0 ~= fcount then
downloads ( pu.host , pu.path , ' package ' , nil , " " )
while 0 < APT.checkExes ( downloadLock .. " package- " .. pu.host .. " .log " ) do os.execute ( " sleep 10 " ) end
for i , n in pairs ( releases ) do
local nfile , e = io.open ( ' results/NEW_Packages_ ' .. n .. ' .test.txt ' , " r " )
if nil == nfile then W ( " opening results/NEW_Packages_ " .. n .. " .test.txt file - " .. e ) else
for l in nfile : lines ( ) do
local v , p , sz , sha = l : match ( ' | (.+) | (pool/.+%.deb) | (%d.+) | (%x.+) | ' )
if nil ~= p then
if APT.checkFile ( ' results/ ' .. pu.host .. " /merged/ " .. p ) then
local status , fsz = APT.execute ( ' ls -l results/ ' .. pu.host .. " /merged/ " .. p .. ' | cut -d " " -f 5-5 ' )
if APT.testing ( " Integrity " ) then
if sz ~= fsz : sub ( 2 , - 2 ) then -- The sub bit is to slice off the EOLs at each end.
E ( ' Package size mismatch - results/ ' .. pu.host .. " /merged/ " .. p .. ' should be ' .. sz .. ' , but is ' .. fsz : sub ( 2 , - 2 ) .. ' . ' , ' http ' , ' Integrity ' , pu.host )
else
local status , fsha = APT.execute ( ' sha256sum results/ ' .. pu.host .. " /merged/ " .. p .. ' | cut -d " " -f 1 ' )
if sha ~= fsha : sub ( 2 , - 2 ) then E ( ' Package SHA256 sum mismatch - results/ ' .. pu.host .. " /merged/ " .. p , ' http ' , ' Integrity ' , pu.host ) end
-- TODO - maybe check the PGP key, though packages are mostly not signed.
end
end
if APT.testing ( " Updated " ) then
if sz ~= fsz : sub ( 2 , - 2 ) then
E ( ' Package size mismatch - results/ ' .. pu.host .. " /merged/ " .. p , ' http ' , ' Updated ' , pu.host )
end
end
else
E ( ' Failed to download - results/ ' .. pu.host .. " /merged/ " .. p , ' http ' , ' Updated ' , pu.host )
end
end
end
end
end
end
else
for i , n in pairs ( releases ) do
os.execute ( ' touch results/NEW_Packages_ ' .. n .. ' .test.txt ' )
end
end
end
while nil ~= cor do
os.execute('sleep 10')
D('*>* About to resume coroutine before writing results.')
APT.results [ " timeout " ] = false
else
APT.results [ " timeout " ] = true
end
end
local min , max , spd = 999999999999 , 0
for i , mt in pairs ( { ' Release ' , ' Packages ' , ' package ' } ) do
if APT.checkFile ( " results/curl- " .. mt .. " - " .. pu.host .. " .log " ) then
for l in io.lines ( " results/curl- " .. mt .. " - " .. pu.host .. " .log " ) do
local speed , crrnt = l : match ( ' ^%c *%d+ +%d+k? +%d+ +%d+k? +%d+ +%d+ +(%d+k?) +%d+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +(%d+k?) ' )
if nil ~= speed then
if ' k ' == speed : sub ( - 1 , - 1 ) then speed = speed : sub ( 1 , - 2 ) .. ' 000 ' end
if ' k ' == crrnt : sub ( - 1 , - 1 ) then crrnt = crrnt : sub ( 1 , - 2 ) .. ' 000 ' end
speed = tonumber ( speed )
crrnt = tonumber ( crrnt )
if speed < min and speed ~= 0 then min = speed end
if speed > max then max = speed end
if crrnt < min and crrnt ~= 0 then min = crrnt end
if crrnt > max then max = crrnt end
end
if l : find ( ' timed out ' ) ~= nil then
E ( " TIMEOUT " .. timeouts + 1 .. ' , details in curl- ' .. mt .. ' - ' .. pu.host .. ' .log ' , ' http ' , ' ' , pu.host )
timeouts = timeouts + 1
APT.results [ " timeout " ] = true
end
end
end
local ok, message = coroutine.resume(cor)
if not ok then cor = nil; print(message); break end
end
APT.results["speed"] = {min = min, max = max}
local f = pu.host
if "" ~= ip then f = f .. "_" .. ip end
@@ -891,13 +997,23 @@ else
local pu = url.parse("http://" .. m.BaseURL)
if APT.options.referenceSite.value ~= pu.host then
checkHost(m.BaseURL)
APT.checkExes("apt-panopticon.lua " .. sendArgs)
if APT.testing("Integrity") or APT.testing("Updated") then APT.checkExes(downloadLock) end
end
end
while 1 <= APT.checkExes("apt-panopticon.lua " .. sendArgs) do os.execute("sleep 10") end
os.execute('rm -f results/*.check; rm STATUS_* 2>/dev/null')
for k, v in pairs(APT.mirrors) do
local f = 'results/' .. k .. '.lua'
if APT.checkFile(f) then
results = loadfile(f)()
APT.mirrors[k]['IPs'] = results.IPs
end
end
local file, e = io.open("results/mirrors.lua", "w+")
if nil == file then C("opening mirrors file - " .. e) else
file:write(APT.dumpTable(APT.mirrors, "", "mirrors") .. "\nreturn mirrors\n")
file:close()
end
-- Create the reports.
for n, r in pairs(APT.options.reports.value) do