#!/usr/bin/env luajit
local now = os.time ( )
local APT = require ' apt-panopticommon '
local D = APT.D
local I = APT.I
local T = APT.T
local W = APT.W
local E = APT.E
local C = APT.C
-- Parse command line; sendArgs is the subset forwarded to forked sub-checks.
local arg , sendArgs = APT.parseArgs ( { ... } )
APT.html = true
-- Default scheme used by url.parse() when a URL or bare host has none.
local defaultURL = { scheme = " http " }
-- Devuan release code names whose index files get tested on every mirror.
local releases = { " jessie " , " ascii " , " beowulf " , " ceres " }
-- Per-release files to HEAD test / download; most are commented out to save bandwidth.
local releaseFiles =
{
-- Release file.
" Release " , -- 3.7 MB
" Release.gpg " , --
-- "InRelease", -- 3.7 MB
-- "main/binary-all/Packages.xz", -- 2.6 GB for all that changed recently.
-- Contents files. -- 3.3 GB
-- "main/Contents-all.xz",
-- "main/Contents-amd64.xz",
-- "main/Contents-arm64.xz",
-- "-security/main/Contents-all.xz",
-- "-security/main/Contents-amd64.xz",
-- "-security/main/Contents-arm64.xz",
}
-- Repo names that are expected to be missing; repoExists() skips these.
local notExist =
{
" ceres-security " -- This will never exist, it's our code name for the testing suite.
}
-- Known-good Debian packages used as redirect reference targets.
local referenceDebs =
{
-- Debian package.
" merged/pool/DEBIAN/main/d/debian-keyring/debian-keyring_2019.02.25_all.deb " ,
-- Debian security package. NOTE this one should always be redirected?
" merged/pool/DEBIAN-SECURITY/updates/main/a/apt/apt-transport-https_1.4.9_amd64.deb " ,
}
-- Known-good Devuan package; must NOT be redirected off the Devuan pool.
local referenceDevs =
{
-- Devuan package. NOTE this one should not get redirected, but that's more a warning than an error.
" merged/pool/DEVUAN/main/d/devuan-keyring/devuan-keyring_2017.10.03_all.deb " ,
}
-- curl exit codes mapped to the human readable descriptions from the curl
-- man page, used when reporting a failed curl invocation in checkHEAD().
-- Gaps in the numbering are codes curl itself does not use.
local curlStatus =
{
[ 1 ] = " Unsupported protocol. This build of curl has no support for this protocol. " ,
[ 2 ] = " Failed to initialize. " ,
[ 3 ] = " URL malformed. The syntax was not correct. " ,
[ 4 ] = " A feature or option that was needed to perform the desired request was not enabled or was explicitly disabled at build-time. To make curl able to do this, you probably need another build of libcurl! " ,
[ 5 ] = " Couldn't resolve proxy. The given proxy host could not be resolved. " ,
[ 6 ] = " Couldn't resolve host. The given remote host was not resolved. " ,
[ 7 ] = " Failed to connect to host. " ,
[ 8 ] = " Weird server reply. The server sent data curl couldn't parse. " ,
[ 9 ] = " FTP access denied. The server denied login or denied access to the particular resource or directory you wanted to reach. Most often you tried to change to a directory that doesn't exist on the server. " ,
[ 10 ] = " While waiting for the server to connect back when an active FTP session is used, an error code was sent over the control connection or similar. " ,
[ 11 ] = " FTP weird PASS reply. Curl couldn't parse the reply sent to the PASS request. " ,
[ 12 ] = " During an active FTP session while waiting for the server to connect, the CURLOPT_ACCEPTTIMEOUT_MS (or the internal default) timeout expired. " ,
[ 13 ] = " FTP weird PASV reply, Curl couldn't parse the reply sent to the PASV request. " ,
[ 14 ] = " FTP weird 227 format. Curl couldn't parse the 227-line the server sent. " ,
[ 15 ] = " FTP can't get host. Couldn't resolve the host IP we got in the 227-line. " ,
[ 16 ] = " A problem was detected in the HTTP2 framing layer. This is somewhat generic and can be one out of several problems, see the error buffer for details. " ,
[ 17 ] = " FTP couldn't set binary. Couldn't change transfer method to binary. " ,
[ 18 ] = " Partial file. Only a part of the file was transferred. " ,
[ 19 ] = " FTP couldn't download/access the given file, the RETR (or similar) command failed. " ,
[ 21 ] = " FTP quote error. A quote command returned error from the server. " ,
[ 22 ] = " HTTP page not retrieved. The requested url was not found or returned another error with the HTTP error code being 400 or above. This return code only appears if -f, --fail is used. " ,
[ 23 ] = " Write error. Curl couldn't write data to a local filesystem or similar. " ,
[ 25 ] = " FTP couldn't STOR file. The server denied the STOR operation, used for FTP uploading. " ,
[ 26 ] = " Read error. Various reading problems. " ,
[ 27 ] = " Out of memory. A memory allocation request failed. " ,
[ 28 ] = " Operation timeout. The specified time-out period was reached according to the conditions. " ,
[ 30 ] = " FTP PORT failed. The PORT command failed. Not all FTP servers support the PORT command, try doing a transfer using PASV instead! " ,
[ 31 ] = " FTP couldn't use REST. The REST command failed. This command is used for resumed FTP transfers. " ,
[ 33 ] = " HTTP range error. The range \" command \" didn't work. " ,
[ 34 ] = " HTTP post error. Internal post-request generation error. " ,
[ 35 ] = " SSL connect error. The SSL handshaking failed. " ,
[ 36 ] = " FTP bad download resume. Couldn't continue an earlier aborted download. " ,
[ 37 ] = " FILE couldn't read file. Failed to open the file. Permissions? " ,
[ 38 ] = " LDAP cannot bind. LDAP bind operation failed. " ,
[ 39 ] = " LDAP search failed. " ,
[ 41 ] = " Function not found. A required LDAP function was not found. " ,
[ 42 ] = " Aborted by callback. An application told curl to abort the operation. " ,
[ 43 ] = " Internal error. A function was called with a bad parameter. " ,
[ 45 ] = " Interface error. A specified outgoing interface could not be used. " ,
[ 47 ] = " Too many redirects. When following redirects, curl hit the maximum amount. " ,
[ 48 ] = " Unknown option specified to libcurl. This indicates that you passed a weird option to curl that was passed on to libcurl and rejected. Read up in the manual! " ,
[ 49 ] = " Malformed telnet option. " ,
[ 51 ] = " The peer's SSL certificate or SSH MD5 fingerprint was not OK. " ,
[ 52 ] = " The server didn't reply anything, which here is considered an error. " ,
[ 53 ] = " SSL crypto engine not found. " ,
[ 54 ] = " Cannot set SSL crypto engine as default. " ,
[ 55 ] = " Failed sending network data. " ,
[ 56 ] = " Failure in receiving network data. " ,
[ 58 ] = " Problem with the local certificate. " ,
[ 59 ] = " Couldn't use specified SSL cipher. " ,
[ 60 ] = " Peer certificate cannot be authenticated with known CA certificates. " ,
[ 61 ] = " Unrecognized transfer encoding. " ,
[ 62 ] = " Invalid LDAP URL. " ,
[ 63 ] = " Maximum file size exceeded. " ,
[ 64 ] = " Requested FTP SSL level failed. " ,
[ 65 ] = " Sending the data requires a rewind that failed. " ,
[ 66 ] = " Failed to initialise SSL Engine. " ,
[ 67 ] = " The user name, password, or similar was not accepted and curl failed to log in. " ,
[ 68 ] = " File not found on TFTP server. " ,
[ 69 ] = " Permission problem on TFTP server. " ,
[ 70 ] = " Out of disk space on TFTP server. " ,
[ 71 ] = " Illegal TFTP operation. " ,
[ 72 ] = " Unknown TFTP transfer ID. " ,
[ 73 ] = " File already exists (TFTP). " ,
[ 74 ] = " No such user (TFTP). " ,
[ 75 ] = " Character conversion failed. " ,
[ 76 ] = " Character conversion functions required. " ,
[ 77 ] = " Problem with reading the SSL CA cert (path? access rights?). " ,
[ 78 ] = " The resource referenced in the URL does not exist. " ,
[ 79 ] = " An unspecified error occurred during the SSH session. " ,
[ 80 ] = " Failed to shut down the SSL connection. " ,
[ 81 ] = " Socket is not ready for send/recv wait till it's ready and try again. This return code is only returned from curl_easy_recv and curl_easy_send. " ,
[ 82 ] = " Could not load CRL file, missing or wrong format (added in 7.19.0). " ,
[ 83 ] = " Issuer check failed (added in 7.19.0). " ,
[ 84 ] = " The FTP PRET command failed " ,
[ 85 ] = " RTSP: mismatch of CSeq numbers " ,
[ 86 ] = " RTSP: mismatch of Session Identifiers " ,
[ 87 ] = " unable to parse FTP file list " ,
[ 88 ] = " FTP chunk callback reported error " ,
[ 89 ] = " No connection available, the session will be queued " ,
[ 90 ] = " SSL public key does not matched pinned public key " ,
[ 91 ] = " Status returned failure when asked with CURLOPT_SSL_VERIFYSTATUS. " ,
[ 92 ] = " Stream error in the HTTP/2 framing layer. " ,
[ 93 ] = " An API function was called from inside a callback. " ,
[ 94 ] = " An authentication function returned an error. " ,
[ 95 ] = " A problem was detected in the HTTP/3 layer. This is somewhat generic and can be one out of several problems, see the error buffer for details. " ,
}
-- LuaSocket modules used for FTP listings, the mirror_list download and URL parsing.
local socket = require ' socket '
local ftp = require ' socket.ftp '
local http = require ' socket.http '
local url = require ' socket.url '
local ip = " "
local cor = nil          -- Coroutine resumed between checks (see checkTimeouts).
local Updating = false
-- flock prefix so only one download batch per results/curl-* lock file runs at once.
local downloadLock = " flock -n results/curl- "
-- HTML arrow glyph used in log messages between "from" and "to" URLs.
local arw = ' <font color="magenta"><b>-></b></font> '
-- True when the repo named r is one we expect to exist on mirrors.
-- The leading letters-and-dashes prefix of r is compared against the
-- notExist blacklist; anything not blacklisted is assumed to exist.
local repoExists = function ( r )
  local name = r : match ( " ([%a-]*) " )
  if nil == name then
    return false
  end
  for _ , banned in pairs ( notExist ) do
    if banned == name then
      return false
    end
  end
  return true
end
-- Cache of DNS answers, keyed by host name; each entry maps answer value -> record type.
local IP = { }
-- Look up all A / AAAA / CNAME / SRV records for host (via dig), caching into IP.
-- CNAME targets are recursed into; for the DNS round robin name, the mirrors
-- flagged DNSRR in mirror_list.txt are gathered too. Returns IP[host].
-- NOTE: deliberately global so it can recurse and be reached before this point.
gatherIPs = function ( host )
if nil == IP [ host ] then
local IPs
-- Takes about 30 seconds to look up the lot.
-- I tested using dig's -f option, it didn't seem much faster.
-- The sort -r assumes that deb.devuan.org is the first alphabetically.
local dig = io.popen ( ' dig +keepopen +noall +nottlid +answer ' .. host .. ' A ' .. host .. ' AAAA ' .. host .. ' CNAME ' .. host .. ' SRV | sort -r | uniq ' )
repeat
IPs = dig : read ( " *l " )
if nil ~= IPs then
-- Each answer line parses to: name (k), record type (t), value (v).
for k , t , v in IPs : gmatch ( " ([%w_%-%.]*)%.%s*IN%s*(%a*)%s*(.*) " ) do
if " . " == v : sub ( - 1 , - 1 ) then v = v : sub ( 1 , - 2 ) end
if nil == IP [ k ] then IP [ k ] = { } end
IP [ k ] [ v ] = t
D ( " DNS record " .. host .. " == " .. k .. " type " .. t .. " -> " .. v )
if t == " CNAME " then
gatherIPs ( v )
IP [ k ] [ v ] = IP [ v ]
-- BUGFIX: was "v == SRV" - the record TYPE is in t, not the value.
elseif t == " SRV " then
print ( " SVR record found, now what do we do? " )
end
end
end
until nil == IPs
end
-- If this is the DNS-RR domain name, gather the IPs for the mirrors that mirror_list.txt says should be in it.
if host == APT.options . roundRobin.value then
for k , m in pairs ( APT.mirrors ) do
if " yes " == m.DNSRR then
gatherIPs ( m.FQDN )
IP [ host ] [ m.FQDN ] = IP [ m.FQDN ]
end
end
end
return IP [ host ]
end
-- Returns FTP directory listing (NLST) for URL u as a single string,
-- or nil plus an error message from LuaSocket on failure.
local nlst = function ( u )
-- BUGFIX: ltn12 was never required anywhere in this file, and LuaSocket does
-- not make it a global, so ltn12.sink.table() would crash. Pull it in here.
local ltn12 = require 'ltn12'
local t = { }
local p = url.parse ( u )
p.command = " nlst "
p.sink = ltn12.sink . table ( t )
local r , e = ftp.get ( p )
return r and table.concat ( t ) , e
end
-- Failure budgets shared across one host's checks (reset in checkFiles / checkTimeouts).
local timeouts = 0 ;
local totalTimeouts = 0
-- Log indent unit.
local spcd = ' '
-- HEAD test one URL against a mirror host, via curl.
-- When the URL's host differs from the mirror, curl's --connect-to (plus a
-- Host: header for plain http) is used so the mirror serves the foreign name.
-- Redirects are followed manually by recursing with r + 1; transient curl
-- failures recurse with retry + 1. Results are reported through I/W/E/T.
-- host   - mirror FQDN under test.
-- URL    - full (or scheme-defaulted) URL to HEAD.
-- r      - redirect depth so far, nil means 0.
-- retry  - retry count so far, nil means 0.
-- sanity - true when this is a URLSanity (tripled slash) pass.
-- NOTE: deliberately global, called from several later functions.
checkHEAD = function ( host , URL , r , retry , sanity )
if nil == r then r = 0 end
if nil == retry then retry = 0 end
if true == sanity then sanity = ' URLSanity ' else sanity = ' ' end
local check = " HEAD testing file "
local PU = url.parse ( URL , defaultURL )
local pu = url.parse ( PU.scheme .. " :// " .. host , defaultURL )
-- Per-URL log file name, slashes flattened to underscores.
local fname = host .. " _ " .. PU.host .. " _ " .. PU.path : gsub ( " / " , " _ " ) .. " .log.txt "
local hdr = " "
-- NOTE: this local shadows the file-level IP cache table; here it holds curl's --connect-to argument.
local IP = " "
if pu.host ~= PU.host then
if " http " == PU.scheme then
hdr = ' -H "Host: ' .. host .. ' " '
end
IP = ' --connect-to " ' .. pu.host .. ' :: ' .. PU.host .. ' :" '
fname = host .. " _ " .. pu.host .. ' _ ' .. PU.host .. " _ " .. PU.path : gsub ( " / " , " _ " ) .. " .txt "
end
os.execute ( ' rm -f results/HEADERS_ ' .. fname .. ' 2>/dev/null; rm -f results/STATUS_ ' .. fname .. ' 2>/dev/null; touch results/STATUS_ ' .. fname )
if not APT.testing ( PU.scheme , host ) and APT.redir then I ( spcd .. string.upper ( PU.scheme ) .. " not supported, not tested. " .. URL , host ) ; return end
if 0 < r then
check = " Redirecting to "
end
if 0 < retry then
-- Random backoff before a retry.
os.execute ( " sleep " .. math.random ( 1 , 3 ) )
check = " Retry " .. retry .. " " .. check
end
-- Give up early once any failure budget (timeouts, redirects, retries) is spent.
if 2 <= timeouts then
E ( spcd .. spcd .. " too many timeouts! " .. check .. " " .. host .. arw .. URL , PU.scheme , " " , host )
return
end
if APT.options . timeouts.value <= ( totalTimeouts ) then
E ( spcd .. spcd .. " Way too many timeouts! " , PU.scheme , " " , host )
return
end
if 20 <= r then
E ( spcd .. spcd .. " too many redirects! " .. check .. " " .. host .. arw .. URL , PU.scheme , " " , host )
return
end
if APT.options . retries.value <= retry then
E ( spcd .. spcd .. " too many retries! " .. check .. " " .. host .. arw .. URL , PU.scheme , " " , host )
return
end
if " https " == PU.scheme and APT.options . roundRobin.value == host then
I ( spcd .. " Not testing " .. APT.lnk ( URL ) .. " mirrors wont have the correct HTTPS certificate for the round robin. " , host )
return
else
I ( spcd .. check .. " " .. APT.lnk ( URL ) , host )
end
--[[ Using curl command line -
- I - HEAD
--connect-to domain:port:IP:port - connect to IP, but use SNI from URL.
- header " " - add extra headers .
- L - DO follow redirects .
--max-redirs n - set maximum redirects, default is 50, -1 = unlimited.
- 4 or - 6 - Only use IPv4 or IPv6
--retry n - maximum retries, default is 0, no retries.
- o file - write to file instead of stdout .
--path-as-is - https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html might be useful for URLSanity.
- s silent - don ' t output progress or error messages.
--connect-timeout n - timeout in seconds.
Should return with error code 28 on a timeout ?
- D file - write the received headers to a file . This includes the status code and string .
] ]
-- Headers land in results/HEADERS_<fname>; the -w status line (prefixed '#') in results/STATUS_<fname>.
local status = APT.exe (
' curl -I --retry 0 -s --path-as-is --connect-timeout ' .. APT.options . timeout.value .. ' --max-redirs 0 ' .. APT.IPv46 .. ' ' ..
IP .. ' ' .. ' -o /dev/null -D results/"HEADERS_ ' .. fname .. ' " ' ..
hdr .. ' -w "#%{http_code} %{ssl_verify_result} %{url_effective} \\ n" ' .. PU.scheme .. ' :// ' .. host .. PU.path .. ' >>results/"STATUS_ ' .. fname .. ' " '
) : Nice ( ) : log ( ) : Do ( ) . status
if 0 < r then
APT.tested ( PU.scheme , ' Redirects ' , host )
else
APT.tested ( PU.scheme , ' ' , host )
end
local code = " ??? "
local cstr = " "
local location = nil
-- Exponential backoff (1,2,4,8s) waiting for the STATUS file to appear.
local tmot = 1
while not APT.checkFile ( ' results/STATUS_ ' .. fname ) do
D ( spcd .. spcd .. ' Waiting for results/STATUS_ ' .. fname .. ' file. ' )
os.execute ( ' sleep ' .. tmot )
tmot = tmot * 2
if 8 < tmot then
T ( spcd .. spcd .. " TIMEOUT " .. timeouts + 1 .. " , retry " .. retry + 1 .. ' ' .. APT.lnk ( URL ) , PU.scheme , sanity , host )
timeouts = timeouts + 1
checkHEAD ( host , URL , r , retry + 1 , ' ' ~= sanity )
os.execute ( ' cat results/"HEADERS_ ' .. fname .. ' " >>results/"STATUS_ ' .. fname .. ' " 2>/dev/null; rm -f results/"HEADERS_ ' .. fname .. ' " 2>/dev/null ' )
return
end
end
-- Fold the captured headers into the STATUS file for parsing below.
os.execute ( ' cat results/"HEADERS_ ' .. fname .. ' " >>results/"STATUS_ ' .. fname .. ' " 2>/dev/null; rm -f results/"HEADERS_ ' .. fname .. ' " 2>/dev/null ' )
if 0 ~= status then
local msg = curlStatus [ status ]
if nil == msg then msg = " UNKNOWN CURL STATUS CODE! " end
-- 28 = operation timeout, 7 = failed to connect: both count as timeouts.
if ( 28 == status ) or ( 7 == status ) then
T ( spcd .. spcd .. " TIMEOUT " .. timeouts + 1 .. " , retry " .. retry + 1 .. ' ' .. APT.lnk ( URL ) , PU.scheme , sanity , host )
timeouts = timeouts + 1
else
E ( spcd .. spcd .. " The curl command return an error code of " .. status .. " - " .. msg .. ' for ' .. APT.lnk ( URL ) , PU.scheme , sanity , host )
end
if 60 == status then return end -- Certificate is invalid, don't bother retrying.
checkHEAD ( host , URL , r , retry + 1 , ' ' ~= sanity )
return
end
-- Parse the '#code ssl_verify url' line plus the raw response headers.
local rfile , e = io.open ( " results/STATUS_ " .. fname , " r " )
if nil == rfile then W ( " opening results/STATUS_ " .. fname .. " file - " .. e ) else
for line in rfile : lines ( " *l " ) do
if " # " == line : sub ( 1 , 1 ) then
code = line : sub ( 2 , 4 )
-- Second field is curl's ssl_verify_result; non-zero means a bad certificate.
if ( " https " == PU.scheme ) and ( " 0 " ~= line : sub ( 6 , 6 ) ) then
os.execute ( ' cp results/STATUS_ ' .. fname .. ' results/STATUS_ ' .. fname .. ' _SAVED ' )
if ' ' ~= sanity then
E ( spcd .. spcd .. " The certificate is invalid. " , PU.scheme , sanity , host )
else
E ( spcd .. spcd .. " The certificate is invalid. " , PU.scheme , " https " , host )
end
end
elseif " http " == line : sub ( 1 , 4 ) : lower ( ) then
-- -2 coz the headers file gets a \r at the end.
cstr = line : sub ( 14 , - 2 )
elseif " location " == line : sub ( 1 , 8 ) : lower ( ) then
location = line : sub ( 11 , - 2 )
end
end
if ' ??? ' == code then
W ( spcd .. spcd .. ' Could not find response code. ' .. APT.lnk ( URL ) , PU.scheme , sanity , host )
end
end
os.execute ( ' cat results/STATUS_ ' .. fname .. ' >> results/curl_HEAD_ ' .. fname .. ' ; rm -f results/STATUS_ ' .. fname .. ' 2>/dev/null ' )
-- 4xx / 5xx is a hard failure; anything else is reported then redirects are chased.
if ( " 4 " == tostring ( code ) : sub ( 1 , 1 ) ) or ( " 5 " == tostring ( code ) : sub ( 1 , 1 ) ) then
E ( spcd .. spcd .. code .. " " .. cstr .. " . " .. check .. " " .. APT.lnk ( URL ) , PU.scheme , sanity , host )
else
if not APT.testing ( PU.scheme , host ) then
I ( spcd .. spcd .. " Not supported, but works " .. PU.scheme .. " " .. APT.lnk ( URL ) , PU.scheme , " " , host )
end
I ( spcd .. spcd .. code .. " " .. cstr .. " . " .. check .. " " .. APT.lnk ( URL ) , host )
-- timeouts = timeouts - 1 -- Backoff the timeouts count if we managed to get through.
if nil ~= location then
pu = url.parse ( location , defaultURL )
-- Devuan's own packages must be served by the mirror itself, not bounced to the round robin.
if ( pu.host == APT.options . roundRobin.value ) and ( nil ~= PU.path : find ( ' merged/pool/DEVUAN/ ' ) ) then
E ( ' DEVUAN packages must not be redirected to ' .. APT.options . roundRobin.value .. ' - ' .. APT.lnk ( URL ) .. arw .. APT.lnk ( location ) , PU.scheme , ' Redirects ' , host )
end
if APT.testing ( " Protocol " ) then
if ( ' http ' == location : sub ( 1 , 4 ) ) and ( pu.scheme ~= PU.scheme ) then -- Sometimes a location sans scheme is returned, this is not a protocol change.
if APT.options . roundRobin.value == host then -- Coz HTTPS shouldn't happen via the round robin.
E ( spcd .. spcd .. " Protocol changed during redirect! " .. check .. " " .. APT.lnk ( URL ) .. arw .. APT.lnk ( location ) , PU.scheme , " Protocol " , host )
end
W ( spcd .. spcd .. " Protocol changed during redirect! " .. check .. " " .. APT.lnk ( URL ) .. arw .. APT.lnk ( location ) , PU.scheme , " Protocol " , host )
else
end
APT.tested ( PU.scheme , ' Protocol ' , host )
end
-- Classify the redirect: loop / relative / same host / different host.
if location == URL then
E ( spcd .. spcd .. " Redirect loop! " .. check .. " " .. APT.lnk ( URL ) .. arw .. APT.lnk ( location ) , PU.scheme , " " , host )
elseif nil == pu.host then
I ( spcd .. spcd .. " Relative redirect. " .. check .. " " .. APT.lnk ( URL ) .. arw .. APT.lnk ( location ) , host )
if 1 <= APT.options . bandwidth.value then checkHEAD ( host , PU.scheme .. " :// " .. PU.host .. location , r + 1 , retry , ' ' ~= sanity ) end
elseif ( PU.host == pu.host ) or ( host == pu.host ) then
if PU.host ~= host then
-- Rebuild the location with the original URL's host for a readable log message.
local t = pu.host
pu.host = PU.host
location = url.build ( pu )
pu.host = t
end
I ( spcd .. spcd .. " Redirect to same host. " .. check .. " " .. APT.lnk ( URL ) .. arw .. APT.lnk ( location ) , host )
if 1 <= APT.options . bandwidth.value then checkHEAD ( host , location , r + 1 , retry , ' ' ~= sanity ) end
else
I ( spcd .. spcd .. " Redirect to different host. " .. check .. " " .. APT.lnk ( URL ) .. arw .. APT.lnk ( location ) , host )
if 1 <= APT.options . bandwidth.value then
--[[ The hard part here is that we end up throwing ALL of the test files at the redirected location.
Not good for deb.debian . org , which we should only be throwing . debs at .
What we do is loop through the DNS entries , and only test the specific protocol & file being tested here .
] ]
local u = pu.host .. " / " .. pu.path
local file = pu.path : match ( " .*/([%w%.%+%-_]*)$ " ) -- Get the filename.
local path = pu.path : sub ( 2 , - 1 - ( # file ) )
local check = u : gsub ( " / " , " _ " )
local extraArgs = sendArgs .. ' -o -r '
if ' https ' == pu.scheme then extraArgs = extraArgs .. ' --tests=-http ' end
if ' http ' == pu.scheme then extraArgs = extraArgs .. ' --tests=-https ' end
local pth = path : match ( ' ^(.*/pool/).*$ ' )
if nil ~= pth then table.insert ( APT.results [ PU.scheme ] . redirects , pu.host .. " / " .. pth ) else E ( spcd .. spcd .. ' Odd redirect path ' .. path ) end
I ( spcd .. spcd .. " Now checking redirected host " .. u .. ' for ' .. APT.lnk ( URL ) .. arw .. APT.lnk ( location ) , host )
-- Fork a sub-check of just this file on the redirect target, under a flock so only one runs.
APT.exe ( downloadLock .. " REDIR- " .. check .. " .log.txt " .. " ./apt-panopticon.lua " .. extraArgs .. ' ' .. pu.host .. " / " .. path .. " " .. file ) : Nice ( ) : log ( ) : fork ( )
D ( spcd .. ' logging to ' .. APT.logName ( pu.host , nil , file ) [ 2 ] )
APT.tested ( PU.scheme , ' Redirects ' , host )
end
end
elseif nil ~= PU.path : find ( ' merged/pool/DEBIAN-SECURITY/ ' ) then
W ( ' DEBIAN-SECURITY packages must be redirected to a Debian mirror - ' .. APT.lnk ( URL ) .. arw .. APT.lnk ( location ) , PU.scheme , ' Redirects ' , host )
end
end
end
-- Run checkHEAD on scheme://URL, plus a URLSanity pass (tripled slashes) when
-- bandwidth allows, then resume the pending coroutine (if any).
-- Rolls this run's timeouts into totalTimeouts first; returns true when the
-- total timeout budget is spent and the caller should stop testing this host.
local checkTimeouts = function ( host , scheme , URL )
totalTimeouts = totalTimeouts + timeouts ; timeouts = 0
checkHEAD ( host , scheme .. " :// " .. URL )
if ( 1 <= APT.options . bandwidth.value ) and APT.testing ( " URLSanity " ) then
-- Triple every slash, then restore the first one, to exercise sloppy URL handling.
URL = URL : gsub ( " / " , " /// " )
URL = URL : gsub ( " /// " , " / " , 1 )
checkHEAD ( host , scheme .. " :// " .. URL , 0 , 0 , true )
APT.tested ( scheme , ' URLSanity ' , host )
end
if nil ~= cor then
D ( ' *>* About to resume coroutine after checkHEAD( ' .. host .. ' , ' .. scheme .. ' :// ' .. URL .. ' ) ' )
local ok , message = coroutine.resume ( cor )
-- A dead or failed coroutine is dropped and its error printed.
if not ok then cor = nil ; print ( message ) end
end
if APT.options . timeouts.value <= ( totalTimeouts ) then
E ( " Way too many timeouts! " , scheme , " URLSanity " , host )
return true
end
return false
end
-- HEAD test a set of files on a mirror over http and https.
-- With a file argument only that single file is tested ("redir" as ip means
-- test against the host itself); otherwise the reference Debian packages,
-- the per-release index files, and the reference Devuan package are tested,
-- gated by the bandwidth option. Bails out as soon as the timeout budget blows.
local checkFiles = function ( host , ip , path , file )
timeouts = 0
if nil == path then path = " " end
if nil ~= file then
if " redir " == ip then ip = host end
if checkTimeouts ( host , " http " , ip .. path .. " / " .. file ) then return end
if checkTimeouts ( host , " https " , ip .. path .. " / " .. file ) then return end
else
I ( " HEAD testing files for " .. host .. arw .. ip .. " " .. path , host )
if 1 <= APT.options . bandwidth.value then
-- Do these first, coz they are likely to fork off a different server.
for i , s in pairs ( referenceDebs ) do
if checkTimeouts ( host , " http " , ip .. path .. " / " .. s ) then return end
if checkTimeouts ( host , " https " , ip .. path .. " / " .. s ) then return end
end
end
-- Release files for each release; at low bandwidth only the first of each.
for i , s in pairs ( releases ) do
for j , k in pairs ( releaseFiles ) do
if repoExists ( s .. k ) then
if checkTimeouts ( host , " http " , ip .. path .. " /merged/dists/ " .. s .. ' / ' .. k ) then return end
if 1 <= APT.options . bandwidth.value then
if checkTimeouts ( host , " https " , ip .. path .. " /merged/dists/ " .. s .. ' / ' .. k ) then return end
else
break
end
end
if 2 >= APT.options . bandwidth.value then break end
end
if 2 >= APT.options . bandwidth.value then break end
end
if 1 <= APT.options . bandwidth.value then
for i , s in pairs ( referenceDevs ) do
if checkTimeouts ( host , " http " , ip .. path .. " / " .. s ) then return end
if checkTimeouts ( host , " https " , ip .. path .. " / " .. s ) then return end
end
end
end
end
-- Dispatch the checks for one mirror.
-- With an explicit ip (not "redir") the files are checked directly against
-- that address; otherwise a fresh apt-panopticon.lua process is forked for
-- the mirror so hosts are tested in parallel.
-- NOTE: deliberately global, referenced before this point in the file.
checkHost = function ( orig , host , path , ip , file )
if nil == host then host = orig end
if nil == path then path = " " end
if nil == file then file = " " end
local ph = url.parse ( " http:// " .. host )
if ( nil ~= ip ) and ( " redir " ~= ip ) then
local po = url.parse ( " http:// " .. orig )
if " " ~= file then
D ( " checking redirected file " .. po.host .. " " .. file )
checkFiles ( po.host , ip , path , file )
else
checkFiles ( po.host , ip , path )
end
else
if orig == host then
I ( " Testing mirror " .. orig .. " " .. file )
-- Fork a child checker for this mirror; results land in its own log.
APT.exe ( " ./apt-panopticon.lua " .. sendArgs .. " -o " .. orig .. path .. " " .. file ) : Nice ( ) : log ( ) : fork ( )
D ( ' logging to ' .. APT.logName ( ph.host , nil , file ) [ 2 ] )
else D ( " checkHost " .. orig .. arw .. host ) end
end
end
-- Append one file to the curl -K config being written to handle f, for
-- release r and relative path k on the given host/URL. If a previous copy
-- exists it is renamed to .old first (postDownload restores it on failure).
local addDownload = function ( host , URL , f , r , k )
-- (Removed a dead "local file = k:match(...)" - it was computed but never used.)
if APT.checkFile ( " results/ " .. host .. " /merged/dists/ " .. r .. ' / ' .. k ) then
-- Curls "check timestamp and overwrite file" stuff sucks.
-- -R means the destination file gets the timestamp of the remote file.
-- Can only do ONE timestamp check per command.
-- This doesn't work either. All downloads get all these headers. Pffft
-- local status, ts = APT.execute('TZ="GMT" ls -l --time-style="+%a, %d %b %Y %T %Z" results/' .. host .. "/merged/dists/" .. r .. '/' .. k .. ' | cut -d " " -f 6-11')
-- f:write('header "If-Modified-Since: ' .. ts:sub(2, -2) .. '"\n')
-- Curl will DELETE the existing file if the timestamp fails to download a new one, unless we change directory first,
-- which wont work with multiple files in multiple directories. WTF?
--TODO - change tactic, do a HEAD if-modified test first before adding the file to the list to download.
os.execute ( " mv results/ " .. host .. " /merged/dists/ " .. r .. ' / ' .. k ..
" results/ " .. host .. " /merged/dists/ " .. r .. ' / ' .. k .. " .old " )
end
D ( ' Downloading http:// ' .. host .. URL .. ' /merged/dists/ ' .. r .. ' / ' .. k )
f : write ( ' url " ' .. ' http:// ' .. host .. URL .. ' /merged/dists/ ' .. r .. ' / ' .. k .. ' " \n ' )
f : write ( ' output "results/ ' .. host .. ' /merged/dists/ ' .. r .. ' / ' .. k .. ' " \n ' )
end
-- After a download batch: if the fresh copy of release r's file k failed to
-- arrive but a .old backup exists (made by addDownload), restore the backup;
-- then decompress .gz / .xz indexes in place (keeping the compressed file).
local postDownload = function ( host , r , k )
-- (Removed dead "local file = ..." lines - the value was computed but never used.)
os.execute ( " if [ -f results/ " .. host .. " /merged/dists/ " .. r .. ' / ' .. k .. " .old ] " ..
" && [ ! -f results/ " .. host .. " /merged/dists/ " .. r .. ' / ' .. k .. " ]; then cp -a " ..
" results/ " .. host .. " /merged/dists/ " .. r .. ' / ' .. k .. " .old " ..
" results/ " .. host .. " /merged/dists/ " .. r .. ' / ' .. k .. " ; fi " )
if APT.checkFile ( ' results/ ' .. host .. ' /merged/dists/ ' .. r .. ' / ' .. k ) then
if " .gz " == k : sub ( - 3 , - 1 ) then APT.exe ( " gzip -dfk results/ " .. host .. " /merged/dists/ " .. r .. ' / ' .. k ) : Nice ( ) : noErr ( ) : Do ( ) end
if " .xz " == k : sub ( - 3 , - 1 ) then APT.exe ( " xz -dfk results/ " .. host .. " /merged/dists/ " .. r .. ' / ' .. k ) : Nice ( ) : noErr ( ) : Do ( ) end
end
end
-- Base curl command line for batch downloads; downloads() appends the
-- per-host log name and a -K config file listing the url/output pairs.
local download = " curl " ..
" --connect-timeout " .. APT.options . timeout.value ..
" --create-dirs -f -L " ..
" --fail-early " ..
" --max-time " .. APT.options . maxtime.value ..
APT.IPv46 .. ' ' ..
" --retry " .. APT.options . retries.value ..
" -R -v -z 'results/stamp.old' --stderr results/ "
-- Build a curl -K config file of things to fetch from host and fork the
-- batch download under a flock.
-- meta    - tag used in the lock/log/config file names.
-- release - when given with list, each line of list is a file under that release.
-- list    - newline separated file list, or a single path when release is nil;
--           nil means download the standard release index files.
local downloads = function ( host , URL , meta , release , list )
if nil == URL then URL = " " end
local files = ' curl- ' .. meta .. ' - ' .. host .. ' .files.txt '
local lock = meta .. " - " .. host .. " .log.txt "
local log = " curl- " .. meta .. " - " .. host .. " .log.txt "
local cm = downloadLock .. lock .. " " .. download .. log .. " -K results/ " .. files
if APT.testing ( " IPv4 " ) and ( not APT.testing ( " IPv6 " ) ) then cm = cm .. ' -4 ' end
if ( not APT.testing ( " IPv4 " ) ) and APT.testing ( " IPv6 " ) then cm = cm .. ' -6 ' end
-- BUGFIX: f and e were accidental globals; keep them local to this function.
local f , e = io.open ( " results/curl- " .. meta .. ' - ' .. host .. " .files.txt " , " a+ " )
if nil == f then C ( " opening curl downloads list file - " .. e ) ; return end
if nil ~= list then
if " " ~= list then
if nil ~= release then
for l in list : gmatch ( " \n *([^ \n ]+) \n * " ) do
addDownload ( host , URL , f , release , " / " .. l )
end
else
I ( ' Downloading ' .. APT.lnk ( ' http:// ' .. host .. URL .. ' /merged/ ' .. list ) )
f : write ( ' url " ' .. ' http:// ' .. host .. URL .. ' /merged/ ' .. list .. ' " \n ' )
f : write ( ' output "results/ ' .. host .. ' /merged/ ' .. list .. ' " \n ' )
end
f : close ( )
return
end
else
-- No explicit list: queue the standard per-release index files.
for i , s in pairs ( releases ) do
for j , k in pairs ( releaseFiles ) do
if repoExists ( s .. k ) then
addDownload ( host , URL , f , s , k )
end
end
end
end
f : close ( )
APT.exe ( cm ) : Nice ( ) : log ( ) : fork ( )
D ( ' logging to <a href=" ' .. log .. ' "> ' .. log .. ' </a>, with <a href=" ' .. files .. ' ">these files</a> ' )
end
-- Sanity check one mirror_list.txt entry m (fields FQDN, BaseURL, ...),
-- warning about trailing spaces/slashes, credentials, queries, and
-- FQDN/BaseURL disagreements. Returns m with FQDN/BaseURL normalised from
-- the parsed URL.
local validateURL = function ( m )
if " " == m.BaseURL : sub ( - 1 , - 1 ) then
W ( " space at end of BaseURL in mirror_list.txt! " .. m.BaseURL , " " , " " , m.FQDN )
m.BaseURL = m.BaseURL : sub ( 1 , - 2 )
end
if " / " == m.BaseURL : sub ( - 1 , - 1 ) then
W ( " slash at end of BaseURL in mirror_list.txt! " .. m.BaseURL , " " , " " , m.FQDN )
m.BaseURL = m.BaseURL : sub ( 1 , - 2 )
end
local p = url.parse ( " http:// " .. m.BaseURL )
if nil == p.path then p.path = ' ' end
-- BUGFIX: was "authority .. ' : '" - an undefined global plus a padded
-- separator, which crashed on any BaseURL with an explicit port.
if nil ~= p.port then p.authority = p.authority .. ':' .. p.port end
-- BUGFIX: the field is FQDN; the old m.FDQN typo made this nil-check always fire.
if nil == m.FQDN then W ( " Something wrong in FDQN from mirror_list.txt! nil " , " " , " " , p.authority ) else
if m.FQDN ~= p.authority then W ( " Something wrong in FDQN from mirror_list.txt! " .. m.FQDN , " " , " " , p.authority ) end
end
if nil == m.BaseURL then W ( " Something wrong in BaseURL from mirror_list.txt! nil " , " " , " " , p.authority ) else
if m.BaseURL ~= ( p.authority .. p.path ) then W ( " Something wrong in BaseURL from mirror_list.txt! " .. m.BaseURL , " " , " " , p.authority ) end
end
if ( nil ~= p.query ) or ( nil ~= p.fragment ) or ( nil ~= p.params ) then W ( " Something wrong in BaseURL from mirror_list.txt, should be nothing after the path! " .. m.BaseURL , " " , " " , p.authority ) end
if ( nil ~= p.user ) or ( nil ~= p.userinfo ) or ( nil ~= p.password ) then W ( " Something wrong in BaseURL from mirror_list.txt, should be no credentials! " .. m.BaseURL , " " , " " , p.authority ) end
m.FQDN = p.authority
m.BaseURL = p.authority .. p.path
return m
end
-- Download mirror_list.txt from the reference site and parse it into a table
-- of mirrors keyed by FQDN, each entry validated by validateURL(). The table
-- is also dumped to results/mirrors.lua for later runs. Exits the process if
-- an implausible number of mirrors is found.
local getMirrors = function ( )
local mirrors = { }
local host = " "
local m = { }
local active = true
local URL = ' http:// ' .. APT.options . referenceSite.value .. ' /mirror_list.txt '
I ( ' Downloading and parsing http:// ' .. APT.options . referenceSite.value .. ' /mirror_list.txt ' )
local p , c , h = http.request ( URL )
if nil == p then E ( c .. " fetching " .. URL ) else
for l in p : gmatch ( " \n *([^ \n ]+) \n * " ) do
local t , d = l : match ( " (%a*):%s*(.*) " )
-- Skip lines with no "key: value" shape; string.lower(nil) used to crash here.
if nil ~= t then
d = string.lower ( d )
if " FQDN " == t then
-- A new FQDN line finishes the previous mirror's record.
if " " ~= host then
mirrors [ host ] = validateURL ( m )
m = { }
active = true
end
host = d
m [ t ] = d
elseif " Protocols " == t then
local prot = { }
for w in d : gmatch ( " (%w+) " ) do
if APT.search ( APT.protocols , w : lower ( ) ) then prot [ w ] = true end
end
m [ t ] = prot
elseif " Active " == t and nil == d : sub ( 1 , 3 ) : find ( " yes " , 1 , true ) then
W ( " Mirror " .. host .. " is not active - " .. d , " " , " " , host )
active = false
m [ t ] = d
-- TODO - Should do some more input validation on everything.
elseif " Rate " == t then
local time , unit = d : match ( ' (%d+) *(%a+) ' )
-- Guard against an unparseable Rate value before touching time/unit.
if nil == time then
C ( ' Unknown Rate for mirror ' .. host )
else
time = tonumber ( time )
unit = unit : sub ( 1 , 1 )
m [ t ] = time .. ' ' .. unit
if ' m ' == unit then
time = time * 60
elseif ' h ' == unit then
time = time * 60 * 60
else
C ( ' Unknown Rate for mirror ' .. host )
end
m [ ' Updated ' ] = time
end
else
m [ t ] = d
end
end
end
-- Flush the final mirror record.
if " " ~= host --[[and active]] then
mirrors [ host ] = validateURL ( m )
end
end
if APT.testing ( " DNSRR " ) then
mirrors [ APT.options . roundRobin.value ] = { [ " Protocols " ] = { [ " http " ] = true ; } ; [ ' Updated ' ] = 300 ; [ ' DNSRR ' ] = false ;
[ " FQDN " ] = APT.options . roundRobin.value ; [ " Active " ] = ' yes ' ; [ " BaseURL " ] = APT.options . roundRobin.value ; }
end
local file , e = io.open ( " results/mirrors.lua " , " w+ " )
if nil == file then C ( " opening mirrors file - " .. e ) else
file : write ( APT.dumpTable ( mirrors , " mirrors " ) .. " \n return mirrors \n " )
file : close ( )
end
-- BUGFIX: mirrors is keyed by host name, so #mirrors was always 0 and this
-- sanity check could never trigger; count the entries properly.
local count = 0
for _ in pairs ( mirrors ) do count = count + 1 end
if 42 < count then print ( count .. ' is too many mirrors! ' ) ; os.exit ( false ) end
return mirrors
end
-- On the reference site only: promote each release's NEW_<list.out>_<release>
-- semaphore file - rename the .tmp produced by parsing when it exists,
-- otherwise just touch an empty one so later stages see a file either way.
local postParse = function ( host , list )
  if APT.options . referenceSite.value ~= host then return end
  if nil == list then return end
  local sem = ' results/NEW_ ' .. list.out .. ' _%s.txt '
  for _ , release in pairs ( releases ) do
    local f = sem : format ( release )
    if not APT.checkFile ( f .. ' .tmp ' ) then
      os.execute ( ' touch ' .. f )
    else
      os.execute ( ' mv ' .. f .. ' .tmp ' .. f )
    end
  end
end
local parseDebs = function ( host )
for i , n in pairs ( releases ) do
local inFile = ' results/NEW_debs_ ' .. n .. ' .txt '
local nfile , e = io.open ( inFile , " r " )
if nil == nfile then W ( " opening " .. inFile .. " file - " .. e ) else
for l in nfile : lines ( ) do
local v , p , sz , sha = l : match ( ' | (.+) | (pool/.+%.deb) | (%d.+) | (%x.+) | ' )
if nil ~= p then
if APT.checkFile ( ' results/ ' .. host .. " /merged/ " .. p ) then
local fsz = APT.exe ( ' ls -l results/ ' .. host .. " /merged/ " .. p .. ' | cut -d " " -f 5-5 ' ) : Do ( ) . result
if APT.testing ( " Integrity " ) then
if sz ~= fsz : sub ( 2 , - 2 ) then -- The sub bit is to slice off the EOLs at each end.
E ( ' Package size mismatch - results/ ' .. host .. " /merged/ " .. p .. ' should be ' .. sz .. ' , but is ' .. fsz : sub ( 2 , - 2 ) .. ' . ' , ' http ' , ' Integrity ' , host )
else
local fsha = APT.exe ( ' sha256sum results/ ' .. host .. " /merged/ " .. p .. ' | cut -d " " -f 1 ' ) : log ( ) : Do ( ) . result
if sha ~= fsha : sub ( 2 , - 2 ) then E ( ' Package SHA256 sum mismatch - results/ ' .. host .. " /merged/ " .. p , ' http ' , ' Integrity ' , host ) end
-- TODO - maybe check the PGP key, though packages are mostly not signed.
end
APT.tested ( ' http ' , ' Integrity ' , host )
end
if APT.testing ( " Updated " ) then
if sz ~= fsz : sub ( 2 , - 2 ) then