Browse Source

implement proper updating logic

debianarchive-update
parazyd 6 years ago
parent
commit
e07a9378a4
Signed by untrusted user: parazyd GPG Key ID: F0CB28FCF78637DE
  1. 6
      README.md
  2. 4
      amprolla_init.py
  3. 4
      amprolla_merge.py
  4. 122
      amprolla_update.py
  5. 32
      lib/config.py

6
README.md

@ -34,9 +34,11 @@ Basic usage
Edit `lib/config.py` to your needs, and then run `amprolla_init.py`.
This will download the repositories we will merge afterwards. When this
is done, you can now run `amprolla_merge.py` which will perform the
merge, and finally sign the Release files needed.
merge, and finally sign the Release files needed. To perform incremental
updates, run `amprolla_update.py` with a cron job in your desired
intervals.
A `nginx` configuration for the amprolla server can be found in
An `nginx` configuration for the amprolla server can be found in
`contrib`.
More information on amprolla can be found in the `doc` directory.

4
amprolla_init.py

@ -8,6 +8,7 @@ the spooldir, along with all the files hashed inside the Release files
from os.path import join
from multiprocessing import Pool
from time import time
from lib.config import repos, suites, aliases, spooldir, mainrepofiles
from lib.net import download
@ -78,4 +79,7 @@ def main():
if __name__ == '__main__':
t1 = time()
main()
t2 = time()
print('total time: %s' % (t2 - t1))

4
amprolla_merge.py

@ -7,6 +7,7 @@ Amprolla main module
from os.path import basename, join
from multiprocessing import Pool
from time import time
# from pprint import pprint
from lib.package import (write_packages, load_packages_file,
@ -182,4 +183,7 @@ def main():
if __name__ == '__main__':
t1 = time()
main()
t2 = time()
print('total time: %s' % (t2 - t1))

122
amprolla_update.py

@ -7,58 +7,104 @@ Perform incremental updates
from multiprocessing import Pool
from os.path import join
from pprint import pprint

import requests

from amprolla_init import pop_dirs
from amprolla_merge import gen_release, merge, prepare_merge_dict
from lib.config import repos, spooldir, repo_order
from lib.net import download
from lib.parse import parse_release, get_time, get_date, compare_dict
roots = prepare_merge_dict()
needsmerge = []
for suite, paths in roots.items():
print(suite)
print(paths)
devuan_loc = paths[0]
debian_sec_loc = paths[1]
debian_loc = paths[2]
if devuan_loc:
devuan_rem = devuan_loc.replace(spooldir, repos['devuan']['host'])
print(devuan_rem)
remoterel = join(devuan_rem, 'Release')
localrel = join(devuan_loc, 'Release')
if remote_is_newer(remoterel, localrel):
print('Do something')
# probably add suite to needsmerge
if debian_sec_loc:
print('Do the same')
if debian_loc:
print('Do the same')
break
def remote_is_newer(remote, local):
rem = requests.get(remote)
rem_contents = rem.text
rem_date = get_date(rem_contents)
loc_contents = open(localrel).read()
loc_date = get_date(loc_contents)
rem_date = get_date(remote)
loc_date = get_date(local)
print('Remote date: %s' % rem_date)
print('Local date: %s' % loc_date)
print('Local date: %s' % loc_date)
if get_time(rem_date) > get_time(loc_date):
print('Remote Release is newer!')
return True
return False
def perform_update(suite, paths):
print('==================================================')
print('Checking for updates in %s' % suite)
print(paths)
needsmerge = {}
needsmerge['downloads'] = [] # all files that have to be downloaded
c = 0
for i in repo_order:
# i = repository name
needsmerge[i] = {}
needsmerge[i]['mergelist'] = []
if paths[c]:
print('Working on %s repo' % i)
remote_path = paths[c].replace(spooldir, repos[i]['host'])
remote_rel = requests.get(join(remote_path, 'Release'))
remote_rel_text = remote_rel.text
tup = (remote_rel, join(paths[c], 'Release'))
download(tup)
local_rel_text = open(join(paths[c], 'Release')).read()
if remote_is_newer(remote_rel_text, local_rel_text):
remote_parsed = parse_release(remote_rel_text)
local_parsed = parse_release(local_rel_text)
diffs = compare_dict(remote_parsed, local_parsed)
if diffs:
for k in diffs:
if k.endswith('Packages.gz') or k.endswith('Sources.gz'):
needsmerge[i]['mergelist'].append(k)
rmt = join(paths[c].replace(spooldir, repos[i]['host']), k)
loc = join(paths[c], k)
dlf = (rmt, loc)
needsmerge['downloads'].append(dlf)
c += 1
# break
# download what needs to be downloaded
print('Downloading updates...')
dlpool = Pool(4)
dlpool.map(download, needsmerge['downloads'])
dlpool.close
# create union of our Packages.gz and Sources.gz files we will merge
uni = []
for i in repo_order:
uni.append(needsmerge[i]['mergelist'])
updpkg_list = set().union(*uni)
# perform the actual merge
if updpkg_list:
print('Merging files...')
mrgpool = Pool(4)
mrgpool.map(merge, updpkg_list)
mrgpool.close()
print('Generating Release...')
gen_release(suite)
print('==================================================')
def main():
"""
Do the update for all repos
"""
roots = prepare_merge_dict()
for suite, paths in roots.items():
perform_update(suite, paths)
break
if __name__ == '__main__':
main()

32
lib/config.py

@ -38,7 +38,7 @@ repos = {
},
'debian': {
'name': 'DEBIAN',
'host': 'http://ftp.debian.org',
'host': 'http://deb.debian.org',
'dists': 'debian/dists',
'pool': 'debian/pool',
'aliases': True,
@ -84,24 +84,24 @@ categories = ['main', 'contrib', 'non-free']
arches = [
'source',
'binary-all',
# 'binary-alpha',
'binary-alpha',
'binary-amd64',
# 'binary-arm64',
# 'binary-armel',
'binary-arm64',
'binary-armel',
'binary-armhf',
# 'binary-hppa',
# 'binary-hurd-i386',
'binary-hppa',
'binary-hurd-i386',
'binary-i386',
# 'binary-ia64',
# 'binary-kfreebsd-amd64',
# 'binary-kfreebsd-i386',
# 'binary-mips',
# 'binary-mips64el',
# 'binary-mipsel',
# 'binary-powerpc',
# 'binary-ppc64el',
# 'binary-s390x',
# 'binary-sparc'
'binary-ia64',
'binary-kfreebsd-amd64',
'binary-kfreebsd-i386',
'binary-mips',
'binary-mips64el',
'binary-mipsel',
'binary-powerpc',
'binary-ppc64el',
'binary-s390x',
'binary-sparc'
]
mainrepofiles = [

Loading…
Cancel
Save