Browse Source

cleanup; remove old cruft

debianarchive-update
parazyd 6 years ago
parent
commit
1d85a8d1cd
Signed by untrusted user: parazyd GPG Key ID: F0CB28FCF78637DE
  1. 13
      amprolla_init.py
  2. 20
      amprolla_merge.py
  3. 10
      amprolla_update.py
  4. 1
      lib/config.def.py
  5. 4
      lib/net.py
  6. 4
      lib/package.py
  7. 33
      lib/parse.py
  8. 2
      lib/release.py

13
amprolla_init.py

@@ -10,7 +10,7 @@ from os.path import join
from multiprocessing import Pool
from time import time
from lib.config import repos, suites, aliases, spooldir, mainrepofiles
from lib.config import aliases, cpunm, mainrepofiles, repos, spooldir, suites
from lib.net import download
from lib.parse import parse_release
@@ -21,9 +21,10 @@ def pop_dirs(repo):
directory structure.
Returns a list of tuples holding the remote and local locations
of the files
Example:
(http://auto.mirror.devuan.org/devuan/dists/jessie/main/binary-armhf/Packages.gz,
./spool/devuan/dists/unstable/contrib/binary-armhf/Packages.gz)
(http://deb.debian.org/debian/dists/jessie/main/binary-all/Packages.gz,
./spool/debian/dists/jessie/main/binary-all/Packages.gz)
"""
repodata = repos[repo]
@@ -51,7 +52,7 @@ def pop_dirs(repo):
def main():
"""
Loops through all repositories, and downloads their *Release* files, along
Loops through all repositories, and downloads their Release files, along
with all the files listed within those Release files.
"""
for dist in repos:
@@ -62,7 +63,7 @@ def main():
for file in mainrepofiles:
urls = (join(url[0], file), join(url[1], file))
tpl.append(urls)
dlpool = Pool(4)
dlpool = Pool(cpunm)
dlpool.map(download, tpl)
dlpool.close()
@@ -73,7 +74,7 @@ def main():
# if k.endswith('/binary-armhf/Packages.gz'):
urls = (join(url[0], k), join(url[1], k))
tpl.append(urls)
dlpool = Pool(4)
dlpool = Pool(cpunm)
dlpool.map(download, tpl)
dlpool.close()

20
amprolla_merge.py

@@ -8,13 +8,13 @@ Amprolla main module
from os.path import basename, join
from multiprocessing import Pool
from time import time
# from pprint import pprint
from lib.package import (write_packages, load_packages_file,
merge_packages_many)
from lib.config import (aliases, banpkgs, repo_order, repos, spooldir, suites,
mergedir, mergesubdir, pkgfiles, srcfiles, categories,
arches)
from lib.config import (aliases, arches, banpkgs, categories, cpunm, mergedir,
mergesubdir, pkgfiles, repos, repo_order, spooldir,
srcfiles, suites)
from lib.package import (load_packages_file, merge_packages_many,
write_packages)
from lib.release import write_release
@@ -61,7 +61,7 @@ def devuan_rewrite(pkg, repo_name):
repos[repo_name]['name'])
if 'Directory' in pkg:
pkg['Directory'] = pkg['Directory'].replace('pool/', 'pool/%s/' %
repos[repo_name]['name'])
repos[repo_name]['name'])
return pkg
@@ -171,8 +171,7 @@ def main():
pkg.append(join(j, i, mrgfile))
# pprint(pkg)
mrgpool = Pool(4) # Set it to the number of CPUs you want to use
mrgpool = Pool(cpunm)
mrgpool.map(main_merge, pkg)
mrgpool.close()
@@ -180,9 +179,8 @@ def main():
for i in suites:
for j in suites[i]:
rel_list.append(j)
# gen_release(j)
relpool = Pool(4) # Set it to the number of CPUs you want to use
relpool = Pool(cpunm)
relpool.map(gen_release, rel_list)
relpool.close()

10
amprolla_update.py

@@ -10,9 +10,9 @@ from multiprocessing import Pool
from time import time
import requests
from amprolla_merge import prepare_merge_dict, gen_release, merge
from lib.config import repos, spooldir, repo_order, aliases
from lib.parse import parse_release, get_time, get_date, compare_dict
from amprolla_merge import gen_release, merge, prepare_merge_dict
from lib.config import aliases, cpunm, repos, repo_order, spooldir
from lib.parse import compare_dict, get_date, get_time, parse_release
from lib.net import download
@@ -77,7 +77,7 @@ def perform_update(suite, paths):
# download what needs to be downloaded
if needsmerge['downloads']:
print('Downloading updates...')
dlpool = Pool(4)
dlpool = Pool(cpunm)
dlpool.map(download, needsmerge['downloads'])
# create union of our Packages.gz and Sources.gz files we will merge
@@ -112,7 +112,7 @@ def perform_update(suite, paths):
# perform the actual merge
if merge_list:
print('Merging files...')
mrgpool = Pool(4)
mrgpool = Pool(cpunm)
mrgpool.map(merge, merge_list)
# generate Release files if we got any new files

1
lib/config.def.py

@@ -6,6 +6,7 @@ amprolla configuration file
from hashlib import md5, sha1, sha256
cpunm = 4 # number of cpus you want to use for multiprocessing
spooldir = './spool'
signingkey = 'CA608125'
mergedir = './merged'

4
lib/net.py

@@ -17,7 +17,9 @@ def download(uris):
url = uris[0]
path = uris[1]
print("downloading: %s\nto: %s" % (url, path))
r = requests.get(url, stream=True)
if r.status_code == 404:
warn("download of %s failed: not found!" % url)
return
@@ -32,5 +34,3 @@ def download(uris):
f.write(chunk)
# f.flush()
f.close()
print("\033[1;32m . done\033[0m")
return

4
lib/package.py

@@ -10,8 +10,8 @@ from gzip import open as gzip_open
from lzma import open as lzma_open
from shutil import copyfile
from lib.parse import (parse_packages, parse_dependencies)
from lib.config import packages_keys, sources_keys, mergedir, spooldir
from lib.config import mergedir, packages_keys, sources_keys, spooldir
from lib.parse import parse_dependencies, parse_packages
def write_packages(packages, filename, sort=True, sources=False):

33
lib/parse.py

@@ -50,10 +50,7 @@ def parse_release(reltext):
def parse_release_head(reltext):
"""
Parses the header of the release file to grab potentially needed
metadata
"""
"""Parses the header of the release file to grab needed metadata"""
metadata = {}
contents = reltext.split('\n')
@@ -72,23 +69,6 @@ def parse_release_head(reltext):
return metadata
def parse_release_re(reltext):
"""
Parses a Release file using regular expressions and returns a dict
of the files we need
key = filename, value = sha256 checksum
"""
_hash = {}
match = re.search('SHA256:+', reltext)
if match:
line = reltext[match.start():-1]
for i in line.split('\n'):
if i == 'SHA256:' or i == '\n': # XXX: hack
continue
_hash[(i.split()[2])] = i.split()[0]
return _hash
def parse_package(entry):
""" Parses a single Packages entry """
pkgs = {}
@@ -113,17 +93,6 @@ def parse_package(entry):
return pkgs
PACKAGES_REGEX = re.compile('([A-Za-z0-9\-]+): ')
def parse_package_re(entry):
""" Parses a single Packages entry """
contents = PACKAGES_REGEX.split(entry)[1:] # Throw away the first ''
keys = contents[::2]
vals = map(lambda x: x.strip(), contents[1::2])
return dict(zip(keys, vals))
def parse_packages(pkgtext):
"""
Parses our package file contents into a hashmap

2
lib/release.py

@@ -8,7 +8,7 @@ from datetime import datetime, timedelta
from os.path import getsize, isfile
import gnupg
from lib.config import release_keys, checksums, signingkey
from lib.config import checksums, release_keys, signingkey
from lib.parse import parse_release_head

Loading…
Cancel
Save