
initial dl of Release files for spooldir; styling fixes

parazyd 6 years ago
commit bc499c81e9
Signed by untrusted user: parazyd GPG Key ID: F0CB28FCF78637DE
4 changed files:
  1. amprolla-init (96 lines changed)
  2. lib/config.py (131 lines changed)
  3. lib/fs.py (2 lines changed)
  4. lib/net.py (8 lines changed)

amprolla-init (96 lines changed)

@@ -1,62 +1,40 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # copyright (c) 2017 - Ivan J. <parazyd@dyne.org>
 # see LICENSE file for copyright and license details

-import os
-import sys
+from os.path import join
+from pprint import pprint

 import lib.config as config
-import lib.delta as delta
-import lib.fs as fs
-import lib.net as net
-from lib.log import notice, warn
-
-
-def pop_dirs():
-    paths = fs.crawl()
-    notice("creating initial directory structure")
-    for i in range(0, len(paths)):
-        baseurl = "http://" + \
-            "/".join([config.repos[i]["host"], config.repos[i]["dists"]])
-        basepath = "/".join([config.amprolla["spooldir"],
-                             config.repos[i]["dists"]])
-        for j in paths[config.repos[i]["name"]]:
-            suiteurl = "/".join([baseurl, j])
-            suitepath = "/".join([basepath, j])
-            if not os.path.exists(suitepath):
-                os.makedirs(suitepath)
-            for k in config.mainrepofiles:
-                relurl = "/".join([suiteurl, k])
-                relfile = "/".join([suitepath, k])
-                if not os.path.isfile(relfile):
-                    net.download(relurl, relfile)
-            try:
-                with open(suitepath + "/Release", "rb") as frel:
-                    rels = frel.read()
-                relmap = delta.parse_release(rels)
-            except IOError:
-                warn("no Release file for %s" % suitepath)
-            try:
-                for k in relmap:
-                    if relmap[k] == "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855":
-                        continue  # ^ this is /dev/null
-                    fileurl = "/".join([suiteurl, k])
-                    filepath = "/".join([suitepath, k])
-                    if not os.path.exists(os.path.dirname(filepath)):
-                        os.makedirs(os.path.dirname(filepath))
-                    if not os.path.isfile(filepath):
-                        net.download(fileurl, filepath)
-            except TypeError:
-                warn("Typeerror")
-
-
-def merge():
-    for i in config.amprolla["mergedsubdirs"]:
-        mdir = "/".join([config.amprolla["mergedir"], i])
-        if not os.path.exists(mdir):
-            os.makedirs(mdir)
+from lib.net import download
+
+
+def pop_dirs(repo):
+    print('Downloading %s directory structure' % repo)
+    repodata = config.repos[repo]
+
+    urls = []
+    for i in config.suites:
+        for j in config.suites[i]:
+            baseurl = join(repodata['host'], repodata['dists'])
+            suite = j
+            if repodata['aliases'] is True:
+                if j in config.aliases[repodata['name']]:
+                    suite = config.aliases[repodata['name']][j]
+                elif repodata['skipmissing'] is True:
+                    continue
+                skips = ['jessie-security', 'ascii-security']  # XXX: hack
+                if repo == 'debian' and j in skips:
+                    continue
+            pair = (join(baseurl, suite), join(baseurl.replace(repodata['host'],
+                                                               config.spooldir), suite))
+            urls.append(pair)
+
+    return urls
+
+
+for dist in config.repos:
+    urls = pop_dirs(dist)
+    for url in urls:
+        for file in config.mainrepofiles:
+            remote = join(url[0], file)
+            local = join(url[1], file)
+            download(remote, local)
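For each suite, the new pop_dirs() pairs the remote dists URL with its local mirror path under spooldir by string-replacing the host prefix. A minimal standalone sketch of that pairing step, with the 'devuan' repo data and spooldir inlined from lib/config.py instead of imported:

from os.path import join

# inlined stand-ins for config.repos['devuan'] and config.spooldir
repodata = {'host': 'http://auto.mirror.devuan.org', 'dists': 'devuan/dists'}
spooldir = './spool'

def url_pair(suite):
    # remote dists URL and the matching path under the spool directory,
    # built the same way as the `pair` tuple in pop_dirs() above
    baseurl = join(repodata['host'], repodata['dists'])
    return (join(baseurl, suite),
            join(baseurl.replace(repodata['host'], spooldir), suite))

print(url_pair('jessie'))
# ('http://auto.mirror.devuan.org/devuan/dists/jessie', './spool/devuan/dists/jessie')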

lib/config.py (131 lines changed)

@@ -2,39 +2,41 @@
 # copyright (c) 2017 - Ivan J. <parazyd@dyne.org>
 # see LICENSE file for copyright and license details

-spooldir = "./spool"
-sign_key = "fa1b0274"
-mergedir = "./merged"
-mergedsubdirs = ["dists", "pool"]
+spooldir = './spool'
+sign_key = 'fa1b0274'
+mergedir = './merged'
+mergedsubdirs = ['dists', 'pool']

 banpkgs = {'systemd', 'systemd-sysv'}
 #checksums = [ 'md5sum', 'sha1', 'sha256', 'sha512' ]

 repos = {
-    # key name is priority, first is 0
-    0: {
-        "name": "DEVUAN",
-        "host": "packages.devuan.org",
-        "dists": "devuan/dists",
-        "pool": "devuan/pool",
-        "aliases": False,
-        "skipmissing": False
+    'devuan': {
+        'name': 'DEVUAN',
+        'host': 'http://auto.mirror.devuan.org',
+        'dists': 'devuan/dists',
+        'pool': 'devuan/pool',
+        'aliases': False,
+        'skipmissing': False,
+        'priority': 0,
     },
-    1: {
-        "name": "DEBIAN-SECURITY",
-        "host": "security.debian.org",
-        "dists": "dists",
-        "pool": "pool",
-        "aliases": True,
-        "skipmissing": True
+    'debian-sec': {
+        'name': 'DEBIAN-SECURITY',
+        'host': 'http://security.debian.org',
+        'dists': 'dists',
+        'pool': 'pool',
+        'aliases': True,
+        'skipmissing': True,
+        'priority': 1,
     },
-    2: {
-        "name": "DEBIAN",
-        #"host": "httpredir.debian.org",
-        "host": "ftp.debian.org",
-        "dists": "debian/dists",
-        "pool": "debian/pool",
-        "aliases": True,
-        "skipmissing": False
+    'debian': {
+        'name': 'DEBIAN',
+        #'host': 'httpredir.debian.org',
+        'host': 'http://ftp.debian.org',
+        'dists': 'debian/dists',
+        'pool': 'debian/pool',
+        'aliases': True,
+        'skipmissing': False,
+        'priority': 2,
     }
 }
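Since repos is now keyed by a short name instead of an integer, plain iteration no longer implies priority order; the ordering survives as the new 'priority' field. A quick sketch of recovering it (entries abbreviated to the relevant keys):

# abbreviated stand-in for the repos mapping above
repos = {
    'devuan': {'name': 'DEVUAN', 'priority': 0},
    'debian-sec': {'name': 'DEBIAN-SECURITY', 'priority': 1},
    'debian': {'name': 'DEBIAN', 'priority': 2},
}

# walk repositories from highest to lowest priority (0 first)
for key, repo in sorted(repos.items(), key=lambda kv: kv[1]['priority']):
    print(key, repo['name'])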
@@ -44,94 +46,39 @@ suites = {
         'jessie-backports',
         'jessie-proposed-updates',
         'jessie-security',
-        'jessie-updates'
+        'jessie-updates',
     ],
     'ascii': [
         'ascii',
         'ascii-backports',
         'ascii-proposed-updates',
         'ascii-security',
-        'ascii-updates'
+        'ascii-updates',
     ],
     'unstable': [
-        'unstable'
+        'unstable',
     ]
 }

 aliases = {
-    "DEBIAN-SECURITY": {
+    'DEBIAN-SECURITY': {
         'ascii-security': 'testing/updates',
-        'jessie-security': 'jessie/updates'
+        'jessie-security': 'jessie/updates',
     },
-    "DEBIAN": {
+    'DEBIAN': {
         'ascii': 'testing',
         'ascii-backports': 'testing-backports',
         'ascii-proposed-updates': 'testing-proposed-updates',
-        'ascii-updates': 'testing-updates'
+        'ascii-updates': 'testing-updates',
     }
 }

 categories = ['main', 'contrib', 'non-free']

-releases = {
-    "Release-jessie": {
-        "Suite": "stable",
-        "Codename": "jessie",
-        "Label": "Devuan",
-        "Version": "1.0",
-        "Description": "Devuan 1.0 Jessie (stable release)"
-    },
-    "Release-ascii": {
-        "Suite": "testing",
-        "Codename": "ascii",
-        "Label": "Devuan",
-        "Version": "2.0",
-        "Description": "Devuan 2.0 Ascii (testing release)"
-    },
-    "Release-unstable": {
-        "Suite": "unstable",
-        "Codename": "ceres",
-        "Label": "Devuan",
-        "Version": "x.x",
-        "Description": "Devuan x.x Ceres (unstable release)"
-    }
-}
-
-binaryarches = [
-    'all',
-    'alpha',
-    'amd64',
-    'arm64',
-    'armel',
-    'armhf',
-    'hppa',
-    'hurd-i386',
-    'i386',
-    'ia64',
-    'kfreebsd-amd64',
-    'kfreebsd-i386',
-    'mips',
-    'mips64el',
-    'mipsel',
-    'powerpc',
-    'ppc64el',
-    's390x',
-    'sparc'
-]
-
-installerarches = [
-    'amd64',
-    'arm64',
-    'armel',
-    'i386'
-]
-
 mainrepofiles = [
-    "InRelease",
-    "Release",
-    "Release.gpg"
+    'InRelease',
+    'Release',
+    'Release.gpg'
 ]

 packages_keys = [
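The aliases table above is what pop_dirs() consults when a repo has 'aliases' set to True: a Devuan suite name is swapped for its upstream Debian counterpart before the URL is built, and suites without an alias can be skipped via 'skipmissing'. A small sketch of that lookup, with the DEBIAN mapping inlined:

# inlined from aliases['DEBIAN'] above
debian_aliases = {
    'ascii': 'testing',
    'ascii-backports': 'testing-backports',
    'ascii-proposed-updates': 'testing-proposed-updates',
    'ascii-updates': 'testing-updates',
}

def resolve(suite, aliases, skipmissing=False):
    # mirror the alias handling in pop_dirs(): prefer the alias,
    # otherwise skip the suite entirely when skipmissing is set
    if suite in aliases:
        return aliases[suite]
    return None if skipmissing else suite

print(resolve('ascii', debian_aliases))           # testing
print(resolve('unstable', debian_aliases, True))  # None, i.e. skipped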

lib/fs.py (2 lines changed)

@@ -31,4 +31,4 @@ def crawl():
         paths[repo] = sts
     return paths

-# print(crawl())
+print(crawl())
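Since print(crawl()) sits at module level, it now runs whenever lib.fs is imported, not only when the file is executed directly. A toy illustration of that import-time side effect (the real crawl() is not shown in this hunk):

# toymod.py: module-level statements execute at import time
def crawl():
    # toy stand-in; lib/fs.py's crawl() returns per-repo suite paths
    return {'DEVUAN': ['jessie', 'ascii', 'unstable']}

print(crawl())  # fires on "import toymod" as well as "python3 toymod.py"

# guarding it would keep the debug output out of importers:
# if __name__ == '__main__':
#     print(crawl())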

lib/net.py (8 lines changed)

@@ -1,21 +1,23 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # copyright (c) 2017 - Ivan J. <parazyd@dyne.org>
 # see LICENSE file for copyright and license details

 import requests
+import os

-from log import die, notice, warn, cleanexit
+from .log import die, notice, warn, cleanexit


 def download(url, path):
     print("\tdownloading: %s\n\tto: %s" % (url, path))
     r = requests.get(url, stream=True)
     if r.status_code == 404:
-        warn("download of %s failed: not found!", url)
+        warn("download of %s failed: not found!" % url)
         return
     elif r.status_code != 200:
         die("download of %s failed", url)
+    os.makedirs(os.path.dirname(path), exist_ok=True)
     with open(path, "wb") as f:
         # XXX: should be more on gbit servers
         for chunk in r.iter_content(chunk_size=1024):