4 changed files with 82 additions and 155 deletions
@ -1,62 +1,40 @@ |
|||
#!/usr/bin/env python2 |
|||
# copyright (c) 2017 - Ivan J. <parazyd@dyne.org> |
|||
# see LICENSE file for copyright and license details |
|||
#!/usr/bin/env python3 |
|||
|
|||
import os |
|||
import sys |
|||
from os.path import join |
|||
from pprint import pprint |
|||
|
|||
import lib.config as config |
|||
import lib.delta as delta |
|||
import lib.fs as fs |
|||
import lib.net as net |
|||
from lib.log import notice, warn |
|||
|
|||
|
|||
def pop_dirs():
    """Create the initial spool directory structure and populate it.

    For every configured repository: download the main repo files
    (Release etc.) for each crawled suite, parse the Release file, and
    download every file it references into the spool directory.

    Side effects: creates directories under config.amprolla["spooldir"]
    and downloads files over HTTP via net.download().
    """
    paths = fs.crawl()

    notice("creating initial directory structure")
    # config.repos is a list of dicts; paths is keyed by each repo's name,
    # so iterate the repos directly instead of by index.
    for repo in config.repos:
        baseurl = "http://" + "/".join([repo["host"], repo["dists"]])
        basepath = "/".join([config.amprolla["spooldir"], repo["dists"]])

        for suite in paths[repo["name"]]:
            suiteurl = "/".join([baseurl, suite])
            suitepath = "/".join([basepath, suite])
            if not os.path.exists(suitepath):
                os.makedirs(suitepath)

            for repofile in config.mainrepofiles:
                relurl = "/".join([suiteurl, repofile])
                relfile = "/".join([suitepath, repofile])
                if not os.path.isfile(relfile):
                    net.download(relurl, relfile)

            # BUG FIX: relmap previously leaked from the prior iteration
            # (or was unbound on the first one) when the Release file was
            # missing; reset it and skip this suite on failure.
            relmap = None
            try:
                with open(suitepath + "/Release", "rb") as frel:
                    relmap = delta.parse_release(frel.read())
            except IOError:
                warn("no Release file for %s" % suitepath)
                continue

            if relmap is None:
                # parse_release yielded nothing; the old code masked this
                # with a blanket "except TypeError" around the loop below.
                warn("could not parse Release file for %s" % suitepath)
                continue

            for relpath in relmap:
                # sha256 of the empty string: the entry points at /dev/null
                if relmap[relpath] == "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855":
                    continue
                fileurl = "/".join([suiteurl, relpath])
                filepath = "/".join([suitepath, relpath])
                filedir = os.path.dirname(filepath)
                if not os.path.exists(filedir):
                    os.makedirs(filedir)
                if not os.path.isfile(filepath):
                    net.download(fileurl, filepath)
|||
|
|||
|
|||
def merge():
    """Ensure every configured merged subdirectory exists under mergedir."""
    base = config.amprolla["mergedir"]
    for subdir in config.amprolla["mergedsubdirs"]:
        target = "/".join([base, subdir])
        if not os.path.exists(target):
            os.makedirs(target)
|||
from lib.net import download |
|||
|
|||
|
|||
def pop_dirs(repo):
    """Return (remote url, local spool path) pairs for every suite of *repo*.

    repo -- key into config.repos naming the repository to walk.

    Suite names are translated through config.aliases when the repo
    declares aliases; unaliased suites are skipped entirely when the repo
    sets 'skipmissing'. Returns a list of 2-tuples.
    """
    print('Downloading %s directory structure' % repo)
    repodata = config.repos[repo]

    # Loop invariants hoisted: remote and local roots for this repository.
    # BUG FIX: the local path was derived with
    # baseurl.replace(repodata['host'], config.spooldir), which misfires if
    # the host string happens to occur elsewhere in the path; build it
    # directly from the spool dir instead.
    baseurl = join(repodata['host'], repodata['dists'])
    basepath = join(config.spooldir, repodata['dists'])

    skips = ['jessie-security', 'ascii-security']  # XXX: hack

    urls = []
    for release in config.suites:
        for suite_name in config.suites[release]:
            suite = suite_name
            if repodata['aliases'] is True:
                if suite_name in config.aliases[repodata['name']]:
                    suite = config.aliases[repodata['name']][suite_name]
                elif repodata['skipmissing'] is True:
                    continue
            if repo == 'debian' and suite_name in skips:
                continue
            urls.append((join(baseurl, suite), join(basepath, suite)))

    return urls
|||
|
|||
# Fetch the base repository metadata files for every configured repo:
# walk each repo's (remote, local) suite pairs and mirror the main files.
for repo_name in config.repos:
    for remote_base, local_base in pop_dirs(repo_name):
        for repofile in config.mainrepofiles:
            remote_url = join(remote_base, repofile)
            local_path = join(local_base, repofile)
            download(remote_url, local_path)
|||
|
Loading…
Reference in new issue