Browse Source

styling fixes

debianarchive-update
parazyd 5 years ago
parent
commit
57ac2b2a17
Signed by untrusted user: parazyd GPG Key ID: F0CB28FCF78637DE
  1. 6
      amprolla_merge_contents.py
  2. 3
      lib/config.def.py
  3. 6
      lib/log.py
  4. 20
      lib/net.py
  5. 2
      lib/package.py
  6. 5
      lib/release.py

6
amprolla_merge_contents.py

@@ -52,9 +52,9 @@ def write_contents(pkgs, filename):
gzf = gzip_open(filename, 'w')
for pkg, files in sorted(pkgs.items()):
for f in files:
ln = "%s %s\n" % (f, pkg)
gzf.write(ln.encode('utf-8'))
for file in files:
line = "%s %s\n" % (file, pkg)
gzf.write(line.encode('utf-8'))
gzf.write(b'\n')
gzf.close()

3
lib/config.def.py

@@ -4,7 +4,8 @@
amprolla configuration file
"""
from hashlib import md5, sha1, sha256
# from hashlib import md5, sha1, sha256
from hashlib import sha256
cpunm = 4 # number of cpus you want to use for multiprocessing
logdir = './log'

6
lib/log.py

@@ -51,6 +51,6 @@ def logtofile(filename, text, redo=False):
makedirs(logdir, exist_ok=True)
if redo:
remove(join(logdir, filename))
lf = open(join(logdir, filename), 'a')
lf.write(text)
lf.close()
lfile = open(join(logdir, filename), 'a')
lfile.write(text)
lfile.close()

20
lib/net.py

@@ -8,7 +8,7 @@ from os import makedirs
from os.path import dirname
import requests
from lib.log import die, info, warn
from lib.log import info, warn
def download(uris):
@@ -20,21 +20,21 @@ def download(uris):
info("dl: %s" % url)
try:
r = requests.get(url, stream=True, timeout=20)
rfile = requests.get(url, stream=True, timeout=20)
except (requests.exceptions.ConnectionError,
requests.exceptions.ReadTimeout) as e:
warn('Caught exception: "%s". Retrying...' % e)
requests.exceptions.ReadTimeout) as err:
warn('Caught exception: "%s". Retrying...' % err)
return download(uris)
if r.status_code != 200:
warn('%s failed: %d' % (url, r.status_code))
if rfile.status_code != 200:
warn('%s failed: %d' % (url, rfile.status_code))
return
makedirs(dirname(path), exist_ok=True)
f = open(path, 'wb')
lfile = open(path, 'wb')
# chunk_size {sh,c}ould be more on gbit servers
for chunk in r.iter_content(chunk_size=1024):
for chunk in rfile.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
lfile.write(chunk)
# f.flush()
f.close()
lfile.close()

2
lib/package.py

@@ -7,7 +7,7 @@ Package merging functions and helpers
from os import makedirs
from os.path import dirname, isfile, join
from gzip import open as gzip_open
from lzma import open as lzma_open
# from lzma import open as lzma_open
from shutil import copyfile
import lib.globalvars as globalvars

5
lib/release.py

@@ -77,11 +77,6 @@ def write_release(oldrel, newrel, filelist, r, sign=True, rewrite=True):
uncomp = gzip_decomp(open(f+'.gz', 'rb').read())
new.write(' %s %8s %s\n' % (csum['f'](uncomp).hexdigest(),
len(uncomp), f.replace(r+'/', '')))
# elif basename(f).startswith('Contents') and isfile(f+'.gz'):
# uncomp = gzip_decomp(open(f+'.gz', 'rb').read())
# new.write(' %s %8s %s\n' % (csum['f'](uncomp).hexdigest(),
# len(uncomp), f.replace(r+'/', '')))
new.close()
if sign:

Loading…
Cancel
Save