Browse Source

begin logging refactoring

keep-around/e977c9e07f177e8bc2b51265de1bc93475397f61
parazyd 6 years ago
parent
commit
ae0f2f3219
Signed by untrusted user: parazyd GPG Key ID: F0CB28FCF78637DE
  1. 19
      amprolla_update.py
  2. 54
      lib/log.py
  3. 13
      lib/net.py

19
amprolla_update.py

@ -12,6 +12,7 @@ import requests
from amprolla_merge import gen_release, merge, prepare_merge_dict
from lib.config import aliases, cpunm, repos, repo_order, spooldir
from lib.log import info
from lib.parse import compare_dict, get_date, get_time, parse_release
from lib.net import download
@ -23,11 +24,11 @@ def remote_is_newer(remote, local):
rem_date = get_date(remote)
loc_date = get_date(local)
print('Remote date: %s' % rem_date)
print('Local date: %s' % loc_date)
# print('Remote date: %s' % rem_date)
# print('Local date: %s' % loc_date)
if get_time(rem_date) > get_time(loc_date):
print('Remote Release is newer!')
info('Remote Release is newer!')
return True
return False
@ -37,8 +38,8 @@ def perform_update(suite, paths):
"""
Performs an incremental update and merge of a given suite
"""
print('Checking for updates in %s' % suite)
print(paths)
info('Checking for updates in %s' % suite)
# print(paths)
needsmerge = {}
needsmerge['downloads'] = [] # all files that have to be downloaded
@ -49,7 +50,7 @@ def perform_update(suite, paths):
needsmerge[i]['mergelist'] = []
if paths[c]:
print('Working on %s repo' % i)
info('Working on %s repo' % i)
remote_path = paths[c].replace(spooldir, repos[i]['host'])
remote_rel = requests.get(join(remote_path, 'Release'))
@ -76,7 +77,7 @@ def perform_update(suite, paths):
# download what needs to be downloaded
if needsmerge['downloads']:
print('Downloading updates...')
info('Downloading updates...')
dlpool = Pool(cpunm)
dlpool.map(download, needsmerge['downloads'])
@ -111,13 +112,13 @@ def perform_update(suite, paths):
# perform the actual merge
if merge_list:
print('Merging files...')
info('Merging files...')
mrgpool = Pool(cpunm)
mrgpool.map(merge, merge_list)
# generate Release files if we got any new files
if needsmerge['downloads']:
print('Generating Release...')
info('Generating Release...')
gen_release(suite)

54
lib/log.py

@ -4,33 +4,53 @@
Logging functions
"""
# TODO: Replace with logging
import os
from time import time
from os import makedirs, remove
from os.path import join
import sys
from lib.config import logdir
def die(msg):
    """Print *msg* as a bold-red error line and abort with exit status 1."""
    line = "\033[1;31m[E] %s\033[0m" % msg
    print(line)
    sys.exit(1)
def die(msg, tofile=True):
    """Log an error message, optionally to the log file, and exit(1).

    tofile -- when True (default), also append the stamped line to
              'amprolla.txt' via logtofile().
    """
    # Unix-timestamp prefix; the trailing newline is for the log file.
    stamped = "%d [ERR] %s\n" % (int(time()), msg)
    print(stamped)
    if tofile:
        logtofile('amprolla.txt', stamped)
    sys.exit(1)
def notice(msg):
    """Print *msg* as a bold-green notice line (returns None)."""
    print("\033[1;32m(*) %s\033[0m" % msg)
def warn(msg, tofile=True):
    """Log a warning message and keep running.

    tofile -- when True (default), also append the stamped line to
              'amprolla.txt' via logtofile().
    """
    stamped = "%d [WARN] %s\n" % (int(time()), msg)
    print(stamped)
    if tofile:
        logtofile('amprolla.txt', stamped)
def warn(msg):
    """Print *msg* as a bold-yellow warning line (returns None)."""
    line = "\033[1;33m[W] %s\033[0m" % msg
    print(line)
def info(msg, tofile=True):
    """Log an informational message and keep running.

    tofile -- when True (default), also append the stamped line to
              'amprolla.txt' via logtofile().
    """
    stamped = "%d [INFO] %s\n" % (int(time()), msg)
    print(stamped)
    if tofile:
        logtofile('amprolla.txt', stamped)
def cleanexit():
    """Announce a clean shutdown via notice() and exit with status 0."""
    notice("exiting cleanly...")
    sys.exit(0)
def logtofile(filename, text, redo=False):
    """Write *text* to the log file *filename* inside logdir.

    By default the text is appended; when redo is True the file is
    rewritten from scratch.  The log directory is created on demand.
    """
    makedirs(logdir, exist_ok=True)
    # Open in 'w' to rewrite: truncating a missing file is not an error,
    # whereas the previous remove()-then-append approach raised
    # FileNotFoundError when redo was requested before the file existed.
    mode = 'w' if redo else 'a'
    # Context manager guarantees the handle is closed even if write() fails.
    with open(join(logdir, filename), mode) as logfile:
        logfile.write(text)

13
lib/net.py

@ -4,10 +4,11 @@
Network functions/helpers
"""
import os
from os import makedirs
from os.path import dirname
import requests
from .log import die, warn
from lib.log import die, info, warn
def download(uris):
@ -16,17 +17,17 @@ def download(uris):
"""
url = uris[0]
path = uris[1]
print("downloading: %s\nto: %s" % (url, path))
info("dl: %s" % url)
r = requests.get(url, stream=True)
if r.status_code == 404:
warn("download of %s failed: not found!" % url)
warn("failed: 404 not found!")
return
elif r.status_code != 200:
die("download of %s failed" % url)
die("failed: %d" % r.status_code)
os.makedirs(os.path.dirname(path), exist_ok=True)
makedirs(dirname(path), exist_ok=True)
f = open(path, 'wb')
# chunk_size {sh,c}ould be more on gbit servers
for chunk in r.iter_content(chunk_size=1024):

Loading…
Cancel
Save