
fix multiprocessing issue with download()

the process should not forcefully exit with die() since it causes
a deadlock for multiprocessing.Pool

this commit also reimplements handling of
requests.exceptions.ReadTimeout
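
The deadlock described here comes from how multiprocessing.Pool reacts to a worker that exits instead of returning: a die()-style sys.exit() raises SystemExit, which the pool's worker loop does not catch, so the worker process dies and the result for its task never arrives, leaving pool.map() blocked. A minimal sketch of that difference, using hypothetical fragile() and tolerant() workers that are not part of this repository:

# Minimal illustration (not from the repository) of why a hard exit
# inside a Pool worker is dangerous: the worker process dies, the task
# result is never delivered, and pool.map() can block indefinitely.
import sys
from multiprocessing import Pool


def fragile(n):
    if n == 3:
        sys.exit(1)        # what a die()-style helper effectively does
    return n * n


def tolerant(n):
    if n == 3:
        print('warn: skipping %d' % n)  # warn and return instead
        return None
    return n * n


if __name__ == '__main__':
    with Pool(2) as pool:
        print(pool.map(tolerant, range(6)))   # completes normally
        # pool.map(fragile, range(6))  # risks hanging the parent process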
parazyd, 6 years ago
parent commit: e57b27612f
Signed by untrusted user: parazyd (GPG Key ID: F0CB28FCF78637DE)
1 changed file: lib/net.py (10 lines changed)
@@ -21,16 +21,14 @@ def download(uris):
     try:
         r = requests.get(url, stream=True, timeout=20)
-        # TODO: investigate also requests.exceptions.ReadTimeout
-    except requests.exceptions.ConnectionError as e:
+    except (requests.exceptions.ConnectionError,
+            requests.exceptions.ReadTimeout) as e:
         warn('Caught exception: "%s". Retrying...' % e)
         return download(uris)
-    if r.status_code == 404:
-        warn('failed: 404 not found!')
+    if r.status_code != 200:
+        warn('%s failed: %d' % (url, r.status_code))
         return
-    elif r.status_code != 200:
-        die('failed: %d' % r.status_code)
     makedirs(dirname(path), exist_ok=True)
     f = open(path, 'wb')
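
Read together, the hunk turns download() into a worker that always returns, whether the request times out, the connection drops, or the server answers with a non-200 status. A minimal sketch of the patched helper driven from a Pool; the uris tuple layout, the stand-in warn(), the chunked write, and the example jobs are assumptions for illustration and only the control flow mirrors the diff above:

# Hedged sketch of the patched helper in context (assumed details noted
# in comments); only the try/except and status-code handling come from
# the diff itself.
from multiprocessing import Pool
from os import makedirs
from os.path import dirname

import requests


def warn(msg):
    # stand-in for the project's warn() logger
    print('WARN: %s' % msg)


def download(uris):
    url, path = uris  # assumed tuple layout: (source URL, target path)
    try:
        r = requests.get(url, stream=True, timeout=20)
    except (requests.exceptions.ConnectionError,
            requests.exceptions.ReadTimeout) as e:
        warn('Caught exception: "%s". Retrying...' % e)
        return download(uris)
    if r.status_code != 200:
        # warn and return instead of die(): the worker process stays
        # alive and the Pool keeps collecting results for the other jobs
        warn('%s failed: %d' % (url, r.status_code))
        return
    makedirs(dirname(path), exist_ok=True)
    with open(path, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            f.write(chunk)


if __name__ == '__main__':
    jobs = [('https://example.org/a', '/tmp/dl/a'),
            ('https://example.org/b', '/tmp/dl/b')]
    with Pool(4) as pool:
        pool.map(download, jobs)

With warn-and-return, a single 404 or timeout only skips that one file; pool.map() still receives a result for every job instead of waiting on a worker that has exited.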
