Browse Source

port reportbug.checkversions to py3k

master
Sandro Tosi 6 years ago
parent
commit
4194c1886a
  1. reportbug/checkversions.py — 103 lines changed
  2. test/test_checkversions.py — 14 lines changed

reportbug/checkversions.py (103 lines changed)

@@ -24,13 +24,13 @@
import sys
import os
import re
import urllib2
import sgmllib
import urllib.request, urllib.error, urllib.parse
import gc
import time
import gzip
import utils
from urlutils import open_url
from . import utils
from .urlutils import open_url
from reportbug.exceptions import (
NoNetwork,
)
@@ -38,54 +38,30 @@ from reportbug.exceptions import (
# needed to parse new.822
from debian.deb822 import Deb822
from debian import debian_support
from functools import reduce
RMADISON_URL = 'http://qa.debian.org/madison.php?package=%s&text=on'
RMADISON_URL = 'https://qa.debian.org/madison.php?package=%s&text=on'
INCOMING_URL = 'http://incoming.debian.org/'
NEWQUEUE_URL = 'http://ftp-master.debian.org/new.822'
# The format is an unordered list
class BaseParser(sgmllib.SGMLParser):
def __init__(self):
sgmllib.SGMLParser.__init__(self)
self.savedata = None
# --- Formatter interface, taking care of 'savedata' mode;
# shouldn't need to be overridden
def handle_data(self, data):
if self.savedata is not None:
self.savedata = self.savedata + data
# --- Hooks to save data; shouldn't need to be overridden
def save_bgn(self):
self.savedata = ''
def save_end(self, mode=0):
data = self.savedata
self.savedata = None
if not mode and data is not None:
data = ' '.join(data.split())
return data
class IncomingParser(sgmllib.SGMLParser):
def __init__(self, package, arch='i386'):
sgmllib.SGMLParser.__init__(self)
self.found = []
self.savedata = None
arch = r'(?:all|' + re.escape(arch) + ')'
self.package = re.compile(re.escape(package) + r'_([^_]+)_' + arch + '.deb')
def start_a(self, attrs):
for attrib, value in attrs:
if attrib.lower() != 'href':
continue
mob = self.package.match(value)
if mob:
self.found.append(mob.group(1))
## This needs to be adapted now that incoming is an APT repository
# class IncomingParser(sgmllib.SGMLParser):
# def __init__(self, package, arch='i386'):
# sgmllib.SGMLParser.__init__(self)
# self.found = []
# self.savedata = None
# arch = r'(?:all|' + re.escape(arch) + ')'
# self.package = re.compile(re.escape(package) + r'_([^_]+)_' + arch + '.deb')
#
# def start_a(self, attrs):
# for attrib, value in attrs:
# if attrib.lower() != 'href':
# continue
#
# mob = self.package.match(value)
# if mob:
# self.found.append(mob.group(1))
def compare_versions(current, upstream):
@@ -117,15 +93,14 @@ def get_versions_available(package, timeout, dists=None, http_proxy=None, arch='
page = open_url(url)
except NoNetwork:
return {}
except urllib2.HTTPError, x:
print >> sys.stderr, "Warning:", x
except urllib.error.HTTPError as x:
print("Warning:", x, file=sys.stderr)
return {}
if not page:
return {}
# read the content of the page, remove spaces, empty lines
content = page.read().replace(' ', '').strip()
page.close()
content = page.replace(' ', '').strip()
versions = {}
for line in content.split('\n'):
@@ -147,8 +122,8 @@ def get_newqueue_available(package, timeout, dists=None, http_proxy=None, arch='
page = open_url(NEWQUEUE_URL, http_proxy, timeout)
except NoNetwork:
return {}
except urllib2.HTTPError, x:
print >> sys.stderr, "Warning:", x
except urllib.error.HTTPError as x:
print("Warning:", x, file=sys.stderr)
return {}
if not page:
return {}
@@ -170,21 +145,21 @@ def get_incoming_version(package, timeout, http_proxy=None, arch='i386'):
page = open_url(INCOMING_URL, http_proxy, timeout)
except NoNetwork:
return None
except urllib2.HTTPError, x:
print >> sys.stderr, "Warning:", x
except urllib.error.HTTPError as x:
print("Warning:", x, file=sys.stderr)
return None
if not page:
return None
parser = IncomingParser(package, arch)
for line in page:
parser.feed(line)
parser.close()
try:
page.fp._sock.recv = None
except:
pass
page.close()
# parser = IncomingParser(package, arch)
# for line in page:
# parser.feed(line)
# parser.close()
# try:
# page.fp._sock.recv = None
# except:
# pass
# page.close()
if parser.found:
found = parser.found

test/test_checkversions.py (14 lines changed)

@@ -1,4 +1,4 @@
import unittest2
import unittest
from reportbug import checkversions
from nose.plugins.attrib import attr
@@ -6,7 +6,7 @@ from nose.plugins.attrib import attr
import mock
class TestCheckversions(unittest2.TestCase):
class TestCheckversions(unittest.TestCase):
def test_compare_versions(self):
# <current, upstream>
# 1 upstream newer than current
@@ -34,7 +34,7 @@ class TestCheckversions(unittest2.TestCase):
self.assertEqual(checkversions.later_version('1.2.4', '1.2.3'), '1.2.4')
class TestNewQueue(unittest2.TestCase):
class TestNewQueue(unittest.TestCase):
def test_bts704040(self):
# return an iterable object, so that Deb822 (what parses the result)
# will work
@@ -62,14 +62,14 @@ Source: aaa
res = checkversions.get_newqueue_available('procps', 60)
self.assertEqual(res.keys()[0], u'experimental (new)')
self.assertEqual(res[u'experimental (new)'], u'1:3.3.7-1')
self.assertEqual(list(res.keys())[0], 'experimental (new)')
self.assertEqual(res['experimental (new)'], '1:3.3.7-1')
# restore the original checkversions.open_url() method
checkversions.open_url = save_open_url
class TestVersionAvailable(unittest2.TestCase):
class TestVersionAvailable(unittest.TestCase):
@attr('network') # marking the test as using network
def test_bts642032(self):
vers = checkversions.get_versions_available('reportbug', 60)
@@ -94,4 +94,4 @@ class TestVersionAvailable(unittest2.TestCase):
def test_codenames(self):
vers = checkversions.get_versions_available('reportbug', 60, ['sid'])
self.assertEqual(1, len(vers))
self.assertEqual(vers.keys()[0], 'unstable')
self.assertEqual(list(vers.keys())[0], 'unstable')

Loading…
Cancel
Save