diff --git a/checkversions.py b/checkversions.py
index 6c9fafe..7d9f9ca 100644
--- a/checkversions.py
+++ b/checkversions.py
@@ -20,7 +20,7 @@
 ## ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
 ## SOFTWARE.
 #
-# $Id: checkversions.py,v 1.6.2.1 2006-09-29 02:21:57 lawrencc Exp $
+# $Id: checkversions.py,v 1.6.2.2 2006-10-16 17:14:03 lawrencc Exp $
 #
 # Version ##VERSION##; see changelog for revision history

@@ -194,10 +194,15 @@ def get_versions_available(package, dists=None, http_proxy=None, arch='i386'):
         return {}

     parser = PackagesParser(arch)
-    for line in page.readlines():
+    for line in page:
         parser.feed(line)
     parser.close()
+    try:
+        page.fp._sock.recv = None
+    except:
+        pass
     page.close()
+
 ##    content = page.read()
 ##    parser.feed(content)
 ##    parser.close()
@@ -207,6 +212,8 @@ def get_versions_available(package, dists=None, http_proxy=None, arch='i386'):
     for dist in dists:
         if dist in parser.versions:
             versions[dist] = parser.versions[dist]
+    del parser
+    del page
     return versions


@@ -223,15 +230,25 @@ def get_newqueue_available(package, dists=None, http_proxy=None, arch='i386'):
     if not page:
         return {}

     parser = NewQueueParser(package, arch)
-    parser.feed(page.read())
+    for line in page:
+        parser.feed(line)
     parser.close()
+    try:
+        page.fp._sock.recv = None
+    except:
+        pass
     page.close()
+    print repr(page)
+

     versions = {}
     for dist in dists:
         if dist in parser.versions:
             versions[dist] = parser.versions[dist]
+    del parser
+    del page
+    print 'HERE', gc.garbage
     return versions


@@ -246,15 +263,25 @@ def get_incoming_version(package, http_proxy=None, arch='i386'):
         return None

     parser = IncomingParser(package, arch)
-    parser.feed(page.read())
+    for line in page:
+        parser.feed(line)
     parser.close()
+    try:
+        page.fp._sock.recv = None
+    except:
+        pass
     page.close()

     if parser.found:
-        return reduce(later_version, parser.found, '0')
-
+        found = parser.found
+        del parser
+        return reduce(later_version, found, '0')
+
+    del page
+    del parser
     return None

+import gc

 def check_available(package, version, dists=None, check_incoming=True,
                     check_newqueue=True, http_proxy=None, arch='i386'):
@@ -264,13 +291,16 @@
         iv = get_incoming_version(package, http_proxy, arch)
         if iv:
             avail['incoming'] = iv
-    avail.update(get_versions_available(package, dists, http_proxy, arch))
+    stuff = get_versions_available(package, dists, http_proxy, arch)
+    avail.update(stuff)
     if check_newqueue:
         import reportbug
         srcpackage = reportbug.get_source_name(package)
         if srcpackage is None:
             srcpackage = package
-        avail.update(get_newqueue_available(srcpackage, dists, http_proxy, arch))
+        stuff = get_newqueue_available(srcpackage, dists, http_proxy, arch)
+        avail.update(stuff)
+        print gc.garbage, stuff

     new = {}
     newer = 0
@@ -291,7 +321,14 @@
     return new, too_new

 if __name__=='__main__':
-    #print check_available('mozilla-browser', '2:1.5-3', arch='s390')
-    print check_available('openssh-server', '1:4.2p1-8', arch='i386')
-    print check_available('openssh-server', '1:4.2p1-8', arch='kfreebsd-i386')
+    import time
+    import gc
+
+    gc.set_debug(gc.DEBUG_LEAK)
+    print get_newqueue_available('reportbug')
+    print gc.garbage
+    print check_available('reportbug', '3.7', arch='s390')
+    #print check_available('openssh-server', '1:4.2p1-8', arch='i386')
+    #print check_available('openssh-server', '1:4.2p1-8', arch='kfreebsd-i386')
+    time.sleep(1000)
     #print check_available('dpkg', '1.10.2', arch='sparc')
diff --git a/debianbts.py b/debianbts.py
index 81d11e3..7997f68 100644
--- a/debianbts.py
+++ b/debianbts.py
@@ -22,7 +22,7 @@
 #
 # Version ##VERSION##; see changelog for revision history
 #
-# $Id: debianbts.py,v 1.24.2.6 2006-09-29 02:29:52 lawrencc Exp $
+# $Id: debianbts.py,v 1.24.2.7 2006-10-16 17:14:03 lawrencc Exp $

 import sgmllib, glob, os, re, reportbug, rfc822, time, urllib, checkversions
 from urlutils import open_url
@@ -665,11 +665,17 @@ def parse_html_report(number, url, http_proxy, followups=False, cgi=True):
     if not page:
         return None

-    content = page.read()
     parser = BTSParser(cgi=cgi, followups=followups)
-    parser.feed(content)
+    for line in page:
+        parser.feed(line)
     parser.close()
+    try:
+        page.fp._sock.recv = None
+    except:
+        pass
+    page.close()
+

     items = parser.preblock
     title = "#%d: %s" % (number, parser.title)

@@ -705,6 +711,12 @@
     # Make this seekable
     wholefile = cStringIO.StringIO(page.read())
+    try:
+        page.fp._sock.recv = None
+    except:
+        pass
+    page.close()
+
     mbox = mailbox.UnixMailbox(wholefile, msgfactory)

     title = ''

@@ -754,18 +766,27 @@
     if not page:
         return (0, None, None)

-    content = page.read()
-    if 'Maintainer' not in content:
-        return (0, None, None)
+    #content = page.read()
+    #if 'Maintainer' not in content:
+    #    return (0, None, None)

     parser = BTSParser(cgi=True)
-    parser.feed(content)
+    for line in page:
+        parser.feed(line)
     parser.close()
+    try:
+        page.fp._sock.recv = None
+    except:
+        pass
+    page.close()

     # Reorganize hierarchy to put recently-fixed bugs at top
     parser.reorganize()

-    return parser.bugcount, parser.title, parser.hierarchy
+    data = (parser.bugcount, parser.title, parser.hierarchy)
+    del parser
+
+    return data

 def get_cgi_report(number, system='debian', http_proxy='', archived=False,
                    followups=False):
@@ -796,13 +817,19 @@
     if not page:
         return (0, None, None)

-    content = page.read()
-    if 'Maintainer' not in content:
-        return (0, None, None)
+    #content = page.read()
+    #if 'Maintainer' not in content:
+    #    return (0, None, None)

     parser = BTSParser()
-    parser.feed(content)
+    for line in page:
+        parser.feed(line)
     parser.close()
+    try:
+        page.fp._sock.recv = None
+    except:
+        pass
+    page.close()

     return parser.bugcount, parser.title, parser.hierarchy

@@ -836,5 +863,13 @@
     return parse_html_report(number, url, http_proxy, followups, cgi=False)


+class NullParser(sgmllib.SGMLParser):
+    def __init__(self):
+        sgmllib.SGMLParser.__init__(self)
+
 if __name__ == '__main__':
-    print get_cgi_report(2)
+    import pprint
+
+    data = get_cgi_reports('reportbug')
+    pprint.pprint(data)
+    time.sleep(1000)
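
Annotation on the recurring workaround (commentary, not part of the patch): every hunk
replaces a one-shot parser.feed(page.read()) with a streaming loop and then pokes
page.fp._sock.recv before page.close(). On the Python 2 stack this code targets,
socket.py copies bound methods of the real socket onto the wrapper object it hands out,
so a response that is merely dropped can stay reachable through those attributes;
clearing one of them appears to be enough to let the collector reclaim the page, and the
bare try/except guards against the attribute path, which is an implementation detail,
not existing. The sketch below shows the same pattern in isolation, using
gc.set_debug(gc.DEBUG_LEAK) the way the new __main__ block in checkversions.py does.
The URL is only a placeholder; any urllib response should behave the same.

    import gc
    import urllib

    # With DEBUG_LEAK, every object that was part of a reference cycle is
    # reported and kept in gc.garbage instead of being freed silently.
    gc.set_debug(gc.DEBUG_LEAK)

    page = urllib.urlopen('http://bugs.debian.org/')  # placeholder URL
    for line in page:        # stream the body instead of page.read()
        pass

    try:
        # Clear a bound-method attribute on the socket wrapper, as the patch
        # does; the page.fp._sock layout is CPython-version specific, hence
        # the broad except mirroring the patch.
        page.fp._sock.recv = None
    except Exception:
        pass
    page.close()

    del page
    gc.collect()
    print 'cycles involving the response:', gc.garbage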