[Pypt-offline-general] SF.net SVN: pypt-offline: [136] trunk
Status: Beta
Brought to you by:
riteshsarraf
|
From: <rit...@us...> - 2007-04-25 14:18:33
|
Revision: 136
http://svn.sourceforge.net/pypt-offline/?rev=136&view=rev
Author: riteshsarraf
Date: 2007-04-25 07:18:34 -0700 (Wed, 25 Apr 2007)
Log Message:
-----------
* Adding offline bug report functionality for Debian.
* We rely on the reportbug packages. We ship them here for user convenience, because they also have to run on Windows.
* For the latest and greatest reportbug, you can always go and download it from Debian Alioth
Added Paths:
-----------
trunk/checkversions.py
trunk/debianbts.py
trunk/fetch_bugs.py
trunk/reportbug.py
trunk/reportbug_exceptions.py
trunk/urlutils.py
Added: trunk/checkversions.py
===================================================================
--- trunk/checkversions.py (rev 0)
+++ trunk/checkversions.py 2007-04-25 14:18:34 UTC (rev 136)
@@ -0,0 +1,334 @@
+#
+# checkversions.py - Find if the installed version of a package is the latest
+#
+# Written by Chris Lawrence <law...@de...>
+# (C) 2002-06 Chris Lawrence
+#
+# This program is freely distributable per the following license:
+#
+## Permission to use, copy, modify, and distribute this software and its
+## documentation for any purpose and without fee is hereby granted,
+## provided that the above copyright notice appears in all copies and that
+## both that copyright notice and this permission notice appear in
+## supporting documentation.
+##
+## I DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL
+## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL I
+## BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
+## DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+## WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
+## ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+## SOFTWARE.
+#
+# $Id: checkversions.py,v 1.6.2.3 2006/10/16 18:52:41 lawrencc Exp $
+#
+# Version 3.35; see changelog for revision history
+
+import sgmllib
+#import HTMLParser
+
+import os, re, sys, urllib2
+from urlutils import open_url
+from reportbug_exceptions import *
+
+PACKAGES_URL = 'http://packages.debian.org/%s'
+INCOMING_URL = 'http://incoming.debian.org/'
+NEWQUEUE_URL = 'http://ftp-master.debian.org/new.html'
+
+# The format is an unordered list
+
+class BaseParser(sgmllib.SGMLParser):
+ def __init__(self):
+ sgmllib.SGMLParser.__init__(self)
+ self.savedata = None
+
+ # --- Formatter interface, taking care of 'savedata' mode;
+ # shouldn't need to be overridden
+
+ def handle_data(self, data):
+ if self.savedata is not None:
+ self.savedata = self.savedata + data
+
+ # --- Hooks to save data; shouldn't need to be overridden
+ def save_bgn(self):
+ self.savedata = ''
+
+ def save_end(self, mode=0):
+ data = self.savedata
+ self.savedata = None
+ if not mode and data is not None: data = ' '.join(data.split())
+ return data
+
+class PackagesParser(BaseParser):
+ def __init__(self, arch='i386'):
+ BaseParser.__init__(self)
+ self.versions = {}
+ self.row = None
+ arch = r'\s(all|'+re.escape(arch)+r')\b'
+ self.arch = re.compile(arch)
+ self.dist = None
+
+ def start_li(self, attrs):
+ if self.row is not None:
+ self.end_li()
+ self.row = []
+
+ def start_a(self, attrs):
+ if self.row is not None:
+ self.save_bgn()
+
+ def end_a(self):
+ if self.row is not None and self.savedata:
+ self.dist = self.save_end()
+
+ def lineend(self):
+ line = self.save_end().strip()
+ if self.arch.search(line):
+ version = line.split(': ', 1)
+ self.versions[self.dist] = version[0]
+
+ def start_br(self, attrs):
+ if self.savedata:
+ self.lineend()
+ self.save_bgn()
+
+ def end_li(self):
+ if self.savedata:
+ self.lineend()
+ self.row = None
+
+class IncomingParser(sgmllib.SGMLParser):
+ def __init__(self, package, arch='i386'):
+ sgmllib.SGMLParser.__init__(self)
+ self.found = []
+ self.savedata = None
+ arch = r'(?:all|'+re.escape(arch)+')'
+ self.package = re.compile(re.escape(package)+r'_([^_]+)_'+arch+'.deb')
+
+ def start_a(self, attrs):
+ for attrib, value in attrs:
+ if attrib.lower() != 'href':
+ continue
+
+ mob = self.package.match(value)
+ if mob:
+ self.found.append(mob.group(1))
+
+class NewQueueParser(BaseParser):
+ def __init__(self, package, arch='i386'):
+ BaseParser.__init__(self)
+ self.package = package
+ self.row = None
+ arch = r'\s(all|'+re.escape(arch)+r')\b'
+ self.arch = re.compile(arch)
+ self.versions = {}
+
+ def start_tr (self, attrs):
+ for name, value in attrs:
+ if name == 'class' and value in ("odd", "even"):
+ self.row = []
+
+ def end_tr (self):
+ if self.row is not None:
+ # row (name, versions, architectures, distribution)
+ dist = "%s (new queue)" % self.row[3]
+ for version in self.row[1].split():
+ self.versions[dist] = version
+ self.row = None
+
+ def start_td (self, attrs):
+ if self.row is None:
+ return
+ self.save_bgn()
+
+ def end_td (self):
+ if self.row is None:
+ return
+ data = self.save_end()
+ l = len(self.row)
+ if l == 0:
+ # package name
+ if self.package == data:
+ # found package name
+ self.row.append(data)
+ else:
+ self.row = None
+ elif l == 2:
+ # architecture
+ if self.arch.search(data):
+ self.row.append(data)
+ else:
+ self.row = None
+ else:
+ self.row.append(data)
+
+def compare_versions(current, upstream):
+ """Return 1 if upstream is newer than current, -1 if current is
+ newer than upstream, and 0 if the same."""
+ if not upstream: return 0
+ rc = os.system('dpkg --compare-versions %s lt %s' % (current, upstream))
+ rc2 = os.system('dpkg --compare-versions %s gt %s' % (current, upstream))
+ if not rc:
+ return 1
+ elif not rc2:
+ return -1
+ return 0
+
+def later_version(a, b):
+ if compare_versions(a, b) > 0:
+ return b
+ return a
+
+def get_versions_available(package, dists=None, http_proxy=None, arch='i386'):
+ if not dists:
+ dists = ('stable', 'testing', 'unstable')
+
+ try:
+ page = open_url(PACKAGES_URL % package, http_proxy)
+ except NoNetwork:
+ return {}
+ except urllib2.HTTPError, x:
+ print >> sys.stderr, "Warning:", x
+ return {}
+ if not page:
+ return {}
+
+ parser = PackagesParser(arch)
+ for line in page:
+ parser.feed(line)
+ parser.close()
+ try:
+ page.fp._sock.recv = None
+ except:
+ pass
+ page.close()
+
+## content = page.read()
+## parser.feed(content)
+## parser.close()
+## page.close()
+
+ versions = {}
+ for dist in dists:
+ if dist in parser.versions:
+ versions[dist] = parser.versions[dist]
+ del parser
+ del page
+
+ return versions
+
+def get_newqueue_available(package, dists=None, http_proxy=None, arch='i386'):
+ if dists is None:
+ dists = ('unstable (new queue)', )
+ try:
+ page = open_url(NEWQUEUE_URL, http_proxy)
+ except NoNetwork:
+ return {}
+ except urllib2.HTTPError, x:
+ print >> sys.stderr, "Warning:", x
+ return {}
+ if not page:
+ return {}
+ parser = NewQueueParser(package, arch)
+ for line in page:
+ parser.feed(line)
+ parser.close()
+ try:
+ page.fp._sock.recv = None
+ except:
+ pass
+ page.close()
+
+ #print repr(page)
+
+ versions = {}
+ for dist in dists:
+ if dist in parser.versions:
+ versions[dist] = parser.versions[dist]
+
+ del parser
+ del page
+ #print 'HERE', gc.garbage
+ return versions
+
+def get_incoming_version(package, http_proxy=None, arch='i386'):
+ try:
+ page = open_url(INCOMING_URL, http_proxy)
+ except NoNetwork:
+ return None
+ except urllib2.HTTPError, x:
+ print >> sys.stderr, "Warning:", x
+ return None
+ if not page:
+ return None
+
+ parser = IncomingParser(package, arch)
+ for line in page:
+ parser.feed(line)
+ parser.close()
+ try:
+ page.fp._sock.recv = None
+ except:
+ pass
+ page.close()
+
+ if parser.found:
+ found = parser.found
+ del parser
+ return reduce(later_version, found, '0')
+
+ del page
+ del parser
+ return None
+
+import gc
+def check_available(package, version, dists=None, check_incoming=True,
+ check_newqueue=True,
+ http_proxy=None, arch='i386'):
+ avail = {}
+
+ if check_incoming:
+ iv = get_incoming_version(package, http_proxy, arch)
+ if iv:
+ avail['incoming'] = iv
+ stuff = get_versions_available(package, dists, http_proxy, arch)
+ avail.update(stuff)
+ if check_newqueue:
+ import reportbug
+ srcpackage = reportbug.get_source_name(package)
+ if srcpackage is None:
+ srcpackage = package
+ stuff = get_newqueue_available(srcpackage, dists, http_proxy, arch)
+ avail.update(stuff)
+ #print gc.garbage, stuff
+
+ new = {}
+ newer = 0
+ for dist in avail:
+ if dist == 'incoming':
+ if ':' in version:
+ ver = version.split(':', 1)[1]
+ else:
+ ver = version
+ comparison = compare_versions(ver, avail[dist])
+ else:
+ comparison = compare_versions(version, avail[dist])
+ if comparison > 0:
+ new[dist] = avail[dist]
+ elif comparison < 0:
+ newer += 1
+ too_new = (newer and newer == len(avail))
+ return new, too_new
+
+if __name__=='__main__':
+ import time
+ import gc
+
+ gc.set_debug(gc.DEBUG_LEAK)
+ print get_newqueue_available('reportbug')
+ print gc.garbage
+ print check_available('reportbug', '3.7', arch='s390')
+ #print check_available('openssh-server', '1:4.2p1-8', arch='i386')
+ #print check_available('openssh-server', '1:4.2p1-8', arch='kfreebsd-i386')
+ time.sleep(1000)
+ #print check_available('dpkg', '1.10.2', arch='sparc')
Added: trunk/debianbts.py
===================================================================
--- trunk/debianbts.py (rev 0)
+++ trunk/debianbts.py 2007-04-25 14:18:34 UTC (rev 136)
@@ -0,0 +1,875 @@
+#
+# debianbts.py - Routines to deal with the debbugs web pages
+#
+# Written by Chris Lawrence <law...@de...>
+# (C) 1999-2006 Chris Lawrence
+#
+# This program is freely distributable per the following license:
+#
+## Permission to use, copy, modify, and distribute this software and its
+## documentation for any purpose and without fee is hereby granted,
+## provided that the above copyright notice appears in all copies and that
+## both that copyright notice and this permission notice appear in
+## supporting documentation.
+##
+## I DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL
+## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL I
+## BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
+## DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+## WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
+## ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+## SOFTWARE.
+#
+# Version 3.35; see changelog for revision history
+#
+# $Id: debianbts.py,v 1.24.2.7 2006/10/16 17:14:03 lawrencc Exp $
+
+import sgmllib, glob, os, re, reportbug, rfc822, time, urllib, checkversions
+from urlutils import open_url
+import sys
+
+import mailbox
+import email
+import email.Errors
+import cStringIO
+import cgi
+
+def msgfactory(fp):
+ try:
+ return email.message_from_file(fp)
+ except email.Errors.MessageParseError:
+ # Don't return None since that will
+ # stop the mailbox iterator
+ return ''
+
+class Error(Exception):
+ pass
+
+# Severity levels
+SEVERITIES = {
+ 'critical' : """makes unrelated software on the system (or the
+ whole system) break, or causes serious data loss, or introduces a
+ security hole on systems where you install the package.""",
+ 'grave' : """makes the package in question unusable by most or all users,
+ or causes data loss, or introduces a security hole allowing access
+ to the accounts of users who use the package.""",
+ 'serious' : """is a severe violation of Debian policy (that is,
+ the problem is a violation of a 'must' or 'required' directive);
+ may or may not affect the usability of the package. Note that non-severe
+ policy violations may be 'normal,' 'minor,' or 'wishlist' bugs.
+ (Package maintainers may also designate other bugs as 'serious' and thus
+ release-critical; however, end users should not do so.)""",
+ 'important' : """a bug which has a major effect on the usability
+ of a package, without rendering it completely unusable to
+ everyone.""",
+ 'does-not-build' : """a bug that stops the package from being built
+ from source. (This is a 'virtual severity'.)""",
+ 'normal' : """a bug that does not undermine the usability of the
+ whole package; for example, a problem with a particular option or
+ menu item.""",
+ 'minor' : """things like spelling mistakes and other minor
+ cosmetic errors that do not affect the core functionality of the
+ package.""",
+ 'wishlist' : "suggestions and requests for new features.",
+ }
+
+# justifications for critical bugs
+JUSTIFICATIONS = {
+ 'critical' : (
+ ('breaks unrelated software', """breaks unrelated software on the system
+ (packages that have a dependency relationship are not unrelated)"""),
+ ('breaks the whole system', """renders the entire system unusable (e.g.,
+ unbootable, unable to reach a multiuser runlevel, etc.)"""),
+ ('causes serious data loss', """causes loss of important, irreplaceable
+ data"""),
+ ('root security hole', """introduces a security hole allowing access to
+ root (or another privileged system account), or data normally
+ accessible only by such accounts"""),
+ ('unknown', """not sure, or none of the above"""),
+ ),
+ 'grave' : (
+ ('renders package unusable', """renders the package unusable, or mostly
+ so, on all or nearly all possible systems on which it could be installed
+ (i.e., not a hardware-specific bug); or renders package uninstallable
+ or unremovable without special effort"""),
+ ('causes non-serious data loss', """causes the loss of data on the system
+ that is unimportant, or restorable without resorting to backup media"""),
+ ('user security hole', """introduces a security hole allowing access to
+ user accounts or data not normally accessible"""),
+ ('unknown', """not sure, or none of the above"""),
+ )
+ }
+
+
+# Ordering for justifications
+JUSTORDER = {
+ 'critical' : ['breaks unrelated software',
+ 'breaks the whole system',
+ 'causes serious data loss',
+ 'root security hole',
+ 'unknown'],
+ 'grave' : ['renders package unusable',
+ 'causes non-serious data loss',
+ 'user security hole',
+ 'unknown']
+ }
+
+SEVERITIES_gnats = {
+ 'critical' : 'The product, component or concept is completely'
+ 'non-operational or some essential functionality is missing. No'
+ 'workaround is known.',
+ 'serious' : 'The product, component or concept is not working'
+ 'properly or significant functionality is missing. Problems that'
+ 'would otherwise be considered ''critical'' are rated ''serious'' when'
+ 'a workaround is known.',
+ 'non-critical' : 'The product, component or concept is working'
+ 'in general, but lacks features, has irritating behavior, does'
+ 'something wrong, or doesn''t match its documentation.',
+ }
+
+# Rank order of severities, for sorting
+SEVLIST = ['critical', 'grave', 'serious', 'important', 'does-not-build',
+ 'normal', 'non-critical', 'minor', 'wishlist', 'fixed']
+
+def convert_severity(severity, type='debbugs'):
+ "Convert severity names if needed."
+ if type == 'debbugs':
+ return {'non-critical' : 'normal'}.get(severity, severity)
+ elif type == 'gnats':
+ return {'grave' : 'critical',
+ 'important' : 'serious',
+ 'normal' : 'non-critical',
+ 'minor' : 'non-critical',
+ 'wishlist' : 'non-critical'}.get(severity, severity)
+ else:
+ return severity
+
+# These packages are virtual in Debian; we don't look them up...
+debother = {
+ 'base' : 'General bugs in the base system',
+# Actually a real package, but most people don't have boot-floppies installed for good reason
+# 'boot-floppy' : '(Obsolete, please use boot-floppies instead.)',
+ 'boot-floppies' : 'Bugs in the woody installation subsystem',
+ 'bugs.debian.org' : 'The bug tracking system, @bugs.debian.org',
+ 'cdimage.debian.org' : 'CD Image issues',
+ 'cdrom' : 'Problems with installation from CD-ROMs',
+# dpkg-iwj -- The dpkg branch maintained by Ian Jackson
+ 'debian-policy' : 'Proposed changes in the Debian policy documentation',
+ 'ftp.debian.org' : 'Problems with the FTP site',
+ 'general' : 'General problems (e.g., that many manpages are mode 755)',
+ 'install' : 'Problems with the sarge installer.',
+ 'installation' : 'General installation problems not covered otherwise.',
+# 'kernel' : '(Obsolete, please use "linux-image" instead.)',
+ 'linux-image' : 'Problems with the Linux kernel, or the kernel shipped with Debian',
+ 'listarchives' : 'Problems with the WWW mailing list archives',
+ 'lists.debian.org' : 'The mailing lists, debian-*@lists.debian.org.',
+ 'mirrors' : 'Problems with Debian archive mirrors.',
+ 'nonus.debian.org' : 'Problems with the non-US FTP site',
+ 'press' : 'Press release issues',
+ 'project' : 'Problems related to Project administration',
+ 'qa.debian.org' : 'Problems related to the quality assurance group',
+#slink-cd -- Slink CD
+#spam -- Spam (reassign spam to here so we can complain about it)
+ 'security.debian.org' : 'Problems with the security updates server',
+ 'upgrade-reports' : 'Reports of successful and unsucessful upgrades',
+ 'wnpp' : 'Work-Needing and Prospective Packages list',
+ 'www.debian.org' : 'Problems with the WWW site (including other *.debian.org sites)'
+ }
+
+progenyother = {
+ 'debian-general' : 'Any non-package-specific bug',
+ }
+
+def handle_wnpp(package, bts, ui, fromaddr, online=True, http_proxy=None):
+ desc = body = ''
+ headers = []
+ pseudos = []
+ query = True
+
+ tag = ui.menu('What sort of request is this? (If none of these '
+ 'things mean anything to you, or you are trying to report '
+ 'a bug in an existing package, please press Enter to '
+ 'exit reportbug.)', {
+ 'O' :
+ "The package has been `Orphaned'. It needs a new maintainer as soon as possible.",
+ 'RFA' :
+ "This is a `Request for Adoption'. Due to lack of time, resources, interest or something similar, the current maintainer is asking for someone else to maintain this package. He/she will maintain it in the meantime, but perhaps not in the best possible way. In short: the package needs a new maintainer.",
+ 'RFH' :
+ "This is a `Request For Help'. The current maintainer wants to continue to maintain this package, but he/she needs some help to do this, because his/her time is limited or the package is quite big and needs several maintainers.",
+ 'ITP' :
+ "This is an `Intent To Package'. Please submit a package description along with copyright and URL in such a report.",
+ 'RFP' :
+ "This is a `Request For Package'. You have found an interesting piece of software and would like someone else to maintain it for Debian. Please submit a package description along with copyright and URL in such a report.",
+ }, 'Choose the request type: ', empty_ok=True)
+ if not tag:
+ ui.long_message('To report a bug in a package, use the name of the package, not wnpp.\n')
+ raise SystemExit
+
+ if tag in ('RFP', 'ITP'):
+ prompt = 'Please enter the proposed package name: '
+ else:
+ prompt = 'Please enter the name of the package: '
+ package = ui.get_string(prompt)
+ if not package: return
+
+ ui.ewrite('Checking status database...\n')
+ info = reportbug.get_package_status(package)
+ available = info[1]
+
+ severity = 'normal'
+ if tag in ('ITP', 'RFP'):
+ if available and (not online or checkversions.check_available(
+ package, '0', http_proxy=http_proxy)):
+ if not ui.yes_no(
+ ('A package called %s already appears to exist (at least on '
+ 'your system); continue?' % package),
+ 'Ignore this problem and continue. If you have '
+ 'already locally created a package with this name, this '
+ 'warning message may have been produced in error.',
+ 'Exit without filing a report.', default=0):
+ sys.exit(1)
+
+ severity = 'wishlist'
+
+ desc = ui.get_string(
+ 'Please briefly describe this package; this should be an '
+ 'appropriate short description for the eventual package: ')
+ if not desc:
+ return
+
+ if tag == 'ITP':
+ headers.append('X-Debbugs-CC: deb...@li...')
+ pseudos.append('Owner: %s' % fromaddr)
+ ui.ewrite('Your report will be carbon-copied to debian-devel, '
+ 'per Debian policy.\n')
+
+ body = """* Package name : %s
+ Version : x.y.z
+ Upstream Author : Name <som...@ex...>
+* URL : http://www.example.org/
+* License : (GPL, LGPL, BSD, MIT/X, etc.)
+ Programming Lang: (C, C++, C#, Perl, Python, etc.)
+ Description : %s
+
+(Include the long description here.)
+""" % (package, desc)
+ elif tag in ('O', 'RFA', 'RFH'):
+ severity = 'normal'
+ query = False
+ if not available:
+ info = reportbug.get_source_package(package)
+ if info:
+ info = reportbug.get_package_status(info[0][0])
+
+ if not info:
+ cont = ui.select_options(
+ "This package doesn't appear to exist; continue?",
+ 'yN', {'y': 'Ignore this problem and continue.',
+ 'n': 'Exit without filing a report.' })
+ if cont == 'n':
+ sys.exit(1)
+ desc = fulldesc = ''
+ else:
+ desc = info[11] or ''
+ package = info[12] or package
+ fulldesc = info[13]
+
+ if tag == 'O' and info and info[9] in \
+ ('required', 'important', 'standard'):
+ severity = 'important'
+
+ if tag == 'RFH':
+ headers.append('X-Debbugs-CC: deb...@li...')
+ ui.ewrite('Your request will be carbon-copied to debian-devel, '
+ 'per Debian policy.\n')
+
+ if fulldesc:
+ orphstr = 'intend to orphan'
+ if tag == 'RFA':
+ orphstr = 'request an adopter for'
+ elif tag == 'RFH':
+ orphstr = 'request assistance with maintaining'
+
+ body = ('I %s the %s package.\n\n'
+ 'The package description is:\n') % (orphstr, package)
+ body = body + fulldesc + '\n'
+
+ if desc:
+ subject = '%s: %s -- %s' % (tag, package, desc)
+ else:
+ subject = '%s: %s' % (tag, package)
+
+ return (subject, severity, headers, pseudos, body, query)
+
+# Supported servers
+# Theoretically support for GNATS and Jitterbug could be added here.
+SYSTEMS = { 'debian' :
+ { 'name' : 'Debian', 'email': '%s...@bu...',
+ 'btsroot' : 'http://www.debian.org/Bugs/',
+ 'otherpkgs' : debother,
+ 'nonvirtual' : ['linux-image', 'kernel-image'],
+ 'specials' : { 'wnpp': handle_wnpp },
+ # Dependency packages
+ 'deppkgs' : ('gcc', 'g++', 'cpp', 'gcj', 'gpc', 'gobjc',
+ 'chill', 'gij', 'g77', 'python', 'python-base',
+ 'x-window-system-core', 'x-window-system'),
+ 'cgiroot' : 'http://bugs.debian.org/cgi-bin/' },
+ 'kde' :
+ { 'name' : 'KDE Project', 'email': '%s...@bu...',
+ 'btsroot': 'http://bugs.kde.org/' },
+ 'mandrake' :
+ { 'name' : 'Linux-Mandrake', 'email': '%s...@bu...',
+ 'type' : 'mailto', 'query-dpkg' : False },
+ 'gnome' :
+ { 'name' : 'GNOME Project', 'email': '%s...@bu...',
+ 'type' : 'mailto', 'query-dpkg' : False },
+ 'ximian' :
+ { 'name' : 'Ximian', 'email': '%s...@bu...',
+ 'type' : 'mailto' },
+ 'progeny' :
+ { 'name' : 'Progeny', 'email' : 'bu...@pr...',
+ 'type' : 'gnats', 'otherpkgs' : progenyother },
+ 'ubuntu' :
+ { 'name' : 'Ubuntu', 'email' : 'ubu...@li...',
+ 'type' : 'mailto' },
+ 'guug' :
+ { 'name' : 'GUUG (German Unix User Group)',
+ 'email' : '%s...@bu...', 'query-dpkg' : False },
+ 'grml' :
+ { 'name' : 'grml', 'email': '%s...@bu...',
+ 'btsroot' : 'http://bugs.grml.org/',
+ 'cgiroot' : 'http://bugs.grml.org/cgi-bin/' },
+ }
+
+SYSTEMS['helixcode'] = SYSTEMS['ximian']
+
+CLASSES = {
+ 'sw-bug' : 'The problem is a bug in the software or code. For'
+ 'example, a crash would be a sw-bug.',
+ 'doc-bug' : 'The problem is in the documentation. For example,'
+ 'an error in a man page would be a doc-bug.',
+ 'change-request' : 'You are requesting a new feature or a change'
+ 'in the behavior of software, or are making a suggestion. For'
+ 'example, if you wanted reportbug to be able to get your local'
+ 'weather forecast, as well as report bugs, that would be a'
+ 'change-request.',
+ }
+
+CLASSLIST = ['sw-bug', 'doc-bug', 'change-request']
+
+CRITICAL_TAGS = {
+ 'security' : 'This problem is a security vulnerability in Debian.',
+}
+
+TAGS = {
+ 'patch' : 'You are including a patch to fix this problem.',
+## 'upstream' : 'You believe this problem is not specific to Debian.',
+## 'potato' : 'This bug only applies to the potato release (Debian 2.2).',
+## 'woody' : 'This bug only applies to the woody release (Debian 3.0).',
+## 'sarge' : 'This bug only applies to the sarge release (Debian 3.1).',
+## 'sid' : 'This bug only applies to the unstable branch of Debian.',
+ "l10n" : "This bug reports a localization/internationalization issue.",
+## 'done' : 'No more tags.',
+ }
+
+EXTRA_TAGS = ['potato', 'woody', 'sarge', 'security', 'sid', 'upstream']
+
+TAGLIST = ['l10n', 'patch']
+CRITICAL_TAGLIST = ['security']
+
+def yn_bool(setting):
+ if setting:
+ if str(setting) == 'no':
+ return 'no'
+ return 'yes'
+ else:
+ return 'no'
+
+def cgi_report_url(system, number, archived=False, mbox=False):
+ root = SYSTEMS[system].get('cgiroot')
+ if root:
+ return '%sbugreport.cgi?bug=%d&archived=%s&mbox=%s' % (
+ root, number, archived, yn_bool(mbox))
+ return None
+
+def cgi_package_url(system, package, archived=False, source=False,
+ repeatmerged=True, version=None):
+ root = SYSTEMS[system].get('cgiroot')
+ if not root: return None
+
+ #package = urllib.quote_plus(package.lower())
+ if source:
+ query = {'src' : package.lower()}
+ else:
+ query = {'pkg' : package.lower()}
+
+ query['repeatmerged'] = yn_bool(repeatmerged)
+ query['archived'] = yn_bool(archived)
+
+ if version:
+ query['version'] = str(version)
+
+ qstr = urllib.urlencode(query)
+ #print qstr
+ return '%spkgreport.cgi?%s' % (root, qstr)
+
+def package_url(system, package, mirrors=None, source=False,
+ repeatmerged=True):
+ btsroot=get_btsroot(system, mirrors)
+ package = urllib.quote_plus(package.lower())
+ return btsroot+('db/pa/l%s.html' % package)
+
+def report_url(system, number, mirrors=None):
+ number = str(number)
+ if len(number) < 2: return None
+ btsroot=get_btsroot(system, mirrors)
+ return btsroot+('db/%s/%s.html' % (number[:2], number))
+
+def get_package_url(system, package, mirrors=None, source=False,
+ archived=False, repeatmerged=True):
+ return (cgi_package_url(system, package, archived, source, repeatmerged) or
+ package_url(system, package, mirrors, source, repeatmerged))
+
+def get_report_url(system, number, mirrors=None, archived=False, mbox=False):
+ return (cgi_report_url(system, number, archived, mbox) or
+ report_url(system, number, mirrors))
+
+def parse_bts_url(url):
+ bits = url.split(':', 1)
+ if len(bits) != 2: return None
+
+ type, loc = bits
+ if loc.startswith('//'): loc = loc[2:]
+ while loc.endswith('/'): loc = loc[:-1]
+ return type, loc
+
+# Dynamically add any additional systems found
+for origin in glob.glob('/etc/dpkg/origins/*'):
+ try:
+ fp = file(origin)
+ system = os.path.basename(origin)
+ SYSTEMS[system] = SYSTEMS.get(system, { 'otherpkgs' : {},
+ 'query-dpkg' : True,
+ 'mirrors' : {},
+ 'cgiroot' : None } )
+ for line in fp:
+ try:
+ (header, content) = line.split(': ', 1)
+ header = header.lower()
+ content = content.strip()
+ if header == 'vendor':
+ SYSTEMS[system]['name'] = content
+ elif header == 'bugs':
+ (type, root) = parse_bts_url(content)
+ SYSTEMS[system]['type'] = type
+ if type == 'debbugs':
+ SYSTEMS[system]['btsroot'] = 'http://'+root+'/'
+ SYSTEMS[system]['email'] = '%s@'+root
+ elif type == 'mailto':
+ SYSTEMS[system]['btsroot'] = None
+ SYSTEMS[system]['email'] = root
+ else:
+ # We don't know what to do...
+ pass
+ except ValueError:
+ pass
+ fp.close()
+ except IOError:
+ pass
+
+# For summary pages, we want to keep:
+#
+# - Contents of <title>...</title>
+# - Contents of <h2>...</h2>
+# - Contents of each <li>
+#
+# For individual bugs, we want to keep:
+# - Contents of <title>...</title>
+# - Contents of every <pre>...</pre> after a <h2>....</h2> tag.
+
+class BTSParser(sgmllib.SGMLParser):
+ def __init__(self, mode='summary', cgi=False, followups=False):
+ sgmllib.SGMLParser.__init__(self)
+ self.hierarchy = []
+ self.lidata = None
+ self.lidatalist = None
+ self.savedata = None
+ self.title = None
+ self.bugcount = 0
+ self.mode = mode
+ self.cgi = cgi
+ self.followups = followups
+ self.inbuglist = self.intrailerinfo = False
+ self.bugtitle = None
+ if followups:
+ self.preblock = []
+ else:
+ self.preblock = ''
+ self.endh2 = False
+
+ # --- Formatter interface, taking care of 'savedata' mode;
+ # shouldn't need to be overridden
+
+ def handle_data(self, data):
+ if self.savedata is not None:
+ self.savedata += data
+
+ # --- Hooks to save data; shouldn't need to be overridden
+
+ def save_bgn(self):
+ self.savedata = ''
+
+ def save_end(self, mode=False):
+ data = self.savedata
+ if not mode and data:
+ data = ' '.join(data.split())
+ self.savedata = None
+ return data
+
+ def start_h1(self, attrs):
+ self.save_bgn()
+ self.oldmode = self.mode
+ self.mode = 'title'
+
+ def end_h1(self):
+ self.title = self.save_end()
+ self.mode = self.oldmode
+
+ def start_h2(self, attrs):
+ if self.lidata: self.check_li()
+
+ self.save_bgn()
+
+ def end_h2(self):
+ if self.mode == 'summary':
+ hiertitle = self.save_end()
+ if 'bug' in hiertitle:
+ self.hierarchy.append( (hiertitle, []) )
+ self.endh2 = True # We are at the end of a title, flag <pre>
+
+ def start_ul(self, attrs):
+ if self.mode == 'summary':
+ for k, v in attrs:
+ if k == 'class' and v == 'bugs':
+ self.inbuglist = True
+
+ def end_ul(self):
+ if self.inbuglist:
+ self.check_li()
+
+ self.inbuglist = False
+
+ def do_br(self, attrs):
+ if self.mode == 'title':
+ self.savedata = ""
+ elif self.mode == 'summary' and self.inbuglist and not self.intrailerinfo:
+ self.bugtitle = self.save_end()
+ self.intrailerinfo = True
+ self.save_bgn()
+
+ def check_li(self):
+ if self.mode == 'summary':
+ if not self.intrailerinfo:
+ self.bugtitle = self.save_end()
+ trailinfo = ''
+ else:
+ trailinfo = self.save_end()
+
+ match = re.search(r'fixed:\s+([\w.+~-]+(\s+[\w.+~:-]+)?)', trailinfo)
+ if match:
+ title = self.bugtitle
+ bits = re.split(r':\s+', title, 1)
+ if len(bits) > 1:
+ buginfo = '%s [FIXED %s]: %s' % (
+ bits[0], match.group(1), bits[1])
+ else:
+ if title.endswith(':'):
+ title = title[:-1]
+
+ buginfo = '%s [FIXED %s]' % (title, match.group(1))
+ else:
+ buginfo = self.bugtitle
+
+ self.lidatalist.append(buginfo)
+ self.bugcount += 1
+
+ self.lidata = self.intrailerinfo = False
+
+ def do_li(self, attrs):
+ if self.mode == 'summary' and self.inbuglist:
+ if self.lidata: self.check_li()
+
+ self.lidata = True
+ if self.hierarchy:
+ self.lidatalist = self.hierarchy[-1][1]
+ else:
+ self.lidatalist = []
+ self.save_bgn()
+
+ def start_pre(self, attrs):
+ "Save <pre> when we follow a </h2>"
+ if self.followups:
+ if not self.endh2: return
+ else:
+ if self.cgi and self.preblock: return
+
+ self.save_bgn()
+
+ def end_pre(self):
+ if self.followups:
+ if not self.endh2: return
+ self.endh2 = False # Done with a report, reset </h2>.
+ stuff = self.save_end(1)
+ if not self.cgi:
+ self.preblock.insert(0, stuff)
+ else:
+ self.preblock.append(stuff)
+ elif not (self.preblock and self.cgi):
+ self.preblock = self.save_end(1)
+
+ def reorganize(self):
+ if not self.hierarchy:
+ return
+
+ newhierarchy = []
+ fixed = []
+ fixedfinder = re.compile(r'\[FIXED ([^\]]+)\]')
+ resolvedfinder = re.compile(r'Resolved')
+
+ for (title, buglist) in self.hierarchy:
+ if 'Resolved' in title:
+ newhierarchy.append( (title, buglist) )
+ continue
+
+ bugs = []
+ for bug in buglist:
+ if fixedfinder.search(bug):
+ fixed.append(bug)
+ else:
+ bugs.append(bug)
+
+ if bugs:
+ title = ' '.join(title.split()[:-2])
+ if len(bugs) != 1:
+ title += ' (%d bugs)' % len(bugs)
+ else:
+ title += ' (1 bug)'
+
+ newhierarchy.append( (title, bugs) )
+
+ if fixed:
+ self.hierarchy = [('Bugs fixed in subsequent releases (%d bugs)' % len(fixed), fixed)] + newhierarchy
+
def parse_html_report(number, url, http_proxy, followups=False, cgi=True):
    """Fetch *url* and scrape it with BTSParser.

    Returns (title, [report texts]) for bug *number*, or None when the
    page is empty or contains no usable report body.  With *followups*
    every captured <pre> block becomes one output item; otherwise only
    the single preblock is used.
    """
    page = open_url(url, http_proxy)
    if not page:
        return None

    parser = BTSParser(cgi=cgi, followups=followups)
    for line in page:
        parser.feed(line)
    parser.close()

    try:
        # Best-effort poke at urllib internals before closing; wrapped in
        # a bare except because the attribute chain differs across
        # Python versions.
        page.fp._sock.recv = None
    except:
        pass
    page.close()

    items = parser.preblock
    title = "#%d: %s" % (number, parser.title)

    if not followups:
        # Normalize to a list so the loop below handles both shapes.
        items = [items]

    output = []
    for stuff in items:
        parts = stuff.split('\n\n')
        # The first paragraph holds the headers; pull out the Date line.
        match = re.search('^Date: (.*)$', parts[0], re.M | re.I)
        date_submitted = ''
        if match:
            date_submitted = 'Date: %s\n' % match.group(1)

        stuff = ('\n\n'.join(parts[1:])).rstrip()
        if not stuff:
            continue

        item = date_submitted+stuff+os.linesep
        output.append(item)

    if not output:
        return None

    return (title, output)
+
# XXX: Need to handle charsets properly
def parse_mbox_report(number, url, http_proxy, followups=False):
    """Fetch bug *number* as an mbox from *url* and split it into messages.

    Returns (title, [message texts]) where the title comes from the first
    message's Subject, or None when the page is empty or yields no
    messages.  *followups* is accepted for interface symmetry with
    parse_html_report.
    """
    page = open_url(url, http_proxy)
    if not page:
        return None

    # Make this seekable for the mailbox module.
    wholefile = cStringIO.StringIO(page.read())

    try:
        # Best-effort poke at urllib internals before closing; wrapped in
        # a bare except because the attribute chain differs across
        # Python versions.
        page.fp._sock.recv = None
    except:
        pass
    page.close()

    mbox = mailbox.UnixMailbox(wholefile, msgfactory)
    title = ''

    output = []
    for message in mbox:
        if not message:
            # BUG FIX: this was 'pass', which fell through and would have
            # crashed on message.get() below for a falsy message; skip it.
            continue

        subject = message.get('Subject')
        if not title:
            # The first message's subject doubles as the report title.
            title = subject

        date = message.get('Date')
        fromhdr = message.get('From')

        body = entry = ''
        # Use only the first text/plain part of each message.
        for part in message.walk():
            if part.get_content_type() == 'text/plain' and not body:
                body = part.get_payload(None, True)

        if fromhdr:
            entry += 'From: %s%s' % (fromhdr, os.linesep)

        if subject and subject != title:
            entry += 'Subject: %s%s' % (subject, os.linesep)

        if date:
            entry += 'Date: %s%s' % (date, os.linesep)

        if entry:
            entry += os.linesep

        entry += body.rstrip('\n') + os.linesep

        output.append(entry)

    if not output:
        return None

    title = "#%d: %s" % (number, title)
    return (title, output)
+
def get_cgi_reports(package, system='debian', http_proxy='', archived=False,
                    source=False, version=None):
    """Fetch the bug list for *package* via the BTS CGI interface.

    Returns (bugcount, title, hierarchy); (0, None, None) when the page
    cannot be fetched.
    """
    page = open_url(cgi_package_url(system, package, archived, source,
                                    version=version), http_proxy)
    if not page:
        return (0, None, None)

    #content = page.read()
    #if 'Maintainer' not in content:
    #    return (0, None, None)

    parser = BTSParser(cgi=True)
    for line in page:
        parser.feed(line)
    parser.close()
    try:
        # Best-effort poke at urllib internals before closing; wrapped in
        # a bare except because the attribute chain differs across
        # Python versions.
        page.fp._sock.recv = None
    except:
        pass
    page.close()

    # Reorganize hierarchy to put recently-fixed bugs at top
    parser.reorganize()

    data = (parser.bugcount, parser.title, parser.hierarchy)
    del parser

    return data
+
def get_cgi_report(number, system='debian', http_proxy='', archived=False,
                   followups=False):
    """Fetch one bug report via the CGI interface, in mbox form.

    NOTE(review): the *archived* argument is accepted but ignored — the
    URL is always built with archived='no'; confirm whether archived
    lookups should be wired through.
    """
    number = int(number)

    url = cgi_report_url(system, number, archived='no', mbox=True)
    return parse_mbox_report(number, url, http_proxy, followups)
    #return parse_html_report(number, url, http_proxy, followups, cgi=True)
+
def get_btsroot(system, mirrors=None):
    """Return the BTS root URL for *system*.

    When *mirrors* is given, the first requested mirror that the system
    declares is used; otherwise (or when none matches) the system's
    default 'btsroot' is returned ('' if it has none).
    """
    if mirrors:
        # BUG FIX: .get('mirrors') may return None for systems without
        # mirrors, and the original then crashed on alternates.has_key();
        # fall back to an empty dict.
        alternates = SYSTEMS[system].get('mirrors') or {}
        for mirror in mirrors:
            if mirror in alternates:
                return alternates[mirror]
    return SYSTEMS[system].get('btsroot', '')
+
def get_reports(package, system='debian', mirrors=None, version=None,
                http_proxy='', archived=False, source=False):
    """Return (bugcount, title, hierarchy) of bug reports.

    *package* is either a package name (string) or a list of bug
    numbers.  For a package name the CGI interface is preferred when the
    BTS declares a cgiroot, with a fallback to scraping the static
    pages; for a list of numbers each report is fetched individually and
    collected under a single 'Reports' section.
    """
    if isinstance(package, basestring):
        if SYSTEMS[system].get('cgiroot'):
            result = get_cgi_reports(package, system, http_proxy, archived,
                                     source, version=version)
            if result: return result

        url = package_url(system, package, mirrors, source)
        page = open_url(url, http_proxy)
        if not page:
            return (0, None, None)

        #content = page.read()
        #if 'Maintainer' not in content:
        #    return (0, None, None)

        parser = BTSParser()
        for line in page:
            parser.feed(line)
        parser.close()
        try:
            # Best-effort poke at urllib internals before closing; wrapped
            # in a bare except because the attribute chain differs across
            # Python versions.
            page.fp._sock.recv = None
        except:
            pass
        page.close()

        return parser.bugcount, parser.title, parser.hierarchy

    # A list of bug numbers
    this_hierarchy = []
    package = [int(x) for x in package]
    package.sort()
    for bug in package:
        result = get_report(bug, system, mirrors, http_proxy, archived)
        if result:
            title, body = result
            this_hierarchy.append(title)
            #print title

    title = "Multiple bug reports"
    bugcount = len(this_hierarchy)
    hierarchy = [('Reports', this_hierarchy)]

    return bugcount, title, hierarchy
+
def get_report(number, system='debian', mirrors=None,
               http_proxy='', archived=False, followups=False):
    """Fetch one bug report as (title, [texts]), or None on failure.

    Prefers the CGI/mbox interface when the BTS declares a cgiroot,
    falling back to scraping the static HTML report page.
    """
    number = int(number)
    if SYSTEMS[system].get('cgiroot'):
        result = get_cgi_report(number, system, http_proxy, archived,
                                followups)
        if result: return result

    url = report_url(system, number, mirrors)
    if not url: return None

    return parse_html_report(number, url, http_proxy, followups, cgi=False)
+
class NullParser(sgmllib.SGMLParser):
    """SGML parser that defines no tag handlers: feeding it input
    parses and discards all markup."""
    def __init__(self):
        sgmllib.SGMLParser.__init__(self)
+
if __name__ == '__main__':
    # Manual smoke test: dump the bug hierarchy for the 'reportbug'
    # package.
    import pprint

    data = get_cgi_reports('reportbug')
    pprint.pprint(data)
    # NOTE(review): presumably keeps the console window open so the
    # output can be read (e.g. on Windows) — confirm; otherwise this
    # sleep is stray debug code.
    time.sleep(1000)
Added: trunk/fetch_bugs.py
===================================================================
--- trunk/fetch_bugs.py (rev 0)
+++ trunk/fetch_bugs.py 2007-04-25 14:18:34 UTC (rev 136)
@@ -0,0 +1,28 @@
# Fetch every bug report (with followups) for one package from the
# Debian BTS, via reportbug's debianbts module, and dump them to a file.
import os
import sys

sys.path.append('/usr/share/reportbug')
import debianbts

package = "sl"
report_path = "/tmp/bug_report.txt"
# The original stored this path in a variable named 'file' (shadowing
# the builtin) and never closed the handle; fixed below.
report = open(report_path, 'w')

(num_of_bugs, header, bugs_list) = debianbts.get_reports(package)

if num_of_bugs:
    for (sub_bugs_header, sub_bugs_list) in bugs_list:
        for bug_entry in sub_bugs_list:
            # Entries look like '#NNNN: title'; extract the bug number.
            bug_num = bug_entry.split(':')[0].lstrip('#')
            data = debianbts.get_report(bug_num, followups=True)
            report.write(data[0] + "\n\n")
            for followup in data[1]:
                report.write(followup)
                report.write("\n")
            report.write("\n\n\n")
            report.flush()
report.close()
\ No newline at end of file
Added: trunk/reportbug.py
===================================================================
--- trunk/reportbug.py (rev 0)
+++ trunk/reportbug.py 2007-04-25 14:18:34 UTC (rev 136)
@@ -0,0 +1,1026 @@
+#
+# Reportbug module - common functions for reportbug and greportbug
+# Written by Chris Lawrence <law...@de...>
+# Copyright (C) 1999-2006 Chris Lawrence
+#
+# This program is freely distributable per the following license:
+#
+## Permission to use, copy, modify, and distribute this software and its
+## documentation for any purpose and without fee is hereby granted,
+## provided that the above copyright notice appears in all copies and that
+## both that copyright notice and this permission notice appear in
+## supporting documentation.
+##
+## I DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL
+## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL I
+## BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
+## DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+## WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
+## ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+## SOFTWARE.
+#
+# Version 3.35; see changelog for revision history
+#
+# $Id: reportbug.py,v 1.35.2.18 2007/04/08 03:22:21 lawrencc Exp $
+
# Version identification strings shown by the UIs and --version.
VERSION = "reportbug 3.35"
VERSION_NUMBER = "3.35"
COPYRIGHT = VERSION + '\nCopyright (C) 1999-2006 Chris Lawrence <law...@de...>'

import time, sys, os, locale, re, pwd, commands, shlex, debianbts, rfc822
import socket
import pprint
import subprocess

from string import ascii_letters, digits

# Paths for dpkg
DPKGLIB = '/var/lib/dpkg'
AVAILDB = os.path.join(DPKGLIB, 'available')
STATUSDB = os.path.join(DPKGLIB, 'status')

# Headers other than these become email headers for debbugs servers
PSEUDOHEADERS = ('Package', 'Version', 'Severity', 'File', 'Tags',
                 'Justification', 'Followup-For', 'Owner')
+
VALID_UIS = ['newt', 'text', 'gnome2', 'urwid']
# Probe sys.path for a reportbug_ui_<name>.py module to determine which
# of the valid UIs are actually installed on this machine.
AVAILABLE_UIS = []
for ui in VALID_UIS:
    pkgname = 'reportbug_ui_%s.py' % ui
    for d in sys.path:
        if os.path.exists(os.path.join(d, pkgname)):
            AVAILABLE_UIS.append(ui)
            break
+
# Human-readable descriptions of the selectable user interfaces.
UIS = {'text': 'A text-oriented console interface',
       'urwid': 'A menu-based console interface',
       'gnome2': 'A graphical (Gnome 2) interface'}

# Operating modes, from most to least hand-holding.
MODES = {'novice': 'Offer simple prompts, bypassing technical questions.',
         'standard': 'Offer more extensive prompts, including asking about '
         'things that a moderately sophisticated user would be expected to '
         'know about Debian.',
         'advanced' : 'Like standard, but assumes you know a bit more about '
         'Debian, including "incoming".',
         'expert': 'Bypass most handholding measures and preliminary triage '
         'routines. This mode should not be used by people unfamiliar with '
         'Debian\'s policies and operating procedures.'}
MODELIST = ['novice', 'standard', 'advanced', 'expert']
# Define the MODE_NOVICE=0 .. MODE_EXPERT=3 constants.  Plain
# assignment through globals() replaces the original string 'exec'
# (no string evaluation), and enumerate avoids repeated .index() scans.
for _idx, _mode in enumerate(MODELIST):
    globals()['MODE_%s' % _mode.upper()] = _idx
del _idx, _mode
+
# Marker line inserted into novice-mode report templates.
NEWBIELINE = '*** Please type your report below this line ***'

# Well-known directories that realpath() below skips when resolving
# symlinks.
fhs_directories = ['/', '/usr', '/usr/share', '/var', '/usr/X11R6',
                   '/usr/man', '/usr/doc', '/usr/bin']
+
def realpath(filename):
    """Resolve symlinks in *filename* component by component, recursing
    when a link is found; directories listed in fhs_directories are
    assumed safe and skipped.
    """
    filename = os.path.abspath(filename)

    bits = filename.split('/')
    for i in range(2, len(bits)+1):
        component = '/'.join(bits[0:i])
        if component in fhs_directories:
            continue

        if os.path.islink(component):
            resolved = os.readlink(component)
            # Renamed from (dir, file): don't shadow the builtins.
            (dirname, basename) = os.path.split(component)
            resolved = os.path.normpath(os.path.join(dirname, resolved))
            # apply() is deprecated; argument unpacking is equivalent.
            newpath = os.path.join(resolved, *bits[i:])
            return realpath(newpath)

    return filename
+
# Extra directories always appended to the executable search path.
pathdirs = ['/usr/sbin', '/usr/bin', '/sbin', '/bin', '/usr/X11R6/bin',
            '/usr/games']

def search_path_for(filename):
    """Locate *filename* on PATH (plus pathdirs) and return its resolved
    real path, or None when it is not found.  A name containing a
    directory component is resolved directly.
    """
    d, f = os.path.split(filename)
    if d: return realpath(filename)

    # BUG FIX: the original split PATH on '/', which mangles every
    # entry; entries are separated by os.pathsep (':' on Unix).
    path = os.environ.get("PATH", os.defpath).split(os.pathsep)
    for d in pathdirs:
        if not d in path:
            path.append(d)

    for d in path:
        fullname = os.path.join(d, f)
        if os.path.exists(fullname):
            return realpath(fullname)
    return None
+
def glob_escape(filename):
    """Return *filename* with the glob metacharacters *, ?, [ and ]
    backslash-escaped."""
    return re.sub(r'([*?\[\]])', r'\\\1', filename)
+
def search_pipe(searchfile, use_dlocate=True):
    """Open a pipe that searches for the package owning *searchfile*.

    Uses dlocate when requested and installed, otherwise falls back to
    'dpkg --search'.  Returns (pipe, use_dlocate) so the caller knows
    which tool actually ran.
    """
    arg = commands.mkarg(searchfile)  # shell-quote the argument
    if use_dlocate and os.path.exists('/usr/bin/dlocate'):
        pipe = os.popen('COLUMNS=79 dlocate -S %s 2>/dev/null' % arg)
    else:
        use_dlocate = False
        pipe = os.popen('COLUMNS=79 dpkg --search %s 2>/dev/null' % arg)
    return (pipe, use_dlocate)
+
def query_dpkg_for(filename, use_dlocate=True):
    """Map *filename* to the package(s) shipping it.

    Returns (filename, {package: [paths]}).  Retries with plain dpkg if
    a dlocate search produced nothing.
    """
    try:
        # Guard against the current directory having been deleted, which
        # would break the spawned search command.
        x = os.getcwd()
    except OSError:
        os.chdir('/')
    searchfilename = glob_escape(filename)
    (pipe, dlocate_used) = search_pipe(searchfilename)
    packages = {}

    for line in pipe:
        line = line.strip()
        # Ignore diversions
        if 'diversion by' in line: continue

        # Output format: 'pkg1, pkg2: /path'.
        (package, path) = line.split(':', 1)
        path = path.strip()
        packlist = package.split(', ')
        for package in packlist:
            if packages.has_key(package):
                packages[package].append(path)
            else:
                packages[package] = [path]
    pipe.close()
    # Try again without dlocate if no packages found
    if not packages and dlocate_used:
        return query_dpkg_for(filename, use_dlocate=False)

    return filename, packages
+
def find_package_for(filename, pathonly=False):
    """Find the package(s) containing this file.

    Absolute paths are queried directly; bare names are first resolved
    via the executable search path.  Returns (filename, {package:
    [paths]}), or (filename, None) when *pathonly* is set and the name
    could not be resolved.  (Removed the original's unused local
    'packages' dict.)
    """
    if filename[0] == '/':
        fn, pkglist = query_dpkg_for(filename)
        if pkglist: return fn, pkglist

    newfilename = search_path_for(filename)
    if pathonly and not newfilename:
        return (filename, None)
    return query_dpkg_for(newfilename or filename)
+
+def find_rewritten(username):
+ for filename in ['/etc/email-addresses']:
+ if os.path.exists(filename):
+ try:
+ fp = file(filename)
+ except IOError:
+ continue
+ for line in fp:
+ line = line.strip().split('#')[0]
+ if not line:
+ continue
+ try:
+ name, alias = line.split(':')
+ if name.strip() == username:
+ return alias.strip()
+ except ValueError:
+ print 'Invalid entry in %s' % filename
+ return None
+
def get_email_addr(addr):
    """Parse *addr* with rfc822 and return the first (realname, email)
    pair found."""
    addr = rfc822.AddressList(addr)
    return addr.addresslist[0]
+
def get_email(email='', realname=''):
    """Return the reporter's identity as a (realname, email) pair,
    derived from get_user_id()."""
    return get_email_addr(get_user_id(email, realname))
+
def get_user_id(email='', realname='', charset='utf-8'):
    """Build an RFC 2822 'Real Name <address>' identity for the reporter.

    The address comes from (in order): REPORTBUGEMAIL / the *email*
    argument, DEBEMAIL, EMAIL, /etc/email-addresses rewrites, or the
    login name qualified with the host's mail name.  The real name comes
    from the *realname* argument, DEBFULLNAME/DEBNAME/NAME, or the
    passwd gecos field.
    """
    uid = os.getuid()
    info = pwd.getpwuid(uid)
    email = (os.environ.get('REPORTBUGEMAIL', email) or
             os.environ.get('DEBEMAIL') or os.environ.get('EMAIL'))

    email = email or find_rewritten(info[0]) or info[0]

    if '@' not in email:
        # Bare login name: qualify it with the host's mail domain.
        if os.path.exists('/etc/mailname'):
            domainname = file('/etc/mailname').readline().strip()
        else:
            domainname = socket.getfqdn()

        email = email+'@'+domainname

    # Handle EMAIL if it's formatted as 'Bob <bob@host>'.
    if '<' in email or '(' in email:
        realname, email = get_email_addr(email)

    if not realname:
        realname = (os.environ.get('DEBFULLNAME') or os.environ.get('DEBNAME')
                    or os.environ.get('NAME'))
        if not realname:
            realname = info[4].split(',', 1)[0]
            # Convert & in gecos field 4 to capitalized logname: #224231
            realname = realname.replace('&', info[0].upper())

    if not realname:
        return email

    # Decode the realname from the charset -
    # but only if it is not already in Unicode
    if isinstance(realname, str):
        realname = realname.decode(charset, 'replace')

    # Plain word characters need no quoting; otherwise let rfc822
    # produce a properly quoted address pair.
    if re.match(r'[\w\s]+$', realname):
        return '%s <%s>' % (realname, email)

    return rfc822.dump_address_pair( (realname, email) )
+
# Cache of non-'avail' query results, keyed by package name.
statuscache = {}
def get_package_status(package, avail=False):
    """Query dpkg about *package* and return its parsed status.

    With *avail* set, 'dpkg --print-avail' is consulted instead of the
    status database (and the result is not cached).  Returns the tuple
    (pkgversion, pkgavail, depends, recommends, conffiles, maintainer,
    installed, origin, vendor, reportinfo, priority, desc, src_name,
    fulldesc, state).
    """
    if not avail and package in statuscache:
        return statuscache[package]

    # One prefix matcher per field of interest in dpkg's output.
    # (Removed the original's unused 'fullre' pattern.)
    versionre = re.compile('Version: ')
    packagere = re.compile('Package: ')
    priorityre = re.compile('Priority: ')
    dependsre = re.compile('(Pre-)?Depends: ')
    recsre = re.compile('Recommends: ')
    conffilesre = re.compile('Conffiles: ')
    maintre = re.compile('Maintainer: ')
    statusre = re.compile('Status: ')
    originre = re.compile('Origin: ')
    bugsre = re.compile('Bugs: ')
    descre = re.compile('Description: ')
    srcre = re.compile('Source: ')

    pkgversion = pkgavail = maintainer = status = origin = None
    bugs = vendor = priority = desc = src_name = None
    conffiles = []
    fulldesc = []
    depends = []
    recommends = []
    confmode = False
    state = ''

    try:
        # Guard against the current directory having been deleted, which
        # would break the spawned dpkg command.
        x = os.getcwd()
    except OSError:
        os.chdir('/')

    packarg = commands.mkarg(package)  # shell-quote the package name
    if avail:
        output = commands.getoutput(
            "COLUMNS=79 dpkg --print-avail %s 2>/dev/null" % packarg)
    else:
        output = commands.getoutput(
            "COLUMNS=79 dpkg --status %s 2>/dev/null" % packarg)

    # dpkg output is in UTF-8 format
    output = output.decode('utf-8', 'replace')

    for line in output.split(os.linesep):
        line = line.rstrip()
        if not line: continue

        if confmode:
            # Conffiles entries are continuation lines starting with '/'.
            if line[0] != '/':
                confmode = False
            else:
                # BUG FIX: was 'conffiles = conffiles + (line.split(),)',
                # which raises TypeError (list + tuple); append instead.
                conffiles.append(line.split())

        if versionre.match(line):
            (crud, pkgversion) = line.split(": ", 1)
        elif statusre.match(line):
            (crud, status) = line.split(": ", 1)
        elif priorityre.match(line):
            (crud, priority) = line.split(": ", 1)
        elif packagere.match(line):
            (crud, pkgavail) = line.split(": ", 1)
        elif originre.match(line):
            (crud, origin) = line.split(": ", 1)
        elif bugsre.match(line):
            (crud, bugs) = line.split(": ", 1)
        elif descre.match(line):
            (crud, desc) = line.split(": ", 1)
        elif dependsre.match(line):
            (crud, thisdepends) = line.split(": ", 1)
            # Remove versioning crud
            thisdepends = [[y.split()[0] for y in x.split('|')]
                           for x in (thisdepends.split(', '))]
            depends.extend(thisdepends)
        elif recsre.match(line):
            (crud, thisdepends) = line.split(": ", 1)
            # Remove versioning crud
            thisdepends = [[y.split()[0] for y in x.split('|')]
                           for x in (thisdepends.split(', '))]
            recommends.extend(thisdepends)
        elif conffilesre.match(line):
            confmode = True
        elif maintre.match(line):
            crud, maintainer = line.split(": ", 1)
        elif srcre.match(line):
            crud, src_name = line.split(": ", 1)
            src_name = src_name.split()[0]
        elif desc and line[0]==' ':
            # Continuation of the long description.
            fulldesc.append(line)

    installed = False
    if status:
        # Status line format: 'want flag state'.
        state = status.split()[2]
        installed = (state not in ('config-files', 'not-installed'))

    # Work out where bugs against this package should be reported.
    reportinfo = None
    if bugs:
        reportinfo = debianbts.parse_bts_url(bugs)
    elif origin:
        if origin in debianbts.SYSTEMS:
            vendor = debianbts.SYSTEMS[origin]['name']
            reportinfo = (debianbts.SYSTEMS[origin].get('type', 'debbugs'),
                          debianbts.SYSTEMS[origin]['btsroot'])
        else:
            vendor = origin.capitalize()
    else:
        vendor = ''

    info = (pkgversion, pkgavail, tuple(depends), tuple(recommends),
            tuple(conffiles),
            maintainer, installed, origin, vendor, reportinfo, priority,
            desc, src_name, os.linesep.join(fulldesc), state)

    if not avail:
        statuscache[package] = info
    return info
+
#dbase = []
#avail = []

# Object that essentially chunkifies the output of apt-cache dumpavail
class AvailDB(object):
    """Iterate over a dpkg/apt 'available'-format stream, yielding one
    stanza (blank-line-separated paragraph) per iteration as a unicode
    string."""
    def __init__(self, fp=None, popenob=None):
        # Read either a plain file object or a subprocess's stdout;
        # popenob is kept so __del__ can reap the child process.
        self.popenob = popenob
        if fp:
            self.fp = fp
        elif popenob:
            self.fp = popenob.stdout

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol: accumulate lines until a blank
        # separator line or EOF, then return the stanza.
        chunk = u''
        while True:
            if self.popenob:
                # NOTE(review): returncode stays None until poll()/wait()
                # is called, so this break looks like it can only fire
                # after the process has been reaped — confirm intent.
                if self.popenob.returncode:
                    break

            line = self.fp.readline()
            if not line:
                break

            if line == '\n':
                return chunk
            chunk += line.decode('utf-8', 'replace')

        # EOF: emit any trailing stanza before stopping.
        if chunk:
            return chunk

        raise StopIteration

    def __del__(self):
        #print >> sys.stderr, 'availdb cleanup', repr(self.popenob), repr(self.fp)
        if self.fp:
            self.fp.close()
        if self.popenob:
            try:
                # Reap the child to avoid a zombie; ignore errors during
                # interpreter shutdown.
                self.popenob.wait()
            except:
                pass
+
def get_dpkg_database():
    """Return an AvailDB over the dpkg status file.

    Exits the program with status 1 when the status database cannot be
    opened.
    """
    if os.path.exists(STATUSDB):
        fp = open(STATUSDB)
        if fp:
            return AvailDB(fp=fp)

    print >> sys.stderr, 'Unable to open', STATUSDB
    sys.exit(1)
+
def get_avail_database():
    """Return an AvailDB over the output of 'apt-cache dumpavail'."""
    #print >> sys.stderr, 'Searching available database'
    subp = subprocess.Popen(('apt-cache', 'dumpavail'), stdout=subprocess.PIPE)
    return AvailDB(popenob=subp)
+
def get_source_package(package):
    """Return any binary packages provided by a source package.

    Scans the available database for stanzas whose Source (or, for
    stanzas without an explicit Source field, Package) matches
    *package*, returning a sorted list of (package, description) pairs.
    'libc' is special-cased to match the glibc family.
    """
    packinfo = get_avail_database()
    packages = []
    packob = re.compile(r'^Package: (?P<pkg>.*)$', re.MULTILINE)
    descob = re.compile(r'^Description: (?P<desc>.*)$', re.MULTILINE)
    hassource = re.compile(r'^Source: .*$', re.MULTILINE)
    if package == 'libc':
        # Match glibc/libc with any version suffix.
        searchob1 = re.compile(r'^Source: g?libc[\d.]*$', re.MULTILINE)
        searchob2 = re.compile(r'^Package: g?libc[\d.]*$', re.MULTILINE)
    else:
        searchob1 = re.compile(r'^Source: '+re.escape(package)+r'$',
                               re.MULTILINE)
        searchob2 = re.compile(r'^Package: '+re.escape(package)+r'$',
                               re.MULTILINE)

    for p in packinfo:
        match = searchob1.search(p)
        if match:
            packname = packdesc = ''
            namematch, descmatch = packob.search(p), descob.search(p)

            if namematch:
                packname = namematch.group('pkg')
            if descmatch:
                packdesc = descmatch.group('desc')

            if packname:
                packages.append( (packname, packdesc) )
        elif hassource.search(p):
            # Stanza belongs to a different source package; skip the
            # Package-name fallback below.
            continue

        match = searchob2.search(p)
        if match:
            packname = packdesc = ''
            namematch, descmatch = packob.search(p), descob.search(p)

            if namematch:
                packname = namematch.group('pkg')
            if descmatch:
                packdesc = descmatch.group('desc')

            if packname:
                packages.append( (packname, packdesc) )


    packages.sort()
    return packages
+
+def get_source_name(package):
+ """Return source package name for given package or None."""
+ packinfo = get_avail_database()
+ has_source = re.compile(r'^Source: %s$' % re.escap...
[truncated message content] |