From: Arun Babu Neelicattu abn@redhat.com
--- requirements.txt | 1 + test-requirements.txt | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 requirements.txt create mode 100644 test-requirements.txt
diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..d4ba5cd --- /dev/null +++ b/requirements.txt @@ -0,0 +1 @@ +pycurl diff --git a/test-requirements.txt b/test-requirements.txt new file mode 100644 index 0000000..f45fca2 --- /dev/null +++ b/test-requirements.txt @@ -0,0 +1,3 @@ +# additional packages needed for testing +coverage +pep8
From: Arun Babu Neelicattu abn@redhat.com
--- bin/bugzilla | 2 +- bugzilla/base.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/bin/bugzilla b/bin/bugzilla index 5798db9..cacaf9e 100755 --- a/bin/bugzilla +++ b/bin/bugzilla @@ -63,7 +63,7 @@ def open_without_clobber(name, *args): orig_name = name while fd is None: try: - fd = os.open(name, os.O_CREAT | os.O_EXCL, 0666) + fd = os.open(name, os.O_CREAT | os.O_EXCL, 0o666) except OSError: err = sys.exc_info()[1] if err.errno == os.errno.EEXIST: diff --git a/bugzilla/base.py b/bugzilla/base.py index ee384fb..9740d3c 100644 --- a/bugzilla/base.py +++ b/bugzilla/base.py @@ -76,7 +76,7 @@ def _build_cookiejar(cookiefile): if not os.path.exists(cookiefile): # Make sure a new file has correct permissions open(cookiefile, 'a').close() - os.chmod(cookiefile, 0600) + os.chmod(cookiefile, 0o600) cj.save() return cj
From: Arun Babu Neelicattu abn@redhat.com
- reduce is no longer a built-in function --- bin/bugzilla | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/bin/bugzilla b/bin/bugzilla index cacaf9e..5b32030 100755 --- a/bin/bugzilla +++ b/bin/bugzilla @@ -845,8 +845,7 @@ def _do_new(bz, opt):
def _do_modify(bz, parser, opt, args): - bugid_list = reduce(lambda l1, l2: l1 + l2, - [a.split(",") for a in args]) + bugid_list = [bugid for a in args for bugid in a.split(',')]
add_wb, rm_wb, set_wb = _parse_triset(opt.whiteboard) add_devwb, rm_devwb, set_devwb = _parse_triset(opt.devel_whiteboard)
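As a minimal standalone sketch of the same flattening (with a hypothetical args value): the nested comprehension produces exactly what the old reduce call did, and on Python 3 reduce only survives as functools.reduce, so the comprehension avoids the import entirely.

from functools import reduce  # Python 3 keeps reduce here, not as a builtin

args = ["123,456", "789"]  # hypothetical command-line bug ID arguments

# Old form: concatenate the per-argument lists with reduce
old = reduce(lambda l1, l2: l1 + l2, [a.split(",") for a in args])

# New form: one nested comprehension, no reduce needed
new = [bugid for a in args for bugid in a.split(",")]

assert old == new == ["123", "456", "789"]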
From: Arun Babu Neelicattu abn@redhat.com
- sorted() no longer accepts cmp argument - default sorting works for python-bugzilla use case --- bugzilla/base.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/bugzilla/base.py b/bugzilla/base.py index 9740d3c..1756908 100644 --- a/bugzilla/base.py +++ b/bugzilla/base.py @@ -444,8 +444,7 @@ class BugzillaBase(object): section = "" # Substring match - prefer the longest match found log.debug("Searching for config section matching %s", self.url) - for s in sorted(c.sections(), - lambda a, b: cmp(len(a), len(b)) or cmp(a, b)): + for s in sorted(c.sections()): if s in self.url: log.debug("Found matching section: %s", s) section = s
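If the length-then-lexical ordering were ever needed again, the Python 3 idiom is a key of (len(s), s) rather than a cmp function. A small sketch with hypothetical section names; the patch itself relies on plain sorted(), which the author judged sufficient for bugzillarc sections.

sections = ["partner-bugzilla.redhat.com", "redhat.com", "bugzilla.redhat.com"]

# cmp(len(a), len(b)) or cmp(a, b) translated to a key function:
# sort by length first, then lexically as a tie-breaker.
by_length = sorted(sections, key=lambda s: (len(s), s))

# What the patched code actually uses: plain lexical order.
plain = sorted(sections)

print(by_length)  # shortest section names first
print(plain)      # alphabetical order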
From: Arun Babu Neelicattu abn@redhat.com
--- bin/bugzilla | 5 ++--- bugzilla/base.py | 4 +--- 2 files changed, 3 insertions(+), 6 deletions(-)
diff --git a/bin/bugzilla b/bin/bugzilla index 5b32030..b15730e 100755 --- a/bin/bugzilla +++ b/bin/bugzilla @@ -922,9 +922,8 @@ def _do_modify(bz, parser, opt, args): "qa": (add_qawb, rm_qawb), "devel": (add_devwb, rm_devwb), } - for key in wbmap.keys(): - if not wbmap[key][0] and not wbmap[key][1]: - wbmap.pop(key) + + wbmap = {k: v for k, v in wbmap.items() if wbmap[k][0] or wbmap[k][1]}
log.debug("update bug dict=%s", update) log.debug("update flags dict=%s", flags) diff --git a/bugzilla/base.py b/bugzilla/base.py index 1756908..a4e30c1 100644 --- a/bugzilla/base.py +++ b/bugzilla/base.py @@ -973,9 +973,7 @@ class BugzillaBase(object): }
# Strip out None elements in the dict - for key in query.keys(): - if query[key] is None: - del(query[key]) + query = {k: v for k, v in query.items() if v is not None} return query
def _query(self, query):
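The motivation (an assumption here, not stated in the patch) is that deleting keys while iterating over dict.keys() breaks on Python 3, where keys() is a live view; building a filtered copy with a comprehension works on both versions. A minimal sketch with hypothetical query data:

query = {"product": "Fedora", "component": None, "version": "20"}

# On Python 3, keys() is a view; deleting entries while looping over it
# raises "RuntimeError: dictionary changed size during iteration".
# Building a new dict with a comprehension avoids mutating during iteration.
query = {k: v for k, v in query.items() if v is not None}

assert query == {"product": "Fedora", "version": "20"}

In the wbmap hunk the same pattern applies; since v and wbmap[k] are the same object there, "if v[0] or v[1]" would be an equivalent, slightly tighter condition.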
From: Arun Babu Neelicattu abn@redhat.com
--- bugzilla/base.py | 5 +++++ 1 file changed, 5 insertions(+)
diff --git a/bugzilla/base.py b/bugzilla/base.py index a4e30c1..168c8ad 100644 --- a/bugzilla/base.py +++ b/bugzilla/base.py @@ -30,10 +30,15 @@ mimemagic = None
def _detect_filetype(fname): + # pylint: disable=E1103 + # E1103: Instance of 'bool' has no '%s' member + # pylint confuses mimemagic to be of type 'bool' global mimemagic
if mimemagic is None: try: + # pylint: disable=F0401 + # F0401: Unable to import 'magic' (import-error) import magic mimemagic = magic.open(magic.MAGIC_MIME_TYPE) mimemagic.load()
From: Arun Babu Neelicattu abn@redhat.com
--- bin/bugzilla | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/bin/bugzilla b/bin/bugzilla index b15730e..df02e33 100755 --- a/bin/bugzilla +++ b/bin/bugzilla @@ -771,7 +771,7 @@ def _format_output(bz, opt, buglist): (c['time'], c['author'], c['text']))
elif fieldname == "__unicode__": - val = unicode(b) + val = b.__unicode__() else: val = getattr(b, fieldname, "")
From: Arun Babu Neelicattu abn@redhat.com
- some import updates to facilitate easy porting to python3 - remove any usage that is not available in python3, eg: urllib2.__version__ --- bin/bugzilla | 9 ++++--- bugzilla/__init__.py | 8 +++--- bugzilla/base.py | 72 +++++++++++++++++++++++++------------------------- tests/__init__.py | 5 ++-- tests/rw_functional.py | 5 ++-- 5 files changed, 51 insertions(+), 48 deletions(-)
diff --git a/bin/bugzilla b/bin/bugzilla index df02e33..da0c7a0 100755 --- a/bin/bugzilla +++ b/bin/bugzilla @@ -22,8 +22,9 @@ import re import socket import sys import tempfile -import urllib2 -import xmlrpclib + +from urllib2 import HTTPError +from xmlrpclib import Fault, ProtocolError
import bugzilla
@@ -1169,12 +1170,12 @@ if __name__ == '__main__': log.debug("", exc_info=True) print("\nConnection lost/failed: %s" % str(e)) sys.exit(2) - except (xmlrpclib.Fault, urllib2.HTTPError): + except (Fault, HTTPError): e = sys.exc_info()[1] log.debug("", exc_info=True) print("\nServer error: %s" % str(e)) sys.exit(3) - except xmlrpclib.ProtocolError: + except ProtocolError: e = sys.exc_info()[1] log.debug("", exc_info=True) print("\nInvalid server response: %d %s" % (e.errcode, e.errmsg)) diff --git a/bugzilla/__init__.py b/bugzilla/__init__.py index 1697fea..7db68ef 100644 --- a/bugzilla/__init__.py +++ b/bugzilla/__init__.py @@ -13,7 +13,7 @@ __version__ = "0.9.0" version = __version__
import logging -import xmlrpclib +from xmlrpclib import Fault, ServerProxy
log = logging.getLogger("bugzilla")
@@ -33,7 +33,7 @@ class NovellBugzilla(Bugzilla34): def getBugzillaClassForURL(url): url = Bugzilla3.fix_url(url) log.debug("Detecting subclass for %s", url) - s = xmlrpclib.ServerProxy(url) + s = ServerProxy(url) rhbz = False bzversion = '' c = None @@ -51,7 +51,7 @@ def getBugzillaClassForURL(url): extensions = s.Bugzilla.extensions() if extensions.get('extensions', {}).get('RedHat', False): rhbz = True - except xmlrpclib.Fault: + except Fault: pass log.debug("rhbz=%s", str(rhbz))
@@ -60,7 +60,7 @@ def getBugzillaClassForURL(url): log.debug("Checking return value of Bugzilla.version()") r = s.Bugzilla.version() bzversion = r['version'] - except xmlrpclib.Fault: + except Fault: pass log.debug("bzversion='%s'", str(bzversion))
diff --git a/bugzilla/base.py b/bugzilla/base.py index 168c8ad..d4c2632 100644 --- a/bugzilla/base.py +++ b/bugzilla/base.py @@ -9,13 +9,15 @@ # option) any later version. See http://www.gnu.org/copyleft/gpl.html for # the full text of the license.
-import cookielib import os -import StringIO import sys -import urllib2 -import urlparse -import xmlrpclib + +from ConfigParser import SafeConfigParser +from cookielib import LoadError, LWPCookieJar, MozillaCookieJar +from urllib2 import Request, HTTPError, build_opener +from urlparse import urlparse, parse_qsl +from StringIO import StringIO +from xmlrpclib import Binary, Fault, ProtocolError, ServerProxy, Transport
import pycurl
@@ -75,7 +77,7 @@ def _decode_rfc2231_value(val):
def _build_cookiejar(cookiefile): - cj = cookielib.MozillaCookieJar(cookiefile) + cj = MozillaCookieJar(cookiefile) if cookiefile is None: return cj if not os.path.exists(cookiefile): @@ -90,17 +92,17 @@ def _build_cookiejar(cookiefile): try: cj.load() return cj - except cookielib.LoadError: + except LoadError: pass
try: - cj = cookielib.LWPCookieJar(cookiefile) + cj = LWPCookieJar(cookiefile) cj.load() - except cookielib.LoadError: + except LoadError: raise BugzillaError("cookiefile=%s not in LWP or Mozilla format" % cookiefile)
- retcj = cookielib.MozillaCookieJar(cookiefile) + retcj = MozillaCookieJar(cookiefile) for cookie in cj: retcj.set_cookie(cookie) retcj.save() @@ -115,7 +117,7 @@ def _check_http_error(uri, request_body, response_data): import httplib import urllib
- class FakeSocket(StringIO.StringIO): + class FakeSocket(StringIO): def makefile(self, *args, **kwarg): ignore = args ignore = kwarg @@ -127,30 +129,30 @@ def _check_http_error(uri, request_body, response_data): resp.code = httpresp.status resp.msg = httpresp.reason
- req = urllib2.Request(uri) + req = Request(uri) req.add_data(request_body) - opener = urllib2.build_opener() + opener = build_opener()
for handler in opener.handlers: if hasattr(handler, "http_response"): handler.http_response(req, resp) - except urllib2.HTTPError: + except HTTPError: raise except: pass
-class _CURLTransport(xmlrpclib.Transport): +class _CURLTransport(Transport): def __init__(self, url, cookiejar, sslverify=True, sslcafile=None, debug=0): - if hasattr(xmlrpclib.Transport, "__init__"): - xmlrpclib.Transport.__init__(self, use_datetime=False) + if hasattr(Transport, "__init__"): + Transport.__init__(self, use_datetime=False)
self.verbose = debug
# transport constructor needs full url too, as xmlrpc does not pass # scheme to request - self.scheme = urlparse.urlparse(url)[0] + self.scheme = urlparse(url)[0] if self.scheme not in ["http", "https"]: raise Exception("Invalid URL scheme: %s (%s)" % (self.scheme, url))
@@ -186,8 +188,8 @@ class _CURLTransport(xmlrpclib.Transport): self.c.setopt(pycurl.URL, url) self.c.setopt(pycurl.POSTFIELDS, request_body)
- b = StringIO.StringIO() - headers = StringIO.StringIO() + b = StringIO() + headers = StringIO() self.c.setopt(pycurl.WRITEFUNCTION, b.write) self.c.setopt(pycurl.HEADERFUNCTION, headers.write)
@@ -216,7 +218,7 @@ class _CURLTransport(xmlrpclib.Transport): raise KeyboardInterrupt except pycurl.error: e = sys.exc_info()[1] - raise xmlrpclib.ProtocolError(url, e[0], e[1], None) + raise ProtocolError(url, e[0], e[1], None)
b.seek(0) headers.seek(0) @@ -291,12 +293,12 @@ class BugzillaBase(object): ''' q = {} (ignore, ignore, path, - ignore, query, ignore) = urlparse.urlparse(url) + ignore, query, ignore) = urlparse(url)
if os.path.basename(path) not in ('buglist.cgi', 'query.cgi'): return {}
- for (k, v) in urlparse.parse_qsl(query): + for (k, v) in parse_qsl(query): if k not in q: q[k] = v elif isinstance(q[k], list): @@ -357,9 +359,8 @@ class BugzillaBase(object): self._components_details = {}
def _get_user_agent(self): - ret = ('Python-urllib2/%s bugzilla.py/%s %s/%s' % - (urllib2.__version__, __version__, - str(self.__class__.__name__), self.version)) + ret = ('Python-urllib bugzilla.py/%s %s/%s' % + (__version__, str(self.__class__.__name__), self.version)) return ret user_agent = property(_get_user_agent)
@@ -437,11 +438,10 @@ class BugzillaBase(object): ''' Read bugzillarc file(s) into memory. ''' - import ConfigParser if not configpath: configpath = self.configpath configpath = [os.path.expanduser(p) for p in configpath] - c = ConfigParser.SafeConfigParser() + c = SafeConfigParser() r = c.read(configpath) if not r: return @@ -477,7 +477,7 @@ class BugzillaBase(object): self._transport = _CURLTransport(url, self._cookiejar, sslverify=self._sslverify) self._transport.user_agent = self.user_agent - self._proxy = xmlrpclib.ServerProxy(url, self._transport) + self._proxy = ServerProxy(url, self._transport)
self.url = url @@ -533,7 +533,7 @@ class BugzillaBase(object): self.logged_in = True log.info("login successful - dropping password from memory") self.password = '' - except xmlrpclib.Fault: + except Fault: r = False
return r @@ -1218,7 +1218,7 @@ class BugzillaBase(object): def attachfile(self, idlist, attachfile, description, **kwargs): ''' Attach a file to the given bug IDs. Returns the ID of the attachment - or raises xmlrpclib.Fault if something goes wrong. + or raises XMLRPC Fault if something goes wrong.
attachfile may be a filename (which will be opened) or a file-like object, which must provide a 'read' method. If it's not one of these, @@ -1259,7 +1259,7 @@ class BugzillaBase(object): kwargs["file_name"] = kwargs.pop("filename")
kwargs['summary'] = description - kwargs['data'] = xmlrpclib.Binary(f.read()) + kwargs['data'] = Binary(f.read()) kwargs['ids'] = self._listify(idlist)
if 'file_name' not in kwargs and hasattr(f, "name"): @@ -1290,7 +1290,7 @@ class BugzillaBase(object): att_uri = self._attachment_uri(attachid)
headers = {} - ret = StringIO.StringIO() + ret = StringIO()
def headers_cb(buf): if not ":" in buf: @@ -1440,7 +1440,7 @@ class BugzillaBase(object): :kwarg names: list of user names to return data on :kwarg match: list of patterns. Returns users whose real name or login name match the pattern. - :raises xmlrpclib.Fault: Code 51: if a Bad Login Name was sent to the + :raises XMLRPC Fault: Code 51: if a Bad Login Name was sent to the names array. Code 304: if the user was not authorized to see user they requested. @@ -1467,7 +1467,7 @@ class BugzillaBase(object): '''Return a bugzilla User for the given username
:arg username: The username used in bugzilla. - :raises xmlrpclib.Fault: Code 51 if the username does not exist + :raises XMLRPC Fault: Code 51 if the username does not exist :returns: User record for the username ''' ret = self.getusers(username) @@ -1509,7 +1509,7 @@ class BugzillaBase(object): :arg email: The email address to use in bugzilla :kwarg name: Real name to associate with the account :kwarg password: Password to set for the bugzilla account - :raises xmlrpclib.Fault: Code 501 if the username already exists + :raises XMLRPC Fault: Code 501 if the username already exists Code 500 if the email address isn't valid Code 502 if the password is too short Code 503 if the password is too long diff --git a/tests/__init__.py b/tests/__init__.py index 10df4b4..38c252b 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -8,7 +8,8 @@ import imp import os import shlex import sys -import StringIO + +from StringIO import StringIO
_cleanup = [] @@ -63,7 +64,7 @@ def clicomm(argv, bzinstance, returnmain=False, printcliout=False, oldargv = sys.argv try: if not printcliout: - out = StringIO.StringIO() + out = StringIO() sys.stdout = out sys.stderr = out if stdin: diff --git a/tests/rw_functional.py b/tests/rw_functional.py index e4b0296..ffcb640 100644 --- a/tests/rw_functional.py +++ b/tests/rw_functional.py @@ -16,7 +16,8 @@ import os import random import sys import unittest -import urllib2 + +from urlparse import urlparse
import bugzilla from bugzilla import Bugzilla @@ -40,7 +41,7 @@ class BaseTest(unittest.TestCase):
def _testCookie(self): cookiefile = cf - domain = urllib2.urlparse.urlparse(self.url)[1] + domain = urlparse(self.url)[1] if os.path.exists(cookiefile): out = open(cookiefile).read(1024) if domain in out:
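The point of importing names rather than modules is that call sites stay identical when the module path changes between Python versions; a later patch in this series guards the import lines on sys.version_info, roughly as sketched here (the URL is a hypothetical placeholder, not part of the patch):

import sys

if sys.version_info[0] >= 3:
    # Python 3 module locations
    from xmlrpc.client import ServerProxy
    from urllib.parse import urlparse
else:
    # Python 2 module locations
    from xmlrpclib import ServerProxy
    from urlparse import urlparse

# Call sites reference only the imported names, never the module path,
# so they need no changes when the import block above is switched.
url = "https://bugzilla.example.com/xmlrpc.cgi"  # hypothetical endpoint
proxy = ServerProxy(url)   # constructing the proxy makes no network call
print(urlparse(url)[0])    # prints "https"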
From: Arun Babu Neelicattu abn@redhat.com
--- bin/bugzilla | 2 +- bugzilla/base.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-)
diff --git a/bin/bugzilla b/bin/bugzilla index da0c7a0..8b6b92b 100755 --- a/bin/bugzilla +++ b/bin/bugzilla @@ -993,7 +993,7 @@ def _do_set_attach(bz, opt, parser, args): data = sys.stdin.read(4096)
while data: - fileobj.write(data) + fileobj.write(data.encode(locale.getpreferredencoding())) data = sys.stdin.read(4096) fileobj.seek(0)
diff --git a/bugzilla/base.py b/bugzilla/base.py index d4c2632..d1a65c6 100644 --- a/bugzilla/base.py +++ b/bugzilla/base.py @@ -9,6 +9,7 @@ # option) any later version. See http://www.gnu.org/copyleft/gpl.html for # the full text of the license.
+import locale import os import sys
@@ -1259,7 +1260,12 @@ class BugzillaBase(object): kwargs["file_name"] = kwargs.pop("filename")
kwargs['summary'] = description - kwargs['data'] = Binary(f.read()) + + data = f.read() + if not isinstance(data, bytes): + data = data.encode(locale.getpreferredencoding()) + kwargs['data'] = Binary(data) + kwargs['ids'] = self._listify(idlist)
if 'file_name' not in kwargs and hasattr(f, "name"):
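The reason for the extra encode, as the patch implies, is that on Python 3 text read from stdin or a text-mode file arrives as str, while the XML-RPC Binary wrapper wants bytes. A minimal sketch with hypothetical in-memory data, written against the Python 3 module path:

import locale
from xmlrpc.client import Binary  # xmlrpclib.Binary on Python 2

data = "attachment contents\n"  # hypothetical text read from a file or stdin

# Binary expects bytes; encode str with the locale's preferred encoding,
# mirroring what the patch does before building the XML-RPC payload.
if not isinstance(data, bytes):
    data = data.encode(locale.getpreferredencoding())

payload = Binary(data)
print(len(payload.data))  # number of encoded bytes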
From: Arun Babu Neelicattu abn@redhat.com
--- bin/bugzilla | 11 +-- bugzilla/base.py | 211 ++++++++++++++++++--------------------------------- python-bugzilla.spec | 4 +- requirements.txt | 2 +- 4 files changed, 80 insertions(+), 148 deletions(-)
diff --git a/bin/bugzilla b/bin/bugzilla index 8b6b92b..c694ebe 100755 --- a/bin/bugzilla +++ b/bin/bugzilla @@ -23,8 +23,8 @@ import socket import sys import tempfile
-from urllib2 import HTTPError from xmlrpclib import Fault, ProtocolError +from requests.exceptions import SSLError
import bugzilla
@@ -1170,7 +1170,7 @@ if __name__ == '__main__': log.debug("", exc_info=True) print("\nConnection lost/failed: %s" % str(e)) sys.exit(2) - except (Fault, HTTPError): + except (Fault, bugzilla.BugzillaError): e = sys.exc_info()[1] log.debug("", exc_info=True) print("\nServer error: %s" % str(e)) @@ -1181,16 +1181,13 @@ if __name__ == '__main__': print("\nInvalid server response: %d %s" % (e.errcode, e.errmsg))
# Give SSL recommendations - import pycurl - sslerrcodes = [getattr(pycurl, ename) for ename in dir(pycurl) if - ename.startswith("E_SSL")] - if e.errcode in sslerrcodes: + if isinstance(e, SSLError): print("\nIf you trust the remote server, you can work " "around this error with:\n" " bugzilla --nosslverify ...")
# Detect redirect - redir = (e.headers and e.headers.getheader("location", 0) or None) + redir = (e.headers and 'location' in e.headers) if redir: print("\nServer was attempting a redirect. Try: " " bugzilla --bugzilla %s ..." % redir) diff --git a/bugzilla/base.py b/bugzilla/base.py index d1a65c6..024c839 100644 --- a/bugzilla/base.py +++ b/bugzilla/base.py @@ -15,12 +15,11 @@ import sys
from ConfigParser import SafeConfigParser from cookielib import LoadError, LWPCookieJar, MozillaCookieJar -from urllib2 import Request, HTTPError, build_opener +from io import BytesIO from urlparse import urlparse, parse_qsl -from StringIO import StringIO from xmlrpclib import Binary, Fault, ProtocolError, ServerProxy, Transport
-import pycurl +import requests
from bugzilla import __version__, log from bugzilla.bug import _Bug, _User @@ -63,20 +62,6 @@ def _detect_filetype(fname): return None
-def _decode_rfc2231_value(val): - # BUG WORKAROUND: decode_header doesn't work unless there's whitespace - # around the encoded string (see http://bugs.python.org/issue1079) - from email import utils - from email import header - - # pylint: disable=W1401 - # Anomolous backslash in string - val = utils.ecre.sub(' \g<0> ', val) - val = val.strip('"') - return ''.join(f[0].decode(f[1] or 'us-ascii') - for f in header.decode_header(val)) - - def _build_cookiejar(cookiefile): cj = MozillaCookieJar(cookiefile) if cookiefile is None: @@ -110,46 +95,18 @@ def _build_cookiejar(cookiefile): return retcj
-def _check_http_error(uri, request_body, response_data): - # This pulls some of the guts from urllib to give us HTTP error - # code checking. Wrap it all in try/except incase this breaks in - # the future, it's only for error handling. - try: - import httplib - import urllib - - class FakeSocket(StringIO): - def makefile(self, *args, **kwarg): - ignore = args - ignore = kwarg - return self - - httpresp = httplib.HTTPResponse(FakeSocket(response_data)) - httpresp.begin() - resp = urllib.addinfourl(FakeSocket(response_data), httpresp.msg, uri) - resp.code = httpresp.status - resp.msg = httpresp.reason - - req = Request(uri) - req.add_data(request_body) - opener = build_opener() - - for handler in opener.handlers: - if hasattr(handler, "http_response"): - handler.http_response(req, resp) - except HTTPError: - raise - except: - pass - +class RequestsTransport(Transport): + user_agent = 'Python/Bugzilla'
-class _CURLTransport(Transport): def __init__(self, url, cookiejar, sslverify=True, sslcafile=None, debug=0): + # pylint: disable=W0231 + # pylint does not handle multiple import of Transport well if hasattr(Transport, "__init__"): Transport.__init__(self, use_datetime=False)
self.verbose = debug + self._cookiejar = cookiejar
# transport constructor needs full url too, as xmlrpc does not pass # scheme to request @@ -157,87 +114,67 @@ class _CURLTransport(Transport): if self.scheme not in ["http", "https"]: raise Exception("Invalid URL scheme: %s (%s)" % (self.scheme, url))
- self.c = pycurl.Curl() - self.c.setopt(pycurl.POST, 1) - self.c.setopt(pycurl.CONNECTTIMEOUT, 30) - self.c.setopt(pycurl.HTTPHEADER, [ - "Content-Type: text/xml", - ]) - self.c.setopt(pycurl.VERBOSE, debug) - - self.set_cookiejar(cookiejar) + self.use_https = self.scheme == 'https'
- # ssl settings - if self.scheme == "https": - # override curl built-in ca file setting - if sslcafile is not None: - self.c.setopt(pycurl.CAINFO, sslcafile) - - # disable ssl verification - if not sslverify: - self.c.setopt(pycurl.SSL_VERIFYPEER, 0) - self.c.setopt(pycurl.SSL_VERIFYHOST, 0) + self.request_defaults = { + 'cert': sslcafile if self.use_https else None, + 'cookies': cookiejar, + 'verify': sslverify, + 'headers': { + 'Content-Type': 'text/xml', + 'User-Agent': self.user_agent, + } + }
- def set_cookiejar(self, cj): - self.c.setopt(pycurl.COOKIEFILE, cj.filename or "") - self.c.setopt(pycurl.COOKIEJAR, cj.filename or "") + def parse_response(self, response): + """ Parse XMLRPC response """ + parser, unmarshaller = self.getparser() + parser.feed(response.text.encode('utf-8')) + parser.close() + return unmarshaller.close()
- def get_cookies(self): - return self.c.getinfo(pycurl.INFO_COOKIELIST) + def _request_helper(self, url, request_body): + """ + A helper method to assist in making a request and provide a parsed + response. + """ + try: + response = requests.post( + url, data=request_body, **self.request_defaults)
- def _open_helper(self, url, request_body): - self.c.setopt(pycurl.URL, url) - self.c.setopt(pycurl.POSTFIELDS, request_body) + # We expect utf-8 from the server + response.encoding = 'UTF-8'
- b = StringIO() - headers = StringIO() - self.c.setopt(pycurl.WRITEFUNCTION, b.write) - self.c.setopt(pycurl.HEADERFUNCTION, headers.write) + # update/set any cookies + for cookie in response.cookies: + self._cookiejar.set_cookie(cookie)
- try: - m = pycurl.CurlMulti() - m.add_handle(self.c) - while True: - if m.perform()[0] == -1: - continue - num, ok, err = m.info_read() - ignore = num + if self._cookiejar.filename is not None: + # Save is required only if we have a filename + self._cookiejar.save()
- if ok: - m.remove_handle(self.c) - break - if err: - m.remove_handle(self.c) - raise pycurl.error(*err[0][1:]) - if m.select(.1) == -1: - # Looks like -1 is passed straight up from select(2) - # While it's not true that this will always be caused - # by SIGINT, it should be the only case we hit - log.debug("pycurl select failed, this likely came from " - "SIGINT, raising") - m.remove_handle(self.c) - raise KeyboardInterrupt - except pycurl.error: + response.raise_for_status() + return self.parse_response(response) + except requests.RequestException: e = sys.exc_info()[1] - raise ProtocolError(url, e[0], e[1], None) - - b.seek(0) - headers.seek(0) - return b, headers + raise ProtocolError( + url, response.status_code, str(e), response.headers) + except Fault: + raise sys.exc_info()[1] + except Exception: + # pylint: disable=W0201 + e = BugzillaError(str(sys.exc_info()[1])) + e.__traceback__ = sys.exc_info()[2] + raise e
def request(self, host, handler, request_body, verbose=0): self.verbose = verbose url = "%s://%s%s" % (self.scheme, host, handler)
# xmlrpclib fails to escape \r - request_body = request_body.replace('\r', '&#13;') - - body, headers = self._open_helper(url, request_body) - _check_http_error(url, body.getvalue(), headers.getvalue()) - - return self.parse_response(body) - + request_body = request_body.replace(b'\r', b'&#13;')
+ return self._request_helper(url, request_body)
class BugzillaError(Exception): @@ -475,8 +412,8 @@ class BugzillaBase(object): url = self.url url = self.fix_url(url)
- self._transport = _CURLTransport(url, self._cookiejar, - sslverify=self._sslverify) + self._transport = RequestsTransport( + url, self._cookiejar, sslverify=self._sslverify) self._transport.user_agent = self.user_agent self._proxy = ServerProxy(url, self._transport)
@@ -1293,29 +1230,27 @@ class BugzillaBase(object): def openattachment(self, attachid): '''Get the contents of the attachment with the given attachment ID. Returns a file-like object.''' + + def get_filename(headers): + import re + + match = re.search( + r'^.*filename="?(.*)"$', + headers.get('content-disposition', '') + ) + + # default to attchid if no match was found + return match.group(1) if match else attachid + att_uri = self._attachment_uri(attachid)
- headers = {} - ret = StringIO() + response = requests.get(att_uri, cookies=self._cookiejar, stream=True)
- def headers_cb(buf): - if not ":" in buf: - return - name, val = buf.split(":", 1) - headers[name.lower()] = val - - c = pycurl.Curl() - c.setopt(pycurl.URL, att_uri) - c.setopt(pycurl.WRITEFUNCTION, ret.write) - c.setopt(pycurl.HEADERFUNCTION, headers_cb) - c.setopt(pycurl.COOKIEFILE, self._cookiejar.filename or "") - c.perform() - c.close() - - disp = headers['content-disposition'].split(';') - disp.pop(0) - parms = dict([p.strip().split("=", 1) for p in disp]) - ret.name = _decode_rfc2231_value(parms['filename']) + ret = BytesIO() + for chunk in response.iter_content(chunk_size=1024): + if chunk: + ret.write(chunk) + ret.name = get_filename(response.headers)
# Hooray, now we have a file-like object with .read() and .name ret.seek(0) diff --git a/python-bugzilla.spec b/python-bugzilla.spec index c6d6f81..1445a94 100644 --- a/python-bugzilla.spec +++ b/python-bugzilla.spec @@ -19,8 +19,8 @@ BuildRequires: python-setuptools BuildRequires: python-setuptools-devel %endif
-BuildRequires: python-pycurl -Requires: python-pycurl +BuildRequires: python-requests +Requires: python-requests
%if ! 0%{?rhel} || 0%{?rhel} >= 6 Requires: python-magic diff --git a/requirements.txt b/requirements.txt index d4ba5cd..f229360 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1 @@ -pycurl +requests
This one might have an issue with the latest requests release.
https://github.com/kennethreitz/requests/issues/1711
Was tested on requests-2.0.0.
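The core idea of the patch, stripped of the cookie, SSL, and error-handling details, is an xmlrpclib transport whose request() posts the body with requests and feeds the reply back through xmlrpclib's own parser. A minimal sketch of that shape, assuming a hypothetical server URL (this is not the class as merged):

import requests
from xmlrpc.client import ServerProxy, Transport  # xmlrpclib on Python 2


class MinimalRequestsTransport(Transport):
    """Sketch only: route xmlrpclib's HTTP POST through requests."""

    def request(self, host, handler, request_body, verbose=0):
        url = "https://%s%s" % (host, handler)
        response = requests.post(url, data=request_body,
                                 headers={"Content-Type": "text/xml"})
        response.raise_for_status()

        # Reuse xmlrpclib's parser/unmarshaller on the raw response body
        parser, unmarshaller = self.getparser()
        parser.feed(response.content)
        parser.close()
        return unmarshaller.close()


# Hypothetical endpoint; any method call on the proxy goes through the
# transport above.
proxy = ServerProxy("https://bugzilla.example.com/xmlrpc.cgi",
                    transport=MinimalRequestsTransport())

The RequestsTransport in the patch additionally copies response cookies back into the cookiejar, honours sslverify/sslcafile, and translates requests exceptions into ProtocolError or BugzillaError.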
----- Original Message -----
From: abn@redhat.com To: python-bugzilla@lists.fedorahosted.org Cc: "Arun Babu Neelicattu" abn@redhat.com Sent: Tuesday, October 29, 2013 11:22:29 PM Subject: [PATCH 10/13] Use requests instead of pycurl
On 10/29/2013 09:22 AM, abn@redhat.com wrote:
From: Arun Babu Neelicattu abn@redhat.com
bin/bugzilla | 11 +-- bugzilla/base.py | 211 ++++++++++++++++++--------------------------------- python-bugzilla.spec | 4 +- requirements.txt | 2 +- 4 files changed, 80 insertions(+), 148 deletions(-)
I've pushed 1-9, 11, 12. I'll push this when I get time to test it some more. 13 didn't apply cleanly without this patch applied, so if you wanted to rework it and resend I can apply it first, or just wait and I'll push it after this patch.
Also, feel free to send the python2/python3 test infrastructure patches whenever they're ready and I'll poke at those too.
Thanks for all the work Arun!
- Cole
On 10/29/2013 09:22 AM, abn@redhat.com wrote:
From: Arun Babu Neelicattu abn@redhat.com
bin/bugzilla | 11 +-- bugzilla/base.py | 211 ++++++++++++++++++--------------------------------- python-bugzilla.spec | 4 +- requirements.txt | 2 +- 4 files changed, 80 insertions(+), 148 deletions(-)
Okay, I played with this some, patches are pushed now!
Somehow I managed to empty ~/.bugzillacookies twice, but I haven't found a way to reproduce it yet, so I'm not sure if it was just user error or not (I was messing with the cookies in other terminals, etc.). But if anyone else hits similar issues, please speak up. This is with python-requests 1.2 on F20.
And there was some minor error handling issues with SSL problems, so I added a patch on top to fix those.
I tried to test things on RHEL6, but apparently RHEL6 doesn't have a python27 package yet :/ There's one under review that has had activity this month:
https://bugzilla.redhat.com/show_bug.cgi?id=829892
But I think it's fine not to push the next version to EPEL6 until that package is ready; I don't think there's much in the way of critical bug fixes.
Thanks, Cole
Thanks Cole that is awesome.
As for the cookies, I was not able to reproduce it. Will try again a bit later. For RHEL6, if required you can use IUS [1] for py27.
I have sent a patch that adds contrib scripts and additional documentation. I need to still play around with tox, should be able to get it soon-ish.
-arun
[1] http://iuscommunity.org/pages/Repos.html
----- Original Message -----
From: "Cole Robinson" crobinso@redhat.com To: "python-bugzilla user/developer list" python-bugzilla@lists.fedorahosted.org Cc: "Arun Neelicattu" abn@redhat.com Sent: Friday, November 1, 2013 5:57:54 AM Subject: Re: [python-bugzilla] [PATCH 10/13] Use requests instead of pycurl
On Thu, Oct 31, 2013 at 07:35:41PM -0400, Arun Neelicattu wrote:
Thanks Cole that is awesome.
As for the cookies, I was not able to reproduce it. Will try again a bit later. For RHEL6, if required you can use IUS [1] for py27.
I have sent a patch that adds contrib scripts and additional documentation. I need to still play around with tox, should be able to get it soon-ish.
-arun
Regarding switching to requests -- did anyone test whether it works with proxies? We just got a strange error message for python-fedora that had to do with proxies... we hadn't had this happen while we were using pycurl, only since we've switched to requests.
-Toshio
Hi Toshio,
Do you have a stack trace I can look at or some details on the error?
-arun
----- Original Message -----
From: "Toshio Kuratomi" a.badger@gmail.com To: "python-bugzilla user/developer list" python-bugzilla@lists.fedorahosted.org Sent: Friday, November 1, 2013 9:42:56 AM Subject: Re: [python-bugzilla] [PATCH 10/13] Use requests instead of pycurl
On Thu, Oct 31, 2013 at 07:51:19PM -0400, Arun Neelicattu wrote:
Hi Toshio,
Do you have a stack trace I can look at or some details on the error?
https://bugzilla.redhat.com/show_bug.cgi?id=1019120
No worries if you can't look at it -- it's not a python-bugzilla bug after all :-)
-Toshio
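On the proxy question above: requests does honour the standard proxy environment variables by default and also accepts an explicit mapping, so whether it matches what pycurl/libcurl did in a given setup is exactly the kind of thing worth testing. A short sketch; the hosts are placeholders, not real endpoints.

import requests

# requests reads HTTP_PROXY / HTTPS_PROXY / NO_PROXY from the environment
# by default (session.trust_env is True); an explicit mapping can also be
# set per session or per request.
session = requests.Session()
session.proxies.update({
    "https": "http://proxy.example.com:3128",  # hypothetical proxy
})

# A call such as session.get("https://bugzilla.example.com/xmlrpc.cgi")
# would now be routed through the proxy configured above.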
From: Arun Babu Neelicattu abn@redhat.com
- fix import statement to prevent accidental usage --- bugzilla/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/bugzilla/__init__.py b/bugzilla/__init__.py index 7db68ef..1f9edb9 100644 --- a/bugzilla/__init__.py +++ b/bugzilla/__init__.py @@ -12,10 +12,10 @@ __version__ = "0.9.0" version = __version__
-import logging +from logging import getLogger from xmlrpclib import Fault, ServerProxy
-log = logging.getLogger("bugzilla") +log = getLogger("bugzilla")
from bugzilla.base import BugzillaBase as _BugzillaBase @@ -42,7 +42,7 @@ def getBugzillaClassForURL(url): log.info("Using RHBugzilla for URL containing bugzilla.redhat.com") return RHBugzilla if "bugzilla.novell.com" in url: - logging.info("Using NovellBugzilla for URL containing novell.com") + log.info("Using NovellBugzilla for URL containing novell.com") return NovellBugzilla
# Check for a Red Hat extension
From: Arun Babu Neelicattu abn@redhat.com
--- bin/bugzilla | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-)
diff --git a/bin/bugzilla b/bin/bugzilla index c694ebe..da0b52e 100755 --- a/bin/bugzilla +++ b/bin/bugzilla @@ -47,13 +47,16 @@ log.addHandler(handler) ################
def to_encoding(ustring): + string = '' if isinstance(ustring, basestring): - if isinstance(ustring, unicode): - return ustring.encode(locale.getpreferredencoding(), 'replace') - return ustring - if ustring is None: - return u'' - return str(ustring) + string = ustring + elif ustring is not None: + string = str(ustring) + + if sys.version_info.major >= 3: + return string + else: + return string.encode(locale.getpreferredencoding(), 'replace')
def open_without_clobber(name, *args): @@ -779,7 +782,7 @@ def _format_output(bz, opt, buglist): vallist = type(val) is list and val or [val] val = ','.join([to_encoding(v) for v in vallist])
- return to_encoding(val) + return val
for b in buglist: print(format_field_re.sub(bug_field, opt.outputformat))
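Read together with the next patch (which shims basestring on Python 3), the rewritten helper returns text unchanged on Python 3 and a locale-encoded byte string on Python 2. A standalone sketch of that behaviour with example inputs:

import locale
import sys

try:
    basestring                     # exists on Python 2
except NameError:
    basestring = (str, bytes)      # the shim the following patch adds


def to_encoding(ustring):
    string = ''
    if isinstance(ustring, basestring):
        string = ustring
    elif ustring is not None:
        string = str(ustring)

    if sys.version_info[0] >= 3:
        return string              # Python 3: hand str straight to print()
    # Python 2: encode unicode to the locale's encoding for safe printing
    return string.encode(locale.getpreferredencoding(), 'replace')


print(repr(to_encoding(None)))     # ''
print(repr(to_encoding(42)))       # '42'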
From: Arun Babu Neelicattu abn@redhat.com
--- bin/bugzilla | 8 +++++++- bugzilla/__init__.py | 8 +++++++- bugzilla/base.py | 18 ++++++++++++++---- tests/__init__.py | 6 ++++-- tests/rw_functional.py | 6 +++++- 5 files changed, 37 insertions(+), 9 deletions(-)
diff --git a/bin/bugzilla b/bin/bugzilla index da0b52e..5190d11 100755 --- a/bin/bugzilla +++ b/bin/bugzilla @@ -23,7 +23,13 @@ import socket import sys import tempfile
-from xmlrpclib import Fault, ProtocolError +if sys.version_info.major >= 3: + # pylint: disable=F0401,W0622 + from xmlrpc.client import Fault, ProtocolError + basestring = (str, bytes) +else: + from xmlrpclib import Fault, ProtocolError + from requests.exceptions import SSLError
import bugzilla diff --git a/bugzilla/__init__.py b/bugzilla/__init__.py index 1f9edb9..e6fdeef 100644 --- a/bugzilla/__init__.py +++ b/bugzilla/__init__.py @@ -12,8 +12,14 @@ __version__ = "0.9.0" version = __version__
+import sys from logging import getLogger -from xmlrpclib import Fault, ServerProxy + +if sys.version_info.major >= 3: + # pylint: disable=F0401 + from xmlrpc.client import Fault, ServerProxy +else: + from xmlrpclib import Fault, ServerProxy
log = getLogger("bugzilla")
diff --git a/bugzilla/base.py b/bugzilla/base.py index 024c839..774a5a6 100644 --- a/bugzilla/base.py +++ b/bugzilla/base.py @@ -13,11 +13,21 @@ import locale import os import sys
-from ConfigParser import SafeConfigParser -from cookielib import LoadError, LWPCookieJar, MozillaCookieJar from io import BytesIO -from urlparse import urlparse, parse_qsl -from xmlrpclib import Binary, Fault, ProtocolError, ServerProxy, Transport + +if sys.version_info.major >= 3: + # pylint: disable=F0401,E0611 + from configparser import SafeConfigParser + from http.cookiejar import LoadError, LWPCookieJar, MozillaCookieJar + from urllib.parse import urlparse, parse_qsl + from xmlrpc.client import ( + Binary, Fault, ProtocolError, ServerProxy, Transport) +else: + from ConfigParser import SafeConfigParser + from cookielib import LoadError, LWPCookieJar, MozillaCookieJar + from urlparse import urlparse, parse_qsl + from xmlrpclib import ( + Binary, Fault, ProtocolError, ServerProxy, Transport)
import requests
diff --git a/tests/__init__.py b/tests/__init__.py index 38c252b..0cc7108 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -2,14 +2,16 @@ from __future__ import print_function
import atexit -import commands import difflib import imp import os import shlex import sys
-from StringIO import StringIO +if sys.version_info.major >= 3: + from io import StringIO +else: + from StringIO import StringIO
_cleanup = [] diff --git a/tests/rw_functional.py b/tests/rw_functional.py index ffcb640..a71dffe 100644 --- a/tests/rw_functional.py +++ b/tests/rw_functional.py @@ -17,7 +17,11 @@ import random import sys import unittest
-from urlparse import urlparse +if sys.version_info.major >= 3: + # pylint: disable=F0401,E0611 + from urllib.parse import urlparse +else: + from urlparse import urlparse
import bugzilla from bugzilla import Bugzilla