--- httplib2\__init__.py	(original)
+++ httplib2\__init__.py	(refactored)
@@ -1,4 +1,4 @@
-from __future__ import generators
+
 """
 httplib2
 
@@ -27,10 +27,10 @@
 
 import re
 import sys
-import StringIO
+import io
 import gzip
 import zlib
-import httplib
+import http.client
 import base64
 import os
 import copy
@@ -38,13 +38,14 @@
 import time
 import random
 import errno
+import collections
 
 # calibre Python 3 compatibility.
 try:
     from urllib.parse import urlparse, urljoin, splitport
 except ImportError as e:
-    from urlparse import urlparse, urljoin 
-    from urllib import splitport
+    from urllib.parse import urlparse, urljoin
+    from urllib.parse import _splitport as splitport
 try:
 #     import email
     import email.Utils as email_utils
@@ -73,7 +74,7 @@
     from httplib2 import socks
 except ImportError:
     try:
-        import socks
+        from . import socks
     except (ImportError, AttributeError):
         socks = None
 
@@ -124,14 +125,14 @@
                 "the ssl module installed. To avoid this error, install "
                 "the ssl module, or explicity disable validation.")
     ssl_sock = socket.ssl(sock, key_file, cert_file)
-    return httplib.FakeSocket(sock, ssl_sock)
+    return http.client.FakeSocket(sock, ssl_sock)  # NOTE(review): http.client has no FakeSocket in Python 3; this no-ssl fallback cannot work on py3 -- verify
 
 if ssl is None:
     _ssl_wrap_socket = _ssl_wrap_socket_unsupported
 
 
 if sys.version_info >= (2,3):
-    from iri2uri import iri2uri
+    from .iri2uri import iri2uri
 else:
     def iri2uri(uri):
         return uri
@@ -165,11 +166,11 @@
 def HTTPResponse__getheaders(self):
     """Return list of (header, value) tuples."""
     if self.msg is None:
-        raise httplib.ResponseNotReady()
-    return self.msg.items()
-
-if not hasattr(httplib.HTTPResponse, 'getheaders'):
-    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
+        raise http.client.ResponseNotReady()
+    return list(self.msg.items())
+
+if not hasattr(http.client.HTTPResponse, 'getheaders'):
+    http.client.HTTPResponse.getheaders = HTTPResponse__getheaders
 
 # All exceptions raised here derive from HttpLib2Error
 class HttpLib2Error(Exception): pass
@@ -242,7 +243,7 @@
 def _get_end2end_headers(response):
     hopbyhop = list(HOP_BY_HOP)
     hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
-    return [header for header in response.keys() if header not in hopbyhop]
+    return [header for header in list(response.keys()) if header not in hopbyhop]
 
 URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
 
@@ -290,7 +291,7 @@
                 filename = filename.encode('idna')
     except UnicodeError:
         pass
-    if isinstance(filename,unicode):
+    if isinstance(filename,str):  # NOTE(review): on py3 this branch now always runs, and encoding to bytes breaks the later str-pattern regex sub -- verify
         filename=filename.encode('utf-8')
     filemd5 = _md5(filename).hexdigest()
     filename = re_url_scheme.sub("", filename)
@@ -303,11 +304,11 @@
 
 NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
 def _normalize_headers(headers):
-    return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip())  for (key, value) in headers.items()])
+    return dict([ (key.lower(), NORMALIZE_SPACE.sub(' ', value).strip())  for (key, value) in list(headers.items())])
 
 def _parse_cache_control(headers):
     retval = {}
-    if headers.has_key('cache-control'):
+    if 'cache-control' in headers:
         parts =  headers['cache-control'].split(',')
         parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
         parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
@@ -332,7 +333,7 @@
     """Returns a dictionary of dictionaries, one dict
     per auth_scheme."""
     retval = {}
-    if headers.has_key(headername):
+    if headername in headers:
         try:
 
             authenticate = headers[headername].strip()
@@ -392,26 +393,26 @@
     cc = _parse_cache_control(request_headers)
     cc_response = _parse_cache_control(response_headers)
 
-    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
+    if 'pragma' in request_headers and request_headers['pragma'].lower().find('no-cache') != -1:
         retval = "TRANSPARENT"
         if 'cache-control' not in request_headers:
             request_headers['cache-control'] = 'no-cache'
-    elif cc.has_key('no-cache'):
+    elif 'no-cache' in cc:
         retval = "TRANSPARENT"
-    elif cc_response.has_key('no-cache'):
+    elif 'no-cache' in cc_response:
         retval = "STALE"
-    elif cc.has_key('only-if-cached'):
+    elif 'only-if-cached' in cc:
         retval = "FRESH"
-    elif response_headers.has_key('date'):
+    elif 'date' in response_headers:
         date = calendar.timegm(email_utils.parsedate_tz(response_headers['date']))
         now = time.time()
         current_age = max(0, now - date)
-        if cc_response.has_key('max-age'):
+        if 'max-age' in cc_response:
             try:
                 freshness_lifetime = int(cc_response['max-age'])
             except ValueError:
                 freshness_lifetime = 0
-        elif response_headers.has_key('expires'):
+        elif 'expires' in response_headers:
             expires = email_utils.parsedate_tz(response_headers['expires'])
             if None == expires:
                 freshness_lifetime = 0
@@ -419,12 +420,12 @@
                 freshness_lifetime = max(0, calendar.timegm(expires) - date)
         else:
             freshness_lifetime = 0
-        if cc.has_key('max-age'):
+        if 'max-age' in cc:
             try:
                 freshness_lifetime = int(cc['max-age'])
             except ValueError:
                 freshness_lifetime = 0
-        if cc.has_key('min-fresh'):
+        if 'min-fresh' in cc:
             try:
                 min_fresh = int(cc['min-fresh'])
             except ValueError:
@@ -440,7 +441,7 @@
         encoding = response.get('content-encoding', None)
         if encoding in ['gzip', 'deflate']:
             if encoding == 'gzip':
-                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
+                content = gzip.GzipFile(fileobj=io.BytesIO(new_content)).read()
             if encoding == 'deflate':
                 content = zlib.decompress(content, -zlib.MAX_WBITS)
             response['content-length'] = str(len(content))
@@ -456,11 +457,11 @@
     if cachekey:
         cc = _parse_cache_control(request_headers)
         cc_response = _parse_cache_control(response_headers)
-        if cc.has_key('no-store') or cc_response.has_key('no-store'):
+        if 'no-store' in cc or 'no-store' in cc_response:
             cache.delete(cachekey)
         else:
             info = email_message.Message()
-            for key, value in response_headers.items():
+            for key, value in list(response_headers.items()):
                 if key not in ['status','content-encoding','transfer-encoding']:
                     info[key] = value
 
@@ -592,7 +593,7 @@
         self.challenge['nc'] += 1
 
     def response(self, response, content):
-        if not response.has_key('authentication-info'):
+        if 'authentication-info' not in response:
             challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
             if 'true' == challenge.get('stale'):
                 self.challenge['nonce'] = challenge['nonce']
@@ -601,7 +602,7 @@
         else:
             updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
 
-            if updated_challenge.has_key('nextnonce'):
+            if 'nextnonce' in updated_challenge:
                 self.challenge['nonce'] = updated_challenge['nextnonce']
                 self.challenge['nc'] = 1
         return False
@@ -693,7 +694,7 @@
 
 class GoogleLoginAuthentication(Authentication):
     def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        from urllib import urlencode
+        from urllib.parse import urlencode
         Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
         challenge = _parse_www_authenticate(response, 'www-authenticate')
         service = challenge['googlelogin'].get('service', 'xapi')
@@ -910,7 +911,7 @@
     return pi
 
 
-class HTTPConnectionWithTimeout(httplib.HTTPConnection):
+class HTTPConnectionWithTimeout(http.client.HTTPConnection):
     """
     HTTPConnection subclass that supports timeouts
 
@@ -921,7 +922,7 @@
     """
 
     def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
-        httplib.HTTPConnection.__init__(self, host, port, strict)
+        http.client.HTTPConnection.__init__(self, host, port)  # 'strict' was removed in Python 3.4; passing it positionally would bind to 'timeout'
         self.timeout = timeout
         self.proxy_info = proxy_info
 
@@ -958,18 +959,18 @@
                     self.sock.settimeout(self.timeout)
                     # End of difference from httplib.
                 if self.debuglevel > 0:
-                    print("connect: (%s, %s) ************" % (self.host, self.port))
+                    print(("connect: (%s, %s) ************" % (self.host, self.port)))
                     if use_proxy:
-                        print("proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
+                        print(("proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))))
                 if use_proxy:
                     self.sock.connect((self.host, self.port) + sa[2:])
                 else:
                     self.sock.connect(sa)
             except socket.error as msg:
                 if self.debuglevel > 0:
-                    print("connect fail: (%s, %s)" % (self.host, self.port))
+                    print(("connect fail: (%s, %s)" % (self.host, self.port)))
                     if use_proxy:
-                        print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
+                        print(("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))))
                 if self.sock:
                     self.sock.close()
                 self.sock = None
@@ -978,7 +979,7 @@
         if not self.sock:
             raise socket.error(msg)
 
-class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
+class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
     """
     This class allows communication via SSL.
 
@@ -991,7 +992,7 @@
                  strict=None, timeout=None, proxy_info=None,
                  ca_certs=None, disable_ssl_certificate_validation=False,
                  ssl_version=None):
-        httplib.HTTPSConnection.__init__(self, host, port=port,
+        http.client.HTTPSConnection.__init__(self, host, port=port,  # NOTE(review): 'strict' kwarg on the continuation line was removed in Python 3.4 -- this call will raise TypeError; verify
                                          key_file=key_file,
                                          cert_file=cert_file, strict=strict)
         self.timeout = timeout
@@ -1093,9 +1094,9 @@
                     self.disable_ssl_certificate_validation, self.ca_certs,
                     self.ssl_version, self.host)
                 if self.debuglevel > 0:
-                    print("connect: (%s, %s)" % (self.host, self.port))
+                    print(("connect: (%s, %s)" % (self.host, self.port)))
                     if use_proxy:
-                        print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
+                        print(("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))))
                 if not self.disable_ssl_certificate_validation:
                     cert = self.sock.getpeercert()
                     hostname = self.host.split(':', 0)[0]
@@ -1121,9 +1122,9 @@
                 raise
             except socket.error as msg:
                 if self.debuglevel > 0:
-                    print("connect fail: (%s, %s)" % (self.host, self.port))
+                    print(("connect fail: (%s, %s)" % (self.host, self.port)))
                     if use_proxy:
-                        print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
+                        print(("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))))
                 if self.sock:
                     self.sock.close()
                 self.sock = None
@@ -1151,7 +1152,7 @@
     return fixed_fetch
 
 
-class AppEngineHttpConnection(httplib.HTTPConnection):
+class AppEngineHttpConnection(http.client.HTTPConnection):
     """Use httplib on App Engine, but compensate for its weirdness.
 
     The parameters key_file, cert_file, proxy_info, ca_certs,
@@ -1162,11 +1163,11 @@
                  strict=None, timeout=None, proxy_info=None, ca_certs=None,
                  disable_ssl_certificate_validation=False,
                  ssl_version=None):
-        httplib.HTTPConnection.__init__(self, host, port=port,
+        http.client.HTTPConnection.__init__(self, host, port=port,  # NOTE(review): 'strict' kwarg on the continuation line was removed in Python 3.4 -- verify
                                         strict=strict, timeout=timeout)
 
 
-class AppEngineHttpsConnection(httplib.HTTPSConnection):
+class AppEngineHttpsConnection(http.client.HTTPSConnection):
     """Same as AppEngineHttpConnection, but for HTTPS URIs.
 
     The parameters proxy_info, ca_certs, disable_ssl_certificate_validation,
@@ -1176,7 +1177,7 @@
                  strict=None, timeout=None, proxy_info=None, ca_certs=None,
                  disable_ssl_certificate_validation=False,
                  ssl_version=None):
-        httplib.HTTPSConnection.__init__(self, host, port=port,
+        http.client.HTTPSConnection.__init__(self, host, port=port,  # NOTE(review): 'strict' kwarg on the continuation line was removed in Python 3.4 -- this call will raise TypeError; verify
                                          key_file=key_file,
                                          cert_file=cert_file, strict=strict,
                                          timeout=timeout)
@@ -1260,7 +1261,7 @@
         self.connections = {}
         # The location of the cache, for now a directory
         # where cached responses are held.
-        if cache and isinstance(cache, basestring):
+        if cache and isinstance(cache, str):
             self.cache = FileCache(cache)
         else:
             self.cache = cache
@@ -1315,7 +1316,7 @@
         challenges = _parse_www_authenticate(response, 'www-authenticate')
         for cred in self.credentials.iter(host):
             for scheme in AUTH_SCHEME_ORDER:
-                if challenges.has_key(scheme):
+                if scheme in challenges:
                     yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
 
     def add_credentials(self, name, password, domain=""):
@@ -1361,7 +1362,7 @@
                     raise
                 if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
                     continue  # retry on potentially transient socket errors
-            except httplib.HTTPException:
+            except http.client.HTTPException:
                 # Just because the server closed the connection doesn't apparently mean
                 # that the server didn't send a response.
                 if hasattr(conn, 'sock') and conn.sock is None:
@@ -1378,7 +1379,7 @@
                     continue
             try:
                 response = conn.getresponse()
-            except httplib.BadStatusLine:
+            except http.client.BadStatusLine:
                 # If we get a BadStatusLine on the first try then that means
                 # the connection just went stale, so retry regardless of the
                 # number of RETRIES set.
@@ -1391,7 +1392,7 @@
                 else:
                     conn.close()
                     raise
-            except (socket.error, httplib.HTTPException):
+            except (socket.error, http.client.HTTPException):
                 if i < RETRIES-1:
                     conn.close()
                     conn.connect()
@@ -1443,29 +1444,29 @@
                 # Pick out the location header and basically start from the beginning
                 # remembering first to strip the ETag header and decrement our 'depth'
                 if redirections:
-                    if not response.has_key('location') and response.status != 300:
+                    if 'location' not in response and response.status != 300:
                         raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
                     # Fix-up relative redirects (which violate an RFC 2616 MUST)
-                    if response.has_key('location'):
+                    if 'location' in response:
                         location = response['location']
                         (scheme, authority, path, query, fragment) = parse_uri(location)
                         if authority == None:
                             response['location'] = urljoin(absolute_uri, location)
                     if response.status == 301 and method in ["GET", "HEAD"]:
                         response['-x-permanent-redirect-url'] = response['location']
-                        if not response.has_key('content-location'):
+                        if 'content-location' not in response:
                             response['content-location'] = absolute_uri
                         _updateCache(headers, response, content, self.cache, cachekey)
-                    if headers.has_key('if-none-match'):
+                    if 'if-none-match' in headers:
                         del headers['if-none-match']
-                    if headers.has_key('if-modified-since'):
+                    if 'if-modified-since' in headers:
                         del headers['if-modified-since']
                     if 'authorization' in headers and not self.forward_authorization_headers:
                         del headers['authorization']
-                    if response.has_key('location'):
+                    if 'location' in response:
                         location = response['location']
                         old_response = copy.deepcopy(response)
-                        if not old_response.has_key('content-location'):
+                        if 'content-location' not in old_response:
                             old_response['content-location'] = absolute_uri
                         redirect_method = method
                         if response.status in [302, 303]:
@@ -1480,7 +1481,7 @@
                     raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
             elif response.status in [200, 203] and method in ["GET", "HEAD"]:
                 # Don't cache 206's since we aren't going to handle byte range requests
-                if not response.has_key('content-location'):
+                if 'content-location' not in response:
                     response['content-location'] = absolute_uri
                 _updateCache(headers, response, content, self.cache, cachekey)
 
@@ -1522,7 +1523,7 @@
             else:
                 headers = self._normalize_headers(headers)
 
-            if not headers.has_key('user-agent'):
+            if 'user-agent' not in headers:
                 headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
 
             uri = iri2uri(uri)
@@ -1593,7 +1594,7 @@
             else:
                 cachekey = None
 
-            if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
+            if method in self.optimistic_concurrency_methods and self.cache and 'etag' in info and not self.ignore_etag and 'if-match' not in headers:
                 # http://www.w3.org/1999/04/Editing/
                 headers['if-match'] = info['etag']
 
@@ -1614,7 +1615,7 @@
                         break
 
             if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
-                if info.has_key('-x-permanent-redirect-url'):
+                if '-x-permanent-redirect-url' in info:
                     # Should cached permanent redirects be counted in our redirection count? For now, yes.
                     if redirections <= 0:
                         raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "")
@@ -1644,9 +1645,9 @@
                         return (response, content)
 
                     if entry_disposition == "STALE":
-                        if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
+                        if 'etag' in info and not self.ignore_etag and not 'if-none-match' in headers:
                             headers['if-none-match'] = info['etag']
-                        if info.has_key('last-modified') and not 'last-modified' in headers:
+                        if 'last-modified' in info and not 'last-modified' in headers:
                             headers['if-modified-since'] = info['last-modified']
                     elif entry_disposition == "TRANSPARENT":
                         pass
@@ -1676,7 +1677,7 @@
                     content = new_content
             else:
                 cc = _parse_cache_control(headers)
-                if cc.has_key('only-if-cached'):
+                if 'only-if-cached' in cc:
                     info['status'] = '504'
                     response = Response(info)
                     content = ""
@@ -1717,7 +1718,7 @@
         """
         hostname, port = splitport(authority)
         proxy_info = self.proxy_info
-        if callable(proxy_info):
+        if callable(proxy_info):  # collections.Callable was removed in Python 3.10; builtin callable() is the py3 idiom
             proxy_info = proxy_info(scheme)
 
         if (hasattr(proxy_info, 'applies_to')
@@ -1746,7 +1747,7 @@
     def __init__(self, info):
         # info is either an email.Message or
         # an httplib.HTTPResponse object.
-        if isinstance(info, httplib.HTTPResponse):
+        if isinstance(info, http.client.HTTPResponse):
             for key, value in info.getheaders():
                 self[key.lower()] = value
             self.status = info.status
@@ -1754,11 +1755,11 @@
             self.reason = info.reason
             self.version = info.version
         elif isinstance(info, email_message.Message):
-            for key, value in info.items():
+            for key, value in list(info.items()):
                 self[key.lower()] = value
             self.status = int(self['status'])
         else:
-            for key, value in info.items():
+            for key, value in list(info.items()):
                 self[key.lower()] = value
             self.status = int(self.get('status', self.status))
             self.reason = self.get('reason', self.reason)
--- httplib2\iri2uri.py	(original)
+++ httplib2\iri2uri.py	(refactored)
@@ -13,9 +13,9 @@
 """
 
 # calibre Python 3 compatibility.
-import urlparse
+import urllib.parse
 import six
-from six import text_type as unicode
+from six import text_type  # do not rebind builtin 'str'; on py3 text_type is str anyway
 
 # Convert an IRI to a URI following the rules in RFC 3987
 #
@@ -68,13 +68,13 @@
     """Convert an IRI to a URI. Note that IRIs must be
     passed in a unicode strings. That is, do not utf-8 encode
     the IRI before passing it into the function."""
-    if isinstance(uri ,unicode):
-        (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
+    if isinstance(uri ,str):
+        (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri)
         authority = authority.encode('idna')
         # For each character in 'ucschar' or 'iprivate'
         #  1. encode as utf-8
         #  2. then %-encode each octet of that utf-8
-        uri = urlparse.urlunsplit((scheme, authority, path, query, fragment))
+        uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment))
         uri = "".join([encode(c) for c in uri])
     return uri
 
@@ -86,26 +86,26 @@
         def test_uris(self):
             """Test that URIs are invariant under the transformation."""
             invariant = [
-                u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
-                u"http://www.ietf.org/rfc/rfc2396.txt",
-                u"ldap://[2001:db8::7]/c=GB?objectClass?one",
-                u"mailto:John.Doe@example.com",
-                u"news:comp.infosystems.www.servers.unix",
-                u"tel:+1-816-555-1212",
-                u"telnet://192.0.2.16:80/",
-                u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
+                "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+                "http://www.ietf.org/rfc/rfc2396.txt",
+                "ldap://[2001:db8::7]/c=GB?objectClass?one",
+                "mailto:John.Doe@example.com",
+                "news:comp.infosystems.www.servers.unix",
+                "tel:+1-816-555-1212",
+                "telnet://192.0.2.16:80/",
+                "urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
             for uri in invariant:
                 self.assertEqual(uri, iri2uri(uri))
 
         def test_iri(self):
             """ Test that the right type of escaping is done for each part of the URI."""
-            self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
-            self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
-            self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
-            self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
-            self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
+            self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri("http://\N{COMET}.com/\N{COMET}"))
+            self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri("http://bitworking.org/?fred=\N{COMET}"))
+            self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri("http://bitworking.org/#\N{COMET}"))
+            self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}"))
+            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
+            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
+            self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
 
     unittest.main()
 
--- httplib2\socks.py	(original)
+++ httplib2\socks.py	(refactored)
@@ -369,7 +369,7 @@
         wrote_host_header = False
         wrote_auth_header = False
         if self.__proxy[6] != None:
-            for key, val in self.__proxy[6].items():
+            for key, val in list(self.__proxy[6].items()):
                 headers += [key, ": ", val, "\r\n"]
                 wrote_host_header = (key.lower() == "host")
                 wrote_auth_header = (key.lower() == "proxy-authorization")
@@ -409,7 +409,7 @@
         To select the proxy server use setproxy().
         """
         # Do a minimal input check first
-        if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int):
+        if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], str)) or (type(destpair[1]) != int):
             raise GeneralProxyError((5, _generalerrors[5]))
         if self.__proxy[0] == PROXY_TYPE_SOCKS5:
             if self.__proxy[2] != None:
