--- .\action.py	(original)
+++ .\action.py	(refactored)
@@ -179,7 +179,7 @@
                                          triggered=triggered_action)
         else:
             # No user submenu so just have the action directly
-            user_name = self.users.keys()[0]
+            user_name = list(self.users.keys())[0]
             unique_name = 'User "%s" %s' % (user_name, title)
             if action in ['add','remove']:
                 triggered_action = partial(self.add_or_remove_to_shelf, action, user_name)
@@ -201,7 +201,7 @@
                 user_sub_menu.setStatusTip(user_sub_menu.title())
                 self.create_sub_menu_for_shelves_action(user_sub_menu, user_name, title, action)
         else:
-            user_name = self.users.keys()[0]
+            user_name = list(self.users.keys())[0]
             self.create_sub_menu_for_shelves_action(sub_menu, user_name, title, action)
 
     def create_sub_menu_for_shelves_action(self, parent_menu, user_name, title, action):
@@ -236,7 +236,7 @@
                                    'Add book to shelves represented by your tags', is_download=False,
                                    unique_name='User "%s" Upload tags as shelves' % (user_name,))
         else:
-            user_name = self.users.keys()[0]
+            user_name = list(self.users.keys())[0]
             self.create_shelves_tags_action(parent_menu, user_name, 'Download tags from shelves', 'images/tags_download.png',
                                    'Download shelves your book is on as tags', is_download=True)
             self.create_shelves_tags_action(parent_menu, user_name, 'Upload tags as shelves', 'images/tags_upload.png',
@@ -810,7 +810,7 @@
                 current_shelves = []
             calibre_tags = self._get_calibre_tags_for_book(db, calibre_id, tag_column, tag_column_label, is_multiple)
             for calibre_tag in calibre_tags:
-                for shelf_name, mapped_tag_values in tag_mappings.iteritems():
+                for shelf_name, mapped_tag_values in tag_mappings.items():
                     if calibre_tag not in mapped_tag_values:
                         continue
                     if len(mapped_tag_values) > 1:
--- .\common_utils.py	(original)
+++ .\common_utils.py	(refactored)
@@ -54,7 +54,7 @@
     def convert_qvariant(x):
         vt = x.type()
         if vt == x.String:
-            return unicode(x.toString())
+            return str(x.toString())
         if vt == x.List:
             return [convert_qvariant(i) for i in x.toList()]
         return x.toPyObject()
@@ -453,7 +453,7 @@
         self.setCurrentIndex(selected_idx)
 
     def selected_value(self):
-        return unicode(self.currentText())
+        return str(self.currentText())
 
 
 class KeyValueComboBox(QComboBox):
@@ -466,7 +466,7 @@
     def populate_combo(self, selected_key):
         self.clear()
         selected_idx = idx = -1
-        for key, value in self.values.iteritems():
+        for key, value in self.values.items():
             idx = idx + 1
             self.addItem(value)
             if key == selected_key:
@@ -474,8 +474,8 @@
         self.setCurrentIndex(selected_idx)
 
     def selected_key(self):
-        for key, value in self.values.iteritems():
-            if value == unicode(self.currentText()).strip():
+        for key, value in self.values.items():
+            if value == str(self.currentText()).strip():
                 return key
 
 
@@ -725,7 +725,7 @@
     def _populate_settings(self):
         self.keys_list.clear()
         ns_prefix = self._get_ns_prefix()
-        keys = sorted([k[len(ns_prefix):] for k in self.db.prefs.iterkeys()
+        keys = sorted([k[len(ns_prefix):] for k in self.db.prefs.keys()
                        if k.startswith(ns_prefix)])
         for key in keys:
             self.keys_list.addItem(key)
@@ -736,7 +736,7 @@
         if new_row < 0:
             self.value_text.clear()
             return
-        key = unicode(self.keys_list.currentItem().text())
+        key = str(self.keys_list.currentItem().text())
         val = self.db.prefs.get_namespaced(self.namespace, key, '')
         self.value_text.setPlainText(self.db.prefs.to_raw(val))
     
@@ -752,8 +752,8 @@
         if not confirm(message, self.namespace+'_clear_settings', self):
             return
         
-        val = self.db.prefs.raw_to_object(unicode(self.value_text.toPlainText()))
-        key = unicode(self.keys_list.currentItem().text())
+        val = self.db.prefs.raw_to_object(str(self.value_text.toPlainText()))
+        key = str(self.keys_list.currentItem().text())
         self.db.prefs.set_namespaced(self.namespace, key, val)
         
         restart = prompt_for_restart(self, 'Settings changed',
@@ -773,7 +773,7 @@
             return
         
         ns_prefix = self._get_ns_prefix()
-        keys = [k for k in self.db.prefs.iterkeys() if k.startswith(ns_prefix)]
+        keys = [k for k in self.db.prefs.keys() if k.startswith(ns_prefix)]
         for k in keys:
             del self.db.prefs[k]
         self._populate_settings()
--- .\config.py	(original)
+++ .\config.py	(refactored)
@@ -132,13 +132,13 @@
             prints('Migrating Goodreads Sync schema from prior to 1.6')
         tag_mapping_column = 'tags'
         users_settings = plugin_prefs[STORE_USERS]
-        for user_info in users_settings.values():
-            if KEY_TAG_MAPPINGS in user_info.keys():
+        for user_info in list(users_settings.values()):
+            if KEY_TAG_MAPPINGS in list(user_info.keys()):
                 tag_mappings = user_info[KEY_TAG_MAPPINGS]
                 for shelf in user_info[KEY_SHELVES]:
                     shelf[KEY_TAG_MAPPINGS] = tag_mappings.get(shelf['name'], [])
                 del user_info[KEY_TAG_MAPPINGS]
-            if KEY_TAG_MAPPING_COLUMN in user_info.keys():
+            if KEY_TAG_MAPPING_COLUMN in list(user_info.keys()):
                 tag_mapping_column = user_info.get(KEY_TAG_MAPPING_COLUMN, tag_mapping_column)
                 del user_info[KEY_TAG_MAPPING_COLUMN]
             for shelf in user_info[KEY_SHELVES]:
@@ -161,7 +161,7 @@
         if DEBUG:
             prints('Migrating Goodreads Sync schema from prior to 1.68')
         users_settings = plugin_prefs[STORE_USERS]
-        for user_info in users_settings.values():
+        for user_info in list(users_settings.values()):
             for shelf in user_info[KEY_SHELVES]:
                 shelf[KEY_ADD_REVIEW_TEXT] = False
                 shelf[KEY_SYNC_REVIEW_TEXT] = False
@@ -214,7 +214,7 @@
 
     def selected_value(self):
         for value, text in self.VALUES:
-            if text == unicode(self.currentText()).strip():
+            if text == str(self.currentText()).strip():
                 return value
 
 
@@ -293,14 +293,14 @@
     def populate_combo(self, selected_key):
         self.clear()
         selected_text = self.values_map[selected_key]
-        for text in self.values_map.values():
+        for text in list(self.values_map.values()):
             self.addItem(text)
         idx = self.findText(selected_text)
         self.setCurrentIndex(idx)
 
     def selected_key(self):
-        text = unicode(self.currentText()).strip()
-        for key, value in self.values_map.iteritems():
+        text = str(self.currentText()).strip()
+        for key, value in self.values_map.items():
             if value == text:
                 return key
 
@@ -337,7 +337,7 @@
 
     @property
     def shelf_name(self):
-        return unicode(self.name_ledit.text()).strip().lower().replace('  ', ' ').replace(' ','-')
+        return str(self.name_ledit.text()).strip().lower().replace('  ', ' ').replace(' ','-')
 
     @property
     def is_featured(self):
@@ -400,9 +400,9 @@
         combo = CustomColumnComboBox(self, self.custom_columns, column_key, initial_items=['tags'])
         combo.currentIndexChanged.connect(partial(self.column_type_changed, combo, row))
         self.setCellWidget(row, 1, combo)
-        print("populate_table_row - column_key=", column_key)
+        print("populate_table_row - column_key=", column_key)
         try:
-            print("populate_table_row - column=", self.custom_columns[column_key])
+            print("populate_table_row - column=", self.custom_columns[column_key])
         except:
             pass
         if self.is_bool_custom_column(column_key):
@@ -508,13 +508,13 @@
             sync_action['action'] = self.cellWidget(row, 0).get_selected_action()
             column_key = sync_action['column'] = self.cellWidget(row, 1).get_selected_column()
             if self.is_bool_custom_column(column_key) or self.is_enumeration_custom_column(column_key):
-                value = unicode(self.cellWidget(row, 2).currentText()).strip()
+                value = str(self.cellWidget(row, 2).currentText()).strip()
             elif self.is_datetime_custom_column(column_key):
                 value = self.cellWidget(row, 2).selected_key()
             elif column_key == 'tags' or self.custom_columns[column_key]['is_multiple'] is not None:
-                value = unicode(self.cellWidget(row, 2).text()).strip()
+                value = str(self.cellWidget(row, 2).text()).strip()
             else:
-                value = unicode(self.item(row, 2).text()).strip()
+                value = str(self.item(row, 2).text()).strip()
             if not value:
                 continue
             sync_action['value'] = value
@@ -767,12 +767,12 @@
         for row in range(self.rowCount()):
             shelf = self.shelves[row]
             shelf['active'] = self.item(row, 0).checkState() == Qt.Checked
-            shelf['book_count'] = unicode(self.item(row, 2).text()).strip()
+            shelf['book_count'] = str(self.item(row, 2).text()).strip()
             shelf[KEY_TAG_MAPPINGS] = self._get_tags_data(row)
         return self.shelves
 
     def _get_tags_data(self, row):
-        tags_text = unicode(self.cellWidget(row, 3).text()).strip()
+        tags_text = str(self.cellWidget(row, 3).text()).strip()
         tag_values = tags_text.split(',')
         tags_list = []
         for tag in tag_values:
@@ -1028,7 +1028,7 @@
     def get_custom_columns(self, column_types):
         custom_columns = self.plugin_action.gui.library_view.model().custom_columns
         available_columns = {}
-        for key, column in custom_columns.iteritems():
+        for key, column in custom_columns.items():
             typ = column['datatype']
             if typ in column_types:
                 available_columns[key] = column
@@ -1050,7 +1050,7 @@
         if self._user_combo.count() == 0:
             self.user_name = None
         else:
-            self.user_name = unicode(self._user_combo.currentText()).strip()
+            self.user_name = str(self._user_combo.currentText()).strip()
         is_controls_enabled = is_sync_shelf_enabled = is_delete_enabled = False
         shelves = []
         if self.user_name:
@@ -1084,9 +1084,9 @@
         if not ok:
             # Operation cancelled
             return
-        new_user_name = unicode(new_user_name).strip()
+        new_user_name = str(new_user_name).strip()
         # Verify it does not clash with any other users in the list
-        for user_name in self.users.keys():
+        for user_name in list(self.users.keys()):
             if user_name.lower() == new_user_name.lower():
                 return error_dialog(self, 'Add Failed', 'A user with the same name already exists', show=True)
         user_info = {}
--- .\core.py	(original)
+++ .\core.py	(refactored)
@@ -8,7 +8,7 @@
 __copyright__ = '2011, Grant Drake <grant.drake@gmail.com>, 2015-2019 additions by David Forrester <davidfor@internode.on.net>'
 __docformat__ = 'restructuredtext en'
 
-import re, urllib, urlparse, json, os, traceback, collections
+import re, urllib.request, urllib.parse, urllib.error, json, os, traceback, collections
 import xml.etree.ElementTree as et
 
 try:
@@ -206,7 +206,7 @@
         response, content = self._oauth_request_get(oauth_client, REQUEST_TOKEN_URL)
         if not response:
             return None, None
-        request_token = dict(urlparse.parse_qsl(content))
+        request_token = dict(urllib.parse.parse_qsl(content))
         return (request_token['oauth_token'], request_token['oauth_token_secret'])
 
     def get_user_token_secret(self, oauth_token, oauth_secret):
@@ -216,7 +216,7 @@
         response, content = self._oauth_request_post(oauth_client, ACCESS_TOKEN_URL)
         if not response:
             return None, None
-        access_token = dict(urlparse.parse_qsl(content))
+        access_token = dict(urllib.parse.parse_qsl(content))
         return (access_token['oauth_token'], access_token['oauth_token_secret'])
 
     def get_goodreads_user_id(self, oauth_token, oauth_secret):
@@ -236,7 +236,7 @@
         # Creates a shelf
         oauth_client = self.create_oauth_client(user_name)
         url = '%s/user_shelves.xml' % cfg.URL_HTTPS
-        body = urllib.urlencode({
+        body = urllib.parse.urlencode({
                                  'user_shelf[name]': str(new_shelf_name).lower(),
                                 'user_shelf[featured]': str(is_featured).lower(),
                                 'user_shelf[exclusive_flag]': str(is_exclusive).lower(),
@@ -308,7 +308,7 @@
         if action == 'remove':
             body_info['a'] = 'remove'
             success_status = '200'
-        body = urllib.urlencode(body_info)
+        body = urllib.parse.urlencode(body_info)
         _response, content = self._oauth_request_post(oauth_client, url, body, success_status)
         if _response:
             if action == 'add':
@@ -327,7 +327,7 @@
         if review_text:
             body_info['review[review_text]'] = review_text
         success_status = '201'
-        body = urllib.urlencode(body_info)
+        body = urllib.parse.urlencode(body_info)
         response = self._oauth_request_post(oauth_client, url, body, success_status)
         if response:
             return True
@@ -349,7 +349,7 @@
             body_info['review[review]'] = review_text
 
         success_status = '200'
-        body = urllib.urlencode(body_info)
+        body = urllib.parse.urlencode(body_info)
         response, _content = self._oauth_request_post(oauth_client, url, body, success_status, method='PUT')
         if response:
             return True
@@ -371,7 +371,7 @@
         if comment and len(comment) > 0:
             body_info['user_status[body]'] = comment
         success_status = '201'
-        body = urllib.urlencode(body_info)
+        body = urllib.parse.urlencode(body_info)
         response, _content = self._oauth_request_post(oauth_client, url, body, success_status)
         debug_print('HttpHelper::update_status: response=%s' % (response, ))
         debug_print('HttpHelper::update_status: _content=%s' % (_content, ))
@@ -503,7 +503,7 @@
         elif title and not authors:
             scope = 'search=title&'
             query = title
-        query = urllib.quote_plus(query.strip().encode('utf-8')).replace('++', '+')
+        query = urllib.parse.quote_plus(query.strip().encode('utf-8')).replace('++', '+')
         search_books = []
         url = '%s/search/search.xml?%spage=1&q=%s' % (cfg.URL_HTTPS, scope, query)
         (response, content) = self._request_get(url)
@@ -716,7 +716,7 @@
             br = browser()
             raw = br.open_novisit(url, timeout=20).read().strip()
         except Exception as e:
             if callable(getattr(e, 'getcode', None)):
                 error_code = e.getcode()
             else:
                 error_code = None
@@ -753,7 +753,7 @@
             for idx, data_row_node in enumerate(edition_data_node.xpath('div[@class="dataRow"]')):
                 if idx == 0:
                     continue # Will be the title
-                text = tostring(data_row_node, method='text', encoding=unicode).strip()
+                text = tostring(data_row_node, method='text', encoding=str).strip()
                 if text.startswith('Published'):
                     continue
                 goodreads_edition_book['goodreads_edition'] = text
--- .\dialogs.py	(original)
+++ .\dialogs.py	(refactored)
@@ -7,7 +7,7 @@
 __copyright__ = '2011, Grant Drake <grant.drake@gmail.com>, 2016-2018 additions by David Forrester <davidfor@internode.on.net>'
 __docformat__ = 'restructuredtext en'
 
-import re, urllib, collections, copy
+import re, urllib.request, urllib.parse, urllib.error, collections, copy
 from functools import partial
 try:
     from PyQt5.Qt import (Qt, QVBoxLayout, QLabel, QLineEdit, QApplication,
@@ -46,7 +46,7 @@
     and represent urls on the goodreads site.
     '''
     if event.mimeData().hasFormat('text/uri-list'):
-        urls = [unicode(u.toString()).strip() for u in event.mimeData().urls()]
+        urls = [str(u.toString()).strip() for u in event.mimeData().urls()]
         return [u for u in urls if u.startswith(SHOW_BOOK_URL_PREFIX) or u.startswith(SHOW_BOOK_URL_PREFIX2)]
 
 
@@ -128,7 +128,7 @@
 
     def paste_url(self):
         cb = QApplication.instance().clipboard()
-        txt = unicode(cb.text()).strip()
+        txt = str(cb.text()).strip()
         if txt:
             self.add_url_to_grid(txt)
 
@@ -245,7 +245,7 @@
         query = title
         if author:
             query = query + ' ' + get_searchable_author(author)
-        query = urllib.quote_plus(query.strip().encode('utf-8')).replace('++', '+')
+        query = urllib.parse.quote_plus(query.strip().encode('utf-8')).replace('++', '+')
         url = '%s/search?search_type=books&search[query]=%s' % (cfg.URL, query)
         if not isinstance(url, bytes):
             url = url.encode('utf-8')
@@ -333,7 +333,7 @@
     def _get_selected_shelf_names(self):
         values = []
         for item in self.values_list.selectedItems():
-            values.append(unicode(item.text()))
+            values.append(str(item.text()))
         return values
 
     def _accept_clicked(self):
@@ -463,7 +463,7 @@
             for calibre_book in calibre_books:
                 if calibre_book['calibre_id'] == calibre_id:
                     calibre_book['calibre_reading_progress'] = convert_qvariant(self.item(row, 4).data(Qt.DisplayRole))
-                    calibre_book['status_comment_text'] = unicode(convert_qvariant((self.item(row, 5).data(Qt.DisplayRole))))
+                    calibre_book['status_comment_text'] = str(convert_qvariant((self.item(row, 5).data(Qt.DisplayRole))))
                     if not self.isColumnHidden(6):
                         calibre_book['calibre_rating'] = convert_qvariant(self.item(row, 6).data(Qt.DisplayRole))
                     if not self.isColumnHidden(7):
@@ -472,7 +472,7 @@
                         if not qtdate == '':
                             calibre_book['calibre_date_read'] = qt_to_dt(qtdate, as_utc=False)
                     if not self.isColumnHidden(8):
-                        calibre_book['calibre_review_text'] = unicode(convert_qvariant((self.item(row, 8).data(Qt.DisplayRole))))
+                        calibre_book['calibre_review_text'] = str(convert_qvariant((self.item(row, 8).data(Qt.DisplayRole))))
                     break
 
     def item_selection_changed(self):
@@ -639,7 +639,7 @@
         # Currently we will only match on ISBN. Maybe in future will do title/author too
         if not isbn:
             return None
-        for goodreads_id, goodreads_book in goodreads_shelf_books.iteritems():
+        for goodreads_id, goodreads_book in goodreads_shelf_books.items():
             if goodreads_book['goodreads_isbn'] == isbn:
                 return goodreads_id
 
@@ -951,7 +951,7 @@
 
     def paste_url(self):
         cb = QApplication.instance().clipboard()
-        txt = unicode(cb.text()).strip()
+        txt = str(cb.text()).strip()
         if txt:
             self.add_url_to_grid(txt)
 
@@ -1074,7 +1074,7 @@
         query = title
         if author:
             query = query + ' ' + get_searchable_author(author)
-        query = urllib.quote_plus(query.strip().encode('utf-8')).replace('++', '+')
+        query = urllib.parse.quote_plus(query.strip().encode('utf-8')).replace('++', '+')
         url = '%s/search?search_type=books&search[query]=%s' % (cfg.URL, query)
         if not isinstance(url, bytes):
             url = url.encode('utf-8')
@@ -1279,8 +1279,8 @@
     def search_click(self):
         self.search_button.setEnabled(False)
         try:
-            title = unicode(self.title_ledit.text()).strip()
-            author = unicode(self.author_ledit.text()).strip()
+            title = str(self.title_ledit.text()).strip()
+            author = str(self.author_ledit.text()).strip()
             calibre_books = self.search_calibre_fn(title, author)
             self.pick_book_table.populate_table(calibre_books)
         finally:
@@ -1413,7 +1413,7 @@
                         qtdate = convert_qvariant(self.item(row, 5).data(Qt.DisplayRole))
                         calibre_book['calibre_date_read'] = qt_to_dt(qtdate, as_utc=False)
                     if not self.isColumnHidden(6):
-                        calibre_book['calibre_review_text'] = unicode(convert_qvariant((self.item(row, 6).data(Qt.DisplayRole))))
+                        calibre_book['calibre_review_text'] = str(convert_qvariant((self.item(row, 6).data(Qt.DisplayRole))))
                     break
 
     def show_columns(self, is_rating_visible, is_dateread_visible, is_reviewtext_visible):
@@ -1621,7 +1621,7 @@
     def _get_selected_shelf_names(self):
         values = []
         for item in self.values_list.selectedItems():
-            values.append(unicode(item.text()))
+            values.append(str(item.text()))
         return values
 
     def update_book_status(self, calibre_book):
@@ -1648,7 +1648,7 @@
         # Currently we will only match on ISBN. Maybe in future will do title/author too
         if not isbn:
             return None
-        for goodreads_id, goodreads_book in goodreads_shelf_books.iteritems():
+        for goodreads_id, goodreads_book in goodreads_shelf_books.items():
             if goodreads_book['goodreads_isbn'] == isbn:
                 return goodreads_id
 
@@ -2142,7 +2142,7 @@
         # Flatten out the structure, from its 1:M to a 1:1
         flattened_books = []
         autolinked_calibre_ids = []
-        for goodreads_id, goodreads_book in goodreads_books.iteritems():
+        for goodreads_id, goodreads_book in goodreads_books.items():
             calibre_ids = gr_cache.get(goodreads_id, [])
             is_auto_link_by_isbn = False
             if len(calibre_ids) == 0 and goodreads_book['goodreads_isbn']:
--- .\httplib2\__init__.py	(original)
+++ .\httplib2\__init__.py	(refactored)
@@ -1,4 +1,4 @@
-from __future__ import generators
+
 """
 httplib2
 
@@ -31,12 +31,12 @@
-import email.Utils
-import email.Message
-import email.FeedParser
+import email.utils
+import email.message
+import email.feedparser
-import StringIO
+import io
 import gzip
 import zlib
-import httplib
-import urlparse
-import urllib
+import http.client
+import urllib.parse
+import urllib.request, urllib.parse, urllib.error
 import base64
 import os
 import copy
@@ -44,6 +44,7 @@
 import time
 import random
 import errno
+import collections
 try:
     from hashlib import sha1 as _sha, md5 as _md5
 except ImportError:
@@ -60,7 +61,7 @@
     from httplib2 import socks
 except ImportError:
     try:
-        import socks
+        from . import socks
     except (ImportError, AttributeError):
         socks = None
 
@@ -111,14 +112,14 @@
                 "the ssl module installed. To avoid this error, install "
                 "the ssl module, or explicity disable validation.")
     ssl_sock = socket.ssl(sock, key_file, cert_file)
-    return httplib.FakeSocket(sock, ssl_sock)
+    return http.client.FakeSocket(sock, ssl_sock)
 
 if ssl is None:
     _ssl_wrap_socket = _ssl_wrap_socket_unsupported
 
 
 if sys.version_info >= (2,3):
-    from iri2uri import iri2uri
+    from .iri2uri import iri2uri
 else:
     def iri2uri(uri):
         return uri
@@ -152,11 +153,11 @@
 def HTTPResponse__getheaders(self):
     """Return list of (header, value) tuples."""
     if self.msg is None:
-        raise httplib.ResponseNotReady()
-    return self.msg.items()
-
-if not hasattr(httplib.HTTPResponse, 'getheaders'):
-    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
+        raise http.client.ResponseNotReady()
+    return list(self.msg.items())
+
+if not hasattr(http.client.HTTPResponse, 'getheaders'):
+    http.client.HTTPResponse.getheaders = HTTPResponse__getheaders
 
 # All exceptions raised here derive from HttpLib2Error
 class HttpLib2Error(Exception): pass
@@ -229,7 +230,7 @@
 def _get_end2end_headers(response):
     hopbyhop = list(HOP_BY_HOP)
     hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
-    return [header for header in response.keys() if header not in hopbyhop]
+    return [header for header in list(response.keys()) if header not in hopbyhop]
 
 URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
 
@@ -277,7 +278,7 @@
                 filename = filename.encode('idna')
     except UnicodeError:
         pass
-    if isinstance(filename,unicode):
+    if isinstance(filename,str):
         filename=filename.encode('utf-8')
     filemd5 = _md5(filename).hexdigest()
     filename = re_url_scheme.sub("", filename)
@@ -290,11 +291,11 @@
 
 NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
 def _normalize_headers(headers):
-    return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip())  for (key, value) in headers.iteritems()])
+    return dict([ (key.lower(), NORMALIZE_SPACE.sub(' ', value).strip())  for (key, value) in headers.items()])
 
 def _parse_cache_control(headers):
     retval = {}
-    if headers.has_key('cache-control'):
+    if 'cache-control' in headers:
         parts =  headers['cache-control'].split(',')
         parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
         parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
@@ -319,7 +320,7 @@
     """Returns a dictionary of dictionaries, one dict
     per auth_scheme."""
     retval = {}
-    if headers.has_key(headername):
+    if headername in headers:
         try:
 
             authenticate = headers[headername].strip()
@@ -379,26 +380,26 @@
     cc = _parse_cache_control(request_headers)
     cc_response = _parse_cache_control(response_headers)
 
-    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
+    if 'pragma' in request_headers and request_headers['pragma'].lower().find('no-cache') != -1:
         retval = "TRANSPARENT"
         if 'cache-control' not in request_headers:
             request_headers['cache-control'] = 'no-cache'
-    elif cc.has_key('no-cache'):
+    elif 'no-cache' in cc:
         retval = "TRANSPARENT"
-    elif cc_response.has_key('no-cache'):
+    elif 'no-cache' in cc_response:
         retval = "STALE"
-    elif cc.has_key('only-if-cached'):
+    elif 'only-if-cached' in cc:
         retval = "FRESH"
-    elif response_headers.has_key('date'):
+    elif 'date' in response_headers:
-        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
+        date = calendar.timegm(email.utils.parsedate_tz(response_headers['date']))
         now = time.time()
         current_age = max(0, now - date)
-        if cc_response.has_key('max-age'):
+        if 'max-age' in cc_response:
             try:
                 freshness_lifetime = int(cc_response['max-age'])
             except ValueError:
                 freshness_lifetime = 0
-        elif response_headers.has_key('expires'):
+        elif 'expires' in response_headers:
-            expires = email.Utils.parsedate_tz(response_headers['expires'])
+            expires = email.utils.parsedate_tz(response_headers['expires'])
             if None == expires:
                 freshness_lifetime = 0
@@ -406,12 +407,12 @@
                 freshness_lifetime = max(0, calendar.timegm(expires) - date)
         else:
             freshness_lifetime = 0
-        if cc.has_key('max-age'):
+        if 'max-age' in cc:
             try:
                 freshness_lifetime = int(cc['max-age'])
             except ValueError:
                 freshness_lifetime = 0
-        if cc.has_key('min-fresh'):
+        if 'min-fresh' in cc:
             try:
                 min_fresh = int(cc['min-fresh'])
             except ValueError:
@@ -427,7 +428,7 @@
         encoding = response.get('content-encoding', None)
         if encoding in ['gzip', 'deflate']:
             if encoding == 'gzip':
-                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
+                content = gzip.GzipFile(fileobj=io.BytesIO(new_content)).read()
             if encoding == 'deflate':
                 content = zlib.decompress(content, -zlib.MAX_WBITS)
             response['content-length'] = str(len(content))
@@ -443,11 +444,11 @@
     if cachekey:
         cc = _parse_cache_control(request_headers)
         cc_response = _parse_cache_control(response_headers)
-        if cc.has_key('no-store') or cc_response.has_key('no-store'):
+        if 'no-store' in cc or 'no-store' in cc_response:
             cache.delete(cachekey)
         else:
-            info = email.Message.Message()
+            info = email.message.Message()
-            for key, value in response_headers.iteritems():
+            for key, value in response_headers.items():
                 if key not in ['status','content-encoding','transfer-encoding']:
                     info[key] = value
 
@@ -579,7 +580,7 @@
         self.challenge['nc'] += 1
 
     def response(self, response, content):
-        if not response.has_key('authentication-info'):
+        if 'authentication-info' not in response:
             challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
             if 'true' == challenge.get('stale'):
                 self.challenge['nonce'] = challenge['nonce']
@@ -588,7 +589,7 @@
         else:
             updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
 
-            if updated_challenge.has_key('nextnonce'):
+            if 'nextnonce' in updated_challenge:
                 self.challenge['nonce'] = updated_challenge['nextnonce']
                 self.challenge['nc'] = 1
         return False
@@ -680,7 +681,7 @@
 
 class GoogleLoginAuthentication(Authentication):
     def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        from urllib import urlencode
+        from urllib.parse import urlencode
         Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
         challenge = _parse_www_authenticate(response, 'www-authenticate')
         service = challenge['googlelogin'].get('service', 'xapi')
@@ -851,7 +852,7 @@
     """
     Construct a ProxyInfo from a URL (such as http_proxy env var)
     """
-    url = urlparse.urlparse(url)
+    url = urllib.parse.urlparse(url)
     username = None
     password = None
     port = None
@@ -897,7 +898,7 @@
     return pi
 
 
-class HTTPConnectionWithTimeout(httplib.HTTPConnection):
+class HTTPConnectionWithTimeout(http.client.HTTPConnection):
     """
     HTTPConnection subclass that supports timeouts
 
@@ -908,7 +909,7 @@
     """
 
     def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
-        httplib.HTTPConnection.__init__(self, host, port, strict)
+        http.client.HTTPConnection.__init__(self, host, port)
         self.timeout = timeout
         self.proxy_info = proxy_info
 
@@ -945,27 +946,27 @@
                     self.sock.settimeout(self.timeout)
                     # End of difference from httplib.
                 if self.debuglevel > 0:
-                    print "connect: (%s, %s) ************" % (self.host, self.port)
+                    print("connect: (%s, %s) ************" % (self.host, self.port))
                     if use_proxy:
-                        print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))
+                        print("proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
                 if use_proxy:
                     self.sock.connect((self.host, self.port) + sa[2:])
                 else:
                     self.sock.connect(sa)
-            except socket.error, msg:
+            except socket.error as msg:
                 if self.debuglevel > 0:
-                    print "connect fail: (%s, %s)" % (self.host, self.port)
+                    print("connect fail: (%s, %s)" % (self.host, self.port))
                     if use_proxy:
-                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))
+                        print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
                 if self.sock:
                     self.sock.close()
                 self.sock = None
                 continue
             break
         if not self.sock:
-            raise socket.error, msg
-
-class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
+            raise socket.error("unable to connect to (%s, %s)" % (self.host, self.port))
+
+class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
     """
     This class allows communication via SSL.
 
@@ -978,7 +979,7 @@
                  strict=None, timeout=None, proxy_info=None,
                  ca_certs=None, disable_ssl_certificate_validation=False,
                  ssl_version=None):
-        httplib.HTTPSConnection.__init__(self, host, port=port,
+        http.client.HTTPSConnection.__init__(self, host, port=port,
                                          key_file=key_file,
                                          cert_file=cert_file, strict=strict)
         self.timeout = timeout
@@ -1080,9 +1081,9 @@
                     self.disable_ssl_certificate_validation, self.ca_certs,
                     self.ssl_version, self.host)
                 if self.debuglevel > 0:
-                    print "connect: (%s, %s)" % (self.host, self.port)
+                    print("connect: (%s, %s)" % (self.host, self.port))
                     if use_proxy:
-                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))
+                        print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
                 if not self.disable_ssl_certificate_validation:
                     cert = self.sock.getpeercert()
                     hostname = self.host.split(':', 0)[0]
@@ -1090,7 +1091,7 @@
                         raise CertificateHostnameMismatch(
                             'Server presented certificate that does not match '
                             'host %s: %s' % (hostname, cert), hostname, cert)
-            except (ssl_SSLError, ssl_CertificateError, CertificateHostnameMismatch), e:
+            except (ssl_SSLError, ssl_CertificateError, CertificateHostnameMismatch) as e:
                 if sock:
                     sock.close()
                 if self.sock:
@@ -1106,18 +1107,18 @@
                     raise
             except (socket.timeout, socket.gaierror):
                 raise
-            except socket.error, msg:
+            except socket.error as msg:
                 if self.debuglevel > 0:
-                    print "connect fail: (%s, %s)" % (self.host, self.port)
+                    print("connect fail: (%s, %s)" % (self.host, self.port))
                     if use_proxy:
-                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))
+                        print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
                 if self.sock:
                     self.sock.close()
                 self.sock = None
                 continue
             break
         if not self.sock:
-            raise socket.error, msg
+            raise socket.error("unable to connect to (%s, %s)" % (self.host, self.port))
 
 SCHEME_TO_CONNECTION = {
     'http': HTTPConnectionWithTimeout,
@@ -1138,7 +1139,7 @@
     return fixed_fetch
 
 
-class AppEngineHttpConnection(httplib.HTTPConnection):
+class AppEngineHttpConnection(http.client.HTTPConnection):
     """Use httplib on App Engine, but compensate for its weirdness.
 
     The parameters key_file, cert_file, proxy_info, ca_certs,
@@ -1149,11 +1150,11 @@
                  strict=None, timeout=None, proxy_info=None, ca_certs=None,
                  disable_ssl_certificate_validation=False,
                  ssl_version=None):
-        httplib.HTTPConnection.__init__(self, host, port=port,
+        http.client.HTTPConnection.__init__(self, host, port=port,
                                         strict=strict, timeout=timeout)
 
 
-class AppEngineHttpsConnection(httplib.HTTPSConnection):
+class AppEngineHttpsConnection(http.client.HTTPSConnection):
     """Same as AppEngineHttpConnection, but for HTTPS URIs.
 
     The parameters proxy_info, ca_certs, disable_ssl_certificate_validation,
@@ -1163,7 +1164,7 @@
                  strict=None, timeout=None, proxy_info=None, ca_certs=None,
                  disable_ssl_certificate_validation=False,
                  ssl_version=None):
-        httplib.HTTPSConnection.__init__(self, host, port=port,
+        http.client.HTTPSConnection.__init__(self, host, port=port,
                                          key_file=key_file,
                                          cert_file=cert_file, strict=strict,
                                          timeout=timeout)
@@ -1247,7 +1248,7 @@
         self.connections = {}
         # The location of the cache, for now a directory
         # where cached responses are held.
-        if cache and isinstance(cache, basestring):
+        if cache and isinstance(cache, str):
             self.cache = FileCache(cache)
         else:
             self.cache = cache
@@ -1302,7 +1303,7 @@
         challenges = _parse_www_authenticate(response, 'www-authenticate')
         for cred in self.credentials.iter(host):
             for scheme in AUTH_SCHEME_ORDER:
-                if challenges.has_key(scheme):
+                if scheme in challenges:
                     yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
 
     def add_credentials(self, name, password, domain=""):
@@ -1338,7 +1339,7 @@
             except ssl_SSLError:
                 conn.close()
                 raise
-            except socket.error, e:
+            except socket.error as e:
                 err = 0
                 if hasattr(e, 'args'):
                     err = getattr(e, 'args')[0]
@@ -1348,7 +1349,7 @@
                     raise
                 if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
                     continue  # retry on potentially transient socket errors
-            except httplib.HTTPException:
+            except http.client.HTTPException:
                 # Just because the server closed the connection doesn't apparently mean
                 # that the server didn't send a response.
                 if hasattr(conn, 'sock') and conn.sock is None:
@@ -1365,7 +1366,7 @@
                     continue
             try:
                 response = conn.getresponse()
-            except httplib.BadStatusLine:
+            except http.client.BadStatusLine:
                 # If we get a BadStatusLine on the first try then that means
                 # the connection just went stale, so retry regardless of the
                 # number of RETRIES set.
@@ -1378,7 +1379,7 @@
                 else:
                     conn.close()
                     raise
-            except (socket.error, httplib.HTTPException):
+            except (socket.error, http.client.HTTPException):
                 if i < RETRIES-1:
                     conn.close()
                     conn.connect()
@@ -1430,29 +1431,29 @@
                 # Pick out the location header and basically start from the beginning
                 # remembering first to strip the ETag header and decrement our 'depth'
                 if redirections:
-                    if not response.has_key('location') and response.status != 300:
+                    if 'location' not in response and response.status != 300:
                         raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
                     # Fix-up relative redirects (which violate an RFC 2616 MUST)
-                    if response.has_key('location'):
+                    if 'location' in response:
                         location = response['location']
                         (scheme, authority, path, query, fragment) = parse_uri(location)
                         if authority == None:
-                            response['location'] = urlparse.urljoin(absolute_uri, location)
+                            response['location'] = urllib.parse.urljoin(absolute_uri, location)
                     if response.status == 301 and method in ["GET", "HEAD"]:
                         response['-x-permanent-redirect-url'] = response['location']
-                        if not response.has_key('content-location'):
+                        if 'content-location' not in response:
                             response['content-location'] = absolute_uri
                         _updateCache(headers, response, content, self.cache, cachekey)
-                    if headers.has_key('if-none-match'):
+                    if 'if-none-match' in headers:
                         del headers['if-none-match']
-                    if headers.has_key('if-modified-since'):
+                    if 'if-modified-since' in headers:
                         del headers['if-modified-since']
                     if 'authorization' in headers and not self.forward_authorization_headers:
                         del headers['authorization']
-                    if response.has_key('location'):
+                    if 'location' in response:
                         location = response['location']
                         old_response = copy.deepcopy(response)
-                        if not old_response.has_key('content-location'):
+                        if 'content-location' not in old_response:
                             old_response['content-location'] = absolute_uri
                         redirect_method = method
                         if response.status in [302, 303]:
@@ -1467,7 +1468,7 @@
                     raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
             elif response.status in [200, 203] and method in ["GET", "HEAD"]:
                 # Don't cache 206's since we aren't going to handle byte range requests
-                if not response.has_key('content-location'):
+                if 'content-location' not in response:
                     response['content-location'] = absolute_uri
                 _updateCache(headers, response, content, self.cache, cachekey)
 
@@ -1509,7 +1510,7 @@
             else:
                 headers = self._normalize_headers(headers)
 
-            if not headers.has_key('user-agent'):
+            if 'user-agent' not in headers:
                 headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
 
             uri = iri2uri(uri)
@@ -1580,7 +1581,7 @@
             else:
                 cachekey = None
 
-            if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
+            if method in self.optimistic_concurrency_methods and self.cache and 'etag' in info and not self.ignore_etag and 'if-match' not in headers:
                 # http://www.w3.org/1999/04/Editing/
                 headers['if-match'] = info['etag']
 
@@ -1601,7 +1602,7 @@
                         break
 
             if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
-                if info.has_key('-x-permanent-redirect-url'):
+                if '-x-permanent-redirect-url' in info:
                     # Should cached permanent redirects be counted in our redirection count? For now, yes.
                     if redirections <= 0:
                         raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "")
@@ -1631,9 +1632,9 @@
                         return (response, content)
 
                     if entry_disposition == "STALE":
-                        if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
+                        if 'etag' in info and not self.ignore_etag and not 'if-none-match' in headers:
                             headers['if-none-match'] = info['etag']
-                        if info.has_key('last-modified') and not 'last-modified' in headers:
+                        if 'last-modified' in info and not 'last-modified' in headers:
                             headers['if-modified-since'] = info['last-modified']
                     elif entry_disposition == "TRANSPARENT":
                         pass
@@ -1663,13 +1664,13 @@
                     content = new_content
             else:
                 cc = _parse_cache_control(headers)
-                if cc.has_key('only-if-cached'):
+                if 'only-if-cached' in cc:
                     info['status'] = '504'
                     response = Response(info)
                     content = ""
                 else:
                     (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
-        except Exception, e:
+        except Exception as e:
             if self.force_exception_to_status_code:
                 if isinstance(e, HttpLib2ErrorWithResponse):
                     response = e.response
@@ -1702,9 +1703,9 @@
         """Return a ProxyInfo instance (or None) based on the scheme
         and authority.
         """
-        hostname, port = urllib.splitport(authority)
+        hostname, port = urllib.parse.splitport(authority)
         proxy_info = self.proxy_info
-        if callable(proxy_info):
+        if callable(proxy_info):
             proxy_info = proxy_info(scheme)
 
         if (hasattr(proxy_info, 'applies_to')
@@ -1733,7 +1734,7 @@
     def __init__(self, info):
         # info is either an email.Message or
         # an httplib.HTTPResponse object.
-        if isinstance(info, httplib.HTTPResponse):
+        if isinstance(info, http.client.HTTPResponse):
             for key, value in info.getheaders():
                 self[key.lower()] = value
             self.status = info.status
@@ -1741,11 +1742,11 @@
             self.reason = info.reason
             self.version = info.version
         elif isinstance(info, email.Message.Message):
-            for key, value in info.items():
+            for key, value in list(info.items()):
                 self[key.lower()] = value
             self.status = int(self['status'])
         else:
-            for key, value in info.iteritems():
+            for key, value in info.items():
                 self[key.lower()] = value
             self.status = int(self.get('status', self.status))
             self.reason = self.get('reason', self.reason)
@@ -1755,4 +1756,4 @@
         if name == 'dict':
             return self
         else:
-            raise AttributeError, name
+            raise AttributeError(name)
--- .\httplib2\iri2uri.py	(original)
+++ .\httplib2\iri2uri.py	(refactored)
@@ -12,7 +12,7 @@
 __history__ = """
 """
 
-import urlparse
+import urllib.parse
 
 
 # Convert an IRI to a URI following the rules in RFC 3987
@@ -66,13 +66,13 @@
     """Convert an IRI to a URI. Note that IRIs must be
     passed in a unicode strings. That is, do not utf-8 encode
     the IRI before passing it into the function."""
-    if isinstance(uri ,unicode):
-        (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
+    if isinstance(uri ,str):
+        (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri)
         authority = authority.encode('idna')
         # For each character in 'ucschar' or 'iprivate'
         #  1. encode as utf-8
         #  2. then %-encode each octet of that utf-8
-        uri = urlparse.urlunsplit((scheme, authority, path, query, fragment))
+        uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment))
         uri = "".join([encode(c) for c in uri])
     return uri
 
@@ -84,26 +84,26 @@
         def test_uris(self):
             """Test that URIs are invariant under the transformation."""
             invariant = [
-                u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
-                u"http://www.ietf.org/rfc/rfc2396.txt",
-                u"ldap://[2001:db8::7]/c=GB?objectClass?one",
-                u"mailto:John.Doe@example.com",
-                u"news:comp.infosystems.www.servers.unix",
-                u"tel:+1-816-555-1212",
-                u"telnet://192.0.2.16:80/",
-                u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
+                "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+                "http://www.ietf.org/rfc/rfc2396.txt",
+                "ldap://[2001:db8::7]/c=GB?objectClass?one",
+                "mailto:John.Doe@example.com",
+                "news:comp.infosystems.www.servers.unix",
+                "tel:+1-816-555-1212",
+                "telnet://192.0.2.16:80/",
+                "urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
             for uri in invariant:
                 self.assertEqual(uri, iri2uri(uri))
 
         def test_iri(self):
             """ Test that the right type of escaping is done for each part of the URI."""
-            self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
-            self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
-            self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
-            self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
-            self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
+            self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri("http://\N{COMET}.com/\N{COMET}"))
+            self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri("http://bitworking.org/?fred=\N{COMET}"))
+            self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri("http://bitworking.org/#\N{COMET}"))
+            self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}"))
+            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
+            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
+            self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
 
     unittest.main()
 
--- .\httplib2\socks.py	(original)
+++ .\httplib2\socks.py	(refactored)
@@ -369,7 +369,7 @@
         wrote_host_header = False
         wrote_auth_header = False
         if self.__proxy[6] != None:
-            for key, val in self.__proxy[6].iteritems():
+            for key, val in self.__proxy[6].items():
                 headers += [key, ": ", val, "\r\n"]
                 wrote_host_header = (key.lower() == "host")
                 wrote_auth_header = (key.lower() == "proxy-authorization")
@@ -409,7 +409,7 @@
         To select the proxy server use setproxy().
         """
         # Do a minimal input check first
-        if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int):
+        if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], str)) or (type(destpair[1]) != int):
             raise GeneralProxyError((5, _generalerrors[5]))
         if self.__proxy[0] == PROXY_TYPE_SOCKS5:
             if self.__proxy[2] != None:
--- .\oauth2\__init__.py	(original)
+++ .\oauth2\__init__.py	(refactored)
@@ -23,17 +23,17 @@
 """
 
 import base64
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import time
 import random
-import urlparse
+import urllib.parse
 import hmac
 import binascii
 import calibre_plugins.goodreads_sync.httplib2 as httplib2
 from calibre_plugins.goodreads_sync.common_utils import debug_print
 
 try:
-    from urlparse import parse_qs
+    from urllib.parse import parse_qs
     parse_qs # placate pyflakes
 except ImportError:
     # fall back for Python 2.5
@@ -46,7 +46,7 @@
     # hashlib was added in Python 2.5
     import sha
 
-import _version
+from . import _version
 
 __version__ = _version.__version__
 
@@ -88,7 +88,7 @@
     request.sign_request(signing_method, consumer, token)
 
     params = []
-    for k, v in sorted(request.iteritems()):
+    for k, v in sorted(request.items()):
         if v is not None:
             params.append('%s="%s"' % (k, escape(v)))
 
@@ -98,12 +98,12 @@
 def to_unicode(s):
     """ Convert to unicode, raise exception with instructive error
     message if s is not unicode, ascii, or utf-8. """
-    if not isinstance(s, unicode):
+    if not isinstance(s, str):
         if not isinstance(s, str):
             raise TypeError('You are required to pass either unicode or string here, not: %r (%s)' % (type(s), s))
         try:
             s = s.decode('utf-8')
-        except UnicodeDecodeError, le:
+        except UnicodeDecodeError as le:
             raise TypeError('You are required to pass either a unicode object or a utf-8 string here. You passed a Python string object which contained non-utf-8: %r. The UnicodeDecodeError that resulted from attempting to interpret it as utf-8 was: %s' % (s, le,))
     return s
 
@@ -111,13 +111,13 @@
     return to_unicode(s).encode('utf-8')
 
 def to_unicode_if_string(s):
-    if isinstance(s, basestring):
+    if isinstance(s, str):
         return to_unicode(s)
     else:
         return s
 
 def to_utf8_if_string(s):
-    if isinstance(s, basestring):
+    if isinstance(s, str):
         return to_utf8(s)
     else:
         return s
@@ -127,12 +127,12 @@
     Raise TypeError if x is a str containing non-utf8 bytes or if x is
     an iterable which contains such a str.
     """
-    if isinstance(x, basestring):
+    if isinstance(x, str):
         return to_unicode(x)
 
     try:
         l = list(x)
-    except TypeError, e:
+    except TypeError as e:
         assert 'is not iterable' in str(e)
         return x
     else:
@@ -143,12 +143,12 @@
     Raise TypeError if x is a str or if x is an iterable which
     contains a str.
     """
-    if isinstance(x, basestring):
+    if isinstance(x, str):
         return to_utf8(x)
 
     try:
         l = list(x)
-    except TypeError, e:
+    except TypeError as e:
         assert 'is not iterable' in str(e)
         return x
     else:
@@ -156,7 +156,7 @@
 
 def escape(s):
     """Escape a URL including any /."""
-    return urllib.quote(s.encode('utf-8'), safe='~')
+    return urllib.parse.quote(s.encode('utf-8'), safe='~')
 
 def generate_timestamp():
     """Get seconds since epoch (UTC)."""
@@ -207,7 +207,7 @@
         data = {'oauth_consumer_key': self.key,
             'oauth_consumer_secret': self.secret}
 
-        return urllib.urlencode(data)
+        return urllib.parse.urlencode(data)
 
 
 class Token(object):
@@ -251,13 +251,13 @@
     def get_callback_url(self):
         if self.callback and self.verifier:
             # Append the oauth_verifier.
-            parts = urlparse.urlparse(self.callback)
+            parts = urllib.parse.urlparse(self.callback)
             scheme, netloc, path, params, query, fragment = parts[:6]
             if query:
                 query = '%s&oauth_verifier=%s' % (query, self.verifier)
             else:
                 query = 'oauth_verifier=%s' % self.verifier
-            return urlparse.urlunparse((scheme, netloc, path, params,
+            return urllib.parse.urlunparse((scheme, netloc, path, params,
                 query, fragment))
         return self.callback
 
@@ -275,7 +275,7 @@
 
         if self.callback_confirmed is not None:
             data['oauth_callback_confirmed'] = self.callback_confirmed
-        return urllib.urlencode(data)
+        return urllib.parse.urlencode(data)
  
     @staticmethod
     def from_string(s):
@@ -346,7 +346,7 @@
             self.url = to_unicode(url)
         self.method = method
         if parameters is not None:
-            for k, v in parameters.iteritems():
+            for k, v in parameters.items():
                 k = to_unicode(k)
                 v = to_unicode_optional_iterator(v)
                 self[k] = v
@@ -358,7 +358,7 @@
     def url(self, value):
         self.__dict__['url'] = value
         if value is not None:
-            scheme, netloc, path, params, query, fragment = urlparse.urlparse(value)
+            scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(value)
 
             # Exclude default port numbers.
             if scheme == 'http' and netloc[-3:] == ':80':
@@ -369,7 +369,7 @@
                 raise ValueError("Unsupported URL %s (%s)." % (value, scheme))
 
             # Normalized URL excludes params, query, and fragment.
-            self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None))
+            self.normalized_url = urllib.parse.urlunparse((scheme, netloc, path, None, None, None))
         else:
             self.normalized_url = None
             self.__dict__['url'] = None
@@ -383,12 +383,12 @@
  
     def get_nonoauth_parameters(self):
         """Get any non-OAuth parameters."""
-        return dict([(k, v) for k, v in self.iteritems() 
+        return dict([(k, v) for k, v in self.items() 
                     if not k.startswith('oauth_')])
  
     def to_header(self, realm=''):
         """Serialize as a header for an HTTPAuth request."""
-        oauth_params = ((k, v) for k, v in self.items() 
+        oauth_params = ((k, v) for k, v in list(self.items()) 
                             if k.startswith('oauth_'))
         stringy_params = ((k, escape(str(v))) for k, v in oauth_params)
         header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
@@ -403,24 +403,24 @@
     def to_postdata(self):
         """Serialize as post data for a POST request."""
         d = {}
-        for k, v in self.iteritems():
+        for k, v in self.items():
             d[k.encode('utf-8')] = to_utf8_optional_iterator(v)
 
         # tell urlencode to deal with sequence values and map them correctly
         # to resulting querystring. for example self["k"] = ["v1", "v2"] will
         # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D
-        return urllib.urlencode(d, True).replace('+', '%20')
+        return urllib.parse.urlencode(d, True).replace('+', '%20')
  
     def to_url(self):
         """Serialize as a URL for a GET request."""
-        base_url = urlparse.urlparse(self.url)
+        base_url = urllib.parse.urlparse(self.url)
         try:
             query = base_url.query
         except AttributeError:
             # must be python <2.5
             query = base_url[4]
         query = parse_qs(query)
-        for k, v in self.items():
+        for k, v in list(self.items()):
             query.setdefault(k, []).append(v)
         
         try:
@@ -438,8 +438,8 @@
             fragment = base_url[5]
         
         url = (scheme, netloc, path, params,
-               urllib.urlencode(query, True), fragment)
-        return urlparse.urlunparse(url)
+               urllib.parse.urlencode(query, True), fragment)
+        return urllib.parse.urlunparse(url)
 
     def get_parameter(self, parameter):
         ret = self.get(parameter)
@@ -451,31 +451,31 @@
     def get_normalized_parameters(self):
         """Return a string that contains the parameters that must be signed."""
         items = []
-        for key, value in self.iteritems():
+        for key, value in self.items():
             if key == 'oauth_signature':
                 continue
             # 1.0a/9.1.1 states that kvp must be sorted by key, then by value,
             # so we unpack sequence values into multiple items for sorting.
-            if isinstance(value, basestring):
+            if isinstance(value, str):
                 items.append((to_utf8_if_string(key), to_utf8(value)))
             else:
                 try:
                     value = list(value)
-                except TypeError, e:
+                except TypeError as e:
                     assert 'is not iterable' in str(e)
                     items.append((to_utf8_if_string(key), to_utf8_if_string(value)))
                 else:
                     items.extend((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value)
 
         # Include any query string parameters from the provided URL
-        query = urlparse.urlparse(self.url)[4]
-
-        url_items = self._split_url_string(query).items()
+        query = urllib.parse.urlparse(self.url)[4]
+
+        url_items = list(self._split_url_string(query).items())
         url_items = [(to_utf8(k), to_utf8(v)) for k, v in url_items if k != 'oauth_signature' ]
         items.extend(url_items)
 
         items.sort()
-        encoded_str = urllib.urlencode(items)
+        encoded_str = urllib.parse.urlencode(items)
         # Encode signature parameters per Oauth Core 1.0 protocol
         # spec draft 7, section 3.6
         # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
@@ -539,7 +539,7 @@
             parameters.update(query_params)
  
         # URL parameters.
-        param_str = urlparse.urlparse(http_url)[4] # query
+        param_str = urllib.parse.urlparse(http_url)[4] # query
         url_params = cls._split_url_string(param_str)
         parameters.update(url_params)
  
@@ -601,15 +601,15 @@
             # Split key-value.
             param_parts = param.split('=', 1)
             # Remove quotes and unescape the value.
-            params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
+            params[param_parts[0]] = urllib.parse.unquote(param_parts[1].strip('\"'))
         return params
  
     @staticmethod
     def _split_url_string(param_str):
         """Turn URL string into parameters."""
         parameters = parse_qs(param_str.encode('utf-8'), keep_blank_values=True)
-        for k, v in parameters.iteritems():
-            parameters[k] = urllib.unquote(v[0])
+        for k, v in parameters.items():
+            parameters[k] = urllib.parse.unquote(v[0].decode('utf-8'))
         return parameters
 
 
@@ -663,12 +663,12 @@
 
         req.sign_request(self.method, self.consumer, self.token)
 
-        schema, rest = urllib.splittype(uri)
+        schema, rest = urllib.parse.splittype(uri)
         if rest.startswith('//'):
             hierpart = '//'
         else:
             hierpart = ''
-        host, rest = urllib.splithost(rest)
+        host, rest = urllib.parse.splithost(rest)
 
         realm = schema + ':' + hierpart + host
 
@@ -742,7 +742,7 @@
             # Get the signature method object.
             signature_method = self.signature_methods[signature_method]
         except:
-            signature_method_names = ', '.join(self.signature_methods.keys())
+            signature_method_names = ', '.join(list(self.signature_methods.keys()))
             raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))
 
         return signature_method
