Description: Remove Python 2 syntax and port to Python 3
Author: Nilesh Patra <nilesh@debian.org>
Last-Update: 2021-11-10
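
The port maps the Python 2 modules onto their Python 3 equivalents
(urllib2 -> urllib.request / urllib.error / urllib.parse, httplib -> http.client,
thread -> _thread) and modernises the syntax (dict.has_key() -> "in",
"except E, err" -> "except E as err", print statements -> print() calls).
As a minimal sketch of how the ported handler is expected to be used under
Python 3, mirroring the module's own test helpers (the import path is an
assumption based on this source tree and is not part of the patch):

    import urllib.request
    from keepalive.keepalive import HTTPHandler

    # Install the keep-alive handler so repeated fetches reuse one connection.
    opener = urllib.request.build_opener(HTTPHandler())
    urllib.request.install_opener(opener)

    fo = urllib.request.urlopen('http://example.org/')
    data = fo.read()
    fo.close()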
--- a/keepalive/keepalive.py
+++ b/keepalive/keepalive.py
@@ -104,10 +104,10 @@
 
 # $Id: keepalive.py,v 1.17 2006/12/08 00:14:16 mstenner Exp $
 
-import urllib2
-import httplib
+import urllib.request, urllib.error, urllib.parse
+import http.client
 import socket
-import thread
+import _thread
 
 DEBUG = None
 
@@ -121,7 +121,7 @@
       * keep track of all existing
       """
     def __init__(self):
-        self._lock = thread.allocate_lock()
+        self._lock = _thread.allocate_lock()
         self._hostmap = {} # map hosts to a list of connections
         self._connmap = {} # map connections to host
         self._readymap = {} # map connection to ready state
@@ -129,7 +129,7 @@
     def add(self, host, connection, ready):
         self._lock.acquire()
         try:
-            if not self._hostmap.has_key(host): self._hostmap[host] = []
+            if host not in self._hostmap: self._hostmap[host] = []
             self._hostmap[host].append(connection)
             self._connmap[connection] = host
             self._readymap[connection] = ready
@@ -159,7 +159,7 @@
         conn = None
         self._lock.acquire()
         try:
-            if self._hostmap.has_key(host):
+            if host in self._hostmap:
                 for c in self._hostmap[host]:
                     if self._readymap[c]:
                         self._readymap[c] = 0
@@ -183,7 +183,7 @@
     def open_connections(self):
         """return a list of connected hosts and the number of connections
         to each.  [('foo.com:80', 2), ('bar.org', 1)]"""
-        return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
+        return [(host, len(li)) for (host, li) in list(self._cm.get_all().items())]
 
     def close_connection(self, host):
         """close connection(s) to <host>
@@ -195,7 +195,7 @@
         
     def close_all(self):
         """close all open connections"""
-        for host, conns in self._cm.get_all().items():
+        for host, conns in list(self._cm.get_all().items()):
             for h in conns:
                 self._cm.remove(h)
                 h.close()
@@ -213,7 +213,7 @@
     def do_open(self, req):
         host = req.host
         if not host:
-            raise urllib2.URLError('no host given')
+            raise urllib.error.URLError('no host given')
 
         try:
             h = self._cm.get_ready_conn(host)
@@ -237,8 +237,8 @@
                 self._cm.add(host, h, 0)
                 self._start_transaction(h, req)
                 r = h.getresponse()
-        except (socket.error, httplib.HTTPException), err:
-            raise urllib2.URLError(err)
+        except (socket.error, http.client.HTTPException) as err:
+            raise urllib.error.URLError(err)
             
         if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
 
@@ -273,7 +273,7 @@
             r = h.getresponse()
             # note: just because we got something back doesn't mean it
             # worked.  We'll check the version below, too.
-        except (socket.error, httplib.HTTPException):
+        except (socket.error, http.client.HTTPException):
             r = None
         except:
             # adding this block just in case we've missed
@@ -312,22 +312,22 @@
                     h.putrequest('POST', req.selector)
                 else:
                     h.putrequest('POST', req.get_selector())
-                if not req.headers.has_key('Content-type'):
+                if 'Content-type' not in req.headers:
                     h.putheader('Content-type',
                                 'application/x-www-form-urlencoded')
-                if not req.headers.has_key('Content-length'):
+                if 'Content-length' not in req.headers:
                     h.putheader('Content-length', '%d' % len(data))
             else:
                 if hasattr(req, 'selector'):
                     h.putrequest('GET', req.selector)
                 else:
                     h.putrequest('GET', req.get_selector())
-        except (socket.error, httplib.HTTPException), err:
-            raise urllib2.URLError(err)
+        except (socket.error, http.client.HTTPException) as err:
+            raise urllib.error.URLError(err)
 
         for args in self.parent.addheaders:
             h.putheader(*args)
-        for k, v in req.headers.items():
+        for k, v in list(req.headers.items()):
             h.putheader(k, v)
         h.endheaders()
         if req.data:
@@ -336,7 +336,7 @@
     def _get_connection(self, host):
         return NotImplementedError
 
-class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
+class HTTPHandler(KeepAliveHandler, urllib.request.HTTPHandler):
     def __init__(self):
         KeepAliveHandler.__init__(self)
 
@@ -346,7 +346,7 @@
     def _get_connection(self, host):
         return HTTPConnection(host)
 
-class HTTPSHandler(KeepAliveHandler, urllib2.HTTPSHandler):
+class HTTPSHandler(KeepAliveHandler, urllib.request.HTTPSHandler):
     def __init__(self, ssl_factory=None):
         KeepAliveHandler.__init__(self)
         if not ssl_factory:
@@ -364,7 +364,7 @@
         try: return self._ssl_factory.get_https_connection(host)
         except AttributeError: return HTTPSConnection(host)
         
-class HTTPResponse(httplib.HTTPResponse):
+class HTTPResponse(http.client.HTTPResponse):
     # we need to subclass HTTPResponse in order to
     # 1) add readline() and readlines() methods
     # 2) add close_connection() methods
@@ -386,9 +386,9 @@
 
     def __init__(self, sock, debuglevel=0, strict=0, method=None):
         if method: # the httplib in python 2.3 uses the method arg
-            httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
+            http.client.HTTPResponse.__init__(self, sock, debuglevel, method)
         else: # 2.2 doesn't
-            httplib.HTTPResponse.__init__(self, sock, debuglevel)
+            http.client.HTTPResponse.__init__(self, sock, debuglevel)
         self.fileno = sock.fileno
         self.code = None
         self._rbuf = b""
@@ -398,7 +398,7 @@
         self._url = None     # (same)
         self._connection = None # (same)
 
-    _raw_read = httplib.HTTPResponse.read
+    _raw_read = http.client.HTTPResponse.read
 
     def close(self):
         if self.fp:
@@ -462,11 +462,11 @@
         return list
 
 
-class HTTPConnection(httplib.HTTPConnection):
+class HTTPConnection(http.client.HTTPConnection):
     # use the modified response class
     response_class = HTTPResponse
 
-class HTTPSConnection(httplib.HTTPSConnection):
+class HTTPSConnection(http.client.HTTPSConnection):
     response_class = HTTPResponse
     
 #########################################################################
@@ -477,26 +477,26 @@
     global HANDLE_ERRORS
     orig = HANDLE_ERRORS
     keepalive_handler = HTTPHandler()
-    opener = urllib2.build_opener(keepalive_handler)
-    urllib2.install_opener(opener)
+    opener = urllib.request.build_opener(keepalive_handler)
+    urllib.request.install_opener(opener)
     pos = {0: 'off', 1: 'on'}
     for i in (0, 1):
-        print "  fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
+        print("  fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i))
         HANDLE_ERRORS = i
         try:
-            fo = urllib2.urlopen(url)
+            fo = urllib.request.urlopen(url)
             foo = fo.read()
             fo.close()
             try: status, reason = fo.status, fo.reason
             except AttributeError: status, reason = None, None
-        except IOError, e:
-            print "  EXCEPTION: %s" % e
+        except IOError as e:
+            print("  EXCEPTION: %s" % e)
             raise
         else:
-            print "  status = %s, reason = %s" % (status, reason)
+            print("  status = %s, reason = %s" % (status, reason))
     HANDLE_ERRORS = orig
     hosts = keepalive_handler.open_connections()
-    print "open connections:", hosts
+    print("open connections:", hosts)
     keepalive_handler.close_all()
 
 def continuity(url):
@@ -504,25 +504,25 @@
     format = '%25s: %s'
     
     # first fetch the file with the normal http handler
-    opener = urllib2.build_opener()
-    urllib2.install_opener(opener)
-    fo = urllib2.urlopen(url)
+    opener = urllib.request.build_opener()
+    urllib.request.install_opener(opener)
+    fo = urllib.request.urlopen(url)
     foo = fo.read()
     fo.close()
     m = md5.new(foo)
-    print format % ('normal urllib', m.hexdigest())
+    print(format % ('normal urllib', m.hexdigest()))
 
     # now install the keepalive handler and try again
-    opener = urllib2.build_opener(HTTPHandler())
-    urllib2.install_opener(opener)
+    opener = urllib.request.build_opener(HTTPHandler())
+    urllib.request.install_opener(opener)
 
-    fo = urllib2.urlopen(url)
+    fo = urllib.request.urlopen(url)
     foo = fo.read()
     fo.close()
     m = md5.new(foo)
-    print format % ('keepalive read', m.hexdigest())
+    print(format % ('keepalive read', m.hexdigest()))
 
-    fo = urllib2.urlopen(url)
+    fo = urllib.request.urlopen(url)
     foo = ''
     while 1:
         f = fo.readline()
@@ -530,25 +530,25 @@
         else: break
     fo.close()
     m = md5.new(foo)
-    print format % ('keepalive readline', m.hexdigest())
+    print(format % ('keepalive readline', m.hexdigest()))
 
 def comp(N, url):
-    print '  making %i connections to:\n  %s' % (N, url)
+    print('  making %i connections to:\n  %s' % (N, url))
 
     sys.stdout.write('  first using the normal urllib handlers')
     # first use normal opener
-    opener = urllib2.build_opener()
-    urllib2.install_opener(opener)
+    opener = urllib.request.build_opener()
+    urllib.request.install_opener(opener)
     t1 = fetch(N, url)
-    print '  TIME: %.3f s' % t1
+    print('  TIME: %.3f s' % t1)
 
     sys.stdout.write('  now using the keepalive handler       ')
     # now install the keepalive handler and try again
-    opener = urllib2.build_opener(HTTPHandler())
-    urllib2.install_opener(opener)
+    opener = urllib.request.build_opener(HTTPHandler())
+    urllib.request.install_opener(opener)
     t2 = fetch(N, url)
-    print '  TIME: %.3f s' % t2
-    print '  improvement factor: %.2f' % (t1/t2, )
+    print('  TIME: %.3f s' % t2)
+    print('  improvement factor: %.2f' % (t1/t2, ))
     
 def fetch(N, url, delay=0):
     import time
@@ -556,7 +556,7 @@
     starttime = time.time()
     for i in range(N):
         if delay and i > 0: time.sleep(delay)
-        fo = urllib2.urlopen(url)
+        fo = urllib.request.urlopen(url)
         foo = fo.read()
         fo.close()
         lens.append(len(foo))
@@ -566,7 +566,7 @@
     for i in lens[1:]:
         j = j + 1
         if not i == lens[0]:
-            print "WARNING: inconsistent length on read %i: %i" % (j, i)
+            print("WARNING: inconsistent length on read %i: %i" % (j, i))
 
     return diff
 
@@ -574,16 +574,16 @@
     global DEBUG
     dbbackup = DEBUG
     class FakeLogger:
-        def debug(self, msg, *args): print msg % args
+        def debug(self, msg, *args): print(msg % args)
         info = warning = error = debug
     DEBUG = FakeLogger()
-    print "  fetching the file to establish a connection"
-    fo = urllib2.urlopen(url)
+    print("  fetching the file to establish a connection")
+    fo = urllib.request.urlopen(url)
     data1 = fo.read()
     fo.close()
  
     i = 20
-    print "  waiting %i seconds for the server to close the connection" % i
+    print("  waiting %i seconds for the server to close the connection" % i)
     while i > 0:
         sys.stdout.write('\r  %2i' % i)
         sys.stdout.flush()
@@ -591,33 +591,33 @@
         i -= 1
     sys.stderr.write('\r')
 
-    print "  fetching the file a second time"
-    fo = urllib2.urlopen(url)
+    print("  fetching the file a second time")
+    fo = urllib.request.urlopen(url)
     data2 = fo.read()
     fo.close()
 
     if data1 == data2:
-        print '  data are identical'
+        print('  data are identical')
     else:
-        print '  ERROR: DATA DIFFER'
+        print('  ERROR: DATA DIFFER')
 
     DEBUG = dbbackup
 
     
 def test(url, N=10):
-    print "checking error hander (do this on a non-200)"
+    print("checking error hander (do this on a non-200)")
     try: error_handler(url)
-    except IOError, e:
-        print "exiting - exception will prevent further tests"
+    except IOError as e:
+        print("exiting - exception will prevent further tests")
         sys.exit()
-    print
-    print "performing continuity test (making sure stuff isn't corrupted)"
+    print()
+    print("performing continuity test (making sure stuff isn't corrupted)")
     continuity(url)
-    print
-    print "performing speed comparison"
+    print()
+    print("performing speed comparison")
     comp(N, url)
-    print
-    print "performing dropped-connection check"
+    print()
+    print("performing dropped-connection check")
     test_timeout(url)
     
 if __name__ == '__main__':
@@ -627,7 +627,7 @@
         N = int(sys.argv[1])
         url = sys.argv[2]
     except:
-        print "%s <integer> <url>" % sys.argv[0]
+        print("%s <integer> <url>" % sys.argv[0])
     else:
         test(url, N)
 
