jenkins-bot has submitted this change and it was merged.

Change subject: Revert "Make tests/api_tests.py run on py3k"
......................................................................


Revert "Make tests/api_tests.py run on py3k"

This reverts commit 909331ab7d863866e19dfc007989b92bb3345ebd.

Change-Id: Icc9ddcd431a1c68702ade89075406eff6a15f59e
---
M pywikibot/comms/http.py
M pywikibot/comms/threadedhttp.py
M pywikibot/data/api.py
M pywikibot/data/wikidataquery.py
M pywikibot/textlib.py
M pywikibot/throttle.py
M pywikibot/userinterfaces/terminal_interface_unix.py
M tests/api_tests.py
8 files changed, 23 insertions(+), 50 deletions(-)

Approvals:
  Merlijn van Deen: Looks good to me, approved
  jenkins-bot: Verified



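Taken together, the hunks below restore Python-2-only idioms that the reverted commit had replaced: version-dispatched urllib imports, the print() call (back to the print statement), explicit bytes/text handling around hashlib and pickle, and the newer unittest assertions. For reference, the import-dispatch pattern being backed out of pywikibot/comms/http.py looks roughly like this (a minimal sketch; the Python 3 branch matches the context lines in the diff, the Python 2 branch is the conventional counterpart):

    import sys

    if sys.version_info[0] == 2:
        # Python 2 locations of the helpers
        import Queue
        import cookielib
        import urlparse
        from urllib import urlencode, unquote
    else:
        # Python 3 moved them under queue, http.cookiejar and urllib.parse
        import queue as Queue
        from http import cookiejar as cookielib
        import urllib.parse as urlparse
        from urllib.parse import urlencode, unquote
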
diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index 97f5f40..a3a25fc 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -35,7 +35,7 @@
 else:
     from ssl import SSLError as SSLHandshakeError
     import queue as Queue
-    import urllib.parse as urlparse
+    import urllib as urlparse
     from http import cookiejar as cookielib
 
 from pywikibot import config
@@ -76,7 +76,7 @@
 
 
 # Build up HttpProcessors
-pywikibot.log(u'Starting %(numthreads)i threads...' % locals())
+pywikibot.log('Starting %(numthreads)i threads...' % locals())
 for i in range(numthreads):
     proc = threadedhttp.HttpProcessor(http_queue, cookie_jar, connection_pool)
     proc.setDaemon(True)
@@ -92,7 +92,7 @@
     message = u'Waiting for %i network thread(s) to finish. Press ctrl-c to abort' % len(threads)
     if hasattr(sys, 'last_type'):
         # we quit because of an exception
-        print(sys.last_type)
+        print sys.last_type
         pywikibot.critical(message)
     else:
         pywikibot.log(message)
diff --git a/pywikibot/comms/threadedhttp.py b/pywikibot/comms/threadedhttp.py
index f59060d..3b6f6a8 100644
--- a/pywikibot/comms/threadedhttp.py
+++ b/pywikibot/comms/threadedhttp.py
@@ -31,10 +31,8 @@
 
 if sys.version_info[0] == 2:
     import cookielib
-    from urllib import splittype, splithost, unquote
 else:
     from http import cookiejar as cookielib
-    from urllib.parse import splittype, splithost, unquote
 
 import pywikibot
 from pywikibot import config
@@ -394,10 +392,10 @@
         self.url = url
         self.headers = headers
         self.origin_req_host = cookielib.request_host(self)
-        self.type, r = splittype(url)
-        self.host, r = splithost(r)
+        self.type, r = urllib.splittype(url)
+        self.host, r = urllib.splithost(r)
         if self.host:
-            self.host = unquote(self.host)
+            self.host = urllib.unquote(self.host)
 
     def get_full_url(self):
         return self.url
@@ -426,8 +424,6 @@
         # TODO to match urllib2, this should be set to True when the
         #  request is the result of a redirect
         return False
-
-    unverifiable = property(is_unverifiable)
 
 
 class DummyResponse(object):
@@ -461,9 +457,3 @@
         #  to split carefully here - header.split(',') won't do it.
         HEADERVAL = re.compile(r'\s*(([^,]|(,\s*\d))+)')
         return [h[0] for h in HEADERVAL.findall(self.response[k])]
-
-    def get_all(self, k, failobj=None):
-        rv = self.getheaders(k)
-        if not rv:
-            return failobj
-        return rv
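
The threadedhttp.py hunks above drop the cross-version imports of splittype, splithost and unquote together with the DummyRequest.unverifiable property and the DummyResponse.get_all() shim. Those shims exist because http.cookiejar on Python 3 reads request.unverifiable and calls get_all() on the headers, whereas cookielib on Python 2 calls is_unverifiable() and getheaders(). A self-contained sketch of the removed accessor, kept close to the deleted lines (the header dict is illustrative only):

    class DummyResponse(object):
        """Minimal stand-in exposing both header-accessor spellings (sketch)."""

        def __init__(self, headers):
            self._headers = headers          # e.g. {'set-cookie': ['a=1', 'b=2']}

        def getheaders(self, k):
            # Python 2 cookielib spelling
            return self._headers.get(k.lower(), [])

        def get_all(self, k, failobj=None):
            # Python 3 http.cookiejar spelling; reuse getheaders()
            rv = self.getheaders(k)
            return rv if rv else failobj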
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 8d3a2f4..f129665 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -35,13 +35,8 @@
 from pywikibot.exceptions import *
 
 import sys
-
 if sys.version_info[0] > 2:
     basestring = (str, )
-    from urllib.parse import urlencode, unquote
-    unicode = str
-else:
-    from urllib import urlencode, unquote
 
 _logger = "data.api"
 
@@ -182,8 +177,6 @@
         """Return the parameters formatted for inclusion in an HTTP request."""
 
         for key in self.params:
-            if isinstance(self.params[key], bytes):
-                self.params[key] = self.params[key].decode('ascii')
             if isinstance(self.params[key], basestring):
                 # convert a stringified sequence into a list
                 self.params[key] = self.params[key].split("|")
@@ -215,15 +208,16 @@
         for key in self.params:
             try:
                 self.params[key] = "|".join(self.params[key])
-                self.params[key] = self.params[key].encode(self.site.encoding())
+                if isinstance(self.params[key], unicode):
+                    self.params[key] = self.params[key].encode(self.site.encoding())
             except Exception:
                 pywikibot.error(
                     u"http_params: Key '%s' could not be encoded to '%s'; 
params=%r"
                     % (key, self.site.encoding(), self.params[key]))
-        return urlencode(self.params)
+        return urllib.urlencode(self.params)
 
     def __str__(self):
-        return unquote(self.site.scriptpath()
+        return urllib.unquote(self.site.scriptpath()
                               + "/api.php?"
                               + self.http_params())
 
@@ -448,7 +442,7 @@
             pass
 
     def _create_file_name(self):
-        return hashlib.sha256((str(self.site) + str(self)).encode('ascii')).hexdigest()
+        return hashlib.sha256(str(self.site) + str(self)).hexdigest()
 
     def _cachefile_path(self):
         return os.path.join(self._get_cache_dir(), self._create_file_name())
@@ -459,8 +453,7 @@
     def _load_cache(self):
         """ Returns whether the cache can be used """
         try:
-            with open(self._cachefile_path(), 'b') as f:
-                sitestr, selfstr, self._data, self._cachetime = pickle.load(f)
+            sitestr, selfstr, self._data, self._cachetime = pickle.load(open(self._cachefile_path()))
             assert(sitestr == str(self.site))
             assert(selfstr == str(self))
             if self._expired(self._cachetime):
@@ -473,8 +466,7 @@
     def _write_cache(self, data):
         """ writes data to self._cachefile_path() """
         data = [str(self.site), str(self), data, datetime.datetime.now()]
-        with open(self._cachefile_path(), 'wb') as f:
-            pickle.dump(data, f)
+        pickle.dump(data, open(self._cachefile_path(), 'w'))
 
     def submit(self):
         cached_available = self._load_cache()
@@ -735,7 +727,7 @@
                     # otherwise we proceed as usual
                     else:
                         count += 1
-                    if self.limit and count >= self.limit:
+                    if self.limit > 0 and count >= self.limit:
                         return
             if self.module == "random" and self.limit:
                 # "random" module does not return "query-continue"
diff --git a/pywikibot/data/wikidataquery.py b/pywikibot/data/wikidataquery.py
index 79a315a..fdd66c7 100644
--- a/pywikibot/data/wikidataquery.py
+++ b/pywikibot/data/wikidataquery.py
@@ -8,11 +8,7 @@
 # Distributed under the terms of the MIT license.
 
 import json
-import sys
-if sys.version_info[0] == 2:
-    from urllib2 import quote
-else:
-    from urllib.parse import quote
+import urllib2
 from pywikibot.comms import http
 import pickle
 import os
@@ -425,7 +421,7 @@
         Get the query string for a given query or queryset
         @return query string including lables and props
         """
-        qStr = "q=%s" % quote(str(q))
+        qStr = "q=%s" % urllib2.quote(str(q))
 
         if labels:
             qStr += "&labels=%s" % ','.join(labels)
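
In wikidataquery.py the revert swaps the version-dispatched import of quote back to urllib2, which only exists on Python 2 (where urllib2 re-exports quote from urllib). The pattern being removed looks like this:

    import sys

    if sys.version_info[0] == 2:
        from urllib2 import quote
    else:
        from urllib.parse import quote

    # percent-encode a query fragment before it goes into the request URL
    print(quote('a query with spaces & brackets []'))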
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 15e29b5..4535b4d 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -1180,7 +1180,7 @@
         timeR = r'(?P<time>(?P<hour>[0-2]\d)[:\.h](?P<minute>[0-5]\d))'
         timeznR = r'\((?P<tzinfo>[A-Z]+)\)'
         yearR = r'(?P<year>(19|20)\d\d)'
-        monthR = r'(?P<month>(%s))' % (u'|'.join(self.origNames2monthNum))
+        monthR = ur'(?P<month>(%s))' % (u'|'.join(self.origNames2monthNum))
         dayR = r'(?P<day>(3[01]|[12]\d|0?[1-9]))'
 
         self.ptimeR = re.compile(timeR)
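
The textlib.py change restores a ur'...' literal. That prefix only parses on Python 2; Python 3 rejects ur'...' with a SyntaxError, and since str is already unicode there, a plain raw string is the portable spelling, e.g.:

    import re

    month_names = [u'January', u'February']    # illustrative values only
    monthR = r'(?P<month>(%s))' % u'|'.join(month_names)
    print(re.match(monthR, u'February').group('month'))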
diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py
index 95cc6b1..4e56464 100644
--- a/pywikibot/throttle.py
+++ b/pywikibot/throttle.py
@@ -108,7 +108,6 @@
                                           'site': this_site})
                     if not pid and this_pid >= my_pid:
                         my_pid = this_pid + 1  # next unused process id
-                f.close()
 
             if not pid:
                 pid = my_pid
diff --git a/pywikibot/userinterfaces/terminal_interface_unix.py b/pywikibot/userinterfaces/terminal_interface_unix.py
index 49dc9d9..984a5e5 100755
--- a/pywikibot/userinterfaces/terminal_interface_unix.py
+++ b/pywikibot/userinterfaces/terminal_interface_unix.py
@@ -50,7 +50,4 @@
             # just to be sure, reset the color
             text += unixColors['default']
 
-        if hasattr(targetStream, 'encoding'):
-            targetStream.write(text)
-        else:
-            targetStream.write(text.encode(self.encoding, 'replace'))
+        targetStream.write(text.encode(self.encoding, 'replace'))
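
The terminal-interface hunk removes the check for a text stream before writing: on Python 3, sys.stdout is a text stream that raises TypeError when handed encoded bytes, while raw byte streams still need the text pre-encoded. A sketch of the removed branching (the function name and the fallback_encoding parameter are illustrative):

    import sys

    def write_to_stream(targetStream, text, fallback_encoding='utf-8'):
        if hasattr(targetStream, 'encoding'):
            # text stream (e.g. sys.stdout on Python 3): pass str through unchanged
            targetStream.write(text)
        else:
            # raw byte stream: encode, replacing characters the codec cannot handle
            targetStream.write(text.encode(fallback_encoding, 'replace'))

    write_to_stream(sys.stdout, u'example output\n')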
diff --git a/tests/api_tests.py b/tests/api_tests.py
index c5b02b5..6863935 100644
--- a/tests/api_tests.py
+++ b/tests/api_tests.py
@@ -19,9 +19,9 @@
     def testObjectCreation(self):
         """Test that api.Request() creates an object with desired attributes"""
         req = api.Request(site=mysite, action="test", foo="", bar="test")
-        self.assertTrue(req)
+        self.assert_(req)
         self.assertEqual(req.site, mysite)
-        self.assertIn("foo", req.params)
+        self.assert_("foo" in req.params)
         self.assertEqual(req["bar"], "test")
         # test item assignment
         req["one"] = "1"
@@ -29,9 +29,8 @@
         # test compliance with dict interface
         # req.keys() should contain "action", "foo", "bar", "one"
         self.assertEqual(len(req.keys()), 4)
-        self.assertIn("test", req.values())
-        for item in req.items():
-            self.assertEqual(len(item), 2, item)
+        self.assert_("test" in req.values())
+        self.assert_(all(len(item) == 2 for item in req.items()))
 
 
 class TestPageGenerator(PywikibotTestCase):
@@ -71,7 +70,7 @@
         for page in results:
             self.assertEqual(type(page), pywikibot.Page)
             self.assertEqual(page.site, mysite)
-            self.assertIn(page.title(), titles)
+            self.assert_(page.title() in titles)
 
 
 class TestCachedRequest(unittest.TestCase):

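The test hunks above swap the Python 2.7/Python 3 assertion helpers back to the long-deprecated assert_ spelling. assertTrue, assertIn and per-item assertEqual run on both interpreter lines and report clearer failures; a small self-contained illustration (the test class and the params dict are examples, not the pywikibot test suite):

    import unittest

    class ExampleAssertions(unittest.TestCase):

        def test_dict_interface(self):
            params = {'action': 'test', 'foo': '', 'bar': 'test'}
            self.assertTrue(params)
            self.assertIn('foo', params)
            for item in params.items():
                # asserting each pair separately names the offending item on failure
                self.assertEqual(len(item), 2, item)

    if __name__ == '__main__':
        unittest.main()
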
-- 
To view, visit https://gerrit.wikimedia.org/r/127601
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: Icc9ddcd431a1c68702ade89075406eff6a15f59e
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Merlijn van Deen <valhall...@arctus.nl>
Gerrit-Reviewer: Merlijn van Deen <valhall...@arctus.nl>
Gerrit-Reviewer: jenkins-bot <>
