jenkins-bot merged this change.

View Change

Approvals: Dalba: Looks good to me, approved jenkins-bot: Verified
[cleanup] cleanup tests/[family_tests.py-http_tests.py]

- use single quotes for string literals
- remove leading "u" from strings
- adjust indentation to keep code lines under 79 characters
- use str.format(...) instead of modulo for type specifier arguments

Change-Id: Id456426c1d09a095bb71cfa2cfd73c6e93ee8ce2
---
M tests/family_tests.py
M tests/file_tests.py
M tests/flow_edit_tests.py
M tests/flow_tests.py
M tests/http_tests.py
5 files changed, 89 insertions(+), 67 deletions(-)

diff --git a/tests/family_tests.py b/tests/family_tests.py
index 560faf7..1538693 100644
--- a/tests/family_tests.py
+++ b/tests/family_tests.py
@@ -30,7 +30,8 @@
FAMILY_TYPEERROR_RE = (
'Family.obsolete not updatable; '
'use Family.interwiki_removals and Family.interwiki_replacements')
- FROZENSET_TYPEERROR_RE = '\'frozenset\' object does not support item assignment'
+ FROZENSET_TYPEERROR_RE = ("'frozenset' object does not support item "
+ 'assignment')
net = False

def test_family_load_valid(self):
@@ -240,9 +241,9 @@
family = Family.load(family)
for code in family.codes:
self.current_code = code
- url = ('%s://%s%s/$1' % (family.protocol(code),
- family.hostname(code),
- family.path(code)))
+ url = ('{}://{}{}/$1'.format(family.protocol(code),
+ family.hostname(code),
+ family.path(code)))
# Families can switch off if they want to be detected using URL
# this applies for test:test (there is test:wikipedia)
if family._ignore_from_url or code in family._ignore_from_url:
@@ -275,7 +276,8 @@
self.assertEqual(f.name, 'i18n')
self.assertDeprecationParts('pywikibot.site.Family',
'pywikibot.family.Family.load')
- self.assertDeprecationParts('fatal argument of pywikibot.family.Family.load')
+ self.assertDeprecationParts(
+ 'fatal argument of pywikibot.family.Family.load')

def test_old_site_family_function_invalid(self):
"""Test that an invalid family raised UnknownFamily exception."""
@@ -295,7 +297,8 @@
'unknown')
self.assertDeprecationParts('pywikibot.site.Family',
'pywikibot.family.Family.load')
- self.assertDeprecationParts('fatal argument of pywikibot.family.Family.load')
+ self.assertDeprecationParts(
+ 'fatal argument of pywikibot.family.Family.load')


if __name__ == '__main__': # pragma: no cover
diff --git a/tests/file_tests.py b/tests/file_tests.py
index c187654..be16886 100644
--- a/tests/file_tests.py
+++ b/tests/file_tests.py
@@ -99,7 +99,7 @@
self.assertFalse(enwp_file.fileIsShared())

page_doesnt_exist_exc_regex = re.escape(
- 'Page [[commons:%s]] doesn\'t exist.' % title)
+ "Page [[commons:{}]] doesn't exist.".format(title))
with self.assertRaisesRegex(
pywikibot.NoPage,
page_doesnt_exist_exc_regex):
@@ -116,7 +116,7 @@
commons_file.get()

def testOnBoth(self):
- """Test fileIsShared() on file page with both local and shared file."""
+ """Test fileIsShared() on file page with local and shared file."""
title = 'File:Pulsante spam.png'

commons = self.get_site('commons')
@@ -170,19 +170,19 @@
def test_file_info_with_no_page(self):
"""FilePage:latest_file_info raises NoPage for non existing pages."""
site = self.get_site()
- image = pywikibot.FilePage(site, u'File:NoPage')
+ image = pywikibot.FilePage(site, 'File:NoPage')
self.assertFalse(image.exists())

with self.assertRaisesRegex(
pywikibot.NoPage,
(r'Page \[\[(wikipedia\:|)test:File:NoPage\]\] '
- r'doesn\'t exist\.')):
+ r"doesn't exist\.")):
image = image.latest_file_info

def test_file_info_with_no_file(self):
- """FilePage:latest_file_info raises PagerelatedError if no file is present."""
+ """FilePage:latest_file_info raises PagerelatedError if no file."""
site = self.get_site()
- image = pywikibot.FilePage(site, u'File:Test with no image')
+ image = pywikibot.FilePage(site, 'File:Test with no image')
self.assertTrue(image.exists())
with self.assertRaisesRegex(
pywikibot.PageRelatedError,
@@ -227,9 +227,10 @@
"""Get File thumburl from width."""
self.assertTrue(self.image.exists())
# url_param has no precedence over height/width.
- self.assertEqual(self.image.get_file_url(url_width=100, url_param='1000px'),
- 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
- 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
+ self.assertEqual(
+ self.image.get_file_url(url_width=100, url_param='1000px'),
+ 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
+ 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
self.assertEqual(self.image.latest_file_info.thumbwidth, 100)
self.assertEqual(self.image.latest_file_info.thumbheight, 133)

@@ -237,9 +238,10 @@
"""Get File thumburl from height."""
self.assertTrue(self.image.exists())
# url_param has no precedence over height/width.
- self.assertEqual(self.image.get_file_url(url_height=100, url_param='1000px'),
- 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
- 'd/d3/Albert_Einstein_Head.jpg/75px-Albert_Einstein_Head.jpg')
+ self.assertEqual(
+ self.image.get_file_url(url_height=100, url_param='1000px'),
+ 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
+ 'd/d3/Albert_Einstein_Head.jpg/75px-Albert_Einstein_Head.jpg')
self.assertEqual(self.image.latest_file_info.thumbwidth, 75)
self.assertEqual(self.image.latest_file_info.thumbheight, 100)

@@ -247,9 +249,10 @@
"""Get File thumburl from height."""
self.assertTrue(self.image.exists())
# url_param has no precedence over height/width.
- self.assertEqual(self.image.get_file_url(url_param='100px'),
- 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
- 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
+ self.assertEqual(
+ self.image.get_file_url(url_param='100px'),
+ 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
+ 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
self.assertEqual(self.image.latest_file_info.thumbwidth, 100)
self.assertEqual(self.image.latest_file_info.thumbheight, 133)

@@ -301,13 +304,14 @@

def test_not_existing_download(self):
"""Test not existing download."""
- page = pywikibot.FilePage(self.site, 'File:Albert Einstein.jpg_notexisting')
+ page = pywikibot.FilePage(self.site,
+ 'File:Albert Einstein.jpg_notexisting')
filename = join_images_path('Albert Einstein.jpg')

with self.assertRaisesRegex(
pywikibot.NoPage,
re.escape('Page [[commons:File:Albert Einstein.jpg '
- 'notexisting]] doesn\'t exist.')):
+ "notexisting]] doesn't exist.")):
page.download(filename)


diff --git a/tests/flow_edit_tests.py b/tests/flow_edit_tests.py
index d486ee0..bc60a16 100644
--- a/tests/flow_edit_tests.py
+++ b/tests/flow_edit_tests.py
@@ -75,7 +75,8 @@
def test_reply_to_topic_root(self):
"""Test replying to the topic's root post directly."""
# Setup
- content = "I am a reply to the topic's root post. Replying still works!"
+ content = ("I am a reply to the topic's root post. "
+ 'Replying still works!')
topic = Topic(self.site, self._topic_title)
topic_root = topic.root
old_replies = topic_root.replies(force=True)[:]
@@ -114,7 +115,8 @@
"""Test replying to a previous reply to a topic."""
# Setup
first_content = 'I am a reply to the topic with my own replies. Great!'
- second_content = 'I am a nested reply. This conversation is getting pretty good!'
+ second_content = ('I am a nested reply. This conversation is '
+ 'getting pretty good!')
topic = Topic(self.site, self._topic_title)
topic_root = topic.root
# First reply
@@ -145,7 +147,8 @@
# Test reply list in first reply
# Broken due to current Flow reply structure (T105438)
# new_nested_replies = first_reply_post.replies(force=True)
- # self.assertEqual(len(new_nested_replies), len(old_nested_replies) + 1)
+ # self.assertEqual(len(new_nested_replies),
+ # len(old_nested_replies) + 1)

# Current test for nested reply list
self.assertListEqual(old_nested_replies, [])
diff --git a/tests/flow_tests.py b/tests/flow_tests.py
index 2c8ddb3..4b8df96 100644
--- a/tests/flow_tests.py
+++ b/tests/flow_tests.py
@@ -169,7 +169,8 @@
real_topic = Topic(self.site, 'Topic:Slbktgav46omarsd')
fake_topic = Topic(self.site, 'Topic:Abcdefgh12345678')
# Topic.from_topiclist_data
- self.assertRaises(TypeError, Topic.from_topiclist_data, self.site, '', {})
+ self.assertRaises(TypeError, Topic.from_topiclist_data, self.site,
+ '', {})
self.assertRaises(TypeError, Topic.from_topiclist_data, board, 521, {})
self.assertRaises(TypeError, Topic.from_topiclist_data, board,
'slbktgav46omarsd', [0, 1, 2])
diff --git a/tests/http_tests.py b/tests/http_tests.py
index bb3d3e0..3f8d4ad 100644
--- a/tests/http_tests.py
+++ b/tests/http_tests.py
@@ -143,9 +143,9 @@

def test_https_cert_error(self):
"""Test if http.fetch respects disable_ssl_certificate_validation."""
- self.assertRaisesRegex(pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE,
- http.fetch,
- uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
+ self.assertRaisesRegex(
+ pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE, http.fetch,
+ uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
http.session.close() # clear the connection

with warnings.catch_warnings(record=True) as warning_log:
@@ -158,9 +158,9 @@
http.session.close() # clear the connection

# Verify that it now fails again
- self.assertRaisesRegex(pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE,
- http.fetch,
- uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
+ self.assertRaisesRegex(
+ pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE, http.fetch,
+ uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
http.session.close() # clear the connection

# Verify that the warning occurred
@@ -202,10 +202,10 @@
def test_invalid_scheme(self):
"""Test invalid scheme."""
# A InvalidSchema is raised within requests
- self.assertRaisesRegex(requests.exceptions.InvalidSchema,
- 'No connection adapters were found for \'invalid://url\'',
- http.fetch,
- uri='invalid://url')
+ self.assertRaisesRegex(
+ requests.exceptions.InvalidSchema,
+ "No connection adapters were found for 'invalid://url'",
+ http.fetch, uri='invalid://url')

def test_follow_redirects(self):
"""Test follow 301 redirects correctly."""
@@ -256,7 +256,7 @@
self.assertEqual("'", http.user_agent_username("'"))
self.assertEqual('foo_bar', http.user_agent_username('foo bar'))

- self.assertEqual('%E2%81%82', http.user_agent_username(u'⁂'))
+ self.assertEqual('%E2%81%82', http.user_agent_username('⁂'))

def test_version(self):
"""Test http.user_agent {version}."""
@@ -266,8 +266,9 @@
http.user_agent(format_string='version does not appear')
self.assertIsNone(pywikibot.version.cache)
pywikibot.version.cache = {'rev': 'dummy'}
- self.assertEqual(http.user_agent(format_string='{version} does appear'),
- 'dummy does appear')
+ self.assertEqual(
+ http.user_agent(format_string='{version} does appear'),
+ 'dummy does appear')
self.assertIsNotNone(pywikibot.version.cache)
finally:
pywikibot.version.cache = old_cache
@@ -283,8 +284,9 @@
"""Set up unit test."""
super(DefaultUserAgentTestCase, self).setUp()
self.orig_format = config.user_agent_format
- config.user_agent_format = ('{script_product} ({script_comments}) {pwb} '
- '({revision}) {http_backend} {python}')
+ config.user_agent_format = ('{script_product} ({script_comments}) '
+ '{pwb} ({revision}) {http_backend} '
+ '{python}')

def tearDown(self):
"""Tear down unit test."""
@@ -308,9 +310,9 @@

"""Test the generation of fake user agents.

- If the method cannot import either browseragents or fake_useragent, the
- default user agent will be returned, causing tests to fail. Therefore tests
- will skip if neither is present.
+ If the method cannot import either browseragents or fake_useragent,
+ the default user agent will be returned, causing tests to fail.
+ Therefore tests will skip if neither is present.
"""

net = False
@@ -336,32 +338,39 @@

def setUp(self):
"""Set up the unit test."""
- self.orig_fake_user_agent_exceptions = config.fake_user_agent_exceptions
+ self.orig_fake_user_agent_exceptions = (
+ config.fake_user_agent_exceptions)
super(LiveFakeUserAgentTestCase, self).setUp()

def tearDown(self):
"""Tear down unit test."""
- config.fake_user_agent_exceptions = self.orig_fake_user_agent_exceptions
+ config.fake_user_agent_exceptions = (
+ self.orig_fake_user_agent_exceptions)
super(LiveFakeUserAgentTestCase, self).tearDown()

def _test_fetch_use_fake_user_agent(self):
"""Test `use_fake_user_agent` argument of http.fetch."""
# Existing headers
r = http.fetch(
- self.get_httpbin_url('/status/200'), headers={'user-agent': 'EXISTING'})
+ self.get_httpbin_url('/status/200'),
+ headers={'user-agent': 'EXISTING'})
self.assertEqual(r.headers['user-agent'], 'EXISTING')

# Argument value changes
- r = http.fetch(self.get_httpbin_url('/status/200'), use_fake_user_agent=True)
+ r = http.fetch(self.get_httpbin_url('/status/200'),
+ use_fake_user_agent=True)
self.assertNotEqual(r.headers['user-agent'], http.user_agent())
- r = http.fetch(self.get_httpbin_url('/status/200'), use_fake_user_agent=False)
+ r = http.fetch(self.get_httpbin_url('/status/200'),
+ use_fake_user_agent=False)
self.assertEqual(r.headers['user-agent'], http.user_agent())
r = http.fetch(
- self.get_httpbin_url('/status/200'), use_fake_user_agent='ARBITRARY')
+ self.get_httpbin_url('/status/200'),
+ use_fake_user_agent='ARBITRARY')
self.assertEqual(r.headers['user-agent'], 'ARBITRARY')

# Manually overridden domains
- config.fake_user_agent_exceptions = {self.get_httpbin_hostname(): 'OVERRIDDEN'}
+ config.fake_user_agent_exceptions = {
+ self.get_httpbin_hostname(): 'OVERRIDDEN'}
r = http.fetch(
self.get_httpbin_url('/status/200'), use_fake_user_agent=False)
self.assertEqual(r.headers['user-agent'], 'OVERRIDDEN')
@@ -396,7 +405,8 @@
def _test_fake_user_agent_randomness(self):
"""Test if user agent returns are randomized."""
config.fake_user_agent = True
- self.assertNotEqual(http.get_fake_user_agent(), http.get_fake_user_agent())
+ self.assertNotEqual(http.get_fake_user_agent(),
+ http.get_fake_user_agent())

def _test_config_settings(self):
"""Test if method honours configuration toggle."""
@@ -429,10 +439,10 @@

"""Test that HttpRequest correct handles the charsets given."""

- CODEC_CANT_DECODE_RE = 'codec can\'t decode byte'
+ CODEC_CANT_DECODE_RE = "codec can't decode byte"
net = False

- STR = u'äöü'
+ STR = 'äöü'
LATIN1_BYTES = STR.encode('latin1')
UTF8_BYTES = STR.encode('utf8')

@@ -516,8 +526,9 @@
req = CharsetTestCase._create_request()
resp = requests.Response()
req._data = resp
- resp._content = '<?xml version="1.0" encoding="UTF-8" someparam="ignored"?>'.encode(
- 'utf-8')
+ resp._content = (
+ '<?xml version="1.0" encoding="UTF-8" someparam="ignored"?>'
+ .encode('utf-8'))
resp.headers = {'content-type': 'text/xml'}
self.assertIsNone(req.charset)
self.assertEqual('UTF-8', req.encoding)
@@ -639,8 +650,8 @@
"""
Test the query string parameter of request methods.

- The /get endpoint of httpbin returns JSON that can include an 'args' key with
- urldecoded query string parameters.
+ The /get endpoint of httpbin returns JSON that can include an
+ 'args' key with urldecoded query string parameters.
"""

def setUp(self):
@@ -661,10 +672,10 @@

def test_unencoded_params(self):
"""
- Test fetch method with unencoded parameters, which should be encoded internally.
+ Test fetch method with unencoded parameters to be encoded internally.

- HTTPBin returns the args in their urldecoded form, so what we put in should be
- the same as what we get out.
+ HTTPBin returns the args in their urldecoded form, so what we put in
+ should be the same as what we get out.
"""
r = http.fetch(uri=self.url, params={'fish&chips': 'delicious'})
if r.status == 503: # T203637
@@ -677,10 +688,10 @@

def test_encoded_params(self):
"""
- Test fetch method with encoded parameters, which should be re-encoded internally.
+ Test fetch method with encoded parameters to be re-encoded internally.

- HTTPBin returns the args in their urldecoded form, so what we put in should be
- the same as what we get out.
+ HTTPBin returns the args in their urldecoded form, so what we put in
+ should be the same as what we get out.
"""
r = http.fetch(uri=self.url, params={'fish%26chips': 'delicious'})
if r.status == 503: # T203637
@@ -693,10 +704,10 @@


class DataBodyParameterTestCase(HttpbinTestCase):
- """Test that the data and body parameters of fetch/request methods are equivalent."""
+ """Test data and body params of fetch/request methods are equivalent."""

def test_fetch(self):
- """Test that using the data parameter and body parameter produce same results."""
+ """Test that using the data and body params produce same results."""
r_data = http.fetch(uri=self.get_httpbin_url('/post'), method='POST',
data={'fish&chips': 'delicious'})
r_body = http.fetch(uri=self.get_httpbin_url('/post'), method='POST',

To view, visit change 463518. To unsubscribe, or for help writing mail filters, visit settings.

Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: Id456426c1d09a095bb71cfa2cfd73c6e93ee8ce2
Gerrit-Change-Number: 463518
Gerrit-PatchSet: 2
Gerrit-Owner: D3r1ck01 <alangiderick@gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki@gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb@gmail.com>
Gerrit-Reviewer: jenkins-bot (75)